#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Test suite for the nlptools web services
#
# usage:
# python3 run_test_NLPtools_EZ.py <uri>
#
"""
@author: stephane schneider
"""
import requests
import sys
outputs = ['doc', 'json']
languages = ['en', 'fr']
https_proxy = "http://proxyout.inist.fr:8080/"
http_proxy = "http://proxyout.inist.fr:8080/"
proxyDict = {
    "http": http_proxy,
    "https": https_proxy,
}
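# NOTE: the proxy settings above are only applied if the `proxies=proxyDict`
# argument is uncommented in the requests.post() call inside the loop below.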
# set uri to the base URL of the dev or prod instance
if len(sys.argv) < 2:
    sys.exit("usage: python3 run_test_NLPtools_EZ.py <uri>")
uri = sys.argv[1]
# common request headers and query parameters
headers = {'content-type': 'application/octet-stream'}
query = {"indent": "true"}
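# sample payloads: JSON arrays of {"id": ..., "value": ...} records
# (two English documents and one French document)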
doc_en="""[
{"id":"PhnlUo_d6LoJPLN3YUjh5qBLc","value":"Non-local effects by homogenization or 3D–1D dimension reduction in elastic materials reinforced by stiff fibers.We first consider an elastic thin heterogeneous cylinder of radius of order ε: the interior of the cylinder is occupied by a stiff material (fiber) that is surrounded by a soft material (matrix). By assuming that the elasticity tensor of the fiber does not scale with ε and that of the matrix scales with ε2, we prove that the one dimensional model is a nonlocal system.We then consider a reference configuration domain filled out by periodically distributed rods similar to those described above. We prove that the homogenized model is a second order nonlocal problem.In particular, we show that the homogenization problem is directly connected to the 3D–1D dimensional reduction problem."}
]"""
doc_en1="""[
{"id": "paDxcIPh8aDRAi18YAhjivtuE",
"value": "Dynamics of fully coupled rotators with unimodal and bimodal frequencies distribution."}
]"""
doc_fr="""[
{"id":"xBKeVishRTsAqsdsqJn5YWo0HpuD","value":"Le déplacement chimique des carbones en phase gazeuse et en fonction de la concentration dans dix-neuf solvants dans le but de séparer les différents termes de constante dus aux interactions"}
]"""
# data (file-based variant, kept for reference):
# -- does not work
# doc = "../data/data.json"
# test_files = { 'file': open(doc, 'rb')}
# response = requests.post(url , params = query, files = test_files , stream=True, headers=headers)
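# A possible fix for the file-based variant above (untested sketch): read the
# JSON file and send its contents as the raw request body, mirroring the
# working calls below; the path ../data/data.json and the chosen engine/URL
# are assumptions.
# with open("../data/data.json", "rb") as fh:
#     payload = fh.read()
# file_url = "{}/v1/en/postagger/analyze".format(uri)
# response = requests.post(file_url, params=query, data=payload,
#                          stream=True, headers=headers)
# print(response.status_code)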
# available engines: {stemmer, termMatcher, ner, NPchunker, POStagger, gazetteer, NPchunkerDP, lefff_tagger}
for language in languages:
    if language == "en":
        engines = ["postagger", "stemmer", "npchunker", "npchunkerdp", "termmatcher", "ner"]
        doc = doc_en1
    else:
        engines = ["postagger", "stemmer"]
        doc = doc_fr
    for engine in engines:
        for output in outputs:
            url = "{}/v1/{}/{}/analyze".format(uri, language, engine)
            query['output'] = output
            response = requests.post(url, params=query, data=doc.encode('utf-8'),
                                     stream=True, headers=headers)  # proxies=proxyDict
            print("\n------------ {} ({} - {}) :".format(engine, output, language))
            print(url)
            print("REQUEST STATUS : {}".format(response.status_code))
            print("HEADERS : {}\n".format(response.headers))
            if response.status_code == 200:
                print(response.text)
            else:
                print("ERROR : request failed (status {})".format(response.status_code))