diff --git a/.gitignore b/.gitignore
index d19c7c3..5fff46e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -59,3 +59,4 @@ examples/
 .idea
 out.tar.gz
+tests-results/
diff --git a/Makefile b/Makefile
index 374a342..04c6ece 100644
--- a/Makefile
+++ b/Makefile
@@ -40,14 +40,27 @@
 	@git tag | grep "$(filter-out $@,$(MAKECMDGOALS))" | sort -V | tail -1
 
 examples: ## Launch examples for a directory, e.g. make examples mapping-tools
-	@httpyac --all "$(filter-out $@,$(MAKECMDGOALS))/examples.http"
+	@npx httpyac --all "$(filter-out $@,$(MAKECMDGOALS))/examples.http"
 
 example-metadata: ## Give an example metadata, from examples.http, e.g. make example-metadata affiliations-tools 3
 	@./bin/generate-example-metadata.mjs $(filter-out $@,$(MAKECMDGOALS))
 
-generate-example-tests: ## Convert examples.http to test.hurl, e.g. make generate-example-tests affiliations-tools > affiliations-tools/tests.hurl
+generate-example-tests: ## Convert examples.http to tests.hurl, e.g. make generate-example-tests affiliations-tools
 	@./bin/generate-example-tests.mjs $(filter-out $@,$(MAKECMDGOALS)) > $(filter-out $@,$(MAKECMDGOALS))/tests.hurl
 
+test: ## Launch tests for a directory, e.g. make test mapping-tools
+	@hurl --test "$(filter-out $@,$(MAKECMDGOALS))/tests.hurl"
+
+test-all: ## Launch tests for all directories
+	@hurl --test */tests.hurl
+
+test-report: ## Run tests for a directory and add the results to the HTML report, e.g. make test-report affiliations-tools
+	@hurl --test "$(filter-out $@,$(MAKECMDGOALS))/tests.hurl" --report-html tests-results
+
+test-report-all: ## Launch tests for all directories and rebuild the HTML report
+	@rm -rf tests-results
+	@hurl --test */tests.hurl --report-html tests-results
+
 # Remove error message about lacking rules for targets' parameters
 %:
 	@:
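The new Makefile targets are thin wrappers around the hurl CLI. A minimal usage sketch, assuming hurl is installed and taking hal-classifier (which gains a tests.hurl in this patch) as the example directory:

```bash
# Run the Hurl test suite of a single service directory
make test hal-classifier

# Run every */tests.hurl in the repository
make test-all

# Rebuild the HTML report for all directories
# (written to tests-results/, which is now git-ignored)
make test-report-all
```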
diff --git a/data-computer/swagger.json b/data-computer/swagger.json
index e85b6ff..147d8ae 100644
--- a/data-computer/swagger.json
+++ b/data-computer/swagger.json
@@ -2,7 +2,7 @@
     "info": {
         "title": "data-computer - Calculs sur fichier corpus compressé",
         "summary": "Algorithmes de calculs sur un corpus compressé",
-        "version": "1.6.1",
+        "version": "2.0.0",
         "termsOfService": "https://objectif-tdm.inist.fr/",
         "contact": {
             "name": "Inist-CNRS",
@@ -14,9 +14,13 @@
             "x-comment": "Will be automatically completed by the ezs server."
         },
         {
-            "url": "http://vitdmservices.intra.inist.fr:49303/",
-            "description": "Next version for production",
+            "url": "http://vptdmjobs.intra.inist.fr:49154/",
+            "description": "Production release",
             "x-profil": "Standard"
+        },
+        {
+            "url": "http://vitdmservices.intra.inist.fr:49303/",
+            "description": "For internal tests"
         }
     ],
     "tags": [
@@ -29,4 +33,4 @@
             }
         }
     ]
-}
\ No newline at end of file
+}
diff --git a/data-computer/v1/base-line.ini b/data-computer/v1/base-line.ini
index 11ef65a..f2ed737 100644
--- a/data-computer/v1/base-line.ini
+++ b/data-computer/v1/base-line.ini
@@ -14,74 +14,41 @@
 post.parameters.0.in = query
 post.parameters.0.name = indent
 post.parameters.0.schema.type = boolean
+post.parameters.1.description = URL pour signaler que le traitement est terminé
+post.parameters.1.in = header
+post.parameters.1.name = X-Webhook-Success
+post.parameters.1.schema.type = string
+post.parameters.1.schema.format = uri
+post.parameters.1.required = false
+post.parameters.2.description = URL pour signaler que le traitement a échoué
+post.parameters.2.in = header
+post.parameters.2.name = X-Webhook-Failure
+post.parameters.2.schema.type = string
+post.parameters.2.schema.format = uri
+post.parameters.2.required = false
 
 [use]
 plugin = basics
 plugin = analytics
 
-# Step 0 (générique) : Lire le fichier standard tar.gz
-[TARExtract]
-compress = true
-path = */*.json
-
-# Step 1 (générique) : Créer un identifiant unique pour le corpus reçu
-[singleton]
-[singleton/identify]
-[singleton/env]
-path = generator
-value = base-line
-path = identifier
-value = get('uri').replace('uid:/', '')
+# Step 1 (générique): Charger le fichier corpus
+[delegate]
+file = charger.cfg
 
 # Step 2 (générique): Traiter de manière asynchnore les items reçus
 [fork]
 standalone = true
+logger = logger.cfg
 
-# Step 2.1 (générique): Lancer un calcul sur tous les items reçus
-[fork/delegate]
-
-# Step 2.1.1 (spécifique): S'assurer d'avoir un tableau
-[fork/delegate/exchange]
+# Step 2.1 (spécifique): Lancer un calcul sur tous les items reçus
+[fork/exchange]
 value = self().omit('uri')
 
-# Step 2.2 (générique): Création d'un fichier résulat standard
-[fork/TARDump]
-compress = true
-manifest = fix({version: '1'})
-manifest = fix({identifier: env('identifier')})
-manifest = fix({generator: env('generator')})
+# Step 2.2 (générique): Enregistrer le résultat et signaler que le traitement est fini
+[fork/delegate]
+file = recorder.cfg
 
-# Step 2.3 (générique): Sauvegarder sur disque le résulat
-[fork/FILESave]
-location = /tmp/retrieve
-identifier = env('identifier')
-jsonl = false
-compress = false
-
-# Step 2.4 (générique): Signaler le fin du traitement via un appel à un webhook (si il a été précisé)
-[fork/swing]
-test = env('headers.x-hook').startsWith('http')
-
-# Step 2.4.1 (générique): Séléctionner les informations à envoyer au webhook
-[fork/swing/replace]
-path = body
-value = self().pick(['size', 'atime', 'mtime', 'ctime']).set('identifier', env('identifier')).set('generator', env('generator')).set('state', 'ready')
-
-# Step 2.4.2 (générique): Envoyer la requète HTTP
-[fork/swing/URLFetch]
-url = env('headers.x-hook')
-path = body
-headers = Content-Type:application/json
-
-# Step 2.4.3 (faculatif) : Ajouter une trace dans log
-[fork/swing/debug]
-text = webhook triggered
-
-# Step 2.5 (faculatif) : Ajouter une trace dans log
-[fork/debug]
-text = process completed
-
-# Step 3 : Renvoyer immédiatement seul élément indiquant comment récupérer le résulat quand il sera prêt
+# Step 3 : Renvoyer immédiatement un seul élément indiquant comment récupérer le résultat quand il sera prêt
 [shift]
 [replace]
 path = id
diff --git a/data-computer/v1/charger.cfg b/data-computer/v1/charger.cfg
new file mode 100644
index 0000000..6deae0b
--- /dev/null
+++ b/data-computer/v1/charger.cfg
@@ -0,0 +1,18 @@
+[use]
+plugin = basics
+plugin = analytics
+
+# Step 0 (générique) : Lire le fichier standard tar.gz
+[TARExtract]
+compress = true
+path = */*.json
+
+# Step 1 (générique) : Créer un identifiant unique pour le corpus reçu
+[singleton]
+[singleton/identify]
+[singleton/env]
+path = generator
+value = base-line
+path = identifier
+value = get('uri').replace('uid:/', '')
+
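The data-computer scripts now advertise two webhook headers in place of the former x-hook header. A hedged sketch of a client call: the /v1/base-line route is inferred from the .ini file name, the gzip content type is an assumption, and the callback URLs are placeholders; only the server URL (from swagger.json) and the header names come from the patch:

```bash
# Submit a compressed corpus and register success/failure callbacks
curl -X POST "http://vptdmjobs.intra.inist.fr:49154/v1/base-line?indent=true" \
  -H "Content-Type: application/gzip" \
  -H "X-Webhook-Success: https://example.org/hooks/done" \
  -H "X-Webhook-Failure: https://example.org/hooks/failed" \
  --data-binary @corpus.tar.gz
```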
diff --git a/data-computer/v1/graph-segment.ini b/data-computer/v1/graph-segment.ini
index b0b1aed..d9990a9 100644
--- a/data-computer/v1/graph-segment.ini
+++ b/data-computer/v1/graph-segment.ini
@@ -14,39 +14,33 @@
 post.parameters.0.in = query
 post.parameters.0.name = indent
 post.parameters.0.schema.type = boolean
+post.parameters.1.description = URL pour signaler que le traitement est terminé
+post.parameters.1.in = header
+post.parameters.1.name = X-Webhook-Success
+post.parameters.1.schema.type = string
+post.parameters.1.schema.format = uri
+post.parameters.1.required = false
+post.parameters.2.description = URL pour signaler que le traitement a échoué
+post.parameters.2.in = header
+post.parameters.2.name = X-Webhook-Failure
+post.parameters.2.schema.type = string
+post.parameters.2.schema.format = uri
+post.parameters.2.required = false
 
 [use]
 plugin = basics
 plugin = analytics
 
-# Step 0 (générique) : Lire le fichier standard tar.gz
-[TARExtract]
-compress = true
-path = */*.json
-
-# Step 1 (générique) : Créer un identifiant unique pour le corpus reçu
-[singleton]
-# Step 1.1 (générique) : Controle du premier objet en considérant que les suivants seront structrellement identiques
-[singleton/validate]
-path = id
-rule = required|string
-
-path = value
-rule = required|array
-
-# Step 1.2 (générique) : Créer un identifiant unique pour le corpus reçu
-[singleton/identify]
-[singleton/env]
-path = generator
-value = graph-segment
-path = identifier
-value = get('uri').replace('uid:/', '')
+# Step 1 (générique): Charger le fichier corpus
+[delegate]
+file = charger.cfg
 
 # Step 2 (générique): Traiter de manière asynchnore les items reçus
 [fork]
 standalone = true
+logger = logger.cfg
 
-# Step 2.1 (générique): Lancer un calcul sur tous les items reçus
+# Step 2.1 (spécifique): Lancer un calcul sur tous les items reçus
 [fork/delegate]
 
 # Step 2.1.1 (spécifique): S'assurer d'avoir un tableau
@@ -79,44 +73,13 @@
 path = origin
 value = get('value').uniq()
 
-# Step 2.2 (générique): Création d'un fichier résulat standard
-[fork/TARDump]
-compress = true
-manifest = fix({version: '1'})
-manifest = fix({identifier: env('identifier')})
-manifest = fix({generator: env('generator')})
+[fork/transit]
 
-# Step 2.3 (générique): Sauvegarder sur disque le résulat
-[fork/FILESave]
-location = /tmp/retrieve
-identifier = env('identifier')
-jsonl = false
-compress = false
+# Step 2.2 (générique): Enregistrer le résultat et signaler que le traitement est fini
+[fork/delegate]
+file = recorder.cfg
 
-# Step 2.4 (générique): Signaler le fin du traitement via un appel à un webhook (si il a été précisé)
-[fork/swing]
-test = env('headers.x-hook').startsWith('http')
-
-# Step 2.4.1 (générique): Séléctionner les informations à envoyer au webhook
-[fork/swing/replace]
-path = body
-value = self().pick(['size', 'atime', 'mtime', 'ctime']).set('identifier', env('identifier')).set('generator', env('generator')).set('state', 'ready')
-
-# Step 2.4.2 (générique): Envoyer la requète HTTP
-[fork/swing/URLFetch]
-url = env('headers.x-hook')
-path = body
-headers = Content-Type:application/json
-
-# Step 2.4.3 (faculatif) : Ajouter une trace dans log
-[fork/swing/debug]
-text = webhook triggered
-
-# Step 2.5 (faculatif) : Ajouter une trace dans log
-[fork/debug]
-text = process completed
-
-# Step 3 : Renvoyer immédiatement seul élément indiquant comment récupérer le résulat quand il sera prêt
+# Step 3 : Renvoyer immédiatement un seul élément indiquant comment récupérer le résultat quand il sera prêt
 [shift]
 [replace]
 path = id
diff --git a/data-computer/v1/lda.ini b/data-computer/v1/lda.ini
index 78f9ac0..a767074 100644
--- a/data-computer/v1/lda.ini
+++ b/data-computer/v1/lda.ini
@@ -14,73 +14,42 @@
 post.parameters.0.in = query
 post.parameters.0.name = indent
 post.parameters.0.schema.type = boolean
-
+post.parameters.1.description = URL pour signaler que le traitement est terminé
+post.parameters.1.in = header
+post.parameters.1.name = X-Webhook-Success
+post.parameters.1.schema.type = string
+post.parameters.1.schema.format = uri
+post.parameters.1.required = false
+post.parameters.2.description = URL pour signaler que le traitement a échoué
+post.parameters.2.in = header
+post.parameters.2.name = X-Webhook-Failure
+post.parameters.2.schema.type = string
+post.parameters.2.schema.format = uri
+post.parameters.2.required = false
 
 [use]
 plugin = basics
 plugin = analytics
-plugin = spawn
 
-# Step 0 (générique) : Lire le fichier standard tar.gz
-[TARExtract]
-compress = true
-
-# Step 1 (générique) : Créer un identifiant unique pour le corpus reçu
-[singleton]
-[singleton/identify]
-[singleton/env]
-path = generator
-value = lda
-path = identifier
-value = get('uri').replace('uid:/', '')
+# Step 1 (générique): Charger le fichier corpus
+[delegate]
+file = charger.cfg
 
 # Step 2 (générique): Traiter de manière asynchnore les items reçus
 [fork]
 standalone = true
+logger = logger.cfg
 
-# Step 2.1 (générique): Lancer un calcul sur tous les items reçus
+# Step 2.1 (spécifique): Lancer un calcul sur tous les items reçus
 [fork/exec]
 # command should be executable !
 command = ./v1/lda.py
 
-# Step 2.2 (générique): Création d'un fichier résulat standard
-[fork/TARDump]
-compress = true
-manifest = fix({version: '1'})
-manifest = fix({identifier: env('identifier')})
-manifest = fix({generator: env('generator')})
+# Step 2.2 (générique): Enregistrer le résultat et signaler que le traitement est fini
+[fork/delegate]
+file = recorder.cfg
 
-# Step 2.3 (générique): Sauvegarder sur disque le résulat
-[fork/FILESave]
-location = /tmp/retrieve
-identifier = env('identifier')
-jsonl = false
-compress = false
-
-# Step 2.4 (générique): Signaler le fin du traitement via un appel à un webhook (si il a été précisé)
-[fork/swing]
-test = env('headers.x-hook').startsWith('http')
-
-# Step 2.4.1 (générique): Séléctionner les informations à envoyer au webhook
-[fork/swing/replace]
-path = body
-value = self().pick(['size', 'atime', 'mtime', 'ctime']).set('identifier', env('identifier')).set('generator', env('generator')).set('state', 'ready')
-
-# Step 2.4.2 (générique): Envoyer la requète HTTP
-[fork/swing/URLFetch]
-url = env('headers.x-hook')
-path = body
-headers = Content-Type:application/json
-
-# Step 2.4.3 (faculatif) : Ajouter une trace dans log
-[fork/swing/debug]
-text = webhook triggered
-
-# Step 2.5 (faculatif) : Ajouter une trace dans log
-[fork/debug]
-text = process completed
-
-# Step 3 : Renvoyer immédiatement seul élément indiquant comment récupérer le résulat quand il sera prêt
+# Step 3 : Renvoyer immédiatement un seul élément indiquant comment récupérer le résultat quand il sera prêt
 [shift]
 [replace]
 path = id
diff --git a/data-computer/v1/logger.cfg b/data-computer/v1/logger.cfg
new file mode 100644
index 0000000..49db973
--- /dev/null
+++ b/data-computer/v1/logger.cfg
@@ -0,0 +1,44 @@
+; [use]
+plugin = basics
+plugin = analytics
+
+# On ne garde que la première erreur déclenchée
+[shift]
+
+[debug]
+text = Error trapped
+
+[assign]
+path = body.error.type
+value = get('type')
+
+path = body.error.scope
+value = get('scope')
+
+path = body.error.message
+value = get('message')
+
+path = env
+value = env()
+
+[swing]
+test = env('headers.x-webhook-failure').startsWith('http')
+
+[swing/URLFetch]
+url = env('headers.x-webhook-failure').trim()
+path = body
+headers = Content-Type:application/json
+target = result
+
+# On enregistre uniquement quelques informations (à supprimer pour avoir la trace complète)
+[exchange]
+value = get('body')
+
+[FILESave]
+location = /tmp/retrieve
+identifier = env('identifier')
+jsonl = true
+compress = false
+
+[debug]
+text = Error was saved
diff --git a/data-computer/v1/recorder.cfg b/data-computer/v1/recorder.cfg
new file mode 100644
index 0000000..f768491
--- /dev/null
+++ b/data-computer/v1/recorder.cfg
@@ -0,0 +1,43 @@
+[use]
+plugin = basics
+plugin = analytics
+
+# Step 2.2 (générique): Création d'un fichier résultat standard
+[TARDump]
+compress = true
+manifest = fix({version: '1'})
+manifest = fix({identifier: env('identifier')})
+manifest = fix({generator: env('generator')})
+
+# Step 2.3 (générique): Sauvegarder sur disque le résultat
+[FILESave]
+location = /tmp/retrieve
+identifier = env('identifier')
+jsonl = false
+compress = false
+
+# Step 2.4 (générique): Signaler la fin du traitement via un appel à un webhook (s'il a été précisé)
+[swing]
+test = env('headers.x-webhook-success').startsWith('http')
+
+# Step 2.4.1 (générique): Sélectionner les informations à envoyer au webhook
+[swing/replace]
+path = body
+value = self().pick(['size', 'atime', 'mtime', 'ctime']).set('identifier', env('identifier')).set('generator', env('generator')).set('state', 'ready')
+
+# Step 2.4.2 (générique): Envoyer la requête HTTP
+[swing/URLFetch]
+url = env('headers.x-webhook-success').trim()
+path = body
+headers = Content-Type:application/json
+retries = 1
+
+# Step 2.4.3 (facultatif) : Ajouter une trace dans le log
+[swing/debug]
+text = webhook triggered
+
+# Step 2.5 (facultatif) : Ajouter une trace dans le log
+[debug]
+text = process completed
+
+
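recorder.cfg fixes the shape of the success notification: the pick()/set() chain above keeps the saved file's metadata and adds the processing context. A sketch of the JSON body POSTed to the X-Webhook-Success URL; the field names come from the configuration, but every value below is invented for illustration:

```bash
# Hypothetical success payload (values are illustrative only)
cat <<'EOF'
{
  "size": 123456,
  "atime": "2024-01-01T10:00:00.000Z",
  "mtime": "2024-01-01T10:00:00.000Z",
  "ctime": "2024-01-01T10:00:00.000Z",
  "identifier": "uDGHbH",
  "generator": "base-line",
  "state": "ready"
}
EOF
```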
diff --git a/data-computer/v1/stopwords/fr.json b/data-computer/v1/stopwords/fr.json
deleted file mode 100644
index e86310d..0000000
--- a/data-computer/v1/stopwords/fr.json
+++ /dev/null
@@ -1 +0,0 @@
-["abord", "afin", "aie", "ainsi", "allaient", "allo", "allo", "allons", "apres", "assez", "attendu", "aucun", "aucune", "aujourd", "auquel", "aura", "auront", "aussi", "autre", "autres", "aux", "auxquelles", "auxquels", "avaient", "avais", "avait", "avant", "avec", "avoir", "ayant", "bah", "beaucoup", "bien", "bigre", "boum", "bravo", "brrr", "car", "ceci", "cela", "celle", "celleci", "cellela", "celles", "cellesci", "cellesla", "celui", "celuici", "celuila", "cent", "cependant", "certain", "certaine", "certaines", "certains", "certes", "ces", "cet", "cette", "ceux", "ceuxci", "ceuxla", "chacun", "chaque", "cher", "chere", "cheres", "chers", "chez", "chiche", "chut", "cinq", "cinquantaine", "cinquante", "cinquantieme", "cinquieme", "clac", "clic", "combien", "comme", "comment", "compris", "concernant", "contre", "couic", "crac", "dans", "debout", "dedans", "dehors", "dela", "depuis", "derriere", "des", "des", "desormais", "desquelles", "desquels", "dessous", "dessus", "deux", "deuxieme", "deuxiemement", "devant", "devers", "devra", "different", "differente", "differentes", "differents", "dire", "divers", "diverse", "diverses", "dix", "dixhuit", "dixieme", "dixneuf", "dixsept", "doit", "doivent", "donc", "dont", "douze", "douzieme", "dring", "duquel", "durant", "effet", "elle", "ellememe", "elles", "ellesmemes", "encore", "entre", "envers", "environ", "est", "etant", "etaient", "etais", "etait", "etant", "etc", "ete", "etre", "etre", "euh", "eux", "euxmemes", "excepte", "facon", "fais", "faisaient", "faisant", "fait", "feront", "flac", "floc", "font", "gens", "hein", "helas", "hem", "hep", "hola", "hop", "hormis", "hors", "hou", "houp", "hue", "hui", "huit", "huitieme", "hum", "hurrah", "ils", "importe", "jusqu", "jusque", "laquelle", "las", "lequel", "les", "les", "lesquelles", "lesquels", "leur", "leurs", "longtemps", "lorsque", "lui", "luimeme", "maint", "mais", "malgre", "meme", "memes", "merci", "mes", "mien", "mienne", "miennes", "miens", "mille", "mince", "moi", "moimeme", "moins", "mon", "moyennant", "neanmoins", "neuf", "neuvieme", "nombreuses", "nombreux", "non", "nos", "notre", "notre", "notres", "nous", "nousmemes", "nul", "ohe", "ole", "olle", "ont", "onze", "onzieme", "ore", "ouf", "ouias", "oust", "ouste", "outre", "paf", "pan", "par", "parmi", "partant", "particulier", "particuliere", "particulierement", "pas", "passe", "pendant", "personne", "peu", "peut", "peuvent", "peux", "pff", "pfft", "pfut", "pif", "plein", "plouf", "plus", "plusieurs", "plutot", "pouah", "pour", "pourquoi", "premier", "premiere", "premierement", "pres", "proche", "psitt", "puisque", "quand", "quant", "quanta", "quantasoi", "quarante", "quatorze", "quatre", "quatrevingt", "quatrieme", "quatriemement", "que", "quel", "quelconque", "quelle", "quelles", "quelque", "quelques", "quelqu", "quels", "qui", "quiconque", "quinze", "quoi", "quoique", "revoici",
"revoila", "rien", "sacrebleu", "sans", "sapristi", "sauf", "seize", "selon", "sept", "septieme", "sera", "seront", "ses", "sien", "sienne", "siennes", "siens", "sinon", "six", "sixieme", "soi", "soimeme", "soit", "soixante", "son", "sont", "sous", "stop", "suis", "suivant", "sur", "surtout", "tac", "tant", "tel", "telle", "tellement", "telles", "tels", "tenant", "tes", "tic", "tien", "tienne", "tiennes", "tiens", "toc", "toi", "toimeme", "ton", "touchant", "toujours", "tous", "tout", "toute", "toutes", "treize", "trente", "tres", "trois", "troisieme", "troisiemement", "trop", "tsoin", "tsouin", "une", "unes", "uns", "vais", "vas", "vers", "via", "vif", "vifs", "vingt", "vivat", "vive", "vives", "vlan", "voici", "voila", "vont", "vos", "votre", "votre", "votres", "vous", "vousmemes", "zut", "alors", "aucuns", "bon", "devrait", "dos", "droite", "debut", "essai", "faites", "fois", "force", "haut", "ici", "juste", "maintenant", "mine", "mot", "nommes", "nouveaux", "parce", "parole", "personnes", "piece", "plupart", "seulement", "soyez", "sujet", "tandis", "valeur", "voie", "voient", "etat", "etions"] \ No newline at end of file diff --git a/data-computer/v1/tree-segment.ini b/data-computer/v1/tree-segment.ini index f5924f5..e2ac6f6 100644 --- a/data-computer/v1/tree-segment.ini +++ b/data-computer/v1/tree-segment.ini @@ -14,39 +14,33 @@ post.parameters.0.in = query post.parameters.0.name = indent post.parameters.0.schema.type = boolean +post.parameters.1.description = URL pour signaler que le traitement est terminé +post.parameters.1.in = header +post.parameters.1.name = X-Webhook-Success +post.parameters.1.schema.type = string +post.parameters.1.schema.format = uri +post.parameters.1.required = false +post.parameters.2.description = URL pour signaler que le traitement a échoué +post.parameters.2.in = header +post.parameters.2.name = X-Webhook-Failure +post.parameters.2.schema.type = string +post.parameters.2.schema.format = uri +post.parameters.2.required = false [use] plugin = basics plugin = analytics -# Step 0 (générique) : Lire le fichier standard tar.gz -[TARExtract] -compress = true -path = */*.json - -# Step 1 (générique) : Créer un identifiant unique pour le corpus reçu -[singleton] -# Step 1.1 (générique) : Controle du premier objet en considérant que les suivants seront structrellement identiques -[singleton/validate] -path = id -rule = required|string - -path = value -rule = required|array - -# Step 1.2 (générique) : Créer un identifiant unique pour le corpus reçu -[singleton/identify] -[singleton/env] -path = generator -value = tree-segment -path = identifier -value = get('uri').replace('uid:/', '') +# Step 1 (générique): Charger le fichier corpus +[delegate] +file = charger.cfg # Step 2 (générique): Traiter de manière asynchnore les items reçus [fork] standalone = true +logger = logger.cfg -# Step 2.1 (générique): Lancer un calcul sur tous les items reçus +# Step 2.1 (spécifique): Lancer un calcul sur tous les items reçus [fork/delegate] # Step 2.1.1 (spécifique): S'assurer d'avoir des tableaux de tableaux @@ -76,44 +70,13 @@ path = origin value = get('value').uniq() -# Step 2.2 (générique): Création d'un fichier résulat standard -[fork/TARDump] -compress = true -manifest = fix({version: '1'}) -manifest = fix({identifier: env('identifier')}) -manifest = fix({generator: env('generator')}) +[fork/transit] -# Step 2.3 (générique): Sauvegarder sur disque le résulat -[fork/FILESave] -location = /tmp/retrieve -identifier = env('identifier') -jsonl = false -compress = false +# Step 2.2 
+[fork/delegate]
+file = recorder.cfg
 
-# Step 2.4 (générique): Signaler le fin du traitement via un appel à un webhook (si il a été précisé)
-[fork/swing]
-test = env('headers.x-hook').startsWith('http')
-
-# Step 2.4.1 (générique): Séléctionner les informations à envoyer au webhook
-[fork/swing/replace]
-path = body
-value = self().pick(['size', 'atime', 'mtime', 'ctime']).set('identifier', env('identifier')).set('generator', env('generator')).set('state', 'ready')
-
-# Step 2.4.2 (générique): Envoyer la requète HTTP
-[fork/swing/URLFetch]
-url = env('headers.x-hook')
-path = body
-headers = Content-Type:application/json
-
-# Step 2.4.3 (faculatif) : Ajouter une trace dans log
-[fork/swing/debug]
-text = webhook triggered
-
-# Step 2.5 (faculatif) : Ajouter une trace dans log
-[fork/debug]
-text = process completed
-
-# Step 3 : Renvoyer immédiatement seul élément indiquant comment récupérer le résulat quand il sera prêt
+# Step 3 : Renvoyer immédiatement un seul élément indiquant comment récupérer le résultat quand il sera prêt
 [shift]
 [replace]
 path = id
diff --git a/domains-classifier/examples.http b/domains-classifier/examples.http
new file mode 100644
index 0000000..efc165e
--- /dev/null
+++ b/domains-classifier/examples.http
@@ -0,0 +1,18 @@
+# These examples can be used directly in VSCode, using REST Client extension (humao.rest-client)
+
+@baseUrl=https://domains-classifier.services.istex.fr
+# @baseUrl=https://domains-classifier.vptdmservices.intra.inist.fr
+# @baseUrl=http://vptdmservices.intra.inist.fr:49215
+
+###
+# @name v1EnClassify
+# @description Classification en domaines scientifiques
+POST {{baseUrl}}/v1/en/classify?indent=true HTTP/1.1
+Content-Type: application/json
+
+[
+    {
+        "idt": "08-040289",
+        "value": "Planck 2015 results. XIII. Cosmological parameters.We present results based on full-mission Planck observations of temperature and polarization anisotropies of the CMB. These data are consistent with the six-parameter inflationary LCDM cosmology. From the Planck temperature and lensing data, for this cosmology we find a Hubble constant, H0= (67.8 +/- 0.9) km/s/Mpc, a matter density parameter Omega_m = 0.308 +/- 0.012 and a scalar spectral index with n_s = 0.968 +/- 0.006. (We quote 68% errors on measured parameters and 95% limits on other parameters.) Combined with Planck temperature and lensing data, Planck LFI polarization measurements lead to a reionization optical depth of tau = 0.066 +/- 0.016. Combining Planck with other astrophysical data we find N_ eff = 3.15 +/- 0.23 for the effective number of relativistic degrees of freedom and the sum of neutrino masses is constrained to < 0.23 eV. Spatial curvature is found to be |Omega_K| < 0.005. For LCDM we find a limit on the tensor-to-scalar ratio of r <0.11 consistent with the B-mode constraints from an analysis of BICEP2, Keck Array, and Planck (BKP) data. Adding the BKP data leads to a tighter constraint of r < 0.09. We find no evidence for isocurvature perturbations or cosmic defects. The equation of state of dark energy is constrained to w = -1.006 +/- 0.045. Standard big bang nucleosynthesis predictions for the Planck LCDM cosmology are in excellent agreement with observations. We investigate annihilating dark matter and deviations from standard recombination, finding no evidence for new physics. The Planck results for base LCDM are in agreement with BAO data and with the JLA SNe sample. However the amplitude of the fluctuations is found to be higher than inferred from rich cluster counts and weak gravitational lensing. Apart from these tensions, the base LCDM cosmology provides an excellent description of the Planck CMB observations and many other astrophysical data sets."
+    }
+]
\ No newline at end of file
diff --git a/domains-classifier/swagger.json b/domains-classifier/swagger.json
new file mode 100644
index 0000000..9d6b323
--- /dev/null
+++ b/domains-classifier/swagger.json
@@ -0,0 +1,32 @@
+{
+    "info": {
+        "title": "domains-classifier - Classification en domaines scientifiques",
+        "summary": "Utilise une succession arborescente de modèles de type Fasttext pour prédire un code de classement Pascal/Francis",
+        "version": "1.5.0",
+        "termsOfService": "https://objectif-tdm.inist.fr/",
+        "contact": {
+            "name": "Inist-CNRS",
+            "url": "https://www.inist.fr/nous-contacter/"
+        }
+    },
+    "servers": [
+        {
+            "x-comment": "Will be automatically completed by the ezs server."
+        },
+        {
+            "url": "http://vptdmservices.intra.inist.fr:49215/",
+            "description": "Latest version for production",
+            "x-profil": "Standard"
+        }
+    ],
+    "tags": [
+        {
+            "name": "domains-classifier",
+            "description": "Classification en domaines scientifiques",
+            "externalDocs": {
+                "description": "Plus de documentation",
+                "url": "https://github.com/Inist-CNRS/ezmaster-apps/tree/main/applications/ws-affiliation-rnsr"
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/domains-classifier/tests.hurl b/domains-classifier/tests.hurl
new file mode 100644
index 0000000..853146d
--- /dev/null
+++ b/domains-classifier/tests.hurl
@@ -0,0 +1,39 @@
+POST https://domains-classifier.services.istex.fr/v1/en/classify?indent=true
+content-type: application/json
+[
+    {
+        "idt": "08-040289",
+        "value": "Planck 2015 results. XIII. Cosmological parameters.We present results based on full-mission Planck observations of temperature and polarization anisotropies of the CMB. These data are consistent with the six-parameter inflationary LCDM cosmology. From the Planck temperature and lensing data, for this cosmology we find a Hubble constant, H0= (67.8 +/- 0.9) km/s/Mpc, a matter density parameter Omega_m = 0.308 +/- 0.012 and a scalar spectral index with n_s = 0.968 +/- 0.006. (We quote 68% errors on measured parameters and 95% limits on other parameters.) Combined with Planck temperature and lensing data, Planck LFI polarization measurements lead to a reionization optical depth of tau = 0.066 +/- 0.016. Combining Planck with other astrophysical data we find N_ eff = 3.15 +/- 0.23 for the effective number of relativistic degrees of freedom and the sum of neutrino masses is constrained to < 0.23 eV. Spatial curvature is found to be |Omega_K| < 0.005. For LCDM we find a limit on the tensor-to-scalar ratio of r <0.11 consistent with the B-mode constraints from an analysis of BICEP2, Keck Array, and Planck (BKP) data. Adding the BKP data leads to a tighter constraint of r < 0.09. We find no evidence for isocurvature perturbations or cosmic defects. The equation of state of dark energy is constrained to w = -1.006 +/- 0.045. Standard big bang nucleosynthesis predictions for the Planck LCDM cosmology are in excellent agreement with observations. We investigate annihilating dark matter and deviations from standard recombination, finding no evidence for new physics. The Planck results for base LCDM are in agreement with BAO data and with the JLA SNe sample. However the amplitude of the fluctuations is found to be higher than inferred from rich cluster counts and weak gravitational lensing. Apart from these tensions, the base LCDM cosmology provides an excellent description of the Planck CMB observations and many other astrophysical data sets."
+    }
+]
+
+HTTP 200
+[{
+    "idt": "08-040289",
+    "value": [
+        {
+            "code": {
+                "id": "001",
+                "value": "Sciences exactes et technologie"
+            },
+            "confidence": 1.0000077486038208,
+            "rang": 1
+        },
+        {
+            "code": {
+                "id": "001B",
+                "value": "Physique"
+            },
+            "confidence": 0.7340989708900452,
+            "rang": 2
+        },
+        {
+            "code": {
+                "id": "001B00",
+                "value": "Généralités"
+            },
+            "confidence": 0.9693299531936646,
+            "rang": 3
+        }
+    ]
+}]
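The tests.hurl files are not meant to be written by hand; they are derived from examples.http. A plausible round trip with the Makefile targets above, using domains-classifier as the directory name:

```bash
# Play the examples interactively (npx httpyac, as wired in the Makefile)
make examples domains-classifier

# Regenerate domains-classifier/tests.hurl from examples.http
make generate-example-tests domains-classifier

# Replay the generated assertions against the live service
make test domains-classifier
```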
diff --git a/domains-classifier/v1/en/classify.ini b/domains-classifier/v1/en/classify.ini
index 2515f95..af3c850 100644
--- a/domains-classifier/v1/en/classify.ini
+++ b/domains-classifier/v1/en/classify.ini
@@ -17,7 +17,22 @@
 post.parameters.2.schema.type = integer
 post.parameters.2.description = profondeur [0...3]
 
-
+# Example
+post.requestBody.content.application/json.example.0.idt = 08-040289
+post.requestBody.content.application/json.example.0.value = Planck 2015 results. XIII. Cosmological parameters.We present results based on full-mission Planck observations of temperature and polarization anisotropies of the CMB. These data are consistent with the six-parameter inflationary LCDM cosmology. From the Planck temperature and lensing data, for this cosmology we find a Hubble constant, H0= (67.8 +/- 0.9) km/s/Mpc, a matter density parameter Omega_m = 0.308 +/- 0.012 and a scalar spectral index with n_s = 0.968 +/- 0.006. (We quote 68% errors on measured parameters and 95% limits on other parameters.) Combined with Planck temperature and lensing data, Planck LFI polarization measurements lead to a reionization optical depth of tau = 0.066 +/- 0.016. Combining Planck with other astrophysical data we find N_ eff = 3.15 +/- 0.23 for the effective number of relativistic degrees of freedom and the sum of neutrino masses is constrained to < 0.23 eV. Spatial curvature is found to be |Omega_K| < 0.005. For LCDM we find a limit on the tensor-to-scalar ratio of r <0.11 consistent with the B-mode constraints from an analysis of BICEP2, Keck Array, and Planck (BKP) data. Adding the BKP data leads to a tighter constraint of r < 0.09. We find no evidence for isocurvature perturbations or cosmic defects. The equation of state of dark energy is constrained to w = -1.006 +/- 0.045. Standard big bang nucleosynthesis predictions for the Planck LCDM cosmology are in excellent agreement with observations. We investigate annihilating dark matter and deviations from standard recombination, finding no evidence for new physics. The Planck results for base LCDM are in agreement with BAO data and with the JLA SNe sample. However the amplitude of the fluctuations is found to be higher than inferred from rich cluster counts and weak gravitational lensing. Apart from these tensions, the base LCDM cosmology provides an excellent description of the Planck CMB observations and many other astrophysical data sets.
+post.responses.default.content.application/json.example.0.idt = 08-040289
+post.responses.default.content.application/json.example.0.value.0.code.id = 001
+post.responses.default.content.application/json.example.0.value.0.code.value = Sciences exactes et technologie
+post.responses.default.content.application/json.example.0.value.0.confidence = 1.0000077486038208
+post.responses.default.content.application/json.example.0.value.0.rang = 1
+post.responses.default.content.application/json.example.0.value.1.code.id = 001B
+post.responses.default.content.application/json.example.0.value.1.code.value = Physique
+post.responses.default.content.application/json.example.0.value.1.confidence = 0.7340989708900452
+post.responses.default.content.application/json.example.0.value.1.rang = 2
+post.responses.default.content.application/json.example.0.value.2.code.id = 001B00
+post.responses.default.content.application/json.example.0.value.2.code.value = Généralités
+post.responses.default.content.application/json.example.0.value.2.confidence = 0.9693299531936646
+post.responses.default.content.application/json.example.0.value.2.rang = 3
 
 [use]
 plugin = @ezs/spawn
diff --git a/hal-classifier/examples.http b/hal-classifier/examples.http
index 25910e5..fd5e60b 100644
--- a/hal-classifier/examples.http
+++ b/hal-classifier/examples.http
@@ -1,7 +1,12 @@
 # These examples can be used directly in VSCode, using REST Client extension (humao.rest-client)
 
-# Classification dans les domaines de niveau de la base HAL
-POST https://hal-classifier.services.inist.fr/v1/en/classhalen?indent=true HTTP/1.1
+# @baseUrl=http://localhost:31976
+@baseUrl=https://hal-classifier.services.istex.fr
+
+###
+# @name v1EnClasshalen
+# @description Classification dans les domaines de niveau de la base HAL
+POST {{baseUrl}}/v1/en/classhalen?indent=true HTTP/1.1
 Content-Type: application/json
 
 [
diff --git a/hal-classifier/tests.hurl b/hal-classifier/tests.hurl
new file mode 100644
index 0000000..bc5fd46
--- /dev/null
+++ b/hal-classifier/tests.hurl
@@ -0,0 +1,30 @@
+POST https://hal-classifier.services.istex.fr/v1/en/classhalen?indent=true
+content-type: application/json
+[
+{
+"id":1,
+"value":"In the southern French Massif Central, the Montagne Noire axial zone is a NE-SW elongated granite-migmatite dome emplaced within Visean south-verging recumbent folds and intruded by syn- to late-migmatization granitoids. The tectonic setting of this dome is still disputed, thus several models have been proposed. In order to better understand the emplacement mechanism of this dome, petrofabric and Anisotropy of Magnetic Susceptibility (AMS) studies have been carried out. In the granites and migmatites that form the dome core, magmatic texture and to a lesser extent weak solid-state texture are dominant. As a paramagnetic mineral, biotite is the main carrier of the magnetic susceptibility. On the basis of 135 AMS sites, the magnetic fabrics appear as independent of the lithology but related to the dome architecture. Coupling our results with previous structural and geochronological studies, allows us to propose a new emplacement model. Between 340-325 Ma, the Palaeozoic series underwent a compressional deformation represented by nappes and recumbent folds involving the thermal event leading to partial melting. Until ~325-310 Ma, the dome emplacement was assisted by diapiric processes. An extensional event took place at 300 Ma, after the emplacement of the late to post-migmatitic granitic plutons. In the northeast side of the dome, a brittle normal-dextral faulting controlled the opening of the Graissessac coal-basin."
+},
+{"id":2,
+"value":"The COVID-19 pandemic, also known as the coronavirus pandemic, is an ongoing global pandemic of coronavirus disease 2019 (COVID-19) caused by severe acute respiratory syndrome coronavirus2 (SARS-CoV-2). It was first identified in December 2019 in Wuhan, China. The World Health Organization declared the outbreak a Public Health Emergency of International Concern on 20 January 2020, and later a pandemic on 11 March 2020. As of 2 April 2021, more than 129 million cases have been confirmed, with more than 2.82 million deaths attributed to COVID-19, making it one of the deadliest pandemics in history."
+}
+]
+
+
+HTTP 200
+[{
+    "id": 1,
+    "value": {
+        "code": "sdu",
+        "labelFr": "Planète et Univers [physics]",
+        "labelEn": "Sciences of the Universe [physics]"
+    }
+},
+{
+    "id": 2,
+    "value": {
+        "code": "sdv",
+        "labelFr": "Sciences du Vivant [q-bio]",
+        "labelEn": "Life Sciences [q-bio]"
+    }
+}]
diff --git a/hal-classifier/v1/en/classhalen.ini b/hal-classifier/v1/en/classhalen.ini
index ef15201..9f111b7 100644
--- a/hal-classifier/v1/en/classhalen.ini
+++ b/hal-classifier/v1/en/classhalen.ini
@@ -17,6 +17,20 @@
 post.parameters.1.schema.type = boolean
 post.parameters.1.description = Indent or not the JSON Result
 
+# Examples
+post.requestBody.content.application/json.example.0.id = 1
+post.requestBody.content.application/json.example.0.value = In the southern French Massif Central, the Montagne Noire axial zone is a NE-SW elongated granite-migmatite dome emplaced within Visean south-verging recumbent folds and intruded by syn- to late-migmatization granitoids. The tectonic setting of this dome is still disputed, thus several models have been proposed. In order to better understand the emplacement mechanism of this dome, petrofabric and Anisotropy of Magnetic Susceptibility (AMS) studies have been carried out. In the granites and migmatites that form the dome core, magmatic texture and to a lesser extent weak solid-state texture are dominant. As a paramagnetic mineral, biotite is the main carrier of the magnetic susceptibility. On the basis of 135 AMS sites, the magnetic fabrics appear as independent of the lithology but related to the dome architecture. Coupling our results with previous structural and geochronological studies, allows us to propose a new emplacement model. Between 340-325 Ma, the Palaeozoic series underwent a compressional deformation represented by nappes and recumbent folds involving the thermal event leading to partial melting. Until ~325-310 Ma, the dome emplacement was assisted by diapiric processes. An extensional event took place at 300 Ma, after the emplacement of the late to post-migmatitic granitic plutons. In the northeast side of the dome, a brittle normal-dextral faulting controlled the opening of the Graissessac coal-basin.
+post.requestBody.content.application/json.example.1.id = 2
+post.requestBody.content.application/json.example.1.value = The COVID-19 pandemic, also known as the coronavirus pandemic, is an ongoing global pandemic of coronavirus disease 2019 (COVID-19) caused by severe acute respiratory syndrome coronavirus2 (SARS-CoV-2). It was first identified in December 2019 in Wuhan, China. The World Health Organization declared the outbreak a Public Health Emergency of International Concern on 20 January 2020, and later a pandemic on 11 March 2020. As of 2 April 2021, more than 129 million cases have been confirmed, with more than 2.82 million deaths attributed to COVID-19, making it one of the deadliest pandemics in history.
+post.responses.default.content.application/json.example.0.id = 1
+post.responses.default.content.application/json.example.0.value.code = sdu
+post.responses.default.content.application/json.example.0.value.labelFr = Planète et Univers [physics]
+post.responses.default.content.application/json.example.0.value.labelEn = Sciences of the Universe [physics]
+post.responses.default.content.application/json.example.1.id = 2
+post.responses.default.content.application/json.example.1.value.code = sdv
+post.responses.default.content.application/json.example.1.value.labelFr = Sciences du Vivant [q-bio]
+post.responses.default.content.application/json.example.1.value.labelEn = Life Sciences [q-bio]
+
 [use]
 plugin = @ezs/spawn
 plugin = @ezs/basics
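A quick manual probe of the endpoint exercised by hal-classifier/tests.hurl; the URL and response shape come from the test file, while the shortened abstract is only illustrative, so the prediction is not guaranteed:

```bash
curl -s -X POST "https://hal-classifier.services.istex.fr/v1/en/classhalen?indent=true" \
  -H "Content-Type: application/json" \
  -d '[{"id": 2, "value": "The COVID-19 pandemic is an ongoing global pandemic caused by SARS-CoV-2."}]'
# tests.hurl expects this kind of text to map to:
# [{"id": 2, "value": {"code": "sdv", "labelFr": "Sciences du Vivant [q-bio]", "labelEn": "Life Sciences [q-bio]"}}]
```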
diff --git a/irc3-species/examples.http b/irc3-species/examples.http
new file mode 100644
index 0000000..f2b18e6
--- /dev/null
+++ b/irc3-species/examples.http
@@ -0,0 +1,29 @@
+# These examples can be used directly in VSCode, using REST Client extension (humao.rest-client)
+
+# @baseUrl=http://localhost:31976
+@baseUrl=https://irc3-species.services.istex.fr
+
+###
+# @name v1Irc3sp
+# @description Recherche de noms d'espèces
+POST {{baseUrl}}/v1/irc3sp?indent=true HTTP/1.1
+Content-Type: application/json
+
+[{
+    "id": 1,
+    "value": "Trophic diversity accumulation curves of (a) Pseudopercis semifasciata, (b) Acanthistius patachonicus and (c) Pinguipes brasilianus. Horizontal lines show Brillouin diversity index (Hz) values (Hz± 0·05 Hz) and the vertical line shows a value n- 2 (n = number of stomachs)."
+},{
+    "id": 2,
+    "value": "Phasianus colchicus/versicolor: in our study, the best match."
+},{
+    "id": 3,
+    "value": "short lower jaw in Etheostoma bellator Suttkus"
+}, {
+    "id": 4,
+    "value": [
+        "Carnivore diet analysis based on next‐generation sequencing: application to the leopard cat (Prionailurus bengalensis) in Pakistan ",
+        "The leopard cat (Prionailurus bengalensis) is a small felid (weight 1.7–7.1 kg; Sunquist & Sunquist 2009), with a wide range in Asia (8.66 × 106 km2; Nowell & Jackson 1996). ",
+        "Muridae (mainly Rattus spp. and Mus spp.) seem to represent the main prey items throughout the leopard cat distribution range, supplemented by a wide variety of other prey including small mammals such as shrews and ground squirrels, birds, reptiles, frogs and fish (Tatara & Doi 1994; Grassman et al. 2005; Austin et al. 2007; Rajaratnam et al. 2007; Watanabe 2009; Fernandez & de Guia 2011). ",
+        "More recently, Deagle et al. (2009, 2010) investigated the diet of Australian fur seals (Arctocephalus pusillus) and penguins (Eudyptula minor) by combining a blocking oligonucleotide approach with 454 GS‐FLX pyrosequencing technologies. "
+    ]
+}]
diff --git a/irc3-species/tests.hurl b/irc3-species/tests.hurl
new file mode 100644
index 0000000..bca0ff3
--- /dev/null
+++ b/irc3-species/tests.hurl
@@ -0,0 +1,36 @@
+POST https://irc3-species.services.istex.fr/v1/irc3sp?indent=true
+content-type: application/json
+[{
+    "id": 1,
+    "value": "Trophic diversity accumulation curves of (a) Pseudopercis semifasciata, (b) Acanthistius patachonicus and (c) Pinguipes brasilianus. Horizontal lines show Brillouin diversity index (Hz) values (Hz± 0·05 Hz) and the vertical line shows a value n- 2 (n = number of stomachs)."
+},{
+    "id": 2,
+    "value": "Phasianus colchicus/versicolor: in our study, the best match."
+},{
+    "id": 3,
+    "value": "short lower jaw in Etheostoma bellator Suttkus"
+}, {
+    "id": 4,
+    "value": [
+        "Carnivore diet analysis based on next‐generation sequencing: application to the leopard cat (Prionailurus bengalensis) in Pakistan ",
+        "The leopard cat (Prionailurus bengalensis) is a small felid (weight 1.7–7.1 kg; Sunquist & Sunquist 2009), with a wide range in Asia (8.66 × 106 km2; Nowell & Jackson 1996). ",
+        "Muridae (mainly Rattus spp. and Mus spp.) seem to represent the main prey items throughout the leopard cat distribution range, supplemented by a wide variety of other prey including small mammals such as shrews and ground squirrels, birds, reptiles, frogs and fish (Tatara & Doi 1994; Grassman et al. 2005; Austin et al. 2007; Rajaratnam et al. 2007; Watanabe 2009; Fernandez & de Guia 2011). ",
+        "More recently, Deagle et al. (2009, 2010) investigated the diet of Australian fur seals (Arctocephalus pusillus) and penguins (Eudyptula minor) by combining a blocking oligonucleotide approach with 454 GS‐FLX pyrosequencing technologies. "
+    ]
+}]
+
+
+HTTP 200
+[ {
+    "id": 1,
+    "value": [ "Acanthistius patachonicus", "Pinguipes brasilianus", "Pseudopercis semifasciata" ]
+}, {
+    "id": 2,
+    "value": [ "Phasianus colchicus" ]
+}, {
+    "id": 3,
+    "value": [ "Etheostoma bellator" ]
+}, {
+    "id": 4,
+    "value": [ "Arctocephalus pusillus", "Eudyptula minor", "Prionailurus bengalensis" ]
+}]
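The same kind of manual check for irc3-species, reusing the smallest case from tests.hurl verbatim so the expected answer is known in advance:

```bash
curl -s -X POST "https://irc3-species.services.istex.fr/v1/irc3sp?indent=true" \
  -H "Content-Type: application/json" \
  -d '[{"id": 3, "value": "short lower jaw in Etheostoma bellator Suttkus"}]'
# Expected, per tests.hurl: [{"id": 3, "value": ["Etheostoma bellator"]}]
```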
diff --git a/kos2vec/README.md b/kos2vec/README.md
index d52eb11..020f620 100755
--- a/kos2vec/README.md
+++ b/kos2vec/README.md
@@ -1,74 +1,74 @@
-
-
 # kos2vec
 
 ## Application d’indexation sémantique sur une ressource termino-ontologique (RTO)
 
--------------
-**Identification de concepts sur la mémoire basé sur une ontology et utilisant un modèle de langue**
+
+Identification de concepts sur la mémoire basée sur une ontologie et utilisant un modèle de langue.
 
 ## Principe de fonctionnement
-
-![text](image.jpg)
-
-
-Le système prend en entrée les métadonnées associées à un article (titre, résumé) et renvoie une sélection de concepts tirés du thesaurus mémoire.
-Il se compose de 3 modules principaux :
-
-- le module syntaxique analyse les documents d'entrée et identifie les concepts qui sont explicitement mentionnés dans le document.
-
-- le Module semantique extrait des candidats termes (cunking) et calcule la similarité de ceux-ci avec les nœuds de l'ontologie en tirant parti de l'intégration des mots dans un modèle Embedding. Il sélection des termes RTO directement présent ou proche voisin dans le modèle.
+![schéma de principe](image.jpg)
 
-![text](image2.jpg)
-
+Le système prend en entrée les métadonnées associées à un article (titre,
+résumé) et renvoie une sélection de concepts tirés du thesaurus mémoire.
+Il se compose de 3 modules principaux :
 
-- le module de post-traitement combine les résultats de ces deux modules, élimine les valeurs aberrantes et les améliore en incluant les "super-concepts pertinents" (broader).
+1. le module syntaxique analyse les documents d'entrée et identifie les concepts
+   qui sont explicitement mentionnés dans le document.
+2. le module sémantique extrait des candidats termes (*chunking*) et calcule la
+   similarité de ceux-ci avec les nœuds de l'ontologie en tirant parti de
+   l'intégration des mots dans un modèle *Embedding*. Il sélectionne des termes
+   RTO directement présents ou proches voisins dans le modèle.
+   ![text](image2.jpg)
+3. le module de post-traitement combine les résultats de ces deux modules,
+   élimine les valeurs aberrantes et les améliore en incluant les
+   "super-concepts pertinents" (broader).
 
-L'approche exploite une RTO de loterre et des plongements lexicaux calculés sur un corpus du domaine.
-
- * Le modele de langue est de type **Word2Vec** et il construit sur un corpus Istex de 587.721 résumés **annotés par les termes de la RTO et les ngrams les plus fréquents** (collocation lexicale).
- * L'Ontology mémoire provient du site Inist **Loterre** : https://skosmos.loterre.fr/P66/fr/
-
+L'approche exploite une RTO de Loterre et des plongements lexicaux calculés sur
+un corpus du domaine.
+
+- Le modèle de langue est de type **Word2Vec** et il est construit sur un corpus
+  Istex de 587.721 résumés **annotés par les termes de la RTO et les ngrams les
+  plus fréquents** (collocation lexicale).
+- L'ontologie mémoire provient du site Inist **Loterre** : <https://skosmos.loterre.fr/P66/fr/>
 
 ## Utilisation
 
 ### Sollicitation du WebService
 
 - [/v1/{code_vocab}/index?indent=True](/v1/en/index?indent=True)
 
-| nom de la ressource|Code_vocab|Sur loterre|
-|--- |:-: |:-: |
-| memoire Psychologie | P66 | https://skosmos.loterre.fr/P66/en/ |
-| MeSH |JVR|https://skosmos.loterre.fr/JVR/en/|
-| education | 216 |https://skosmos.loterre.fr/216/en/|
-| sociologie | 3JP |https://skosmos.loterre.fr/3JP/en/|
-| philosophie | 73G |https://skosmos.loterre.fr/73G/en/|
-| litterature | P21 |https://skosmos.loterre.fr/P21/en/|
-| SAGEThesaurus | SAG ||
-
-
-* Prend en entrée un flux **json** au format **id/value** :
-```
+| nom de la ressource | Code_vocab | Sur loterre                          |
+| ------------------- | :--------: | :----------------------------------: |
+| mémoire Psychologie | P66        | <https://skosmos.loterre.fr/P66/en/> |
+| MeSH                | JVR        | <https://skosmos.loterre.fr/JVR/en/> |
+| éducation           | 216        | <https://skosmos.loterre.fr/216/en/> |
+| sociologie          | 3JP        | <https://skosmos.loterre.fr/3JP/en/> |
+| philosophie         | 73G        | <https://skosmos.loterre.fr/73G/en/> |
+| littérature         | P21        | <https://skosmos.loterre.fr/P21/en/> |
+| SAGEThesaurus       | SAG        |                                      |
+
+Prend en entrée un flux **json** au format **id/value** :
+
+```json
 [
 {"idt":"11-0278198","value":"reduction fear child comparison positive information imagery control condition study... effect ... "},
 {"idt":"07-0413881","value":"avoidance hemodilution selective cerebral perfusion neurobehavioral outcome ... "}
 ]
 ```
 
-* Produit en sortie un **flux json** contenant les résultats d'une indexation sur le thesaurus mémoire :
-
- * **"idt"** : identifiant fourni en entrée
- * **"syntactic"** : résultat de l'indexation syntaxique
- * **"semantic"** : résultat de l'indexation semantique
- * **"union"** : union des deux indexations
- * **"enhancement"** : trace textuelle des indexations
- * **"explanation"** : les concepts broader de tous les concepts trouvés
+Produit en sortie un **flux json** contenant les résultats d'une indexation sur le thesaurus mémoire :
+
+- **"idt"** : identifiant fourni en entrée
+- **"syntactic"** : résultat de l'indexation syntaxique
+- **"semantic"** : résultat de l'indexation sémantique
+- **"union"** : union des deux indexations
+- **"enhancement"** : trace textuelle des indexations
+- **"explanation"** : les concepts broader de tous les concepts trouvés
 
 #### Exemple
 
-```
+```bash
 cat <