From ebc2302122ae7a93446ead8aac5fc321d650a595 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=A9rgio=20Rebelo?=
Date: Thu, 26 Jan 2023 18:17:35 +0100
Subject: [PATCH] feat(api): modularise st #9

---
 src/index.js | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/src/index.js b/src/index.js
index 4b7a9e4..a4de9cc 100644
--- a/src/index.js
+++ b/src/index.js
@@ -25,14 +25,15 @@ APP.get("/lines/:delimiter/:lang/:input/", async (req, res) => {
   const text = req.params.input;
   const sentences = text.split(delimiter);
   const lang = req.params.lang;
-  const results = await analysis(text, lang, sentences);
+  const results = await analysis(sentences, lang);
   res.status(results[0]).send(JSON.stringify(results[1]));
 });
 
 APP.get("/text/:lang/:input", async (req, res) => {
   const text = req.params.input;
   const lang = req.params.lang;
-  const results = await analysis(text, lang);
+  const sentences = (await _sentenceTokenizer(text)).flat();
+  const results = await analysis(sentences, lang);
   res.status(results[0]).send(JSON.stringify(results[1]));
 });
 
@@ -48,6 +49,10 @@ const errHandler = (code, msg) => {
   }
 }
 
+const _sentenceTokenizer = async (text) => {
+  return sentenceTokeniser(text);
+}
+
 const _lexiconGlobalResults = async (sentences) => {
   // compute global lexicon value
 
@@ -69,14 +74,12 @@ const _lexiconGlobalResults = async (sentences) => {
   return res.length === 0 ? [['neutral', 1]] : res;
 }
 
-const analysis = async (text, lang, sentences = []) => {
+const analysis = async (sentences = [], lang) => {
+  const text = sentences.flat().join(' ');
   // classification analysis
   const classificationResults = await classification(text, lang);
   if (!classificationResults.success) return [400, errHandler(400, `Error in the classification method`)];
 
-  // sentence tokenizer (if necessary)
-  if (sentences.length === 0) sentences = (await sentenceTokeniser(text)).flat();
-
   // lexicon-based analysis
   let lexiconResults = { "global": null, "sentences": [] };
   for (const sentence of sentences) {
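
A minimal usage sketch (not part of the patch) of the two routes this change touches; the base URL, port, language code, and sample inputs are assumptions, not values from the repository. It only illustrates that both endpoints now feed an array of sentences into analysis(sentences, lang): the /lines route splits on a caller-supplied delimiter, while the /text route tokenises on the server via _sentenceTokenizer.

// Usage sketch — assumes the Express app from src/index.js is listening on
// http://localhost:3000 and that Node 18+ provides global fetch (run as an ES module).
const base = "http://localhost:3000";

// /lines/:delimiter/:lang/:input/ — caller chooses the delimiter, server splits the input on it.
const lines = await fetch(`${base}/lines/${encodeURIComponent(";")}/en/${encodeURIComponent("great film; terrible ending")}/`);
console.log(lines.status, await lines.json());

// /text/:lang/:input — server tokenises the text into sentences before running the analysis.
const text = await fetch(`${base}/text/en/${encodeURIComponent("Great film. Terrible ending.")}`);
console.log(text.status, await text.json());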