-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathapp.py
124 lines (97 loc) · 4.13 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
#-----------------------------------------------------------------------------------------
# Licensed under the MIT License. See LICENSE in the project root for license information.
#-----------------------------------------------------------------------------------------
import os
import json
import requests
import math
from flask import Flask, render_template, request
from flask import Flask
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
# Flask application object; routes below are registered against it.
app = Flask(__name__)
# Rate limiter keyed on the caller's remote address. These defaults apply to
# every route; individual routes tighten them with @limiter.limit(...).
limiter = Limiter(
    app,
    key_func=get_remote_address,
    default_limits=["2 per minute", "1 per second"],
)
from google.cloud import language_v1
from google.cloud.language_v1 import enums
# Shared Google Natural Language client and request settings, used by
# analyze_sentiment() and analyze_entity_sentiment() below.
# NOTE(review): credentials are taken from the environment
# (GOOGLE_APPLICATION_CREDENTIALS) — not visible in this file.
client = language_v1.LanguageServiceClient()
type_ = enums.Document.Type.PLAIN_TEXT
language = "en"
encoding_type = enums.EncodingType.UTF8
@app.route("/insult")
@limiter.limit("2 per minute")
def insult():
url = 'https://evilinsult.com/generate_insult.php?lang=en&type=json'
r = requests.get(url)
j = r.json()
insult_str = j['insult']
remove_table = dict.fromkeys(map(ord, '+"@#$'), None)
insult_str = insult_str.translate(remove_table)
insult_str_len = len(insult_str)
print('Insult: ' + insult_str)
units = str(math.ceil((insult_str_len/1000)*2))
print('Number of Google Natural Language units used processing this insult: ' + units)
doc_data = analyze_sentiment(insult_str)
entity_data = []
entity_response = analyze_entity_sentiment(insult_str)
for entity in entity_response.entities:
sentiment = entity.sentiment
entity_data.append({'entity_name': entity.name, 'entity_score': sentiment.score, 'entity_magnitude': sentiment.magnitude, 'entity_salience': entity.salience})
html = render_template('insult.html', text=insult_str, score=doc_data.document_sentiment.score, magnitude=doc_data.document_sentiment.magnitude, entities=entity_data, usage=units)
return html
@app.route("/news")
@limiter.limit("1 per minute")
def news():
subject = request.args.get('subject', default = '', type = str)
news_api_key = os.environ['NEWSAPIKEY']
url = ('https://newsapi.org/v2/top-headlines?'
'q=' + subject + '&'
'country=us&'
'language=en&'
'apiKey=' + news_api_key)
r = requests.get(url)
j = r.json()
news_str = ''
for k,v in j.items():
if k == "articles":
articles_json = v
for article in articles_json:
description = str(article['description'])
news_str = news_str + description + ' '
remove_table = dict.fromkeys(map(ord, '+"@#$'), None)
news_str = news_str.translate(remove_table)
news_str_len = len(news_str)
print('News: ' + news_str)
units = str(math.ceil((news_str_len/1000)*2))
print('Number of Google Natural Language units used processing this news: ' + units)
doc_data = analyze_sentiment(news_str)
entity_data = []
entity_response = analyze_entity_sentiment(news_str)
for entity in entity_response.entities:
sentiment = entity.sentiment
entity_data.append({'entity_name': entity.name, 'entity_score': sentiment.score, 'entity_magnitude': sentiment.magnitude, 'entity_salience': entity.salience})
if subject == '':
subject = 'everything'
html = render_template('news.html', subject=subject, text=news_str, score=doc_data.document_sentiment.score, magnitude=doc_data.document_sentiment.magnitude, entities=entity_data, usage=units)
return html
def analyze_sentiment(text_content):
    """Run document-level sentiment analysis over *text_content*.

    Args:
        text_content: The text to analyze.

    Returns:
        The AnalyzeSentimentResponse from the Google Natural Language API.
    """
    # Build the request document from the module-level settings
    # (plain text, English, UTF-8 offsets).
    request_doc = {
        "content": text_content,
        "type": type_,
        "language": language,
    }
    return client.analyze_sentiment(request_doc, encoding_type=encoding_type)
def analyze_entity_sentiment(text_content):
    """Run entity-level sentiment analysis over *text_content*.

    Args:
        text_content: The text to analyze.

    Returns:
        The AnalyzeEntitySentimentResponse from the Google Natural
        Language API (per-entity names, sentiment, and salience).
    """
    # Same module-level request settings as analyze_sentiment().
    request_doc = {
        "content": text_content,
        "type": type_,
        "language": language,
    }
    return client.analyze_entity_sentiment(request_doc,
                                           encoding_type=encoding_type)