-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathSentiment.py
More file actions
executable file
·132 lines (109 loc) · 5.32 KB
/
Sentiment.py
File metadata and controls
executable file
·132 lines (109 loc) · 5.32 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
#tejas Ghalsasi tejas.ghalsasi@csu.fullerton.edu
#Varsha Hawaldar varshahawaldar@csu.fullerton.edu
import json
import tweepy # To consume Twitter's API
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
from textblob import TextBlob #predict the sentiment of Tweet, see 'https://textblob.readthedocs.io/en/dev/'
from elasticsearch import Elasticsearch #pip install Elasticsearch if not intalled yet
from datetime import datetime
import calendar
import numpy as np
from Tkinter import *
import webbrowser
#from django.utils.dateparse import parse_datetime
#from http.client import IncompleteRead
#log in to your Twitter Application Management to create an App, url: 'https://apps.twitter.com'
# SECURITY NOTE(review): live API credentials are hardcoded in source. These
# should be loaded from environment variables or an untracked config file, and
# the keys below should be revoked/regenerated since they are now public.
CONSUMER_KEY="azbzyCWOzQWftqt9XHdO3P7fO"
CONSUMER_SECRET="jka4RJwiSGw3jUm37c3vMCaGu2LTcrzToILnPFLPBoWBSRxhBg"
ACCESS_TOKEN="971165929194205184-nV2r1Qo930xviluyQRLVqhuHD41pelG"
ACCESS_SECRET="Uof6dzF7rhzhEJTuRRudsTDZpUFGpRm1s0hVR2JMEmYwO"
# create instance of elasticsearch
# No host given, so the client uses the library default (typically
# localhost:9200) — a local node must be running for es.index() calls to work.
es = Elasticsearch()
class TweetStreamListener(StreamListener):
    """Tweepy stream listener that scores each incoming tweet's sentiment
    with TextBlob and indexes the result into a local Elasticsearch cluster
    under the 'logstash-a' index (picked up later by a Kibana dashboard).
    """

    # re-write the on_data function in the TweetStreamListener.
    # 'on_data' receives every raw message (not just statuses), see
    # 'https://stackoverflow.com/questions/31054656/what-is-the-difference-between-on-data-and-on-status-in-the-tweepy-library'
    def on_data(self, data):
        """Handle one raw streaming message.

        data: JSON string from the Twitter streaming API, see
        'https://dev.twitter.com/overview/api/tweets'.
        Returns True so tweepy keeps the stream connected.
        """
        dict_data = json.loads(data)
        # Non-tweet payloads (delete notices, limit messages, ...) have no
        # "text" key — skip them but keep streaming.
        text = dict_data.get("text")
        if text is None:
            return True

        # Pass the tweet into TextBlob to predict the sentiment.
        tweet = TextBlob(text)
        polarity = tweet.sentiment.polarity

        # Map the continuous polarity score onto three discrete labels.
        if polarity < 0:
            sentiment = "negative"
        elif polarity == 0:
            sentiment = "neutral"
        else:
            sentiment = "positive"

        # Print the predicted sentiment with the tweet for console monitoring.
        print(sentiment, polarity, text)

        # Extract the first hashtag (if any) in proper (title) case.
        # Elasticsearch mapping used here does not take a None object, so fall
        # back to the literal string "None".
        hashtag_entries = dict_data.get("entities", {}).get("hashtags", [])
        if hashtag_entries:
            hashtags = hashtag_entries[0]["text"].title()
        else:
            hashtags = "None"

        # Index the tweet + sentiment info into the cluster as 'logstash-a';
        # create the matching index pattern in Kibana Management later to push
        # the data onto a dashboard.
        es.index(index="logstash-a",
                 doc_type="test-type",
                 body={"author": dict_data["user"]["screen_name"],
                       "followers": dict_data["user"]["followers_count"],
                       # Parse Twitter's fixed-offset timestamp format; the
                       # value is reformatted into a datetime stamp in Kibana.
                       "date": datetime.strptime(dict_data["created_at"], '%a %b %d %H:%M:%S +0000 %Y'),
                       "message": text,
                       "hashtags": hashtags,
                       "polarity": polarity,
                       "subjectivity": tweet.sentiment.subjectivity,
                       "sentiment": sentiment})
        return True

    # On failure, print the error code and do not disconnect (returning a
    # non-False value keeps the tweepy stream alive).
    def on_error(self, status):
        printstat = 'this is ur status failed'
        print(printstat)
        print(status)
def clicked():
    """Button callback: read the company name from the entry box, echo it in
    the status label, and open the local Kibana dashboard page in a browser.
    """
    # BUG FIX: stock_quote is read by the module-level streaming loop
    # (stream.filter(track=[stock_quote])), so it must be published as a
    # global; as a plain local it would be invisible there (NameError).
    global stock_quote
    stock_quote = txt.get()
    w2.configure(text=stock_quote + " is the value entered")
    # new=2: open in a new browser tab where possible.
    webbrowser.open('file:///home/aibot/Desktop/GIT_folder/Sentiment_Analysis_using_Python_Twitter_Kibana/index.html', new=2)
# Build the listener and authenticate against the Twitter API.
listener = TweetStreamListener()
# set twitter keys/tokens
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
# The exception that most often breaks the kernel in testing is
# IncompleteRead; this retry loop is meant to resume the stream when it
# breaks up, while Ctrl-C disconnects cleanly and exits.
#
# NOTE(review): at this point in the script, stock_quote has not been set —
# it is only assigned inside clicked(), and this loop runs BEFORE the GUI
# below is ever created, so stream.filter() raises NameError on first use.
# The loop presumably belongs in a background thread started from clicked();
# confirm intended control flow before relying on this script.
while True:
    try:
        # create instance of the tweepy stream
        stream = Stream(auth, listener)
        # search twitter for the keyword entered in the GUI (e.g. "facebook")
        stream.filter(track=[stock_quote])
    #except IncompleteRead:
    #    continue
    except KeyboardInterrupt:
        # or however you want to exit this loop
        stream.disconnect()
        break
# --- main GUI assembly ----------------------------------------------------
# Root window for the sentiment application.
window = Tk()
window.title('Sentiment Analyzer')
window.geometry('720x480')

# Widgets: prompt label, company-name entry, the two action buttons, and a
# status label (initially blank) that clicked() fills in with the entry text.
w = Label(window, text="Enter Company Name")
txt = Entry(window, width=15)
btn = Button(window, text="Predict Social Sentiment", command=clicked)
btn2 = Button(window, text="Quit", command=quit)
w2 = Label(window, text="")

# Stack the widgets top-to-bottom in display order.
for widget in (w, txt, btn, btn2, w2):
    widget.pack()

# Hand control to Tk's event loop; blocks until the window is closed.
window.mainloop()