import json
import tweepy
import time
from tweepy import OAuthHandler
# Twitter API credentials (redacted) — obtained from the Twitter Dev Console.
consumer_key = '*******************************'
consumer_secret = '*******************************'
access_token = '*******************************'
access_token_secret = '*******************************'
# OAuth 1a: authenticate with the app keys, then attach the user access token.
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
#api = tweepy.API(auth)
# wait_on_rate_limit(_notify): sleep and log when Twitter's rate limit is hit;
# retry each failed request up to 3 times with a 60-second delay between tries.
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True, retry_count=3, retry_delay=60)
# api.me() resolves the account whose access tokens authenticated this session
# and returns its User object.
user = api.me()
for label, value in (('Name: ', user.name),
                     ('ID: ', str(user.id)),
                     ('Location: ', user.location),
                     ('Friends: ', str(user.friends_count))):
    print(label + value)
Name: uzay00 ID: 14519511 Location: Friends: 404
# Look up another account by screen name and print the same profile summary.
user = api.get_user('kadirakgulll')
for label, value in (('Name: ', user.name),
                     ('ID: ', str(user.id)),
                     ('Location: ', user.location),
                     ('Friends: ', str(user.friends_count))):
    print(label + value)
Name: Kadir ID: 2332464662 Location: Türkiye Friends: 143
user.description
'Burayı okuyan kaldı mı ?'
user.created_at
datetime.datetime(2014, 2, 7, 22, 2, 54)
# Print the most recent tweets (default: last 20) from @kadirakgulll's timeline.
VeriDefteri_tweets = api.user_timeline('kadirakgulll')
for tweet in VeriDefteri_tweets:
    print(tweet.created_at, tweet.text)
2018-10-12 06:26:34 RT @ProfDemirtas: Geri kalmış ülkelere: GEÇMİŞ OLSUN https://t.co/jfDZwzYFqB 2018-10-11 21:48:11 RT @ckucukozmen: Dünyanın ilk enflasyon polisi bizde. Ama haberi sunanlar neye gülüyor anlamadım. https://t.co/4a4WIUObWL 2018-10-11 15:08:18 RT @coolstuffcheap: HDMI https://t.co/ZIW1Mtdp9U 2018-10-10 20:52:31 -Knock Knock -Who's there? -Let the devil in -What? Eminem - Venom https://t.co/npjZ5DmX34 @YouTube aracılığıyla 2018-10-10 20:04:10 RT @educatedear: Arda’nın tüm bunlara rağmen 5-10 sene sonra, belki çok daha yakın, bu ülkede teknik direktör, sportif direktör falan olaca… 2018-10-09 22:09:56 RT @ozgurugzo: ben izlerken ruhumu teslim ettim neyine gülüyosunuz acaba ya https://t.co/D9IhtfNwBS 2018-10-09 17:13:51 RT @ASRomaEN: International football is important but club football is importanter 2018-10-08 17:04:54 RT @canwaves: keşke Servet-i Fünun dönemine geri dönebilseydik ya https://t.co/jUArFBQ890 2018-10-07 15:35:36 RT @siyasifenomen: Siyasetçilerin, eşlerini ya da akrabalarını kamuda işe almasını yasaklayan kanun; Parlamento’da “oy birliğiye” kabul edi… 2018-10-07 10:52:26 RT @TuhafAmaGercek: İnsan tükürüğü, morfinden 6 kat daha güçlü bir ağrı kesici olan Opiorphin adlı kimyasal bir bileşim içerir. https://t.c… 2018-10-07 10:50:43 RT @takostate: mont mu alsam yoksa tofaş mı bir turlu karar veremiyorum 2018-10-06 19:02:13 RT @BBahadirErdem: İŞKURun 1500 kişilik temizlik işçisi kadrosuna 6000 Üniversite 8000 Yüksek Okul mezunu başvuruyorsa ülkede düşünülmesi v… 2018-10-06 14:56:13 RT @Roxabbe: Patates kızartması ve su sevmeyen insan hiç görmedim bi de tom hardy 2018-10-05 20:52:28 RT @CirkinIstanbul: Büfe mi bisiklet yolunun üzerine yapılmış, bisiklet yolu mu büfenin altından geçirilmiş? @uskudarbld #tavukmuyumurtudan… 2018-10-05 18:00:45 RT @theRA_official: Şu sağdaki Burcu mayonez. Soldaki Hellmann's. Yerli malını sikiyim. 
https://t.co/WAhja7IybP 2018-10-05 18:00:12 RT @demarkegaming: #5EkimDünyaÖğretmenlerGünü https://t.co/5hGSlkeYR5 2018-10-04 19:19:49 RT @archillect: https://t.co/F8ua5NgnlU 2018-10-04 13:56:51 RT @sonat_isik: arkadaşlar nude atma akımının karşıtı gibi gözükmek istemem ama lut kavminde de her şeyin toplu nudelaşma ile başladığına e… 2018-10-04 10:46:25 RT @Ucupak: bugün "kasten adam öldürme, uyuşturucu ticareti, tecavüz ve çocuk istismarı" suçlarına af isteyen aziz halkımız, 2-3 ay önce bu… 2018-10-04 10:45:25 RT @kul0s: narkotik uyuma https://t.co/72WMOLy4Zy
tweet.text
'RT @kul0s: narkotik uyuma https://t.co/72WMOLy4Zy'
tweet.author.id
2332464662
tweet.favorited
False
# Show the most recent tweets from the accounts the authenticated user follows.
tweets = api.home_timeline()
for tweet in tweets:
    print(tweet.text)
RT @cagrimbakirci: @teslajnr @MeteAtature İnsanlar gerçek hayatta karşılaşsalar insanlara yapamayacakları şeyleri internette yapabiliyorlar… RT @deaneckles: How are biological and social contagion affected by changes to network structure? Recent work has claimed a "weakness of lo… "Hey onbeşli onbeşli" ağıtını düğünlerde göbek havası olarak çalmakta sorun yokmuş, ama Çav Bella ile kalça kıvırma… https://t.co/jzLs3ypmaW İçişleri Bakanlığı, yolcuyu mağdur eden taksici sorununa el koydu https://t.co/Gd54MHvaa0 UK to cut hybrid car subsidies https://t.co/pkfuc0Hu59 @say_cem @BahaOkar @NalanMahsereci @BilimveGelecek Odanıza gelip bizzat imzalatabiliyor muyuz hocam? :) Graphics processing units that were designed to make video games look better are now being deployed to power everyt… https://t.co/pEuIp1v5DL Have you ever wondered what would happen if a drone hit an airplane wing? Researchers at the University of Dayton t… https://t.co/qIGvFyiK02 How to set up your home Wi-Fi https://t.co/5D9c9pBmcp https://t.co/wxcNx6WnPt Yargıtay Başkanı: Türk yargısında 5 kara delik var https://t.co/oNC2WwfoLX RT @ebskisafilm: #Eğitim konusunda söyleyecek sözün varsa filmini çek, toplamda 40 bin TL olan bu ödüllerden birini kazan. SON BAŞVURU TA… "Commuting patterns: the flow and jump model and supporting data" https://t.co/K3EI0zpcl7 "The Statistical Physics of Real-World Networks. (arXiv:1810.05095v1 [physics.soc-ph])" https://t.co/6GGKSgSRTn "Network localization is unalterable by infections in bursts. (arXiv:1810.04880v1 [physics.soc-ph])" https://t.co/FtZkWPffoI "Leveraging local network communities to predict academic performance. (arXiv:1810.04730v1 [https://t.co/lwVVolmoyC… https://t.co/0eqelxr8e5 "Detecting Core-Periphery Structures by Surprise. 
(arXiv:1810.04717v1 [physics.soc-ph])" https://t.co/TIc6DpP7Uk RT @Phil_Baty: It has happened: China has overtaken the US to become the world’s largest producer of scientific research papers - well ahea… RT @eglerean: Excellent blog post by @OnervaKorhonen. I especially liked figure 2 https://t.co/b6GTYZHBuy https://t.co/Fy4jeqNZvN
# tweet = api.update_status('Made with Tweepy yeah!')
def get_friends(user_id, count=10):
    """Fetch every account that *user_id* follows, paging through the list.

    Args:
        user_id: Twitter user id (or screen name) whose friends to fetch.
        count: users requested per page (default 10, matching the old
            hard-coded value; larger values mean fewer API calls).

    Returns:
        A list of tweepy User objects, one per friend.
    """
    users = []
    page_count = 0
    # Cursor transparently follows Twitter's pagination cursors; each
    # iteration yields one *page* (a list of users), not a single user —
    # the old loop variable was misleadingly named `user`.
    for page in tweepy.Cursor(api.friends, id=user_id, count=count).pages():
        page_count += 1
        print('Getting page {} for friends'.format(page_count))
        users.extend(page)
    return users
# Download the full friends list of the user fetched above.
friends = get_friends(user.id)
Getting page 1 for friends Getting page 2 for friends Getting page 3 for friends Getting page 4 for friends Getting page 5 for friends Getting page 6 for friends Getting page 7 for friends Getting page 8 for friends Getting page 9 for friends Getting page 10 for friends Getting page 11 for friends Getting page 12 for friends Getting page 13 for friends Getting page 14 for friends Getting page 15 for friends
# Walk the friends list; the per-friend dump is left disabled.
for f in friends:
    # print(f._json['name'], f._json['id'])
    pass
import re
import tweepy
from tweepy import OAuthHandler
from textblob import TextBlob
class TwitterClient(object):
    '''
    Generic Twitter Class for sentiment analysis.

    Wraps a tweepy.API instance and classifies tweet text as
    positive / neutral / negative using TextBlob polarity.
    '''
    def __init__(self):
        '''
        Class constructor or initialization method.

        Authenticates against the Twitter API. On failure a message is
        printed and ``self.api`` is left unset (best-effort, as before).
        '''
        # keys and tokens from the Twitter Dev Console (redacted)
        consumer_key = '*******************************'
        consumer_secret = '*******************************'
        access_token = '*******************************'
        access_token_secret = '*******************************'
        # attempt authentication
        try:
            # create OAuthHandler object
            self.auth = OAuthHandler(consumer_key, consumer_secret)
            # set access token and secret
            self.auth.set_access_token(access_token, access_token_secret)
            # create tweepy API object to fetch tweets
            self.api = tweepy.API(self.auth)
        except Exception:
            # narrowed from a bare ``except:`` so that system exits and
            # keyboard interrupts are no longer swallowed
            print("Error: Authentication Failed")

    def clean_tweet(self, tweet):
        '''
        Utility function to clean tweet text by removing links, special
        characters and @mentions using simple regex statements; collapses
        the remaining whitespace.
        '''
        # raw string avoids invalid-escape-sequence warnings for \w, \/ and \S
        return ' '.join(re.sub(r"(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", tweet).split())

    def get_tweet_sentiment(self, tweet):
        '''
        Utility function to classify sentiment of passed tweet
        using textblob's sentiment method.

        Returns one of 'positive', 'neutral', 'negative'.
        '''
        # create TextBlob object of the cleaned tweet text
        analysis = TextBlob(self.clean_tweet(tweet))
        # polarity > 0 is positive, == 0 neutral, < 0 negative
        if analysis.sentiment.polarity > 0:
            return 'positive'
        elif analysis.sentiment.polarity == 0:
            return 'neutral'
        else:
            return 'negative'

    def get_tweets(self, query, count=10):
        '''
        Main function to fetch tweets and parse them.

        Each result is a dict with 'text' and 'sentiment' keys; retweeted
        statuses are appended only once. Returns an empty list on API
        errors (previously an implicit None, which crashed callers that
        take len() of or iterate the result).
        '''
        # list of parsed tweets to return
        tweets = []
        try:
            # call twitter api to fetch tweets
            fetched_tweets = self.api.search(q=query, count=count)
            # parsing tweets one by one
            for tweet in fetched_tweets:
                # required params of this tweet
                parsed_tweet = {
                    'text': tweet.text,
                    'sentiment': self.get_tweet_sentiment(tweet.text),
                }
                if tweet.retweet_count > 0:
                    # if tweet has retweets, ensure that it is appended only once
                    if parsed_tweet not in tweets:
                        tweets.append(parsed_tweet)
                else:
                    tweets.append(parsed_tweet)
        except tweepy.TweepError as e:
            # print error (if any); fall through to return what we have
            print("Error : " + str(e))
        return tweets
def main(query='Donald Trump', count=10):
    """Fetch tweets matching *query* and print a sentiment breakdown.

    Args:
        query: search string passed to the Twitter search API.
        count: maximum number of tweets to request.
    """
    # creating object of TwitterClient Class
    api = TwitterClient()
    # calling function to get tweets
    tweets = api.get_tweets(query, count)
    # guard: the search may yield nothing (API error or no matches);
    # without this the percentage maths below divides by zero
    if not tweets:
        print("No tweets fetched.")
        return
    # picking positive tweets from tweets
    ptweets = [tweet for tweet in tweets if tweet['sentiment'] == 'positive']
    # percentage of positive tweets
    print("Positive tweets percentage:")
    print(100*len(ptweets)/len(tweets))
    # picking negative tweets from tweets
    ntweets = [tweet for tweet in tweets if tweet['sentiment'] == 'negative']
    # percentage of negative tweets
    print("Negative tweets percentage:")
    print(100*len(ntweets)/len(tweets))
    # percentage of neutral tweets (the remainder)
    print("Neutral tweets percentage:")
    print(100 - 100*len(ntweets)/len(tweets) - 100*len(ptweets)/len(tweets))
    # printing first 10 positive tweets (old comment wrongly said 5)
    print("\n\nPositive tweets:")
    for tweet in ptweets[:10]:
        print(tweet['text'])
    # printing first 10 negative tweets
    print("\n\nNegative tweets:")
    for tweet in ntweets[:10]:
        print(tweet['text'])
# Run the sentiment analysis for a sample query.
main(query = 'deep learning', count = 100)
Positive tweets percentage: 22.727272727272727 Negative tweets percentage: 9.090909090909092 Neutral tweets percentage: 68.18181818181819 Positive tweets: RT @math_rachel: Is your company interested in sponsoring diversity fellowships for our upcoming Practical Deep Learning for Coders course… RT @DataScienceNIG: Dr. Abiodun Modupe kick-started the session on "Deep Learning for Natural Language Processing ( NLP)" by laying a good… RT @SwissCognitive: Real World Application of Multi-Agent Deep Reinforcement Learning: Autonomous Traffic Flow Management at GTC Europe 201… I’ve just come up with a great idea for the application of AI and Deep Learning to what currently appears to be an… https://t.co/pf9MHK1TVj Humans will take forever just to get a single information while these AI will only take minutes. As we become more… https://t.co/ZvZtAy10bS A new developmental reinforcement learning approach for sensorimotor space enlargement https://t.co/yUFMRH57wL Real World Application of Multi-Agent Deep Reinforcement Learning: Autonomous Traffic Flow Management at GTC Europe… https://t.co/55ccFtq2k0 RT @PyDataFFM: Thanks for all the 'thank yous' :) Seems everyone had a great time at PyData Frankfurt #1 - we will be back next month with… RT @Robertson_SJ: NEW: Machine and deep learning for sport-specific movement recognition: a systematic review of model development and perf… RT @OpenAI: Apply for our Winter 2019 OpenAI Scholars Program, open to individuals from underrepresented groups in STEM interested in becom… Negative tweets: RT @jeremyphoward: Learn: - Intro Machine Learning https://t.co/gkefwjTwGh - Practical Deep Learning https://t.co/rue2Hahfv5 - Cutting Edg… RT @teamrework: We sat down with @RolandMemisevic from @twentybn to discuss his experiences at RE•WORK summits and gave us a sneak peek as… RT @SwissCognitive: How Artificial Intelligence Is Helping #Pharmaceuticals Develop Drugs #Bot #Cloud_Computing #CTO #Deep #Deep_Learning #… RT @jimhaseloff: Extensive 
review of "Opportunities and obstacles for deep learning in biology and medicine” J. R. Soc. Interface 15: 20170… https://t.co/3jgZpDEIxc D Kartik et. al. Policy Design for Active Sequential Hypothesis Testing using Deep Learni… https://t.co/9RjfsnncUY RT @fchollet: You can start training Keras models on TPUs, from the comfort of your browser, in a few seconds. Try it: https://t.co/G95rxp7…
# Search for 'CMPE 251' and collect one (tweet id, author id, author name,
# text) tuple per result.
fetched_tweets = api.search(q = 'CMPE 251', count = 10)
tweets = [
    (tweet.id_str, tweet.author.id_str, tweet.author.name, tweet.text)
    for tweet in fetched_tweets
]
tweets
[('1050652767985913856', '1585577172', 'Enes Gül', 'Cmpe 251 falan filan intermilan'), ('1050652749417738243', '338222489', 'Hasan Kemik', 'CMPE 251, course tryout!'), ('1050652697097990144', '2332464662', 'Kadir', 'CMPE 251 data science ?!'), ('1050652682220797953', '318510807', 'Selin Yeşilselve', 'Cmpe 251 is a great lesson!!!'), ('1050652679997779969', '14519511', 'uzay00', 'CMPE 251 is fun!! haha'), ('1050652671361728512', '217888241', 'Bartu Işıklar', 'CMPE 251 is not fun')]
# Inspect the last collected tuple.
tweets[-1]
'CMPE 251 is not fun'