Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
@responses.activate
def test_request_should_handle_rate_limit(self):
    """Test that Twython raises a rate limit error on HTTP 429.

    Registers a mocked 429 response (with Twitter's standard ``errors``
    JSON payload) for the home-timeline endpoint and asserts that
    ``request`` surfaces it as ``TwythonRateLimitError``.
    """
    endpoint = 'statuses/home_timeline'
    url = self.get_url(endpoint)
    # Twitter signals rate limiting with status 429 and an "errors" body.
    self.register_response(responses.GET, url,
                           body='{"errors":[{"message":"Rate Limit"}]}',
                           status=429)
    self.assertRaises(TwythonRateLimitError, self.api.request, endpoint)
:param token: OAuth token pair stored as a single string ("key#TH#secret")
:param kwargs: forwarded to the parent service constructor
"""
super(ServiceTwitter, self).__init__(token, **kwargs)
# Application (consumer) credentials come from Django settings.
self.consumer_key = settings.TH_TWITTER_KEY['consumer_key']
self.consumer_secret = settings.TH_TWITTER_KEY['consumer_secret']
self.token = token
self.oauth = 'oauth1'
self.service = 'ServiceTwitter'
if self.token is not None:
# The stored token bundles key and secret, joined with a '#TH#' marker.
token_key, token_secret = self.token.split('#TH#')
try:
self.twitter_api = Twython(self.consumer_key,
self.consumer_secret,
token_key, token_secret)
except (TwythonAuthError, TwythonRateLimitError) as e:
# Auth/rate-limit failure at construction time: record the error
# against the owning trigger so it is surfaced to the user.
us = UserService.objects.get(token=token)
logger.error(e.msg, e.error_code)
update_result(us.trigger_id, msg=e.msg, status=False)
def dump_user_profile_job(user_id, save_location, twython_connector: "TwythonConnector"):
    """Fetch one Twitter user's profile and dump it as ``<save_location>/<user_id>.json``.

    Skips users whose profile file already exists.  A rate-limit error is
    logged and the job finishes without writing anything.

    :param user_id: numeric Twitter user id (used for both lookup and filename)
    :param save_location: directory the JSON file is written into
    :param twython_connector: provider of authenticated Twython connections
        (annotation quoted so the module imports even before the type is defined)
    """
    out_path = Path("{}/{}.json".format(save_location, user_id))
    # Fetch and save user information only if the file is not already present.
    if out_path.is_file():
        return

    profile_info = None
    try:
        profile_info = twython_connector.get_twython_connection(GET_USER).show_user(user_id=user_id)
    except TwythonRateLimitError:
        logging.exception("Twython API rate limit exception")
    finally:
        if profile_info:
            # was: json.dump(obj, open(...)) -- leaked the file handle;
            # the context manager guarantees the file is flushed and closed.
            with open(str(out_path), "w") as out_file:
                json.dump(profile_info, out_file)
# NOTE(review): fragment of a tweet-object dump job; `tweet_list`,
# `tweet_chunk`, `twython_connector`, `config`, `Constants` and
# `create_dir` are defined elsewhere in the original module.
try:
# Bulk-hydrate the chunk in one call; with map=True the payload is
# presumably {'id': {tweet_id_str: tweet_or_None, ...}} -- hence the
# ['id'] lookup and the str() keys below. TODO confirm against the API.
tweet_objects_map = twython_connector.get_twython_connection(Constants.GET_TWEET).lookup_status(id=tweet_list,
include_entities=True,
map=True)['id']
for tweet in tweet_chunk:
tweet_object = tweet_objects_map[str(tweet.tweet_id)]
# Falsy entries (deleted/protected tweets) are skipped.
if tweet_object:
dump_dir = "{}/{}/{}/{}".format(config.dump_location, tweet.news_source, tweet.label, tweet.news_id)
tweet_dir = "{}/tweets".format(dump_dir)
create_dir(dump_dir)
create_dir(tweet_dir)
# NOTE(review): open() without a context manager leaks the handle.
json.dump(tweet_object, open("{}/{}.json".format(tweet_dir, tweet.tweet_id), "w"))
except TwythonRateLimitError:
logging.exception("Twython API rate limit exception")
except Exception as ex:
# Best-effort: any other failure is logged and the chunk abandoned.
logging.exception("exception in collecting tweet objects")
return None
def new_func(*a, **k):
    """Thread-safe wrapper: run the closed-over ``func`` with the given args.

    Rate-limit errors are expected and dropped silently; any other
    exception is logged (with thread id, function and arguments) and
    swallowed so the worker thread survives.
    """
    try:
        func(*a, **k)
    except TwythonRateLimitError:
        # Expected under load: discard the work item without noise.
        pass
    except Exception:
        # was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt
        # still propagate; lazy %-args avoid formatting when logging is off.
        log.exception("Thread %d with function %r, args of %r, and kwargs of %r failed to run.",
                      threading.current_thread().ident, func, a, k)
# NOTE(review): Python 2 fragment (print statements, `except Exception, e`).
# The `continue` below implies this body sits inside a retry loop whose
# header is above this excerpt.
try:
tweets = []
print '--- Searching'
print ','.join(tweetBulk)
# Bulk-hydrate the batch of tweet IDs in a single API call.
tweets = twitter.lookup_status(id=','.join(tweetBulk))
# NOTE(review): `nbRetrieved` is initialised here but `nb` is the counter
# actually incremented/printed below -- confirm which one is intended.
nbRetrieved = 0
for tweet in tweets:
if excludeRetweets and 'retweeted_status' in tweet:
print '--- Excluding retweet', tweet['id_str']
else:
print '--- Output', tweet['id_str']
# One JSON object per line (JSON Lines output file).
json.dump(tweet, outfile)
outfile.write('\n')
nb += 1
print '--- Retrieved:', nb
except twython.TwythonRateLimitError as error:
# x-rate-limit-reset is an epoch timestamp: sleep until the window resets.
remainder = float(twitter.get_lastfunction_header(header='x-rate-limit-reset')) - time.time()
print '--- Sleeping:', remainder
if remainder > 0:
time.sleep(remainder)
continue
except Exception, e:
# Unknown failure: back off for 15 minutes, then the outer loop retries.
print e
print '--- Unkown exception.... sleeping 15 min anyway before retry'
time.sleep(60*15)
print '--- Finished with:', nb
# reading credentials
# NOTE(review): credential.txt holds one secret per line in fixed order --
# app key, app secret, oauth token, oauth token secret -- in plain text.
credentials = [l.strip() for l in open("{0}/data/credential.txt".format(pkg_path))]
APP_KEY = credentials[0]
APP_SECRET = credentials[1]
OAUTH_TOKEN = credentials[2]
OAUTH_TOKEN_SECRET = credentials[3]
#TODO: incremental backup time interval?
# Seconds to back off after a Twitter-side error before reconnecting.
back_up_interval = 90
# Ask for compressed payloads to cut streaming bandwidth.
client_args = {'headers': {'Accept-Encoding': 'deflate, gzip'}}
#client_args = {'headers': {'User-Agent': 'geoloc', 'Accept-Encoding': 'deflate, gzip', 'Host': 'api.twitter.com'}}
# Reconnect loop: filter() blocks until the stream errors out; most errors
# sleep then reconnect, BufferError ends the collection for good.
while True:
try:
stream = MyStreamer(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET, client_args = client_args)
# Bounding box spanning the whole globe: all geotagged statuses.
stream.statuses.filter(locations='-180,-90,180,90')
except twython.TwythonRateLimitError:
time.sleep(back_up_interval)
except twython.TwythonAuthError:
time.sleep(back_up_interval)
except twython.TwythonError:
time.sleep(back_up_interval)
except requests.exceptions.ChunkedEncodingError:
# Transient stream hiccup: reconnect immediately without backoff.
pass
except requests.exceptions.ConnectionError:
time.sleep(back_up_interval)
except BufferError:
break
# Follower harvesting for one seed account (`root`); helpers such as
# get_all_locations_in_india, write_log, get_good_followers and check_clock
# plus the *_LIMIT/*_SIZE constants are defined elsewhere in the script.
locations = set(get_all_locations_in_india())
# Cap how many followers we are willing to page through.
num_followers = min(MAX_FOLLOWERS, twitter.show_user(screen_name=root)['followers_count'])
next_cursor = -1; num_requests = 0; users_downloaded = 0; num_good_followers = 0; page_number = 1; file_count = 0
last_time = datetime.now()
time_start = datetime.now()
good_followers = []
# NOTE(review): in this visible fragment neither `users_downloaded` nor
# `next_cursor` nor `page_number` is updated -- the loop cannot terminate or
# page as shown; confirm against the full file.
while users_downloaded < num_followers:
write_log('Downloading followers page %d for %s\n' % (page_number, root))
try:
response = twitter.get_followers_list(screen_name=root, count=FOLLOWER_BATCH_SIZE, cursor=next_cursor)
followers = response['users']
except TwythonRateLimitError:
# Out of quota: wait a full rate-limit window, retry the same page.
write_log('Sleeping...\n')
sleep(RATE_LIMIT_WINDOW)
continue
except TwythonError:
continue
except KeyError:
continue
# Keep only followers passing the location / minimum-tweet filters.
size_good_followers = len(good_followers)
good_followers.extend((list(get_good_followers(followers, locations, MIN_TWEETS))))
num_good_followers += len(good_followers) - size_good_followers
num_requests += 1
# NOTE(review): `num_requests` is never reset after reaching the limit;
# presumably check_clock handles the pause -- TODO confirm.
if num_requests == FOLLOWER_RATE_LIMIT:
check_clock(last_time)
last_time = datetime.now()
print("Retrieved %s follower IDs from twitter" % len(follower_ids))
# store what we've got so far
# insert follower_ids in the followers collection
# update_one without upsert: only a pre-existing document for this
# screen_name gets its "ids" field replaced.
res = db.followers.update_one(
{"screen_name": screen_name},
{ '$set': {"ids": follower_ids} }
)
if res.matched_count == 0:
print("Unable to update IDs for: ",screen_name)
elif res.modified_count == 0:
print("%s IDs not modified"% screen_name)
else:
print("%s now has %s IDs " % (screen_name, str(len(follower_ids))) )
followers_status(screen_name)
# NOTE(review): these except clauses belong to a try opened above this
# excerpt -- the fragment is not a complete statement on its own.
except TwythonRateLimitError as e:
# Wait if we hit the Rate limit
followers_status(screen_name)
wait_for_awhile()
# NOTE(review): bare except silently swallows every other error with only
# a print; consider narrowing once the enclosing try is visible.
except:
print(" FAILED: Unexpected error:", sys.exc_info()[0])
pass
# followers_status(screen_name)
# -----------------------------------------------------------
# Timelines
# -----------------------------------------------------------
if opts.timelines:
# catch IDs that error out
error_ids = list()
def get_tweets_from_hashtag(twitter, hashtag, tries=10, count=100):
    """Collect tweet texts (and TwttPoet permalinks) matching *hashtag*.

    Performs up to *tries* search calls of *count* results each, paging
    backwards by updating ``max_id`` from the last tweet seen.  Raises
    IOError with the remaining wait time when the API rate limit is hit.
    """
    max_id = float("inf")
    collected = []
    for attempt in range(1, tries + 1):
        try:
            page = twitter.search(q=hashtag, count=count, max_id=max_id)
            for tweet in page['statuses']:
                collected.append(tweet['text'].encode('utf-8'))
                collected.append("twitter.com/TwttPoet/status/" + tweet['id_str'].encode('utf-8'))
                max_id = tweet['id']
        except TwythonRateLimitError:
            # Compute seconds until the rate-limit window resets (+5s slack).
            seconds = str(float(twitter.get_lastfunction_header('x-rate-limit-reset')) - time() + 5)
            raise IOError('You hit the rate limit! Try again in ' + seconds + ' seconds.')
    return collected