How to use the twython.exceptions module in twython

To help you get started, we’ve selected a few twython.exceptions examples, based on popular ways it is used in public projects.

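The module groups twython's error types under a small hierarchy: TwythonError is the base class (its error_code attribute carries the HTTP status Twitter returned), with more specific subclasses such as TwythonAuthError and TwythonRateLimitError. A minimal sketch of catching them, using placeholder credentials and an arbitrary endpoint call:

import twython

# Placeholder credentials; substitute real app and user tokens.
twitter = twython.Twython("APP_KEY", "APP_SECRET", "OAUTH_TOKEN", "OAUTH_TOKEN_SECRET")

try:
    tweets = twitter.get_home_timeline(count=10)
except twython.exceptions.TwythonRateLimitError as e:
    # Raised on HTTP 429; back off before retrying.
    print("Rate limited:", e)
except twython.exceptions.TwythonAuthError as e:
    # Raised when Twitter rejects the credentials (e.g. HTTP 401).
    print("Authentication failed:", e)
except twython.exceptions.TwythonError as e:
    # Base class for all twython errors; e.error_code holds the HTTP status.
    print("Twitter error %s: %s" % (e.error_code, e))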

github dmuth / twitter-aws-comprehend / 0-fetch-tweets.py View on Github external
input("Press [Enter] to open the auth page in your web browser! [Enter] ")

	webbrowser.open(auth_url)
	oauth_verifier = input("Enter Your PIN: ")

	oauth_token = auth['oauth_token']
	oauth_token_secret = auth['oauth_token_secret']
	logger.debug("OAUTH Token: " + oauth_token)
	logger.debug("OAUTH Token Secret: " + oauth_token_secret)

	twitter = twython.Twython(app_key, app_secret, oauth_token, oauth_token_secret)

	try:
		final_step = twitter.get_authorized_tokens(oauth_verifier)

	except twython.exceptions.TwythonError as e:
		print ("! ")
		print ("! Caught twython.exceptions.TwythonError:", e)
		print ("! ")
		print ("! Did you enter the right PIN code?")
		print ("! ")
		exit(1)

	final_oauth_token = final_step['oauth_token']
	final_oauth_token_secret = final_step['oauth_token_secret']
	logger.debug("Final OUATH token: " + final_oauth_token)
	logger.debug("Final OAUTH token secret: " + final_oauth_token_secret)

	twitter_data = {
		"app_key": app_key,
		"app_secret": app_secret,
		"final_oauth_token": final_oauth_token,
github tq010or / acl2013 / geoloc / adapters / twitter_adapter.py View on Github external
def parse_user_timeline(input_data):
    """    Parse user timeline data    """
    err_msg = None
    if isinstance(input_data, basestring): # Crawl and parse up to 200 recent statuses from user timeline using Oauth
        try:
            params = {"screen_name":input_data, "count":200}
            input_data = api.get(utl_endpoint, params)
            if not input_data:
                err_msg = "Please ensure the user has public available tweets"
                return (None, err_msg)
        except twython.exceptions.TwythonError:
            err_msg = "Please check <b>" + input_data  + "</b> is correctly spelt and not protected."
            return (None, err_msg) 
        else:
            #return distill_data(input_data, simplify_twitter_obj)
            #NOTE: twython already returns parsed JSON, so no extra decoding is needed
            return distill_data(input_data, simplify_twitter_json)
    elif isinstance(input_data, list):
        return distill_data(input_data, simplify_twitter_json)
    else:
        err_msg = "Invalida input for parsing user timeline"
        return (None, err_msg)
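The bare TwythonError catch above reports a misspelt screen name and a protected account with the same message. If you need to tell them apart, the exception's error_code (the HTTP status, as also used in the tweetf0rm example further down) can be inspected. A sketch under the assumption that Twitter answers 404 for a nonexistent user and 401 for a protected timeline; api and utl_endpoint mirror the module-level names the snippet relies on, with placeholder values:

import twython

# Placeholders standing in for the snippet's module-level globals.
api = twython.Twython("APP_KEY", "APP_SECRET", "OAUTH_TOKEN", "OAUTH_TOKEN_SECRET")
utl_endpoint = "https://api.twitter.com/1.1/statuses/user_timeline.json"

def fetch_timeline(screen_name):
    try:
        return api.get(utl_endpoint, {"screen_name": screen_name, "count": 200}), None
    except twython.exceptions.TwythonError as e:
        # Assumed status codes: 404 for a user that does not exist,
        # 401 for a protected account.
        if e.error_code == 404:
            return None, "No such user: %s" % screen_name
        if e.error_code == 401:
            return None, "%s has protected tweets." % screen_name
        return None, "Unexpected Twitter error: %s" % e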
github bianjiang / tweetf0rm / tweetf0rm / twitterapi / twitter_api.py View on Github external
raise Exception("show_status: tweet_id cannot be None")

		tweet = None
		retry_cnt = MAX_RETRY_CNT
		while retry_cnt > 0:
			try:
				tweet = self.show_status(id=tweet_id)

				# logger.debug('%d > %d ? %s'%(prev_max_id, current_max_id, bool(prev_max_id > current_max_id)))
				logger.info("Fetched tweet [%s]" % (tweet_id))

				break

			except twython.exceptions.TwythonRateLimitError:
				self.rate_limit_error_occured('statuses', '/statuses/show')
			except twython.exceptions.TwythonError as te:
				if ( te.error_code == 404 or te.error_code == 403 ):
					logger.info("Tweet [%s] unavailable. Error code: %d" % (tweet_id, te.error_code))

					break
				else:
					time.sleep(10)
					logger.error("exception: %s"%(te))
					retry_cnt -= 1
					if (retry_cnt == 0):
						raise MaxRetryReached("max retry reached due to %s"%(te))
			except Exception as exc:
				time.sleep(10)
				logger.error("exception: %s, %s"%(exc, type(exc)))
				retry_cnt -= 1
				if (retry_cnt == 0):
					raise MaxRetryReached("max retry reached due to %s"%(exc))
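In recent twython releases TwythonRateLimitError also exposes a retry_after attribute, populated from Twitter's rate-limit headers. A sketch of an alternative to the rate_limit_error_occured() helper above that sleeps until the window resets; it assumes retry_after holds the x-rate-limit-reset epoch timestamp, which is worth verifying against the twython version in use:

import time
import twython

def fetch_with_backoff(twitter, tweet_id, max_retry=5):
    # `twitter` is an authenticated twython.Twython instance.
    for _ in range(max_retry):
        try:
            return twitter.show_status(id=tweet_id)
        except twython.exceptions.TwythonRateLimitError as e:
            # Assumed to be the x-rate-limit-reset epoch time; fall back to a
            # fixed wait when the header was not provided.
            reset = getattr(e, "retry_after", None)
            time.sleep(max(int(reset) - int(time.time()), 1) if reset else 60)
    raise RuntimeError("still rate limited after %d attempts" % max_retry)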
github digitalgreenorg / dg / social_website / management / commands / social_media.py View on Github external
def update_footer_stats(self):
        logger = logging.getLogger('social_website')    
        footer = WebsiteFooter()
        try:
            footer.fetch_facebook_likes()
        except (httplib.HTTPException, urllib2.HTTPError, urllib2.URLError):
            logger.error("error in updating facebook likes")
        try:
            footer.fetch_twitter_followers()
        except exceptions.TwythonError:
            logger.error("error in updating twitter followers")
        try:
            footer.fetch_youtube_videos()
        except (httplib.HTTPException, urllib2.HTTPError, urllib2.URLError):
            logger.error("error in updating youtube videos")
        try:
            footer.fetch_linkedin_subscribers()
        except Exception:
            logger.error("error in updating linkedin subscribers")
        footer.write()
        logger.info("Updated footer stats")
github jsvine / twick / twick / search.py View on Github external
def query(self, q, **kw):
        opts = copy(defaults)
        opts.update(kw)
        wait = 1
        while True:
            try:
                response = Response(self.twitter.search(q=q, **opts))
                break
            except twython.exceptions.TwythonError as err:
                logger.info("Twython error: {0}".format(err))
                logger.info("Waiting {0} seconds...".format(wait))
                sleep(wait)
                wait *= 2
        return response
github bianjiang / tweetf0rm / tweetf0rm / twitterapi / twitter_api.py View on Github external
				#no new tweets found
				if (prev_max_id == current_max_id):
					break

				result_tweets.extend(tweets['statuses'])

				cnt += len(tweets['statuses'])

				#logger.info(cnt)

				logger.debug('%d > %d ? %s'%(prev_max_id, current_max_id, bool(prev_max_id > current_max_id)))

				time.sleep(1)

			except twython.exceptions.TwythonRateLimitError:
				self.rate_limit_error_occured('search', '/search/tweets')
			except Exception as exc:
				time.sleep(10)
				logger.debug("exception: %s"%exc)
				retry_cnt -= 1
				if (retry_cnt == 0):
					raise MaxRetryReached("max retry reached due to %s"%(exc))

		if (len(result_tweets) > 0):
			for tweet in result_tweets:
				for handler in write_to_handlers:
					handler.append(json.dumps(tweet), bucket=bucket, key=key)

				for handler in cmd_handlers:
					handler.append(json.dumps(tweet), bucket=bucket, key=key)
		else:
github digitalgreenorg / dg / social_website / scripts / write_footer_social.py View on Github external
    # Facebook Likes
    try:
        response = urllib2.urlopen('https://graph.facebook.com/digitalgreenorg')
        data = json.loads(response.read())
        facebook_likes = data['likes']
        facebook_likes = '{:,}'.format(int(facebook_likes))
    except (httplib.HTTPException, urllib2.HTTPError, urllib2.URLError):
        facebook_likes = old_fb
        logger.error("error in updating facebook likes")

    # Twitter Followers
    try:
        twitter = Twython(APP_KEY_TWITTER, APP_SECRET_TWITTER,
                          OAUTH_TOKEN_TWITTER, OAUTH_TOKEN_SECRET_TWITTER)
        twitter_obj = twitter.get_followers_ids()
        twitter_followers = len(twitter_obj['ids'])
        twitter_followers = '{:,}'.format(int(twitter_followers))
    except exceptions.TwythonError:
        twitter_followers = old_twitter
        logger.error("error in updating twitter followers\n")

    # Youtube Videos
    try:
        response = urllib2.urlopen('https://gdata.youtube.com/feeds/api/users/digitalgreenorg/uploads?v=2&alt=jsonc&max-results=0')
        data = json.loads(response.read())
        youtube_videos = data['data']['totalItems']
        youtube_videos = '{:,}'.format(int(youtube_videos))
    except (httplib.HTTPException, urllib2.HTTPError, urllib2.URLError):
        youtube_videos = old_yt
        logger.error("error in updating youtube videos\n")

    # Linkedin Followers
    try:
        url = "http://api.linkedin.com/v1/companies/619071:(id,name,num-followers)?format=json"