How to use the prawcore.NotFound exception class in prawcore

To help you get started, we’ve selected a few prawcore examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github praw-dev / praw / tests / integration / models / reddit / test_subreddit.py View on Github external
def test_sticky__not_set(self):
    """Requesting sticky slot 2 of a subreddit with no sticky raises NotFound."""
    target = self.reddit.subreddit(pytest.placeholders.test_subreddit)
    cassette = self.recorder.use_cassette("TestSubreddit.test_sticky__not_set")
    with cassette, pytest.raises(NotFound):
        target.sticky(2)
github praw-dev / praw / tests / integration / models / reddit / test_subreddit.py View on Github external
def test_traffic__not_public(self):
    """Traffic stats of r/announcements are not public; expect NotFound."""
    announcements = self.reddit.subreddit("announcements")
    cassette_name = "TestSubreddit.test_traffic__not_public"
    with self.recorder.use_cassette(cassette_name), pytest.raises(NotFound):
        announcements.traffic()
github praw-dev / praw / tests / integration / models / reddit / test_wikipage.py View on Github external
def test_invalid_page(self):
    """Reading content_md of a nonexistent wiki page raises NotFound."""
    sub = self.reddit.subreddit(pytest.placeholders.test_subreddit)
    missing = WikiPage(self.reddit, sub, "invalid")
    cassette = self.recorder.use_cassette("TestWikiPage.test_invalid_page")
    with cassette, pytest.raises(NotFound):
        missing.content_md
github praw-dev / prawcore / tests / test_sessions.py View on Github external
def test_request__not_found(self):
    """A GET for a missing wiki page surfaces prawcore.NotFound."""
    with Betamax(REQUESTOR).use_cassette("Session_request__not_found"):
        session = prawcore.Session(script_authorizer())
        # assertRaises as a context manager is equivalent to the callable form.
        with self.assertRaises(prawcore.NotFound):
            session.request("GET", "/r/reddit_api_test/wiki/invalid")
github NotYourGuy / scraperr / scrapper.py View on Github external
args = parser.parse_args()

hot_subreddit = reddit.subreddit(args.subreddit).top(args.period,
                                                     limit=args.limit)

# PRAW listings are lazy: the network request happens while iterating,
# so error handling must wrap the iteration, not the subreddit() call.
try:
    url = [post.url for post in hot_subreddit]
# BUGFIX: prawcore.NotFound subclasses prawcore.ResponseException, so it
# must be caught FIRST; previously the ResponseException handler shadowed
# it and the subreddit-specific message was unreachable.  The message
# strings also gained the space that implicit concatenation was missing
# ("...check thatyour..." -> "...check that your...").
except prawcore.NotFound:
    print('Failed to find a subreddit called "{}". Please check that '
          'the subreddit exists and try again.'.format(args.subreddit))
    sys.exit(-3)
except prawcore.OAuthException:
    print('An error occurred during authorisation. Please check that '
          'your Reddit account credentials are set correctly and try again.')
    sys.exit(-2)
except prawcore.ResponseException:
    print('An error occurred during authorisation. Please check that '
          'your Reddit app credentials are set correctly and try again.')
    sys.exit(-1)

# https://stackoverflow.com/a/3173388


def main():
    """Entry point: verify gallery-dl is on PATH, then start downloading."""
    # shutil.which returns None when the executable is absent, so a plain
    # truthiness test is equivalent to the explicit `is None` comparison.
    if not shutil.which("gallery-dl"):
        print('gallery-dl not found on system')
        sys.exit(-4)
    download_urls()


def download_urls():
github georgeglessner / RedditImageDownloader / download_images.py View on Github external
# NOTE(review): Python 2 fragment (print statement, raw_input) excerpted from
# a larger function; the line below appears to have lost its leading indent
# during extraction while the rest keeps the original 4-space body indent.
subreddit = arguments.get('--subreddit')
    # Docopt-style CLI options; presumably num_pics caps the number of
    # downloads later in the function (unused in this excerpt) -- TODO confirm.
    num_pics = int(arguments.get('--number'))
    search_term = arguments.get('--query')
    page = arguments.get('--page')

    # prompt for a subreddit if none given
    if subreddit == None:  # NOTE(review): PEP 8 prefers `is None`; left as-is
        while True:
            # obtain subreddit to download images from, and number of images to download
            subreddit = raw_input('Please enter subreddit: ')

            # check that subreddit exists
            try:
                # Exact-name search raises NotFound when no such subreddit exists.
                reddit.subreddits.search_by_name(subreddit, exact=True)
                break
            except NotFound:
                # Python 2 print statement; loop re-prompts until a valid name.
                print 'Subreddit %s does not exist.' % subreddit

    # determine what to search
    # With no --query, pick a listing by the --page option.  An unrecognized
    # page value leaves `results` unbound (NameError downstream) -- TODO
    # confirm callers guarantee one of these five values.
    if search_term == None:
        if page == 'hot':
            results = reddit.subreddit(subreddit).hot()
        elif page == 'controversial':
            results = reddit.subreddit(subreddit).controversial()
        elif page == 'top':
            results = reddit.subreddit(subreddit).top()
        elif page == 'rising':
            results = reddit.subreddit(subreddit).rising()
        elif page == 'new':
            results = reddit.subreddit(subreddit).new()
    else:
        # Search call continues past this excerpt (truncated here).
        results = reddit.subreddit(subreddit).search(
github JosephLai241 / Universal-Reddit-Scraper / reddit_scraper.py View on Github external
def existence(reddit,sub_list):
    """Partition *sub_list* into subreddits that exist and those that don't.

    Returns a ``(found, not_found)`` tuple of lists of subreddit names.
    """
    found, not_found = [], []
    for name in sub_list:
        try:
            # An exact-name search raises NotFound for a missing subreddit.
            reddit.subreddits.search_by_name(name, exact=True)
        except NotFound:
            not_found.append(name)
        else:
            found.append(name)
    return found, not_found
github JosephLai241 / Universal-Reddit-Scraper / urs / utils / Validation.py View on Github external
def _check_redditors(found, not_found, object_list, reddit):
    """Sort Redditor names into *found*/*not_found* by whether they exist.

    Appends to the caller-supplied lists in place; returns None.
    """
    for username in object_list:
        try:
            # Touching .id forces the lazy Redditor fetch; a missing or
            # suspended account surfaces as NotFound.
            reddit.redditor(username).id
        except NotFound:
            not_found.append(username)
        else:
            found.append(username)