How to use the prawcore.exceptions.NotFound exception in prawcore

To help you get started, we’ve selected a few prawcore examples based on how it is commonly used in public projects.

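prawcore raises NotFound when Reddit's API answers with HTTP 404, and PRAW lets the exception propagate, so catching it is the usual way to handle a resource that does not exist, such as a missing user, subreddit, or wiki page. Here is a minimal sketch of that pattern; the "bot" praw.ini site and the redditor name are placeholders, not taken from any of the examples below.

import praw
import prawcore

# Assumes credentials are configured in a praw.ini site named "bot" (hypothetical).
reddit = praw.Reddit("bot")

try:
    # Accessing an attribute forces PRAW to fetch the account; a nonexistent
    # username makes Reddit answer 404, which surfaces as prawcore's NotFound.
    redditor_id = reddit.redditor("example_missing_user").id
except prawcore.exceptions.NotFound:
    redditor_id = None  # treat the account as nonexistent and carry on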

github GermainZ / Taskerbot / taskerbot.py
def load_sub_config(self, subreddit):
        logging.debug("Loading mods…")
        mods = [mod.name for mod in self.r.subreddit(subreddit).moderator()]
        logging.info("Mods loaded: %s.", mods)
        logging.debug("Loading reasons…")
        try:
            reasons = yaml.safe_load(
                html.unescape(
                    self.r.subreddit(subreddit).wiki["taskerbot"].content_md
                )
            )
            SCHEMA_VALIDATOR.validate(reasons)
            logging.info("Reasons loaded.")
        except (jsonschema.exceptions.ValidationError, NotFound):
            reasons = None
            logging.warning(
                "r/%s/wiki/taskerbot not found or invalid, ignoring", subreddit
            )
        return mods, reasons

github jrittenh / JustNoSinclair / justnosinclair.py
and re.search("|".join(domains), submission.url, re.IGNORECASE) \
                    and submission_timely:
                        try:
                            print("SINCLAIR", "[" + subreddit + "]", submission.title, submission.url)
                            submission.reply(comment)
                            posts_replied_to.append(submission.id)
                            with open("posts_replied_to", "a") as f:
                                f.write(submission.id + "\n")
                        except exceptions.Forbidden:
                            remove_subreddit(local_subreddits, subreddit, "banned")
                        except Exception as e:
                            print(type(e))
                            print(e)
        except exceptions.Forbidden:
            remove_subreddit(local_subreddits, subreddit, "private")
        except exceptions.NotFound:
            remove_subreddit(local_subreddits, subreddit, "invalid")
        except exceptions.Redirect:
            remove_subreddit(local_subreddits, subreddit, "not_found")
        except KeyError:
            remove_subreddit(local_subreddits, subreddit, "removed")
except Exception as e:
    print(type(e))
    print(e)

github tylerbrockett / Alert-Bot-Reddit / src / bot_modules / reddit_handler.py
def get_submissions(self, subreddit, index, num_subs):
        submissions = []
        posts = 200 if (subreddit == 'all') else self.NUM_POSTS
        try:
            subs = self.reddit.subreddit(subreddit).new(limit=posts)
            for submission in subs:
                submissions.append(submission)
            Logger.log(Logger.aligntext(subreddit.lower(), 30) + '(' + str(index) + '/' + str(num_subs) + ')', Color.CYAN)
        except Forbidden as e:
            Logger.log(Logger.aligntext(subreddit.lower(), 30) + 'Forbidden (403)', Color.RED)
        except NotFound as e:
            Logger.log(Logger.aligntext(subreddit.lower(), 30) + 'NotFound (404)', Color.RED)
        except Exception as e:
            Logger.log(Logger.aligntext(subreddit.lower(), 30) + str(e), Color.RED)
        return submissions

github mcandocia / TreeGrabForReddit / user_scrape.py
def scrape_user(username, opts, scraper, force_read=False):
    sys.stdout.write('extracting information for %s...' % username)
    sys.stdout.flush()
    try:
        user = scraper.redditor(username)
        user_id = user.id
    except (NotFound, RequestException, AttributeError):

        # going to see what happens if I let praw_object_data.py catch this
        user_id = None
        print('user %s is not a valid user' % username)
        #return 1
    
    previous_time = localize(opts.db.get_user_update_time(user_id))
    if previous_time is None or force_read:
        data = pod.get_user_data(user, opts, mode='user')
    elif float((datetime.datetime.now(pytz.utc) - previous_time).\
               seconds)/(3600.*24) > opts.user_delay:
        data = pod.get_user_data(user, opts, mode='user')
    else:
        print('%s is too recently in database' % username)
        return 2
    comments = data['commentdata']

github danthedaniel / puni / puni / base.py
def get_json(self):
        """Get the JSON stored on the usernotes wiki page.

        Returns a dict representation of the usernotes (with the notes BLOB
        decoded).

        Raises:
            RuntimeError if the usernotes version is incompatible with this
                version of puni.
        """
        try:
            usernotes = self.subreddit.wiki[self.page_name].content_md
            notes = json.loads(usernotes)
        except NotFound:
            self._init_notes()
        else:
            if notes['ver'] != self.schema:
                raise RuntimeError(
                    'Usernotes schema is v{0}, puni requires v{1}'.
                    format(notes['ver'], self.schema)
                )

            self.cached_json = self._expand_json(notes)

        return self.cached_json

github mcandocia / TreeGrabForReddit / praw_object_data.py
'gold':comment.gilded,
                'silver': comment.gildings.get('gid_1', 0),
                'platinum': comment.gildings.get('gid_3', 0),
                'is_stickied': comment.stickied,
                'score':comment.score,
                'is_distinguished':comment.distinguished is not None,
                'thread_id':comment.link_id[3:],
                'subreddit':comment.subreddit.display_name,
                'subreddit_id':subreddit_id,
                'absolute_position':None,
                'nreplies':None,
                'thread_begin_timestamp':None,
                'scrape_mode':mode,
                'timestamp':datetime.now()
                }
    except NotFound:
        print('comment deleted before cacheable (shouldn\'t happen)')
        return {}
    return {comment.id:data}

github Dan6erbond / Banhacker / banhammer / subreddit.py
def load_reactions(self):
        if self.custom_emotes:
            try:
                reaction_page = self.subreddit.wiki['banhammer-reactions']
                reacts = reaction.get_reactions(reaction_page.content_md)["reactions"]
                if len(reacts) > 0: self.reactions = reacts
            except prawcore.exceptions.NotFound:
                pass
            except Exception as e:
                print(type(e), e)

        if not len(self.reactions) > 0:
            dir_path = os.path.dirname(os.path.realpath(__file__))
            with open(dir_path + "/reactions.yaml", encoding="utf8") as f:
                content = f.read()
                self.reactions = reaction.get_reactions(content)["reactions"]
                try:
                    self.subreddit.wiki.create("banhammer-reactions", content, reason="Reactions not found")
                except Exception as e:
                    print(e)

github cincodenada / image_linker_bot / bot.py
# Fall back to local file
if not imageconf:
  imageconf = yaml.load(open('imagelist.yaml'))
  shutil.copy('imagelist.yaml','imagelist.%d.yaml' % (time.time()))
  bot.log('Loaded imagelist from file')

imagemap = ImageMap(imageconf, bot.config['bot']['animated_extensions'], bot.config['bot']['switchable_extensions'])

markdown = imagemap.get_formatted()

# Update the image map on the wiki
try:
  try:
    curmd = bot.get_wiki('imagelist')
  except prawcore.exceptions.NotFound:
    curmd = None

  if(curmd != markdown):
    bot.write_wiki('imagelist', markdown, 'Updating image list')
    bot.log("Wrote updated imagelist to wiki")
except Exception as e:
  bot.log("Couldn't update wiki page: " + str(sys.exc_info()[0]))

  bot.log("Updating files...")
  # Fall back to shuffling files around
  shutil.copy('imagelist.md','imagelist.%d.md' % (time.time()))
  shutil.copy('imagelist.md','imagelist.previous.md')
  mdf = open('imagelist.md','w')
  mdf.write(markdown)
  mdf.close()
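
The last two examples share a pattern worth isolating: read a wiki page, and if prawcore.exceptions.NotFound signals that it does not exist, fall back to (re)creating its content. Below is a minimal sketch of that pattern using PRAW's wiki.create, as shown in the Banhacker example; the subreddit and page names are placeholders.

import praw
import prawcore

# Assumes credentials are configured in a praw.ini site named "bot" (hypothetical).
reddit = praw.Reddit("bot")
wiki = reddit.subreddit("example_subreddit").wiki  # placeholder subreddit

try:
    content = wiki["example_page"].content_md  # placeholder page name
except prawcore.exceptions.NotFound:
    # The page does not exist yet; create it with default content instead.
    content = "initial content"
    wiki.create("example_page", content, reason="page was missing")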