How to use sickrage - 10 common examples

To help you get started, we’ve selected a few sickrage examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github pymedusa / Medusa / sickbeard / View on Github external
self.description = ep_details.findtext('plot')
                    if self.description is None:
                        self.description = ''

                    if ep_details.findtext('aired'):
                        raw_airdate = [int(x) for x in ep_details.findtext('aired').split('-')]
                        self.airdate =[0], raw_airdate[1], raw_airdate[2])
                        self.airdate =

                    self.hasnfo = True
                self.hasnfo = False

            if ek(os.path.isfile, replace_extension(nfo_file, 'tbn')):
                self.hastbn = True
                self.hastbn = False
github pymedusa / Medusa / sickbeard / View on Github external
main_db_con = db.DBConnection()
        sql_results =
            b'SELECT '
            b'  * '
            b'FROM '
            b'  tv_episodes '
            b'WHERE '
            b'  showid = ? '
            b'  AND season = ? '
            b'  AND episode = ?', [, season, episode])

        if len(sql_results) > 1:
            raise MultipleEpisodesInDatabaseException('Your DB has two records for the same show somehow.')
        elif not sql_results:
            logger.log(u'{id}: Episode {ep} not found in the database'.format
                       (, ep=episode_num(self.season, self.episode)),
            return False
            if sql_results[0][b'name']:
       = sql_results[0][b'name']

            self.season = season
            self.episode = episode
            self.absolute_number = sql_results[0][b'absolute_number']
            self.description = sql_results[0][b'description']
            if not self.description:
                self.description = ''
            if sql_results[0][b'subtitles'] and sql_results[0][b'subtitles']:
                self.subtitles = sql_results[0][b'subtitles'].split(',')
            self.subtitles_searchcount = sql_results[0][b'subtitles_searchcount']
            self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch']
github pymedusa / Medusa / sickbeard / View on Github external
        if pattern is None:
            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
            if and sickbeard.NAMING_CUSTOM_ABD and not self.related_episodes:
                pattern = sickbeard.NAMING_ABD_PATTERN
            elif and sickbeard.NAMING_CUSTOM_SPORTS and not self.related_episodes:
                pattern = sickbeard.NAMING_SPORTS_PATTERN
            elif and sickbeard.NAMING_CUSTOM_ANIME:
                pattern = sickbeard.NAMING_ANIME_PATTERN
                pattern = sickbeard.NAMING_PATTERN

        # split off the dirs only, if they exist
        name_groups = re.split(r'[\\/]', pattern)

        return sanitize_filename(self._format_pattern(name_groups[-1], multi, anime_type))
github pymedusa / Medusa / sickbeard / (filename lost in extraction) — View on Github (external)
def is_location_valid(self, location=None):
        """Whether the location is a valid file.

        Checks the explicitly supplied ``location`` when given; otherwise
        falls back to this episode's stored ``self._location``.

        :param location: optional path to check instead of the stored one
        :type location: str
        :rtype: bool
        """
        # `location or self._location` — an empty/None argument falls back
        # to the instance attribute before the existence check.
        return ek(os.path.isfile, location or self._location)
github pymedusa / Medusa / sickbeard / View on Github external
logger.log(str(self.indexerid) + u': Loading all episodes from the show directory ' + self.location,

        # get file list
        media_files = helpers.listMediaFiles(self.location)
        logger.log(u'%s: Found files: %s' %
                   (self.indexerid, media_files), logger.DEBUG)

        # create TVEpisodes from each media file (if possible)
        sql_l = []
        for media_file in media_files:
            cur_episode = None

            logger.log(str(self.indexerid) + u': Creating episode from ' + media_file, logger.DEBUG)
                cur_episode = self.make_ep_from_file(ek(os.path.join, self.location, media_file))
            except (ShowNotFoundException, EpisodeNotFoundException) as e:
                logger.log(u'Episode ' + media_file + u' returned an exception: ' + ex(e), logger.ERROR)
            except EpisodeDeletedException:
                logger.log(u'The episode deleted itself when I tried making an object for it', logger.DEBUG)

            if cur_episode is None:

            # see if we should save the release name in the db
            ep_file_name = ek(os.path.basename, cur_episode.location)
            ep_file_name = ek(os.path.splitext, ep_file_name)[0]

                parse_result = NameParser(False, showObj=self, tryIndexers=True).parse(ep_file_name)
            except (InvalidNameException, InvalidShowException):
github pymedusa / Medusa / sickbeard / View on Github external
same_file = False

            cur_ep = self.get_episode(season, current_ep)
            if not cur_ep:
                    cur_ep = self.get_episode(season, current_ep, filepath)
                    if not cur_ep:
                        raise EpisodeNotFoundException
                except EpisodeNotFoundException:
                    logger.log(u'{0}: Unable to figure out what this file is, skipping {1}'.format
                               (self.indexerid, filepath), logger.ERROR)

                # if there is a new file associated with this ep then re-check the quality
                if not cur_ep.location or ek(os.path.normpath, cur_ep.location) != ek(os.path.normpath, filepath):
                        u'{0}: The old episode had a different file associated with it, '
                        u're-checking the quality using the new filename {1}'.format(self.indexerid, filepath),
                    check_quality_again = True

                with cur_ep.lock:
                    old_size = cur_ep.file_size
                    cur_ep.location = filepath
                    # if the sizes are the same then it's probably the same file
                    same_file = old_size and cur_ep.file_size == old_size

            if root_ep is None:
                root_ep = cur_ep
github pymedusa / Medusa / lib / anidbhttp / View on Github external
def cache_image(self, image_url):
        """Store cache of image in cache dir.

        Downloads the image into ``<CACHE_DIR>/images/anidb/<basename>``,
        creating the directory tree on first use and skipping files that
        are already cached.

        :param image_url: Source URL
        """
        path = ek(os.path.abspath, ek(os.path.join, sickbeard.CACHE_DIR, 'images', 'anidb'))

        if not ek(os.path.exists, path):
            ek(os.makedirs, path)

        # Cache filename is the URL's basename, so identical URLs dedupe.
        full_path = ek(os.path.join, path, ek(os.path.basename, image_url))

        if not ek(os.path.isfile, full_path):
            helpers.download_file(image_url, full_path, session=self.session)
github pymedusa / Medusa / sickbeard / View on Github external
cur_loc, base_name_only=False, subfolders=True)

                    if related_files:
                        logger.log(u'{id}: Found hanging associated files for {ep}, deleting: {files}'.format
                                   (id=self.indexerid, ep=episode_num(season, episode), files=related_files),
                        for related_file in related_files:
                                ek(os.remove, related_file)
                            except Exception as e:
                                logger.log(u'Could not delete associated file: {0}. Error: {1}'.format
                                           (related_file, e), logger.WARNING)

        # clean up any empty season folders after deletion of associated files
        for sub_dir in ek(os.listdir, self.location):
            helpers.delete_empty_folders(ek(os.path.join, self.location, sub_dir), self.location)

        if sql_l:
            main_db_con = db.DBConnection()
github pymedusa / Medusa / sickbeard / providers / View on Github external
if len(torrent_hash) == 32:
                    torrent_hash = b16encode(b32decode(torrent_hash)).upper()

                if not torrent_hash:
                    logger.log(u"Unable to extract torrent hash from magnet: " + ex(result.url), logger.ERROR)
                    return urls, filename

                urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.btCacheURLS]
            except Exception:
                logger.log(u"Unable to extract torrent hash or name from magnet: " + ex(result.url), logger.ERROR)
                return urls, filename
            urls = [result.url]

        if self.providerType == GenericProvider.TORRENT:
            filename = ek(os.path.join, sickbeard.TORRENT_DIR, sanitize_filename( + '.' + self.providerType)

        elif self.providerType == GenericProvider.NZB:
            filename = ek(os.path.join, sickbeard.NZB_DIR, sanitize_filename( + '.' + self.providerType)

        return urls, filename
github pymedusa / Medusa / sickbeard / View on Github external
            return False

        # don't update show status if show dir is missing, unless it's missing on purpose
        if not and \
                not sickbeard.CREATE_MISSING_SHOW_DIRS and not sickbeard.ADD_SHOWS_WO_DIR:
            logger.log(u'The show dir %s is missing, not bothering to change the episode statuses '
                       u"since it'd probably be invalid" %

        if self.location:
            logger.log(u'{id}: Setting status for {ep} based on status {status} and location {location}'.format
                       (, ep=episode_num(season, episode),
                        status=statusStrings[self.status], location=self.location), logger.DEBUG)

        if not ek(os.path.isfile, self.location):
            if (self.airdate >= or self.airdate == and \
                    self.status in (UNAIRED, UNKNOWN, WANTED):
                # Need to check if is UNAIRED otherwise code will step into second 'IF'
                # and make episode as default_ep_status
                # If is a leaked episode and user manually snatched, it will respect status
                # If is a fake (manually snatched), when user set as FAILED, status will be WANTED
                # and code below will make it UNAIRED again
                logger.log(u'%s: Episode airs in the future or has no airdate, marking it %s' %
                           (, statusStrings[UNAIRED]), logger.DEBUG)
                self.status = UNAIRED
            elif self.status in (UNAIRED, UNKNOWN):
                # Only do UNAIRED/UNKNOWN, it could already be snatched/ignored/skipped,
                # or downloaded/archived to disconnected media
                logger.log(u'Episode has already aired, marking it %s' %
                           statusStrings[], logger.DEBUG)
                self.status = if self.season > 0 else SKIPPED  # auto-skip specials