How to use the memorious.model.Event class in memorious

To help you get started, we've selected a few examples showing how memorious.model.Event is used in public projects.

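Taken together, the snippets below cover the Event API surface: Event.save persists a leveled event, the get_* helpers read events and counts back for reporting, and Event.delete clears them. As a rough sketch of how the calls line up (assuming crawler, stage, and run_id objects supplied by a memorious context; illustrative, not standalone-runnable):

from memorious.model import Event

# Persist a warning for the current run (same call shape as emit_warning below):
Event.save(crawler, stage, Event.LEVEL_WARNING, run_id,
           error='ExampleWarning', message='something looked off')

# Page through stored events for this crawler, filtered by level:
events = Event.get_crawler_events(crawler, 0, 50, Event.LEVEL_WARNING)

# Aggregate event counts, then wipe everything recorded for the crawler:
counts = Event.get_counts(crawler)
Event.delete(crawler)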

alephdata/memorious · memorious/logic/context.py (view on GitHub)
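Context.emit_warning interpolates optional printf-style arguments into the message, logs it, and persists a warning-level Event tied to the current crawler, stage, and run: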
def emit_warning(self, message, type=None, *args):
    if len(args):
        message = message % args
    self.log.warning(message)
    return Event.save(self.crawler,
                      self.stage,
                      Event.LEVEL_WARNING,
                      self.run_id,
                      error=type,
                      message=message)
alephdata/memorious · memorious/ui/views.py (view on GitHub)
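This Flask view paginates stored events for a crawler, dispatching to the most specific Event query available: by stage, by run, or across the whole crawler: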
def events(name):
    crawler = get_crawler(name)
    page = int(request.args.get('page', 1))
    start = (max(1, page) - 1) * PAGE_SIZE
    end = start + PAGE_SIZE
    run_id = request.args.get('run_id')
    level = request.args.get('level')
    stage_name = request.args.get('stage_name')

    if stage_name:
        events = Event.get_stage_events(crawler, stage_name, start, end, level)
    elif run_id:
        events = Event.get_run_events(crawler, run_id, start, end, level)
    else:
        events = Event.get_crawler_events(crawler, start, end, level)
    total = len(events)
    pages = int(math.ceil((float(total) / PAGE_SIZE)))
    return render_template('events.html',
                           crawler=crawler,
                           results=events,
                           page=page,
                           pages=pages)
alephdata/memorious · memorious/ui/reporting.py (view on GitHub)
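Per-stage reporting combines the event counts from Event.get_stage_counts with each stage's operation count: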
def crawler_stages(crawler):
    """See the number of executions of each stage."""
    stages = []
    for stage in crawler:
        data = Event.get_stage_counts(crawler, stage)
        data['total_ops'] = stage.op_count
        data['stage'] = stage
        stages.append(data)
    return stages
alephdata/memorious · memorious/ui/reporting.py (view on GitHub)
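The crawler index assembles Event.get_counts with run metadata for every crawler registered with the manager: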
def crawlers_index():
    """Generate a list of all crawlers, sorted alphabetically, with op
    counts."""
    crawlers = []
    for crawler in manager:
        data = Event.get_counts(crawler)
        data['last_active'] = crawler.last_run
        data['total_ops'] = crawler.op_count
        data['running'] = crawler.is_running
        data['crawler'] = crawler
        crawlers.append(data)
    return crawlers
alephdata/memorious · memorious/ui/views.py (view on GitHub)
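The UI's index view builds the same per-crawler summary as crawlers_index above, then renders it as a template: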
def index():
    """Generate a list of all crawlers, alphabetically, with op counts."""
    crawlers = []
    for crawler in manager:
        data = Event.get_counts(crawler)
        data['last_active'] = crawler.last_run
        data['total_ops'] = crawler.op_count
        data['running'] = crawler.is_running
        data['crawler'] = crawler
        crawlers.append(data)
    return render_template('index.html', crawlers=crawlers)
alephdata/memorious · memorious/logic/context.py (view on GitHub)
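The error-level counterpart to emit_warning: the exception is logged and stored as an Event, with its class name recorded in the error field: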
def emit_exception(self, exc):
    self.log.exception(exc)
    return Event.save(self.crawler,
                      self.stage,
                      Event.LEVEL_ERROR,
                      self.run_id,
                      error=exc.__class__.__name__,
                      message=str(exc))
alephdata/memorious · memorious/logic/crawler.py (view on GitHub)
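Finally, Event.delete is used alongside the queue and crawl-state flushes to remove all run-time data for a crawler: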
def flush(self):
    """Delete all run-time data generated by this crawler."""
    Queue.flush(self)
    Event.delete(self)
    Crawl.flush(self)