How to use the spade.model module in spade

To help you get started, we’ve selected a few spade.model examples, based on popular ways it is used in public projects.
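All of the snippets below access Django models through a module referenced as model (for example model.Batch, model.URLScan, model.CSSRule). As a quick orientation, here is a minimal sketch of that query style. It assumes a configured Django environment for the spade project and that the models module is imported as from spade import model, an import the snippets themselves do not show.

from spade import model  # assumed import path; the snippets only show model.X

def latest_finished_batch():
    # Most recently finished crawl batch, or None if no batch exists yet
    batches = model.Batch.objects.order_by('-finish_time')[:1]
    return batches[0] if batches else None

def rule_count(linkedcss):
    # Number of CSS rules recorded for one linked stylesheet
    return model.CSSRule.objects.filter(linkedcss=linkedcss).count()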

mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
                self.props[webkit_prop] = ('-moz-%s' % name, name)
            moz_equiv = self.props[webkit_prop][0]
            unpref_equiv = self.props[webkit_prop][1]
            moz_count = self.get_prop_count(linkedcss, moz_equiv)
            unpref_count = self.get_prop_count(linkedcss, unpref_equiv)

            # Count this property as a CSS issue: the -webkit- form is used
            # more often than both its -moz- and unprefixed equivalents
            if webkit_count > moz_count and webkit_count > unpref_count:
                total_css_issues += 1

            data.webkit_count += webkit_count
            data.moz_count += moz_count
            data.unpref_count += unpref_count
            data.save()

        # Create this linkedcss's data model
        return model.LinkedCSSData.objects.create(
            linked_css=linkedcss,
            num_rules=total_rules,
            num_properties=total_properties,
            css_issues=total_css_issues,
            )
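This snippet (and the longer aggregate_linkedcss excerpts further down) relies on a get_prop_count helper whose body is not shown on this page. The following is only a hypothetical reconstruction of what it might do, based on the CSSProperty fields visible in the cssparser.py snippet below (prefix and name are stored separately) and on the module-level model import used throughout data_aggregator.py; the real implementation may differ.

    def get_prop_count(self, linkedcss, prop):
        # Hypothetical sketch: count occurrences of a (possibly vendor
        # prefixed) property across all rules of one stylesheet
        prefix, name = '', prop
        for candidate in ('-webkit-', '-moz-'):
            if prop.startswith(candidate):
                prefix, name = candidate, prop[len(candidate):]
                break
        return model.CSSProperty.objects.filter(
            rule__linkedcss=linkedcss, prefix=prefix, name=name).count()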
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
    @staticmethod
    def get_url_scan(url, batch):
        urls = model.URLScan.objects.filter(site_scan__batch__id=batch.id)
        urls = urls.filter(page_url=url.page_url)
        if urls.count():
            return urls[0]
        return None
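get_url_scan returns the first URLScan matching both the batch and the page URL, or None. In Django 1.6 and later the same lookup can be written with QuerySet.first(); the original project predates that API, so the version below is only an equivalent illustration (using the same model module as above), not the project's code.

    @staticmethod
    def get_url_scan(url, batch):
        # Equivalent lookup using first(): returns a URLScan or None
        return (model.URLScan.objects
                .filter(site_scan__batch__id=batch.id, page_url=url.page_url)
                .first())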
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
    @transaction.commit_on_success
    def aggregate_urlcontent(self, urlcontent):
        """
        Given a particular urlcontent, aggregate the stats from its children
        into the data model and return it
        """
        linkedstyles = model.LinkedCSS.objects.filter(linked_from=urlcontent)

        # Initialize counters
        total_rules = 0
        total_properties = 0
        total_css_issues = 0

        # Aggregate data for each linked css stylesheet
        for linkedcss in linkedstyles:
            linkedcss_data = self.aggregate_linkedcss(linkedcss)
            total_rules += linkedcss_data.num_rules
            total_properties += linkedcss_data.num_properties
            total_css_issues += linkedcss_data.css_issues

        # Create this urlcontent's data model
        return model.URLContentData.objects.create(
            urlcontent=urlcontent,
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
    @transaction.commit_on_success
    def aggregate_urlscan(self, urlscan):
        """
        Given a particular urlscan, aggregate the stats from its children into
        the data model and return it
        """
        urlcontents = model.URLContent.objects.filter(url_scan=urlscan).iterator()

        # Initialize counters
        total_rules = 0
        total_properties = 0
        total_css_issues = 0

        # Aggregate data for each urlcontent
        for urlcontent in urlcontents:
            urlcontent_data = self.aggregate_urlcontent(urlcontent)
            total_rules += urlcontent_data.num_rules
            total_properties += urlcontent_data.num_properties
            total_css_issues += urlcontent_data.css_issues

        # Create this urlscan's data model
        return model.URLScanData.objects.create(
            urlscan=urlscan,
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
        total_rules = model.CSSRule.objects.filter(linkedcss=linkedcss).count()
        total_properties = model.CSSProperty.objects.filter(
            rule__linkedcss=linkedcss).count()
        total_css_issues = 0

        # find all webkit properties
        webkit_props = set()
        for rule in linkedcss.cssrule_set.iterator():
            for webkit_prop in rule.cssproperty_set.filter(prefix='-webkit-').iterator():
                webkit_props.add(webkit_prop.full_name)

        sitescan = linkedcss.linked_from.all()[0].url_scan.site_scan
        for webkit_prop in webkit_props:
            name = webkit_prop[8:]  # strip away the prefix
            # check to see if we already have a cssprop data object for this
            data = model.CSSPropertyData.objects.filter(name=name, sitescan=sitescan)
            if len(data):
                data = data[0]
            else:
                data = model.CSSPropertyData.objects.create(name=name, sitescan=sitescan)
            data.linkedcsss.add(linkedcss)
            webkit_count = self.get_prop_count(linkedcss, webkit_prop)
            if webkit_prop not in self.props:
                # just add it to self.props and go on as if it were there
                self.props[webkit_prop] = ('-moz-%s' % name, name)
            moz_equiv = self.props[webkit_prop][0]
            unpref_equiv = self.props[webkit_prop][1]
            moz_count = self.get_prop_count(linkedcss, moz_equiv)
            unpref_count = self.get_prop_count(linkedcss, unpref_equiv)

            # Count this property as a CSS issue: the -webkit- form is used
            # more often than both its -moz- and unprefixed equivalents
            if webkit_count > moz_count and webkit_count > unpref_count:
                total_css_issues += 1
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
        into the data model and return it
        """
        # A single LinkedCSS can be linked from multiple URLContents, thus we
        # have to check if its already been evaluated. (TODO we could keep an
        # in-memory cache of these to save db queries, if needed to help
        # performance)
        try:
            return model.LinkedCSSData.objects.get(linked_css=linkedcss)
        except model.LinkedCSSData.DoesNotExist:
            pass

        print "Aggregating CSS data for %s" % linkedcss.url

        # Initialize counters
        total_rules = model.CSSRule.objects.filter(linkedcss=linkedcss).count()
        total_properties = model.CSSProperty.objects.filter(
            rule__linkedcss=linkedcss).count()
        total_css_issues = 0

        # find all webkit properties
        webkit_props = set()
        for rule in linkedcss.cssrule_set.iterator():
            for webkit_prop in rule.cssproperty_set.filter(prefix='-webkit-').iterator():
                webkit_props.add(webkit_prop.full_name)

        sitescan = linkedcss.linked_from.all()[0].url_scan.site_scan
        for webkit_prop in webkit_props:
            name = webkit_prop[8:]  # strip away the prefix
            # check to see if we already have a cssprop data object for this
            data = model.CSSPropertyData.objects.filter(name=name, sitescan=sitescan)
            if len(data):
                data = data[0]
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
    @staticmethod
    def get_previous_batch(batch):
        prev_batches = model.Batch.objects.filter(finish_time__lt=batch.finish_time)
        prev = prev_batches.order_by('-finish_time')[:1]
        if prev:
            return prev[0]
        return None
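get_previous_batch looks up the most recent Batch that finished before the given one. A small usage sketch follows, assuming the static methods above live on an aggregator class in data_aggregator.py; the class name DataAggregator is assumed here, since the class definition is not part of these snippets.

from spade import model  # assumed import path
from spade.utils.data_aggregator import DataAggregator  # assumed class name

batches = model.Batch.objects.order_by('-finish_time')[:1]
current = batches[0] if batches else None
while current is not None:
    print("batch finished at %s" % current.finish_time)
    current = DataAggregator.get_previous_batch(current)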
mozilla / spade / spade / utils / cssparser.py (view on GitHub)
        Calls parse on the internal CSS, stores css properties into db model
        """
        css = linkedcss.raw_css.read()
        for rule in cssutils.parseString(css).cssRules.rulesOfType(
                cssutils.css.CSSRule.STYLE_RULE):
            # Get selector and properties from rule
            rule = self.parse_rule(rule)
            selector = rule[0]
            properties = rule[1]

            # Create CSS rule in model
            current_rule = model.CSSRule.objects.create(linkedcss=linkedcss,
                                                        selector=selector)

            model.CSSProperty.objects.bulk_create(
                [model.CSSProperty(
                    rule=current_rule,
                    prefix=v[0],
                    name=v[1],
                    value=v[2]) for v in properties.values()]
            )
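The database side of this snippet depends on the spade models, but the cssutils calls can be tried on their own. This standalone sketch uses the same parseString / rulesOfType API as above and prints what the parser sees for each style rule; the split into (prefix, name, value) mirrors what parse_rule appears to produce, though parse_rule itself is not shown on this page.

import cssutils

CSS = ".box { -webkit-border-radius: 4px; border-radius: 4px; }"

for rule in cssutils.parseString(CSS).cssRules.rulesOfType(
        cssutils.css.CSSRule.STYLE_RULE):
    print(rule.selectorText)
    for prop in rule.style:
        # Split a possibly vendor-prefixed name into (prefix, bare name)
        prefix = ''
        for candidate in ('-webkit-', '-moz-', '-ms-', '-o-'):
            if prop.name.startswith(candidate):
                prefix = candidate
                break
        print("  %s | %s | %s" % (prefix, prop.name[len(prefix):], prop.value))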
mozilla / spade / spade / utils / data_aggregator.py (view on GitHub)
        urlcontents = model.URLContent.objects.filter(url_scan=urlscan).iterator()

        # Initialize counters
        total_rules = 0
        total_properties = 0
        total_css_issues = 0

        # Aggregate data for each urlcontent
        for urlcontent in urlcontents:
            urlcontent_data = self.aggregate_urlcontent(urlcontent)
            total_rules += urlcontent_data.num_rules
            total_properties += urlcontent_data.num_properties
            total_css_issues += urlcontent_data.css_issues

        # Create this urlscan's data model
        return model.URLScanData.objects.create(
            urlscan=urlscan,
            num_rules=total_rules,
            num_properties=total_properties,
            css_issues=total_css_issues,
            )
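Once the aggregation has run, the per-scan totals live in URLScanData rows. A short read-back sketch, again assuming a configured Django environment and the from spade import model import:

from spade import model  # assumed import path

for scan_data in model.URLScanData.objects.select_related('urlscan'):
    print("%s: %d rules, %d properties, %d issues" % (
        scan_data.urlscan.page_url,
        scan_data.num_rules,
        scan_data.num_properties,
        scan_data.css_issues))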