How to use the pywikibot.Claim class in pywikibot

To help you get started, we’ve selected a few pywikibot.Claim examples, based on popular ways it is used in public projects.
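
Before looking at the project code, here is a minimal sketch of the basic Claim workflow: create a Claim for a property, set its target, and attach it to an item. The property and item IDs below are illustrative placeholders, not taken from the examples on this page.

import pywikibot

site = pywikibot.Site('wikidata', 'wikidata')
repo = site.data_repository()

item = pywikibot.ItemPage(repo, 'Q4115189')        # Wikidata sandbox item (placeholder)
claim = pywikibot.Claim(repo, 'P31')               # P31 = instance of
claim.setTarget(pywikibot.ItemPage(repo, 'Q5'))    # illustrative target item
item.addClaim(claim, summary='Adding a claim with pywikibot.Claim')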


github wikimedia / pywikibot / scripts / freebasemappingupload.py
def run(self):
        """Run the bot."""
        # Set up some items we will use a lot.
        self.claim = pywikibot.Claim(self.repo, 'P646')  # freebase mapping
        # And sources!
        self.statedin = pywikibot.Claim(self.repo, 'P248')  # stated in
        # Freebase data dump
        freebasedumpitem = pywikibot.ItemPage(self.repo, 'Q15241312')
        self.statedin.setTarget(freebasedumpitem)
        # date of publication
        self.dateofpub = pywikibot.Claim(self.repo, 'P577')
        oct28 = pywikibot.WbTime(site=self.repo, year=2013, month=10, day=28,
                                 precision='day')
        self.dateofpub.setTarget(oct28)

        for line in gzip.open(self.filename):
            self.processLine(line.strip())
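
The two helper claims built above (P248 "stated in" and P577 "publication date") are meant to be attached as references to each new mapping claim later in the script. A simplified, self-contained sketch of that reference pattern, with a placeholder item and Freebase ID:

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()

claim = pywikibot.Claim(repo, 'P646')          # Freebase ID
claim.setTarget('/m/0dgrmk')                   # placeholder Freebase MID

stated_in = pywikibot.Claim(repo, 'P248')      # stated in
stated_in.setTarget(pywikibot.ItemPage(repo, 'Q15241312'))
pub_date = pywikibot.Claim(repo, 'P577')       # date of publication
pub_date.setTarget(pywikibot.WbTime(site=repo, year=2013, month=10, day=28,
                                    precision='day'))

item = pywikibot.ItemPage(repo, 'Q4115189')    # placeholder item
item.addClaim(claim)
claim.addSources([stated_in, pub_date])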
github wikimedia / pywikibot / scripts / illustrate_wikidata.py
def __init__(self, generator, wdproperty='P18'):
        """
        Initializer.

        @param generator: A generator that yields Page objects
        @type generator: generator
        @param wdproperty: The property to add. Should be of type commonsMedia
        @type wdproperty: str
        """
        super(IllustrateRobot, self).__init__()
        self.generator = generator
        self.wdproperty = wdproperty
        self.cacheSources()

        claim = pywikibot.Claim(self.repo, self.wdproperty)
        if claim.type != 'commonsMedia':
            raise ValueError('{} is of type {}, should be commonsMedia'
                             .format(self.wdproperty, claim.type))
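
The initializer above only validates that the property is of type commonsMedia; the actual target of such a claim is a FilePage on Commons, set elsewhere in the script. A hedged sketch of that step (the file name and item are placeholders):

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
commons = pywikibot.Site('commons', 'commons')

claim = pywikibot.Claim(repo, 'P18')                              # P18 = image
claim.setTarget(pywikibot.FilePage(commons, 'File:Example.jpg'))  # placeholder file

item = pywikibot.ItemPage(repo, 'Q4115189')                       # placeholder item
item.addClaim(claim, summary='Adding a commonsMedia claim')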
github ricordisamoa / wiki / wikidata / merge.py
item1.get(force=True)
    item2.get(force=True)
    for prop in item2.claims:
        for claim2 in item2.claims[prop]:
            if prop in item1.claims:
                for claim1 in item1.claims[prop]:
                    if claim1.getTarget() == claim2.getTarget():
                        for source in claim2.sources:
                            try:
                                claim1.addSource(source, bot=1)
                                pywikibot.output(
                                    u'\03{{lightgreen}}imported a source for '
                                    u'{propid} into {qid}\03{{lightblue}}'
                                    .format(propid=prop, qid=item1.getID()))
                                item1.get(force=True)
                            except:
                                pass
            else:
                claim = pywikibot.Claim(claim2.site, prop)
                claim.setTarget(claim2.getTarget())
                item1.addClaim(claim)
                pywikibot.output(
                    u'\03{{lightgreen}}imported a claim for '
                    u'{propid} into {qid}\03{{lightblue}}'
                    .format(propid=prop, qid=item1.getID()))
                for source in claim2.sources:
                    try:
                        claim.addSource(source, bot=1)
                        pywikibot.output(
                            u'\03{{lightgreen}}imported a source for '
                            u'{propid} into {qid}\03{{lightblue}}'
                            .format(propid=prop, qid=item1.getID()))
                        item1.get(force=True)
                    except:
                        pass
    delete_item(item2, item1)
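
The core move of this merge script is recreating a claim on the other item: build a fresh pywikibot.Claim for the same property, copy the target, then re-add the sources. Stripped of the source handling, the copy step looks roughly like this (item IDs are placeholders):

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
source_item = pywikibot.ItemPage(repo, 'Q111111111')   # placeholder items
target_item = pywikibot.ItemPage(repo, 'Q222222222')
source_item.get()

for prop, claims in source_item.claims.items():
    for old_claim in claims:
        new_claim = pywikibot.Claim(repo, prop)
        new_claim.setTarget(old_claim.getTarget())
        target_item.addClaim(new_claim)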
github wikimedia / pywikibot / scripts / harvest_template.py
if template not in self.templateTitles:
                continue
            # We found the template we were looking for
            for field, value in fielddict.items():
                field = field.strip()
                value = value.strip()
                if not field or not value:
                    continue

                if field not in self.fields:
                    continue

                # This field contains something useful for us
                prop, options = self.fields[field]
                claim = pywikibot.Claim(self.repo, prop)
                if claim.type == 'wikibase-item':
                    # Try to extract a valid page
                    match = pywikibot.link_regex.search(value)
                    if match:
                        link_text = match.group(1)
                    else:
                        if self._get_option_with_fallback(options, 'islink'):
                            link_text = value
                        else:
                            pywikibot.output(
                                '{} field {} value {} is not a wikilink. '
                                'Skipping.'
                                .format(claim.getID(), field, value))
                            continue

                    linked_item = self._template_link_target(item, link_text)
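
For a property of type 'wikibase-item', the harvested link text has to be resolved to an ItemPage before it can serve as the claim target, which is roughly what _template_link_target does. A minimal sketch of that resolution, assuming an English Wikipedia page title and an illustrative property:

import pywikibot

site = pywikibot.Site('en', 'wikipedia')
repo = site.data_repository()

page = pywikibot.Page(site, 'Example page title')   # placeholder title
# fromPage() raises an exception if the page has no connected Wikidata item
linked_item = pywikibot.ItemPage.fromPage(page)

claim = pywikibot.Claim(repo, 'P50')                # placeholder property
claim.setTarget(linked_item)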
github notconfusing / harvest_infobox_book / fix_databaseconstraints.py
linecode = mwp.parse(line)
            linebits = linecode.filter()
            qid = ''
            isbn = ''
            for linebit in linebits:
                if isinstance(linebit, mwp.nodes.wikilink.Wikilink):
                    qid = linebit[2:-2]
                if isinstance(linebit, mwp.nodes.text.Text) and linebit != '*':
                    isbn = linebit[1:]
                    print('qid', qid, ' isbn', isbn)
            if qid.startswith('Q'):
                wditem = pywikibot.ItemPage(wikidata, qid)
                cleanedisbn = cleanisbn(isbn)
                if cleanedisbn:
                    if boolvalidate(cleanedisbn):
                        isbn10claim = pywikibot.Claim(site=wikidata, pid='P957')
                        isbn10claim.setTarget(cleanedisbn)
                        wditem.addClaim(isbn10claim)
                    page_parts = wditem.get()
                    claims = page_parts['claims']
                    for claimnum, claimlist in claims.items():
                        if claimnum == 'P212':
                            for claim in claimlist:
                                isbn = claim.target
                                wditem.removeClaims(claim)

            fixcases['prevtouched'] = linenum
            savecases()

print('done')
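
Besides adding the ISBN-10 (P957) claim, the script loads the item's existing claims and removes its ISBN-13 (P212) statements with removeClaims(). The same removal pattern in isolation, against a placeholder item:

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
item = pywikibot.ItemPage(repo, 'Q4115189')     # placeholder item
item.get()

for claim in item.claims.get('P212', []):       # P212 = ISBN-13
    item.removeClaims(claim)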
github konstin / github-wikidata-bot / main.py
if src_url.target_equals(url):
            break
    else:
        src_url = Claim(Settings.wikidata_repo, src_p)
        src_url.setTarget(url)
        src_retrieved = Claim(Settings.wikidata_repo, Properties.retrieved)
        src_retrieved.setTarget(retrieved)

        sources = [src_url, src_retrieved]

        if title:
            src_title = Claim(Settings.wikidata_repo, Properties.title)
            src_title.setTarget(pywikibot.WbMonolingualText(title, "en"))
            sources.append(src_title)
        if date:
            src_date = Claim(Settings.wikidata_repo, Properties.publication_date)
            src_date.setTarget(date)
            sources.append(src_date)
        claim.addSources(sources, summary=get_sumary(edit_group_hash))

    return src_url
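
The for/else above (its opening for loop is cut off in this excerpt) skips building new reference claims when an existing source already points at the same URL. A sketch of that check on its own, assuming src_p is the reference-URL property (P854):

def has_reference_url(claim, url):
    """Return True if one of the claim's sources already references url."""
    for source in claim.getSources():
        for src_url in source.get('P854', []):   # P854 = reference URL (assumption)
            if src_url.target_equals(url):
                return True
    return False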
github WikiDP / wikidp-portal / wikidp / controllers / api.py
prop_string (str): Wikidata Property Identifier [ex. 'P1234']
        value (str): Value matching accepted property
        data_type (str): Data type of the value (could be other data types)
        qualifiers (list): list of data about qualifier claims
        meta (dict, optional): Contains information about qualifiers/references/summaries
    Returns:
        bool: True if successful, False otherwise
    """
    try:
        # TODO: Account for all dataTypes
        claim = pywikibot.Claim(REPO, prop_string)
        target = get_target_by_type(data_type, value)
        claim.setTarget(target)

        for q_data in qualifiers:
            qualifier = pywikibot.Claim(REPO, q_data.get('pid'))
            target = get_target_by_type(q_data.get('type'), q_data.get('value'))
            qualifier.setTarget(target)
            claim.addQualifier(qualifier, summary=u'Adding a qualifier.')

        if meta:
            # TODO: Add Ability to include references, summaries, and qualifiers
            pass
        item.addClaim(claim, summary=u'Adding claim')
        return True
    except (TypeError, Exception):
        return False
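
Qualifiers are themselves Claim objects: create a Claim for the qualifier property, set its target, and attach it with addQualifier(). A small standalone sketch with placeholder property and item IDs:

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
item = pywikibot.ItemPage(repo, 'Q4115189')              # placeholder item

claim = pywikibot.Claim(repo, 'P39')                     # placeholder property
claim.setTarget(pywikibot.ItemPage(repo, 'Q111111111'))  # placeholder target
item.addClaim(claim, summary='Adding claim')

qualifier = pywikibot.Claim(repo, 'P580')                # P580 = start time
qualifier.setTarget(pywikibot.WbTime(site=repo, year=2020, month=1, day=1,
                                     precision='day'))
claim.addQualifier(qualifier, summary='Adding a qualifier')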
github weblyzard / ewrt / src / eWRT / ws / wikidata / preferred_claim_value.py
def attribute_preferred_value(claim_instances):
    """When an attribute has several instances, try to
    retrieve the one with rank=preferred. Raises a ValueError
    when no or more than one `preferred` instances are found.
    :param claim_instances: List of `Claim`s.
    :returns a 1-member list containing the unique `preferred`
        value, or the input list if it has length 1. Raises
        ValueError otherwise."""

    if len(claim_instances) == 1:
        return claim_instances
    else:
        try:
            claim_instances = [
                Claim.fromJSON(DataSite('wikidata', 'wikidata'), claim_instance)
                for claim_instance in claim_instances]
        except TypeError:
            pass
        preferred = [
            claim for claim in claim_instances if claim.rank == 'preferred']
        if len(preferred) == 0:
            raise ValueError('No claim instance marked as preferred!')
        elif len(preferred) > 1:
            sample_claim = preferred[0]
            logger.info(
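
(The excerpt is cut off inside the logger.info() call.) The key idea is that every Claim carries a rank attribute ('preferred', 'normal' or 'deprecated'), so preferred statements can be selected with a simple filter. A standalone sketch with placeholder IDs:

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
item = pywikibot.ItemPage(repo, 'Q4115189')    # placeholder item
item.get()

preferred = [claim for claim in item.claims.get('P39', [])   # placeholder property
             if claim.rank == 'preferred']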
github weblyzard / ewrt / src / eWRT / ws / wikidata / wikibot_parse_item.py
"""
        Parse additional information about a specified claim. The result
        (dict format) is accessible through ParseClaim(claim).claim_details

        :param claim: pywikibot.Claim object to be parsed
        :type claim: pywikibot.Claim
        :param languages: list of language ISO codes
        :type languages: List(str)
        :param literals: list of literal properties to be included in result
        :type literals: List(str)
        """
        if qualifiers is None:
            qualifiers = QUALIFIERS
        self.qualifiers = qualifiers
        if not isinstance(claim, Claim):
            claim = Claim.fromJSON(site=DataSite('wikidata', 'wikidata'),
                                   data=claim)

        self.include_attribute_labels = include_attribute_labels
        self.claim = claim
        self.languages = languages
        self.literals = literals
        if self.include_attribute_labels:
            self.literals = ['labels']
        if delay:
            self.claim_details = {}
        else:
            self.claim_details = self.parse_claim()
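
ParseClaim accepts either a ready-made Claim object or its JSON representation and uses Claim.fromJSON() to convert the latter. Claim.toJSON() is the inverse, which makes a simple round trip possible (item and property below are placeholders):

import pywikibot
from pywikibot import Claim

repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
item = pywikibot.ItemPage(repo, 'Q4115189')    # placeholder item
item.get()

for claim in item.claims.get('P31', []):       # placeholder property
    data = claim.toJSON()                      # plain dict, e.g. for a dump or cache
    restored = Claim.fromJSON(repo, data)      # back to a pywikibot.Claim
    print(restored.getID(), restored.getTarget())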