def test_add_claim_with_qualifier(self):
    """Test adding a claim with a qualifier to an item and a property."""
    testsite = self.get_repo()
    item = pywikibot.ItemPage(testsite, 'Q68')
    item.get()
    if 'P115' in item.claims:
        item.removeClaims(item.claims['P115'])
    claim = pywikibot.page.Claim(
        testsite, 'P115', datatype='wikibase-item')
    target = pywikibot.ItemPage(testsite, 'Q271')
    claim.setTarget(target)
    item.addClaim(claim)
    item.get(force=True)
    end_date = pywikibot.page.Claim(testsite, 'P88', is_qualifier=True)
    end_date.setTarget(pywikibot.WbTime(year=2012))
    item.claims['P115'][0].addQualifier(end_date)

def setUp(self):
    """Add a claim with two qualifiers."""
    super(TestWikibaseRemoveQualifier, self).setUp()
    testsite = self.get_repo()
    item = pywikibot.ItemPage(testsite, 'Q68')
    item.get()
    # Create claim with qualifier
    if 'P115' in item.claims:
        item.removeClaims(item.claims['P115'])
    claim = pywikibot.page.Claim(
        testsite, 'P115', datatype='wikibase-item')
    target = pywikibot.ItemPage(testsite, 'Q271')
    claim.setTarget(target)
    item.addClaim(claim)
    item.get(force=True)
    qual_1 = pywikibot.page.Claim(testsite, 'P88', is_qualifier=True)
    qual_1.setTarget(pywikibot.WbTime(year=2012))
    item.claims['P115'][0].addQualifier(qual_1)
    qual_2 = pywikibot.page.Claim(testsite, 'P580', is_qualifier=True)
    qual_2.setTarget(pywikibot.ItemPage(testsite, 'Q67'))
    item.claims['P115'][0].addQualifier(qual_2)
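
# A minimal sketch (not from the original source) of the removal test this
# setUp prepares for, assuming pywikibot's Claim.removeQualifier() API:
def test_remove_qualifier(self):
    """Remove one of the qualifiers added in setUp."""
    testsite = self.get_repo()
    item = pywikibot.ItemPage(testsite, 'Q68')
    item.get(force=True)
    claim = item.claims['P115'][0]
    qualifier = claim.qualifiers['P88'][0]  # the WbTime qualifier
    claim.removeQualifier(qualifier)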

def test_set_redirect_target(self):
    """Test set_redirect_target method."""
    testsite = self.get_repo()
    item = pywikibot.ItemPage(testsite, 'Q1107')
    target_id = 'Q68'
    if not item.isRedirectPage():
        item.editEntity(data={}, clear=True)
    elif item.getRedirectTarget().getID() == 'Q68':
        target_id = 'Q67'
    target_item = pywikibot.ItemPage(testsite, target_id)
    item.set_redirect_target(target_id, force=True)
    self.assertTrue(item.isRedirectPage())
    new_item = pywikibot.ItemPage(testsite, item.getID())
    self.assertTrue(new_item.isRedirectPage())
    self.assertEqual(new_item.getRedirectTarget(), target_item)

@classmethod
def setUpClass(cls):
    """Set up test class."""
    super(TestIsbnWikibaseBot, cls).setUpClass()
    # Check if the unit test item page and the property both exist
    item_ns = cls.get_repo().item_namespace
    for page in cls.get_site().search('IsbnWikibaseBotUnitTest',
                                      total=1, namespaces=item_ns):
        cls.test_page_qid = page.title()
        item_page = ItemPage(cls.get_repo(), page.title())
        for pid, claims in item_page.get()['claims'].items():
            for claim in claims:
                prop_page = pywikibot.PropertyPage(cls.get_repo(),
                                                   claim.getID())
                prop_page.get()
                if ('ISBN-10' in prop_page.labels.values() and
                        claim.getTarget() == '097522980x'):
                    return
        raise unittest.SkipTest(
            u'%s: "ISBN-10" property was not found in '
            u'"IsbnWikibaseBotUnitTest" item page' % cls.__name__)
    raise unittest.SkipTest(
        u'%s: "IsbnWikibaseBotUnitTest" item page was not found'
        % cls.__name__)

generator = genFactory.getCombinedGenerator()
repo = pywikibot.Site().data_repository()
for page in generator:
    cats = [cat.title(underscore=True, withNamespace=False)
            for cat in page.categories()]
    features = model.label_case(cats)
    res = Kian.kian(model.theta, features)[0]
    if res > second_thrashhold:
        try:
            item = pywikibot.ItemPage.fromPage(page)
        except pywikibot.NoPage:
            continue
        if model.property_name in item.claims:
            continue
        claim = pywikibot.Claim(repo, model.property_name)
        claim.setTarget(pywikibot.ItemPage(repo, model.value))
        item.addClaim(claim, summary='Bot: Adding %s:%s from %s '
                      '([[User:Ladsgroup/Kian|Powered by Kian]])' %
                      (model.property_name, model.value, model.wiki))
        source = pywikibot.Claim(repo, 'P143')
        source.setTarget(pywikibot.ItemPage(repo, sources[model.wiki]))
        claim.addSource(source)
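# Note: claim.addSource() has to run after item.addClaim() has saved the
# claim, because pywikibot attaches the reference to the statement ID that
# the server assigns when the claim is first stored.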

for site, link in toImport.iteritems():
    if link.isRedirectPage():
        toImport[site] = link.getRedirectTarget()
if items is not None:
    for site, link in toImport.iteritems():
        linkItem = pywikibot.ItemPage.fromPage(link)
        if linkItem.exists():
            if linkItem.getID() in items:
                items[linkItem.getID()] += 1
            else:
                items[linkItem.getID()] = 1
    if len(items) > 0:
        # select the most appropriate item: the one most links resolve to
        items = list(sorted(items.keys(), key=items.__getitem__))
        importInto = pywikibot.ItemPage(page.site.data_repository(),
                                        items[-1])
        importInto.get(force=True)
if importInto is None or not importInto.exists():
    pywikibot.warning(u'no data item to import sitelinks into')
    return
for site, langlink in langlinks.iteritems():
    if site.dbName() in importInto.sitelinks:
        try:
            if (langlink.site.sametitle(importInto.sitelinks[site.dbName()],
                                        langlink.title())
                    or (langlink.exists()
                        and langlink.isRedirectPage()
                        and langlink.getRedirectTarget().exists()
                        and langlink.getRedirectTarget() == pywikibot.Page(
                            site, importInto.sitelinks[site.dbName()]))):
                toRemove.append(langlink)
            else:
                pywikibot.error(u'interwiki conflict: {} != {}'.format(
                    langlink.title(), importInto.sitelinks[site.dbName()]))
                return
        except Exception as e:
            pywikibot.warning(e)
toImport = [sitelink for site, sitelink in toImport.iteritems()
            if sitelink.site.dbName() not in importInto.sitelinks
            or importInto.sitelinks[sitelink.site.dbName()]
            != sitelink.title()]

def cacheSources(self):
    """
    Fetch the sources from the list on Wikidata.

    The list is stored internally and reused by getSource().
    """
    page = pywikibot.Page(self.repo, 'List of wikis/python', ns=4)
    self.source_values = json.loads(page.get())
    for family_code, family in self.source_values.items():
        for source_lang in family:
            self.source_values[
                family_code][source_lang] = pywikibot.ItemPage(
                    self.repo, family[source_lang])
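
# A minimal sketch (an assumption, not the original implementation) of how
# getSource() could consume the cache built above, wrapping a cached
# ItemPage in a P143 ("imported from") source claim:
def getSource(self, family_code, source_lang):
    """Return a P143 source claim for the given wiki, or None."""
    item = self.source_values.get(family_code, {}).get(source_lang)
    if item is None:
        return None
    source = pywikibot.Claim(self.repo, 'P143')
    source.setTarget(item)
    return source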

def _generator():
    if task == 'good':
        item_no = good_name['wikidata'][1]
    elif task == 'featured':
        item_no = featured_name['wikidata'][1]
    elif task == 'former':
        item_no = former_name['wikidata'][1]
    dp = pywikibot.ItemPage(self.repo, item_no)
    dp.get()
    for key in sorted(dp.sitelinks.keys()):
        try:
            site = self.site.fromDBName(key)
        except pywikibot.SiteDefinitionError:
            pywikibot.output('"%s" is not a valid site. Skipping...'
                             % key)
        else:
            if site.family == self.site.family:
                yield site

        'http://www.stellarium.org',
        lambda x: re.match(r'latest version (\d+\.\d+(\.\d+)?)$',
                           x.find('div', {'id': 'latestversion'})
                           .find('a').text).group(1),
        StrictVersion
    )
},
'Q6410733': {
    'P348': (
        'http://kineticjs.com',
        lambda x: re.match(r'^(\d+\.\d+\.\d+)$',
                           x.find('div', {'id': 'downloadContainer'})
                           .find('a', {'class': 'download'})
                           .find('span').text).group(1),
        StrictVersion
    )
}
}
for qid in items:
    item = pywikibot.ItemPage(site, qid)
    if not item.exists():
        pywikibot.output(u'\03{{lightyellow}}Warning: item {} does not '
                         u'exist'.format(item))
        continue
    item.get(force=True)
    for prop in items[qid]:
        propname = pywikibot.PropertyPage(site, prop)
        propname.get()
        propname = propname.labels['en']
        new = items[qid][prop][1](
            BeautifulSoup(urllib2.urlopen(items[qid][prop][0]).read()))
        pywikibot.output(
            u'{prop} for "{program}" according to "{url}": {new}'.format(
                prop=propname, program=item.labels['en'],
                url=items[qid][prop][0], new=new))
        if prop in item.claims:
            if len(item.claims[prop]) == 1:
                old = item.claims[prop][0].getTarget()
                comp = items[qid][prop][2]
                if comp(new) > comp(old):
                    pywikibot.output(
                        u'\03{{lightblue}}{program} has an out-of-date '
                        u'claim for {prop}: "{old}" instead of '
                        u'"{new}"'.format(program=item.labels['en'],
                                          prop=propname, old=old, new=new))
                    return

if urls[0].getTarget() != url_raw:
    logger.error(
        f"The url on the object ({urls[0].getTarget()}) doesn't match "
        f"the url from the sparql query ({url_raw}) for {q_value}"
    )
    return

# "Editing" in this case actually means removing the old value and
# adding the new one
claim = Claim(Settings.wikidata_repo, source_p)
claim.setTarget(url_normalized)
claim.setSnakType("value")
item.addClaim(claim, summary=get_sumary(edit_group_hash))
item.removeClaims(urls[0], summary=get_sumary(edit_group_hash))

# Add git as protocol
git = ItemPage(Settings.wikidata_repo, "Q186055")
get_or_create_qualifiers(claim, Properties.protocol, git, edit_group_hash)