def testInterWikiForward(self):
self.site = self.get_site()
self.mainpage = pywikibot.Page(pywikibot.Link("Main Page", self.site))
# test pagelanglinks on commons,
# which forwards interwikis to wikipedia
ll = next(self.site.pagelanglinks(self.mainpage))
self.assertIsInstance(ll, pywikibot.Link)
self.assertEqual(ll.site.family.name, 'wikipedia')
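# For context: a minimal, hedged sketch of the pywikibot.Link API that the
# snippets on this page exercise. It assumes a working user-config.py and
# network access; the title is an arbitrary example.
import pywikibot

site = pywikibot.Site('en', 'wikipedia')
link = pywikibot.Link('Talk:Main Page', site)
link.parse()  # resolves interwiki prefix, namespace and section
print(link.site, link.namespace, link.title)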
        source_text = '[[Foo#bar|baz]]'
        self.assertEqual(
            textlib.replace_links(source_text,
                                  ('Foo', pywikibot.Link('Bar', self.wp_site)),
                                  self.wp_site),
            '[[Bar]]')
self.assertEqual(
textlib.replace_links(source_text, ('Foo', 'Bar#snafu'),
self.wp_site),
'[[Bar#bar|baz]]')
self.assertEqual(
textlib.replace_links(source_text,
('Foo', pywikibot.Page(self.wp_site,
'Bar#snafu')),
self.wp_site),
'[[Bar#bar|baz]]')
self.assertEqual(
textlib.replace_links(source_text,
('Foo', pywikibot.Link('Bar#snafu',
self.wp_site)),
self.wp_site),
'[[Bar#snafu]]')
self.assertEqual(
textlib.replace_links(source_text, ('Foo', 'Bar|foo'),
self.wp_site), '[[Bar#bar|baz]]')
self.assertEqual(
textlib.replace_links(source_text,
('Foo', pywikibot.Page(self.wp_site,
'Bar|foo')),
self.wp_site),
'[[Bar#bar|baz]]')
        self.assertEqual(
            textlib.replace_links(source_text,
                                  ('Foo', pywikibot.Link('Bar|foo',
                                                         self.wp_site)),
                                  self.wp_site),
            '[[Bar|foo]]')
def test_invalid_link_as_source(self):
"""Test IndexPage from invalid Link as source."""
source = pywikibot.Link(self.not_existing_invalid_title,
source=self.site)
self.assertRaises(ValueError, IndexPage, source)
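# Note: a Link does not validate its text at construction time, which is why
# the invalid title above only fails once IndexPage processes the Link.
# A minimal sketch (the malformed title is a hypothetical example):
import pywikibot

site = pywikibot.Site('en', 'wikipedia')
bad = pywikibot.Link('Foo[bar]', site)  # construction succeeds
try:
    bad.parse()  # parsing rejects titles with illegal characters
except pywikibot.InvalidTitle as error:
    print('rejected:', error)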
                    user_input = pywikibot.input("""\
Please enter the name of the page where the redirect should have pointed at,
or press enter to quit:""")
if user_input == '':
self.quit()
else:
self.alternatives.append(user_input)
except pywikibot.IsNotRedirectPage:
pywikibot.output(
'The specified page is not a redirect. Skipping.')
return False
elif self.getAlternatives:
# not disambPage.isRedirectPage() or self.primary
try:
if self.primary:
try:
disambPage2 = pywikibot.Page(
pywikibot.Link(
primary_topic_format[self.mylang]
% disambPage.title(),
self.mysite))
links = disambPage2.linkedPages()
if self.first_only:
links = self.firstize(disambPage2, links)
links = [correctcap(l, disambPage2.get())
for l in links]
except pywikibot.NoPage:
pywikibot.output(
'Page does not exist; using first '
'link in page {0}.'.format(disambPage.title()))
links = disambPage.linkedPages()[:1]
links = [correctcap(l, disambPage.get())
for l in links]
                else:
                    # non-primary: use every link on the disambiguation page
                    try:
                        links = disambPage.linkedPages()
                        if self.first_only:
                            links = self.firstize(disambPage, links)
                        links = [correctcap(l, disambPage.get())
                                 for l in links]
                    except pywikibot.NoPage:
                        pywikibot.output('Page does not exist, skipping.')
                        return False
dn = False
edited = False
# This loop will run until we have finished the current page
while True:
m = self.linkR.search(text, pos=curpos)
if not m:
if n == 0:
# No changes necessary for this disambiguation title.
return 'nochange'
else:
# stop loop and save page
break
# Ensure that next time around we will not find this same hit.
curpos = m.start() + 1
try:
foundlink = pywikibot.Link(m.group('title'),
disambPage.site)
foundlink.parse()
except pywikibot.Error:
continue
# ignore interwiki links
if foundlink.site != disambPage.site:
continue
# Check whether the link found is to disambPage.
try:
if foundlink.canonical_title() != disambPage.title():
continue
except pywikibot.Error:
# must be a broken link
pywikibot.log(u"Invalid link [[%s]] in page [[%s]]"
% (m.group('title'), refPage.title()))
continue
                try:
                    target_link = pywikibot.Link(target, self.site)
                    target_link.parse()
                except pywikibot.SiteDefinitionError as e:
pywikibot.log(e)
pywikibot.output(
'NOTE: Ignoring {0} which is a redirect ({1}) to an '
'unknown site.'.format(entry.title, target))
target_link = None
else:
if target_link.site != self.site:
pywikibot.output(
'NOTE: Ignoring {0} which is a redirect to '
'another site {1}.'
.format(entry.title, target_link.site))
target_link = None
# if the redirect does not link to another wiki
if target_link and target_link.title:
source = pywikibot.Link(entry.title, self.site)
if target_link.anchor:
pywikibot.output(
'HINT: {0} is a redirect with a pipelink.'
.format(entry.title))
redict[space_to_underscore(source)] = (
space_to_underscore(target_link))
if alsoGetPageTitles:
return redict, pageTitles
else:
return redict
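# The space_to_underscore() helper used above is not part of this snippet;
# a plausible sketch, assuming it normalizes a Link's canonical title to the
# underscore form found in dump entries:
def space_to_underscore(link):
    """Return the canonical page title with spaces as underscores."""
    return link.canonical_title().replace(' ', '_')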
def disambcategory(self):
"""Return Category in which disambig pages are listed."""
try:
            name = (self.namespace(14) + ':'
                    + self.family.disambcatname[self.code])
        except KeyError:
            raise Error('No disambiguation category name found for %(site)s'
                        % {'site': self})
return pywikibot.Category(pywikibot.Link(name, self))
for node in parsed.nodes:
if isinstance(node, mwparserfromhell.nodes.tag.Tag):
if node.tag == 'li':
current_user = None
continue
if isinstance(node, mwparserfromhell.nodes.text.Text):
if node.endswith('\n'):
current_user = False
continue
if isinstance(node, mwparserfromhell.nodes.wikilink.Wikilink):
if current_user is False:
pywikibot.debug('Link to "{0}" ignored as outside '
'list'.format(node.title), _logger)
continue
obj = pywikibot.Link(node.title, self.site)
if obj.namespace == -1:
# the parser accepts 'special:prefixindex/' as a wildcard
                # this allows a prefix that doesn't match an existing page
# to be a blue link, and can be clicked to see what pages
# will be included in the whitelist
name, sep, prefix = obj.title.partition('/')
if name.lower() in self._prefixindex_aliases:
if not prefix:
verbose_output('Whitelist everything')
page = ''
else:
page = prefix
verbose_output('Whitelist prefixindex hack for: '
+ page)
                elif obj.namespace == 2 and not current_user:
                    # if a target user has not been found yet and this is a
                    # link to a user page, that user becomes the target of
                    # subsequent rules
                    current_user = obj.title
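# For reference: namespace -1 is the Special: namespace, so the branch above
# matches links such as [[Special:PrefixIndex/Foo]]. A minimal sketch:
import pywikibot

site = pywikibot.Site('en', 'wikipedia')
link = pywikibot.Link('Special:PrefixIndex/Foo', site)
link.parse()
assert link.namespace == -1
name, sep, prefix = link.title.partition('/')
print(name, prefix)  # 'PrefixIndex', 'Foo'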
"""Treat a page / item."""
pywikibot.output('Found ' + item.title())
imagename = page.properties().get('page_image_free')
if not imagename:
return
claims = item.get().get('claims')
if self.wdproperty in claims:
pywikibot.output('Item {} already contains image ({})'
.format(item.title(), self.wdproperty))
return
newclaim = pywikibot.Claim(self.repo, self.wdproperty)
commonssite = pywikibot.Site('commons', 'commons')
imagelink = pywikibot.Link(imagename, source=commonssite,
default_namespace=6)
image = pywikibot.FilePage(imagelink)
if image.isRedirectPage():
image = pywikibot.FilePage(image.getRedirectTarget())
if not image.exists():
pywikibot.output("{} doesn't exist so I can't link to it"
.format(image.title(as_link=True)))
return
newclaim.setTarget(image)
# A generator might yield pages from multiple sites
self.user_add_claim(item, newclaim, page.site)
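# A minimal sketch of the Link -> FilePage pattern used above:
# default_namespace=6 makes a bare file name resolve into the File:
# namespace (the file name is a hypothetical example).
import pywikibot

commons = pywikibot.Site('commons', 'commons')
filelink = pywikibot.Link('Example.jpg', source=commons, default_namespace=6)
image = pywikibot.FilePage(filelink)
print(image.title())  # 'File:Example.jpg'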