How to use the pywikibot.Page class in pywikibot

To help you get started, we’ve selected a few pywikibot.Page examples based on popular ways it is used in public projects.
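Every example follows the same two-step pattern: get a Site object, then construct a Page from that site and a title. A minimal round trip, as a sketch (the title and edit summary below are placeholders):

import pywikibot

site = pywikibot.Site('en', 'wikipedia')           # which wiki to talk to
page = pywikibot.Page(site, 'Wikipedia:Sandbox')   # lazy: nothing is fetched yet

if page.exists():
    text = page.text                               # fetches and caches the wikitext
    page.text = text + '\n~~~~'
    page.save(summary='pywikibot.Page demo edit')  # writes the change back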


github wikimedia / pywikibot / tests / archivebot_tests.py
{{User:MiszaBot/config
         |archive = Talk:Main_Page/archive
         |algo = old(30d)
         }}
         
         == A ==
         foo bar
         
         == B ==
         foo bar bar bar
        """
        site = self.get_site()
        page = pywikibot.Page(site, 'Talk:For-pywikibot-archivebot')
        tmpl = pywikibot.Page(site, 'User:MiszaBot/config')
        archiver = archivebot.PageArchiver(
            page=page, template=tmpl, salt='', force=False)
        page = archivebot.DiscussionPage(page, archiver)
        page.load_page()
        self.assertEqual([x.title for x in page.threads], ['A', 'B'])
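PageArchiver and DiscussionPage are internal to the archivebot script, but what the test checks — enumerating the section headings of a talk page — can be reproduced with plain pywikibot plus mwparserfromhell (which also appears in a later example). A sketch, with a placeholder title:

import mwparserfromhell
import pywikibot

site = pywikibot.Site('en', 'wikipedia')
page = pywikibot.Page(site, 'Talk:Main Page')
code = mwparserfromhell.parse(page.text)
for heading in code.filter_headings():
    print(str(heading.title).strip())   # would print 'A' and 'B' for the wikitext above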
github wikimedia / pywikibot / tests / page_tests.py
def testImageAndDataRepository(self):
        """Test image_repository and data_repository page attributes."""
        site = self.get_site()
        p1 = pywikibot.Page(site, 'Help:Test page#Testing')
        self.assertIsInstance(p1.image_repository, pywikibot.site.APISite)
        self.assertEqual(p1.image_repository,
                         pywikibot.site.APISite('commons', 'commons'))

        p2 = pywikibot.Page(site, 'File:Jean-Léon Gérôme 003.jpg')
        self.assertIsInstance(p2.data_repository, pywikibot.site.APISite)
        self.assertEqual(p2.data_repository,
                         pywikibot.site.APISite('wikidata', 'wikidata'))
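Both attributes answer "which wiki actually stores this page's files or structured data?" and return an APISite. A quick sketch (the file title is only an example):

import pywikibot

site = pywikibot.Site('en', 'wikipedia')
page = pywikibot.Page(site, 'File:Example.jpg')

print(page.image_repository)   # normally the Wikimedia Commons site
print(page.data_repository)    # normally the Wikidata site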
github wikimedia / pywikibot / pywikibot / textlib.py
    @type insite: BaseSite
    @return: string including wiki links formatted for inclusion
        in insite
    @rtype: unicode
    """
    if insite is None:
        insite = pywikibot.Site()
    if not links:
        return ''

    ar = interwikiSort(list(links.keys()), insite)
    s = []
    for site in ar:
        if isinstance(links[site], pywikibot.Link):
            links[site] = pywikibot.Page(links[site])
        if isinstance(links[site], pywikibot.Page):
            title = links[site].title(as_link=True, force_interwiki=True,
                                      insite=insite)
            link = title.replace('[[:', '[[')
            s.append(link)
        else:
            raise ValueError('links dict must contain Page or Link objects')
    if insite.code in insite.family.interwiki_on_one_line:
        sep = ' '
    else:
        sep = config.line_separator
    s = sep.join(s) + config.line_separator
    return s
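The conversion step in the loop is worth noting: pywikibot.Page accepts an existing Link object, which is how plain Link values in the dict are upgraded before their titles are rendered. Isolated, and with illustrative titles, it looks like this:

import pywikibot

ensite = pywikibot.Site('en', 'wikipedia')
link = pywikibot.Link('Hauptseite', source=pywikibot.Site('de', 'wikipedia'))
page = pywikibot.Page(link)   # a Page built directly from a Link

# force_interwiki renders the title as seen from another wiki:
print(page.title(as_link=True, force_interwiki=True, insite=ensite))
# e.g. '[[de:Hauptseite]]'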
github wikimedia / pywikibot / scripts / standardize_interwiki.py
else:
                start = arg[7:]
    site = pywikibot.Site()
    comm = pywikibot.translate(site, comment)
    for pl in site.allpages(start):
        plname = pl.title()
        pywikibot.output('\nLoading {0}...'.format(plname))
        try:
            oldtext = pl.get()
        except pywikibot.IsRedirectPage:
            pywikibot.output('{0} is a redirect!'.format(plname))
            continue
        old = pl.interwiki()
        new = {}
        for pl2 in old:
            new[pl2.site] = pywikibot.Page(pl2)
        newtext = textlib.replaceLanguageLinks(oldtext, new, site=site)
        if new:
            if oldtext != newtext:
                pywikibot.showDiff(oldtext, newtext)
                # Submit changes
                try:
                    pl.put(newtext, comment=comm)
                except pywikibot.LockedPage:
                    pywikibot.output('{0} is locked'.format(plname))
                    continue
            else:
                pywikibot.output('No changes needed.')
                continue
        else:
            pywikibot.output('No interwiki found.')
            continue
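The get()/put() pair is the classic read-modify-write cycle: get() raises rather than silently returning redirect or missing-page content, so the script can skip those cases. Condensed into a sketch (these are the older exception spellings the script uses; newer releases call them IsRedirectPageError and NoPageError):

import pywikibot

site = pywikibot.Site()
page = pywikibot.Page(site, 'Some page')   # placeholder title

try:
    old_text = page.get()
except pywikibot.IsRedirectPage:
    pass   # redirect: skip
except pywikibot.NoPage:
    pass   # missing page: skip
else:
    page.put(old_text, summary='no-op example edit')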
github wikimedia / pywikibot / scripts / archivebot.py
elif option == 'salt':
            salt = value
        elif option == 'force':
            force = True
        elif option == 'page':
            pagename = value
        elif option == 'namespace':
            namespace = value

    site = pywikibot.Site()

    if calc:
        if not salt:
            pywikibot.bot.suggest_help(missing_parameters=['-salt'])
            return False
        page = pywikibot.Page(site, calc)
        if page.exists():
            calc = page.title()
        else:
            pywikibot.output(
                'NOTE: the specified page "{0}" does not (yet) exist.'
                .format(calc))
        pywikibot.output('key = {}'.format(calc_md5_hexdigest(calc, salt)))
        return

    if not templates:
        pywikibot.bot.suggest_help(
            additional_text='No template was specified.')
        return False

    for template_name in templates:
        pagelist = []
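One detail the -calc branch relies on: constructing a Page normalizes the title (underscores become spaces and, on most wikis, the first letter is capitalized), and page.title() returns that canonical form — which is why calc is replaced with it when the page exists. For example:

import pywikibot

site = pywikibot.Site('en', 'wikipedia')
page = pywikibot.Page(site, 'main_page')

print(page.title())    # 'Main page' -- the normalized form
print(page.exists())   # True only if the page is actually on the wiki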
github ricordisamoa / wiki / new_interwiki.py
'prefixes': ['q', 's', 'voy'],
            'remove': ['q', 's', 'voy']
        },
        'itwikiquote': {
            'prefixes': ['w', 's', 'voy']
        }
    }
    if page.site.dbName() in config:
        for template in mwparserfromhell.parse(page.text).ifilter_templates():
            if template.name[0].upper() + template.name[1:] in [u'Ip', u'Interprogetto']:
                prefixes = config[page.site.dbName()]['prefixes']
                for param in template.params:
                    if unicode(param.value).strip() != '':
                        if unicode(param.name).isnumeric():
                            if unicode(param.value) in prefixes:
                                yield pywikibot.Page(page.site, unicode(param.value) + ':' + page.title())
                        elif unicode(param.name) in prefixes:
                            lang = (unicode(template.get(unicode(param.name) + '_site').value) + ':') if template.has(unicode(param.name) + '_site') else ''
                            yield pywikibot.Page(page.site, unicode(param.name) + ':' + lang + unicode(param.value))
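This generator is Python 2 code (note unicode()). A rough Python 3 translation of the loop body, using str in place of unicode and omitting the *_site suffix handling for brevity, might read:

import mwparserfromhell
import pywikibot

def linked_pages(page, prefixes):
    # Yield sister-project pages referenced by an {{Interprogetto}}-style template.
    for template in mwparserfromhell.parse(page.text).ifilter_templates():
        if template.name.matches(('Ip', 'Interprogetto')):
            for param in template.params:
                name = str(param.name).strip()
                value = str(param.value).strip()
                if not value:
                    continue
                if name.isnumeric() and value in prefixes:
                    yield pywikibot.Page(page.site, value + ':' + page.title())
                elif name in prefixes:
                    yield pywikibot.Page(page.site, name + ':' + value)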
github wikimedia / pywikibot / scripts / transferbot.py
Page transfer configuration
    ---------------------------
    Source: %(fromsite)r
    Target: %(tosite)r

    Pages to transfer: %(gen_args)s

    Prefix for transferred pages: %(prefix)s
    """ % {'fromsite': fromsite, 'tosite': tosite,
           'gen_args': gen_args, 'prefix': prefix})

    for page in gen:
        target_title = (prefix + page.namespace().canonical_prefix()
                        + page.title(with_ns=False))
        targetpage = pywikibot.Page(tosite, target_title)
        edithistpage = pywikibot.Page(tosite, target_title + '/edithistory')
        summary = 'Moved page from {old} ([[{new}/edithistory|history]])' \
                  .format(old=page.title(as_link=True, insite=tosite),
                          new=targetpage.title() if not
                          targetpage.namespace().subpages else '')

        if targetpage.exists() and not overwrite:
            pywikibot.output(
                'Skipped {0} (target page {1} exists)'.format(
                    page.title(as_link=True),
                    targetpage.title(as_link=True)
                )
            )
            continue

        pywikibot.output('Moving {0} to {1}...'
                         .format(page.title(as_link=True),
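transferbot builds its target and /edithistory pages simply by constructing Page objects against the destination site; nothing touches the network until the pages are read or saved. The essential cross-wiki step, reduced to a sketch with example sites and titles:

import pywikibot

fromsite = pywikibot.Site('en', 'wikipedia')
tosite = pywikibot.Site('de', 'wikipedia')

page = pywikibot.Page(fromsite, 'Example article')
targetpage = pywikibot.Page(tosite, page.title())
edithistpage = pywikibot.Page(tosite, page.title() + '/edithistory')

# Namespace objects know whether they allow subpages:
print(targetpage.namespace().subpages)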
github notconfusing / harvest_infobox_book / fix_databaseconstraints.py
cleanedisbn = filter(lambda a: a in '1234567890xX-', isbn)
    numericisbn = filter(lambda a: a in '1234567890xX', isbn)
    if len(numericisbn) == 10:
        return cleanedisbn
    else:
        return False

def boolvalidate(isbn):
    try:
        p = pyisbn.validate(isbn)
    except Exception:  # narrowed from a bare except so Ctrl-C still works
        return False
    return p


rootpage = pywikibot.Page(wikidata, 'Wikidata:Database_reports/Constraint_violations/P212#Format')

rootpage = rootpage.get()

wikicode = mwp.parse(rootpage)

def savecases():
    fixcasesJSON = open('fixcases.JSON', 'w')
    json.dump(fixcases, fixcasesJSON, indent=4)
    fixcasesJSON.close()


fixcasesJSON = open('fixcases.JSON', 'r')
fixcases = json.load(fixcasesJSON)


sections = wikicode.get_sections()
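Note that rootpage is immediately rebound to the wikitext returned by get(); that works, but it hides the Page object behind a string. It also illustrates the two read styles pywikibot offers: get() raises for a missing page, while the text property simply returns an empty string. A sketch:

import pywikibot

site = pywikibot.Site('wikidata', 'wikidata')
page = pywikibot.Page(site, 'Wikidata:Sandbox')   # placeholder title

wikitext = page.text   # '' when the page does not exist
wikitext = page.get()  # raises when the page is missing or a redirect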
github wikimedia / pywikibot / scripts / casechecker.py
def Page(self, title):
        """Create Page object from title."""
        return pywikibot.Page(self.site, title)
github wikimedia / pywikibot / scripts / makecat.py
for page in artlist:
                checked[page] = page

    # Fetch articles in category, and mark as already checked (seen)
    # If category is empty, ask user if they want to look for pages
    # in a different category.
    articles = list(workingcat.articles(content=True))
    if not articles:
        pywikibot.output('Category {} does not exist or is empty. '
                         'Which page to start with?'
                         .format(workingcatname))
        answer = pywikibot.input('(Default is [[{}]]):'.format(workingcatname))
        if not answer:
            answer = workingcatname
        pywikibot.output(answer)
        pl = pywikibot.Page(mysite, answer)
        articles = [pl]

    for pl in articles:
        checked[pl] = pl
        bot.include(pl, summary=summary)

    gen = pagegenerators.DequePreloadingGenerator(tocheck)

    for page in gen:
        if bot.checkbroken or page.exists():
            bot.asktoadd(page, summary)
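DequePreloadingGenerator wraps a collections.deque, so pages appended to tocheck while the loop runs are still preloaded in batches. When the input is a fixed collection of Page objects, the plain PreloadingGenerator gives the same batched fetching; a sketch with placeholder titles:

import pywikibot
from pywikibot import pagegenerators

site = pywikibot.Site('en', 'wikipedia')
pages = (pywikibot.Page(site, title) for title in ('Foo', 'Bar', 'Baz'))

for page in pagegenerators.PreloadingGenerator(pages, groupsize=50):
    print(page.title(), page.exists())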