How to use the pywikibot.Site function in pywikibot

To help you get started, we’ve selected a few pywikibot.Site examples based on popular ways it is used in public projects.
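
Site is the factory for connection objects: pass a language code and a family name, or call it with no arguments to use the mylang and family values from your user-config.py. A minimal sketch, assuming a working user-config.py (the 'test' code below is just a placeholder wiki):

import pywikibot

site = pywikibot.Site('test', 'wikipedia')   # explicit language/family pair
default_site = pywikibot.Site()              # defaults from user-config.py

print(site)  # str(site) is roughly 'wikipedia:test'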

github wikimedia / pywikibot / tests / wikibase_edit_tests.py
def test_WbGeoShape_edit(self):
        """Attempt adding a geo-shape with valid input."""
        # Clean the slate in preparation for test.
        testsite = self.get_repo()
        item = self._clean_item(testsite, 'P27199')

        # set new claim
        claim = pywikibot.page.Claim(testsite, 'P27199', datatype='geo-shape')
        commons_site = pywikibot.Site('commons', 'commons')
        page = pywikibot.Page(commons_site, 'Data:Lyngby Hovedgade.map')
        target = pywikibot.WbGeoShape(page)
        claim.setTarget(target)
        item.addClaim(claim)

        # confirm new claim
        item.get(force=True)
        claim = item.claims['P27199'][0]
        self.assertEqual(claim.getTarget(), target)
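
A geo-shape value must point at a Data:….map page hosted on Wikimedia Commons, which is why the test builds a second Site('commons', 'commons') alongside the repository. A condensed sketch of the same construction against the live Wikidata repo (P3896 is Wikidata's geo-shape property; treat the page title as a placeholder):

import pywikibot

repo = pywikibot.Site('wikidata', 'wikidata')
commons = pywikibot.Site('commons', 'commons')

claim = pywikibot.Claim(repo, 'P3896', datatype='geo-shape')
page = pywikibot.Page(commons, 'Data:Lyngby Hovedgade.map')
claim.setTarget(pywikibot.WbGeoShape(page))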

github wikimedia / pywikibot / tests / site_tests.py
def testConstructors(self):
        """Test cases for site constructors."""
        if isinstance(self.site.family, pywikibot.family.WikimediaFamily):
            site = self.site
        else:
            site = None
        self.assertEqual(pywikibot.site.APISite.fromDBName('enwiki', site),
                         pywikibot.Site('en', 'wikipedia'))
        self.assertEqual(pywikibot.site.APISite.fromDBName('eswikisource', site),
                         pywikibot.Site('es', 'wikisource'))
        self.assertEqual(pywikibot.site.APISite.fromDBName('dewikinews', site),
                         pywikibot.Site('de', 'wikinews'))
        self.assertEqual(pywikibot.site.APISite.fromDBName('ukwikivoyage', site),
                         pywikibot.Site('uk', 'wikivoyage'))
        self.assertEqual(pywikibot.site.APISite.fromDBName('metawiki', site),
                         pywikibot.Site('meta', 'meta'))
        self.assertEqual(pywikibot.site.APISite.fromDBName('commonswiki', site),
                         pywikibot.Site('commons', 'commons'))
        self.assertEqual(pywikibot.site.APISite.fromDBName('wikidatawiki', site),
                         pywikibot.Site('wikidata', 'wikidata'))
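
APISite.fromDBName maps a Wikimedia database name such as 'enwiki' to the same object that pywikibot.Site builds from a language/family pair (the test passes an existing Wikimedia site as a helper for the lookup). A quick equivalence check, assuming network access to Wikimedia:

import pywikibot

assert pywikibot.site.APISite.fromDBName('enwiki') == pywikibot.Site('en', 'wikipedia')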

github metakgp / metakgp-wiki / jobs / pywikibot / scripts / updatestatistics.py
def update_list_of_pages(template, pages):
    template_page = pywikibot.Page(pywikibot.Link(template), pywikibot.Site())
    text = " This page is automatically generated. Changes will be overwritten, so '''do not modify'''.\n"
    for p in pages:
        text += "*[[%s]]\n" % p
    text = text.rstrip()
    if template_page.text == text:
        print(template, 'unchanged, no edit made.')
        return
    else:
        print(template, 'changed:')
        print(text)
        #diff = difflib.ndiff(template_page.text.splitlines(1),
        #                     text.splitlines(1))
        #for d in diff:
        #    print d,
        template_page.text = text
        template_page.save('Updated on ' +

github wikimedia / pywikibot / scripts / data_ingestion.py
    def __init__(self, URL, metadata, site=None):
        """Initializer.

        @param site: target site
        @type site: APISite

        """
        self.URL = URL
        self.metadata = metadata
        self.metadata['_url'] = URL
        self.metadata['_filename'] = filename = posixpath.split(
            urlparse(URL)[2])[1]
        self.metadata['_ext'] = ext = filename.split('.')[-1]
        if ext == filename:
            self.metadata['_ext'] = ext = None
        self.contents = None

        if not site:
            site = pywikibot.Site('commons', 'commons')

        # default title
        super(Photo, self).__init__(site,
                                    self.getTitle('%(_filename)s.%(_ext)s'))
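
The filename and extension bookkeeping above is plain standard-library work and can be tried in isolation; a small sketch with a made-up URL:

import posixpath
from urllib.parse import urlparse

URL = 'https://upload.example.org/photos/Hovedgade.map.jpg'  # hypothetical
filename = posixpath.split(urlparse(URL)[2])[1]  # 'Hovedgade.map.jpg'
ext = filename.split('.')[-1]                    # 'jpg'
if ext == filename:
    ext = None  # no dot in the name, so no extension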

github wikimedia / pywikibot / scripts / patrol.py
def __init__(self, page_title):
        """Initializer.

        @param page_title: The page title for this rule
        @type page_title: pywikibot.Page
        """
        self.site = pywikibot.Site()
        self.page_title = page_title
        self.linkedpages = None

github wikimedia / pywikibot / pywikibot / textlib.py
    # 'dontTouchRegexes' exists to reduce git blame only.
    dontTouchRegexes = result

    for exc in keys:
        if isinstance(exc, basestring):
            # assume the string is a reference to a standard regex above,
            # which may not yet have a site specific re compiled.
            if exc in _regex_cache:
                if type(_regex_cache[exc]) is tuple:
                    if not site and exc in ('interwiki', 'property', 'invoke',
                                            'category', 'file'):
                        issue_deprecation_warning(
                            'site=None',
                            "a valid site for '{}' regex".format(exc), 2,
                            since='20151006')
                        site = pywikibot.Site()

                    if (exc, site) not in _regex_cache:
                        re_text, re_var = _regex_cache[exc]
                        _regex_cache[(exc, site)] = re.compile(
                            re_text % re_var(site), re.VERBOSE)

                    result.append(_regex_cache[(exc, site)])
                else:
                    result.append(_regex_cache[exc])
            else:
                # nowiki, noinclude, includeonly, timeline, math and other
                # extensions
                _regex_cache[exc] = _tag_regex(exc)
                result.append(_regex_cache[exc])
            # handle alias
            if exc == 'source':
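
The excerpt revolves around one memoization pattern: site-specific patterns are stored as (template, variable-builder) tuples and compiled at most once per (name, site) pair. A stripped-down sketch of that idea (the names here are illustrative, not the real pywikibot internals):

import re

_cache = {}

def site_regex(name, template, site):
    # Compile the site-specific pattern once, then reuse it.
    key = (name, site)
    if key not in _cache:
        _cache[key] = re.compile(template % site, re.VERBOSE)
    return _cache[key]

interwiki = site_regex('interwiki', r'\[\[ %s : [^\]]+ \]\]', 'en')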

github JackPotte / JackBot / core / scripts / delete.py
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    pageName = ''
    summary = None
    generator = None
    options = {}

    # read command line parameters
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()
    mysite = pywikibot.Site()

    for arg in local_args:

        if arg == '-always':
            options['always'] = True
        elif arg.startswith('-summary'):
            if len(arg) == len('-summary'):
                summary = pywikibot.input(u'Enter a reason for the deletion:')
            else:
                summary = arg[len('-summary:'):]
        elif arg.startswith('-images'):
            warn('-image option is deprecated. Please use -imageused instead.',
                 exceptions.ArgumentDeprecationWarning)
            local_args.append('-imageused' + arg[7:])
        elif arg.startswith('-undelete'):
            options['undelete'] = True

github wikimedia / pywikibot / scripts / flickrripper.py
def buildDescription(flinfoDescription='', flickrreview=False, reviewer='',
                     override='', addCategory='', removeCategories=False):
    """Build the final description for the image.

    The description is based on the info from flickrinfo and improved.

    """
    description = '== {{int:filedesc}} ==\n{}'.format(flinfoDescription)
    if removeCategories:
        description = textlib.removeCategoryLinks(description,
                                                  pywikibot.Site(
                                                      'commons', 'commons'))
    if override:
        description = description.replace('{{cc-by-sa-2.0}}\n', '')
        description = description.replace('{{cc-by-2.0}}\n', '')
        description = description.replace('{{flickrreview}}\n', '')
        description = description.replace(
            '{{copyvio|Flickr, licensed as "All Rights Reserved" which is not '
            'a free license --~~~~}}\n',
            '')
        description = description.replace('=={{int:license}}==',
                                          '=={{int:license}}==\n' + override)
    elif flickrreview:
        if reviewer:
            description = description.replace(
                '{{flickrreview}}',
                '{{flickrreview|' + reviewer +

github wpoa / recitation-bot / recitation-bot / journal_article.py
                            logging.info('Already exists image %s' % image_file)
                            metadata[image_dict][image]['uploaded_name'] = harmonized_name
                        else:
                            raise
        

        #now we start calling the uploader
        upload_sites = list()
        sites_map = {'commons':self.commons,
                     'equations':self.equations,
                     'tables':self.tables}
        for sitestr, flag in im_uploads.items():
            upload_sites.append(sites_map[sitestr])

        for lang, family, image_dict in upload_sites:
            site = pywikibot.Site(lang, family)
            if not site.logged_in():
                site.login()

            upload(site, self.metadata, image_dict)

        self.phase['upload_images'] = True

github ricordisamoa / wiki / wikidata / merge.py
		elif arg.startswith('-recurse:'):
			recurse=int(arg[9:])
		elif arg.startswith('-total:'):
			total=int(arg[7:])
		elif arg.startswith('-bulk:'):
			bulk=arg[6:]
		elif arg.startswith('-bulk'):
			bulk=True
		elif arg.startswith('-dups'):
			dups=True
		elif arg.startswith('-custom'):
			text = pywikibot.Page(site,u'Bot requests#Merge multiple items',ns=4).get(force=True)
			regex = re.compile('^\s*\*\s*\[http\:\/\/nssdc\.gsfc\.nasa\.gov\/nmc\/spacecraftDisplay\.do\?id\=[\w\d\-\s]+\]\: \{\{[Qq]\|(?P<item1>\d+)\}\}\, \{\{[Qq]\|(?P<item2>\d+)\}\}',flags=re.MULTILINE)
			for match in regex.finditer(text):
				merge_items((pywikibot.ItemPage(site,'Q'+match.group('item1')),pywikibot.ItemPage(site,'Q'+match.group('item2'))),taxon_mode=False)
	if cat and lang2:
		site2=pywikibot.Site(lang2,pywikibot.Site().family.name)
		for page1 in pywikibot.Category(pywikibot.Site(),cat).articles(recurse=recurse,total=total):
			page2=pywikibot.Page(site2,page1.title())
			if page2.exists():
				item1=pywikibot.ItemPage.fromPage(page1)
				item2=pywikibot.ItemPage.fromPage(page2)
				if item1!=item2:
					merge_items((item1,item2))
	elif bulk:
		text = pywikibot.Page(site,u'Requests for deletions/Bulk'+('' if bulk==True else '/'+bulk),ns=4).get(force=True)
		regex = re.compile('\|(?P<item>[Qq]\d+)')
		for match in regex.finditer(text):
			check_deletable(pywikibot.ItemPage(site,match.group('item')))
	elif dups:
		text = pywikibot.Page(site,'Byrial/Duplicates',ns=2).get(force=True)
		regex = re.compile('\*\s*\[\[(?P<item1>[Qq]\d+)\]\] \(1 link\, 0 statements\)\, \[\[(?P<item2>[Qq]\d+)\]\] \(1 link\, 0 statements\)\, duplicate link')
		for match in regex.finditer(text):