本文整理汇总了Python中pywikibot.textlib.replaceCategoryLinks函数的典型用法代码示例。如果您正苦于以下问题:Python replaceCategoryLinks函数的具体用法?Python replaceCategoryLinks怎么用?Python replaceCategoryLinks使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了replaceCategoryLinks函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: test_adjoining_links
def test_adjoining_links(self):
    """Test replacing category links that are not separated by newlines.

    Strips the configured line separator out of the sample text, then
    checks that extracting and re-inserting the categories with an empty
    separator reproduces the text unchanged.
    """
    old = self.old.replace(config.LS, '')
    cats = textlib.getCategoryLinks(old, site=self.site)
    self.assertEqual(self.cats, cats)
    sep = config.LS
    config.line_separator = ''  # use an empty separator temporarily
    try:
        new = textlib.replaceCategoryLinks(old, cats, site=self.site)
        self.assertEqual(old, new)
    finally:
        # Restore the default separator even if the call or assertion
        # fails, so later tests don't run with an empty separator.
        config.line_separator = sep
开发者ID:anrao91,项目名称:pywikibot-core,代码行数:9,代码来源:textlib_tests.py
示例2: make_categories
def make_categories(page, list, site=None):
    """Replace the page's category links with the given category titles.

    ``list`` is a sequence of bare category names (without namespace);
    the save happens asynchronously with a canned i18n summary.
    """
    if site is None:
        site = pywikibot.Site()
    # Build Category-namespace page objects for every requested title.
    pllist = [pywikibot.Page(site, "%s:%s" % (site.namespaces.CATEGORY, p))
              for p in list]
    updated = textlib.replaceCategoryLinks(page.get(), pllist,
                                           site=page.site)
    page.put_async(updated,
                   summary=i18n.twtranslate(site, 'catall-changing'))
开发者ID:Kat233hryn,项目名称:pywikibot-core,代码行数:10,代码来源:catall.py
示例3: test_adjoining_links
def test_adjoining_links(self):
    """Test getting and replacing categories joined without newlines.

    Verifies that category extraction is unaffected by removing the line
    separator, and that round-tripping through replaceCategoryLinks with
    an empty separator reproduces the input text.
    """
    cats_std = textlib.getCategoryLinks(self.old, site=self.site)
    old = self.old.replace(config.LS, "")
    cats = textlib.getCategoryLinks(old, site=self.site)
    self.assertEqual(cats_std, cats)
    sep = config.LS
    config.line_separator = ""  # use an empty separator temporarily
    try:
        new = textlib.replaceCategoryLinks(old, cats, site=self.site)
    finally:
        # Restore the default separator even on failure so that other
        # tests are not polluted by the empty separator.
        config.line_separator = sep
    self.assertEqual(old, new)
开发者ID:hasteur,项目名称:pywikibot_scripts,代码行数:11,代码来源:textlib_tests.py
示例4: run
def run(self):
    """Run the bot.

    For every generated page, look up the page of the same title on the
    Commons image repository and, if it exists and no commons/commonscat
    template is present yet, append the matching template above the
    category links and save the page.
    """
    # Both an action and a generator are required; bail out otherwise.
    if not all((self.getOption('action'), self.generator)):
        return
    # catmode selects Category handling; otherwise plain Page handling.
    catmode = (self.getOption('action') == 'categories')
    for page in self.generator:
        try:
            self.current_page = page
            commons = page.site.image_repository()
            # Pick pywikibot.Page or pywikibot.Category by catmode.
            commonspage = getattr(pywikibot,
                                  ('Page', 'Category')[catmode]
                                  )(commons, page.title())
            try:
                # Raises NoPage if the Commons page does not exist.
                commonspage.get(get_redirect=True)
                pagetitle = commonspage.title(withNamespace=not catmode)
                if page.title() == pagetitle:
                    oldText = page.get()
                    text = oldText

                    # for Commons/Commonscat template
                    s = self.findTemplate.search(text)
                    s2 = getattr(self, 'findTemplate%d'
                                 % (2, 3)[catmode]).search(text)
                    if s or s2:
                        pywikibot.output(u'** Already done.')
                    else:
                        # Append {{commons|...}} or {{commonscat|...}}
                        # and re-place the category links after it.
                        cats = textlib.getCategoryLinks(text,
                                                        site=page.site)
                        text = textlib.replaceCategoryLinks(
                            u'%s{{commons%s|%s}}'
                            % (text, ('', 'cat')[catmode], pagetitle),
                            cats, site=page.site)
                        comment = i18n.twtranslate(
                            page.site, 'commons_link%s-template-added'
                            % ('', '-cat')[catmode])
                        try:
                            self.userPut(page, oldText, text,
                                         summary=comment)
                        except pywikibot.EditConflict:
                            pywikibot.output(
                                u'Skipping %s because of edit conflict'
                                % page.title())
            except pywikibot.NoPage:
                # The Commons counterpart is missing.
                pywikibot.output(u'%s does not exist in Commons'
                                 % page.__class__.__name__)
        except pywikibot.NoPage:
            pywikibot.output(u'Page %s does not exist' % page.title())
        except pywikibot.IsRedirectPage:
            pywikibot.output(u'Page %s is a redirect; skipping.'
                             % page.title())
        except pywikibot.LockedPage:
            pywikibot.output(u'Page %s is locked' % page.title())
开发者ID:magul,项目名称:pywikibot-core,代码行数:54,代码来源:commons_link.py
示例5: test_adjoining_links
def test_adjoining_links(self):
    """Test getting and replacing adjacent categories."""
    cats_std = textlib.getCategoryLinks(self.old, site=self.site)
    old = self.old.replace(config.LS, '')
    cats = textlib.getCategoryLinks(old, site=self.site)
    self.assertEqual(cats_std, cats)
    sep = config.LS
    config.line_separator = ''  # use an empty separator temporarily
    try:
        new = textlib.replaceCategoryLinks(old, cats, site=self.site)
    finally:
        # Restore the default separator even if replaceCategoryLinks
        # raises, so the global config is never left modified.
        config.line_separator = sep
    self.assertEqual(old, new)
开发者ID:metakgp,项目名称:batman,代码行数:12,代码来源:textlib_tests.py
示例6: replace_default_cat_with_new_categories_in_image_text
def replace_default_cat_with_new_categories_in_image_text(
        old_text, base_category, new_categories):
    """Add new categories to page text and remove any base_category.

    Raises NoCategoryToAddException when there is nothing to add, so the
    base category is never stripped without a replacement.
    """
    if not new_categories:
        # No categories to add. We do not want to remove the base one,
        raise NoCategoryToAddException()
    # Strip the base category first, then append the new ones on top of
    # whatever categories remain (addOnly keeps the existing links).
    stripped_text = textlib.replaceCategoryInPlace(
        old_text, base_category, None)
    return textlib.replaceCategoryLinks(
        stripped_text, new_categories, addOnly=True)
开发者ID:wikimedia,项目名称:labs-tools-heritage,代码行数:13,代码来源:categorize_images.py
示例7: addCategory
def addCategory(site, page, cat):
    """Add category ``cat`` to the page unless it is already present.

    Returns True when the page was updated, False when the category was
    already there.
    """
    old_text = page.text
    cats = textlib.getCategoryLinks(old_text)
    catpl = pywikibot.Category(site, cat)
    # Guard clause: nothing to do when the category already exists.
    if catpl in cats:
        print("\t'" + cat + "' already in page categories")
        return False
    print("\t'" + cat + "' not in page categories. Adding")
    cats.append(catpl)
    new_text = textlib.replaceCategoryLinks(page.text, cats, site=site)
    userPut(page, old_text, new_text, minor=True, botflag=True)
    return True
开发者ID:WikiToLearn,项目名称:PDFCheck,代码行数:15,代码来源:check.py
示例8: treat
def treat(self, page):
    """Process one page.

    Resolves redirects (or skips them, depending on configuration),
    loads or creates the page text, and appends ``self.newcat`` to the
    page's categories if it is not already present.
    """
    if page.isRedirectPage():
        # if it's a redirect use the redirect target instead
        redirTarget = page.getRedirectTarget()
        if self.follow_redirects:
            self.current_page = redirTarget
        else:
            pywikibot.warning(
                "Page %s is a redirect to %s; skipping."
                % (page.title(asLink=True), redirTarget.title(asLink=True))
            )
            # loading it will throw an error if we don't jump out before
            return
    else:
        self.current_page = page
    if self.current_page.exists():
        # Load the page
        text = self.current_page.text
    elif self.create:
        pywikibot.output("Page %s doesn't exist yet; creating."
                         % (self.current_page.title(asLink=True)))
        text = ""
    else:
        pywikibot.output("Page %s does not exist; skipping."
                         % self.current_page.title(asLink=True))
        return
    # store old text, so we don't have reload it every time
    old_text = text
    cats = textlib.getCategoryLinks(text)
    pywikibot.output("Current categories:")
    for cat in cats:
        pywikibot.output("* %s" % cat.title())
    catpl = pywikibot.Category(self.current_page.site, self.newcat)
    if catpl in cats:
        pywikibot.output("%s is already in %s."
                         % (self.current_page.title(), catpl.title()))
    else:
        if self.sort:
            # NOTE(review): presumably sorts the new category link by the
            # subject's last name — confirm against sorted_by_last_name.
            catpl = self.sorted_by_last_name(catpl, self.current_page)
        pywikibot.output("Adding %s" % catpl.title(asLink=True))
        cats.append(catpl)
        text = textlib.replaceCategoryLinks(text, cats,
                                            site=self.current_page.site)
        comment = self.comment
        if not comment:
            # Fall back to a translated default edit summary.
            comment = i18n.twtranslate(
                self.current_page.site, "category-adding",
                {"newcat": catpl.title(withNamespace=False)}
            )
        try:
            self.userPut(self.current_page, old_text, text,
                         summary=comment, minor=True, botflag=True)
        except pywikibot.PageSaveRelatedError as error:
            pywikibot.output("Page %s not saved: %s"
                             % (self.current_page.title(asLink=True),
                                error))
开发者ID:emijrp,项目名称:pywikibot-core,代码行数:48,代码来源:category.py
示例9: set_category_status
def set_category_status(site, page, cat, status):
    """Ensure the category is present (status truthy) or absent (falsy).

    Saves the page only when the text actually changed; returns True on
    save, False otherwise.
    """
    old_text = page.text
    cats = textlib.getCategoryLinks(old_text)
    catpl = pywikibot.Category(site, cat)
    already_there = catpl in cats
    if status and not already_there:
        cats.append(catpl)
    elif not status and already_there:
        cats.remove(catpl)
    text = textlib.replaceCategoryLinks(page.text, cats, site=site)
    if old_text == text:
        return False
    page.text = text
    page.save(minor=True, botflag=True)
    return True
开发者ID:WikiToLearn,项目名称:pywikibot,代码行数:17,代码来源:wtlpywikibot.py
示例10: include
def include(pl, checklinks=True, realinclude=True, linkterm=None, summary=''):
    """Add page ``pl`` to the working category and queue linked pages.

    NOTE(review): relies on module-level globals (mysite, workingcat,
    parentcats, removeparent, checkforward, checkbackward, needcheck,
    tocheck, checked) defined elsewhere in makecat.py — verify there.
    """
    cl = checklinks
    if linkterm:
        # A sort key was given: build a category object carrying it.
        actualworkingcat = pywikibot.Category(mysite, workingcat.title(),
                                              sortKey=linkterm)
    else:
        actualworkingcat = workingcat
    if realinclude:
        try:
            text = pl.get()
        except pywikibot.NoPage:
            pass
        except pywikibot.IsRedirectPage:
            # Still check links of redirects, but don't edit them.
            cl = True
            pass
        else:
            cats = [x for x in pl.categories()]
            if workingcat not in cats:
                # NOTE(review): this second listing duplicates the one
                # above — looks redundant; confirm before simplifying.
                cats = [x for x in pl.categories()]
                for c in cats:
                    if c in parentcats:
                        if removeparent:
                            # Replace the parent category in place.
                            pl.change_category(actualworkingcat,
                                               summary=summary)
                            break
                else:
                    # for/else: runs only when no parent category
                    # triggered a break above — append the working cat.
                    pl.put(textlib.replaceCategoryLinks(
                        text, cats + [actualworkingcat], site=pl.site),
                        summary=summary)
    if cl:
        # Queue linked and/or referring pages for later checking,
        # skipping any page already seen.
        if checkforward:
            for page2 in pl.linkedPages():
                if needcheck(page2):
                    tocheck.append(page2)
                    checked[page2] = page2
        if checkbackward:
            for refPage in pl.getReferences():
                if needcheck(refPage):
                    tocheck.append(refPage)
                    checked[refPage] = refPage
开发者ID:hasteur,项目名称:g13bot_tools_new,代码行数:40,代码来源:makecat.py
示例11: run
def run(self):
    """Start the bot.

    Iterates the page generator, applies the configured replacements
    (repeatedly, when recursive), optionally appends ``self.addedCat``,
    shows a diff and asks the user how to proceed unless running with
    the 'always' option.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            pywikibot.output("Skipping %s because the title is on the exceptions list." % page.title(asLink=True))
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not page.canBeEdited():
                pywikibot.output("You can't edit page %s" % page.title(asLink=True))
                continue
        except pywikibot.NoPage:
            pywikibot.output("Page %s not found" % page.title(asLink=True))
            continue
        applied = set()
        new_text = original_text
        # Interactive loop: repeats only when the user edits the text or
        # reloads it from the browser; every other path breaks out.
        while True:
            if self.isTextExcepted(new_text):
                pywikibot.output(
                    "Skipping %s because it contains text "
                    "that is on the exceptions list." % page.title(asLink=True)
                )
                break
            last_text = None
            # Apply replacements until a fixed point (when recursive).
            while new_text != last_text:
                last_text = new_text
                new_text = self.apply_replacements(last_text, applied, page)
                if not self.recursive:
                    break
            if new_text == original_text:
                pywikibot.output("No changes were necessary in %s" % page.title(asLink=True))
                break
            if hasattr(self, "addedCat"):
                # Fetch only categories in wikitext, otherwise the others will
                # be explicitly added.
                cats = textlib.getCategoryLinks(new_text, site=page.site)
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = textlib.replaceCategoryLinks(new_text, cats, site=page.site)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(color_format("\n\n>>> {lightpurple}{0}{default} <<<", page.title()))
            pywikibot.showDiff(original_text, new_text)
            if self.getOption("always"):
                # Defer the save to the 'always' branch below the loop.
                break
            choice = pywikibot.input_choice(
                "Do you want to accept these changes?",
                [("Yes", "y"), ("No", "n"), ("Edit", "e"), ("open in Browser", "b"), ("all", "a")],
                default="N",
            )
            if choice == "e":
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == "b":
                pywikibot.bot.open_webbrowser(page)
                try:
                    # Force-reload in case the user edited it on-wiki.
                    original_text = page.get(get_redirect=True, force=True)
                except pywikibot.NoPage:
                    pywikibot.output("Page %s has been deleted." % page.title())
                    break
                new_text = original_text
                continue
            if choice == "a":
                self.options["always"] = True
            if choice == "y":
                page.text = new_text
                # NOTE(review): 'async' became a reserved word in Python
                # 3.7 — this code targets older Python versions.
                page.save(
                    summary=self.generate_summary(applied), async=True, callback=self._count_changes, quiet=True
                )
                # Drain and report results of finished async saves.
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output("Page %s%s saved" % (proc_title, "" if res else " not"))
            # choice must be 'N'
            break
        if self.getOption("always") and new_text != original_text:
            try:
                page.text = new_text
                page.save(summary=self.generate_summary(applied), callback=self._count_changes, quiet=True)
            except pywikibot.EditConflict:
                pywikibot.output("Skipping %s because of edit conflict" % (page.title(),))
            except pywikibot.SpamfilterError as e:
                pywikibot.output("Cannot change %s because of blacklist entry %s" % (page.title(), e.url))
            except pywikibot.LockedPage:
                pywikibot.output("Skipping %s (locked page)" % (page.title(),))
            except pywikibot.PageNotSaved as error:
                pywikibot.output("Error putting page: %s" % (error.args,))
            # Periodically drain the async-save result queue.
            if self._pending_processed_titles.qsize() > 50:
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output("Page %s%s saved" % (proc_title, "" if res else " not"))
开发者ID:PersianWikipedia,项目名称:pywikibot-core,代码行数:97,代码来源:replace.py
示例12: run
def run(self):
"""Start the bot."""
# Run the generator which will yield Pages which might need to be
# changed.
for page in self.generator:
if self.isTitleExcepted(page.title()):
pywikibot.output(
u'Skipping {0!s} because the title is on the exceptions list.'.format(page.title(asLink=True)))
continue
try:
# Load the page's text from the wiki
original_text = page.get(get_redirect=True)
if not page.canBeEdited():
pywikibot.output(u"You can't edit page {0!s}".format(page.title(asLink=True)))
continue
except pywikibot.NoPage:
pywikibot.output(u'Page {0!s} not found'.format(page.title(asLink=True)))
continue
applied = set()
new_text = original_text
while True:
if self.isTextExcepted(new_text):
pywikibot.output(u'Skipping %s because it contains text '
u'that is on the exceptions list.'
% page.title(asLink=True))
break
last_text = None
while new_text != last_text:
last_text = new_text
new_text = self.apply_replacements(last_text, applied,
page)
if not self.recursive:
break
if new_text == original_text:
pywikibot.output(u'No changes were necessary in {0!s}'.format(page.title(asLink=True)))
break
if hasattr(self, 'addedCat'):
# Fetch only categories in wikitext, otherwise the others will
# be explicitly added.
cats = textlib.getCategoryLinks(new_text, site=page.site)
if self.addedCat not in cats:
cats.append(self.addedCat)
new_text = textlib.replaceCategoryLinks(new_text,
cats,
site=page.site)
# Show the title of the page we're working on.
# Highlight the title in purple.
pywikibot.output(color_format(
'\n\n>>> {lightpurple}{0}{default} <<<', page.title()))
pywikibot.showDiff(original_text, new_text)
if self.getOption('always'):
break
choice = pywikibot.input_choice(
u'Do you want to accept these changes?',
[('Yes', 'y'), ('No', 'n'), ('Edit', 'e'),
('open in Browser', 'b'), ('all', 'a')],
default='N')
if choice == 'e':
editor = editarticle.TextEditor()
as_edited = editor.edit(original_text)
# if user didn't press Cancel
if as_edited and as_edited != new_text:
new_text = as_edited
continue
if choice == 'b':
pywikibot.bot.open_webbrowser(page)
try:
original_text = page.get(get_redirect=True, force=True)
except pywikibot.NoPage:
pywikibot.output(u'Page {0!s} has been deleted.'.format(page.title()))
break
new_text = original_text
continue
if choice == 'a':
self.options['always'] = True
if choice == 'y':
page.text = new_text
page.save(summary=self.generate_summary(applied), async=True,
callback=self._count_changes, quiet=True)
while not self._pending_processed_titles.empty():
proc_title, res = self._pending_processed_titles.get()
pywikibot.output('Page {0!s}{1!s} saved'.format(proc_title, '' if res else ' not'))
# choice must be 'N'
break
if self.getOption('always') and new_text != original_text:
try:
page.text = new_text
page.save(summary=self.generate_summary(applied),
callback=self._count_changes, quiet=True)
except pywikibot.EditConflict:
pywikibot.output(u'Skipping {0!s} because of edit conflict'.format(page.title()))
except pywikibot.SpamfilterError as e:
pywikibot.output(
u'Cannot change {0!s} because of blacklist entry {1!s}'.format(page.title(), e.url))
except pywikibot.LockedPage:
pywikibot.output(u'Skipping {0!s} (locked page)'.format(page.title()))
except pywikibot.PageNotSaved as error:
pywikibot.output(u'Error putting page: {0!s}'.format(error.args))
if self._pending_processed_titles.qsize() > 50:
while not self._pending_processed_titles.empty():
#.........这里部分代码省略.........
开发者ID:runt18,项目名称:pywikibot-core,代码行数:101,代码来源:replace.py
示例13: standardizePageFooter
def standardizePageFooter(self, text):
    """
    Standardize page footer.

    Makes sure that interwiki links and categories are put
    into the correct position and into the right order. This
    combines the old instances of standardizeInterwiki
    and standardizeCategories.

    The page footer consists of the following parts
    in that sequence:
    1. categories
    2. additional information depending on the local site policy
    3. interwiki
    """
    categories = []
    interwiki_links = []

    # get categories
    if not self.template:
        categories = textlib.getCategoryLinks(text, site=self.site)

    if not self.talkpage:
        subpage = False
        if self.template:
            try:
                # moved_links maps site codes to (template, location);
                # only the location is needed here.
                tmpl, loc = moved_links[self.site.code]
                del tmpl
            except KeyError:
                loc = None
            if loc is not None and loc in self.title:
                subpage = True

        # get interwiki
        interwiki_links = textlib.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)

        # remove interwiki
        text = textlib.removeLanguageLinks(text, site=self.site)

    # add categories, main to top
    if categories:
        # TODO: Sort categories in alphabetic order, e.g. using
        # categories.sort()? (T100265)
        # TODO: Get main categories from Wikidata?
        main = pywikibot.Category(self.site, 'Category:' + self.title,
                                  sort_key=' ')
        if main in categories:
            # Move the category matching the page title to the front.
            categories.pop(categories.index(main))
            categories.insert(0, main)

        text = textlib.replaceCategoryLinks(text, categories,
                                            site=self.site)

    # add interwiki
    if interwiki_links:
        text = textlib.replaceLanguageLinks(text, interwiki_links,
                                            site=self.site,
                                            template=self.template,
                                            template_subpage=subpage)

    return text
开发者ID:Zeffar,项目名称:Elobot,代码行数:61,代码来源:cosmetic_changes.py
示例14: test_standard_links
def test_standard_links(self):
    """Test getting and replacing categories."""
    extracted = textlib.getCategoryLinks(self.old, site=self.site)
    round_tripped = textlib.replaceCategoryLinks(self.old, extracted,
                                                 site=self.site)
    self.assertEqual(self.old, round_tripped)
开发者ID:metakgp,项目名称:batman,代码行数:5,代码来源:textlib_tests.py
示例15: run
def run(self):
    """Start the bot.

    Older variant of the replace-bot main loop: applies replacements,
    optionally appends ``self.addedCat``, shows a diff, and asks the
    user unless ``self.acceptall`` is set.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            pywikibot.output(
                u'Skipping %s because the title is on the exceptions list.'
                % page.title(asLink=True))
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not page.canBeEdited():
                pywikibot.output(u"You can't edit page %s"
                                 % page.title(asLink=True))
                continue
        except pywikibot.NoPage:
            pywikibot.output(u'Page %s not found' % page.title(asLink=True))
            continue
        applied = set()
        new_text = original_text
        # Interactive loop; repeats only on 'edit' / 'browser' choices.
        while True:
            if self.isTextExcepted(new_text):
                pywikibot.output(u'Skipping %s because it contains text '
                                 u'that is on the exceptions list.'
                                 % page.title(asLink=True))
                break
            last_text = None
            # Apply replacements until a fixed point (when recursive).
            while new_text != last_text:
                last_text = new_text
                new_text = self.apply_replacements(last_text, applied)
                if not self.recursive:
                    break
            if new_text == original_text:
                pywikibot.output(u'No changes were necessary in %s'
                                 % page.title(asLink=True))
                break
            if hasattr(self, "addedCat"):
                cats = page.categories(nofollow_redirects=True)
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = textlib.replaceCategoryLinks(new_text,
                                                            cats,
                                                            site=page.site)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                             % page.title())
            pywikibot.showDiff(original_text, new_text)
            if self.acceptall:
                # Defer the save to the acceptall branch below the loop.
                break
            choice = pywikibot.input_choice(
                u'Do you want to accept these changes?',
                [('Yes', 'y'), ('No', 'n'), ('Edit', 'e'),
                 ('open in Browser', 'b'), ('all', 'a')],
                default='N')
            if choice == 'e':
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == 'b':
                # Open the page in the user's browser and wait for them
                # to finish before force-reloading the wikitext.
                webbrowser.open("http://%s%s" % (
                    page.site.hostname(),
                    page.site.nice_get_address(page.title(asUrl=True))
                ))
                i18n.input('pywikibot-enter-finished-browser')
                try:
                    original_text = page.get(get_redirect=True, force=True)
                except pywikibot.NoPage:
                    pywikibot.output(u'Page %s has been deleted.'
                                     % page.title())
                    break
                new_text = original_text
                continue
            if choice == 'a':
                self.acceptall = True
            if choice == 'y':
                page.put_async(new_text, self.generate_summary(applied), callback=self.count_changes)
            # choice must be 'N'
            break
        if self.acceptall and new_text != original_text:
            try:
                page.put(new_text, self.generate_summary(applied), callback=self.count_changes)
            except pywikibot.EditConflict:
                pywikibot.output(u'Skipping %s because of edit conflict'
                                 % (page.title(),))
            except pywikibot.SpamfilterError as e:
                pywikibot.output(
                    u'Cannot change %s because of blacklist entry %s'
                    % (page.title(), e.url))
            except pywikibot.LockedPage:
                pywikibot.output(u'Skipping %s (locked page)'
                                 % (page.title(),))
            except pywikibot.PageNotSaved as error:
                pywikibot.output(u'Error putting page: %s'
                                 % (error.args,))
开发者ID:leogregianin,项目名称:pywikibot-core,代码行数:100,代码来源:replace.py
示例16: standardizePageFooter
def standardizePageFooter(self, text):
"""
Standardize page footer.
Makes sure that interwiki links, categories and star templates are
put to the correct position and into the right order. This combines the
old instances standardizeInterwiki and standardizeCategories
The page footer has the following section in that sequence:
1. categories
2. ## TODO: template beyond categories ##
3. additional information depending on local site policy
4. stars templates for featured and good articles
5. interwiki links
"""
starsList = [
u'bueno',
u'bom interwiki',
u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
u'destacado', u'destaca[tu]',
u'enllaç[ _]ad',
u'enllaz[ _]ad',
u'leam[ _]vdc',
u'legătură[ _]a[bcf]',
u'liamm[ _]pub',
u'lien[ _]adq',
u'lien[ _]ba',
u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
u'liên[ _]kết[ _]chọn[ _]lọc',
u'ligam[ _]adq',
u'ligazón[ _]a[bd]',
u'ligoelstara',
u'ligoleginda',
u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]', u'link[ _]km',
u'link[ _]sm', u'linkfa',
u'na[ _]lotura',
u'nasc[ _]ar',
u'tengill[ _][úg]g',
u'ua',
u'yüm yg',
u'רא',
u'وصلة مقالة جيدة',
u'وصلة مقالة مختارة',
]
categories = None
interwikiLinks = None
allstars = []
# The PyWikipediaBot is no longer allowed to touch categories on the
# German Wikipedia. See
# https://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Position_der_Personendaten_am_.22Artikelende.22
# ignoring nn-wiki of cause of the comment line above iw section
if not self.template and '{{Personendaten' not in text and \
'{{SORTIERUNG' not in text and '{{DEFAULTSORT' not in text and \
self.site.code not in ('et', 'it', 'bg', 'ru'):
categories = textlib.getCategoryLinks(text, site=self.site)
if not self.talkpage: # and pywikibot.calledModuleName() <> 'interwiki':
subpage = False
if self.template:
loc = None
try:
tmpl, loc = moved_links[self.site.code]
del tmpl
except KeyError:
pass
if loc is not None and loc in self.title:
subpage = True
interwikiLinks = textlib.getLanguageLinks(
text, insite=self.site, template_subpage=subpage)
# Removing the interwiki
text = textlib.removeLanguageLinks(text, site=self.site)
# Removing the stars' issue
starstext = textlib.removeDisabledParts(text)
for star in starsList:
regex = re.compile(r'(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
% star, re.I)
found = regex.findall(starstext)
if found != []:
text = regex.sub('', text)
allstars += found
# Adding categories
if categories:
# TODO: Sorting categories in alphabetic order.
# e.g. using categories.sort()
# TODO: Taking main cats to top
# for name in categories:
# if re.search(u"(.+?)\|(.{,1}?)",name.title()) or name.title()==name.title().split(":")[0]+title:
# categories.remove(name)
# categories.insert(0, name)
text = textlib.replaceCategoryLinks(text, categories,
site=self.site)
# Adding stars templates
if allstars:
text = text.strip() + self.site.family.interwiki_text_separator
allstars.sort()
#.........这里部分代码省略.........
开发者ID:skamithi,项目名称:pywikibot-core,代码行数:101,代码来源:cosmetic_changes.py
示例17: apply
def apply(self, text, page):
    """Remove duplicate category links from the page text."""
    categories = textlib.getCategoryLinks(text)
    # Only rewrite the text when at least one duplicate exists.
    if len(set(categories)) < len(categories):
        deduplicate(categories)
        text = textlib.replaceCategoryLinks(text, categories, page.site)
    return text
开发者ID:matejsuchanek,项目名称:pywikibot-scripts,代码行数:6,代码来源:checkwiki_errors.py
示例18: standardizePageFooter
def standardizePageFooter(self, text):
    """
    Standardize page footer.

    Makes sure that interwiki links and categories are put to the correct
    position and into the right order. This combines the old instances
    standardizeInterwiki and standardizeCategories.

    The page footer has the following section in that sequence:
    1. categories
    2. ## TODO: template beyond categories ##
    3. additional information depending on local site policy
    4. interwiki links
    """
    categories = None
    interwikiLinks = None

    # Pywikibot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # https://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and '{{Personendaten' not in text and \
       '{{SORTIERUNG' not in text and '{{DEFAULTSORT' not in text and \
       self.site.code not in ('et', 'it', 'bg', 'ru'):
        categories = textlib.getCategoryLinks(text, site=self.site)

    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        subpage = False
        if self.template:
            loc = None
            try:
                # moved_links maps site codes to (template, location);
                # only the location is needed here.
                tmpl, loc = moved_links[self.site.code]
                del tmpl
            except KeyError:
                pass
            if loc is not None and loc in self.title:
                subpage = True
        interwikiLinks = textlib.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)

        # Removing the interwiki
        text = textlib.removeLanguageLinks(text, site=self.site)

    # Adding categories
    if categories:
        # TODO: Sorting categories in alphabetic order.
        # e.g. using categories.sort()
        # TODO: Taking main cats to top
        # for name in categories:
        #     if (re.search(u"(.+?)\|(.{,1}?)",name.title()) or
        #             name.title() == name.title().split(":")[0] + title):
        #         categories.remove(name)
        #         categories.insert(0, name)
        text = textlib.replaceCategoryLinks(text, categories,
                                            site=self.site)
    # Adding the interwiki
    if interwikiLinks:
        text = textlib.replaceLanguageLinks(text, interwikiLinks,
                                            site=self.site,
                                            template=self.template,
                                            template_subpage=subpage)
    return text
开发者ID:PersianWikipedia,项目名称:pywikibot-core,代码行数:63,代码来源:cosmetic_changes.py
示例19: add_text
def add_text(page, addText, summary=None, regexSkip=None,
regexSkipUrl=None, always=False, up=False, putText=True,
oldTextGiven=None, reorderEnabled=True, create=False):
"""
Add text to a page.
@rtype: tuple of (text, newtext, always)
"""
site = page.site
if not summary:
summary = i18n.twtranslate(site, 'add_text-adding',
{'adding': addText[:200]})
# When a page is tagged as "really well written" it has a star in the
# interwiki links. This is a list of all the templates used (in regex
# format) to make the stars appear.
errorCount = 0
if putText:
pywikibot.output(u'Loading %s...' % page.title())
if oldTextGiven is None:
try:
text = page.get()
except pywikibot.NoPage:
if create:
pywikibot.output(u"%s doesn't exist, creating it!"
% page.title())
text = u''
else:
pywikibot.output(u"%s doesn't exist, skip!" % page.title())
return (False, False, always)
except pywikibot.IsRedirectPage:
pywikibot.output(u"%s is a redirect, skip!" % page.title())
return (False, False, always)
else:
text = oldTextGiven
# Understand if the bot has to skip the page or not
# In this way you can use both -except and -excepturl
if regexSkipUrl is not None:
url = page.full_url()
result = re.findall(regexSkipUrl, site.getUrl(url))
if result != []:
pywikibot.output(
'Exception! regex (or word) used with -exceptUrl '
'is in the page. Skip!\n'
'Match was: %s' % result)
return (False, False, always)
if regexSkip is not None:
result = re.findall(regexSkip, text)
if result != []:
pywikibot.output(
'Exception! regex (or word) used with -except '
'is in the page. Skip!\n'
'Match was: %s' % result)
return (False, False, always)
# If not up, text put below
if not up:
newtext = text
# Translating the \\n into binary \n
addText = addText.replace('\\n', config.line_separator)
if (reorderEnabled):
# Gett
|
请发表评论