# Script 1: list new users (from the 'newusers' log) whose user-talk page
# does not exist yet, and count them.  Read-only — no pages are edited.
from pywikibot import Site, Page

site = Site()
log_new_users = site.logevents('newusers', total=500)

count = 0
for entry in log_new_users:
    user = entry['user']
    # 'Thảo luận Thành viên:' is the user-talk namespace on vi.wikipedia.
    page = Page(site, 'Thảo luận Thành viên:' + user)
    if not page.exists():
        print(page.title())
        count += 1
print('Number of created welcome pages: ' + str(count))
# print(vars(entry))
# print(user)
# Script 2: welcome new users by creating their user-talk page with the
# {{thế:welcome12}} template.  Existing pages are left untouched.
#
# Fixes vs. the original paste:
#   * `await` is only legal inside a coroutine — the loop now lives in an
#     async main() driven by asyncio.run().
#   * `count` previously incremented even when the save was rejected by the
#     title blacklist; it now counts only pages whose save was queued.
import asyncio

from pywikibot import Site, Page, TitleblacklistError


async def main():
    site = Site()
    log_new_users = site.logevents('newusers', total=500)
    count = 0
    for entry in log_new_users:
        user = entry['user']
        page = Page(site, 'Thảo luận Thành viên:' + user)
        if not page.exists():
            page.text = '{{thế:welcome12}}\n[[Thành viên:P.T.Đ|P.T.Đ]] ([[Thảo luận Thành viên:P.T.Đ|thảo luận]]) ~~~~~'
            try:
                page.save(summary='Hoan nghênh thành viên mới!',
                          minor=True, botflag=True, asynchronous=True)
            except TitleblacklistError:
                print('Page [[' + page.title() + ']] is title-blacklisted.')
            else:
                count += 1
            # Throttle between edits so we don't hammer the API.
            await asyncio.sleep(1)
    print('Number of created welcome pages: ' + str(count))


asyncio.run(main())
# fix_bug_iabot.py
# Script 3: repair a malformed fragment that IABot left in articles by
# replacing the broken text 'no" == DeadURL or "không' with 'no'.
import pywikibot as pw

site = pw.Site()

# Variables
keyword = 'insource:/DeadURL/'
oldtext = 'no" == DeadURL or "không'
newtext = 'no'
sumtext = 'Fix bug of IABot.'

# Actions
search_results = site.search(keyword, namespaces=0, total=10)
for result in search_results:
    page = pw.Page(site, result.title())
    text = page.get()
    if oldtext not in text:
        # Search index can lag behind edits; skip to avoid a null edit.
        continue
    page.text = text.replace(oldtext, newtext)
    try:
        page.save(summary=sumtext, minor=True, botflag=True)
    except pw.LockedPage:
        print('Page [[' + page.title() + ']] is locked')
# 11:31 10/01/2021: 3,551 articles match 'insource:/DeadURL/'
# Print the 26 ASCII uppercase letters, one per line.
from string import ascii_uppercase

print('\n'.join(ascii_uppercase))
# python test.py A
# Script 5: for each initial letter A-Z, count the main-namespace articles
# that still contain a 'dead-url = yes' parameter (insource regex search),
# printing one "LETTER:count" line per letter.
#
# Fixes vs. the original paste: the pre-loop site.search() whose results
# were immediately discarded is removed, along with the unused
# oldtext/newtext/sumtext variables (this script only counts; the actual
# replacement is done elsewhere).
from string import ascii_uppercase
import pywikibot as pw
import re
import sys

site = pw.Site()

for letter in ascii_uppercase:
    keyword = 'insource:/dead-*url *= *yes/ prefix:' + letter
    search_results = site.search(keyword, namespaces=0, total=None)
    # Consume the result generator; only the number of hits matters.
    count = sum(1 for _ in search_results)
    print(letter + ':' + str(count))
# Results: A:1923 B:1155 C:1927 D:984 E:717 F:389 G:439 H:667 I:195 J:308 K:491 L:645 M:1072 N:799 O:80 P:263 Q:94 R:211 S:540 T:401 U:50 V:170 W:53 X:56 Y:46 Z:30