Bruger:MGA73/sandkasse: Difference between revisions

{| class="wikitable sortable"
|-
! Wiki
! Files
! Mention
! Remarks
|-
| [[:w:en:MediaWiki:Licenses]] || 921012 || GFDL ||
|-
| [[:b:en:MediaWiki:Licenses]] || 2689 || ÷ ||
|-
| [[:n:en:MediaWiki:Licenses]] || 4564 || ÷ ||
|-
| [[:q:en:MediaWiki:Licenses]] || 0 || ÷ ||
|-
| [[:s:en:MediaWiki:Licenses]] || 16218 || GFDL ||
|-
| [[:v:en:MediaWiki:Licenses]] || 39798 || GFDL ||
|-
| [[:voy:en:MediaWiki:Licenses]] || 1824 || ÷ ||
|-
| [[:wikt:en:MediaWiki:Licenses]] || 14 || gerrit ||
|-
| [[:c:MediaWiki:Licenses]] || 107325167 || GFDL ||
|-
| [[:wmf:MediaWiki:Licenses]] || 968 || GFDL ||
|-
| [[:mw:MediaWiki:Licenses]] || 2783 || GFDL ||
|-
| [[:m:MediaWiki:Licenses]] || 1973 || ÷ ||
|-
| [[:w:roa-rup:MediaWiki:Licenses]] || 0 || gerrit ||
|-
| [[:wikt:roa-rup:MediaWiki:Licenses]] || 0 || ÷ ||
|-
| [[:w:roa-tara:MediaWiki:Licenses]] || 155 || gerrit ||
|-
| [[:s:oldwikisource:MediaWiki:Licenses]] || 979 || ÷ ||
|-
| [[:species:MediaWiki:Licenses]] || 0 || ÷ ||
|-
| [[:wikifunctions:MediaWiki:Licenses]] || 0 || ÷ ||
|-
| [[:wikimania:MediaWiki:Licenses]] || 27 || ÷ ||
|}

import requests
from bs4 import BeautifulSoup
import pywikibot

# Fetch input data
def fetch_wiki_list():
    """
    Fetches the list of wiki URLs from a Wikipedia page.
    """
    site = pywikibot.Site("da", "wikipedia")  # Danish Wikipedia (change to your language)
    page_title = "User:MGA73/GFDL-hunter-wikis"  # Page with wiki list
    page = pywikibot.Page(site, page_title)
    # One domain per line; skip blank lines so they are not treated as domains
    return [line.strip() for line in page.text.splitlines() if line.strip()]

def fetch_gfdl_variants():
    """
    Fetches the list of GFDL variant names from a Wikipedia page.
    """
    site = pywikibot.Site("da", "wikipedia")  # Danish Wikipedia (change to your language)
    page_title = "User:MGA73/GFDL-hunter-L-var"  # Page with GFDL variants
    page = pywikibot.Page(site, page_title)
    # One variant name per line; skip blank lines
    return [line.strip() for line in page.text.splitlines() if line.strip()]

def get_file_count(domain):
    """
    Gets the number of files from the MediaWiki API of the wiki.
    """
    try:
        # Construct the API URL
        api_url = f"https://{domain}/w/api.php"
        params = {
            'action': 'query',
            'meta': 'siteinfo',
            'siprop': 'statistics',
            'format': 'json'
        }

        # Make the request to the API
        response = requests.get(api_url, params=params, timeout=30)
        response.raise_for_status()
        data = response.json()

        # The siteinfo statistics report the uploaded-file count under "images";
        # fall back to "files" in case a wiki exposes it under that key
        stats = data['query']['statistics']
        file_count = stats.get('images', stats.get('files', 'Unknown'))
        return file_count

    except (requests.exceptions.RequestException, ValueError, KeyError):
        return 'Unknown'  # Return 'Unknown' if there's an issue
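# For reference, a siteinfo/statistics response looks roughly like this
# (abridged, with illustrative values only):
#   {"query": {"statistics": {"pages": 1000, "articles": 500, "edits": 9000,
#                             "images": 42, "users": 100, ...}}}
# where "images" is the uploaded-file count that get_file_count() reads above.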
 
# Process each wiki and generate interwiki prefix
def generate_prefix_and_check(wikis, variants):
    results = []
    for domain in wikis:
        parts = domain.split('.')
        language_code = parts[0]
        project_name = parts[1]

        # Define the interwiki prefix based on the project name and language code
        if project_name == "wikipedia":
            prefix_iw = f"w:{language_code}"
        elif project_name == "wikibooks":
            prefix_iw = f"b:{language_code}"
        elif project_name == "wikinews":
            prefix_iw = f"n:{language_code}"
        elif project_name == "wikiquote":
            prefix_iw = f"q:{language_code}"
        elif project_name == "wikisource":
            prefix_iw = f"s:{language_code}"
        elif project_name == "wikiversity":
            prefix_iw = f"v:{language_code}"
        elif project_name == "wikivoyage":
            prefix_iw = f"voy:{language_code}"
        elif project_name == "wiktionary":
            prefix_iw = f"wikt:{language_code}"
        elif language_code == "commons":
            prefix_iw = "c"
        elif language_code == "foundation":
            prefix_iw = "wmf"
        elif language_code == "mediawiki":
            prefix_iw = "mw"
        elif language_code == "meta":
            prefix_iw = "m"
        elif language_code == "species":
            prefix_iw = "species"
        elif language_code == "wikifunctions":
            prefix_iw = "wikifunctions"
        elif language_code == "wikimania":
            prefix_iw = "wikimania"
        elif language_code == "wikisource":
            prefix_iw = "s:oldwikisource"
        else:
            # Handle any domain not explicitly listed
            prefix_iw = f"{project_name}:{language_code}"

        # Fetch file count and GFDL mention
        file_count = get_file_count(domain)
        result = check_gfdl_mention(domain, variants)

        # Store the processed domain, prefix, file count, and result
        results.append((domain, prefix_iw, file_count, result))

    return results
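# Illustrative examples of the prefix mapping above (assuming input domains
# of the form "language.project.org" or "project.wikimedia.org"):
#   en.wikipedia.org      -> w:en
#   de.wikisource.org     -> s:de
#   commons.wikimedia.org -> c
#   wikisource.org        -> s:oldwikisource  (multilingual Wikisource)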
 
def check_gfdl_mention(domain, variants):
    """
    Checks if any GFDL variant is mentioned on the specified wiki.
    """
    try:
        # Fetch the content from the wiki
        response = requests.get(f"https://{domain}/wiki/MediaWiki:Licenses", timeout=30)
        soup = BeautifulSoup(response.content, 'html.parser')
        content = soup.get_text().lower()

        # Check for any of the variants in the content
        for variant in variants:
            if variant.lower() in content:
                return variant  # Return the specific variant found

        return "÷"  # Use ÷ if no variant found

    except requests.exceptions.RequestException:
        return "Could not connect"  # Use "Could not connect" if there's an issue

def check_gfdl_mentions(results, variants):
    """
    Checks GFDL mentions for all domains in the results list.
    """
    final_results = []
    for domain, prefix_iw, file_count, _ in results:
        try:
            # Perform the actual GFDL mention check
            result = check_gfdl_mention(domain, variants)
            final_results.append((domain, prefix_iw, file_count, result))
        except Exception as e:
            print(f"Error checking GFDL mentions for {domain}: {e}")
            final_results.append((domain, prefix_iw, 'Unknown', "Error"))

    return final_results
 
def create_table(results):
    """
    Creates a formatted table from the results.
    """
    # Table header (columns match the cells produced per row below)
    table_header = """{| class="wikitable sortable"
|-
! Wiki
! Files
! Mention
! Remarks
"""

    # Table rows
    table_rows = []
    for domain, prefix_iw, file_count, result in results:
        row = f"|-\n| [[:{prefix_iw}:MediaWiki:Licenses]] || {file_count} || {result} ||"
        table_rows.append(row)

    # Table footer
    table_footer = "\n|}"

    # Combine everything into the table
    table_text = table_header + "\n".join(table_rows) + "\n" + table_footer

    return table_text

def save_results_to_wiki(table_text):
    """
    Saves the results to a Wikipedia page.
    """
    site = pywikibot.Site("da", "wikipedia")  # Danish Wikipedia (change to your language)
    page_title = "User:MGA73/sandkasse"  # Replace with your sandbox page title
    page = pywikibot.Page(site, page_title)

    page.text = table_text
    page.save(summary="Bot: Saving GFDL mention results", botflag=True)

def main():
    # Fetch data from Wikipedia pages
    wikis = fetch_wiki_list()
    variants = fetch_gfdl_variants()

    # Generate prefixes and file counts
    prefix_results = generate_prefix_and_check(wikis, variants)

    # Check GFDL mentions
    final_results = check_gfdl_mentions(prefix_results, variants)

    # Create the formatted table
    table_text = create_table(final_results)

    # Save the results to a Wikipedia page
    save_results_to_wiki(table_text)

if __name__ == '__main__':
    main()
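# Usage sketch (assumptions, not from the original page: Pywikibot is configured
# via user-config.py for da.wikipedia and the account ideally has the bot flag so
# botflag=True takes effect): running this file directly executes main(), which
# reads the two input pages, queries each listed wiki, and saves the resulting
# wikitable to User:MGA73/sandkasse.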