import pathlib
import typing

import orjson
import typer

app = typer.Typer()


@app.command()
def get_list_wikis(stats: pathlib.Path, urls: typing.Optional[pathlib.Path] = None):
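    """List Wikipedias that clear the article-count and depth thresholds.

    Reads a stats JSON whose "data" key holds per-wiki count rows, prints
    each wiki that qualifies, and, when a URL list is given, writes a
    filtered copy of that list containing only the qualifying wikis.
    """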
    if urls:
        texturls = urls.read_text("utf-8").split("\n")
    sites = []
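    # Each per-wiki row unpacks as: sitename, active users, admins,
    # articles, edits, files, pages, users.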
    for wiki in orjson.loads(stats.read_bytes())["data"]:
        sitename, activeusers, admins, articles, edits, files, pages, users = wiki

        sitecode = sitename.split(".")[0]
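        # Language editions that have been closed (locked for editing)
        # on Wikimedia; skip them.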
        if sitecode in [
            "aa",
            "ak",
            "cho",
            "ho",
            "hz",
            "ii",
            "kr",
            "lrc",
            "mh",
            "mus",
            "na",
            "ng",
        ]:
            continue
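        # Wikis deleted from or moved off Wikimedia (Toki Pona, Klingon,
        # Siberian, Moldovan); skip them as well.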
        elif sitecode in ["tok", "tlh", "ru-sib", "mo"]:
            continue
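        # Drop aggregate "total*" rows, empty wikis, and sites that are not
        # Wikipedias.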
        elif (
            articles == 0
            or sitecode.startswith("total")
            or not sitename.endswith("wikipedia")
        ):
            continue
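        # Depth heuristic: edits per page, scaled by the squared ratio of
        # non-article pages to articles. This resembles the "depth"
        # indicator from Wikipedia's List of Wikipedias.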
        depth = (edits / pages) * ((pages - articles) / articles) ** 2
        if articles < 100_000 or depth < 5.1:
            continue
        print(
            sitename, "depth", depth, "articles", articles, "active_users", activeusers
        )
        # Dump database names use "_" where site codes use "-"
        # (e.g. "zh-min-nan" becomes "zh_min_nan").
        sites.append(sitecode.replace("-", "_"))

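    # Keep only the URLs whose dump name belongs to a selected wiki and
    # write them to a sibling "<stem>_filtered" file.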
    if urls:
        filtered_urls = []
        for url in texturls:
            # e.g. ".../enwiki-20240101-pages-articles.xml.bz2" -> "en"
            # (the file name is illustrative).
            url_wikicode = url.split("/")[-1].split("-")[0].split("wiki")[0]
            if url_wikicode in sites:
                filtered_urls.append(url)
        urls.with_stem(urls.stem + "_filtered").write_text("\n".join(filtered_urls))


if __name__ == "__main__":
    app()
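
# Example invocation (script and file names are illustrative); with a single
# registered command, Typer exposes it directly:
#   python list_wikis.py stats.json --urls dump_urls.txt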