update_anime_ids.py
import json, os, sys
if sys.version_info[0] != 3 or sys.version_info[1] < 11:
    print("Version Error: Version: %s.%s.%s incompatible please use Python 3.11+" % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
    sys.exit(0)
# datetime.UTC only exists on Python 3.11+, so import it after the version check above so the friendly error can still print on older interpreters
from datetime import datetime, UTC
try:
    import requests
    from git import Repo
    from lxml import html
    from kometautils import KometaArgs, KometaLogger
except (ModuleNotFoundError, ImportError):
    print("Requirements Error: Requirements are not installed")
    sys.exit(0)
options = [
    {"arg": "tr", "key": "trace", "env": "TRACE", "type": "bool", "default": False, "help": "Run with extra trace logs."},
    {"arg": "lr", "key": "log-requests", "env": "LOG_REQUESTS", "type": "bool", "default": False, "help": "Run with every request logged."}
]
script_name = "Anime IDs"
base_dir = os.path.dirname(os.path.abspath(__file__))
args = KometaArgs("Kometa-Team/Anime-IDs", base_dir, options, use_nightly=False)
logger = KometaLogger(script_name, "anime_ids", os.path.join(base_dir, "logs"), is_trace=args["trace"], log_requests=args["log-requests"])
logger.screen_width = 160
logger.header(args, sub=True)
logger.separator()
logger.start()
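# Pull the three upstream sources: the Anime-Lists XML mapping, the Manami Project offline database,
# and notseteve's AnimeAggregations export (all fetched from their raw GitHub URLs)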
AniDBIDs = html.fromstring(requests.get("https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml").content)
Manami = requests.get("https://raw.githubusercontent.com/manami-project/anime-offline-database/master/anime-offline-database.json").json()
Aggregations = requests.get("https://raw.githubusercontent.com/notseteve/AnimeAggregations/main/aggregate/AnimeToExternal.json").json()
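# anime_dicts maps each AniDB ID to a dict of external IDs
# (tvdb_id, tvdb_season, tvdb_epoffset, imdb_id, mal_id, anilist_id, tmdb_show_id, tmdb_movie_id)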
anime_dicts = {}
logger.info("Scanning Anime-Lists")
for anime in AniDBIDs.xpath("//anime"):
    anidb_id = str(anime.xpath("@anidbid")[0])
    if not anidb_id:
        continue
    # Some AniDB IDs carry an "a" prefix; strip it before converting to int
    anidb_id = int(anidb_id[1:]) if anidb_id[0] == "a" else int(anidb_id)
    if anidb_id not in anime_dicts:
        anime_dicts[anidb_id] = {}
    tvdb_id = str(anime.xpath("@tvdbid")[0])
    try:
        if tvdb_id:
            anime_dicts[anidb_id]["tvdb_id"] = int(tvdb_id)
    except ValueError:
        pass
    # Anime-Lists uses defaulttvdbseason="a" for absolute episode ordering; store that as season -1
    tvdb_season = str(anime.xpath("@defaulttvdbseason")[0])
    if tvdb_season == "a":
        tvdb_season = "-1"
    try:
        if tvdb_season:
            anime_dicts[anidb_id]["tvdb_season"] = int(tvdb_season)
    except ValueError:
        pass
    try:
        anime_dicts[anidb_id]["tvdb_epoffset"] = int(str(anime.xpath("@episodeoffset")[0]))
    except ValueError:
        anime_dicts[anidb_id]["tvdb_epoffset"] = 0
    imdb_id = str(anime.xpath("@imdbid")[0])
    if imdb_id.startswith("tt"):
        anime_dicts[anidb_id]["imdb_id"] = imdb_id
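# Manami entries list their source URLs; parse them to link each AniDB ID to its MyAnimeList and AniList IDs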
logger.info("Scanning Manami-Project")
for anime in Manami["data"]:
    if "sources" not in anime:
        continue
    anidb_id = None
    mal_id = None
    anilist_id = None
    for source in anime["sources"]:
        if "anidb.net" in source:
            anidb_id = int(source.partition("anime/")[2])
        elif "myanimelist" in source:
            mal_id = int(source.partition("anime/")[2])
        elif "anilist.co" in source:
            anilist_id = int(source.partition("anime/")[2])
    if anidb_id and anidb_id in anime_dicts:
        if mal_id:
            anime_dicts[anidb_id]["mal_id"] = mal_id
        if anilist_id:
            anime_dicts[anidb_id]["anilist_id"] = anilist_id
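# AnimeAggregations acts as a fallback, only filling IMDb, MAL and TMDb IDs the earlier passes did not provide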
logger.info("Scanning AnimeAggregations")
for anidb_id, anime in Aggregations["animes"].items():
    anidb_id = int(anidb_id)
    if anidb_id not in anime_dicts:
        anime_dicts[anidb_id] = {}
    if "IMDB" in anime["resources"] and "imdb_id" not in anime_dicts[anidb_id]:
        anime_dicts[anidb_id]["imdb_id"] = ",".join(anime["resources"]["IMDB"])
    # A single MAL ID is stored as an int; multiple IDs become a comma-separated string
    if "MAL" in anime["resources"] and "mal_id" not in anime_dicts[anidb_id]:
        anime_dicts[anidb_id]["mal_id"] = int(anime["resources"]["MAL"][0]) if len(anime["resources"]["MAL"]) == 1 else ",".join(anime["resources"]["MAL"])
    if "TMDB" in anime["resources"] and anime["resources"]["TMDB"][0].startswith("tv") and "tmdb_show_id" not in anime_dicts[anidb_id]:
        anime_dicts[anidb_id]["tmdb_show_id"] = int(anime["resources"]["TMDB"][0][3:])
    if "TMDB" in anime["resources"] and anime["resources"]["TMDB"][0].startswith("movie") and "tmdb_movie_id" not in anime_dicts[anidb_id]:
        anime_dicts[anidb_id]["tmdb_movie_id"] = int(anime["resources"]["TMDB"][0][6:])
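# Overrides from anime_id_edits.json are applied last so they take precedence over the scraped sources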
logger.info("Scanning Anime ID Edits")
with open("anime_id_edits.json", "r") as f:
    for anidb_id, ids in json.load(f).items():
        anidb_id = int(anidb_id)
        if anidb_id in anime_dicts:
            for attr in ["tvdb_id", "mal_id", "anilist_id", "imdb_id", "tmdb_show_id", "tmdb_movie_id"]:
                if attr in ids:
                    anime_dicts[anidb_id][attr] = ids[attr]
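# Write the merged result to anime_ids.json, shaped roughly like:
# {"<anidb_id>": {"tvdb_id": ..., "tvdb_season": ..., "tvdb_epoffset": ..., "imdb_id": ..., "mal_id": ..., ...}}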
with open("anime_ids.json", "w") as write:
    json.dump(anime_dicts, write, indent=2)
logger.separator()
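# Only refresh the README timestamp (expected on the third line of README.md) when a tracked .json file actually changed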
if [item.a_path for item in Repo(path=".").index.diff(None) if item.a_path.endswith(".json")]:
    logger.info("Saving Anime ID Changes")
    with open("README.md", "r") as f:
        data = f.readlines()
    data[2] = f"Last generated at: {datetime.now(UTC).strftime('%B %d, %Y %I:%M %p')} UTC\n"
    with open("README.md", "w") as f:
        f.writelines(data)
else:
    logger.info("No Anime ID Changes Detected")
logger.separator(f"{script_name} Finished\nTotal Runtime: {logger.runtime()}")