# utils.py — helpers for Beat Saber level hashing and map/playlist URL handling.
from pathlib import Path
from enum import Enum
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from inspect import getfile
import urllib3
import sys
import os
import json
import hashlib
import requests
import re
import zipfile
import base64
# Absolute path of the directory containing this script. Uses getfile() on a
# local lambda instead of __file__ so it also resolves in embedded contexts.
dir_script = Path(getfile(lambda: 0)).parent.absolute()
class URI_type(Enum):
    """Kinds of map/playlist resources, as classified by get_map_or_playlist_resource_type()."""
    unknown = 0
    map_file = 1                # local .zip map archive
    map_scoresaber = 2          # scoresaber.com leaderboard page
    map_beatsaver = 3           # beatsaver.com beatmap page
    map_beatsaver_oneclick = 4  # beatsaver:// one-click install URI
    map_beatsaver_key = 5       # presumably a bare beatsaver key — not produced by the classifier in this file
    map_bsaber = 6              # bsaber.com/songs/... page
    playlist_file = 7           # local .bplist playlist file
    playlist_bsaber = 8         # bsaber.com non-/songs/ URL (see WARNING in classifier)
def get_resource_path(relative_path):
    """
    Get pyinstaller-compatible path to resource. Needed for standalone version.

    A PyInstaller one-file bundle unpacks resources into a temp folder whose
    path is stored in ``sys._MEIPASS``; outside a bundle, fall back to the
    directory containing this script.
    See https://stackoverflow.com/questions/51060894/adding-a-data-file-in-pyinstaller-using-the-onefile-option/51061279

    :param relative_path: path of the resource relative to the bundle/script dir
    :return: absolute Path to the resource
    """
    # Narrow probe instead of try/except Exception: the only expected failure
    # mode is the attribute simply not existing outside a PyInstaller bundle.
    meipass = getattr(sys, "_MEIPASS", None)
    if meipass is not None:
        base_path = Path(meipass).absolute()
    else:
        base_path = dir_script
    return base_path.joinpath(relative_path)
def encode_image_to_base64_str(img_path):
    """Read the image file at *img_path* and return its base64 encoding as a str."""
    raw_bytes = Path(img_path).read_bytes()
    return base64.b64encode(raw_bytes).decode()
def calculate_Level_hash_from_dir(levelPath):
    """
    Calculate the uppercase-hex SHA1 hash identifying a Beat Saber level directory.

    The hash is the SHA1 of the raw bytes of info.dat concatenated with the raw
    bytes of every difficulty file listed in it, in listing order.

    :param levelPath: path to the extracted level directory
    :return: uppercase hex SHA1 string, or None if info.dat is missing
    """
    levelPath = Path(levelPath)
    infoPath = levelPath.joinpath("./info.dat")
    if not infoPath.is_file():
        print("info.dat at {} does not exist. Skipping. Note that this is not normal, please check the integrity of this level and consider re-downloading it.".format(levelPath))
        return None
    info_binary = infoPath.read_bytes()
    info_data = json.loads(info_binary)
    # We need to calculate sha1 hashes of the concatenation of info.dat and each difficulty listed there
    hasher = hashlib.sha1()
    hasher.update(info_binary)
    # List difficulties; `or []` guards against a malformed info.dat where the
    # key is absent (the original crashed with TypeError iterating None)
    difficulty_sets = info_data.get("_difficultyBeatmapSets") or []
    # Read each difficulty file and concatenate to the binary info data
    for diffset in difficulty_sets:
        for beatmap in diffset.get("_difficultyBeatmaps") or []:
            beatmap_filepath = levelPath.joinpath(beatmap.get("_beatmapFilename"))
            hasher.update(beatmap_filepath.read_bytes())
    # Calculate the final hash
    return hasher.hexdigest().upper()
def calculate_Level_hash_from_zip(levelPath):
    """
    Calculate the uppercase-hex SHA1 level hash directly from a zipped level.

    Similar to calculate_Level_hash_from_dir(), but reads Info.dat and the
    difficulty files from inside the zip archive without extracting it.

    :param levelPath: path to the level .zip file
    :return: uppercase hex SHA1 string
    """
    levelPath = Path(levelPath)
    with zipfile.ZipFile(str(levelPath), "r") as zip_file:
        with zip_file.open("Info.dat") as tmpfile:
            info_binary = tmpfile.read()
        # BUGFIX: json.loads() lost its `encoding` parameter in Python 3.9;
        # bytes input is auto-detected (UTF-8/16/32) per the json module docs.
        info_data = json.loads(info_binary)
        hasher = hashlib.sha1()
        hasher.update(info_binary)
        # `or []` guards absent keys, consistent with the _from_dir variant
        difficulty_sets = info_data.get("_difficultyBeatmapSets") or []
        for diffset in difficulty_sets:
            for beatmap in diffset.get("_difficultyBeatmaps") or []:
                beatmap_filename = beatmap.get("_beatmapFilename")
                # Read difficulty file and concatenate to the binary info data
                with zip_file.open(beatmap_filename, "r") as tmpfile:
                    hasher.update(tmpfile.read())
        # Calculate the final hash
        return hasher.hexdigest().upper()
def get_map_or_playlist_resource_type(input_string):
    """
    Classify *input_string* (a local path or a URL) into a URI_type.

    Local .zip files are maps and .bplist files are playlists; known hosts
    (beatsaver.com, bsaber.com, scoresaber.com) are classified by URL shape.

    :param input_string: filesystem path or URL
    :return: a URI_type member (URI_type.unknown when unrecognized)
    """
    if os.path.exists(input_string):
        uri_path = Path(input_string)
        # BUGFIX: is_file is a method — the original `not uri_path.is_file`
        # tested the bound-method object (always truthy), so the non-file
        # branch was unreachable and directories fell through to URL parsing.
        if not uri_path.is_file():
            return URI_type.unknown
        if uri_path.suffix == ".zip":
            return URI_type.map_file
        if uri_path.suffix == ".bplist":
            return URI_type.playlist_file
    parsed = urlparse(input_string)
    # Guard the [1] indexing: a URL with an empty path splits to [""],
    # which the original indexed out of range.
    path_parts = parsed.path.split("/")
    if parsed.hostname == "beatsaver.com":
        if parsed.scheme == "beatsaver":
            return URI_type.map_beatsaver_oneclick
        return URI_type.map_beatsaver
    if parsed.hostname == "bsaber.com":
        if len(path_parts) > 1 and path_parts[1] == "songs":
            return URI_type.map_bsaber
        else:
            # WARNING really unverified here
            return URI_type.playlist_bsaber
    if parsed.hostname == "scoresaber.com":
        if len(path_parts) > 1 and path_parts[1] == "leaderboard":
            return URI_type.map_scoresaber
    return URI_type.unknown
def get_level_hash_from_url(map_url, resource_type):
    """
    Extract a level identifier from a map URL.

    For beatsaver/bsaber URLs the identifier is the last non-empty path
    segment (the beatsaver key). For scoresaber leaderboard pages the page is
    fetched and the "ID: " field is scraped from the HTML. Other resource
    types yield None.
    """
    parsed = urlparse(map_url)
    if resource_type in (URI_type.map_beatsaver, URI_type.map_bsaber):
        # URL may be https://beatsaver.com/beatmap/whatever or https://beatsaver.com/beatmap/whatever/
        segments = parsed.path.split("/")
        return segments[-2] if segments[-1] == "" else segments[-1]
    if resource_type is URI_type.map_scoresaber:
        page = requests.get(map_url)
        parsed_html = BeautifulSoup(page.text, "html.parser")
        id_node = parsed_html.find(text="ID: ").next_element.next
        return str(id_node)
def get_level_hashes_from_playlist(bplist_path):
    """Return the list of level hashes listed in the .bplist playlist at *bplist_path*."""
    with open(bplist_path, encoding="utf-8") as fp:
        playlist = json.load(fp)
    return [entry["hash"] for entry in playlist["songs"]]
def extract_bsaber_bplist_url(baseurl):
    """
    Scrape a bsaber.com page and return the absolute URL of its playlist download.

    Finds the anchor whose href contains "/PlaylistAPI/" and prefixes the
    site-relative href with the bsaber.com origin.
    """
    page = requests.get(baseurl)
    markup = BeautifulSoup(page.text, "html.parser")
    playlist_anchor = markup.find("a", href=re.compile(r"/PlaylistAPI/"))
    relative_url = playlist_anchor.attrs.get("href")
    return "https://bsaber.com" + relative_url