import io
import os
import pathlib
import sys
import urllib.parse
import zipfile

import requests

# -----------------------------------------------------------------------------
def download_file(url, with_progressbar = True, proxy = None):
    """Download `url` and return its content in an io.BytesIO buffer.

    When `with_progressbar` is true the response is streamed and a text
    progress bar is drawn on stdout; `proxy` (if given) is used for both
    http and https.
    """
    r = requests.get(
        url,
        stream = with_progressbar,
        proxies = { "https": proxy, "http": proxy },
    )

    if not with_progressbar:
        return io.BytesIO(r.content)

    total = r.headers.get("content-length")

    f = io.BytesIO()

    if total is not None:
        downloaded = 0
        total = int(total)
        for data in r.iter_content(
            chunk_size = max(int(total/1000), 1024*1024),
        ):
            f.write(data)
            downloaded += len(data)
            # 50-character bar: `done` is the number of filled cells,
            # so done*2 is the percentage
            done = int(50*downloaded/total)
            sys.stdout.write( "\r[{}{}] ({:02d}%)".format(
                "█" * done,
                " " * (50-done),
                done*2,
            ) )
            sys.stdout.flush()
        sys.stdout.write("\n")
    else:
        # No content-length header: write the whole body at once,
        # without a progress bar
        f.write(r.content)

    return f

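# A minimal usage sketch for download_file(); the URL and output name below
# are placeholders, not values used by this project:
#
#     buf = download_file("https://example.com/archive.zip",
#                         with_progressbar = False)
#     with open("archive.zip", "wb") as out:
#         out.write(buf.getbuffer())
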
# -----------------------------------------------------------------------------
def download_res(
    dstdir,
    game = None,
    source = None,
    store_raw = False,
    proxy = None,
    log = None,
):
    """Download character resources for `game` into `dstdir`.

    `game` is expected to expose GAME.name, EVERYONE and, for each character,
    `name`, `codenames` and `res_urls`. When `store_raw` is true the raw zip
    archives are kept instead of being extracted per character.
    """
    if not game:
        return

    # Select default source if needed
    if not source:
        if game.GAME.name == "pplus":
            source = "smashlyon"
        elif game.GAME.name == "ssbu":
            source = "spriters"

    if source not in ["smashlyon", "spriters"]:
        raise NotImplementedError(
            "The only working sources are 'smashlyon' and 'spriters'",
        )

    gamedir = dstdir / game.GAME.name

    try:
        gamedir.mkdir(parents=True)
    except FileExistsError:
        log.debug("Game directory already exists")

    # A cache to save time on urls that are needed more than once
    cache = {}

    # Iterate over each character
    for character in game.EVERYONE:
        log.warning("Downloading images for {}...".format(character.name))

        chardir = gamedir / character.name

        if not store_raw:
            try:
                chardir.mkdir(parents=True)
            except FileExistsError:
                log.info(
                    "Directory already exists for {}"
                    .format(character.name)
                )

            # Skip characters whose directory already has content
            try:
                next(chardir.iterdir())
                log.warning(
                    "Directory not empty for {}, skipping"
                    .format(character.name)
                )
                continue
            except StopIteration:
                log.info(
                    "Directory empty, downloading",
                )

        # Download urls & write image files
        for url_nb, url in enumerate(character.res_urls[source]):

            # If we have the file in cache, just get it
            if url in cache and cache[url] is not None:
                log.debug("Found url '{}' in cache".format(url))
                f = cache[url]

            else:
                try:
                    f = download_file(url, proxy = proxy)

                except Exception as e:
                    try:
                        # Try the download a 2nd time
                        log.warning("Download failed ({}), retrying".format(e))
                        f = download_file(url, proxy = proxy)
                    except Exception as e:
                        log.error("Download failed ({})".format(e))
                        log.debug(e, exc_info = True)
                        continue

                # We save the file in cache only if it's the second time we
                # need to download it.
                if url in cache:
                    log.debug("Saving url '{}' in cache".format(url))
                    cache[url] = f
                else:
                    log.debug("Marking url '{}' in cache".format(url))
                    cache[url] = None

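            # Design note (a reading of the marking logic above): the first
            # time a url is seen it is only marked (cache[url] = None), so an
            # archive needed only once is never kept in memory; if the same
            # url comes up again, that second download is stored and every
            # later use is served from the cache.
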
            # if store_raw: we just save the raw zip file
            if store_raw:
                outfile_name = pathlib.Path(
                    urllib.parse.urlparse(url).path
                ).name
                with open(str(gamedir/outfile_name), "wb") as outfile:
                    outfile.write(f.getbuffer())

                # Add a symlink for readability: "<character name>.<url number>.zip"
                os.symlink(
                    str(outfile_name),
                    str(gamedir /
                        "{charname}.{nb}.zip".format(
                            charname = character.name,
                            nb = url_nb+1,
                        )),
                )

                continue

            # otherwise: extract the character's pictures and write them in
            # the output directory
            with zipfile.ZipFile(f) as zf:
                for source_filename in zf.namelist():

                    if "No Gamma Fix" in source_filename:
                        continue

                    # Only keep files that belong to this character
                    if not any(c in source_filename for c in character.codenames):
                        continue

                    target_filename = pathlib.Path(source_filename).name

                    if target_filename in ["", "Tag.txt"]:
                        continue

                    target_filename = target_filename.replace(
                        character.codenames[0],
                        character.name,
                    )

                    log.debug("Writing file '{}'".format(target_filename))

                    target_filename = chardir / target_filename

                    with open(str(target_filename), "wb") as tf:
                        tf.write(zf.read(source_filename))

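# A minimal usage sketch for download_res(), assuming a `characters` module
# (an assumption for illustration; it is not defined in this file) that
# exposes GAME, EVERYONE and per-character `name`, `codenames` and `res_urls`:
#
#     import logging
#     import pathlib
#     import characters
#
#     download_res(
#         dstdir = pathlib.Path("/tmp/resources"),
#         game = characters,
#         source = "spriters",
#         log = logging,
#     )
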
# =============================================================================
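# Running this file as a script downloads resources into the directory given
# on the command line, or into a temporary directory when none is given.
# A hedged invocation sketch (the filename "download_res.py" is an assumption):
#
#     python download_res.py /tmp/resources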
if __name__ == '__main__':

    import argparse
    import logging
    import tempfile

    logging.basicConfig(
        level = logging.DEBUG,
        format = "%(message)s",
    )

    parser = argparse.ArgumentParser()

    parser.add_argument(
        "dstdir",
        nargs = "?",
        default = None,
        help = "directory where to store the downloaded resources "
               "(defaults to a temporary directory)",
    )

    args = parser.parse_args()

    if args.dstdir is None:
        args.dstdir = tempfile.mkdtemp()
        logging.warning(
            "Storing in temporary directory: {}".format(args.dstdir)
        )

    # NOTE: download_res() returns immediately unless a `game` object is
    # passed in; supply one here to actually download something.
    download_res(
        dstdir = pathlib.Path(args.dstdir),
        log = logging,
    )