Fix str-to-pathlib conversion issues
parent 1f26d485f5
commit f807cf9c6f
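The commit swaps plain string paths for pathlib.Path objects: os.path.join becomes the "/" operator, os.path.basename becomes .name, and open(str(path)) becomes path.open(). A minimal before/after sketch of the pattern (file and directory names are illustrative, not taken from the diff):

    import os
    import pathlib

    # Before: string paths, os.path.join and the open() builtin
    old_dir = os.path.join("queries", "demo")
    os.makedirs(old_dir, exist_ok=True)
    with open(os.path.join(old_dir, "demo.gql"), "w") as f:
        f.write("query {}")

    # After: Path objects, the "/" join operator and Path.open()
    new_dir = pathlib.Path("queries") / "demo"
    new_dir.mkdir(parents=True, exist_ok=True)
    with (new_dir / "demo.gql").open("r") as f:
        print(f.read())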
@@ -188,7 +188,7 @@ def main():
 	# Initialize PLAYERSKINS db
 	log.debug("loading playerskins db from '{}'" \
 			.format(args.playerskinsdb))
-	with open(args.playerskinsdb, "r") as f:
+	with args.playerskinsdb.open("r") as f:
 		smashgg.PLAYERSKINS = json.load(f)

 	#
@@ -265,7 +265,7 @@ def main():
 			"{}.lkrz".format(tournament.slug)
 			)

-	with open(str(args.lkrz_file), "w", encoding="utf8") as f:
+	with args.lkrz_file.open("w", encoding="utf8") as f:
 		f.write(lkrz_data)

 	# Default outfile is 'tournament-slug.svg'
@@ -275,13 +275,18 @@ def main():
 			)

 	# Build the context which will be passed to the template
+	try:
+		dir_res_ssbu = args.imgdir.as_uri() # not absolute => error
+	except ValueError:
+		dir_res_ssbu = args.imgdir.as_posix()
+
 	context = {
 			"tournament": tournament.clean_name(args.name_seo_delimiter),
 			"players" : sorted(
 				top_players.values(),
 				key = lambda p: p.placement,
 				),
-			"dir_res_ssbu": args.imgdir.as_posix(),
+			"dir_res_ssbu": dir_res_ssbu,
 			}

 	rv = export.generate_outfile(
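Path.as_uri() produces a file:// URL but raises ValueError when the path is not absolute, which is exactly what the new try/except falls back on. A small sketch with a hypothetical directory:

    import pathlib

    imgdir = pathlib.Path("res/ssbu")     # hypothetical, deliberately relative

    try:
        dir_res_ssbu = imgdir.as_uri()    # file:///... form, absolute paths only
    except ValueError:
        dir_res_ssbu = imgdir.as_posix()  # plain forward-slash path instead

    print(dir_res_ssbu)                   # -> res/ssbu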
@@ -61,7 +61,7 @@ def generate_outfile(
 				)
 		import subprocess

-		jj2_tpl.stream(context).dump( tmpsvg )
+		jj2_tpl.stream(context).dump( tmpsvg.name )

 		inkscape_process = subprocess.Popen(
 				[
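Assuming tmpsvg is a tempfile.NamedTemporaryFile (the surrounding code is not shown in this hunk), Jinja2's TemplateStream.dump() expects either a filename or a writable text file; passing the binary temp-file object would fail, while .name hands over the path so Jinja2 opens it itself. A sketch with a placeholder template:

    import tempfile
    import jinja2

    jj2_tpl = jinja2.Template("<svg><title>{{ title }}</title></svg>")  # placeholder template

    # delete=False keeps the file on disk so a later subprocess (e.g. inkscape)
    # can read it by path; close it so it can be reopened by name on any platform.
    tmpsvg = tempfile.NamedTemporaryFile(suffix=".svg", delete=False)
    tmpsvg.close()

    # dump() accepts a filename or a writable text file; tmpsvg.name is the path on disk.
    jj2_tpl.stream(title="example").dump(tmpsvg.name)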
@@ -93,7 +93,7 @@ def generate_outfile(

 	except Exception as e:
 		log.warning("Failed to export with inkscape")
-		log.debug(e)
+		log.debug(e, exc_info=True)

 	# To png, pdf or ps with cairosvg
 	# -------------------------------------------------------------------------
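The exc_info=True flag makes the logging call attach the full traceback to the debug record instead of only the exception's message; for reference:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger(__name__)

    try:
        raise RuntimeError("inkscape not found")  # stand-in for the real export failure
    except Exception as e:
        log.warning("Failed to export with inkscape")
        log.debug(e, exc_info=True)               # logs the message *and* the traceback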
@@ -1,5 +1,5 @@
 import io
-import os
+import pathlib
 import sys
 import zipfile

@@ -53,37 +53,50 @@ def download_res_ssbu(dstdir, proxy = None, log=None):

 	# -------------------------------------------------------------------------
 	# Download stock icons
-	log.info("Downloading stock icons...")
+	log.warning("Downloading stock icons...")
 	fstocks = download_file(stock_icons_url, proxy = proxy)
 	zfstocks = zipfile.ZipFile(fstocks)

 	# Iter over each character
 	for character in EVERYONE:
-		log.info("Downloading images for {}...".format(character.name))
+		log.warning("Downloading images for {}...".format(character.name))

 		# Create directory for this character
-		chardir = os.path.join(
-				dstdir,
-				character.name,
-				)
+		chardir = dstdir / character.name

 		try:
-			os.mkdir( chardir )
+			chardir.mkdir()

 		except FileExistsError:
-			log.warning(
+			log.info(
 					"Directory already exists for {}".format(character.name)
 					)

-			if os.listdir( chardir ):
+			try:
+				next(chardir.iterdir())
 				log.warning(
 						"Directory not empty for {}, skipping" \
 								.format(character.name)
 						)
 				continue
+			except StopIteration:
+				log.info(
+						"Directory empty, downloading",
+						)

 		# Download urls & write image files
 		for url in character.spritersurls:
 			try:
 				f = download_file(url, proxy = proxy)
 			except Exception as e:
+				try:
+					log.warning("Download failed ({}), retrying".format(e))
+					f = download_file(url, proxy = proxy)
+				except Exception as e:
 					log.error("Download failed({})".format(e))
 					log.debug(e, exc_info = True)
 					continue

 			with zipfile.ZipFile(f) as zfchar:
 				for zf in [zfchar,zfstocks]:
 					for source_filename in zf.namelist():
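The os.listdir() emptiness test becomes a lazy pathlib check: next() on Path.iterdir() returns the first entry if there is one and raises StopIteration on an empty directory. The idiom in isolation (directory name is hypothetical):

    import pathlib

    chardir = pathlib.Path("Mario")   # hypothetical character directory
    chardir.mkdir(exist_ok=True)

    try:
        next(chardir.iterdir())       # first entry, if any
        print("not empty, skipping")
    except StopIteration:
        print("empty, downloading")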
@@ -91,13 +104,15 @@ def download_res_ssbu(dstdir, proxy = None, log=None):
 						if "No Gamma Fix" in source_filename:
 							continue

-						if os.path.basename(source_filename) in ["","Tag.txt"]:
-							continue

 						if character.codename not in source_filename:
 							continue

-						target_filename = os.path.basename(source_filename)
+						target_filename = pathlib.Path(source_filename).name

+						if target_filename in ["","Tag.txt"]:
+							continue

+						target_filename = pathlib.Path(source_filename).name

 						target_filename = target_filename.replace(
 								character.codename,
@@ -106,12 +121,9 @@ def download_res_ssbu(dstdir, proxy = None, log=None):

 						log.debug("Writing file '{}'".format(target_filename))

-						target_filename = os.path.join(
-								chardir,
-								target_filename,
-								)
+						target_filename = chardir / target_filename

-						with open(target_filename, "wb") as tf:
+						with open(str(target_filename), "wb") as tf:
 							tf.write(zf.read(source_filename))

 # =============================================================================
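Wrapping the Path in str() before open() goes the other way from the rest of the commit; the likely reason is that the open() builtin only accepts path-like objects from Python 3.6 onward, so str() keeps the write working on older interpreters. A small sketch with a hypothetical target path:

    import pathlib

    target_filename = pathlib.Path("Mario") / "stock_icon.png"  # hypothetical path
    target_filename.parent.mkdir(exist_ok=True)

    # open(target_filename, "wb") is fine on Python >= 3.6; str() also covers 3.5.
    with open(str(target_filename), "wb") as tf:
        tf.write(b"\x89PNG\r\n")  # placeholder bytes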
@@ -1,5 +1,6 @@
 import json
 import os, os.path
+import pathlib

 import requests

@@ -175,23 +176,20 @@ def run_query(
 		variables = {},
 		token = "",
 		proxy = None,
-		query_dir = "queries",
+		query_dir = pathlib.Path("queries"),
 		query_extension = "gql",
 		api_url = API_URL,
 		log = None,
 		):

 	# Load query
-	query_path = os.path.join(
-			query_dir,
-			"{}.{}".format(
+	query_path = query_dir / "{}.{}".format(
 			query_name,
 			query_extension,
 			)
-			)
 	query = ""

-	with open(query_path, 'r') as query_file:
+	with query_path.open("r") as query_file:
 		query = query_file.read()

 	# Build payload