Fix str-to-pathlib conversion issues

Branch: master
Author: Lertsenem
Date:   2020-07-22 20:28:42 +02:00
Parent: 1f26d485f5
Commit: f807cf9c6f

4 changed files with 48 additions and 33 deletions

@@ -188,7 +188,7 @@ def main():
     # Initialize PLAYERSKINS db
     log.debug("loading playerskins db from '{}'" \
             .format(args.playerskinsdb))
-    with open(args.playerskinsdb, "r") as f:
+    with args.playerskinsdb.open("r") as f:
         smashgg.PLAYERSKINS = json.load(f)
 
     #
@@ -265,7 +265,7 @@ def main():
                 "{}.lkrz".format(tournament.slug)
                 )
 
-        with open(str(args.lkrz_file), "w", encoding="utf8") as f:
+        with args.lkrz_file.open("w", encoding="utf8") as f:
            f.write(lkrz_data)
 
     # Default outfile is 'tournament-slug.svg'
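
Both hunks above swap the built-in open() for pathlib.Path.open(), which accepts the same mode and encoding arguments, so the str() round-trip is no longer needed. A minimal sketch of the equivalence, using an illustrative file name:

    import pathlib

    lkrz_file = pathlib.Path("tournament-slug.lkrz")  # illustrative name

    # These two are equivalent; the first avoids converting the Path
    # back to str just to satisfy the built-in open().
    with lkrz_file.open("w", encoding="utf8") as f:
        f.write("lkrz data")
    with open(str(lkrz_file), "w", encoding="utf8") as f:
        f.write("lkrz data")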
@@ -275,13 +275,18 @@ def main():
             )
 
     # Build the context which will be passed to the template
+    try:
+        dir_res_ssbu = args.imgdir.as_uri() # not absolute => error
+    except ValueError:
+        dir_res_ssbu = args.imgdir.as_posix()
+
     context = {
             "tournament": tournament.clean_name(args.name_seo_delimiter),
             "players" : sorted(
                 top_players.values(),
                 key = lambda p: p.placement,
                 ),
-            "dir_res_ssbu": args.imgdir.as_posix(),
+            "dir_res_ssbu": dir_res_ssbu,
             }
 
     rv = export.generate_outfile(
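
PurePath.as_uri() is only defined for absolute paths and raises ValueError on relative ones, which is exactly what the new try/except relies on: an absolute image directory becomes a file:// URI, a relative one falls back to a plain POSIX-style string. A quick illustration with arbitrary paths:

    import pathlib

    for p in [pathlib.Path("/usr/share/res"), pathlib.Path("res/ssbu")]:
        try:
            print(p.as_uri())    # absolute -> 'file:///usr/share/res'
        except ValueError:
            print(p.as_posix())  # relative -> 'res/ssbu'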

@@ -61,7 +61,7 @@ def generate_outfile(
                     )
 
             import subprocess
-            jj2_tpl.stream(context).dump( tmpsvg )
+            jj2_tpl.stream(context).dump( tmpsvg.name )
 
             inkscape_process = subprocess.Popen(
                     [
@@ -93,7 +93,7 @@ def generate_outfile(
 
         except Exception as e:
             log.warning("Failed to export with inkscape")
-            log.debug(e)
+            log.debug(e, exc_info=True)
 
     # To png, pdf or ps with cairosvg
     # -------------------------------------------------------------------------
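
The first hunk suggests tmpsvg is a named temporary file (its creation sits outside the hunk): jinja2's TemplateStream.dump() accepts either a file object or a filename, and passing tmpsvg.name hands it the on-disk path, so inkscape can then read the same file by name. The second hunk's exc_info=True makes the logger record the full traceback instead of just the exception message. A sketch of both, under those assumptions:

    import logging, tempfile

    log = logging.getLogger(__name__)

    tmpsvg = tempfile.NamedTemporaryFile(suffix=".svg")
    print(tmpsvg.name)  # the path on disk, e.g. /tmp/tmpab12cd.svg

    try:
        raise RuntimeError("inkscape export failed")  # stand-in failure
    except Exception as e:
        log.debug(e)                 # message only
        log.debug(e, exc_info=True)  # message plus full traceback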

@@ -1,5 +1,5 @@
 import io
-import os
+import pathlib
 import sys
 import zipfile
@@ -53,37 +53,50 @@ def download_res_ssbu(dstdir, proxy = None, log=None):
     # -------------------------------------------------------------------------
 
     # Download stock icons
-    log.info("Downloading stock icons...")
+    log.warning("Downloading stock icons...")
     fstocks = download_file(stock_icons_url, proxy = proxy)
     zfstocks = zipfile.ZipFile(fstocks)
 
     # Iter over each character
     for character in EVERYONE:
-        log.info("Downloading images for {}...".format(character.name))
+        log.warning("Downloading images for {}...".format(character.name))
 
         # Create directory for this character
-        chardir = os.path.join(
-                dstdir,
-                character.name,
-                )
+        chardir = dstdir / character.name
         try:
-            os.mkdir( chardir )
+            chardir.mkdir()
         except FileExistsError:
-            log.warning(
+            log.info(
                     "Directory already exists for {}".format(character.name)
                     )
-            if os.listdir( chardir ):
+            try:
+                next(chardir.iterdir())
                 log.warning(
                         "Directory not empty for {}, skipping" \
                                 .format(character.name)
                         )
                 continue
+            except StopIteration:
+                log.info(
+                        "Directory empty, downloading",
+                        )
 
         # Download urls & write image files
         for url in character.spritersurls:
-            f = download_file(url, proxy = proxy)
+            try:
+                f = download_file(url, proxy = proxy)
+            except Exception as e:
+                try:
+                    log.warning("Download failed ({}), retrying".format(e))
+                    f = download_file(url, proxy = proxy)
+                except Exception as e:
+                    log.error("Download failed({})".format(e))
+                    log.debug(e, exc_info = True)
+                    continue
 
             with zipfile.ZipFile(f) as zfchar:
                 for zf in [zfchar,zfstocks]:
                     for source_filename in zf.namelist():
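
Two idioms in this hunk deserve a gloss. next(chardir.iterdir()) probes whether a directory is empty without building the full list that os.listdir() returns: iterdir() is lazy, and next() on an empty iterator raises StopIteration. And the reworked download loop retries each failing URL once, then logs an error and skips it. Both patterns, reduced to standalone sketches (helper names are illustrative):

    import pathlib

    def is_empty(directory: pathlib.Path) -> bool:
        # next() raises StopIteration iff iterdir() yields no entries.
        try:
            next(directory.iterdir())
            return False
        except StopIteration:
            return True

    def fetch_with_one_retry(url, download_file, log):
        # One retry, then give up, mirroring the log-and-continue
        # shape of the loop above.
        try:
            return download_file(url)
        except Exception as e:
            log.warning("Download failed ({}), retrying".format(e))
            try:
                return download_file(url)
            except Exception as e:
                log.error("Download failed ({})".format(e))
                return None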
@@ -91,13 +104,15 @@ def download_res_ssbu(dstdir, proxy = None, log=None):
                         if "No Gamma Fix" in source_filename:
                             continue
 
-                        if os.path.basename(source_filename) in ["","Tag.txt"]:
-                            continue
-
                         if character.codename not in source_filename:
                             continue
 
-                        target_filename = os.path.basename(source_filename)
+                        target_filename = pathlib.Path(source_filename).name
+                        if target_filename in ["","Tag.txt"]:
+                            continue
 
+                        target_filename = pathlib.Path(source_filename).name
 
                         target_filename = target_filename.replace(
                             character.codename,
@@ -106,12 +121,9 @@ def download_res_ssbu(dstdir, proxy = None, log=None):
                         log.debug("Writing file '{}'".format(target_filename))
 
-                        target_filename = os.path.join(
-                                chardir,
-                                target_filename,
-                                )
+                        target_filename = chardir / target_filename
 
-                        with open(target_filename, "wb") as tf:
+                        with open(str(target_filename), "wb") as tf:
                             tf.write(zf.read(source_filename))
 
 # =============================================================================
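
pathlib.PurePath.name is the pathlib counterpart of os.path.basename(), and the / operator replaces os.path.join(). The two basename spellings differ on directory entries, which zip namelists store with a trailing slash, so the "" guard in the previous hunk matches differently after the change; the str() in open(str(target_filename), "wb") is redundant on Python 3.6+ (open() accepts path objects there) but harmless. An illustrative comparison:

    import os.path
    import pathlib

    entry = "Fighters/Mario/Final.png"              # illustrative entry
    print(os.path.basename(entry))                  # 'Final.png'
    print(pathlib.PurePath(entry).name)             # 'Final.png'

    # Directory entries behave differently:
    print(repr(os.path.basename("Mario/")))         # ''
    print(repr(pathlib.PurePath("Mario/").name))    # 'Mario'

    chardir = pathlib.Path("res") / "Mario"         # '/' joins paths
    print(chardir / "Final.png")                    # res/Mario/Final.png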

@@ -1,5 +1,6 @@
 import json
 import os, os.path
+import pathlib
 
 import requests
@@ -175,23 +176,20 @@ def run_query(
         variables = {},
         token = "",
         proxy = None,
-        query_dir = "queries",
+        query_dir = pathlib.Path("queries"),
         query_extension = "gql",
         api_url = API_URL,
         log = None,
         ):
 
     # Load query
-    query_path = os.path.join(
-            query_dir,
-            "{}.{}".format(
-                query_name,
-                query_extension,
-                )
-            )
+    query_path = query_dir / "{}.{}".format(
+            query_name,
+            query_extension,
+            )
 
     query = ""
-    with open(query_path, 'r') as query_file:
+    with query_path.open("r") as query_file:
         query = query_file.read()
 
     # Build payload
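
With query_dir defaulting to a Path, the lookup collapses to a single / join and Path.open() replaces open(). One caveat worth flagging: a caller that still passes a plain string for query_dir will now fail with a TypeError on the / operator, since str does not implement it. A minimal sketch of the resulting lookup, assuming the same queries/<name>.gql layout:

    import pathlib

    def load_query(query_name,
                   query_dir = pathlib.Path("queries"),
                   query_extension = "gql"):
        # e.g. Path('queries') / 'foo.gql' -> queries/foo.gql
        query_path = query_dir / "{}.{}".format(query_name, query_extension)
        with query_path.open("r") as query_file:
            return query_file.read()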