Compare commits
3 Commits
cb3c17c5db ... ce00b327bb
| Author | SHA1 | Date |
|---|---|---|
| Christian Aribaud | ce00b327bb | |
| Christian Aribaud | 7e52c5a532 | |
| Christian Aribaud | 3d7428e76f | |
README.md (17 changed lines)
@@ -110,6 +110,23 @@ To do so, assuming you are using linux, after installing docker, you can run fro…
You can now open `eslint_report.html` to see the result.

## Setting up quality checking for Python

Regarding Python, quality checking is done with [prospector](https://prospector.landscape.io/en/master/), using [mypy](https://mypy.readthedocs.io/en/stable/) as an additional checker.
Auto-formatting is done using [Black](https://pypi.org/project/black/).
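
As an aside, the strictness, line length, and extra-tool settings passed on the command line later in this section could also live in a prospector profile at the root of the repository. This changeset does not add such a file; the snippet below is only a hedged sketch of what one could look like (key names should be checked against prospector's profile documentation):

```yaml
# .prospector.yaml -- hypothetical profile, not part of this changeset
strictness: veryhigh
max-line-length: 88

mypy:
  run: true
```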

Dependencies are managed with [`pipenv`](https://pipenv.pypa.io/en/latest/) rather than plain `pip`.
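
The Pipfile that `pipenv install --dev` reads is not shown in this comparison; purely as an illustration, its development-dependency section presumably looks something like the sketch below (the pinned versions and the Python version are assumptions):

```toml
# Hypothetical Pipfile sketch -- the real file lives in the repository, not in this diff
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[dev-packages]
prospector = "*"
mypy = "*"
black = "*"

[requires]
python_version = "3.10"
```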

To set up prospector, you need to run the commands below (an equivalent `pipenv run` form is shown right after the list):

+ Only once, in melpomene's root folder:
  + Install pipenv: `pip install pipenv`
  + Install melpomene's dependencies: `pipenv install --dev`
+ Every time, to run the quality checks:
  + `pipenv shell`
  + `prospector -s veryhigh -w mypy --max-line-length 88 .`
  + `black .`
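
If you would rather not enter the project shell, the same checks can be run through `pipenv run`; these are simply the commands from the list above wrapped by pipenv:

```sh
pipenv run prospector -s veryhigh -w mypy --max-line-length 88 .
pipenv run black .
```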

# Credits

Most examples and the documentation of Melpomene use illustrations from David "Deevad" Revoy's "Pepper & Carrot" webcomic, which is published under CC-BY 4.0. Full licence [here](https://www.peppercarrot.com/en/license/index.html).

@@ -3,10 +3,10 @@
 # CC-BY-NC-SA https://git.aribaud.net/caribaud/melpomene/
 
 import sys
-import re
-import xml.etree.ElementTree as ET
-import argparse
-
+from argparse import ArgumentParser
+from xml.etree import ElementTree
+from xml.etree.ElementTree import Element
+from typing import Any
 from pathlib import Path
 
 
@@ -14,63 +14,84 @@ HTML_TEMPLATE = Path(__file__).parent / "melpomene.html"
 HTML_TO_REPLACE = "<!-- your img tags here, see documentation -->"
 
 
-def extract_zooms(src_folder):
+def get_val_has_str(elem: Element, attrib: str, filepath: str | Path) -> str:
+    value = elem.get(attrib)
+
+    if value is None:
+        sys.exit(f"Attribute '{attrib}' is not valid in file {filepath}")
+
+    return str(value)
+
+
+def extract_zooms(src_folder) -> dict[int, Any]:
     folder = Path(src_folder)
 
-    zooms = {}
-
-    max_width = 0
-    max_height = 0
+    pages_zooms: dict[int, Any] = {}
 
     idx = 0
 
     for svg_path in folder.glob("*.svg"):
 
         idx += 1
 
         print(f"page {idx} : {svg_path.name}")
 
-        zooms[idx] = {
+        # Setting up default values
+        pages_zooms[idx] = {
             "name": svg_path.stem,
             "width": 0,
             "height": 0,
             "zooms": [],
         }
 
-        tree = ET.parse(svg_path)
+        tree = ElementTree.parse(svg_path)
         root = tree.getroot()
 
-        if "." in root.get("width"):
-            print(f"WARNING: file {svg_path} has a floating width, it will be rounded", file=sys.stderr)
-        zooms[idx]["width"] = round(float(root.get("width")))
-        if "." in root.get("height"):
-            print(f"WARNING: file {svg_path} has a floating height, it will be rounded", file=sys.stderr)
-        zooms[idx]["height"] = round(float(root.get("height")))
+        width = get_val_has_str(root, "width", svg_path)
+        height = get_val_has_str(root, "height", svg_path)
 
-        for area in root.findall('.//{*}rect'):
-            zooms[idx]["zooms"].append([
-                float(area.get("width")),
-                float(area.get("height")),
-                float(area.get("x")),
-                float(area.get("y")),
-            ])
+        if "." in width:
+            print(
+                f"WARNING: file {svg_path} has a floating width, it will be rounded",
+                file=sys.stderr,
+            )
+        pages_zooms[idx]["width"] = round(float(width))
 
-    return zooms, max_width, max_height
+        if "." in height:
+            print(
+                f"WARNING: file {svg_path} has a floating height, it will be rounded",
+                file=sys.stderr,
+            )
+        pages_zooms[idx]["height"] = round(float(height))
+
+        zooms = []
+        for area in root.findall(".//{*}rect"):
+            zooms.append(
+                [
+                    float(get_val_has_str(area, "width", svg_path)),
+                    float(get_val_has_str(area, "height", svg_path)),
+                    float(get_val_has_str(area, "x", svg_path)),
+                    float(get_val_has_str(area, "y", svg_path)),
+                ]
+            )
+
+        pages_zooms[idx]["zooms"] = zooms
+
+    return pages_zooms
 
 
-def write_json_or_js(zooms, dest_file, is_js):
-
-    with open(dest_file, "w") as data_file:
+def write_json_or_js(zooms, dest_file, is_js) -> None:
+    with open(dest_file, "w", encoding="UTF-8") as data_file:
         if is_js:
             data_file.write("PAGES_ZOOMS = ")
         data_file.write("[\n")
         first_coma_skiped = False
         for page_idx in sorted(zooms.keys()):
             for zoom in zooms[page_idx]["zooms"]:
 
                 if zoom[2] < 0 or zoom[3] < 0:
-                    print(f"WARNING: negative pos x / pos y in page {page_idx} for zoom {zoom} (is the rectangle flipped?)")
+                    print(
+                        f"WARNING: negative pos x / pos y in page {page_idx} for "
+                        f"zoom {zoom} (is the rectangle flipped?)"
+                    )
 
                 if first_coma_skiped:
                     data_file.write(",\n")
@@ -80,45 +101,71 @@ def write_json_or_js(zooms, dest_file, is_js):
         data_file.write("\n]\n")
 
 
-def write_html(zooms, dest_file, pages_width, pages_height, prefix, extention):
-
+def write_html(zooms, dest_file, prefix, extention) -> None:
     img_tags = ""
     for page_idx in sorted(zooms.keys()):
         img_url = f"{prefix}{zooms[page_idx]['name']}.{extention}"
-        zoom_html_data = [','.join([str(zoom) for zoom in page_zooms]) for page_zooms in zooms[page_idx]["zooms"]]
-        zoom_html_str = ';'.join(zoom_html_data)
-        img_tags = img_tags + f' <img loading="lazy" height="{zooms[page_idx]["height"]}" width="{zooms[page_idx]["width"]}" src="{img_url}" data-zooms="{zoom_html_str}"/>\n'
+        zoom_html_data = [
+            ",".join([str(zoom) for zoom in page_zooms])
+            for page_zooms in zooms[page_idx]["zooms"]
+        ]
+        zoom_html_str = ";".join(zoom_html_data)
+        img_tags = (
+            img_tags
+            + " "
+            + f'<img loading="lazy" height="{zooms[page_idx]["height"]}" '
+            + f'width="{zooms[page_idx]["width"]}" src="{img_url}" '
+            + f'data-zooms="{zoom_html_str}"/>\n'
+        )
 
     img_tags = img_tags.strip()
 
-    with open(HTML_TEMPLATE) as template_file, open(dest_file, "w") as data_file:
-
+    with open(HTML_TEMPLATE, "r", encoding="UTF-8") as template_file, open(
+        dest_file, "w", encoding="UTF-8"
+    ) as data_file:
         data = template_file.read().replace(HTML_TO_REPLACE, img_tags)
 
         data_file.write(data)
 
 
-def generate_argparse():
+def generate_argparse() -> ArgumentParser:
     """Generate Melpomene's generator input parser"""
 
-    parser = argparse.ArgumentParser(
-        description="Helper that can generate JSON / JS / HTML files for Melpomene webcomic reader"
+    parser = ArgumentParser(
+        description=(
+            "Helper that can generate JSON / JS / "
+            "HTML files for Melpomene webcomic reader"
+        )
     )
 
-    parser.add_argument("output_format", choices=["html", "json", "js"], help="The type of output to generate")
+    parser.add_argument(
+        "output_format",
+        choices=["html", "json", "js"],
+        help="The type of output to generate",
+    )
     parser.add_argument("svg_folders", help="Path of the folder containing the SVGs")
-    parser.add_argument("-o", metavar="dest_file", help="Where to write the generator output to")
-    parser.add_argument("-p", default="", metavar="img_url_prefix", help="What to prefix the URL of the images when using HTML format.")
-    parser.add_argument("-e", default="png", metavar="img_ext", help="What extention to use in the URL of the images when using HTML format.")
+    parser.add_argument(
+        "-o", metavar="dest_file", help="Where to write the generator output to"
+    )
+    parser.add_argument(
+        "-p",
+        default="",
+        metavar="img_url_prefix",
+        help="What to prefix the URL of the images when using HTML format.",
+    )
+    parser.add_argument(
+        "-e",
+        default="png",
+        metavar="img_ext",
+        help="What extention to use in the URL of the images when using HTML format.",
+    )
 
     return parser
 
 
-if __name__ == "__main__":
-
+def run():
     args = generate_argparse().parse_args()
 
 
     # Get the final outout name
     output = None
 
@@ -136,13 +183,17 @@ if __name__ == "__main__":
     elif args.output_format == "js" and not output.endswith(".js"):
         output += ".js"
 
-    zooms, max_width, max_height = extract_zooms(args.svg_folders)
+    zooms = extract_zooms(args.svg_folders)
 
     if args.output_format == "html":
-        write_html(zooms, output, max_width, max_height, args.p, args.e)
+        write_html(zooms, output, args.p, args.e)
 
     elif args.output_format == "json":
        write_json_or_js(zooms, output, False)
 
     elif args.output_format == "js":
         write_json_or_js(zooms, output, True)
+
+
+if __name__ == "__main__":
+    run()