Metadata and template based website compiler
pixywerk/pixywerk2/__main__.py


# iterate source tree
# create directories in target tree
# for each item:
#    run processor(s) on item, each processor could be in a chain or a branch
#    (processors also provide filename munging)
#    output target based on processor output
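#
# Example (illustrative names only): a source tree such as
#     root/index.md, root/style.css, root/templates/default.jinja2
# might be compiled to
#     output/index.html, output/style.css
# with the actual output filenames chosen by each file's processor chain.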
import argparse
import logging
import os
import shutil
import sys
import time
from typing import Dict, List, cast
from .metadata import MetaTree
from .processchain import ProcessorChains
from .processors.processors import PassthroughException
from .pygments import pygments_get_css, pygments_markup_contents_html
from .template_tools import (
    date_iso8601,
    file_content,
    file_list,
    file_list_hier,
    file_json,
    file_metadata,
    file_name,
    file_raw,
    time_iso8601,
)
from .utils import deep_merge_dicts

logger = logging.getLogger()

def setup_logging(verbose: bool = False) -> None:
    # Minimal sketch (assumption): map the --verbose flag onto the root logger's level.
    logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)

def parse_var(varspec: str) -> List:
    # Split "key=value" definitions from -D; a bare "key" becomes a boolean flag.
    if "=" not in varspec:
        return [varspec, True]
    # maxsplit=1 so any further "=" stays inside the value.
    return list(varspec.split("=", 1))

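# Illustrative examples (hypothetical values):
#   parse_var("title=My Site") -> ["title", "My Site"]
#   parse_var("draft")         -> ["draft", True]    # bare names become boolean flags
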
def get_args(args: List[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Compile a Pixywerk directory into an output directory.")
    parser.add_argument("root", help="The root of the pixywerk directory to process.")
    parser.add_argument("output", help="The output directory to export post-compiled files to.")
    parser.add_argument(
        "-c", "--clean", help="Remove the target tree before proceeding (by renaming to .bak).", action="store_true"
    )
    parser.add_argument("-s", "--safe", help="Abort if the target directory already exists.", action="store_true")
    parser.add_argument("-f", "--follow-links", help="Follow symbolic links in the input tree.", action="store_true")
    parser.add_argument("-t", "--template", help="The template directory (default: root/templates)", default=None)
    parser.add_argument("-d", "--dry-run", help="Perform a dry-run.", action="store_true")
    parser.add_argument("-v", "--verbose", help="Output verbosely.", action="store_true")
    parser.add_argument("--processors", help="Specify a path to a processor configuration file.", default=None)
    parser.add_argument(
        "-D", "--define", help="Add a variable to the metadata.", nargs="+", action="extend", type=parse_var
    )

    result = parser.parse_args(args)

    # validate arguments
    if not os.path.isdir(result.root):
        raise FileNotFoundError("can't find root folder {}".format(result.root))

    if not result.template:
        result.template = os.path.join(result.root, "templates")
    result.excludes = [result.template]

    return result

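# Illustrative invocation (assuming the package is importable as pixywerk2; paths and
# -D values are hypothetical):
#   python -m pixywerk2 site/ public/ --clean -D author="Jane Doe" sitename="Example"
# Each -D key=value pair is run through parse_var() and merged into the default
# metadata in main() below.
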
def main() -> int:
    try:
        args = get_args(sys.argv[1:])
    except FileNotFoundError as ex:
        print("error finding arguments: {}".format(ex))
        return 1

    setup_logging(args.verbose)

    # with --clean, move any existing output tree aside before rebuilding
    if os.path.exists(args.output) and args.clean:
        bak = "{}.bak-{}".format(args.output, int(time.time()))
        print("cleaning target {} -> {}".format(args.output, bak))
        os.rename(args.output, bak)

    process_chains = ProcessorChains(args.processors)

    # site-wide default metadata handed to MetaTree
    default_metadata = {
        "templates": args.template,
        "template": "default.jinja2",
        "dir-template": "default-dir.jinja2",
        "filters": {},
        "build-time": time.time(),
        "uuid-oid-root": "pixywerk",
        "summary": "",
        "description": "",
        "author": "",
        "author_email": "",
    }
    if args.define:
        for var in args.define:
            default_metadata[var[0]] = var[1]

    meta_tree = MetaTree(args.root, default_metadata)

    # caches handed to the template helper closures
    file_list_cache = cast(Dict, {})
    file_cont_cache = cast(Dict, {})
    file_name_cache = cast(Dict, {})
    file_raw_cache = cast(Dict, {})

    flist = file_list(args.root, file_list_cache)
    # helpers exposed to templates via the metadata "globals" key
    default_metadata["globals"] = {
        "get_file_list": flist,
        "get_hier": file_list_hier(args.root, flist),
        "get_file_name": file_name(args.root, meta_tree, process_chains, file_name_cache),
        "get_file_content": file_content(args.root, meta_tree, process_chains, file_cont_cache),
        "get_json": file_json(args.root),
        "get_raw": file_raw(args.root, file_raw_cache),
        "get_file_metadata": file_metadata(meta_tree),
        "get_time_iso8601": time_iso8601("UTC"),
        "get_date_iso8601": date_iso8601("UTC"),
        "pygments_get_css": pygments_get_css,
        "pygments_markup_contents_html": pygments_markup_contents_html,
        "merge_dicts": deep_merge_dicts,
    }
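    # Illustrative note (assumption about template_tools' exact signatures): the helpers
    # registered above are what rendered templates call, e.g. something like
    # {{ get_file_metadata(path)["title"] }} in a Jinja2 template; see template_tools
    # for the parameters each closure actually takes.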
    for root, _, files in os.walk(args.root, followlinks=args.follow_links):
        workroot = os.path.relpath(root, args.root)
        if workroot == ".":
            workroot = ""
        target_dir = os.path.join(args.output, workroot)
        print("mkdir -> {}".format(target_dir))
        if not args.dry_run:
            try:
                os.mkdir(target_dir)
            except FileExistsError:
                if args.safe:
                    print("error, target directory exists, aborting")
                    return 1
        for f in files:
            # fixme global generic filters
            if f.endswith(".meta") or f.endswith("~"):
                continue
            metadata = meta_tree.get_metadata(os.path.join(workroot, f))
            chain = process_chains.get_chain_for_filename(os.path.join(root, f), ctx=metadata)
            print(
                "process {} -> {} -> {}".format(
                    os.path.join(root, f), repr(chain), os.path.join(target_dir, chain.output_filename)
                )
            )
            if not args.dry_run:
                try:
                    with open(os.path.join(target_dir, chain.output_filename), "w") as outfile:
                        for line in chain.output:
                            outfile.write(line)
                except PassthroughException:
                    # the chain declined to transform this file; copy it through unchanged
                    shutil.copyfile(os.path.join(root, f), os.path.join(target_dir, chain.output_filename))

    return 0


if __name__ == "__main__":
    sys.exit(main())