Compare commits: ea8c4cc8c4 ... afdb00a4e7

4 commits: afdb00a4e7, d06c1ab792, e4bbe641b3, 26fc849ed5
Makefile: 54 lines changed
@@ -27,9 +27,14 @@ OUT_DIR = build
 # SOURCE FILES:
 # all SRC_FLS and all files (recursively) in the SRC_DIRS will be built
 # all files in PROJECT_DIR (not recursively) are source files
-SRC_DIRS = de en script style
+SRC_DIRS = de en script
 SRC_FLS =
 
+# CSS FILES:
+# directories which may contain sass and scss to compile sass to a correspondig css in OUT_DIR/CSS_DIR (also css, it will simply be copied)
+CSS_DIRS = style
+CSS_FILES =
+
 # SOURCE FILES:
 # all RESOURCE_FLS and all files in the RESOURCE_DIRS will be copied to OUT_DIR
 RESOURCE_DIRS = resources
@@ -49,11 +54,17 @@ LANGS = de en
 # PREPROCESSOR
 # path to of the files that should be included
 INCLUDE_DIR = include
+# additional search paths passed to sass compiler
+SASS_INCLUDE_DIRS = include/style
 
 
 # ADVANCED
 # the command to run the html preprocessor
 HTML_PP_CMD = python3 html-preprocessor --exit-on light
+# command to compile sass and scss files with
+# --indented is added for sass and --no-indented for scss
+# --source-maps-urls=absolute is appended for generating dependency files
+SASS_CMD = sass --color
 
 DEP_DIR = .dependencies
 
@@ -68,23 +79,27 @@ DEP_DIR = .dependencies
 # make everything relative to PROJECT_DIR
 _SRC_DIRS = $(addprefix $(PROJECT_DIR)/, $(SRC_DIRS))
 _SRC_FLS = $(addprefix $(PROJECT_DIR)/, $(SRC_FLS))
+_CSS_FLS = $(addprefix $(PROJECT_DIR)/, $(CSS_FLS))
+_CSS_DIRS = $(addprefix $(PROJECT_DIR)/, $(CSS_DIRS))
+_SASS_INCLUDE_DIRS = $(addprefix $(PROJECT_DIR)/, $(SASS_INCLUDE_DIRS))
 _RES_DIRS = $(addprefix $(PROJECT_DIR)/, $(RESOURCE_DIRS))
 _RES_FLS = $(addprefix $(PROJECT_DIR)/, $(RESOURCE_FLS))
 _COMMON_DIR = $(addprefix $(PROJECT_DIR)/, $(COMMON_DIR))
 _INCLUDE_DIR = $(addprefix $(PROJECT_DIR)/, $(INCLUDE_DIR))
 
 # NORMAL SRC
-# all SRC_DIRS + all subdirs of each srcdir
-_SRC_SUB_DIRS = $(foreach srcdir, $(_SRC_DIRS), $(shell find $(srcdir)/ -type d 2>/dev/null))
+# all SRC_DIRS + CSS_DIRS + all subdirs of each srcdir
+_SRC_SUB_DIRS = $(foreach srcdir, $(_SRC_DIRS) $(_CSS_DIRS), $(shell find $(srcdir)/ -type d 2>/dev/null))
 # add files in project dir
 _SRC_FLS += $(shell find $(PROJECT_DIR)/ -maxdepth 1 -type f)
 # add files src dirs, recursively
 _SRC_FLS += $(foreach srcdir, $(_SRC_DIRS), $(shell find $(srcdir)/ -type f 2>/dev/null))
-# OUT_DIRS = $(OUT_DIR) $(addprefix $(OUT_DIR)/, $(_SRC_SUB_DIRS))
+_CSS_FLS += $(foreach srcdir, $(_CSS_DIRS), $(shell find $(srcdir)/ -type f 2>/dev/null))
+
 OUT_DIRS = $(OUT_DIR)/ $(patsubst $(PROJECT_DIR)/%, $(OUT_DIR)/%, $(_SRC_SUB_DIRS))
-# path of the source files after being processed
-# OUT_FLS = $($(notdir _SRC_FLS):%=$(OUT_DIR)/%)
+# path of the (css/sass) source files after being processed
 OUT_FLS = $(patsubst $(PROJECT_DIR)/%, $(OUT_DIR)/%, $(_SRC_FLS))
+OUT_FLS += $(patsubst $(PROJECT_DIR)/%, $(OUT_DIR)/%, $(foreach cssfile, $(_CSS_FLS), $(shell echo $(cssfile) | sed 's/\.s[ac]ss$$/.css/')))
 
 # RESOURCES
 _RES_SUB_DIRS = $(foreach srcdir, $(_RES_DIRS), $(shell find $(srcdir)/ -type d 2>/dev/null))
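The new `OUT_FLS +=` line maps every collected css/sass/scss source under `PROJECT_DIR` to the `.css` file it will produce under `OUT_DIR` (that is what the `sed 's/\.s[ac]ss$/.css/'` call does). A rough Python sketch of the same mapping, with invented directory names purely for illustration:

```python
import re

# Rough Python equivalent of the OUT_FLS mapping above: PROJECT_DIR/... -> OUT_DIR/...,
# with the .sass/.scss suffix renamed to .css. Paths are made up for illustration.
PROJECT_DIR, OUT_DIR = "website", "build"

def out_file(src: str) -> str:
    rel = src.removeprefix(PROJECT_DIR + "/")
    rel = re.sub(r"\.s[ac]ss$", ".css", rel)
    return f"{OUT_DIR}/{rel}"

print(out_file("website/style/main.scss"))   # build/style/main.css
print(out_file("website/style/print.sass"))  # build/style/print.css
```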
@@ -108,11 +123,15 @@ _DEP_DIRS = $(sort $(patsubst $(OUT_DIR)/%, $(DEP_DIR)/%, $(OUT_DIRS) $(ML_OUT
 # needed for reading
 _DEP_FLS = $(shell find $(DEP_DIR) -type f -name '*.d' 2>/dev/null)
 
+# SASS, add load-paths
+_SASS_CMD = $(SASS_CMD) $(foreach includedir, $(_SASS_INCLUDE_DIRS), --load-path=$(includedir)) --source-map-urls=absolute
+
 # PRINTING
 FMT_VAR_SRC ="Variable '\e[1;34m%s\e[0m': \e[0;33m%s\e[0m\n"
 FMT_VAR_OUT ="Variable '\e[1;34m%s\e[0m': \e[0;35m%s\e[0m\n"
 FMT_DIR ="\e[1;34mMaking directory\e[0m: \e[0;35m%s\e[0m\n"
 FMT_OUT_HTML ="\e[1;34mBuilding html\e[0m \e[1;33m%s\e[0m at \e[1;35m%s\e[0m\n"
+FMT_OUT_CSS ="\e[1;34mBuilding css\e[0m \e[1;33m%s\e[0m at \e[1;35m%s\e[0m\n"
 FMT_OUT_OTHER ="\e[1;34mBuilding\e[0m: \e[1;33m%s\e[0m at \e[1;35m%s\e[0m\n"
 
 FMT_OUT_ML_HTML="\e[1;34mBuilding html\e[0m in lang \e[1;34m%s\e[0m: \e[1;33m%s\e[0m at \e[1;35m%s\e[0m\n"
@@ -140,6 +159,7 @@ print:
 	@printf $(FMT_VAR_OUT) "OUT_FLS" "$(OUT_FLS)"
 	@printf $(FMT_VAR_SRC) "_RES_FLS" "$(_RES_FLS)"
 	@printf $(FMT_VAR_OUT) "RES_OUT_FLS" "$(RES_OUT_FLS)"
+	@printf $(FMT_VAR_OUT) "_CSS_FLS" "$(_CSS_FLS)"
 ifdef COMMON_DIR
 	@printf $(FMT_VAR_SRC) "_ML_SRC_FLS" "$(_ML_SRC_FLS)"
 	@printf $(FMT_VAR_OUT) "ML_OUT_FLS" "$(ML_OUT_FLS)"
@@ -181,6 +201,21 @@ $(OUT_DIR)/%.html: $(PROJECT_DIR)/%.html | $(OUT_DIRS) $(_DEP_DIRS)
 	@#awk -i inplace '{FS="" sub(/<!--.*-->/,"")}1' $@
 	@#awk -i inplace '{if (NF != 0) print}' $@
 
+$(OUT_DIR)/%.css: $(PROJECT_DIR)/%.sass | $(OUT_DIRS) $(_DEP_DIRS)
+	@printf $(FMT_OUT_CSS) "$<" "$@";
+	@$(_SASS_CMD) --indented $< $@
+	@# generate a dependecy file from the source map and delete the map
+	@depfile=$(patsubst $(OUT_DIR)/%,$(DEP_DIR)/%,$@).d; echo -n "$@: " > "$$depfile"; \
+	jq -r '.sources | @sh' $@.map | tr -d \' | sed 's|file://||g' >> "$$depfile"; \
+	rm $@.map
+$(OUT_DIR)/%.css: $(PROJECT_DIR)/%.scss | $(OUT_DIRS) $(_DEP_DIRS)
+	@printf $(FMT_OUT_CSS) "$<" "$@";
+	@$(_SASS_CMD) --no-indented $< $@
+	@# generate a dependecy file from the source map and delete the map
+	@depfile=$(patsubst $(OUT_DIR)/%,$(DEP_DIR)/%,$@).d; echo -n "$@: " > "$$depfile"; \
+	jq -r '.sources | @sh' $@.map | tr -d \' | sed 's|file://||g' >> "$$depfile"; \
+	rm $@.map
+
 $(OUT_DIR)/%: $(PROJECT_DIR)/% | $(OUT_DIRS) $(RES_OUT_DIRS)
 	@printf $(FMT_OUT_OTHER) "$<" "$@"
 	@cp -r $< $@
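The new `.sass`/`.scss` rules compile with `$(_SASS_CMD)` and then turn the generated source map into a make dependency file: the `jq` pipeline pulls the `sources` array out of the map, strips the `file://` prefixes, and appends them after `$@: ` in the `.d` file. A small Python sketch of that transformation may make the intent clearer; the paths in the comment are invented for illustration only.

```python
import json

def sourcemap_to_depfile(css_path: str, map_path: str) -> str:
    """Rebuild what the jq pipeline above produces: 'out.css: src1 src2 ...'
    from the 'sources' array of a sass --source-map-urls=absolute map."""
    with open(map_path) as f:
        sources = json.load(f)["sources"]
    # strip the file:// prefix that absolute source-map URLs carry
    deps = [s.removeprefix("file://") for s in sources]
    return f"{css_path}: " + " ".join(deps) + "\n"

# hypothetical paths, for illustration only:
# print(sourcemap_to_depfile("build/style/main.css", "build/style/main.css.map"))
```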
@@ -196,8 +231,9 @@ stop:
 	killall nginx
 
 clean:
-	-rm $(OUT_FLS) $(ML_OUT_FLS) 2>/dev/null
-	-rm -r $(DEP_DIR) 2>/dev/null
+	-@rm $(OUT_FLS) $(ML_OUT_FLS) 2>/dev/null
+	-@rm -r $(DEP_DIR) 2>/dev/null
 
 cleaner:
-	-rm -r $(OUT_DIR)
+	-@rm -r $(OUT_DIR)
+	-@rm -r $(DEP_DIR) 2>/dev/null
README.md: 19 lines changed
@@ -42,19 +42,34 @@ refer to the article [on my website](https://quintern.xyz/en/software/buwuma.htm
 
 ## Commands
 ### include
-Include the content of a file at the position of the command.
+Include the content of a file (or only a specific section in that file) at the position of the command.
 
 **Synopsis**:
 `<!-- #include path/to-a-text-file.html -->`
+`<!-- #include path/to-a-text-file.html section_name -->`
 
 **Argument**:
-An absolute or relative path to a text file
+An absolute or relative path to a text file [ + section name ]
 
 **Return Value**:
 The content of the file, or `<!-- Could not include '{args}' -->` or an empty string if the file can not be opened.
 
 ---
 
+### section
+Start a section in a file. The section is only used by the `include` command to determine the start and end of a section.
+
+**Synopsis**:
+`<!-- #section section_name -->`
+
+**Argument**:
+Name of the section
+
+**Return Value**:
+Empty String
+
+---
+
 ### set
 Set the value of a variable
 
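To illustrate the contract the README describes: a section starts at its `<!-- #section name -->` comment and runs until the next `#section` comment (or the end of the file), and `<!-- #include file section_name -->` pulls in only that part. The sketch below is a simplified, self-contained illustration of that behaviour, not the preprocessor's implementation (which walks comments with its `HTMLParser`); the page content and section names are made up.

```python
import re

# Simplified illustration only: a section starts at "<!-- #section name -->" and
# ends at the next "#section" comment (or at end of file).
def extract_section(html: str, name: str) -> str | None:
    marker = re.compile(r"<!--\s*#section\s+(\S+)\s*-->")
    start = None
    for m in marker.finditer(html):
        if start is None and m.group(1) == name:
            start = m.end()
        elif start is not None:
            return html[start:m.start()]
    return html[start:] if start is not None else None

page = """<h1>Title</h1>
<!-- #section usage -->
<p>Only this paragraph belongs to the 'usage' section.</p>
<!-- #section troubleshooting -->
<p>Another section.</p>
"""
print(extract_section(page, "usage"))
```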
html-preprocessor
@@ -1,8 +1,10 @@
 #!/bin/python3
 import os
+from os import path
 import re
 from sys import argv
 from collections.abc import Callable
+import argparse
 
 """
 TODO:
@@ -211,24 +213,65 @@ They all need to return a string, which will be placed
 into the source file at the place where the command was.
 """
 def cmd_include(args: str, variables:dict[str, str]={}) -> str:
+    args = args.split(' ')
     pdebug(f"cmd_include: args='{args}', variables='{variables}'")
+    filename = args[0]
     content = ""
     try:
-        with open(args) as file:
+        with open(filename) as file:
             content = file.read()
-    except:
-        error(f"cmd_include: Could not open file '{args}'", level=error_levels["serious"], exit_code=exit_codes["FileNotFound"])
-        content = f"<!-- Could not include '{args}' -->"
-    if args.endswith(".md"):
+        if len(args) > 1: # if section was specified
+            target_section = args[1]
+            p = HTMLParser(content, {})
+            p.pos["start"] = p.pos["end"] = -1
+            while p.i < len(p): # at start of new line or end of comment
+                p.next_line()
+                ptrace(f"cmd_include: Processing at i={p.i} in line {pos2line(p.file, p.i)}")
+                # print(filename, p.i, pos2line(p.file, p.i))
+                # TODO: hangs here
+
+                if not p.find_comment_begin(): continue
+                if not p.find_comment_end(): continue
+
+                match = p.find_command()
+                if match:
+                    command = match.groups()[0]
+                    cmd_args = match.groups()[1].replace('\t', ' ').strip(' ')
+                    pdebug(f"cmd_include Found command '{command}' with args '{cmd_args}'")
+                    if command == "section":
+                        if cmd_args.startswith(target_section):
+                            print(p.pos)
+                            p.pos["start"] = max(p.pos["cmt_end"] + len(COMMENT_END), p.pos["line_end"] + 1)
+                            print(f">{content[p.pos['start']:p.pos['start']+1]}<")
+                        elif p.pos["start"] >= 0: #end
+                            p.pos["end"] = max(p.pos["cmt_end"] + len(COMMENT_END), p.pos["line_end"] + 1)
+                            # p.pos["end"] = p.pos["cmt_beg"]
+                    p.replace_command_with_output("")
+                    p.command_end()
+                if p.pos["start"] >= 0 and p.pos["end"] > 0: break
+            if p.pos["start"] >= 0:
+                if p.pos["end"] < 0:
+                    p.pos["end"] = len(p)
+                content = p[p.pos["start"]:p.pos["end"]]
+                print(content)
+            else:
+                error(f"cmd_include: Could not find section {target_section} in file {filename}")
+    except FileNotFoundError:
+        error(f"cmd_include: Could not open file '{filename}'", level=error_levels["serious"], exit_code=exit_codes["FileNotFound"])
+        content = f"<!-- Could not include '{filename}' -->"
+    if filename.endswith(".md"):
         try:
             from markdown import markdown
             content = markdown(content, output_format="xhtml")
         except:
-            error(f"cmd_include: Could convert markdown to html for file '{args}'. Is python-markdown installed?", level=error_levels["critical"], exit_code=exit_codes["MarkdownConversionError"])
-            content = f"<!-- Could not convert to html: '{args}' -->"
-    glob_dependcies.append(args)
+            error(f"cmd_include: Could convert markdown to html for file '{filename}'. Is python-markdown installed?", level=error_levels["critical"], exit_code=exit_codes["MarkdownConversionError"])
+            content = f"<!-- Could not convert to html: '{filename}' -->"
+    glob_dependcies.append(filename)
     return content
 
+def cmd_section(args: str, variables:dict[str, str]={}) -> str:
+    return ""
+
 def cmd_return(args: str, variables:dict[str, str]={}) -> str:
     # re_set_map = r"([a-zA-Z0-9_]+)\?\{(([a-zA-Z0-9_]+:.+,)*([a-zA-Z0-9_]+:.+))\}"
     # <!-- #set section=lang?{*:Fallback,de:Abschnitt,en:Section} -->
@@ -293,6 +336,7 @@ def cmd_warning(args: str, variables:dict[str, str]={}) -> str:
 
 command2function:dict[str, Callable[[str, dict[str,str]], str]] = {
     "include":  cmd_include,
+    "section":  cmd_section,
     "set":      cmd_set,
     "return":   cmd_return,
     "default":  cmd_default,
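The `command2function` table is what maps a `#command` name found in a comment to its handler; every handler shares the signature `(args: str, variables: dict[str, str]) -> str`, which is why the new no-op `cmd_section` slots in with a one-line registration. A minimal, self-contained sketch of this dispatch pattern (the command names and handlers here are stand-ins, not the preprocessor's real `cmd_*` functions):

```python
from collections.abc import Callable

# Stand-in handlers demonstrating the dispatch pattern used above.
def cmd_upper(args: str, variables: dict[str, str] = {}) -> str:
    return args.upper()

def cmd_get(args: str, variables: dict[str, str] = {}) -> str:
    return variables.get(args.strip(), "")

command2function: dict[str, Callable[[str, dict[str, str]], str]] = {
    "upper": cmd_upper,
    "get": cmd_get,
}

variables = {"lang": "de"}
for command, args in [("upper", "hello"), ("get", "lang")]:
    # mirrors the call site in parse_file: command2function[command](args, variables)
    print(command2function[command](args, variables))
```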
@@ -324,7 +368,7 @@ class Parser():
         self.file = self.file[:start] + self.file[stop:]
         for k,pos in self.pos.items():
             if pos >= stop: self.pos[k] -= delete_length
-            elif pos > start and not k in ignore_bounds: error(f"Position {k}={pos} within deleted range [{start},{stop})", level=1)
+            elif pos > start and not k in ignore_bounds: error(f"Parser.remove: Position {k}={pos} within deleted range [{start},{stop})", level=1)
 
     def replace(self, start, stop, replacement):
         assert(stop >= start)
@@ -334,7 +378,7 @@ class Parser():
         length_difference = stop - start - len(replacement)
         for k,pos in self.pos.items():
             if pos >= stop: self.pos[k] -= length_difference
-            elif pos > start: error(f"Position {k}={pos} within replaced range [{start},{stop})", level=1)
+            elif pos > start: error(f"Parser.replace: Position {k}={pos} within replaced range [{start},{stop})", level=1)
 
     def __getitem__(self, key):
         return self.file[key]
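These two hunks only prefix the bounds-check messages with the method name, but they touch the core bookkeeping of `Parser`: whenever text is removed or replaced, every stored position behind the edited range is shifted so that `self.pos` keeps pointing at the same characters. A tiny self-contained sketch of that offset arithmetic (assumed semantics, not the class itself):

```python
# Sketch of the bookkeeping idea behind Parser.remove/replace: positions at or
# after the edited range shift by the change in length.
text = "abc<!-- x -->def"
pos = {"cmt_beg": 3, "after": 13}          # 13 = index of 'd'
start, stop, replacement = 3, 13, ""       # drop the comment
length_difference = (stop - start) - len(replacement)
text = text[:start] + replacement + text[stop:]
pos = {k: (v - length_difference if v >= stop else v) for k, v in pos.items()}
print(text, pos)                           # abcdef {'cmt_beg': 3, 'after': 3}
```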
@@ -343,76 +387,147 @@ class Parser():
         return len(self.file)
 
 
+class HTMLParser(Parser):
+    """
+    Parse a html file
+    Each function operates the positon indicated by i until the position "line_end"
+    """
+    def __init__(self, file, variables:dict[str, str], remove_comments=False):
+        super().__init__(file)
+        self.i = 0
+        self.variables = variables
+        self.pos["cmt_beg"] = -1
+        self.pos["cmt_end"] = -1
+        self.pos["cmd_beg"] = -1
+        self.pos["cmd_end"] = -1
+        self.pos["line_end"] = -1
+        self.pos["conditional_block_beg"] = -1 # char pos of the first char of the last block, if waiting for elif, else or endif
+        self.state["cmd_in_cmt"] = False
+        self.state["last_condition"] = False # if the last if condition was true
+        self.remove_comments = remove_comments
+
-def parse_file(_file:str, variables:dict[str,str]):
-    p = Parser(_file)
-    sidenav_include_pos = -1
-    p.pos["cmt_beg"] = -1
-    p.pos["cmt_end"] = -1
-    p.pos["cmd_beg"] = -1
-    p.pos["cmdend"] = -1
-    p.pos["conditional_block_beg"] = -1 # char pos of the first char of the last block, if waiting for elif, else or endif
-    p.state["cmd_in_cmt"] = False
-    p.state["last_condition"] = False # if the last if condition was true
-    i = 0
-    # if file.count(COMMENT_BEGIN) != file.count(COMMENT_END):
+    def next_line(self):
+        """update i and line_end"""
+        self.pos["line_end"] = self.file.find('\n', self.i+1)
+        if self.pos["line_end"] < 0: self.pos["line_end"] = len(self)
 
-    while i < len(p): # at start of new line or end of comment
-        ptrace(f"Processing at i={i} in line {pos2line(p.file, i)}")
+    def use_variables(self):
+        """replace variable usages in the current line"""
+        self.replace(self.i, self.pos["line_end"], substitute_variables(self[self.i:self.pos["line_end"]], self.variables))
+        ptrace("> Line after variable substitution:", self.file[self.i:self.pos["line_end"]])
 
-        # replace variable usages in the current line
-        p.pos["line_end"] = p.file.find('\n', i)
-        if p.pos["line_end"] < 0: p.pos["line_end"] = len(p)
-        p.replace(i, p.pos["line_end"], replace_variables(p[i:p.pos["line_end"]], variables))
-        ptrace("> Line after replacing variables:", p.file[i:p.pos["line_end"]])
-
-        # check if heading for sidenav in line
-        match = re.search(re_sidenav_heading, p[i:p.pos["line_end"]])
+    def add_sidenav_headings(self):
+        """check if heading for sidenav in line"""
+        match = re.search(re_sidenav_heading, self[self.i:self.pos["line_end"]])
         if match:
             Sidenav.addEntry(match.groups()[1], f"#{match.groups()[0]}")
             ptrace("> Found heading with id:", match.groups())
 
-        # look for comment
-        if p.pos["cmt_beg"] < 0: # if not in comment, find next comment
-            p.pos["cmt_beg"] = p.file.find(COMMENT_BEGIN, i, p.pos["line_end"])
+    def find_comment_begin(self) -> bool:
+        """
+        find the beginning of a comment in the current line
+        if comment begin was found, jump into the comment, return True
+        """
+        # look for comment begin
+        if self.pos["cmt_beg"] < 0: # if not in comment, find next comment
+            self.pos["cmt_beg"] = self.file.find(COMMENT_BEGIN, self.i, self.pos["line_end"])
             # ptrace(f"i={i}, line_end={line_end}, comment_begin={comment_begin}")
-            if p.pos["cmt_beg"] < 0:
-                i = p.pos["line_end"] + 1
-                continue
+            if self.pos["cmt_beg"] < 0:
+                self.i = self.pos["line_end"] + 1
+                return False
             else:
                 # jump to comment_begin
-                old_i = i
-                i = p.pos["cmt_beg"] + len(COMMENT_BEGIN) # after comment begin
-                ptrace(f"> Found comment begin, jumping from pos {old_i} to {i}")
+                old_i = self.i
+                self.i = self.pos["cmt_beg"] + len(COMMENT_BEGIN) # after comment begin
+                ptrace(f"> Found comment begin, jumping from pos {old_i} to {self.i}")
+                return True
+        return True # still in previous comment
+
+
+    def find_comment_end(self):
+        """
+        call afterfind_comment_begin returns true to update the cmt_end
+        call continue when returning false
+        """
         # in comment, i at the character after COMMENT_BEGIN
-        p.pos["cmt_end"] = p.file.find(COMMENT_END, i) #, p.pos["line_end"])
+        self.pos["cmt_end"] = self.file.find(COMMENT_END, self.i) #, self.pos["line_end"])
         # sanity checks
-        if p.pos["cmt_end"] < 0:
-            error(f"Comment starting in line {pos2line(p.file, p.pos['cmt_beg'])} is never ended.", level=error_levels["serious"])
+        if self.pos["cmt_end"] < 0:
+            error(f"Comment starting in line {pos2line(self.file, self.pos['cmt_beg'])} is never ended.", level=error_levels["serious"])
+            return False
         else:
-            tmp_next_begin = p.file.find(COMMENT_BEGIN, i)
-            if 0 < tmp_next_begin and tmp_next_begin < p.pos["cmt_end"]:
-                error(f"Found next comment begin before the comment starting in line {pos2line(p.file, p.pos['cmt_beg'])} is ended! Skipping comment. Comment without proper closing tags: '{p.file[i:p.pos['line_end']]}'", level=error_levels["light"])
-                p.pos["cmt_beg"] = -1
-                continue
+            tmp_next_begin = self.file.find(COMMENT_BEGIN, self.i)
+            if 0 < tmp_next_begin and tmp_next_begin < self.pos["cmt_end"]:
+                error(f"Found next comment begin before the comment starting in line {pos2line(self.file, self.pos['cmt_beg'])} is ended! Skipping comment. Comment without proper closing tags: '{self.file[self.i:self.pos['line_end']]}'", level=error_levels["light"])
+                self.pos["cmt_beg"] = -1
+                return False
+        return True
+
+    def find_command(self):
         # either at newline (if in multiline comment) or at comment end
-        p.pos["cmd_beg"] = i
-        p.pos["cmd_end"] = min(p.pos["line_end"], p.pos["cmt_end"])
-        assert p.pos["cmd_end"] >= i, f"cmd_end={p.pos['cmd_end']}, i={i}, line_end={p.pos['line_end']}, cmt_end={p.pos['cmt_end']}"
-        ptrace(f"> Possible command end: {p.pos['cmd_end']}, possible command: '{p[i:p.pos['cmd_end']]}'")
+        self.pos["cmd_beg"] = self.i
+        self.pos["cmd_end"] = min(self.pos["line_end"], self.pos["cmt_end"])
+        assert self.pos["cmd_end"] >= self.i, f"cmd_end={self.pos['cmd_end']}, i={self.i}, line_end={self.pos['line_end']}, cmt_end={self.pos['cmt_end']}"
+        ptrace(f"> Possible command end: {self.pos['cmd_end']}, possible command: '{self[self.i:self.pos['cmd_end']]}'")
 
         # find commands
-        match = re.fullmatch(re_preprocessor_command, p[i:p.pos["cmd_end"]].strip(" "))
-        if match: # command comment
-            p.state["cmd_in_cmt"] = True
+        match = re.fullmatch(re_preprocessor_command, self[self.i:self.pos["cmd_end"]].strip(" "))
+        if match:
+            self.state["cmd_in_cmt"] = True
+        return match
+
+    def replace_command_with_output(self, command_output):
+        self.replace(self.i, self.pos["cmd_end"], command_output)
+        ptrace(f"> After insertion, the line is now '{self.file[self.i:self.pos['line_end']]}'")
+
+    def command_end(self):
+        if self.pos["cmd_end"] == self.pos["cmt_end"]: # reached end of comment
+            if self.state["cmd_in_cmt"] or self.remove_comments:
+                # remove comment tags if a command was found
+                remove_newline = 0
+                if self[self.pos["cmt_beg"]-1] == '\n' and self[self.pos["cmt_end"]+len(COMMENT_END)] == '\n': # if the comment consumes the whole line, remove the entire line
+                    remove_newline = 1
+                # remove comment if done
+                ptrace(f"Deleting opening comment tags")
+                self.remove(self.pos["cmt_beg"], self.pos["cmt_beg"] + len(COMMENT_BEGIN))
+                self.remove(self.pos["cmt_end"], self.pos["cmt_end"] + len(COMMENT_END) + remove_newline, ignore_bounds=["cmt_end", "cmd_end", "line_end"])
+                # process the line again, because a command might have inserted new comments
+                self.i -= len(COMMENT_BEGIN)
+                self.state["cmd_in_cmt"] = False
+                self.pos["cmt_beg"] = -1
+                self.pos["cmt_end"] = -1
+                self.pos["cmd_end"] = -1
+            else: # multiline comment
+                self.pos["cmt_end"] = -1
+                self.pos["cmd_end"] = -1
+                self.i = self.pos["line_end"] + 1
+                ptrace(f"> Multiline comment, jumping to next line.")
+            # i = possible_command_end commented, because if something containing new commands is inserted we need to parse that as well
+
+
+def parse_file(_file:str, variables:dict[str,str], remove_comments):
+    p = HTMLParser(_file, variables, remove_comments=remove_comments)
+    sidenav_include_pos = -1
+
+    while p.i < len(p): # at start of new line or end of comment
+        p.next_line()
+        ptrace(f"Processing at i={p.i} in line {pos2line(p.file, p.i)}")
+
+        p.use_variables()
+        p.add_sidenav_headings()
+
+        if not p.find_comment_begin(): continue
+
+        if not p.find_comment_end(): continue
+
+        match = p.find_command()
+        if match:
             command = match.groups()[0]
             args = match.groups()[1].replace('\t', ' ').strip(' ')
             pdebug(f"> Found command '{command}' with args '{args}'")
             # delete from previous block if
             if command in ["elif", "else", "endif"]:
-                if p.pos["conditional_block_beg"] < 0: error(f"Misplaced '{command}' in line {pos2line(p.file, i)}")
+                if p.pos["conditional_block_beg"] < 0: error(f"Misplaced '{command}' in line {pos2line(p.file, p.i)}")
                 if p.state["last_condition"]:
                     # delete block from here at next endif
                     p.state["last_condition"] = False
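The refactor moves the per-line scanning out of one long `parse_file` loop into small `HTMLParser` methods that both `parse_file` and the new section-aware `cmd_include` can drive: `next_line()` fixes the line boundary, `find_comment_begin()`/`find_comment_end()` locate `COMMENT_BEGIN`/`COMMENT_END`, `find_command()` matches the command pattern inside the comment, and `command_end()` strips the comment tags and rewinds `i` so freshly inserted text is scanned again. A self-contained sketch of that scanning idea (simplified; the comment markers mirror the names used above, but the command regex here is an assumption, not the script's `re_preprocessor_command`):

```python
import re

# Simplified sketch of the comment-scanning loop the HTMLParser methods implement.
COMMENT_BEGIN, COMMENT_END = "<!--", "-->"
re_command = re.compile(r"#(\w+)\s*(.*)")   # assumed shape of a "#command args" match

def scan_commands(text: str) -> list[tuple[str, str]]:
    found, i = [], 0
    while True:
        beg = text.find(COMMENT_BEGIN, i)
        if beg < 0: break
        end = text.find(COMMENT_END, beg)
        if end < 0: break                   # unterminated comment
        m = re_command.fullmatch(text[beg + len(COMMENT_BEGIN):end].strip())
        if m: found.append((m.group(1), m.group(2).strip()))
        i = end + len(COMMENT_END)
    return found

print(scan_commands("<p>x</p> <!-- #include a.html --> <!-- plain comment -->"))
# [('include', 'a.html')]
```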
@@ -420,28 +535,28 @@ def parse_file(_file:str, variables:dict[str,str]):
                     # delete block from last condition statement
                     ptrace(f"> Deleting block from last condition")
                     p.remove(p.pos["conditional_block_beg"], p.pos["cmt_beg"])
-                    i = p.pos["cmd_beg"]
-                    p.pos["conditional_block_beg"] = i
+                    p.i = p.pos["cmd_beg"]
+                    p.pos["conditional_block_beg"] = p.i
                 if command == "endif":
                     p.pos["conditional_block_beg"] = -1
                     p.state["last_condition"] = False
                     p.state["any_condition"] = False
             # evaluate ifs
             if command == "if":
-                p.pos["conditional_block_beg"] = i
+                p.pos["conditional_block_beg"] = p.i
                 p.state["last_condition"] = evaluate_condition(args)
                 p.state["any_condition"] = p.state["last_condition"]
                 pdebug(f"> Command {command} condition evaluated to {p.state['last_condition']}")
                 cmd_output = ""
             elif command =="elif":
-                p.pos["conditional_block_beg"] = i
+                p.pos["conditional_block_beg"] = p.i
                 p.state["last_condition"] = evaluate_condition(args) if not p.state["any_condition"] else False
                 if p.state["last_condition"]:
                     p.state["any_condition"] = True
                 pdebug(f"> Command {command} condition evaluated to {p.state['last_condition']}")
                 cmd_output = ""
             elif command == "else":
-                p.pos["conditional_block_beg"] = i
+                p.pos["conditional_block_beg"] = p.i
                 p.state["last_condition"] = True if not p.state["any_condition"] else False
                 cmd_output = ""
             elif p.pos["conditional_block_beg"] < 0 or p.state["last_condition"]:
@@ -451,38 +566,15 @@ def parse_file(_file:str, variables:dict[str,str]):
             elif command == "endif":
                 cmd_output = ""
             elif command not in command2function:
-                error(f"Invalid command in line {pos2line(p.file, i)}: {command}", level=error_levels["light"])
+                error(f"Invalid command in line {pos2line(p.file, p.i)}: {command}", level=error_levels["light"])
                 cmd_output = ""
             else:
                 cmd_output = command2function[command](args, variables)
         else:
             cmd_output = ""
-        p.replace(i, p.pos["cmd_end"], cmd_output)
-        ptrace(f"> After command, the line is now '{p.file[i:p.pos['line_end']]}'")
+        p.replace_command_with_output(cmd_output)
 
-        if p.pos["cmd_end"] == p.pos["cmt_end"]: # reached end of comment
-            if p.state["cmd_in_cmt"]:
-                # remove comment tags if a command was found
-                remove_newline = 0
-                if p[p.pos["cmt_beg"]-1] == '\n' and p[p.pos["cmt_end"]+len(COMMENT_END)] == '\n': # if the comment consumes the whole line, remove the entire line
-                    remove_newline = 1
-                # remove comment if done
-                ptrace(f"Deleting opening comment tags")
-                p.remove(p.pos["cmt_beg"], p.pos["cmt_beg"] + len(COMMENT_BEGIN))
-                p.remove(p.pos["cmt_end"], p.pos["cmt_end"] + len(COMMENT_END) + remove_newline, ignore_bounds=["cmt_end", "cmd_end", "line_end"])
-                # process the line again, because a command might have inserted new comments
-                i -= len(COMMENT_BEGIN)
-                p.state["cmd_in_cmt"] = False
-                p.pos["cmt_beg"] = -1
-                p.pos["cmt_end"] = -1
-                p.pos["cmd_end"] = -1
-            else: # multiline comment
-                p.pos["cmt_end"] = -1
-                p.pos["cmd_end"] = -1
-                i = p.pos["line_end"] + 1
-                ptrace(f"> Multiline comment, jumping to next line.")
-        # i = possible_command_end commented, because if something containing new commands is inserted we need to parse that as well
+        p.command_end()
 
     if sidenav_include_pos >= 0:
         return p.file[:sidenav_include_pos] + Sidenav.generate() + p.file[sidenav_include_pos:]
@@ -490,7 +582,7 @@ def parse_file(_file:str, variables:dict[str,str]):
     return p.file
 
 
-def replace_variables(html:str, variables:dict[str, str]):
+def substitute_variables(html:str, variables:dict[str, str]):
     """
     find usage of variables and replace them with their value
     """
@@ -502,6 +594,8 @@ def replace_variables(html:str, variables:dict[str, str]):
         pdebug(f"> Found variable usage {match.groups()[0]}, match from {match.start()} to {match.end()}")
         value = ""
         if match.groups()[0] in variables: value = variables[match.groups()[0]]
+        else:
+            pdebug(f"Variable {match.groups()[0]} is used but not defined")
         for _ in range(match.start(), match.end()):
             html_list.pop(match.start())
         html_list.insert(match.start(), value.strip(" "))
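`substitute_variables` (renamed from `replace_variables`) walks regex matches of variable usages and splices the value into a character list; with this hunk, an undefined variable at least produces a debug message instead of silently becoming an empty string. The exact usage syntax is defined by the script's variable-use regex, which this diff does not show, so the sketch below assumes a `#$(name)` form purely for illustration and condenses the splice into `re.sub`:

```python
import re

# Assumed variable-usage syntax "#$(name)" -- the real pattern lives elsewhere
# in the script and is not part of this diff.
re_variable_use = re.compile(r"#\$\(([a-zA-Z0-9_]+)\)")

def substitute_variables(html: str, variables: dict[str, str]) -> str:
    def lookup(match: re.Match) -> str:
        name = match.group(1)
        if name not in variables:
            print(f"Variable {name} is used but not defined")  # stand-in for pdebug
        return variables.get(name, "").strip(" ")
    return re_variable_use.sub(lookup, html)

print(substitute_variables("<h1>#$(title)</h1> #$(missing)", {"title": "Home"}))
# <h1>Home</h1>
```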
@@ -510,100 +604,70 @@ def replace_variables(html:str, variables:dict[str, str]):
 """
 ************************************************************ COMMAND LINE ************************************************************
 """
-def missing_arg_val(arg):
-    print("Missing argument for", arg)
-    exit(1)
-
-def missing_arg(arg):
-    print("Missing ", arg)
-    exit(1)
-
-def help():
-    helpstring = """Synopsis:
-    Inject <inject-file> into <target-file>:
-        python3 html-inect.py --input <input-file> --output <output-file> [OPTIONS]
-    \nCommand line options:
-    --input <file>              path to the input file
-    --output <file>             output to this file instead of overwriting target
-    --inplace                   edit target file in place
-    --var <varname>=<value>     set the value of a variable. Can be used multiple times
-    --output-deps <file>        output a Makefile listing all dependencies
-    --help                      show this
-    --exit-on <errorlevel>      where errorlevel is 'light', 'serious' or 'critical'
-    """
-    print(helpstring)
-
 if __name__ == "__main__":
+    parser = argparse.ArgumentParser(prog="bUwUma html preprocessor")
+    parser.add_argument("--input", action="store", help="path to the input file", required=True)
+    parser.add_argument("--output", action="store", help="output to this file", default="")
+    parser.add_argument("--inplace", action="store_true", help="overwrite input file")
+    parser.add_argument("--var", action="append", help="set a variable --var varname=value", default=[])
+    parser.add_argument("--output-deps", action="store", help="output a Makefile listing all dependencies", default="")
+    parser.add_argument("--exit-on", action="store", help="exit when an error of the given severity occures", choices=["light", "serious", "critical"], default="serious")
+    parser.add_argument("--debug", action="store_true", help="be more verbose", default=False)
+    parser.add_argument("--trace", action="store_true", help="be extremly verbose", default=False)
+    parser.add_argument("--preserve-comments", action="store_true", help="do not remove normal html comments", default=False)
     variables:dict[str, str] = {}
-    # parse args
-    target_path = ""
-    output_path = ""
-    dep_output_path = ""
-    gen_sidenav = False
-    inplace = False
-    i = 1
-    while i in range(1, len(argv)):
-        if argv[i] == "--input":
-            if len(argv) > i + 1: target_path = argv[i+1].strip(" ")
-            else: missing_arg_val(argv[i])
-            i += 1
-        elif argv[i] == "--output":
-            if len(argv) > i + 1: output_path = argv[i+1].strip(" ")
-            else: missing_arg_val(argv[i])
-            i += 1
-        elif argv[i] == "--output-deps":
-            if len(argv) > i + 1: dep_output_path = argv[i+1].strip(" ")
-            else: missing_arg_val(argv[i])
-            i += 1
-        elif argv[i] == "--exit-on":
-            if argv[i+1].strip(" ") in error_levels.keys():
-                if len(argv) > i + 1: exit_on_error_level = error_levels[argv[i+1].strip(" ")]
-                else: missing_arg_val(argv[i])
-            else:
-                error(f"Invalid argument for --exit-on: {argv[i+1]}. Valid are {error_levels.keys()}")
-            i += 1
-        elif argv[i] == "--var":
-            if len(argv) > i + 1:
-                sep = argv[i+1].find('=')
-                if sep > 0 and sep < len(argv[i+1]):
-                    variables[argv[i+1][:sep].strip(" ")] = argv[i+1][sep+1:].strip(" ")
-                else: missing_arg_val(argv[i])
-            i += 1
-        elif argv[i] == "--inplace":
-            inplace = True
-        elif argv[i] == "--help":
-            help()
-            exit(0)
-        else:
-            error(f"Invalid argument: {argv[i]}")
-        i += 1
+    args = parser.parse_args()
+
+    for var in args.var:
+        sep = var.find('=')
+        if sep > 0 and sep < len(var) - 1:
+            variables[var[:sep].strip(" ")] = var[sep+1:].strip(" ")
+        else:
+            parser.error(f"Invalid argument: --var '{var}'\n\tUsage: --var <varname>=<value>")
+    args.input = args.input.strip(" ")
+    args.output = args.output.strip(" ")
+    args.output_deps = args.output_deps.strip(" ")
+    TRACE = args.trace
+    if args.trace: args.debug = True
+    DEBUG = args.debug
+
     # sanity checks
-    if not target_path: missing_arg("--input")
-    if not os.path.isfile(target_path): error(f"Invalid target: {target_path} (does not exist)")
-    if inplace: output_path = target_path
-    if not output_path:
-        print("Missing output path, just printing to stdout. Use --output or --inplace to save the result.")
+    if not path.isfile(args.input):
+        parser.error(f"Invalid input file:: {args.input}")
+    if args.output:
+        if not path.isdir(path.dirname(args.output)):
+            parser.error(f"Invalid path to output file - directory does not exist: '{path.dirname(args.output)}'")
+    elif args.inplace:
+        args.output = args.input
+    if args.inplace and args.output:
+        parser.error(f"Only one of --output or --inplace mut be given")
+    if args.output_deps:
+        if not path.isdir(path.dirname(args.output_deps)):
+            parser.error(f"Invalid path to dependency file - directory does not exist: '{path.dirname(args.output_deps)}'")
+        if not args.output:
+            parser.error(f"--output-deps requires either --output <file> our --inplace")
 
     # get html
-    with open(target_path, "r") as file:
+    with open(args.input, "r") as file:
         target_html = file.read()
 
-    output_html = parse_file(target_html, variables)
+    output_html = parse_file(target_html, variables, not args.preserve_comments)
 
     # pdebug(f"Output: {output_html}")
 
     # save
-    if output_path:
-        with open(output_path, "w") as file:
+    if args.output:
+        with open(args.output, "w") as file:
             file.write(output_html)
     else:
         print(output_html)
 
-    if dep_output_path:
-        if output_path != target_path:
-            glob_dependcies.append(target_path)
-        depfile = generate_dependecy_file(output_path, glob_dependcies)
-        pdebug(f"Writing dependency file to {os.path.abspath(dep_output_path)}: {depfile}")
-        with open(dep_output_path, "w") as file:
+    if args.output_deps:
+        if args.output != args.input:
+            glob_dependcies.append(args.input)
+        depfile = generate_dependecy_file(args.output, glob_dependcies)
+        pdebug(f"Writing dependency file to {os.path.abspath(args.output_deps)}: {depfile}")
+        with open(args.output_deps, "w") as file:
             file.write(depfile)
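With the switch to `argparse`, the hand-rolled `--help`/`missing_arg` plumbing goes away and option validation is delegated to the parser; the existing flag names are unchanged (so the Makefile's `HTML_PP_CMD` invocation should keep working), while `--debug`, `--trace` and `--preserve-comments` are new. A stand-alone sketch of how the new `--var` handling behaves, re-creating just that fragment with a throwaway parser rather than the script itself:

```python
import argparse

# Stand-alone sketch of the --var handling shown above (names reused for clarity).
parser = argparse.ArgumentParser(prog="bUwUma html preprocessor")
parser.add_argument("--var", action="append", default=[],
                    help="set a variable --var varname=value")
args = parser.parse_args(["--var", "lang=de", "--var", "title=Home"])

variables: dict[str, str] = {}
for var in args.var:
    sep = var.find('=')
    if sep > 0 and sep < len(var) - 1:
        variables[var[:sep].strip(" ")] = var[sep + 1:].strip(" ")
    else:
        parser.error(f"Invalid argument: --var '{var}'")

print(variables)   # {'lang': 'de', 'title': 'Home'}
```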