Compare commits: 48f13f97a3...main

14 Commits:

- 6f2dc19d07
- 49a522bf15
- b332ed833e
- 7525b5847a
- 778fe4855f
- 64a18bb358
- 612532576b
- 0338c7fffe
- affe19378b
- 0e515abaec
- 34f0d4bb8e
- db627f7255
- 50d0bbeedf
- d0ae5d79c9
.gitignore (vendored)

```diff
@@ -1,2 +1,4 @@
 /.mypy_cache/
 /.vscode/
+
+*.pyc
```
```diff
@@ -20,7 +20,7 @@ RUN python3 -m compileall -q -j 2 /usr/local/lib/j2cfg/
 RUN libpython="${PYTHON_SITE_PACKAGES%/*}" ; \
 find "${libpython}/" -mindepth 1 -maxdepth 1 -printf '%P\0' \
 | sed -zEn \
--e '/^(collections|concurrent|encodings|importlib|json|logging|multiprocessing|re|urllib)$/p' \
+-e '/^(collections|concurrent|encodings|importlib|json|logging|multiprocessing|re|tomllib|urllib)$/p' \
 | sort -zV \
 | env -C "${libpython}" xargs -0r \
 python3 -m compileall -q -j 2 ; \
```
```diff
@@ -33,9 +33,7 @@ RUN libpython="${PYTHON_SITE_PACKAGES%/*}" ; \
 python3 -m compileall -q -j 2

 ## Python cache warmup
-RUN j2cfg-single /usr/local/lib/j2cfg/j2cfg/test.j2 /tmp/test ; \
-cat /tmp/test ; echo ; echo ; \
-rm -f /tmp/test
+RUN env -C /usr/local/lib/j2cfg/j2cfg j2cfg-single test.j2 -

 WORKDIR /pycache
 RUN find /usr/local/ -type f -name '*.py[co]' -printf '%P\0' \
```
```diff
@@ -1,5 +1,5 @@
-# FROM docker.io/debian:bookworm-slim as base-upstream
-ARG PYTHONTAG=3.12.11-slim-bookworm
+# FROM docker.io/debian:trixie-slim as base-upstream
+ARG PYTHONTAG=3.12.11-slim-trixie
 FROM docker.io/python:${PYTHONTAG} AS base-upstream

 FROM base-upstream AS base-intermediate
```
```diff
@@ -19,7 +19,6 @@ ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin \
 PYTHONUNBUFFERED=1 \
 PYTHONDONTWRITEBYTECODE=1

-COPY /apt/prefs.backports /etc/apt/preferences.d/backports
 COPY /apt/sources.debian /etc/apt/sources.list.d/debian.sources

 ## prevent services from auto-starting, part 1
```
```diff
@@ -54,13 +53,13 @@ RUN apt-env.sh apt-get update ; \
 jdupes \
 jq \
 libcap2-bin \
-libjemalloc2 \
 logrotate \
 netbase \
 netcat-openbsd \
 openssl \
 procps \
 psmisc \
+systemd-standalone-sysusers \
 tzdata \
 zstd \
 ; \
```
```diff
@@ -273,8 +272,8 @@ RUN env -C /root rm -f .bash_history .python_history .wget-hsts
 FROM base-intermediate AS certs
 SHELL [ "/bin/sh", "-ec" ]

-## "2025.04.26"
-ENV CERTIFI_COMMIT=275c9eb55733a464589c15fb4566fddd4598e5b2
+## "2025.08.03"
+ENV CERTIFI_COMMIT=a97d9ad8f87c382378dddc0b0b33b9770932404e

 # ARG CERTIFI_BASE_URI='https://raw.githubusercontent.com/certifi/python-certifi'
```
```diff
@@ -1,4 +1,3 @@
-{%- import 'snip/log.j2mod' as ngx_log -%}
-
-{# {{ ngx_log.error_log(dest='error.log', level=env.NGX_LOGLEVEL) }} #}
-{{ ngx_log.error_log(level=env.NGX_LOGLEVEL) }}
+{%- import 'snip/log.j2mod' as _log -%}
+{#- {{ _log.error_log(dest='error.log', level=env.NGX_LOGLEVEL) }} -#}
+{{ _log.error_log(level=env.NGX_LOGLEVEL) }}
```
```diff
@@ -1,4 +1,3 @@
-{%- import 'snip/log.j2mod' as ngx_log -%}
-
-{# {{ ngx_log.access_log(dest='access.log', format='main') }} #}
-{{ ngx_log.access_log(format='main') }}
+{%- import 'snip/log.j2mod' as _log -%}
+{#- {{ _log.access_log(dest='access.log', format='main') }} -#}
+{{ _log.access_log(format='main') }}
```
```diff
@@ -1 +0,0 @@
-include snip/http-alt-svc;
```
```diff
@@ -1,4 +1,6 @@
 subrequest_output_buffer_size 16k;
 client_body_buffer_size 16k;
+client_max_body_size 64k;
+
 client_header_buffer_size 4k;
 large_client_header_buffers 8 16k;
```
angie/autoconf/http-realip.conf.j2 (new file)

```diff
@@ -0,0 +1,5 @@
+{%- set hdr = j2cfg.headers.real_ip_from or 'off' -%}
+{%- if hdr != 'off' %}
+real_ip_header {{ hdr | http_header }};
+{% include 'realip-set-from.j2inc' %}
+{%- endif %}
```
```diff
@@ -17,8 +17,6 @@ map $remote_addr
 map $http_forwarded
 $proxy_add_forwarded
 {
-volatile;
-
 ## if the incoming Forwarded header is syntactically valid, append to it
 "~^(,[ \\t]*)*([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?(;([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?)*([ \\t]*,([ \\t]*([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?(;([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?)*)?)*$" "$http_forwarded, $proxy_forwarded_elem";

```
angie/autoconf/http-server-tokens.conf (new file)

```diff
@@ -0,0 +1 @@
+server_tokens off;
```
angie/autoconf/mail-realip.conf.j2 (new file)

```diff
@@ -0,0 +1 @@
+{% include 'realip-set-from.j2inc' %}
```
angie/autoconf/realip-set-from.j2inc (new file)

```diff
@@ -0,0 +1,8 @@
+{%- set src = (j2cfg.trusted_sources or []) | any_to_str_list | uniq_str_list | ngx_esc -%}
+{%- if src %}
+{%- for s in src %}
+set_real_ip_from {{ s }};
+{%- endfor %}
+{%- else %}
+set_real_ip_from unix: ;
+{%- endif %}
```
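A rough idea of what this new include expands to, as a minimal sketch: the real `any_to_str_list`, `uniq_str_list` and `ngx_esc` filters live in the j2cfg package, so the versions below are simplified stand-ins for illustration only, and the loader path assumes the script runs from the repository root.

```python
# Sketch: render realip-set-from.j2inc with stub filters (assumptions noted above).
import jinja2

def any_to_str_list(x):
    # simplified stand-in for the j2cfg filter
    if x is None:
        return []
    return [x] if isinstance(x, str) else [str(e) for e in x]

def uniq_str_list(a):
    # simplified stand-in: drop empties, de-duplicate (sorted for stable output)
    return sorted(set(filter(None, a)))

def ngx_esc(v):
    # simplified stand-in: the real filter escapes values for the Angie config syntax
    if isinstance(v, (list, set)):
        return [ngx_esc(e) for e in v]
    return '"{}"'.format(str(v).replace('"', '\\"'))

env = jinja2.Environment(loader=jinja2.FileSystemLoader('angie/autoconf'))
env.filters.update({'any_to_str_list': any_to_str_list,
                    'uniq_str_list': uniq_str_list,
                    'ngx_esc': ngx_esc})

tpl = env.get_template('realip-set-from.j2inc')
print(tpl.render(j2cfg={'trusted_sources': ['10.0.0.0/8', '192.168.0.0/16']}))
# expected shape: one set_real_ip_from directive per trusted source
print(tpl.render(j2cfg={'trusted_sources': []}))
# expected shape: set_real_ip_from unix: ;
```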
angie/autoconf/stream-realip.conf.j2 (new file)

```diff
@@ -0,0 +1 @@
+{% include 'realip-set-from.j2inc' %}
```
```diff
@@ -3,7 +3,7 @@
 {%- if mime_types -%}
 brotli_types
 {%- for t in mime_types %}
-{{ t }}
+{{ t | ngx_esc }}
 {%- endfor %}
 ;
 {%- endif -%}
```
```diff
@@ -3,7 +3,7 @@
 {%- if mime_types -%}
 gzip_types
 {%- for t in mime_types %}
-{{ t }}
+{{ t | ngx_esc }}
 {%- endfor %}
 ;
 {%- endif -%}
```
```diff
@@ -4,7 +4,7 @@ include conf/ssl/*.conf;
 ssl_buffer_size 4k;

 {%- if env.NGX_HTTP_SSL_PROFILE %}
-include snip/ssl-{{ env.NGX_HTTP_SSL_PROFILE }};
+include {{ "snip/ssl-{}".format(env.NGX_HTTP_SSL_PROFILE) | ngx_esc }};
 {%- endif %}

 {%- if j2cfg.tls.stapling.enable %}
```
```diff
@@ -1,5 +1,5 @@
 include conf/ssl/*.conf;

 {%- if env.NGX_MAIL_SSL_PROFILE %}
-include snip/ssl-{{ env.NGX_MAIL_SSL_PROFILE }};
+include {{ "snip/ssl-{}".format(env.NGX_MAIL_SSL_PROFILE) | ngx_esc }};
 {%- endif %}
```
```diff
@@ -1,5 +1,5 @@
 include conf/ssl/*.conf;

 {%- if env.NGX_STREAM_SSL_PROFILE %}
-include snip/ssl-{{ env.NGX_STREAM_SSL_PROFILE }};
+include {{ "snip/ssl-{}".format(env.NGX_STREAM_SSL_PROFILE) | ngx_esc }};
 {%- endif %}
```
```diff
@@ -3,7 +3,7 @@
 {%- if mime_types -%}
 zstd_types
 {%- for t in mime_types %}
-{{ t }}
+{{ t | ngx_esc }}
 {%- endfor %}
 ;
 {%- endif -%}
```
```diff
@@ -58,3 +58,15 @@ tls:
 dhparam: tls/dh1024.pem
 session_tickets: off
 session_timeout: 1d
+
+## well-known values are:
+## - 'off' - don't use "realip" module
+## - 'proxy_protocol' - get information from PROXY protocol if listen directive has "proxy_protocol" option
+## - 'X-Forwarded-For' - get information from 'X-Forwarded-For' header
+## - 'X-Real-IP' - get information from 'X-Real-IP' header
+## any other value will be treated as uncommon (but valid?) header
+real_ip_from: off
+## list of trusted sources to be used in "realip" module
+## ref: https://en.angie.software/angie/docs/configuration/modules/http/http_realip/
+## if empty, defaults to "unix:"
+trusted_sources: []
```
```diff
@@ -1,6 +1,6 @@
 {%- set transparent = false -%}
 {%- if env.NGX_HTTP_TRANSPARENT_PROXY != None -%}
-{%- set transparent = (env.NGX_HTTP_TRANSPARENT_PROXY | from_gobool) -%}
+{%- set transparent = env.NGX_HTTP_TRANSPARENT_PROXY | from_gobool -%}
 {%- endif -%}

 ---
```
```diff
@@ -1,55 +1,55 @@
-{%- macro proxy_cache_path(name, size='1m') %}
-{%- set path = '/run/ngx/cache'|join_prefix('proxy_' + name) -%}
-{%- set zone_file = '/run/ngx/lib'|join_prefix('proxy_' + name + '.keys') -%}
+{% macro proxy_cache_path(name, size='1m') %}
+{%- set path = join_prefix('/run/ngx/cache', 'proxy_' + name) -%}
+{%- set zone_file = join_prefix('/run/ngx/lib', 'proxy_' + name + '.keys') -%}
 proxy_cache_path {{ path | ngx_esc }}
 {{ "keys_zone={}:{}:file={}".format(name, size, zone_file) | ngx_esc }}
 {%- for v in varargs %}
 {{ v | ngx_esc }}
 {%- endfor %}
 {%- for k, v in kwargs|dictsort %}
 {{ "{}={}".format(k, v) | ngx_esc }}
 {%- endfor %}
 ;
-{%- endmacro %}
+{% endmacro %}


-{%- macro fastcgi_cache_path(name, size='1m') %}
-{%- set path = '/run/ngx/cache'|join_prefix('fastcgi_' + name) -%}
+{% macro fastcgi_cache_path(name, size='1m') %}
+{%- set path = join_prefix('/run/ngx/cache', 'fastcgi_' + name) -%}
 fastcgi_cache_path {{ path | ngx_esc }}
 {{ "keys_zone={}:{}".format(name, size) | ngx_esc }}
 {%- for v in varargs %}
 {{ v | ngx_esc }}
 {%- endfor %}
 {%- for k, v in kwargs|dictsort %}
 {{ "{}={}".format(k, v) | ngx_esc }}
 {%- endfor %}
 ;
-{%- endmacro %}
+{% endmacro %}


-{%- macro scgi_cache(name, size='1m') %}
-{%- set path = '/run/ngx/cache'|join_prefix('scgi_' + name) -%}
+{% macro scgi_cache(name, size='1m') %}
+{%- set path = join_prefix('/run/ngx/cache', 'scgi_' + name) -%}
 scgi_cache {{ path | ngx_esc }}
 {{ "keys_zone={}:{}".format(name, size) | ngx_esc }}
 {%- for v in varargs %}
 {{ v | ngx_esc }}
 {%- endfor %}
 {%- for k, v in kwargs|dictsort %}
 {{ "{}={}".format(k, v) | ngx_esc }}
 {%- endfor %}
 ;
-{%- endmacro %}
+{% endmacro %}


-{%- macro uwsgi_cache(name, size='1m') %}
-{%- set path = '/run/ngx/cache'|join_prefix('uwsgi_' + name) -%}
+{% macro uwsgi_cache(name, size='1m') %}
+{%- set path = join_prefix('/run/ngx/cache', 'uwsgi_' + name) -%}
 uwsgi_cache {{ path | ngx_esc }}
 {{ "keys_zone={}:{}".format(name, size) | ngx_esc }}
 {%- for v in varargs %}
 {{ v | ngx_esc }}
 {%- endfor %}
 {%- for k, v in kwargs|dictsort %}
 {{ "{}={}".format(k, v) | ngx_esc }}
 {%- endfor %}
 ;
-{%- endmacro %}
+{% endmacro %}
```
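The notable change in these macros is that `join_prefix` is no longer applied as a Jinja2 filter (`'/run/ngx/cache'|join_prefix(...)`) but called as a global function (`join_prefix('/run/ngx/cache', ...)`), which matches the later `__init__.py` change that registers the j2cfg helpers into `e.globals`. A small sketch of that filter-vs-global distinction follows; `os.path.join` is only an assumed stand-in for the real `join_prefix`.

```python
# Sketch: the same helper exposed as a Jinja2 filter (old style) and as a global (new style).
import os.path
import jinja2

def join_prefix(prefix, name):
    return os.path.join(prefix, name)  # assumption about its behaviour, for illustration

env = jinja2.Environment()

# old style needs a filter registration
env.filters['join_prefix'] = join_prefix
old = env.from_string("{{ '/run/ngx/cache'|join_prefix('proxy_' + name) }}")

# new style needs a global registration
env.globals['join_prefix'] = join_prefix
new = env.from_string("{{ join_prefix('/run/ngx/cache', 'proxy_' + name) }}")

assert old.render(name='static') == new.render(name='static') == '/run/ngx/cache/proxy_static'
```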
|
```diff
@@ -1,6 +1,5 @@
 ## sourced by conf/fastcgi/headers.conf
 ## set/remove request headers
-{%- set req_hdr_dict = j2cfg.request_headers or {} -%}
-{%- for h, v in req_hdr_dict|dictsort %}
-fastcgi_param {{ h | as_cgi_hdr }} {{ v | ngx_esc }};
+{%- for h, v in j2cfg.request_headers | cgi_header | dictsort %}
+fastcgi_param {{ h }} {{ v | ngx_esc }};
 {%- endfor %}
```
```diff
@@ -1,7 +1,5 @@
 ## sourced by conf/fastcgi/headers.conf
 ## hide response headers
-{%- set resp_hdr_dict = j2cfg.response_headers or {} -%}
-{%- set resp_hdr_list = resp_hdr_dict | dict_keys -%}
-{%- for h in resp_hdr_list %}
+{%- for h in j2cfg.response_headers | dict_keys | http_header | uniq_str_list %}
 fastcgi_hide_header {{ h }};
 {%- endfor %}
```
```diff
@@ -1,6 +1,5 @@
 ## sourced by conf/grpc/headers.conf
 ## set/remove request headers
-{%- set req_hdr_dict = j2cfg.request_headers or {} -%}
-{%- for h, v in req_hdr_dict|dictsort %}
+{%- for h, v in j2cfg.request_headers | http_header | dictsort %}
 grpc_set_header {{ h }} {{ v | ngx_esc }};
 {%- endfor %}
```
```diff
@@ -1,7 +1,5 @@
 ## sourced by conf/grpc/headers.conf
 ## hide response headers
-{%- set resp_hdr_dict = j2cfg.response_headers or {} -%}
-{%- set resp_hdr_list = resp_hdr_dict | dict_keys -%}
-{%- for h in resp_hdr_list %}
+{%- for h in j2cfg.response_headers | dict_keys | http_header | uniq_str_list %}
 grpc_hide_header {{ h }};
 {%- endfor %}
```
```diff
@@ -1,4 +1,3 @@
-## sourced by autoconf/http-alt-svc.conf
 {#- prologue -#}
 {%- set extra_proto = ['v3', 'v2'] -%}
 {%- set confload = ( env.NGX_HTTP_CONFLOAD or '' ) | str_split_to_list -%}
```
```diff
@@ -1,6 +1,5 @@
 ## sourced by autoconf/http-response-headers.conf
 ## add response headers
-{%- set resp_hdr_dict = j2cfg.response_headers or {} -%}
-{%- for h, v in resp_hdr_dict|dictsort %}
+{%- for h, v in j2cfg.response_headers | http_header | dictsort %}
 add_header {{ h }} {{ v | ngx_esc }};
 {%- endfor %}
```
```diff
@@ -1,12 +1,35 @@
-{%- macro error_log(dest='error.log', level='warn') %}
-error_log {{ '/run/ngx/log'|join_prefix(dest) | ngx_esc }} {{ level | ngx_esc }};
-{%- endmacro %}
+{% macro error_log(dest='error.log', level='warn') %}
+{%- if dest == 'off' %}
+## not a really disabled log but quiet as less as possible
+error_log stderr emerg;
+{%- else %}
+error_log
+{%- if dest == 'stderr' %}
+{{ dest }}
+{%- elif dest.startswith('syslog:') %}
+{{ dest | ngx_esc }}
+{%- else %}
+{{ join_prefix('/run/ngx/log', dest) | ngx_esc }}
+{%- endif %}
+{{ level | ngx_esc }};
+{%- endif %}
+{% endmacro %}


-{%- macro access_log(dest='access.log', format='main') %}
-access_log {{ '/run/ngx/log'|join_prefix(dest) | ngx_esc }} {{ format | ngx_esc }}
+{% macro access_log(dest='access.log', format='main') %}
+{%- if dest == 'off' %}
+access_log off;
+{%- else %}
+access_log
+{%- if dest.startswith('syslog:') %}
+{{ dest | ngx_esc }}
+{%- else %}
+{{ join_prefix('/run/ngx/log', dest) | ngx_esc }}
+{%- endif %}
+{{ format | ngx_esc }}
 {%- for k, v in kwargs|dictsort %}
 {{ "{}={}".format(k, v) | ngx_esc }}
 {%- endfor %}
 ;
-{%- endmacro %}
+{%- endif %}
+{% endmacro %}
```
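The rewritten macros now dispatch on the `dest` argument: `'off'` quiets the log, `'stderr'` and `syslog:` destinations pass through, and anything else is treated as a file name under `/run/ngx/log`. A plain-Python mirror of that decision logic, for illustration only (it is not the project's API):

```python
# Sketch: the destination dispatch performed by the rewritten error_log macro.
import os.path

def error_log_directive(dest='error.log', level='warn'):
    if dest == 'off':
        # "off" still needs a destination, so only emergencies go to stderr
        return 'error_log stderr emerg;'
    if dest == 'stderr' or dest.startswith('syslog:'):
        target = dest
    else:
        target = os.path.join('/run/ngx/log', dest)
    return f'error_log {target} {level};'

print(error_log_directive())                                    # error_log /run/ngx/log/error.log warn;
print(error_log_directive('off'))                               # error_log stderr emerg;
print(error_log_directive('syslog:server=unix:/dev/log', 'info'))
```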
|
```diff
@@ -1,6 +1,5 @@
 ## sourced by conf/proxy-http/headers.conf
 ## set/remove request headers
-{%- set req_hdr_dict = j2cfg.request_headers or {} -%}
-{%- for h, v in req_hdr_dict|dictsort %}
+{%- for h, v in j2cfg.request_headers | http_header | dictsort %}
 proxy_set_header {{ h }} {{ v | ngx_esc }};
 {%- endfor %}
```
```diff
@@ -1,7 +1,5 @@
 ## sourced by conf/proxy-http/headers.conf
 ## hide response headers
-{%- set resp_hdr_dict = j2cfg.response_headers or {} -%}
-{%- set resp_hdr_list = resp_hdr_dict | dict_keys -%}
-{%- for h in resp_hdr_list %}
+{%- for h in j2cfg.response_headers | dict_keys | http_header | uniq_str_list %}
 proxy_hide_header {{ h }};
 {%- endfor %}
```
```diff
@@ -1,6 +1,5 @@
 ## sourced by conf/scgi/headers.conf
 ## set/remove request headers
-{%- set req_hdr_dict = j2cfg.request_headers or {} -%}
-{%- for h, v in req_hdr_dict|dictsort %}
-scgi_param {{ h | as_cgi_hdr }} {{ v | ngx_esc }};
+{%- for h, v in j2cfg.request_headers | cgi_header | dictsort %}
+scgi_param {{ h }} {{ v | ngx_esc }};
 {%- endfor %}
```
```diff
@@ -1,7 +1,5 @@
 ## sourced by conf/scgi/headers.conf
 ## hide response headers
-{%- set resp_hdr_dict = j2cfg.response_headers or {} -%}
-{%- set resp_hdr_list = resp_hdr_dict | dict_keys -%}
-{%- for h in resp_hdr_list %}
+{%- for h in j2cfg.response_headers | dict_keys | http_header | uniq_str_list %}
 scgi_hide_header {{ h }};
 {%- endfor %}
```
```diff
@@ -1,28 +1,28 @@
 {%- if ssl_profile.protocols %}
-ssl_protocols {{ ssl_profile.protocols }};
+ssl_protocols {{ ssl_profile.protocols | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.prefer_server_ciphers %}
-ssl_prefer_server_ciphers {{ ssl_profile.prefer_server_ciphers }};
+ssl_prefer_server_ciphers {{ ssl_profile.prefer_server_ciphers | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.ciphers %}
-ssl_ciphers {{ ssl_profile.ciphers }};
+ssl_ciphers {{ ssl_profile.ciphers | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.dhparam %}
-ssl_dhparam {{ ssl_profile.dhparam }};
+ssl_dhparam {{ ssl_profile.dhparam | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.ecdh_curve %}
-ssl_ecdh_curve {{ ssl_profile.ecdh_curve }};
+ssl_ecdh_curve {{ ssl_profile.ecdh_curve | ngx_esc }};
 {%- endif %}

 {%- if ssl_profile.session_cache %}
-ssl_session_cache {{ ssl_profile.session_cache }};
+ssl_session_cache {{ ssl_profile.session_cache | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.session_timeout %}
-ssl_session_timeout {{ ssl_profile.session_timeout }};
+ssl_session_timeout {{ ssl_profile.session_timeout | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.session_tickets %}
-ssl_session_tickets {{ ssl_profile.session_tickets }};
+ssl_session_tickets {{ ssl_profile.session_tickets | ngx_esc }};
 {%- endif %}
 {%- if ssl_profile.session_ticket_key %}
-ssl_session_ticket_key {{ ssl_profile.session_ticket_key }};
+ssl_session_ticket_key {{ ssl_profile.session_ticket_key | ngx_esc }};
 {%- endif %}
```
```diff
@@ -1,6 +1,5 @@
 ## sourced by conf/uwsgi/headers.conf
 ## set/remove request headers
-{%- set req_hdr_dict = j2cfg.request_headers or {} -%}
-{%- for h, v in req_hdr_dict|dictsort %}
-uwsgi_param {{ h | as_cgi_hdr }} {{ v | ngx_esc }};
+{%- for h, v in j2cfg.request_headers | cgi_header | dictsort %}
+uwsgi_param {{ h }} {{ v | ngx_esc }};
 {%- endfor %}
```
```diff
@@ -1,7 +1,5 @@
 ## sourced by conf/uwsgi/headers.conf
 ## hide response headers
-{%- set resp_hdr_dict = j2cfg.response_headers or {} -%}
-{%- set resp_hdr_list = resp_hdr_dict | dict_keys -%}
-{%- for h in resp_hdr_list %}
+{%- for h in j2cfg.response_headers | dict_keys | http_header | uniq_str_list %}
 uwsgi_hide_header {{ h }};
 {%- endfor %}
```
```diff
@@ -1,27 +0,0 @@
-Package: src:curl
-Pin: release n=bookworm-backports
-Pin-Priority: 600
-
-Package: src:debhelper
-Pin: release n=bookworm-backports
-Pin-Priority: 600
-
-Package: src:elfutils
-Pin: release n=bookworm-backports
-Pin-Priority: 600
-
-Package: src:iproute2
-Pin: release n=bookworm-backports
-Pin-Priority: 600
-
-Package: src:libbpf
-Pin: release n=bookworm-backports
-Pin-Priority: 600
-
-Package: src:systemd
-Pin: release n=bookworm-backports
-Pin-Priority: 600
-
-Package: src:sysvinit
-Pin: release n=bookworm-backports
-Pin-Priority: 600
```
```diff
@@ -1,11 +1,11 @@
 Types: deb
 URIs: http://deb.debian.org/debian
-Suites: bookworm bookworm-updates bookworm-proposed-updates bookworm-backports
+Suites: trixie trixie-updates trixie-proposed-updates trixie-backports
 Components: main
 Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg

 Types: deb
 URIs: http://deb.debian.org/debian-security
-Suites: bookworm-security
+Suites: trixie-security
 Components: main
 Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg
```
```diff
@@ -10,7 +10,7 @@ BUILDAH_ISOLATION="${BUILDAH_ISOLATION:-chroot}"
 BUILDAH_NETWORK="${BUILDAH_NETWORK:-host}"
 set +a

-PYTHONTAG="${PYTHONTAG:-3.12.11-slim-bookworm}"
+PYTHONTAG="${PYTHONTAG:-3.12.11-slim-trixie}"

 grab_site_packages() {
 podman run \
```
````diff
@@ -3,12 +3,12 @@
 configuration:

 ```nginx
-{%- import 'snip/cache.j2mod' as ngx_cache -%}
+{%- import 'snip/cache.j2mod' as _cache -%}

 {%- set my_caches = (j2cfg.my_caches or []) -%}

 {%- for h in my_caches %}
-{{ ngx_cache.proxy_cache_path(h.name, size='10m', levels='1:2', inactive=h.max_time) }}
+{{ _cache.proxy_cache_path(h.name, size='10m', levels='1:2', inactive=h.max_time) }}
 {%- endfor %}

 server {
````
```diff
@@ -1,4 +1,4 @@
-{%- import 'snip/cache.j2mod' as ngx_cache -%}
+{%- import 'snip/cache.j2mod' as _cache -%}
 {%- set my_caches = (j2cfg.my_caches or []) -%}

 map $uri
```
```diff
@@ -34,7 +34,7 @@ proxy_cache_use_stale error timeout invalid_header updating http_429 ht
 proxy_cache_revalidate on;

 {%- for h in my_caches %}
-{{ ngx_cache.proxy_cache_path(h.name, size='10m', levels='1:2', inactive=h.max_time) }}
+{{ _cache.proxy_cache_path(h.name, size='10m', levels='1:2', inactive=h.max_time) }}
 {%- endfor %}

 server {
```
```diff
@@ -82,99 +82,6 @@ user_install() {
 fi
 }

-expand_file_envsubst() {
-local __ret __src __dst
-
-__ret=0
-for __src ; do
-[ -n "${__src}" ] || continue
-
-if ! [ -f "${__src}" ] ; then
-__ret=1
-log_always "file not found: ${__src}"
-continue
-fi
-
-case "${__src}" in
-*.in ) ;;
-* )
-__ret=1
-log "expand_file_envsubst: file name extension mismatch: ${__src}"
-continue
-;;
-esac
-
-__dst=$(strip_suffix "${__src}" '.in')
-if [ -e "${__dst}" ] ; then
-__ret=1
-log "expand_file_envsubst: destination file already exists: ${__dst}"
-continue
-fi
-
-log "Running envsubst: ${__src} -> ${__dst}"
-envsubst.sh < "${__src}" > "${__dst}" || __ret=1
-done
-return ${__ret}
-}
-
-expand_file_j2cfg() {
-j2cfg-single "$@" || return $?
-}
-
-expand_dir_envsubst() {
-local __template_list __have_args __ret __orig_file
-
-__template_list=$(mktemp) || return
-
-find "$@" -follow -name '*.in' -type f \
-| sort -uV > "${__template_list}"
-
-__ret=0
-if [ -s "${__template_list}" ] ; then
-__have_args="${ENVSUBST_ARGS:+1}"
-if [ -z "${__have_args}" ] ; then
-## optimize envsubst.sh invocation by caching argument list
-## ref: envsubst.sh
-ENVSUBST_ARGS=$(mktemp) || return
-envsubst-args.sh > "${ENVSUBST_ARGS}"
-export ENVSUBST_ARGS
-fi
-
-while read -r __orig_file ; do
-[ -n "${__orig_file}" ] || continue
-expand_file_envsubst "${__orig_file}" || __ret=1
-done < "${__template_list}"
-
-if [ -z "${__have_args}" ] ; then
-rm -f "${ENVSUBST_ARGS}" ; unset ENVSUBST_ARGS
-fi
-unset __have_args
-fi
-
-rm -f "${__template_list}" ; unset __template_list
-
-return ${__ret}
-}
-
-expand_dir_j2cfg() {
-local __template_list __ret
-
-__template_list=$(mktemp) || return
-
-find "$@" -follow -name '*.j2' -type f -printf '%p\0' \
-| sort -zuV > "${__template_list}"
-
-__ret=0
-if [ -s "${__template_list}" ] ; then
-xargs -0r -n 1000 -a "${__template_list}" \
-j2cfg-multi < /dev/null || __ret=1
-fi
-
-rm -f "${__template_list}" ; unset __template_list
-
-return ${__ret}
-}
-
 is_builtin_module() {
 [ -n "${1:-}" ] || return 1
 [ -n "${2:-}" ] || return 1
```
```diff
@@ -1,5 +1,13 @@
 #!/bin/sh

+## if IEP_DEBUG is not set, allow scripts to delete source templates
+## (they are likely not needed anymore)
+if [ "${IEP_DEBUG}" = 1 ] ; then
+unset ENVSUBST_UNLINK_SRC J2CFG_UNLINK_SRC
+else
+export ENVSUBST_UNLINK_SRC=1 J2CFG_UNLINK_SRC=1
+fi
+
 unset NGX_DEBUG
 NGX_DEBUG=$(/usr/sbin/angie --build-env 2>&1 | mawk '$1=="DEBUG:" {print $2;exit;}')
 NGX_DEBUG="${NGX_DEBUG:-0}"
```
```diff
@@ -5,6 +5,9 @@ set -ef

 ## hack: override "cache", "lib" and "log" from /angie (and possibly from /etc/angie)
 fake_dir=$(mktemp -d)
+if [ "${NGX_HTTP_STATIC_MERGE}" = 0 ] ; then
+persist_dirs="${persist_dirs} static"
+fi
 for n in ${persist_dirs} ; do touch "${fake_dir}/$n" ; done

 install -d "${target_root}"
```
```diff
@@ -13,12 +16,14 @@ overlaydirs --merge "${target_root}" /etc/angie.dist /etc/angie /angie "${fake_d
 ## fixup after merge
 for n in ${persist_dirs} ; do rm -f "${target_root}/$n" ; done
 rm -rf "${fake_dir}"
+if [ -d "${target_root}/mod" ] ; then
+find "${target_root}/mod/" -follow -name '.*.preseed' -type f -exec rm -f {} +
+fi

 if [ "${NGX_HTTP_STATIC_MERGE}" = 0 ] ; then
 src0=/etc/angie.dist/static
 dst="${target_root}/static"

-rm -rf "${dst}"
 for r in /angie /etc/angie ; do
 src="$r/static"
 [ -d "${src}" ] || continue
```
```diff
@@ -29,6 +34,7 @@ if [ "${NGX_HTTP_STATIC_MERGE}" = 0 ] ; then

 install -d "${dst}"
 overlaydirs --merge "${dst}" ${src0} "${src}"
+break
 done
 [ -d "${dst}" ] || {
 log_always "static directory is almost empty!"
```
```diff
@@ -29,9 +29,6 @@ expand_error() {
 expand_error_delim
 }

-set +e
-## NB: j2cfg/ and static/ are handled separately
-merge_dirs=$(find ./ -follow -mindepth 1 -maxdepth 1 -type d -printf '%P/\n' | grep -Fxv -e j2cfg/ -e static/ | sort -uV)
 [ "${NGX_STRICT_LOAD}" = 0 ] || set -e

 unset ENVSUBST_ARGS
```
```diff
@@ -40,9 +37,7 @@ envsubst-args.sh > "${ENVSUBST_ARGS}"
 export ENVSUBST_ARGS

 ## envsubst is simple and fast
-## expand j2cfg/ first, then other directories
-expand_dir_envsubst j2cfg/ || expand_error
-expand_dir_envsubst ${merge_dirs} || expand_error
+envsubst-dirs j2cfg/ || expand_error

 ## j2cfg is more complex

```
```diff
@@ -53,39 +48,53 @@ J2CFG_SEARCH_PATH="${target_root}"
 set +a

 ## expand j2cfg/ first
-expand_dir_j2cfg j2cfg/ || expand_error
+j2cfg-dirs j2cfg/ || expand_error

 ## dump [merged] j2cfg config
 j2cfg_dump="${volume_root}/diag.j2cfg.yml"
-j2cfg-dump > "${j2cfg_dump}" || expand_error
+j2cfg-dump-yml > "${j2cfg_dump}" || expand_error
 export J2CFG_CONFIG="${j2cfg_dump}"

 ## expand other directories
-expand_dir_j2cfg ${merge_dirs} || expand_error
+## NB: j2cfg/ and static/ are handled separately
+merge_dirs=$(mktemp)
+{
+set +e
+find ./ -follow -mindepth 1 -maxdepth 1 -type d -printf '%P/\0' \
+| grep -zFxv -e j2cfg/ -e static/ | sort -zuV
+} > "${merge_dirs}"
+
+xargs -0r -n 1000 -a "${merge_dirs}" \
+envsubst-dirs < /dev/null || expand_error
+
+xargs -0r -n 1000 -a "${merge_dirs}" \
+j2cfg-dirs < /dev/null || expand_error
+
+rm -f "${merge_dirs}" ; unset merge_dirs

 ## expand static/
-## remove template sources in order to avoid leaking sensitive data
+## NB: template sources are removed unless IEP_DEBUG is set!
 if [ "${NGX_HTTP_STATIC_TEMPLATE}" = 1 ] ; then

 template_list=$(mktemp)

-find static/ -follow -name '*.in' -type f \
+find static/ -follow -name '*.in' -type f -printf '%p\0' \
 | {
 set +e
 if [ -n "${NGX_STATIC_EXCLUDE_REGEX:-}" ] ; then
-grep -Ev -e "${NGX_STATIC_EXCLUDE_REGEX}"
+grep -zEv -e "${NGX_STATIC_EXCLUDE_REGEX}"
 elif [ -n "${NGX_STATIC_INCLUDE_REGEX:-}" ] ; then
-grep -E -e "${NGX_STATIC_INCLUDE_REGEX}"
+grep -zE -e "${NGX_STATIC_INCLUDE_REGEX}"
 else
 cat
 fi
 } \
-| sort -uV > "${template_list}"
+| sort -zuV > "${template_list}"

-while read -r src ; do
-[ -n "${src}" ] || continue
-expand_file_envsubst "${src}" || expand_error
-rm -fv "${src}"
-done < "${template_list}"
+if [ -s "${template_list}" ] ; then
+xargs -0r -n 1000 -a "${template_list}" \
+envsubst-multi < /dev/null || expand_error
+fi

 find static/ -follow -name '*.j2' -type f -printf '%p\0' \
 | {
```
```diff
@@ -103,9 +112,6 @@ if [ "${NGX_HTTP_STATIC_TEMPLATE}" = 1 ] ; then
 if [ -s "${template_list}" ] ; then
 xargs -0r -n 1000 -a "${template_list}" \
 j2cfg-multi < /dev/null || expand_error
-
-xargs -0r -n 1000 -a "${template_list}" \
-rm -fv < /dev/null
 fi

 rm -f "${template_list}"
```
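These entrypoint hunks replace newline-delimited `while read` loops with NUL-delimited file lists fed to `xargs -0r -n 1000`, so file names containing whitespace survive and the per-file process spawning is batched. An illustrative Python equivalent of that pattern, not taken from the project itself:

```python
# Sketch: consume a NUL-delimited file list in batches, as the shell xargs -0 -n 1000 does.
import subprocess

def run_in_batches(list_path, command, batch_size=1000):
    with open(list_path, 'rb') as fh:
        # names are separated by NUL bytes; empty entries are skipped
        names = [n.decode() for n in fh.read().split(b'\0') if n]
    for i in range(0, len(names), batch_size):
        subprocess.run(command + names[i:i + batch_size], check=True)

# e.g. run_in_batches('/tmp/template.list', ['j2cfg-multi'])
```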
|
```diff
@@ -47,7 +47,7 @@ $(
 | xargs -0r printf '%q\n' \
 | {
 ## retain variables defined in ".core_worker_env" configuration key
-## (if it was specified somewhere in dictionaries - either yaml or json)
+## (if it was specified somewhere in dictionaries - either yaml, toml or json)
 f="${target_root}/autoconf/core-worker-env.txt"
 [ -s "$f" ] || exec cat
 grep -Fxv -f "$f"
```
```diff
@@ -25,11 +25,6 @@ iep_preserve_env() {
 unset __IEP_MALLOC_ARENA_MAX
 __IEP_MALLOC_ARENA_MAX="${MALLOC_ARENA_MAX:-4}"
 export MALLOC_ARENA_MAX=2
-
-## jemalloc: preserve MALLOC_CONF
-unset __IEP_MALLOC_CONF
-__IEP_MALLOC_CONF="${MALLOC_CONF:-}"
-unset MALLOC_CONF
 }

 iep_restore_env() {
```
```diff
@@ -52,12 +47,6 @@ iep_restore_env() {
 export MALLOC_ARENA_MAX="${__IEP_MALLOC_ARENA_MAX}"
 fi
 unset __IEP_MALLOC_ARENA_MAX
-
-## jemalloc: restore MALLOC_CONF
-if [ -n "${__IEP_MALLOC_CONF:-}" ] ; then
-export MALLOC_CONF="${__IEP_MALLOC_CONF}"
-fi
-unset __IEP_MALLOC_CONF
 }

 iep_flush_volume() {
```
```diff
@@ -9,7 +9,7 @@ def main():
 import j2cfg

 j = j2cfg.J2cfg(dump_only=True)
-print(j.dump_config())
+print(j.dump_config_yml())

 sys.exit(0)
```
```diff
@@ -1,24 +1,42 @@
-import importlib
-import json
 import os
 import os.path
 import sys
+import datetime
+import importlib
+import json
+import tomllib

 import jinja2
 import wcmatch.wcmatch
 import yaml

-from .functions import *
 from .settings import *
-
-J2CFG_CONFIG_EXT = ['yml', 'yaml', 'json']
+from .functions import *


 class J2cfg:
-    def __init__(self, strict=True, config_file=None, config_path=None,
-                 modules=None, search_path=None, template_suffix=None,
-                 dump_only=False):
+    def ensure_fs_loader_for(self, directory: str, from_init = False) -> bool:
+        if self.dump_only:
+            raise ValueError('dump_only is True')
+
+        if self.j2fs_loaders is None:
+            raise ValueError('j2fs_loaders is None')
+        if directory in self.j2fs_loaders:
+            return not bool(from_init)
+        if directory == '':
+            print('J2cfg: ensure_fs_loader_for(): empty directory name, skipping', file=sys.stderr)
+            return False
+        if not os.path.isdir(directory):
+            print(f'J2cfg: ensure_fs_loader_for(): not a directory or does not exist, skipping: {directory}', file=sys.stderr)
+            return False
+        self.j2fs_loaders[directory] = jinja2.FileSystemLoader(
+            directory, encoding='utf-8', followlinks=True,
+        )
+        return True
+
+    def __init__(self, dump_only=False, strict=True, unlink_source=None,
+                 config_file=None, config_path=None, search_path=None,
+                 ):

         if dump_only is None:
             self.dump_only = False
```
```diff
@@ -42,64 +60,63 @@

         self.kwargs = {'j2cfg': {}}

-        def merge_dict_from_file(filename):
+        def merge_dict_from_file(filename) -> bool:
             if filename is None:
                 return False
             f = str(filename)
-            if f == '':
+            if f == "":
                 return False
             if not os.path.exists(f):
+                print(f'J2cfg: merge_dict_from_file(): path does not exist, skipping: {filename}', file=sys.stderr)
                 return False
             if not os.path.isfile(f):
-                print(
-                    f'J2cfg: not a file, skipping: {filename}',
-                    file=sys.stderr)
+                print(f'J2cfg: merge_dict_from_file(): not a file, skipping: {filename}', file=sys.stderr)
                 return False

-            if f.endswith('.yml') or f.endswith('.yaml'):
+            ext = os.path.splitext(f)[1]
+            if ext not in J2CFG_CONFIG_EXT:
+                print(f'J2cfg: merge_dict_from_file(): non-recognized name extension: {f}', file=sys.stderr)
+                return False
+
+            if ext in [ '.yml', '.yaml' ]:
                 with open(f, mode='r', encoding='utf-8') as fx:
                     for x in yaml.safe_load_all(fx):
                         if not x:
+                            # print(f'J2cfg: received empty document from: {f}', file=sys.stderr)
                             continue
-                        self.kwargs['j2cfg'] = merge_dict_recurse(
-                            self.kwargs['j2cfg'], x
-                        )
+                        self.kwargs['j2cfg'] = merge_dict_recurse(self.kwargs['j2cfg'], x)
                 return True

-            if f.endswith('.json'):
+            if ext == '.toml':
+                with open(f, mode='rb') as fx:
+                    x = tomllib.load(fx)
+                    self.kwargs['j2cfg'] = merge_dict_recurse(self.kwargs['j2cfg'], x)
+                return True
+
+            if ext == '.json':
                 with open(f, mode='r', encoding='utf-8') as fx:
-                    self.kwargs['j2cfg'] = merge_dict_recurse(
-                        self.kwargs['j2cfg'], json.load(fx)
-                    )
+                    x = json.load(fx)
+                    self.kwargs['j2cfg'] = merge_dict_recurse(self.kwargs['j2cfg'], x)
                 return True

-            print(
-                f'J2cfg: non-recognized name extension: {filename}',
-                file=sys.stderr)
             return False

         def merge_dict_default():
-            search_pattern = '|'.join(['*.' + ext for ext in J2CFG_CONFIG_EXT])
+            search_pattern = '|'.join( [ '*' + ext for ext in J2CFG_CONFIG_EXT ] )
             search_flags = wcmatch.wcmatch.SYMLINKS

             for d in self.config_path:
                 if not os.path.isdir(d):
+                    print(f'J2cfg: merge_dict_default(): not a directory or does not exist, skipping: {d}', file=sys.stderr)
                     continue
-                m = wcmatch.wcmatch.WcMatch(d, search_pattern,
-                                            flags=search_flags)
+                m = wcmatch.wcmatch.WcMatch(d, search_pattern, flags=search_flags)
                 for f in sorted(m.match()):
                     if self.dump_only:
                         real_f = os.path.realpath(f)
                         if f == real_f:
-                            print(
-                                f'J2cfg: try loading {f}',
-                                file=sys.stderr
-                            )
+                            print(f'J2cfg: try loading {f}', file=sys.stderr)
                         else:
-                            print(
-                                f'J2cfg: try loading {f} <- {real_f}',
-                                file=sys.stderr
-                            )
+                            print(f'J2cfg: try loading {f} <- {real_f}', file=sys.stderr)
                     merge_dict_from_file(f)

         if self.config_file is None:
```
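With this change the config loader dispatches on the file name extension and merges YAML (multi-document, via PyYAML), TOML (via the standard-library tomllib, which requires a binary file object) and JSON into one dictionary. A minimal standalone sketch of that flow follows; `merge_recurse` is only a plausible stand-in for the project's `merge_dict_recurse`, whose exact semantics are not shown in this hunk.

```python
# Sketch: extension-based config loading and recursive merging (assumptions noted above).
import json
import os.path
import tomllib

import yaml  # PyYAML, which the project already depends on

def merge_recurse(dst: dict, src: dict) -> dict:
    # assumed merge behaviour: nested dicts merge, everything else is overwritten
    out = dict(dst)
    for k, v in src.items():
        if isinstance(v, dict) and isinstance(out.get(k), dict):
            out[k] = merge_recurse(out[k], v)
        else:
            out[k] = v
    return out

def load_any(path: str) -> dict:
    ext = os.path.splitext(path)[1]
    if ext in ('.yml', '.yaml'):
        merged = {}
        with open(path, encoding='utf-8') as fh:
            for doc in yaml.safe_load_all(fh):
                merged = merge_recurse(merged, doc or {})
        return merged
    if ext == '.toml':
        with open(path, 'rb') as fh:   # tomllib requires binary mode
            return tomllib.load(fh)
    if ext == '.json':
        with open(path, encoding='utf-8') as fh:
            return json.load(fh)
    raise ValueError(f'unrecognized config extension: {path}')
```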
```diff
@@ -109,11 +126,7 @@
         if os.path.isfile(self.config_file):
             merge_dict_from_file(self.config_file)
         else:
-            print(
-                'J2cfg: J2cfg config file does not exist, skipping: '
-                + f'{self.config_file}',
-                file=sys.stderr
-            )
+            print('J2cfg: config file does not exist, skipping: {self.config_file}', file=sys.stderr)

         if self.dump_only:
             return
```
```diff
@@ -122,6 +135,15 @@
         if not isinstance(self.strict, bool):
             self.strict = True

+        if unlink_source is not None:
+            self.unlink_source = from_gobool(unlink_source)
+        else:
+            x = os.getenv('J2CFG_UNLINK_SRC')
+            if x is None:
+                self.unlink_source = False
+            else:
+                self.unlink_source = from_gobool(x)
+
         self.search_path = search_path
         if self.search_path is None:
             self.search_path = os.getenv('J2CFG_SEARCH_PATH')
```
```diff
@@ -132,78 +154,55 @@
         else:
             self.search_path = uniq_str_list(any_to_str_list(self.search_path))
         # RFC: should we use the current working directory early?
-        for d in [os.getcwd()]:
+        for d in [ os.getcwd() ]:
             if d not in self.search_path:
                 self.search_path.insert(0, d)

-        self.modules = modules or os.getenv('J2CFG_MODULES')
-        if self.modules is None:
-            self.modules = J2CFG_PYTHON_MODULES
-        else:
-            if isinstance(self.modules, str):
-                self.modules = str_split_to_list(self.modules)
-            else:
-                self.modules = any_to_str_list(self.modules)
-            self.modules = uniq_str_list(self.modules)
-
-        self.template_suffix = template_suffix or os.getenv('J2CFG_SUFFIX')
-        if self.template_suffix is None:
-            self.template_suffix = J2CFG_TEMPLATE_EXT
-        else:
-            self.template_suffix = str(self.template_suffix)
-            if self.template_suffix == '':
-                self.template_suffix = J2CFG_TEMPLATE_EXT
-            if not self.template_suffix.startswith('.'):
-                self.template_suffix = '.' + self.template_suffix
-
         self.kwargs.update({
             'env': os.environ,
             'env_vars_preserve': J2CFG_PRESERVE_ENVS,
             'env_vars_passthrough': J2CFG_PASSTHROUGH_ENVS,
         })
-        for m in self.modules:
-            if m in self.kwargs:
-                print(f'J2cfg: kwargs already has {m} key',
-                      file=sys.stderr)
-                continue
-            self.kwargs[m] = importlib.import_module(m)
-
-        self.j2fs_loaders = {
-            d: jinja2.FileSystemLoader(
-                d, encoding='utf-8', followlinks=True,
-            ) for d in self.search_path
-        }
+        self.j2fs_loaders = {}
+        t = []
+        for d in self.search_path:
+            d = os.path.abspath(d)
+            if self.ensure_fs_loader_for(d, from_init=True):
+                t.append(d)
+                continue
+        self.search_path = t
         self.j2env = jinja2.Environment(
             extensions=J2CFG_JINJA_EXTENSIONS,
-            loader=jinja2.ChoiceLoader([
-                self.j2fs_loaders[d] for d in self.search_path
-            ]),
+            loader=jinja2.ChoiceLoader(
+                [ self.j2fs_loaders[d] for d in self.search_path ]
+            ),
         )

         def init_env(e: jinja2.Environment):
+            for m in J2CFG_PYTHON_MODULES:
+                if m in e.globals:
+                    print(f'J2cfg: globals already has {m} key, module will not be imported', file=sys.stderr)
+                    continue
+                e.globals.update( { m: importlib.import_module(m) } )
+            for s in J2CFG_FUNCTIONS:
+                n = s.__name__
+                if n in e.globals:
+                    print(f'J2cfg: globals already has {n} key, function will not be imported', file=sys.stderr)
+                    continue
+                e.globals.update( { n: s } )
             for s in J2CFG_FILTERS:
                 n = s.__name__
                 if n in e.filters:
-                    print(f'J2cfg: filters already has {n} key',
-                          file=sys.stderr)
+                    print(f'J2cfg: filters already has {n} key, filter will not be imported', file=sys.stderr)
                     continue
-                e.filters[n] = s
+                e.filters.update( { n: s } )

         init_env(self.j2env)

-    def dump_config(self):
+    def dump_config_yml(self):
         return yaml.safe_dump(self.kwargs['j2cfg'])

-    def ensure_fs_loader_for(self, directory: str):
-        if self.dump_only:
-            raise ValueError('dump_only is True')
-
-        if directory in self.j2fs_loaders:
-            return
-        self.j2fs_loaders[directory] = jinja2.FileSystemLoader(
-            directory, encoding='utf-8', followlinks=True,
-        )
-
     def render_file(self, file_in, file_out=None) -> bool:
         if self.dump_only:
             raise ValueError('dump_only is True')
```
@@ -211,56 +210,93 @@ class J2cfg:
|
|||||||
def render_error(msg) -> bool:
|
def render_error(msg) -> bool:
|
||||||
if self.strict:
|
if self.strict:
|
||||||
raise ValueError(msg)
|
raise ValueError(msg)
|
||||||
print(f'J2cfg: {msg}', file=sys.stderr)
|
print(f'J2cfg: render_file(): {msg}', file=sys.stderr)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
_STDIN = '/dev/stdin'
|
||||||
|
_STDOUT = '/dev/stdout'
|
||||||
|
|
||||||
if file_in is None:
|
         if file_in is None:
-            return render_error(
-                'argument "file_in" is None')
+            return render_error('argument "file_in" is None')
         f_in = str(file_in)
         if f_in == '':
-            return render_error(
-                'argument "file_in" is empty')
-        if not os.path.exists(f_in):
-            return render_error(
-                f'file is missing: {file_in}')
-        if not os.path.isfile(f_in):
-            return render_error(
-                f'not a file: {file_in}')
+            return render_error('argument "file_in" is empty')
+        if f_in == '-':
+            f_in = _STDIN
+        if f_in == _STDIN:
+            f_stdin = True
+        else:
+            f_stdin = os.path.samefile(f_in, _STDIN)
+        if not f_stdin:
+            if not os.path.exists(f_in):
+                return render_error(f'file is missing: {file_in}')
+            if not os.path.isfile(f_in):
+                return render_error(f'not a file: {file_in}')

         f_out = file_out
         if f_out is None:
-            if not f_in.endswith(self.template_suffix):
-                return render_error(
-                    f'input file name extension mismatch: {file_in}')
-            f_out = os.path.splitext(f_in)[0]
+            if f_stdin:
+                f_out = _STDOUT
+            else:
+                if not f_in.endswith(J2CFG_TEMPLATE_EXT):
+                    return render_error(f'input file name extension mismatch: {file_in}')
+                f_out = os.path.splitext(f_in)[0]
+        if f_out == '-':
+            f_out = _STDOUT
+        if f_out == _STDOUT:
+            f_stdout = True
+        else:
+            f_stdout = os.path.exists(f_out) and os.path.samefile(f_out, _STDOUT)

         dirs = self.search_path.copy()
-        for d in [os.getcwd(), os.path.dirname(f_in)]:
-            if d in dirs:
-                continue
-            self.ensure_fs_loader_for(d)
-            dirs.insert(0, d)
-        if f_in.startswith('/'):
-            self.ensure_fs_loader_for('/')
-            dirs.append('/')
+        extra_dirs = [ os.getcwd() ]
+        if not f_stdin:
+            f_in_dir = os.path.dirname(f_in)
+            if f_in_dir == '':
+                f_in_dir = '.'
+            extra_dirs.append(f_in_dir)
+        for d in extra_dirs:
+            d = os.path.abspath(d)
+            if self.ensure_fs_loader_for(d):
+                dirs.insert(0, d)

-        j2_environ = self.j2env.overlay(loader=jinja2.ChoiceLoader([
-            self.j2fs_loaders[d] for d in dirs
-        ]))
-        j2_template = j2_environ.get_template(f_in)
+        if not f_stdin:
+            if f_in.startswith('/'):
+                for d in [ '/' ]:
+                    if d in dirs:
+                        continue
+                    if self.ensure_fs_loader_for(d):
+                        dirs.append('/')
+
+        j2_environ = self.j2env.overlay(
+            loader=jinja2.ChoiceLoader(
+                [ self.j2fs_loaders[d] for d in dirs ]
+            )
+        )
+        if f_stdin:
+            j2_template = j2_environ.from_string(''.join(sys.stdin.readlines()))
+        else:
+            j2_template = j2_environ.get_template(f_in)
         rendered = j2_template.render(**self.kwargs)

-        if os.path.lexists(f_out):
-            if os.path.islink(f_out) or (not os.path.isfile(f_out)):
-                return render_error(
-                    f'output file is not safely writable: {f_out}')
-        if os.path.exists(f_out):
-            if os.path.samefile(f_in, f_out):
-                return render_error(
-                    f'unable to process template inplace: {file_in}')
+        if not (f_stdin and f_stdout):
+            if os.path.exists(f_out) and os.path.samefile(f_in, f_out):
+                return render_error(f'unable to process template inplace: {file_in}')
+        if not f_stdout:
+            if os.path.lexists(f_out) and (os.path.islink(f_out) or (not os.path.isfile(f_out))):
+                return render_error(f'output file is not safely writable: {f_out}')

-        with open(f_out, mode='w', encoding='utf-8') as f:
-            f.write(rendered)
+        if f_stdout:
+            sys.stdout.write(rendered)
+            sys.stdout.flush()
+        else:
+            with open(f_out, mode='w', encoding='utf-8') as f:
+                f.write(rendered)
+                f.flush()
+
+        if self.unlink_source:
+            if f_stdin:
+                return render_error('cannot unlink stdin')
+            os.unlink(f_in)

         return True
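Note on the rewritten branch above: '-' now maps to _STDIN/_STDOUT, templates read from stdin are compiled with from_string(), and output to stdout is flushed explicitly. A minimal, self-contained sketch of that stdin-to-stdout path, assuming only a bare jinja2.Environment (the helper name render_stream is illustrative, not part of this change):

import sys
import jinja2

def render_stream(template_text: str, **kwargs) -> None:
    # The real code overlays an existing environment with a ChoiceLoader
    # over its search path; a bare Environment is enough for stdin input.
    env = jinja2.Environment()
    tpl = env.from_string(template_text)
    # Mirror the f_stdout branch: write the rendered text and flush explicitly.
    sys.stdout.write(tpl.render(**kwargs))
    sys.stdout.flush()

if __name__ == '__main__':
    render_stream(sys.stdin.read(), greeting='hello')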
@@ -1,48 +1,58 @@
+import os
+import sys
+import pathlib
+import os.path
 import collections.abc
 import itertools
-import os.path
-import pathlib
+import hashlib
 import re
-import sys

 import jinja2

-from .settings import is_env_banned
+from .settings import J2CFG_BANNED_ENVS


-def is_sequence(x) -> bool:
-    if isinstance(x, str):
-        return False
-    return isinstance(x, collections.abc.Sequence)
+J2CFG_BUILTIN_FUNCTIONS = [
+    repr,
+    sorted,
+]


-def is_mapping(x) -> bool:
+def is_str(x) -> bool:
+    return isinstance(x, str)
+
+
+def is_seq(x) -> bool:
+    return isinstance(x, collections.abc.Sequence) and not is_str(x)
+
+
+def is_map(x) -> bool:
     return isinstance(x, collections.abc.Mapping)


 def uniq(a: list | set) -> list:
-    return sorted(set(a))
+    return list(set(a))


-def remove_non_str(a: list | set) -> list:
-    return list(filter(lambda x: isinstance(x, str), a))
+def only_str(a: list | set) -> list:
+    return list(filter(is_str, a))


-def remove_empty_str(a: list | set) -> list:
+def non_empty_str(a: list | set) -> list:
     return list(filter(None, a))


 def uniq_str_list(a: list | set) -> list:
-    return remove_empty_str(uniq(a))
+    return uniq(non_empty_str(a))


 def str_split_to_list(s: str, sep=r'\s+') -> list:
-    return remove_empty_str(re.split(sep, s))
+    return non_empty_str(re.split(sep, s))


-def dict_to_env_str_list(x: dict) -> list:
+def dict_to_str_list(x: dict) -> list:
     r = []
-    for k in sorted(x.keys()):
+    for k in x.keys():
         if x[k] is None:
             r.append(f'{k}')
         else:
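The helper trio introduced above drives the recursive filters that follow: dispatch is by shape, and is_seq() deliberately rejects str even though Python treats strings as sequences. A standalone illustration of the expected behaviour (a sketch, not taken from the project's test suite):

import collections.abc

def is_str(x) -> bool:
    return isinstance(x, str)

def is_seq(x) -> bool:
    return isinstance(x, collections.abc.Sequence) and not is_str(x)

def is_map(x) -> bool:
    return isinstance(x, collections.abc.Mapping)

assert is_str('abc') and not is_seq('abc')    # strings are not treated as sequences
assert is_seq([1, 2]) and is_seq((1, 2))      # lists and tuples are
assert is_map({'a': 1}) and not is_seq({'a': 1})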
@@ -53,152 +63,142 @@ def dict_to_env_str_list(x: dict) -> list:
 def any_to_str_list(x) -> list:
     if x is None:
         return []
-    if isinstance(x, str):
+    if is_str(x):
         return [x]
-    if is_sequence(x):
+    if is_seq(x):
         return [str(e) for e in x]
-    if is_mapping(x):
-        return dict_to_env_str_list(x)
+    if is_map(x):
+        return dict_to_str_list(x)
     return [str(x)]


-def is_re_match(x, pattern, flags=0) -> bool:
-    if isinstance(x, str):
-        return bool(re.match(pattern, x, flags))
-    if is_sequence(x):
-        return any(is_re_match(v, pattern, flags) for v in x)
-    if is_mapping(x):
-        return any(is_re_match(v, pattern, flags) for v in x.keys())
+def is_re_match(x, pat, opt=0) -> bool:
+    if is_str(x):
+        return bool(re.match(pat, x, opt))
+    if is_seq(x):
+        return any(is_re_match(v, pat, opt) for v in x)
+    if is_map(x):
+        return any(is_re_match(v, pat, opt) for v in x.keys())
     return False


-def is_re_fullmatch(x, pattern, flags=0) -> bool:
-    if isinstance(x, str):
-        return bool(re.fullmatch(pattern, x, flags))
-    if is_sequence(x):
-        return any(is_re_fullmatch(v, pattern, flags) for v in x)
-    if is_mapping(x):
-        return any(is_re_fullmatch(v, pattern, flags) for v in x.keys())
+def is_re_fullmatch(x, pat, opt=0) -> bool:
+    if is_str(x):
+        return bool(re.fullmatch(pat, x, opt))
+    if is_seq(x):
+        return any(is_re_fullmatch(v, pat, opt) for v in x)
+    if is_map(x):
+        return any(is_re_fullmatch(v, pat, opt) for v in x.keys())
     return False


-def re_match(x, pattern, flags=0):
-    if isinstance(x, str):
-        return re.match(pattern, x, flags)
-    if is_sequence(x):
-        return [v for v in x
-                if re_match(v, pattern, flags)]
-    if is_mapping(x):
-        return {k: v for k, v in x.items()
-                if re_match(k, pattern, flags)}
+def re_match(x, pat, opt=0):
+    if is_str(x):
+        return re.match(pat, x, opt)
+    if is_seq(x):
+        return [v for v in x if re_match(v, pat, opt)]
+    if is_map(x):
+        return {k: v for k, v in x.items() if re_match(k, pat, opt)}
     return None


-def re_fullmatch(x, pattern, flags=0):
-    if isinstance(x, str):
-        return re.fullmatch(pattern, x, flags)
-    if is_sequence(x):
-        return [v for v in x
-                if re_fullmatch(v, pattern, flags)]
-    if is_mapping(x):
-        return {k: v for k, v in x.items()
-                if re_fullmatch(k, pattern, flags)}
+def re_fullmatch(x, pat, opt=0):
+    if is_str(x):
+        return re.fullmatch(pat, x, opt)
+    if is_seq(x):
+        return [v for v in x if re_fullmatch(v, pat, opt)]
+    if is_map(x):
+        return {k: v for k, v in x.items() if re_fullmatch(k, pat, opt)}
     return None


-def re_match_negate(x, pattern, flags=0):
-    if isinstance(x, str):
-        return not bool(re.match(pattern, x, flags))
-    if is_sequence(x):
-        return [v for v in x
-                if re_match_negate(v, pattern, flags)]
-    if is_mapping(x):
-        return {k: v for k, v in x.items()
-                if re_match_negate(k, pattern, flags)}
+def re_match_neg(x, pat, opt=0):
+    if is_str(x):
+        return not bool(re.match(pat, x, opt))
+    if is_seq(x):
+        return [v for v in x if re_match_neg(v, pat, opt)]
+    if is_map(x):
+        return {k: v for k, v in x.items() if re_match_neg(k, pat, opt)}
     return x


-def re_fullmatch_negate(x, pattern, flags=0):
-    if isinstance(x, str):
-        return not bool(re.fullmatch(pattern, x, flags))
-    if is_sequence(x):
-        return [v for v in x
-                if re_fullmatch_negate(v, pattern, flags)]
-    if is_mapping(x):
-        return {k: v for k, v in x.items()
-                if re_fullmatch_negate(k, pattern, flags)}
+def re_fullmatch_neg(x, pat, opt=0):
+    if is_str(x):
+        return not bool(re.fullmatch(pat, x, opt))
+    if is_seq(x):
+        return [v for v in x if re_fullmatch_neg(v, pat, opt)]
+    if is_map(x):
+        return {k: v for k, v in x.items() if re_fullmatch_neg(k, pat, opt)}
     return x


 def dict_remap_keys(x: dict, key_map) -> dict:
     if key_map is None:
+        print('dict_remap_keys: key_map is None', file=sys.stderr)
         return x
-    p = set(x.keys())
     m = {}
-    for k in x:
+    for k in set(x.keys()):
         v = key_map(k)
-        if v == k:
+        if v in m:
+            # merely debug output
+            print(f'dict_remap_keys: duplicate key {repr(v)} <= {repr(k)}', file=sys.stderr)
             continue
-        m[k] = v
-        p.discard(k)
-        p.discard(v)
-    return {k: x[k] for k in p} | {v: x[k] for k, v in m.items()}
+        m[v] = x[k]
+    return m


-def re_sub(x, pattern, repl, count=0, flags=0):
-    if isinstance(x, str):
-        return re.sub(pattern, repl, x, count, flags)
-    if is_sequence(x):
-        return [
-            re_sub(v, pattern, repl, count, flags)
-            for v in x
-        ]
-    if is_mapping(x):
-        return dict_remap_keys(
-            x, lambda k:
-            re_sub(k, pattern, repl, count, flags)
-        )
+def re_sub(x, pat, repl, count=0, opt=0):
+    if is_str(x):
+        return re.sub(pat, repl, x, count, opt)
+    if is_seq(x):
+        return [re_sub(v, pat, repl, count, opt) for v in x]
+    if is_map(x):
+        return dict_remap_keys(x, lambda k: re_sub(k, pat, repl, count, opt))
     return x


-def as_cgi_hdr(x):
-    if isinstance(x, str):
-        return 'HTTP_' + re.sub('[^A-Z0-9]+', '_', x.upper()).strip('_')
-    if is_sequence(x):
-        return uniq([
-            as_cgi_hdr(v)
-            for v in x
-        ])
-    if is_mapping(x):
-        return dict_remap_keys(
-            x, as_cgi_hdr
-        )
+def cgi_header(x):
+    if is_str(x):
+        s = re.sub('[^A-Z0-9]+', '_', x.upper()).strip('_')
+        if s == '':
+            # merely debug output
+            print(f'cgi_header: x={repr(x)}', file=sys.stderr)
+            raise ValueError('cgi_header: empty header name')
+        return 'HTTP_' + s
+    if is_seq(x):
+        return [cgi_header(v) for v in x]
+    if is_map(x):
+        return dict_remap_keys(x, cgi_header)
     return x


-def as_ngx_var(x, pfx='custom'):
-    if isinstance(x, str):
-        parts = remove_empty_str(
-            [re.sub('[^a-z0-9]+', '_', str(i).lower()).strip('_')
-             for i in (pfx, x)]
-        )
+def http_header(x):
+    if is_str(x):
+        s = re.sub('[^a-zA-Z0-9]+', '-', x).strip('-')
+        if s == '':
+            # merely debug output
+            print(f'http_header: x={repr(x)}', file=sys.stderr)
+            raise ValueError('http_header: empty header name')
+        return s
+    if is_seq(x):
+        return [http_header(v) for v in x]
+    if is_map(x):
+        return dict_remap_keys(x, http_header)
+    return x
+
+
+def ngx_var(x, pfx='custom'):
+    if is_str(x):
+        parts = non_empty_str([re.sub('[^a-z0-9]+', '_', str(i).lower()).strip('_') for i in (pfx, x)])
         if len(parts) < 2:
-            print(
-                f'as_ngx_var: parts={parts}',
-                file=sys.stderr
-            )
-            raise ValueError('as_ngx_var: incomplete string array')
+            # merely debug output
+            print(f'ngx_var: parts={repr(parts)}', file=sys.stderr)
+            raise ValueError('ngx_var: incomplete string array')
         return '$' + '_'.join(parts)
-    if is_sequence(x):
-        return uniq([
-            as_ngx_var(v, pfx)
-            for v in x
-        ])
-    if is_mapping(x):
-        return dict_remap_keys(
-            x, lambda k:
-            as_ngx_var(k, pfx)
-        )
+    if is_seq(x):
+        return [ngx_var(v, pfx) for v in x]
+    if is_map(x):
+        return dict_remap_keys(x, lambda k: ngx_var(k, pfx))
     return x

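Behavioural note on dict_remap_keys(): the old version kept unmapped keys and merged the remapped ones on top, while the new version returns only the remapped dictionary and drops any key whose mapped name was already produced (iteration over set(x.keys()) leaves the survivor unspecified). A small usage sketch of the new semantics; the import path and the snake() mapper are illustrative assumptions:

from j2cfg.functions import dict_remap_keys  # assumed import path

def snake(k: str) -> str:
    return k.replace('-', '_').lower()

x = {'X-Forwarded-For': 1, 'Accept': 2}
print(dict_remap_keys(x, snake))
# -> {'x_forwarded_for': 1, 'accept': 2}; colliding target names keep only one source key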
@@ -208,6 +208,12 @@ def any_to_env_dict(x) -> dict:

     h = {}

+    def is_env_banned(k: str) -> bool:
+        for r in J2CFG_BANNED_ENVS:
+            if re.match(r, k):
+                return True
+        return False
+
     def feed(k, parse=False, v=None):
         if v is None:
             return
@@ -225,12 +231,12 @@ def any_to_env_dict(x) -> dict:
             return
         h[k] = v if v is None else str(v)

-    if isinstance(x, str):
+    if is_str(x):
         feed(x, True)
-    elif is_sequence(x):
+    elif is_seq(x):
         for e in x:
             feed(e, True)
-    elif is_mapping(x):
+    elif is_map(x):
         for k in x:
             feed(k, False, x[k])
     else:
@@ -240,7 +246,7 @@ def any_to_env_dict(x) -> dict:


 def dict_keys(x: dict) -> list:
-    return sorted([k for k in x.keys()])
+    return sorted(x.keys())


 def dict_empty_keys(x: dict) -> list:
@@ -259,39 +265,24 @@ def list_intersect(a: list | set, b: list | set) -> list:
     return list(set(a) & set(b))


-@jinja2.pass_environment
-def sh_like_file_to_list(j2env, file_in: str) -> list:
-    tpl = j2env.get_template(file_in)
-    text = pathlib.Path(tpl.filename).read_text(encoding='utf-8')
-    lines = re.split(r'[\r\n]', text)
-    return list(itertools.filterfalse(
-        lambda x: re.match(r'\s*#', x), lines
-    ))
-
-
 def ngx_esc(x):
-    if isinstance(x, str):
-        if x == "":
+    if x is None:
+        return None
+    if is_str(x):
+        if x == '':
             return "''"
         if re.search(r'(?:\s|[;{}()\[\]\\\'"*?])', x):
             return repr(x)
         return x
-    if is_sequence(x):
-        return uniq([
-            ngx_esc(v)
-            for v in x
-        ])
-    if is_mapping(x):
-        return dict_remap_keys(
-            x, ngx_esc
-        )
-    if x is None:
-        return None
+    if is_seq(x):
+        return [ngx_esc(v) for v in x]
+    if is_map(x):
+        return dict_remap_keys(x, ngx_esc)
     return ngx_esc(str(x))


 def from_gobool(x) -> bool:
-    if isinstance(x, str):
+    if is_str(x):
         return x.lower() in {'1', 't', 'true'}
     return bool(x)

@@ -304,14 +295,14 @@ def merge_dict_recurse(d1, d2: dict) -> dict:
     common = keys1 & keys2
     missing = keys2 - common

-    map1 = {k for k in common if is_mapping(x.get(k))}
-    seq1 = {k for k in common if is_sequence(x.get(k))}
+    map1 = {k for k in common if is_map(x.get(k))}
+    seq1 = {k for k in common if is_seq(x.get(k))}
     misc1 = common - seq1 - map1

     merge_safe = missing | misc1
     x.update({k: d2.get(k) for k in merge_safe})

-    map_common = {k for k in map1 if is_mapping(d2.get(k))}
+    map_common = {k for k in map1 if is_map(d2.get(k))}
     for k in map_common:
         y = d2.get(k)
         if not y:
@@ -319,7 +310,7 @@ def merge_dict_recurse(d1, d2: dict) -> dict:
             continue
         x[k] = merge_dict_recurse(x.get(k), y)

-    seq_common = {k for k in seq1 if is_sequence(d2.get(k))}
+    seq_common = {k for k in seq1 if is_seq(d2.get(k))}
     for k in seq_common:
         y = d2.get(k)
         if not y:
@@ -331,10 +322,7 @@ def merge_dict_recurse(d1, d2: dict) -> dict:
     for k in unmerged:
         t1 = type(x.get(k))
         t2 = type(d2.get(k))
-        print(
-            f'merge_dict_recurse(): skipping key {k}'
-            + f' due to type mismatch: {t1} vs. {t2}',
-            file=sys.stderr)
+        print(f'merge_dict_recurse(): skipping key {k} due to type mismatch: {t1} vs. {t2}', file=sys.stderr)

     return x

@@ -354,34 +342,159 @@ def join_prefix(prefix: str, *paths) -> str:
     return rv


-J2CFG_FILTERS = [
+def md5(x: str) -> str:
+    return hashlib.md5(x.encode('utf-8')).hexdigest()
+
+
+def sha1(x: str) -> str:
+    return hashlib.sha1(x.encode('utf-8')).hexdigest()
+
+
+def sha256(x: str) -> str:
+    return hashlib.sha256(x.encode('utf-8')).hexdigest()
+
+
+def sha384(x: str) -> str:
+    return hashlib.sha384(x.encode('utf-8')).hexdigest()
+
+
+def sha512(x: str) -> str:
+    return hashlib.sha512(x.encode('utf-8')).hexdigest()
+
+
+def sha3_256(x: str) -> str:
+    return hashlib.sha3_256(x.encode('utf-8')).hexdigest()
+
+
+def sha3_384(x: str) -> str:
+    return hashlib.sha3_384(x.encode('utf-8')).hexdigest()
+
+
+def sha3_512(x: str) -> str:
+    return hashlib.sha3_512(x.encode('utf-8')).hexdigest()
+
+
+def file_md5(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.md5(f.read()).hexdigest()
+
+
+def file_sha1(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha1(f.read()).hexdigest()
+
+
+def file_sha256(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha256(f.read()).hexdigest()
+
+
+def file_sha384(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha384(f.read()).hexdigest()
+
+
+def file_sha512(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha512(f.read()).hexdigest()
+
+
+def file_sha3_256(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha3_256(f.read()).hexdigest()
+
+
+def file_sha3_384(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha3_384(f.read()).hexdigest()
+
+
+def file_sha3_512(x: str) -> str:
+    with open(x, 'rb') as f:
+        return hashlib.sha3_512(f.read()).hexdigest()
+
+
+@jinja2.pass_environment
+def shell_like_file_to_list(j2env, file_in: str) -> list:
+    tpl = j2env.get_template(file_in)
+    text = pathlib.Path(tpl.filename).read_text(encoding='utf-8')
+    lines = re.split(r'[\r\n]', text)
+    return list(itertools.filterfalse(
+        lambda x: re.match(r'\s*#', x), lines
+    ))
+
+
+@jinja2.pass_context
+def header_rule_policy(j2ctx: jinja2.runtime.Context, header: str) -> str:
+    DEFAULT_POLICY = 'deny'
+
+    header = http_header(header).lower()
+    # "header" is now guaranteed to be non-empty and lowercase
+
+    x = j2ctx.resolve('j2cfg')
+    if (x is jinja2.Undefined) or (x is None):
+        print('header_rule_policy(): j2cfg is undefined in runtime context', file=sys.stderr)
+        return DEFAULT_POLICY
+    for k in ['headers', 'rules', header]:
+        x = x.get(k)
+        if x is None:
+            break
+
+    if is_str(x):
+        return x
+    return DEFAULT_POLICY
+
+
+J2CFG_FUNCTIONS = J2CFG_BUILTIN_FUNCTIONS + [
     any_to_env_dict,
     any_to_str_list,
-    as_cgi_hdr,
-    as_ngx_var,
+    cgi_header,
     dict_empty_keys,
     dict_keys,
     dict_non_empty_keys,
     dict_remap_keys,
-    dict_to_env_str_list,
+    dict_to_str_list,
+    file_md5,
+    file_sha1,
+    file_sha256,
+    file_sha384,
+    file_sha512,
+    file_sha3_256,
+    file_sha3_384,
+    file_sha3_512,
     from_gobool,
-    is_mapping,
+    http_header,
+    is_map,
     is_re_fullmatch,
     is_re_match,
-    is_sequence,
+    is_seq,
+    is_str,
     join_prefix,
     list_diff,
     list_intersect,
+    md5,
     ngx_esc,
+    ngx_var,
+    non_empty_str,
+    only_str,
     re_fullmatch,
-    re_fullmatch_negate,
+    re_fullmatch_neg,
     re_match,
-    re_match_negate,
+    re_match_neg,
     re_sub,
-    remove_empty_str,
-    remove_non_str,
-    sh_like_file_to_list,
+    sha1,
+    sha256,
+    sha384,
+    sha512,
+    sha3_256,
+    sha3_384,
+    sha3_512,
     str_split_to_list,
     uniq,
     uniq_str_list,
 ]
+
+J2CFG_FILTERS = J2CFG_FUNCTIONS + [
+    header_rule_policy,
+    shell_like_file_to_list,
+]
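J2CFG_FUNCTIONS and J2CFG_FILTERS are plain lists of callables, so a consumer presumably registers them by __name__ on a Jinja2 environment. A hedged sketch of such a registration; the make_env() helper is illustrative and not part of this diff:

import jinja2

def make_env(functions, filters) -> jinja2.Environment:
    env = jinja2.Environment(undefined=jinja2.StrictUndefined)
    # Expose each callable under its own name, both as a global and as a filter.
    env.globals.update({f.__name__: f for f in functions})
    env.filters.update({f.__name__: f for f in filters})
    return env

env = make_env([repr, sorted], [sorted])
print(env.from_string("{{ repr(x | sorted) }}").render(x=[3, 1, 2]))  # prints [1, 2, 3]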
@@ -1,16 +1,21 @@
-import re
-
-
-J2CFG_TEMPLATE_EXT = '.j2'
-
 J2CFG_DEFAULTS_FILE = '/run/ngx/conf/j2cfg.yml'

 J2CFG_PATH = [
     '/run/ngx/conf/j2cfg',
 ]

+J2CFG_CONFIG_EXT = [
+    '.yml', '.yaml',
+    '.json',
+    '.toml',
+]
+
+J2CFG_TEMPLATE_EXT = '.j2'
+
 J2CFG_PYTHON_MODULES = [
+    'datetime',
+    'hashlib',
     'itertools',
-    'json',
     'os',
     'os.path',
     'pathlib',

@@ -34,8 +39,6 @@ J2CFG_PRESERVE_ENVS = [
     # glibc
     'GLIBC_TUNABLES',
     'MALLOC_ARENA_MAX',
-    # jemalloc
-    'MALLOC_CONF',
 ]

 J2CFG_PASSTHROUGH_ENVS = [

@@ -71,10 +74,3 @@ J2CFG_BANNED_ENVS = [
     r'ENVSUBST_',
     r'J2CFG_',
 ]
-
-
-def is_env_banned(k: str) -> bool:
-    for r in J2CFG_BANNED_ENVS:
-        if re.match(r, k):
-            return True
-    return False
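With '.toml' now listed in J2CFG_CONFIG_EXT next to the YAML and JSON extensions, the defaults loader is presumably expected to dispatch on file suffix. A speculative sketch of such a dispatch, using only parsers already available (pyyaml from the pinned requirements, the stdlib json, and tomllib from Python 3.11+); load_config() is illustrative, not code from this diff:

import json
import pathlib
import tomllib
import yaml

def load_config(path: str):
    p = pathlib.Path(path)
    ext = p.suffix.lower()
    if ext in ('.yml', '.yaml'):
        return yaml.safe_load(p.read_text(encoding='utf-8'))
    if ext == '.json':
        return json.loads(p.read_text(encoding='utf-8'))
    if ext == '.toml':
        # tomllib only exposes loads()/load() for reading
        return tomllib.loads(p.read_text(encoding='utf-8'))
    raise ValueError(f'unsupported config extension: {ext}')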
@@ -1,173 +1,162 @@
-j2cfg:
-{{ j2cfg }}
+{% set x = '123' %}
+x = {{ repr(x) }}
+ngx_esc(x): {{ x | ngx_esc }}
+{% set x = '1 23' %}
+x = {{ repr(x) }}
+ngx_esc(x): {{ x | ngx_esc }}
+{% set x = '' %}
+x = {{ repr(x) }}
+ngx_esc(x): {{ x | ngx_esc }}
+{% set x = [1,2,3,4] %}
+x = {{ x }}
+ngx_esc(x): {{ x | ngx_esc }}
+{% set x = {1:2,3:4} %}
+x = {{ x }}
+ngx_esc(x): {{ x | ngx_esc }}
+
+{% set x = '123' %}
+x = {{ repr(x) }}
+is_str(x): {{ x | is_str }}
+is_seq(x): {{ x | is_seq }}
+is_map(x): {{ x | is_map }}
+any_to_str_list(x): {{ x | any_to_str_list }}

 {% set x = [1,2,3,4] %}
 x = {{ x }}
-is_sequence:
-{{ x | is_sequence }}
+is_str(x): {{ x | is_str }}
+is_seq(x): {{ x | is_seq }}
+is_map(x): {{ x | is_map }}
+any_to_str_list(x): {{ x | any_to_str_list }}

 {% set x = {1:2,3:4} %}
 x = {{ x }}
-is_sequence:
-{{ x | is_sequence }}
-
-{% set x = [1,2,3,4] %}
-x = {{ x }}
-is_mapping:
-{{ x | is_mapping }}
-
-{% set x = {1:2,3:4} %}
-x = {{ x }}
-is_mapping:
-{{ x | is_mapping }}
+is_str(x): {{ x | is_str }}
+is_seq(x): {{ x | is_seq }}
+is_map(x): {{ x | is_map }}
+any_to_str_list(x): {{ x | any_to_str_list }}

 {% set x = [2,3,1,2] %}
 x = {{ x }}
-uniq:
-{{ x | uniq }}
+uniq(x): {{ x | uniq }}

 {% set x = ['2',3,'1','2'] %}
 x = {{ x }}
-remove_non_str:
-{{ x | remove_non_str }}
+only_str(x): {{ x | only_str }}

 {% set x = ['2','','1','2'] %}
 x = {{ x }}
-remove_empty_str:
-{{ x | remove_empty_str }}
+non_empty_str(x): {{ x | non_empty_str }}

 {% set x = ['2','3','1','2'] %}
 x = {{ x }}
-uniq_str_list:
-{{ x | uniq_str_list }}
+uniq_str_list(x): {{ x | uniq_str_list }}

 {% set x = '2 3 1 2 ' %}
-x = {{ x.__repr__() }}
-str_split_to_list:
-{{ x | str_split_to_list }}
+x = {{ repr(x) }}
+str_split_to_list(x): {{ x | str_split_to_list }}

 {% set x = '2:3::1:2:' %}
-x = {{ x.__repr__() }}
-str_split_to_list(':'):
-{{ x | str_split_to_list(':') }}
+x = {{ repr(x) }}
+str_split_to_list(x, ':'): {{ x | str_split_to_list(':') }}

 {% set x = { 'VAR1': 'Etc/UTC', 'VAR2': '', 'VAR3': None, '4VAR4': 'yeah', 'VAR5=not': 'yeah', 'VAR5=real yeah': None, 'VAR6': {'pi': 3.1415926}, 'VAR7': ['pi', 3.1415926] } %}
 x = {{ x }}
-dict_to_env_str_list:
-{{ x | dict_to_env_str_list }}
-
-{% set x = '1 2 3 4' %}
-x = {{ x.__repr__() }}
-any_to_str_list:
-{{ x | any_to_str_list }}
-
-{% set x = [1,2,3,4] %}
-x = {{ x }}
-any_to_str_list:
-{{ x | any_to_str_list }}
+dict_keys(x): {{ x | dict_keys }}
+dict_empty_keys(x): {{ x | dict_empty_keys }}
+dict_non_empty_keys(x): {{ x | dict_non_empty_keys }}
+dict_to_str_list(x): {{ x | dict_to_str_list }}
+any_to_str_list(x): {{ x | any_to_str_list }}
+any_to_env_dict(x): {{ x | any_to_env_dict }}

 {% set x = 3.1415926 %}
 x = {{ x }}
-any_to_str_list:
-{{ x | any_to_str_list }}
+any_to_str_list(x): {{ x | any_to_str_list }}

 {% set x = ['a2','b3','c1','d2'] %}
 x = {{ x }}
-is_re_match('[ab]'):
-{{ x | is_re_match('[ab]') }}
-is_re_match('[mn]'):
-{{ x | is_re_match('[mn]') }}
-
-{% set x = ['a2','b3','c1','d2'] %}
-x = {{ x }}
-is_re_fullmatch('[ab]'):
-{{ x | is_re_fullmatch('[ab]') }}
-is_re_fullmatch('[ab][12]'):
-{{ x | is_re_fullmatch('[ab][12]') }}
-
-{% set x = ['a2','b3','c1','d2'] %}
-x = {{ x }}
-re_match('[ab]'):
-{{ x | re_match('[ab]') }}
-re_match('[mn]'):
-{{ x | re_match('[mn]') }}
-
-{% set x = ['a2','b3','c1','d2'] %}
-x = {{ x }}
-re_fullmatch('[ab]'):
-{{ x | re_fullmatch('[ab]') }}
-re_fullmatch('[ab][12]'):
-{{ x | re_fullmatch('[ab][12]') }}
-
-{% set x = ['a2','b3','c1','d2'] %}
-x = {{ x }}
-re_match_negate('[ab]'):
-{{ x | re_match_negate('[ab]') }}
-re_match_negate('[mn]'):
-{{ x | re_match_negate('[mn]') }}
-
-{% set x = ['a2','b3','c1','d2'] %}
-x = {{ x }}
-re_fullmatch_negate('[ab]'):
-{{ x | re_fullmatch_negate('[ab]') }}
-re_fullmatch_negate('[ab][12]'):
-{{ x | re_fullmatch_negate('[ab][12]') }}
+is_re_match(x, '[ab]'): {{ x | is_re_match('[ab]') }}
+is_re_match(x, '[mn]'): {{ x | is_re_match('[mn]') }}
+is_re_fullmatch(x, '[ab]'): {{ x | is_re_fullmatch('[ab]') }}
+is_re_fullmatch(x, '[ab][12]'): {{ x | is_re_fullmatch('[ab][12]') }}
+re_match(x, '[ab]'): {{ x | re_match('[ab]') }}
+re_match(x, '[mn]'): {{ x | re_match('[mn]') }}
+re_fullmatch(x, '[ab]'): {{ x | re_fullmatch('[ab]') }}
+re_fullmatch(x, '[ab][12]'): {{ x | re_fullmatch('[ab][12]') }}
+re_match_neg(x, '[ab]'): {{ x | re_match_neg('[ab]') }}
+re_match_neg(x, '[mn]'): {{ x | re_match_neg('[mn]') }}
+re_fullmatch_neg(x, '[ab]'): {{ x | re_fullmatch_neg('[ab]') }}
+re_fullmatch_neg(x, '[ab][12]'): {{ x | re_fullmatch_neg('[ab][12]') }}

 {% set x = ['a2b','b3b','c1f','d2g'] %}
 x = {{ x }}
-re_sub('[ab]', '_'):
-{{ x | re_sub('[ab]', '_') }}
-re_sub('[mn]', '_'):
-{{ x | re_sub('[mn]', '_') }}
-
-{% set x = 'settings.py' %}
-x = {{ x.__repr__() }}
-sh_like_file_to_list:
-{{ 'settings.py' | sh_like_file_to_list }}
+re_sub(x, '[ab]', '_'): {{ x | re_sub('[ab]', '_') }}
+re_sub(x, '[mn]', '_'): {{ x | re_sub('[mn]', '_') }}

 {% set x = 'Accept-Encoding' %}
-x = {{ x.__repr__() }}
-as_cgi_hdr:
-{{ x | as_cgi_hdr }}
+x = {{ repr(x) }}
+cgi_header(x): {{ x | cgi_header }}
+http_header(x): {{ x | http_header }}

 {% set x = '_Permissions-Policy--' %}
-x = {{ x.__repr__() }}
-as_cgi_hdr:
-{{ x | as_cgi_hdr }}
+x = {{ repr(x) }}
+cgi_header(x): {{ x | cgi_header }}
+http_header(x): {{ x | http_header }}
+
+{% set x = '@@Content__Type@@' %}
+x = {{ repr(x) }}
+cgi_header(x): {{ x | cgi_header }}
+http_header(x): {{ x | http_header }}
+
+{% set x = 'proxy-type' %}
+x = {{ repr(x) }}
+ngx_var(x): {{ x | ngx_var }}
+ngx_var(x, 'my'): {{ x | ngx_var('my') }}

 {% set x = 'VAR1=Etc/UTC' %}
-x = {{ x.__repr__() }}
-any_to_env_dict:
-{{ x | any_to_env_dict }}
+x = {{ repr(x) }}
+any_to_env_dict(x): {{ x | any_to_env_dict }}

 {% set x = ['VAR1=Etc/UTC', 'VAR2=', 'VAR3', '4VAR4=yeah', 'VAR5=yeah', 'VAR5=not-yeah'] %}
 x = {{ x }}
-any_to_env_dict:
-{{ x | any_to_env_dict }}
-
-{% set x = { 'VAR1': 'Etc/UTC', 'VAR2': '', 'VAR3': None, '4VAR4': 'yeah', 'VAR5=not': 'yeah', 'VAR5=real yeah': None, 'VAR6': {'pi': 3.1415926}, 'VAR7': ['pi', 3.1415926] } %}
-x = {{ x }}
-any_to_env_dict:
-{{ x | any_to_env_dict }}
-
-{% set x = { 'VAR1': 'Etc/UTC', 'VAR2': '', 'VAR3': None, '4VAR4': 'yeah', 'VAR5=not': 'yeah', 'VAR5=real yeah': None, 'VAR6': {'pi': 3.1415926}, 'VAR7': ['pi', 3.1415926] } %}
-x = {{ x }}
-dict_keys:
-{{ x | dict_keys }}
-dict_empty_keys:
-{{ x | dict_empty_keys }}
-dict_non_empty_keys:
-{{ x | dict_non_empty_keys }}
+any_to_env_dict(x): {{ x | any_to_env_dict }}

 {% set x = [1,2,3,4] %}
 {% set y = [3,4,5,6] %}
 x = {{ x }}
 y = {{ y }}
-list_diff(x, y):
-{{ x | list_diff(y) }}
-list_diff(y, x):
-{{ y | list_diff(x) }}
-list_intersect(x, y):
-{{ x | list_intersect(y) }}
-list_intersect(y, x):
-{{ y | list_intersect(x) }}
+list_diff(x, y): {{ x | list_diff(y) }}
+list_diff(y, x): {{ y | list_diff(x) }}
+list_intersect(x, y): {{ x | list_intersect(y) }}
+list_intersect(y, x): {{ y | list_intersect(x) }}
+
+{% set x = 'settings.py' %}
+x = {{ repr(x) }}
+md5(x): {{ x | md5 }}
+sha1(x): {{ x | sha1 }}
+sha256(x): {{ x | sha256 }}
+sha384(x): {{ x | sha384 }}
+sha512(x): {{ x | sha512 }}
+sha3_256(x): {{ x | sha3_256 }}
+sha3_384(x): {{ x | sha3_384 }}
+sha3_512(x): {{ x | sha3_512 }}
+file_md5(x): {{ x | file_md5 }}
+file_sha1(x): {{ x | file_sha1 }}
+file_sha256(x): {{ x | file_sha256 }}
+file_sha384(x): {{ x | file_sha384 }}
+file_sha512(x): {{ x | file_sha512 }}
+file_sha3_256(x): {{ x | file_sha3_256 }}
+file_sha3_384(x): {{ x | file_sha3_384 }}
+file_sha3_512(x): {{ x | file_sha3_512 }}
+
+{% set x = '/topdir' %}
+x = {{ repr(x) }}
+join_prefix(x, 'access.log'): {{ join_prefix(x, 'access.log') }}
+join_prefix(x, './access.log'): {{ join_prefix(x, './access.log') }}
+join_prefix(x, '/access.log'): {{ join_prefix(x, '/access.log') }}
+join_prefix(x, '../access.log'): {{ join_prefix(x, '../access.log') }}
+
+{% set x = 'settings.py' %}
+x = {{ repr(x) }}
+shell_like_file_to_list(x): {{ x | shell_like_file_to_list }}
+
+{# end of tests #}
@@ -1,4 +1,4 @@
 jinja2==3.1.6
 psutil==7.0.0
 pyyaml==6.0.2
-wcmatch==10.0
+wcmatch==10.1
scripts/envsubst-dirs (new executable file, 31 lines)
@@ -0,0 +1,31 @@
+#!/bin/sh
+set -ef
+
+__template_list=$(mktemp)
+
+find "$@" -follow -name '*.in' -type f -printf '%p\0' \
+| sort -zuV > "${__template_list}"
+
+[ -s "${__template_list}" ] || {
+    rm -f "${__template_list}"
+    exit
+}
+
+__have_args="${ENVSUBST_ARGS:+1}"
+if [ -z "${__have_args}" ] ; then
+    ## optimize envsubst-single invocation by caching argument list
+    ## ref: envsubst-single
+    ENVSUBST_ARGS=$(mktemp)
+    envsubst-args.sh > "${ENVSUBST_ARGS}"
+    export ENVSUBST_ARGS
+fi
+
+set +e ; __ret=0
+xargs -0r -n 1000 -a "${__template_list}" \
+envsubst-multi < /dev/null || __ret=1
+
+[ -n "${__have_args}" ] || rm -f "${ENVSUBST_ARGS}"
+
+rm -f "${__template_list}"
+
+exit ${__ret}
scripts/envsubst-multi (new executable file, 20 lines)
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -ef
+
+__have_args="${ENVSUBST_ARGS:+1}"
+if [ -z "${__have_args}" ] ; then
+    ## optimize envsubst-single invocation by caching argument list
+    ## ref: envsubst-single
+    ENVSUBST_ARGS=$(mktemp)
+    envsubst-args.sh > "${ENVSUBST_ARGS}"
+    export ENVSUBST_ARGS
+fi
+
+set +e ; __ret=0
+for i ; do
+    envsubst-single "$i" || __ret=1
+done
+
+[ -n "${__have_args}" ] || rm -f "${ENVSUBST_ARGS}"
+
+exit ${__ret}
scripts/envsubst-single (new executable file, 58 lines)
@@ -0,0 +1,58 @@
+#!/bin/sh
+set -f
+
+src='-' dst='-'
+case $# in
+0 ) ;;
+1 )
+    src="$1"
+    case "$1" in
+    *.in ) dst="${1%".in"}" ;;
+    esac
+;;
+2 ) src="$1" ; dst="$2" ;;
+* ) exit 1 ;;
+esac
+[ -n "${src}" ] || exit 1
+[ -n "${dst}" ] || exit 1
+if [ "${src}" = '-' ] ; then src=/dev/stdin ; fi
+if [ "${dst}" = '-' ] ; then dst=/dev/stdout ; fi
+
+is_same_file() {
+    find -L "$1" -samefile "$2" -printf . -quit 2>/dev/null | grep -Fq . || return 1
+}
+
+if is_same_file "${src}" /dev/stdin ; then src=/dev/stdin ; fi
+if is_same_file "${dst}" /dev/stdout ; then dst=/dev/stdout ; fi
+
+while : ; do
+    if [ "${src}" = '/dev/stdin' ] && [ "${dst}" = '/dev/stdout' ] ; then
+        break
+    fi
+
+    if is_same_file "${src}" "${dst}" ; then
+        exit 1
+    fi
+break ; done
+
+set +e ; unset __ret
+while [ -n "${ENVSUBST_ARGS}" ] ; do
+    [ -f "${ENVSUBST_ARGS}" ] || break
+    [ -s "${ENVSUBST_ARGS}" ] || break
+
+    envsubst "$(cat "${ENVSUBST_ARGS}" </dev/null)" < "${src}" > "${dst}"
+    __ret=$?
+break ; done
+if [ -z "${__ret}" ] ; then
+    envsubst "$(envsubst-args.sh </dev/null)" < "${src}" > "${dst}"
+    __ret=$?
+fi
+
+while : ; do
+    [ "${ENVSUBST_UNLINK_SRC}" = 1 ] || break
+    [ "${src}" != '/dev/stdin' ] || break
+
+    rm -f "${src}"
+break ; done
+
+exit ${__ret}
(deleted file)
@@ -1,12 +0,0 @@
-#!/bin/sh
-set -f
-
-while [ -n "${ENVSUBST_ARGS}" ] ; do
-    [ -f "${ENVSUBST_ARGS}" ] || break
-    [ -s "${ENVSUBST_ARGS}" ] || break
-
-    exec envsubst "$(cat "${ENVSUBST_ARGS}" </dev/null)" "$@"
-    exit 126
-done
-
-exec envsubst "$(envsubst-args.sh </dev/null)" "$@"
scripts/j2cfg-dirs (new executable file, 20 lines)
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -ef
+
+__template_list=$(mktemp)
+
+find "$@" -follow -name '*.j2' -type f -printf '%p\0' \
+| sort -zuV > "${__template_list}"
+
+[ -s "${__template_list}" ] || {
+    rm -f "${__template_list}"
+    exit
+}
+
+set +e ; __ret=0
+xargs -0r -n 1000 -a "${__template_list}" \
+j2cfg-multi < /dev/null || __ret=1
+
+rm -f "${__template_list}"
+
+exit ${__ret}