
Compare commits


53 Commits

SHA1 Message Date
9871b29288 treewide: move zsh functions to sh scripts
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-09-09 20:53:55 +03:00
37599593bc ci: switch to Debian 13 "Trixie"
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-08-18 12:08:45 +03:00
a6e52c8992 zsh: improve sbuild snippet
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-08-18 10:58:51 +03:00
06126b2991 sbuild: dedup & adjust
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-08-17 17:36:29 +03:00
00648901a9 zsh: update
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2025-07-23 10:59:44 +03:00
48e93e48b6 zsh: openwrt-related goodies
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/manual/woodpecker Pipeline was successful
2025-07-13 21:09:51 +03:00
12449788a6 zsh/git goodies
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-07-08 12:21:04 +03:00
3b6c18395b zsh: openwrt-related goodies
listing archives with apk-tools 3.0.0_pre20250606 is PITA
2025-07-08 12:20:59 +03:00
c58b27fe29 zsh/git goodies 2025-06-18 19:32:44 +03:00
403b8a7e6c dotfiles: update git config
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-05-02 16:19:30 +03:00
8137c7efde zsh: git branch snippet 2025-05-02 16:13:31 +03:00
b12c01b3cb git: refresh
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-05-02 15:27:13 +03:00
88262ef200 sbuild: improve changes from fa00d72c
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-03-14 10:38:58 +03:00
fa00d72cbc sbuild: enforce random build directory
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-03-14 02:33:22 +03:00
e9e9b7fc51 dotfiles: cleanup
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-01-27 11:24:41 +03:00
5662040673 dotfiles: sbuild
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2025-01-17 04:16:11 +03:00
5cfbe03d71 zsh: optimize gpg-agent initialization
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-12-19 20:50:46 +03:00
4e67805847 dotfiles: update gitignore 2024-12-19 20:49:51 +03:00
8956cd7280 dotfiles: separate gitignore for vcs 2024-12-19 20:49:08 +03:00
61fb51b3eb zsh: update sbuild once more
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-08-03 14:08:31 +03:00
4e8515debe zsh: correct sbuild snippet
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-08-03 13:51:01 +03:00
bc90303dc1 ci: minor adjust
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/manual/woodpecker Pipeline was successful
2024-07-28 21:28:58 +03:00
055de153b1 ci: update
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/manual/woodpecker Pipeline was successful
2024-07-28 19:43:09 +03:00
6c0ac78b6e zsh: improve git snippets
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/manual/woodpecker Pipeline was successful
2024-07-23 23:08:35 +03:00
e6678b1a31 fix bd1f46de
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-07-12 15:45:14 +03:00
bd1f46dec6 zsh: container goodies 2024-07-12 15:41:17 +03:00
9012696fea zsh: bit better PATH 2024-07-10 12:09:31 +03:00
0ae83ee81b dotfiles: improve forge selection 2024-07-05 17:51:22 +03:00
4112913166 git: refresh a bit 2024-07-05 15:02:09 +03:00
8378e9993d zsh: rework krd-quilt 2024-07-05 13:42:31 +03:00
e3ff1bcb69 zsh: update 2024-07-05 13:18:16 +03:00
8283b6c752 zsh: fix dc30f109 2024-06-16 12:51:37 +03:00
785294aef5 zsh: better detect gnu screen 2024-06-16 12:49:12 +03:00
dc30f109cf zsh: adjust container-related stuff 2024-06-16 12:48:58 +03:00
d9ef22abc5 dotfiles: fix copy-paste
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-03-20 12:16:19 +03:00
69ba98a9d5 dotfiles: update
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
select origin dynamically
2024-03-20 11:41:04 +03:00
b2436b9e21 zsh: minor ssh-agent fix
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-03-04 16:11:07 +03:00
5a9f0b08c0 zsh: ssh-agent persistence
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-03-04 16:05:08 +03:00
98deefe039 zsh: fix ssh-agent
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-03-04 15:47:43 +03:00
6d29f9afbc dotfiles: fix accounting
fixes 20aafe29
2024-03-04 15:42:02 +03:00
20aafe29e8 zsh: update examples
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-03-04 15:39:55 +03:00
8834da817c zsh: update
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-03-04 15:35:55 +03:00
25123cece2 zsh: completion
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
many thanks to blackmou5e for pointing this out
2024-02-29 12:34:20 +03:00
3f527a0c73 zsh: modules
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-02-29 11:10:03 +03:00
3321d21f8b zsh: rework ZDOTDIR redirection
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-02-29 01:04:53 +03:00
7aac214e04 dotfiles: fix
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-02-29 00:21:48 +03:00
0bb93c3b11 zsh: minor fixes
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-02-29 00:15:08 +03:00
ea774fdd8b dotfiles: update
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
rework htop layout :)
2024-02-29 00:02:58 +03:00
d872b655c9 zsh: update
PS: zprof rocks!
2024-02-28 23:22:33 +03:00
47b2170058 zsh: update
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-02-23 21:46:04 +03:00
0289d90fb9 zsh: zwc things
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-02-20 09:51:55 +03:00
a54be4863a dotfiles: remove unreachable code
found by CI
2024-02-20 09:35:47 +03:00
d05065f3aa ci: introduce 2024-02-20 09:35:34 +03:00
108 changed files with 3926 additions and 728 deletions

.cache/zsh/ssh/.keep Normal file

@@ -30,5 +30,3 @@ else
## development tree
gen_gitignore "${path_gitignore}"
fi
exit 1


@@ -1,28 +1,79 @@
*
!/.cache/zsh/compcache/.keep
!/.cache/zsh/completion/.keep
!/.cache/zsh/compzwc/.keep
!/.cache/zsh/ssh/.keep
!/.config/dotfiles/bin/.keep
!/.config/dotfiles/gen-gitignore.sh
!/.config/dotfiles/gitattributes
!/.config/dotfiles/gitignore
!/.config/dotfiles/gitignore.vcs
!/.config/dotfiles/install.sh
!/.config/dotfiles/scripts/.keep
!/.config/htop/htoprc.example
!/.config/mc/ini.example
!/.config/dotfiles/scripts/_perl-wrapper.sh
!/.config/dotfiles/scripts/_rsync-wrapper.sh
!/.config/dotfiles/scripts/apt-add-keyring
!/.config/dotfiles/scripts/apt-env
!/.config/dotfiles/scripts/apt-http-fetch
!/.config/dotfiles/scripts/apt-install
!/.config/dotfiles/scripts/apt-remove
!/.config/dotfiles/scripts/apt-repo-get-fpr
!/.config/dotfiles/scripts/apt-search
!/.config/dotfiles/scripts/apt-update
!/.config/dotfiles/scripts/check-bin-pkg
!/.config/dotfiles/scripts/crlf-fix
!/.config/dotfiles/scripts/deb-src-export
!/.config/dotfiles/scripts/deb-ver-parse
!/.config/dotfiles/scripts/dperl
!/.config/dotfiles/scripts/dpkg-list-auto
!/.config/dotfiles/scripts/dpkg-list-installed
!/.config/dotfiles/scripts/dpkg-list-manual
!/.config/dotfiles/scripts/dpkg-list-martians
!/.config/dotfiles/scripts/dpkg-search
!/.config/dotfiles/scripts/dpkg-source-raw
!/.config/dotfiles/scripts/dpkg-which
!/.config/dotfiles/scripts/ensure-eof-empty-line
!/.config/dotfiles/scripts/from
!/.config/dotfiles/scripts/gpg-batch
!/.config/dotfiles/scripts/gpg-export
!/.config/dotfiles/scripts/gpg-sign-file
!/.config/dotfiles/scripts/gpg-warmup
!/.config/dotfiles/scripts/idle
!/.config/dotfiles/scripts/krd-debsrc
!/.config/dotfiles/scripts/krd-quilt
!/.config/dotfiles/scripts/krd-sbuild
!/.config/dotfiles/scripts/openssl-cert-auto-pem
!/.config/dotfiles/scripts/openssl-generate-dh-bundle
!/.config/dotfiles/scripts/openssl-ocsp
!/.config/dotfiles/scripts/pip-env
!/.config/dotfiles/scripts/quiet
!/.config/dotfiles/scripts/quilt-series-auto
!/.config/dotfiles/scripts/quilt-series-strip-comments
!/.config/dotfiles/scripts/run-as
!/.config/dotfiles/scripts/sperl
!/.config/dotfiles/scripts/static-compress
!/.config/dotfiles/scripts/to
!/.config/dotfiles/scripts/try-read-stdin
!/.config/dotfiles/scripts/ufind.sh
!/.config/dotfiles/scripts/zap-tree
!/.config/dotfiles/woodpecker.yml
!/.config/htop/htoprc.dist
!/.config/mc/ini.dist
!/.config/sbuild/config.pl.dist
!/.config/zsh.dots/.zshenv
!/.config/zsh/_.zsh
!/.config/zsh/_wip/enclave.zsh.wip
!/.config/zsh/alias.zsh
!/.config/zsh/alias/containers.zsh
!/.config/zsh/alias/diff.zsh
!/.config/zsh/alias/directories.zsh
!/.config/zsh/alias/git.zsh
!/.config/zsh/alias/gpg.zsh
!/.config/zsh/alias/grep.zsh
!/.config/zsh/alias/history.zsh
!/.config/zsh/alias/idle.zsh
!/.config/zsh/alias/idle.zsh.old
!/.config/zsh/alias/k8s.zsh
!/.config/zsh/alias/kconfig.zsh
!/.config/zsh/alias/ls.zsh
!/.config/zsh/alias/quilt.zsh
!/.config/zsh/alias/sbuild.zsh
!/.config/zsh/alias/openwrt.zsh
!/.config/zsh/alias/sudo.zsh
!/.config/zsh/alias/telnet.zsh
!/.config/zsh/completion/.keep
@@ -32,29 +83,33 @@
!/.config/zsh/env.zsh
!/.config/zsh/env/aux.zsh
!/.config/zsh/env/containers.zsh
!/.config/zsh/env/gopath.zsh
!/.config/zsh/env/history.zsh
!/.config/zsh/env/ld.so.zsh
!/.config/zsh/env/pager.zsh
!/.config/zsh/env/quilt.zsh
!/.config/zsh/env/sed.zsh
!/.config/zsh/env/xdg.zsh
!/.config/zsh/lib.zsh
!/.config/zsh/lib/alternatives.zsh
!/.config/zsh/lib/cmdtime.zsh
!/.config/zsh/lib/completion.zsh
!/.config/zsh/lib/csv.zsh
!/.config/zsh/lib/curl.zsh
!/.config/zsh/lib/enclave.zsh.wip
!/.config/zsh/lib/git.zsh
!/.config/zsh/lib/gpg.zsh
!/.config/zsh/lib/history.zsh
!/.config/zsh/lib/prompt.zsh
!/.config/zsh/lib/pswalk.zsh
!/.config/zsh/lib/say-my.zsh
!/.config/zsh/lib/selfservice.zsh
!/.config/zsh/lib/starship.zsh.sample
!/.config/zsh/lib/systemd.zsh
!/.config/zsh/lib/term.zsh
!/.config/zsh/lib/time.zsh
!/.config/zsh/lib/title.zsh
!/.config/zsh/local.zsh.example
!/.config/zsh/local/.keep
!/.config/zsh/local/completion/.keep
!/.config/zsh/local/env.zsh.example
!/.config/zsh/opt.zsh
!/.config/zsh/opt/chase.zsh
!/.config/zsh/opt/completion.zsh
@@ -63,13 +118,17 @@
!/.config/zsh/opt/prompt.zsh
!/.config/zsh/rc.zsh
!/.config/zsh/rc/completion.zsh
!/.config/zsh/rc/keyboard.zsh
!/.config/zsh/rc/gpg-agent.zsh
!/.config/zsh/rc/keyboard-base.zsh
!/.config/zsh/rc/keyboard-extras.zsh
!/.config/zsh/rc/pager.zsh
!/.config/zsh/rc/prompt.zsh
!/.config/zsh/rc/ssh-agent.zsh
!/.config/zsh/rc/terminal.zsh
!/.config/zsh/var/.keep
!/.gdbinit
!/.gitconfig
!/.sbuildrc.dist
!/.screenrc
!/.selected_editor
!/.vimrc


@@ -0,0 +1,18 @@
!/.cache/zsh/compcache/.keep
!/.cache/zsh/completion/.keep
!/.cache/zsh/compzwc/.keep
!/.cache/zsh/ssh/.keep
!/.config/zsh/local/.keep
!/.config/zsh/local/completion/.keep
!/.config/zsh/local/env.zsh.example
!/.config/zsh/var/.keep
/.cache/zsh/compcache/*
/.cache/zsh/compdump
/.cache/zsh/completion/*
/.cache/zsh/compzwc/*
/.cache/zsh/ssh/*
/.config/zsh/**/*.zwc
/.config/zsh/local.zsh
/.config/zsh/local/*
/.config/zsh/var/*


@@ -1,44 +1,45 @@
#!/bin/sh
set -ef
gh_repo='rockdrilla/dotfiles'
gh_br='main'
unset LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE
unset LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS
unset LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
unset POSIXLY_CORRECT TAR_OPTIONS
f_gitignore='.config/dotfiles/gitignore'
u_gitignore="${GITHUB_RAW:-https://raw.githubusercontent.com}/${gh_repo}/${gh_br}/${f_gitignore}"
uri_krdsh="${GITKRDSH:-https://git.krd.sh/krd}/dotfiles"
uri_github="${GITHUB:-https://github.com/rockdrilla}/dotfiles"
git_branch='main'
u_repo="https://github.com/${gh_repo}.git"
d_repo='.config/dotfiles/repo.git'
f_gitignore='.config/dotfiles/gitignore'
u_tarball="${GITHUB:-https://github.com}/${gh_repo}/archive/refs/heads/${gh_br}.tar.gz"
have_cmd() {
command -v "$1" >/dev/null 2>&1
}
have_cmd() { command -v "$1" </dev/null >/dev/null 2>&1 ; }
fetch() {
if have_cmd curl ; then
curl -sSL ${2:+ -o "$2" } "$1"
return
curl -sSL ${2:+ -o "$2" } "$1" || return $?
return 0
fi
if have_cmd wget ; then
if [ -n "$2" ] ; then
wget -q -O - "$1" > "$2"
wget -q -O - "$1" > "$2" || return $?
else
wget -q -O - "$1"
wget -q -O - "$1" || return $?
fi
return
return 0
fi
if have_cmd /usr/lib/apt/apt-helper ; then
x=/usr/lib/apt/apt-helper
if have_cmd $x ; then
if [ -n "$2" ] ; then
/usr/lib/apt/apt-helper download-file "$1" "$2"
return
$x download-file "$1" "$2" || return $?
return 0
fi
__fetch_t=$(mktemp) || return 1
set +e
(
set -e
/usr/lib/apt/apt-helper download-file "$1" "${__fetch_t}"
$x download-file "$1" "${__fetch_t}" || return $?
cat "${__fetch_t}"
)
__fetch_r=$?
@@ -49,14 +50,54 @@ fetch() {
return 1
}
test_forge() {
fetch "$1" "$2" || return 1
[ "$(head -n 1 "$2")" = '*' ] || return 1
return 0
}
select_forge() {
unset uri_gitignore uri_repo uri_tarball
__t=$(mktemp)
## try with git.krd.sh
while [ -z "${uri_repo}" ] ; do
t_gitignore="${uri_krdsh}/raw/branch/${git_branch}/${f_gitignore}"
test_forge "${t_gitignore}" "${__t}" || break
uri_repo="${uri_krdsh}.git"
uri_gitignore="${t_gitignore}"
uri_tarball="${uri_krdsh}/archive/${git_branch}.tar.gz"
break
done
## try with github.com
while [ -z "${uri_repo}" ] ; do
t_gitignore="${uri_github}/raw/${git_branch}/${f_gitignore}"
test_forge "${t_gitignore}" "${__t}" || break
uri_repo="${uri_github}.git"
uri_gitignore="${t_gitignore}"
uri_tarball="${uri_github}/archive/refs/heads/${git_branch}.tar.gz"
break
done
rm -f "${__t}" ; unset __t
unset t_gitignore
if [ -n "${uri_repo}" ] ; then
return 0
fi
echo 'no forge is available to fetch URLs' >&2
return 1
}
main() {
## dry run to test connectivity
fetch "${u_gitignore}" >/dev/null
## test connectivity and select forge
select_forge
umask 0077
if have_cmd git ; then
if [ -s "${HOME}/${d_repo}/info/refs" ] ; then
if [ -s "${HOME}/${d_repo}/HEAD" ] ; then
dot_update
else
dot_install
@@ -66,6 +107,8 @@ main() {
dot_install_raw
fi
propagate_dist_files
echo 'installed.' >&2
}
@@ -74,8 +117,8 @@ dot_install() {
git_env
mkdir -p "${GIT_DIR}"
git init
git branch -M "${gh_br}" || true
git_config
git branch -M "${git_branch}" || true
git_config_init
git_update
}
@@ -90,7 +133,7 @@ find_fast() {
dot_install_raw() {
tf_tar=$(mktemp)
fetch "${u_tarball}" "${tf_tar}"
fetch "${uri_tarball}" "${tf_tar}"
td_tree=$(mktemp -d)
@@ -101,7 +144,7 @@ dot_install_raw() {
rm -f "${tf_tar}"
tf_list=$(mktemp)
fetch "${u_gitignore}" \
fetch "${uri_gitignore}" \
| sed -En '/^!\/(.+)$/{s//\1/;p;}' \
> "${tf_list}"
@@ -118,7 +161,7 @@ dot_install_raw() {
cat < "${HOME}/$f" > "${td_backup}/$f"
fi
done < "${tf_list}"
rm -f "${tf_list}"
rm -f "${tf_list}" ; unset tf_list
tar -C "${td_tree}" -cf . - | tar -C "${HOME}" -xf -
rm -rf "${td_tree}"
@@ -137,24 +180,38 @@ git_env() {
export GIT_DIR GIT_WORK_TREE
}
git_config() {
git_config_init() {
## remote
git remote add origin "${u_repo}"
git config remote.origin.fetch "+refs/heads/${gh_br}:refs/remotes/origin/${gh_br}"
git remote add origin "${uri_repo}"
git config remote.origin.fetch "+refs/heads/${git_branch}:refs/remotes/origin/${git_branch}"
git config remote.origin.tagopt '--no-tags'
git config "branch.${gh_br}.remote" origin
git config "branch.${git_branch}.remote" origin
## repo-specific
git config core.worktree "${GIT_WORK_TREE}"
git config core.excludesfile "${f_gitignore}"
}
git_config() {
## repo-specific
git remote set-url origin "${uri_repo}"
git config core.attributesfile .config/dotfiles/gitattributes
## migration (remove later)
git config --unset gc.auto || :
git config --unset pull.ff || :
## size optimization
git config core.bigFileThreshold 64k
git config core.compression 9
git config core.looseCompression 8
git config pack.compression 9
git config pack.threads 2
## generic
git config gc.auto 0
git config pull.ff only
git config receive.denyNonFastForwards true
}
git_update() {
git_config
git remote update -p
git pull || git reset --hard "origin/${gh_br}"
git pull || git reset --hard "origin/${git_branch}"
git gc --aggressive --prune=all --force || git gc || true
}
@@ -181,7 +238,7 @@ cmp_files() {
backup_unconditionally() {
tf_list=$(mktemp)
fetch "${u_gitignore}" \
fetch "${uri_gitignore}" \
| sed -En '/^!\/(.+)$/{s//\1/;p;}' \
> "${tf_list}"
@@ -195,7 +252,7 @@ backup_unconditionally() {
mv -f "${HOME}/$f" "${td_backup}/$f"
fi
done < "${tf_list}"
rm -f "${tf_list}"
rm -f "${tf_list}" ; unset tf_list
if find_fast "${td_backup}/" -mindepth 1 ; then
echo "backed-up files are here: ${td_backup}/"
@@ -205,4 +262,20 @@ backup_unconditionally() {
fi
}
propagate_dist_files() {
tf_list=$(mktemp)
sed -En '/^!\/(.+\.dist)$/{s//\1/;p;}' < "${HOME}/${f_gitignore}" > "${tf_list}"
while read -r f_dist ; do
[ -n "${f_dist}" ] || continue
[ -f "${f_dist}" ] || continue
f=${f_dist%.dist}
if [ -f "$f" ] ; then continue ; fi
cp "${f_dist}" "$f"
done < "${tf_list}"
rm -f "${tf_list}" ; unset tf_list
}
main "$@"


@@ -0,0 +1,22 @@
#!/bin/sh
set -ef
e=perl ; d=
case "${0##*/}" in
d* )
e=debugperl
check-bin-pkg "$e:perl-debug"
case "$1" in
-* )
d=$1 ; shift
;;
esac
;;
esac
export PERL_HASH_SEED=0 PERL_PERTURB_KEYS=0
$e $d -T -c "$@" || echo
exec $e $d -T "$@"


@@ -0,0 +1,45 @@
#!/bin/sh
set -ef
me="${0##*/}"
case "${me}" in
to | from ) ;;
* ) exit 1 ;;
esac
check-bin-pkg rsync ssh:openssh-client
h=$1 ; shift
rsync_aux=
for i ; do
case "$i" in
-*) rsync_aux="${rsync_aux}${rsync_aux:+' '}$i" ;;
esac
done
for i ; do
case "$i" in
-* ) continue ;;
*:* )
k=${i#*:}
case "$k" in
*:* )
env printf "%q: skipping bogus argument: %q\\n" "${me}" "$i" >&2
continue
;;
esac
i=${i%%:*}
;;
* ) k=$i ;;
esac
k="$h:$k"
case "${me}" in
to ) src=$i dst=$k ;;
from ) src=$k dst=$i ;;
esac
rsync -vaxISc -e ssh ${rsync_aux} "${src}" "${dst}"
done
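
A rough usage sketch for the wrapper above (it ships under both names, "to" and "from"; the host and file names here are made up). Each argument after the host is a local path, optionally written as <local path>:<remote path>, and any "-option" argument is passed through to rsync unchanged:

## push a local file to the same relative path on host "buildbox"
to buildbox ./patch.diff
## pull buildbox:/var/log/syslog into ./syslog.buildbox, compressing in transit
from buildbox -z ./syslog.buildbox:/var/log/syslog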


@@ -0,0 +1,430 @@
#!/bin/sh
set -ef
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} [options] <name> <repo uri> <suite> [keyring uri or path]
# options:
# -m, --merge
# merge with existing keyring (if any)
# -o, --optimize
# optimize keyring - keep only the public key(s) used to sign the repository
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
gnupg_testdata_clearsign() {
cat <<-EOF
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA256
test data
-----BEGIN PGP SIGNATURE-----
iHUEARYIAB0WIQTk95XBmO8PsqERxxdADlhK/AU7ugUCZVSqdwAKCRBADlhK/AU7
uj4rAP4nb8tkF6FEhEd7sAoBBTPX5vc/WlAhDYM8uK61mo2KMQD/UjsJ3YOvRYip
epkflbk2cdCD7ZHsGYpBIhMh9eDTRQE=
=vrbd
-----END PGP SIGNATURE-----
EOF
}
## NB: requires further "base64 -d" per line output
gnupg_dearmor_pubkey() {
sed -En -e "$(_gnupg_dearmor_pubkey_sed)" "$@"
}
_gnupg_dearmor_pubkey_sed() {
cat <<-EOF
/^-----BEGIN PGP PUBLIC KEY BLOCK-----\$/ {
:_scan
\$ { b _quit }
n
/^-----BEGIN PGP PUBLIC KEY BLOCK-----\$/ { b _quit }
/^-----END PGP PUBLIC KEY BLOCK-----\$/ { b _quit }
\\%^[[:alnum:]/+]{64}\$% { h ; b _read }
\\%^[[:alnum:]/+]{1,64}=*\$% { b _quit }
\\%^=[[:alnum:]/+]+=*\$% { b _quit }
b _scan
:_read
\$ { b _quit }
n
/^-----BEGIN PGP PUBLIC KEY BLOCK-----\$/ { b _quit }
/^-----END PGP PUBLIC KEY BLOCK-----\$/ { z ; b _end }
\\%^[[:alnum:]/+]{64}\$% { H ; b _read }
\\%^[[:alnum:]/+]{1,64}=*\$% { H ; b _read }
\\%^=[[:alnum:]/+]+=*\$% { H ; b _read }
b _read
:_end
x
s/[\\n]//gm
s/^([[:alnum:]/+]+).+\$/\\1==/g
p
:_quit
}
EOF
}
## NB: almost pristine format except last empty line
## TODO: research gnupg format
gnupg_armor_pubkey() {
cat <<-EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----
$(base64 < "$1")
-----END PGP PUBLIC KEY BLOCK-----
EOF
}
_gpgv_debug_pkt() { gpgv --debug 1 "$@" ; }
gnupg_count_public_keys() {
## sanity test
_gpgv_debug_pkt --version >/dev/null 2>&1
gnupg_testdata_clearsign \
| _gpgv_debug_pkt --keyring "$1" 2>&1 >/dev/null \
| { grep -Fw parse_packet || : ; } \
| { grep -Fwc -e 'type=6' || : ; }
}
# process options
n_opt=0
for i ; do
case "$i" in
-m | --merge )
if [ -n "${o_merge:+1}" ] ; then
env printf "%q: error: 'merge' flag already set\\n" "${me}" >&2
usage 1
fi
o_merge=1
;;
-o | --optimize )
if [ -n "${o_optimize:+1}" ] ; then
env printf "%q: error: 'optimize' flag already set\\n" "${me}" >&2
usage 1
fi
o_optimize=1
;;
-*)
env printf "%q: error: unknown option: %q\\n" "${me}" "$i" >&2
usage 1
;;
*) break ;;
esac
n_opt=$((n_opt+1))
done
[ ${n_opt} = 0 ] || shift ${n_opt}
arg_ok=
while : ; do
[ -n "$1" ] || break
[ -n "$2" ] || break
[ -n "$3" ] || break
arg_ok=1 ; break
done
[ -n "${arg_ok}" ] || usage 1
xsedx=$(printf '\027')
_distro=$(sh -ec '. /etc/os-release ; printf "%s" "${ID}"' || : )
_suite=$(sh -ec '. /etc/os-release ; printf "%s" "${VERSION_CODENAME}"' || : )
if [ -z "${_distro}" ] || [ -z "${_suite}" ] ; then
env printf "%q: error: /etc/os-release is somewhat broken\\n" "${me}" >&2
exit 1
fi
unwrap_default_distro_suite() {
sed -ze "s${xsedx}@{distro}${xsedx}${_distro}${xsedx}g;s${xsedx}@{suite}${xsedx}${_suite}${xsedx}g"
}
name=$(printf '%s' "$1" | unwrap_default_distro_suite)
uri=$(printf '%s' "$2" | unwrap_default_distro_suite)
suite=$(printf '%s' "$3" | unwrap_default_distro_suite)
keyring_arg=$(printf '%s' "$4" | unwrap_default_distro_suite)
case "${name}" in
*/* | *..* )
env printf "%q: error: name is path-like: %q\\n" "${me}" "${name}" >&2
usage 1
;;
esac
verify_apt_uri() {
case "$1" in
http:* | https:* ) return 0 ;;
mirror:* | mirror+http:* | mirror+https:* ) return 0 ;;
esac
__uncommon=
case "$1" in
cdrom:* | copy:* | file:* | rsh:* | ssh:* ) __uncommon=1 ;;
mirror+copy:* | mirror+file:* ) __uncommon=1 ;;
mirror+*:* )
env printf "%q: error: unsupported uri mirror format: %q\\n" "${me}" "$1" >&2
usage 1
;;
esac
if [ -n "${__uncommon}" ] ; then
unset __uncommon
env printf "%q: warning: uncommon but likely valid uri: %q\\n" "${me}" "$1" >&2
return 0
fi
env printf "%q: error: unsupported uri format: %q\\n" "${me}" "$1" >&2
usage 1
}
verify_suite() {
if printf '%s' "$1" | grep -Eq '^[[:alnum:]][[:alnum:]-_]*[[:alnum:]]$' ; then
return 0
fi
env printf "%q: error: unsupported suite format: %q\\n" "${me}" "$1" >&2
usage 1
}
verify_apt_uri "${uri}"
verify_suite "${suite}"
_keyring_dirs='/etc/apt/keyrings /etc/apt/trusted.gpg.d'
lookup_keyring_dir() {
if [ -n "${_keyring_dir}" ] ; then
echo "${_keyring_dir}"
return
fi
for __keyring_dir in ${_keyring_dirs} ; do
[ -d "${__keyring_dir}" ] || continue
_keyring_dir=${__keyring_dir}
unset __keyring_dir
echo "${_keyring_dir}"
return
# [ -z "${1:-}" ] || return
done
unset __keyring_dir
env printf "%q: error: APT installation is broken: /etc/apt/trusted.gpg.d/ is missing\\n" "${me}" >&2
return 1
}
keyring_dir=$(lookup_keyring_dir) || exit 1
keyring_base="${keyring_dir}/${name}"
keyring="${keyring_base}.asc"
w=$(mktemp -d) ; : "${w:?}"
lookup_keyring() {
case "${1:?}" in
*/* | *..* )
env printf "%q: error: arg is path-like: %q\\n" "${me}" "$1" >&2
return 1
;;
esac
__keyring_base=$(lookup_keyring_dir) || return 1
__keyring_base="${__keyring_base}/$1"
__keyring_cnt=0
for __ext in asc gpg gpg.asc ; do
__keyring="${__keyring_base}.${__ext}"
[ -e "${__keyring}" ] || continue
if ! [ -f "${__keyring}" ] ; then
__dentry_type=$(env stat -c %F "${__keyring}")
env printf "%q: warning: found %q (expected: file): %q\\n" "${me}" "${__dentry_type}" "${__keyring}" >&2
continue
fi
if ! [ -s "${__keyring}" ] ; then
env printf "%q: warning: found empty file: %q\\n" "${me}" "${__keyring}" >&2
continue
fi
env printf "%q: info: found keyring: %q\\n" "${me}" "${__keyring}" >&2
echo "${__keyring}"
__keyring_cnt=$((__keyring_cnt+1))
## return 1st available
[ -z "$2" ] || break
done
unset __keyring_base __ext __keyring __dentry_type
}
if [ -n "${keyring_arg}" ] ; then
case "${keyring_arg}" in
http:* | https:* )
apt-http-fetch "${keyring_arg}" "$w/keyring" || {
rm -rf "$w"
exit 1
}
;;
* )
if ! [ -f "${keyring_arg}" ] ; then
env printf "%q: error: unknown keyring argument format: %q\\n" "${me}" "${keyring_arg}" >&2
rm -rf "$w"
usage 1
fi
if ! [ -s "${keyring_arg}" ] ; then
env printf "%q: error: empty keyring file: %q\\n" "${me}" "${keyring_arg}" >&2
rm -rf "$w"
usage 1
fi
cat < "${keyring_arg}" > "$w/keyring"
;;
esac
keynum=$(gnupg_count_public_keys "$w/keyring")
if [ "${keynum}" != 0 ] ; then
## valid gnupg keyring file in (appropriate) binary format (*.gpg)
keyring="${keyring_base}.gpg"
else
while read -r n ; do
[ -n "$n" ] || continue
echo "$n" | base64 -d > "$w/temp.gpg" || continue
x=$(gnupg_count_public_keys "$w/temp.gpg")
[ "$x" != 0 ] || continue
keynum=$((keynum+1))
done <<-EOF
$(gnupg_dearmor_pubkey "$w/keyring")
EOF
rm -f "$w/temp.gpg"
fi
if [ "${keynum}" = 0 ] ; then
env printf "%q: error: unable to parse keyring file: %q\\n" "${me}" "${keyring_arg}" >&2
rm -rf "$w"
exit 1
fi
while [ -n "${o_merge}" ] ; do
lookup_keyring "${name}" | grep -Fxq -e "${keyring}" || break
env printf "%q: warning: existing file is to be REPLACED: %q\\n" "${me}" "${keyring}" >&2
env printf "%q: info: consider running script with -m/--merge option to merge with existing keyring\\n" "${me}" >&2
break
done
fi
## TODO: whether we can skip gpg-related tasks?
## request gnupg for work
check-bin-pkg gpg gpgv dirmngr gpgconf
gpg_on() { gpg-batch start ; }
gpg_off() {
cd /
gpg-batch stop
unset GNUPGHOME
rm -rf "$w"
exit "${1:-0}"
}
(
export GNUPGHOME="$w/dot-gnupg"
mkdir -m 0700 "${GNUPGHOME}"
gpg_on
cd "$w"
csv_field_grep() {
if [ -n "$2" ] ; then
sed -En "\\${xsedx}$2${xsedx}p"
else
cat
fi \
| cut -d: -f "$1"
}
join_lines() { sed -zE 's/[[:space:]]+/ /g;s/^ //;s/ $//' ; }
if [ -s ./keyring ] ; then
gpg --import ./keyring
rm -f ./keyring
else
apt-repo-get-fpr "${uri}" "${suite}" > fpr.repo
## must contain at least one fingerprint; if not - bail out
[ -s fpr.repo ]
keys=$(join_lines < fpr.repo)
gpg --recv-keys ${keys}
fi
gpg --with-colons --list-keys > dump.all
csv_field_grep 5 '^pub:' < dump.all > pub.all
## must contain at least one public key; if not - bail out
[ -s pub.all ]
## minimize keyring
while [ -n "${o_optimize}" ] ; do
csv_field_grep 10 '^fpr:' < dump.all > fpr.all
: > fpr.save
for i in $(join_lines < pub.all) ; do
gpg --with-colons --list-keys "$i" \
| csv_field_grep 10 '^fpr:' > "fpr.$i"
## must contain at least one fingerprint; if not - bail out
[ -s "fpr.$i" ]
fpr_pub=$(head -n 1 "fpr.$i")
## try filter out useful fingerprints; if none - skip whole key chain
grep -Fx -f fpr.repo "fpr.$i" > "fpr-save.$i" || :
[ -s "fpr-save.$i" ] || continue
if grep -Fxq -e "${fpr_pub}" "fpr-save.$i" ; then
## primary key fingerprint match - do nothing
:
else
## subkey fingerprint match - add primary key fingerprint
echo "${fpr_pub}" >> "fpr-save.$i"
fi
## append fingerprints
cat < "fpr-save.$i" >> fpr.save
done
## must contain at least one fingerprint; if not - bail out
## (supplied key material seems to be useless)
[ -s fpr.save ]
sort -u < fpr.save > fpr.include
grep -Fxv -f fpr.include fpr.all > fpr.exclude || :
if [ -s fpr.exclude ] ; then
gpg --delete-keys $(sed -E 's/$/!/' < fpr.exclude)
fi
break
done
if [ -n "${o_merge}" ] ; then
while read -r k ; do
[ -n "$k" ] || continue
gpg --import "$k" || continue
rm -f "$k"
env printf "%q: info: merging with existing keyring: %q\\n" "${me}" "$k" >&2
done <<-EOF
$(lookup_keyring "${name}" 2>/dev/null)
EOF
fi
gpg --armor --export > "${keyring}"
gpg --show-keys "${keyring}"
gpg_off
) || gpg_off 1


@@ -0,0 +1,11 @@
#!/bin/sh
set -ef
set -a
DEBCONF_NONINTERACTIVE_SEEN=true
DEBIAN_FRONTEND=noninteractive
DEBIAN_PRIORITY=critical
TERM=linux
set +a
exec "$@"


@@ -0,0 +1,51 @@
#!/bin/sh
set -ef
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} [options] <uri> <destination file>
# cURL compatibility options:
# -s, --silent
# suppress output until an error occurs
# -k, --insecure
# ignore TLS verification error
# -L, --location
# no-op (does nothing)
# --no-location
# prohibit HTTP redirects
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
o_quiet=
o_insecure=
o_noredir=
## process options
n_opt=0
for i ; do
case "$i" in
-s | --silent ) o_quiet=1 ;;
-k | --insecure ) o_insecure=1 ;;
--no-location ) o_noredir=1 ;;
-L | --location ) o_noredir= ;;
-* )
env printf "%q: unknown option: %q\\n" "${me}" "$i" >&2
usage 1
;;
* ) break ;;
esac
n_opt=$((n_opt+1))
done
[ ${n_opt} = 0 ] || shift ${n_opt}
[ $# -eq 2 ] || usage 1
exec ${o_quiet:+ quiet } /usr/lib/apt/apt-helper \
${o_insecure:+ -o 'Acquire::https::Verify-Peer=false' } \
${o_insecure:+ -o 'Acquire::https::Verify-Host=false' } \
${o_noredir:+ -o 'Acquire::http::AllowRedirect=false' } \
download-file "$1" "$2"


@@ -0,0 +1,9 @@
#!/bin/sh
set -ef
apt-update ${APT_OPTS}
exec apt-env \
apt-get \
${APT_OPTS} \
"${APT_INSTALL:-install}" \
"$@"


@@ -0,0 +1,14 @@
#!/bin/sh
set -ef
apt-env \
apt-get \
${APT_OPTS} \
--allow-remove-essential \
"${APT_REMOVE:-remove}" \
"$@"
exec apt-env \
apt-get \
${APT_OPTS} \
-y \
autoremove


@@ -0,0 +1,80 @@
#!/bin/sh
set -ef
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <repo uri> <suite>
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
arg_ok=
while : ; do
[ -n "$1" ] || break
[ -n "$2" ] || break
arg_ok=1 ; break
done
[ -n "${arg_ok}" ] || usage 1
check-bin-pkg gpg dirmngr gpgconf
gpg_on() { gpg-batch start ; }
gpg_off() {
cd /
gpg-batch stop
unset GNUPGHOME
rm -rf "$w"
exit "${1:-0}"
}
gpg_keys_from_sig() {
set +e
gpg --no-options --verify "$@" 2>&1 \
| sed -En 's/\s+/ /g;s/ $//;/^([Gg][Pp][Gg]|[Gg][Nn][Uu][Pp][Gg]): using .+ key (.+)$/{s,,\2,p}'
set -e
}
w=$(mktemp -d) ; : "${w:?}"
export GNUPGHOME="$w/dot-gnupg"
(
mkdir -m 0700 "${GNUPGHOME}"
gpg_on
cd "$w"
while : ; do
apt-http-fetch -s "$1/dists/$2/InRelease" InRelease || break
gpg_keys_from_sig InRelease > apt.fpr.InRelease
break
done
rm -f InRelease
while : ; do
apt-http-fetch -s "$1/dists/$2/Release.gpg" Release.gpg || break
apt-http-fetch -s "$1/dists/$2/Release" Release || break
gpg_keys_from_sig Release.gpg Release > apt.fpr.Release
break
done
rm -f Release.gpg Release
rm -f apt.fpr
c=0
for i in apt.fpr.InRelease apt.fpr.Release ; do
[ -s "$i" ] || continue
cp -f "$i" apt.fpr
c=$((c+1))
done
if [ "$c" = 2 ] ; then
cmp apt.fpr.InRelease apt.fpr.Release >&2
fi
[ -s apt.fpr ]
cat apt.fpr
gpg_off
) || gpg_off 1


@@ -0,0 +1,52 @@
#!/bin/sh
set -ef
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <name or regex> [regex]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
[ -n "$1" ] || usage 1
w=$(mktemp -d) ; : "${w:?}"
r=0
_exit() {
rm -rf "$w"
exit $r
}
set +e
if [ "$(id -u)" = 0 ] ; then
apt-update
fi
apt-cache --names-only search "$1" > "$w/search"
r=$?
[ $r = 0 ] || _exit
[ -s "$w/search" ] || _exit
awk '{print $1}' < "$w/search" > "$w/list"
rm -f "$w/search"
pat="${2:-$1}"
## plain string, exact match
if grep -Fq -e "${pat}" "$w/list" ; then
grep -Fx -e "${pat}" "$w/list"
_exit
fi
## regex (?)
need_anchors=
case "${pat}" in
*^* | *\$* ) ;;
*) need_anchors=1 ;;
esac
grep -E${need_anchors:+x} -e "${pat}" "$w/list"
_exit


@@ -0,0 +1,53 @@
#!/bin/sh
set -ef
: "${DPKG_ADMINDIR:=/var/lib/dpkg}"
find_fresh_ts() {
{
find "$@" -exec stat -c '%Y' '{}' '+' 2>/dev/null || :
## duck and cover!
echo 1
} | sort -rn | head -n 1
}
_apt_update() {
## update package lists; may fail sometimes,
## e.g. soon-to-release channels like Debian "bullseye" @ 22.04.2021
if [ $# = 0 ] ; then
## (wannabe) smart package list update
ts_sources=$(find_fresh_ts /etc/apt/ -follow -regextype egrep -regex '^/etc/apt/sources\.list(|\.d/[^/]+\.(list|sources))$' -type f)
ts_lists=$(find_fresh_ts /var/lib/apt/lists/ -maxdepth 1 -regextype egrep -regex '^.+_Packages(|\.(bz2|gz|lz[4o]|xz|zstd?))$' -type f)
if [ ${ts_sources} -gt ${ts_lists} ] ; then
apt-env apt-get -y update
fi
else
apt-env apt-get "$@" update
fi
}
_dpkg_avail_hack() {
set +e
suite=$(sh -ec '. /etc/os-release ; printf "%s" "${VERSION_CODENAME}"' || : )
## if ${suite} is empty then we're on Debian sid or so :)
: "${suite:-sid}"
set -e
f="${DPKG_ADMINDIR}/available"
case "${suite}" in
stretch | buster | bionic | focal )
## ref: https://unix.stackexchange.com/a/271387/49297
if [ -s "$f" ] ; then
return
fi
echo '# fixing "dpkg/available"' >&2
/usr/lib/dpkg/methods/apt/update "${DPKG_ADMINDIR}" apt apt
;;
* )
touch "$f"
;;
esac
}
_apt_update "$@"
_dpkg_avail_hack


@@ -0,0 +1,44 @@
#!/bin/sh
set -ef
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <binary>[:<package>] [...]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
[ -n "$1" ] || usage 1
for i ; do
unset bin pkg
IFS=: read -r bin pkg <<-EOF
$i
EOF
if [ -z "${bin}" ] ; then
env printf "%q: 'binary' expected but empty: %q\\n" "${me}" "$i" >&2
usage 1
fi
done
for i ; do
unset bin pkg
IFS=: read -r bin pkg <<-EOF
$i
EOF
[ -n "${pkg}" ] || pkg=${bin}
if command -v "${bin}" >/dev/null 2>&1 ; then
continue
fi
cat >&2 <<-EOF
# ${0##*/}: unable to find command '${bin}'
# install package '${pkg}' first, e.g.
# sudo apt-install ${pkg}
EOF
exit 1
done
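
The check above only verifies that each listed command is reachable and, if one is missing, names the Debian package to install; a typical call (the same form appears elsewhere in this changeset):

## each argument is <binary>[:<package>]; the package name defaults to the binary name
check-bin-pkg rsync ssh:openssh-client
## if "ssh" is absent, the script suggests "sudo apt-install openssh-client" and exits 1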


@@ -0,0 +1,4 @@
#!/bin/sh
set -ef
exec sed -zE 's/\r\n/\n/g;s/\r/\n/g' "$@"


@@ -0,0 +1,254 @@
#!/bin/sh
set -ef
tar_opts='--blocking-factor=1 --format=gnu --no-selinux --no-xattrs --sparse'
unset LANGUAGE LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE
unset LC_MONETARY LC_MESSAGES LC_PAPER LC_NAME LC_ADDRESS
unset LC_TELEPHONE LC_MEASUREMENT LC_IDENTIFICATION
unset POSIXLY_CORRECT TAR_OPTIONS
export LC_ALL=C.UTF-8 LANG=C.UTF8
have_cmd() { command -v "$1" </dev/null >/dev/null 2>&1 ; }
ro_git() { GIT_OPTIONAL_LOCKS=0 command git "$@" ; }
find_fast() {
find "$@" -printf . -quit | grep -Fq .
}
check-bin-pkg dh:debhelper dpkg-parsechangelog:dpkg-dev
have_git=
while [ "${DEB_SRC_EXPORT_GIT:-1}" = 1 ] ; do
have_cmd git || break
ro_git rev-parse --is-inside-work-tree >/dev/null 2>&1 || break
have_git=1
break
done
want_gpg=
while [ "${DEB_SRC_EXPORT_GNUPG:-1}" = 1 ] ; do
have_cmd gpg || break
[ -z "${DEB_SIGN_KEYID}" ] || want_gpg=1
if [ -n "${have_git}" ] ; then
if ro_git config --get commit.gpgsign | grep -Fxq true ; then
want_gpg=1
fi
fi
break
done
SIGN_OPT=
if [ -n "${want_gpg}" ] ; then
sign_file() { gpg-sign-file "$@"; }
else
SIGN_OPT='--no-sign'
sign_file() { :; }
unset \
GNUPGHOME GPG_AGENT_INFO GPG_TTY PINENTRY_USER_DATA \
SSH_AGENT_PID SSH_AUTH_SOCK \
DEB_SIGN_KEYID DEBEMAIL DEBFULLNAME
fi
deb_folder='debian'
if [ -z "$2" ] ; then
if [ -n "${have_git}" ] ; then
cd "$(ro_git rev-parse --show-toplevel)"
fi
else
deb_folder=$(printf '%s' "$2" | sed -zE 's,/+,/,g;s,/$,,')
fi
## basic verification of debian/ folder
[ -d "${deb_folder}" ]
[ -s "${deb_folder}/rules" ]
if [ "${deb_folder##*/}" = . ] ; then
deb_folder=$( ( cd "${deb_folder}" ; pwd ; ) )
fi
if [ -z "${SOURCE_DATE_EPOCH}" ] ; then
SOURCE_DATE_EPOCH=$(date -u +%s)
fi
export SOURCE_DATE_EPOCH
temp_folder=$(mktemp -d) ; : "${temp_folder:?}"
## prepare/warmup GnuPG
## if signing is disabled then it does nothing with files or GnuPG
date > "${temp_folder}/t" ; sign_file "${temp_folder}/t" ; rm -f "${temp_folder}/t"
## (early) grab source package related files
mkdir -p "${temp_folder}/debian"
find "${deb_folder}/" -mindepth 1 -maxdepth 1 -printf '%P\0' \
| tar -C "${deb_folder}" ${tar_opts} --dereference --null -T - -cf - \
| tar -C "${temp_folder}/debian" -xf -
chmod 0755 "${temp_folder}/debian/rules"
d_rules() { DPKG_BUILD_API="${DPKG_BUILD_API:-0}" debian/rules "$@" ; }
(
cd "${temp_folder}"
set +e
## refresh control files
d_rules clean
d_rules debian/control
d_rules debian/changelog
)
if ! [ -s "${temp_folder}/debian/control" ] ; then
echo "# ${0##*/}: debian/control is missing/unreproducible for ${deb_folder}/!"
rm -rf "${temp_folder}"
exit 1
fi
## retrieve only desired fields from dpkg-parsechangelog output
if ! dpkg-parsechangelog -l "${temp_folder}/debian/changelog" > "${temp_folder}/changelog" ; then
rm -rf "${temp_folder}"
exit 1
fi
# pkg_name=$(deb822 get "${temp_folder}/changelog" Source | sed -zn 1p | tr -d '\0')
# pkg_ver=$(deb822 get "${temp_folder}/changelog" Version | sed -zn 1p | tr -d '\0')
# pkg_ts=$(deb822 get "${temp_folder}/changelog" Timestamp | sed -zn 1p | tr -d '\0')
pkg_name=$(sed -En '/^Source:\s*(\S+)$/{s//\1/p;q;}' "${temp_folder}/changelog")
pkg_ver=$(sed -En '/^Version:\s*(\S+)$/{s//\1/p;q;}' "${temp_folder}/changelog")
pkg_ts=$(sed -En '/^Timestamp:\s*(\S+)$/{s//\1/p;q;}' "${temp_folder}/changelog")
rm -f "${temp_folder}/changelog"
SOURCE_DATE_EPOCH=${pkg_ts}
export SOURCE_DATE_EPOCH
pkg_ver_list=$(deb-ver-parse "${pkg_ver}")
[ -n "${pkg_ver_list}" ]
unset ver_epoch ver_upstream ver_revision
IFS='|' read -r ver_epoch ver_upstream ver_revision <<EOF
${pkg_ver_list}
EOF
T="$1"
if [ -z "$T" ] ; then
T=$(mktemp -d) ; : "${T:?}"
echo "output directory (auto-created): $T" >&2
else
echo "output directory: $T" >&2
mkdir -p "$T"
fi
if [ -z "${ver_upstream}" ] ; then
## native package
## do as simple as possible
W="$T/${pkg_name}-${ver_revision}" ; mkdir -p "$W"
echo "working directory (as subdirectory of output): $W" >&2
else
## regular package
## do some tricks! :)
W=$(mktemp -d)
echo "working directory (auto-created): $W" >&2
fi
_cleanup() {
cd /
rm -rf -- "$W" ; rm -rf -- "$T"
}
_finish_export() {
cd "$T" ; rm -rf -- "$W"
echo "index of $T" >&2
ls -lhgG >&2
}
## move files to "permanent" location
tar -C "${temp_folder}" ${tar_opts} -cf - debian \
| tar -C "$W" -xf -
rm -rf "${temp_folder}" ; unset temp_folder
if [ -z "${ver_upstream}" ] ; then
## native package
## grab all the things (except Git)
(
cd "${deb_folder}/.."
if [ -n "${have_git}" ] ; then
git ls-files -z
else
find ./ -mindepth 1 -maxdepth 1 -printf '%P\0'
fi \
| sed -zE '/^(\.git|debian)(\/|$)/d' \
| tar ${tar_opts} --null -T - -cf - \
| tar -C "$W" -xf -
) || { _cleanup ; exit 1 ; }
fi
export TAR_OPTIONS="${tar_opts} --mtime @${pkg_ts} --sort=name --numeric-owner --owner=0 --group=0 --exclude-vcs"
if [ -z "${ver_upstream}" ] ; then
## native package
## do as simple as possible
cd "$W"
dpkg-buildpackage --build=source -z9 -d -nc ${SIGN_OPT} || { _cleanup ; exit 1 ; }
_finish_export
exit
fi
## regular package
## here goes magic and dragons
## shortcuts
pnu="${pkg_name}_${ver_upstream}"
pnd="${pnu}-${ver_revision}"
pna="${pnd}_source"
## try copy upstream source tarball(s) (if any)
(
cd "${deb_folder}/.."
for d in '..' '.' ; do
## main tarball
find "$d/" -follow -mindepth 1 -maxdepth 1 -type f -name "${pnu}.orig.*" \
-exec cp -vaL -t "$T" {} +
## extra tarball(s) for multiple upstream tarball (MUT) package
find "$d/" -follow -mindepth 1 -maxdepth 1 -type f -name "${pnu}.orig-*.*" \
-exec cp -vaL -t "$T" {} +
done
) || { _cleanup ; exit 1 ; }
cd "$W"
(
## verify that we have upstream tarball(s)
if ! find_fast "$T" -mindepth 1 -maxdepth 1 -name "${pnu}.orig.*" -type f ; then
d_rules debian/watch || :
check-bin-pkg uscan:devscripts || exit 1
uscan --destdir "$T" --download-current-version --rename
fi
tar -cf - debian | xz -9 > "$T/${pnd}.debian.tar.xz"
## hackish way to generate .dsc with minimal overhead
dpkg-source-raw "$T" || exit 1
sign_file "$T/${pnd}.dsc"
dpkg-genbuildinfo --build=source -u"$T" -O > "$T/${pna}.buildinfo"
sign_file "$T/${pna}.buildinfo"
dpkg-genchanges --build=source -u"$T" > "$T/${pna}.changes"
sign_file "$T/${pna}.changes"
_finish_export
exit
) || { _cleanup ; exit 1 ; }


@@ -0,0 +1,23 @@
#!/bin/sh
set +e ; set -f
## debian package version handling
## match pattern "epoch:version-revision"
m1='([1-9][0-9]*):([0-9][0-9a-zA-Z.+~-]*)-([0-9a-zA-Z.+~]+)'
## match pattern "epoch:native_version"
m2='([1-9][0-9]*):([0-9][0-9a-zA-Z.+~]*)'
## match pattern "version-revision"
m3='([0-9][0-9a-zA-Z.+~-]*)-([0-9a-zA-Z.+~]+)'
## match pattern "native_version"
m4='([0-9][0-9a-zA-Z.+~]*)'
## replace pattern "epoch|upstream version|revision or native_version"
replace='\2\5|\3\7|\4\6\8\9'
## sed script to split package version into kind of 'list'
pkg_ver_to_list='/^('${m1}'|'${m2}'|'${m3}'|'${m4}')$/{s##'${replace}'#;p;}'
for i ; do
echo "$i" | sed -En "${pkg_ver_to_list}"
done
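
The parser above prints an "epoch|upstream version|revision" triple per argument, with the third field carrying the whole version for native packages (this is how deb-src-export consumes it). Expected output for a few sample versions:

deb-ver-parse '1:2.3-4'        # -> 1|2.3|4
deb-ver-parse '5.10-1~bpo12'   # -> |5.10|1~bpo12
deb-ver-parse '2024.05'        # -> ||2024.05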


@@ -0,0 +1 @@
_perl-wrapper.sh


@@ -0,0 +1,49 @@
#!/bin/sh
set -ef
w=$(mktemp -d) ; : "${w:?}"
have_cmd() { command -v "$1" </dev/null >/dev/null 2>&1 ; }
unset has_mawk
if have_cmd mawk ; then has_mawk=1 ; fi
mawk_then_awk() {
if [ -n "${has_mawk}" ]
then mawk "$@"
else awk "$@"
fi
}
f='/var/lib/apt/extended_states'
if [ -f "$f" ] ; then
mawk_then_awk '
/^Package:/,/^$/ {
if ($1 == "Package:") { pkg = $2; }
if ($1 == "Architecture:") { arch = $2; }
if ($1 == "Auto-Installed:") { is_auto = $2; }
if ($0 == "") {
if (is_auto == 1) { print pkg ":" arch; }
}
}
' "$f" | sort -V
fi > "$w/auto.pkg"
while [ -s "$w/auto.pkg" ] ; do
dpkg-list-installed > "$w/all"
## fix:
## /var/lib/apt/extended_states stores (some) arch:all entries as arch:native
sed -En '/^([^:]+):all$/ {s##/^\1:.+$/ {s//\1:all/}#p}' \
< "$w/all" \
> "$w/auto.sed"
if [ -s "$w/auto.sed" ] ; then
sed -E -f "$w/auto.sed"
else
cat
fi < "$w/auto.pkg"
break
done
rm -rf "$w"


@@ -0,0 +1,6 @@
#!/bin/sh
set -ef
dpkg-query --show --showformat='${Package}:${Architecture}|${db:Status-Abbrev}\n' \
| sed -En '/^(.+)\|[hi]i $/{s//\1/;p}' \
| sort -V


@@ -0,0 +1,15 @@
#!/bin/sh
set -ef
w=$(mktemp -d) ; : "${w:?}"
dpkg-list-installed > "$w/all"
dpkg-list-auto > "$w/auto"
if [ -s "$w/auto" ] ; then
grep -Fxv -f "$w/auto"
else
cat
fi < "$w/all"
rm -rf "$w"


@@ -0,0 +1,24 @@
#!/bin/sh
set +e ; set -f
_list() {
dpkg-query --show --showformat='${Package}:${Architecture} ${Version} status="${db:Status-Abbrev}"\n' \
| grep -Ev '"ii "$' \
| sort -V
}
t=$(mktemp)
if [ -z "$t" ] ; then
# unable to create temporary file?
# produce "raw" output
_list
exit 0
fi
_list > "$t"
if [ -s "$t" ] ; then
echo '# "martian" packages (i.e. unusual state):' >&2
cat "$t"
fi
rm -f "$t"


@@ -0,0 +1,25 @@
#!/bin/sh
set -ef
: "${1:?}"
me="${0##*/}"
if dpkg-query --search "$1" ; then exit 0 ; fi
case "$1" in
*\** | *\?* )
env printf "%q does not support globs: %q\\n" "${me}" "$1" >&2
exit 1
;;
esac
while read -r f ; do
[ -n "$f" ] || continue
dpkg-query --search "$f" || continue
exit 0
done <<EOF
$(set +e ; find / -xdev -follow -samefile "$1" 2>/dev/null | grep -Fxv -e "$1" | sort -V)
EOF
exit 1


@@ -0,0 +1,280 @@
#!/usr/bin/perl
#
# dpkg-source-raw: "hackish" script based on original dpkg-source
# repo: https://git.dpkg.org/git/dpkg/dpkg.git
# synced commit: f506e5dbc94393e9b5a8783d992815dca8ea7a2b
# file: scripts/dpkg-source.pl
#
# Copyright © 1996 Ian Jackson <ijackson@chiark.greenend.org.uk>
# Copyright © 1997 Klee Dienes <klee@debian.org>
# Copyright © 1999-2003 Wichert Akkerman <wakkerma@debian.org>
# Copyright © 1999 Ben Collins <bcollins@debian.org>
# Copyright © 2000-2003 Adam Heath <doogie@debian.org>
# Copyright © 2005 Brendan O'Dea <bod@debian.org>
# Copyright © 2006-2008 Frank Lichtenheld <djpig@debian.org>
# Copyright © 2006-2009,2012 Guillem Jover <guillem@debian.org>
# Copyright © 2008-2011 Raphaël Hertzog <hertzog@debian.org>
# Copyright © 2020-2025 Konstantin Demin <rockdrilla@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
use strict;
use warnings;
use List::Util qw(any none);
use Cwd;
use File::Basename;
use File::Spec;
use Dpkg ();
use Dpkg::ErrorHandling;
use Dpkg::Arch qw(:operators);
use Dpkg::Deps;
use Dpkg::Compression;
use Dpkg::Conf;
use Dpkg::Control::Info;
use Dpkg::Control::Tests;
use Dpkg::Control::Fields;
use Dpkg::Substvars;
use Dpkg::Version;
use Dpkg::Changelog::Parse;
use Dpkg::Source::Format;
use Dpkg::Source::Package;
use Dpkg::Vendor;
# heavily-conditional "use MODULE"
BEGIN {
my $dpkg_ver = Dpkg::Version->new($Dpkg::PROGVERSION);
my $new_api_ver = Dpkg::Version->new('1.22.0');
if ($dpkg_ver >= $new_api_ver) {
require Dpkg::Package;
Dpkg::Package->import(qw(set_source_name));
# aliasing
*set_source_package = \&set_source_name;
} else {
require Dpkg::Vars;
Dpkg::Vars->import(qw(set_source_package));
}
}
my $build_format;
my %options = ();
my $substvars = Dpkg::Substvars->new();
my @options;
$options{origtardir} = $ARGV[0];
$options{changelog_file} = 'debian/changelog';
my %ch_options = (file => $options{changelog_file});
my $changelog = changelog_parse(%ch_options);
my $control = Dpkg::Control::Info->new('debian/control');
# <https://reproducible-builds.org/specs/source-date-epoch/>
$ENV{SOURCE_DATE_EPOCH} ||= $changelog->{timestamp} || time;
# Select the format to use
if (not defined $build_format) {
my $format_file = 'debian/source/format';
if (-e $format_file) {
my $format = Dpkg::Source::Format->new(filename => $format_file);
$build_format = $format->get();
} else {
warning('no source format specified in %s, ' .
'see dpkg-source(1)', $format_file);
$build_format = '1.0';
}
}
my $srcpkg = Dpkg::Source::Package->new(format => $build_format,
options => \%options);
my $fields = $srcpkg->{fields};
my @sourcearch;
my %archadded;
my @binarypackages;
# Scan control info of source package
my $src_fields = $control->get_source();
error("debian/control doesn't contain any information about the source package") unless defined $src_fields;
my $src_sect = $src_fields->{'Section'} || 'unknown';
my $src_prio = $src_fields->{'Priority'} || 'unknown';
foreach my $f (keys %{$src_fields}) {
my $v = $src_fields->{$f};
if ($f eq 'Source') {
set_source_package($v);
$fields->{$f} = $v;
} elsif ($f eq 'Uploaders') {
# Merge in a single-line
($fields->{$f} = $v) =~ s/\s*[\r\n]\s*/ /g;
} elsif (any { $f eq $_ } field_list_src_dep()) {
my $dep;
my $type = field_get_dep_type($f);
$dep = deps_parse($v, build_dep => 1, union => $type eq 'union');
error('error occurred while parsing %s', $f) unless defined $dep;
my $facts = Dpkg::Deps::KnownFacts->new();
$dep->simplify_deps($facts);
$dep->sort() if $type eq 'union';
$fields->{$f} = $dep->output();
} else {
field_transfer_single($src_fields, $fields, $f);
}
}
# Scan control info of binary packages
my @pkglist;
foreach my $pkg ($control->get_packages()) {
my $p = $pkg->{'Package'};
my $sect = $pkg->{'Section'} || $src_sect;
my $prio = $pkg->{'Priority'} || $src_prio;
my $type = $pkg->{'Package-Type'} ||
$pkg->get_custom_field('Package-Type') || 'deb';
my $arch = $pkg->{'Architecture'};
my $profile = $pkg->{'Build-Profiles'};
my $pkg_summary = sprintf('%s %s %s %s', $p, $type, $sect, $prio);
$pkg_summary .= ' arch=' . join ',', split ' ', $arch;
if (defined $profile) {
# Instead of splitting twice and then joining twice, we just do
# simple string replacements:
# Remove the enclosing <>
$profile =~ s/^\s*<(.*)>\s*$/$1/;
# Join lists with a plus (OR)
$profile =~ s/>\s+</+/g;
# Join their elements with a comma (AND)
$profile =~ s/\s+/,/g;
$pkg_summary .= " profile=$profile";
}
if (defined $pkg->{'Protected'} and $pkg->{'Protected'} eq 'yes') {
$pkg_summary .= ' protected=yes';
}
if (defined $pkg->{'Essential'} and $pkg->{'Essential'} eq 'yes') {
$pkg_summary .= ' essential=yes';
}
push @pkglist, $pkg_summary;
push @binarypackages, $p;
foreach my $f (keys %{$pkg}) {
my $v = $pkg->{$f};
if ($f eq 'Architecture') {
# Gather all binary architectures in one set. 'any' and 'all'
# are special-cased as they need to be the only ones in the
# current stanza if present.
if (debarch_eq($v, 'any') || debarch_eq($v, 'all')) {
push(@sourcearch, $v) unless $archadded{$v}++;
} else {
for my $a (split(/\s+/, $v)) {
error("'%s' is not a legal architecture string", $a)
if debarch_is_illegal($a);
error('architecture %s only allowed on its ' .
"own (list for package %s is '%s')",
$a, $p, $a)
if $a eq 'any' or $a eq 'all';
push(@sourcearch, $a) unless $archadded{$a}++;
}
}
} elsif (any { $f eq $_ } qw(Homepage Description)) {
# Do not overwrite the same field from the source entry
} else {
field_transfer_single($pkg, $fields, $f);
}
}
}
unless (scalar(@pkglist)) {
error("debian/control doesn't list any binary package");
}
if (any { $_ eq 'any' } @sourcearch) {
# If we encounter one 'any' then the other arches become insignificant
# except for 'all' that must also be kept
if (any { $_ eq 'all' } @sourcearch) {
@sourcearch = qw(any all);
} else {
@sourcearch = qw(any);
}
} else {
# Minimize arch list, by removing arches already covered by wildcards
my @arch_wildcards = grep { debarch_is_wildcard($_) } @sourcearch;
my @mini_sourcearch = @arch_wildcards;
foreach my $arch (@sourcearch) {
if (none { debarch_is($arch, $_) } @arch_wildcards) {
push @mini_sourcearch, $arch;
}
}
@sourcearch = @mini_sourcearch;
}
$fields->{'Architecture'} = join(' ', @sourcearch);
$fields->{'Package-List'} = "\n" . join("\n", sort @pkglist);
# Scan fields of dpkg-parsechangelog
foreach my $f (keys %{$changelog}) {
my $v = $changelog->{$f};
if ($f eq 'Source') {
set_source_package($v);
$fields->{$f} = $v;
} elsif ($f eq 'Version') {
my ($ok, $error) = version_check($v);
error($error) unless $ok;
$fields->{$f} = $v;
} elsif ($f eq 'Binary-Only') {
error('building source for a binary-only release')
if $v eq 'yes';
} elsif ($f eq 'Maintainer') {
# Do not replace the field coming from the source entry
} else {
field_transfer_single($changelog, $fields, $f);
}
}
$fields->{'Binary'} = join(', ', @binarypackages);
# Avoid overly long line by splitting over multiple lines
if (length($fields->{'Binary'}) > 980) {
$fields->{'Binary'} =~ s/(.{0,980}), ?/$1,\n/g;
}
my $basenamerev = $srcpkg->get_basename(1);
# fragment from Dpkg::Source::Package::V2::_generate_patch()
my $tarfile;
my $comp_ext_regex = compression_get_file_extension_regex();
foreach my $file (sort $srcpkg->find_original_tarballs()) {
if ($file =~ /\.orig\.tar\.$comp_ext_regex$/) {
if (defined($tarfile)) {
error('several orig.tar files found (%s and %s) but only ' .
'one is allowed', $tarfile, $file);
}
$srcpkg->add_file($file);
} elsif ($file =~ /\.orig-([[:alnum:]-]+)\.tar\.$comp_ext_regex$/) {
$srcpkg->add_file($file);
}
}
# end of fragment
# fragment from Dpkg::Source::Package::V2::do_build()
$srcpkg->add_file("$ARGV[0]/$basenamerev.debian.tar.xz");
# end of fragment
# Write the .dsc
my $dscname = "$ARGV[0]/$basenamerev.dsc";
$srcpkg->write_dsc(filename => $dscname, substvars => $substvars);
exit(0);


@@ -0,0 +1,91 @@
#!/bin/sh
set +e ; set -f
usage() {
cat >&2 <<-EOF
# usage: ${0##*/} <path> [<path> ..]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
: "${DPKG_ADMINDIR:=/var/lib/dpkg}"
dpkg_info_dir="${DPKG_ADMINDIR}/info"
dpkg_diversions="${DPKG_ADMINDIR}/diversions"
# _same() { find "$@" -maxdepth 0 -samefile "$1" -print0 2>/dev/null ; }
_get_alt() {
[ -n "$1" ] || return
a="$1"
case "$1" in
/usr/local/* ) return ;;
/usr/* ) a="${1#/usr}" ;;
/* ) a="/usr$1" ;;
* ) return ;;
esac
# n=$(_same "$1" "$a" | xargs -0 sh -c 'echo $#' --)
# [ "$n" = 2 ] || return
# printf '%s' "$a"
test "$1" -ef "$a" || return 0
printf '%s' "$a"
}
_dpkg_search() {
[ -n "$1" ] || return
grep -FxRl -e "$1" "${dpkg_info_dir}/" \
| sed -En '/\.list$/{s,^.+/([^/]+)\.list$,\1,p}'
}
_dpkg_divert() {
[ -n "$1" ] || return
n=$(grep -Fxhn -e "$1" "${dpkg_diversions}" | cut -d: -f1)
[ -n "$n" ] || return
case "$((n%3))" in
1 | 2 ) ;;
* ) return ;;
esac
k=$(( n - (n%3) ))
divert_pkg=$(sed -n "$((k+3))p" "${dpkg_diversions}")
case "$((n%3))" in
1 )
[ "${divert_pkg}" = ':' ] || echo "${divert_pkg}"
;;
2 )
divert_from=$(sed -n "$((k+1))p" "${dpkg_diversions}")
_dpkg_search "${divert_from}" | grep -Fxv -e "${divert_pkg}"
;;
esac
}
for i ; do
[ -n "$i" ] || continue
name=
altpath=$(_get_alt "$i")
name=$(_dpkg_divert "$i")
if [ -n "${name}" ] ; then
echo "${name}"
continue
fi
name=$(_dpkg_divert "${altpath}")
if [ -n "${name}" ] ; then
echo "${name}"
continue
fi
name=$( { _dpkg_search "$i" ; _dpkg_search "${altpath}" ; } | sort -uV)
if [ -n "${name}" ] ; then
echo "${name}"
continue
fi
done
exit 0


@@ -0,0 +1,35 @@
#!/bin/sh
set -ef
[ -n "$1" ] || exit 1
[ -f "$1" ] || exit 1
! [ -h "$1" ] || exit 1
# ensure that we have at least two empty lines at the end
n=$(env stat -c '%s' "$1") || exit 1
want_lf=2 i=$n
case "$n" in
1 ) ;;
0 ) exit 0 ;;
* ) i=2 ;;
esac
while [ $i -gt 0 ] ; do
n=$((n - 1))
c=$(od -A n -j "$n" -N 1 -t x1 "$1") || exit 1
c=$(printf '%s' "$c" | tr -d '[:space:]')
i=$((i - 1))
[ "$c" = '0a' ] || break
want_lf=$((want_lf - 1))
done
padding=
case "${want_lf}" in
0 ) exit 0 ;;
1 ) padding='\n' ;;
2 ) padding='\n\n' ;;
esac
printf "${padding}" >> "$1"


@@ -0,0 +1 @@
_rsync-wrapper.sh


@@ -0,0 +1,59 @@
#!/bin/sh
set -ef
: "${GPG_KEYSERVER:=hkps://keyserver.ubuntu.com}"
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} {start|stop}
# NB: ensure that env GNUPGHOME is set to appropriate (e.g. temporary) directory
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
act=$(printf '%s' "$1" | tr '[:upper:]' '[:lower:]')
case "${act}" in
1 | start ) ;;
0 | stop ) ;;
* ) usage 1 ;;
esac
check-bin-pkg gpg dirmngr gpgconf
case "${act}" in
1 | start )
[ -n "${GNUPGHOME}" ] || usage 1
[ -d "${GNUPGHOME}" ] || usage 1
cd "${GNUPGHOME}"
cat > gpg.conf <<-EOF
quiet
batch
trust-model always
no-auto-check-trustdb
ignore-time-conflict
keyid-format 0xlong
keyserver ${GPG_KEYSERVER}
EOF
cat > dirmngr.conf <<-EOF
quiet
batch
keyserver ${GPG_KEYSERVER}
EOF
quiet gpg --update-trustdb
quiet gpg --list-keys
quiet dirmngr
;;
0 | stop )
[ -n "${GNUPGHOME}" ] || exit 0
[ -d "${GNUPGHOME}" ] || usage 1
cd "${GNUPGHOME}"
quiet gpgconf --kill all
cd /
rm -rf "${GNUPGHOME}"
;;
esac
exit 0
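
Elsewhere in this changeset the helper above is always used the same way: the caller creates a throwaway GNUPGHOME, brackets its gpg work between the start and stop actions, and removes the directory afterwards. A minimal sketch of that pattern (the key id is a placeholder):

w=$(mktemp -d)
export GNUPGHOME="$w/dot-gnupg"
mkdir -m 0700 "${GNUPGHOME}"
gpg-batch start
gpg --recv-keys 0xDEADBEEFDEADBEEF   ## placeholder key id
gpg-batch stop
unset GNUPGHOME
rm -rf "$w"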


@@ -0,0 +1,29 @@
#!/bin/sh
set -ef
: "${1:?}" "${2:?}"
w=$(mktemp -d) ; : "${w:?}"
gpg_on() { gpg-batch start ; }
gpg_off() {
cd /
gpg-batch stop
unset GNUPGHOME
rm -rf "$w"
exit "${1:-0}"
}
(
export GNUPGHOME="$w/dot-gnupg"
mkdir -m 0700 "${GNUPGHOME}"
gpg_on
gpg --import "$1"
gpg --armor --export > "$w/export"
cat < "$w/export" > "$2"
gpg --show-keys "$2"
gpg_off
) || gpg_off 1


@@ -0,0 +1,38 @@
#!/bin/sh
set +e ; set -f
me="${0##*/}"
## test that 1st argument is (exactly) file
if [ -z "$1" ] ; then
env printf "%q: requires 1st argument\\n" "${me}" >&2
exit 1
fi
if [ -h "$1" ] ; then
env printf "%q: symlinks are not supported: %q\\n" "${me}" "$1" >&2
exit 1
fi
if ! [ -f "$1" ] ; then
env printf "%q: file does not exist: %q\\n" "${me}" "$1" >&2
exit 1
fi
check-bin-pkg gpg dirmngr gpgconf
w=$(mktemp -d) ; : "${w:?}"
(
set -e
cp -L "$1" "$w/s"
ensure-eof-empty-line "$w/s"
gpg \
--utf8-strings --textmode --armor --clearsign \
--output "$w/d" - < "$w/s"
rm -f "$w/s"
cat < "$w/d" > "$1"
rm -f "$w/d"
)
res=$?
rm -rf -- "$w"
exit ${res}


@@ -0,0 +1,10 @@
#!/bin/sh
set +e ; set -f
check-bin-pkg gpg || exit $?
t=$(mktemp) || exit $?
gpg -abs "$t"
r=$?
rm -f "$t" "$t.asc"
exit $r

.config/dotfiles/scripts/idle Executable file

@@ -0,0 +1,20 @@
#!/bin/sh
set +e ; set -f
have_cmd() { command -v "$1" </dev/null >/dev/null 2>&1 ; }
has_nice=1 has_chrt=1 has_ionice=1
have_cmd nice || has_nice=
have_cmd chrt || has_chrt=
have_cmd ionice || has_ionice=
if [ -z "${has_nice}${has_chrt}${has_ionice}" ] ; then
env printf "%q: unable to launch command in idle mode due to missing executables\\n" "${0##*/}" >&2
exit 126
fi
exec \
${has_nice:+ nice -n +40 } \
${has_chrt:+ chrt -i 0 } \
${has_ionice:+ ionice -c 3 } \
"$@"


@@ -0,0 +1,13 @@
#!/bin/sh
set +e ; set -f
[ -n "${1:?}" ]
check-bin-pkg deb-src-export || exit $?
case "$1" in
*/* ) dstdir="$1/src" ;;
* ) dstdir="/tmp/$1/src" ;;
esac
rm -rf "${dstdir}"
exec deb-src-export "${dstdir}"


@@ -0,0 +1,60 @@
#!/bin/sh
set +e ; set -f
check-bin-pkg quilt || exit $?
[ -n "${1:?}" ] || exit 1
if [ -d "$1" ] ; then
patchdir="$1/debian/patches"
if [ -d "${patchdir}" ] ; then
ls "${patchdir}/series" >/dev/null || exit 1
else
patchdir="$1"
fi
series="${patchdir}/series"
if ! [ -f "${series}" ] ; then
mkdir -p "$1/.pc" || exit 1
series="$1/.pc/krd-quilt-series"
touch "${series}" || exit 1
quilt-series-auto "${patchdir}" > "${series}"
fi
elif [ -f "$1" ] ; then
[ -s "$1" ] || exit 1
series="$1"
patchdir=$(dirname "${series}")
else
exit 1
fi
set -a
QUILT_SERIES="${series:?}"
QUILT_PATCHES="${patchdir:?}"
set +a
xsedx=$(printf '\027')
r=0
while read -r i ; do
[ -n "$i" ] || continue
quilt --fuzz=0 push "$i" || exit $?
quilt refresh "$i" || exit $?
sed -E -i \
-e 's#^(-{3} )[^/][^/]*/(.*)$#\1a/\2#;' \
-e 's#^(\+{3} )[^/][^/]*/(.*)$#\1b/\2#' \
"$i"
rm -f "$i"'~'
done <<EOF
$(
if ! quilt unapplied ; then
quilt-series-strip-comments "${series}" \
| sed -E "s${xsedx}^${xsedx}${patchdir}/${xsedx}"
fi
)
EOF
exit $r
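A hedged usage sketch (the installed name is not shown here; the zsh predecessor removed below was krd-quilt). The argument is a source tree, a patch directory, or a series file; every unapplied patch is pushed with --fuzz=0, refreshed, and rewritten to a/ b/ diff prefixes:
krd-quilt .                          # use ./debian/patches/series, or auto-generate one under ./.pc
krd-quilt ./debian/patches/series    # or point at an explicit series file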


@@ -0,0 +1,115 @@
#!/bin/sh
set +e ; set -f
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <package top directory> {<arch>|<host-arch>:<build-arch>} [env|arguments]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
check-bin-pkg sbuild xz:xz-utils || exit $?
_tmpdir() {
TMPDIR="$1"
TMP="$1"
TEMPDIR="$1"
TEMP="$1"
export TMPDIR TMP TEMPDIR TEMP
}
## TODO: questionable
_tmpdir /tmp
[ -n "${1:?}" ]
[ -n "${2:?}" ]
case "$1" in
*/* ) topdir="$1" ;;
* ) topdir="/tmp/$1" ;;
esac
[ -d "${topdir}" ] || exit 1
srcdir="${topdir}/src"
[ -d "${srcdir}" ] || exit 2
_sbuild_args=
_append_args() {
for __i ; do
_sbuild_args="${_sbuild_args}${_sbuild_args:+ }$(env printf '%q' "${__i}" )"
done ; unset __i
}
case "$2" in
*:* )
unset host_arch build_arch _xtra
IFS=: read -r host_arch build_arch _xtra <<-EOF
$2
EOF
if [ -z "${host_arch}" ] ; then
env printf "%q: error: 'host-arch' expected but empty\\n" "${me}" >&2
exit 1
fi
if [ -z "${build_arch}" ] ; then
env printf "%q: error: 'build-arch' expected but empty\\n" "${me}" >&2
exit 1
fi
if [ -n "${_xtra}" ] ; then
env printf "%q: warning: extra data in 'host-arch:build-arch' specifier: %q\\n" "${me}" "${_xtra}" >&2
fi
unset _xtra
arch=${host_arch}
_append_args "--host=${host_arch}"
_append_args "--build=${build_arch}"
;;
* )
arch=$2
_append_args "--arch=$2"
;;
esac
## done with args
shift 2
for i ; do
## naive splitting args and env
case "$i" in
-* ) _append_args "$i" ;;
=* ) _append_args "$i" ;;
*= ) unset "${i%=}" ;;
*=* ) export "$i" ;;
* ) _append_args "$i" ;;
esac
done ; unset i
builddir="${topdir}/${arch}"
mkdir -p "${topdir}/all" "${builddir}" "${builddir}-all" "${builddir}-debug"
cd "${builddir}"
set +f
for i in "${srcdir}"/*.dsc ; do
## I'm NOT proud of using "eval" here :(
eval "idle sbuild --arch-all --arch-any ${_sbuild_args} $i" || exit $?
find -name '*.build' -type l -exec rm -f {} +
find -name '*.build' -type f -exec xz -9vv {} +
done
find \
-regextype egrep -regex '.+all\.d?deb$' -type f \
-exec mv -fvt "../${arch}-all" {} +
find \
-name '*.ddeb' -type f \
-exec mv -fvt "../${arch}-debug" {} +
find \
-regextype egrep -regex '.+dbgsym_[^_]+_'"${arch}"'\.d?deb$' -type f \
-exec mv -fvt "../${arch}-debug" {} +
cd "${builddir}-all"
find -type f -exec mv -nvt '../all' {} +
exit 0
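Usage sketch for the sbuild wrapper above (installed name not shown; its zsh predecessor removed below was krd-sbuild). A bare first argument resolves to /tmp/<name>, *.dsc files are picked up from <topdir>/src, and trailing words are split naively into sbuild flags and exported environment variables:
krd-sbuild mypkg amd64 --no-run-lintian DEB_BUILD_OPTIONS=nocheck
krd-sbuild mypkg arm64:amd64   # cross build: --host=arm64 --build=amd64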


@@ -0,0 +1,117 @@
#!/bin/sh
set +e ; set -f
me=${0##*/}
usage() {
cat >&2 <<-EOF
# usage: ${me} <certificate file> [PEM output file] [fingerprints output file] [offsets output file]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
[ -n "$1" ] || exit 1
[ -f "$1" ] || {
env printf '%s: not a file or does not exist: %q\n' "${me}" "$1" >&2
exit 1
}
[ -s "$1" ] || exit 0
w=$(mktemp -d) || exit 1
w_cleanup() {
[ -z "$w" ] || ls -lA "$w/" >&2
[ -z "$w" ] || rm -rf "$w"
unset w
exit "${1:-0}"
}
have_cmd() { command -v "$1" </dev/null >/dev/null 2>&1 ; }
unset has_mawk
if have_cmd mawk ; then has_mawk=1 ; fi
mawk_then_awk() {
if [ -n "${has_mawk}" ]
then mawk "$@"
else awk "$@"
fi
}
bundle_offsets() {
mawk_then_awk 'BEGIN { OFS = ","; i_begin = 0; }
$0 == "-----BEGIN CERTIFICATE-----" {
i_begin = NR;
}
$0 == "-----END CERTIFICATE-----" {
if (i_begin > 0) {
print i_begin, NR;
i_begin = 0;
}
}' "$1"
}
bundle_fingerprints() {
local x f
while read -r x ; do
[ -n "$x" ] || continue
f=$(sed -ne "${x}p" "$1" | openssl x509 -noout -fingerprint -sha256)
[ -n "$f" ] || f=$(sed -ne "${x}p" "$1" | openssl x509 -noout -fingerprint)
[ -n "$f" ] || continue
printf '%s\n' "$f" | tr '[:upper:]' '[:lower:]'
done < "$2"
}
openssl storeutl -certs "$1" > "$w/cert.pem" || w_cleanup 1
[ -s "$w/cert.pem" ] || w_cleanup 1
tr -s '\r\n' '\n' < "$w/cert.pem" > "$w/cert.txt"
[ -s "$w/cert.txt" ] || w_cleanup 1
rm -f "$w/cert.pem"
bundle_offsets "$w/cert.txt" > "$w/cert.off"
[ -s "$w/cert.off" ] || w_cleanup 1
bundle_fingerprints "$w/cert.txt" "$w/cert.off" > "$w/cert.fp.all"
[ -s "$w/cert.fp.all" ] || w_cleanup 1
sort -uV < "$w/cert.fp.all" > "$w/cert.fp"
while read -r fp ; do
[ -n "${fp}" ] || continue
n=$(grep -m1 -Fxn -e "${fp}" "$w/cert.fp.all" | cut -d : -f 1)
[ -n "$n" ] || continue
off=$(sed -ne "${n}p" "$w/cert.off")
[ -n "${off}" ] || continue
sed -ne "${off}p" "$w/cert.txt"
done < "$w/cert.fp" > "$w/cert.pem"
[ -s "$w/cert.pem" ] || w_cleanup 1
rm -f "$w/cert.txt" "$w/cert.off" "$w/cert.fp.all"
if [ -n "$2" ] ; then
while : ; do
if [ -e "$2" ] ; then
[ -f "$2" ] || break
fi
cat > "$2"
break ; done
else
cat
fi < "$w/cert.pem"
while [ -n "$3" ] ; do
if [ -e "$3" ] ; then
[ -f "$3" ] || break
fi
cat "$w/cert.fp" > "$3"
break ; done
while [ -n "$4" ] ; do
if [ -e "$4" ] ; then
[ -f "$4" ] || break
fi
bundle_offsets "$w/cert.pem" > "$4"
break ; done
rm -rf "$w" ; unset w


@@ -0,0 +1,28 @@
#!/bin/sh
set -ef
for k in 1024:20 2048:80 ; do
unset b t
IFS=':' read -r b t <<-EOF
$k
EOF
f="dh${b}.pem"
echo "# openssl genpkey: $f" >&2
timeout --kill-after=3s ${t}s \
openssl genpkey -quiet -genparam -algorithm DH -out "./$f" -pkeyopt "dh_paramgen_prime_len:${b}"
done
for k in 2048:80 3072:120 4096:160 ; do
unset b t
IFS=':' read -r b t <<-EOF
$k
EOF
f="ffdhe${b}.pem"
echo "# openssl genpkey: $f" >&2
timeout --kill-after=3s ${t}s \
openssl genpkey -quiet -genparam -algorithm DH -out "./$f" -pkeyopt "group:ffdhe${b}"
done


@@ -0,0 +1,209 @@
#!/bin/sh
set -ef
ocsp_fetch_timeout=15
ocsp_fetch_retries=3
ocsp_fetch_retry_delay=5
ocsp_valid_threshold=86400
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <command> [args...]
# ${me} get-uri <in:cert.pem>
# ${me} is-valid <in:issuer.pem> <in:cert.pem> <in:ocsp.der>
# ${me} is-expiring <in:issuer.pem> <in:cert.pem> <in:ocsp.der>
# ${me} fetch <in:issuer.pem> <in:cert.pem> <out:ocsp.der>
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
## $1 - X509 in PEM format
ossl_x509_verify_fmt() {
openssl x509 -in "$1" -noout >/dev/null
}
## $1 - cert
ossl_ocsp_uri() {
openssl x509 -in "$1" -noout -ocsp_uri \
| head -n 1
}
## $1 - chain
## $2 - cert
## $3 - ocsp uri
## $4 - ocsp response
ossl_ocsp_fetch() {
openssl ocsp \
-timeout "${ocsp_fetch_timeout}" \
-nonce \
-issuer "$1" -cert "$2" -url "$3" -respout "$4"
}
## $1 - ocsp response
ossl_ocsp_verify_fmt() {
openssl ocsp \
-noverify -respin "$1"
}
## $1 - chain
## $2 - cert
## $3 - ocsp response
ossl_ocsp_read() {
openssl ocsp \
-issuer "$1" -cert "$2" -respin "$3" -resp_text
}
## $1 - chain
## $2 - cert
## $3 - ocsp response
ossl_ocsp_verify() {
ossl_ocsp_read "$@" >/dev/null
}
## stdin - output of ossl_ocsp_read()
ossl_ocsp_next_update() {
sed -En '/^\s*[Nn]ext [Uu]pdate:\s*(\S.+\S)\s*$/{s//\1/;p;q}'
}
unset arg_ok cmd chain cert ocsp_uri ocsp_resp
arg_ok=
while : ; do
[ -n "$1" ] || break
cmd="$1"
case "$1" in
get-uri )
[ -n "$2" ] || break
[ -s "$2" ] || break
ossl_x509_verify_fmt "$2" || break
cert="$2"
;;
is-valid | is-expiring | fetch )
[ -n "$2" ] || break
[ -s "$2" ] || break
ossl_x509_verify_fmt "$2" || break
chain="$2"
[ -n "$3" ] || break
[ -s "$3" ] || break
ossl_x509_verify_fmt "$3" || break
cert="$3"
[ -n "$4" ] || break
ocsp_resp="$4"
## OCSP response validation is handled later and in various ways (!)
## e.g. "is-valid" (cmd_is_valid) validates OCSP response as expected
## but "is-expiring" (cmd_is_expiring) returns success for invalid OCSP response
## which means OCSP response should be updated ASAP
;;
*) break ;;
esac
arg_ok=1
break ; done
[ -n "${arg_ok}" ] || usage 1
unset arg_ok
## OCSP URI is used only in "get-uri" and "fetch" commands
## but implicitly required for all actions
ocsp_uri=$(ossl_ocsp_uri "${cert}") || exit 1
if [ -z "${ocsp_uri}" ] ; then
env printf '%q: unable to extract OCSP URI from %q\n' "${me}" "${cert}" >&2
exit 1
fi
## early command handling
if [ "${cmd}" = 'get-uri' ] ; then
printf '%s\n' "${ocsp_uri}"
exit 0
fi
## $1 - chain
## $2 - cert
## $3 - ocsp response
cmd_is_valid() {
ossl_ocsp_verify_fmt "$3" || return 1
ossl_ocsp_verify "$1" "$2" "$3" || return 1
}
## $1 - chain
## $2 - cert
## $3 - ocsp response
cmd_is_expiring() {
cmd_is_valid "$1" "$2" "$3" || return 0
local need_update next ts_now ts_next ts_diff
need_update=1
while : ; do
next=$(ossl_ocsp_read "$1" "$2" "$3" 2>/dev/null | ossl_ocsp_next_update)
[ -n "${next}" ] || break
ts_now=$(date '+%s')
ts_next=$(date -d "${next}" '+%s')
[ -n "${ts_next}" ] || break
[ ${ts_now} -lt ${ts_next} ] || break
ts_diff=$((ts_next - ts_now))
[ ${ts_diff} -le ${ocsp_valid_threshold} ] || need_update=0
break ; done
if [ "${need_update}" = 0 ] ; then
env printf '%q: %q has valid and fresh OCSP response\n' "${me}" "$2" >&2
return 1
fi
return 0
}
## $1 - chain
## $2 - cert
## $3 - ocsp uri
## $4 - ocsp response
cmd_fetch() {
local t i r
t=$(mktemp) ; : "${t:?}"
for i in $(seq 1 "${ocsp_fetch_retries}") ; do
i= ## no-op
if ossl_ocsp_fetch "$1" "$2" "$3" "$t" ; then
break
fi
: > "$t"
sleep "${ocsp_fetch_retry_delay}"
done
r=
while : ; do
[ -s "$t" ] || break
cmd_is_valid "$1" "$2" "$t" || break
r=1
break ; done
if [ -z "$r" ] ; then
env printf '%q: unable to fetch OCSP response for %q via %q\n' "${me}" "$2" "$3" >&2
rm -rf "$t"
return 1
fi
r=
while : ; do
touch "$4" || break
tee "$4" < "$t" >/dev/null || break
chmod 0644 "$4" || break
r=1
break ; done
if [ -z "$r" ] ; then
env printf '%q: unable to save OCSP response for %q into %q\n' "${me}" "$2" "$4" >&2
rm -rf "$t"
return 1
fi
return 0
}
case "${cmd}" in
is-valid ) cmd_is_valid "${chain}" "${cert}" "${ocsp_resp}" ;;
is-expiring ) cmd_is_expiring "${chain}" "${cert}" "${ocsp_resp}" ;;
fetch ) cmd_fetch "${chain}" "${cert}" "${ocsp_uri}" "${ocsp_resp}" ;;
esac
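A usage sketch tying the commands together, e.g. from a cron job (script name assumed):
ocsp-helper get-uri cert.pem
ocsp-helper is-expiring chain.pem cert.pem ocsp.der && ocsp-helper fetch chain.pem cert.pem ocsp.der
Since is-expiring deliberately succeeds for a missing or invalid response, the fetch runs exactly when a refresh is needed.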


@@ -0,0 +1,11 @@
#!/bin/sh
set -ef
set -a
PIP_DISABLE_PIP_VERSION_CHECK=1
PIP_NO_CACHE_DIR=1
PIP_ROOT_USER_ACTION=ignore
PIP_NO_COMPILE=1
set +a
exec "$@"

.config/dotfiles/scripts/quiet Executable file

@@ -0,0 +1,38 @@
#!/bin/sh
set +e ; set -f
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <command> [arguments]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
t=$(mktemp)
if [ -z "$t" ] ; then
env printf "# %q: unable to create temporary file\\n" "${me}" >&2
## unable to create temporary file?
## no output in case of error
exec "$@" >/dev/null 2>/dev/null
fi
( "$@" ; ) >"$t" 2>&1
r=$?
if [ $r != 0 ] ; then
printf '# command:'
env printf ' %q' "$@"
echo
echo "# return code: $r"
if [ -s "$t" ] ; then
echo '# output:'
sed -E 's/^(.+)$/#>| \1/;s/^$/#=|/' < "$t"
fi
fi >&2
rm -f "$t"
exit $r
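This quiet wrapper is the helper the gpg-batch script above already relies on: silent on success, otherwise it replays the command line, the exit code and the captured output to stderr. For example:
quiet gpg --update-trustdb
quiet sh -c 'echo oops >&2; exit 3'   # prints the command, '# return code: 3' and the captured output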


@@ -0,0 +1,10 @@
#!/bin/sh
set +e ; set -f
[ -n "${1:?}" ] || exit 1
[ -d "$1" ] || exit 1
find "$1/" -follow -type f -printf '%P\0' \
| sed -zEn '/\.(diff|patch)$/p' \
| sort -zuV \
| xargs -0r printf '%s\n'


@@ -0,0 +1,4 @@
#!/bin/sh
set -ef
sed -E -e '/^[[:space:]]*(#|$)/d' "$@"

.config/dotfiles/scripts/run-as Executable file

@@ -0,0 +1,148 @@
#!/bin/sh
set -ef
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} <user>[:<group>] [command and arguments]
EOF
exit "${1:-0}"
}
[ $# != 0 ] || usage
[ -n "$1" ] || usage 1
if ! command -v setpriv >/dev/null ; then
env printf "%q: error: 'setpriv' is missing, unable to switch user!\\n" "${me}" >&2
exit 126
fi
is_id() { printf '%s' "$1" | grep -Eqz '^(0|[1-9][0-9]*)$' ; }
userspec="$1" ; shift
unset user group _xtra
IFS=':' read -r user group _xtra <<-EOF
${userspec}
EOF
if [ -z "${user}" ] ; then
env printf "%q: error: 'user' expected but empty\\n" "${me}" >&2
usage 1
fi
if [ -n "${_xtra}" ] ; then
env printf "%q: warning: extra data in 'user:group' specifier: %q\\n" "${me}" "${_xtra}" >&2
fi
unset _xtra
unset _user _x _uid _gid _comment _home _shell _xtra
IFS=':' read -r _user _x _uid _gid _comment _home _shell _xtra <<-EOF
$(getent passwd "${user}" || : )
EOF
unset _x _uid _comment _xtra
_has_user=0
homedir='/'
shell=/bin/sh
if [ -n "${_user}" ] ; then
_has_user=1
user=${_user}
homedir=${_home}
shell=${_shell}
elif is_id "${user}" ; then
## accept unknown/random uid
:
else
env printf "%q: error: 'user' is malformed - neither an existing user nor a numeric id\\n" "${me}" >&2
exit 1
fi
if [ -z "${group}" ] ; then
if [ ${_has_user} = 1 ] ; then
group="${_gid}"
else
group=nogroup
fi
else
unset _group _x _gid _users _xtra
IFS=':' read -r _group _x _gid _users _xtra <<-EOF
$(getent group "${group}" || : )
EOF
unset _x _gid _users _xtra
if [ -n "${_group}" ] ; then
group=${_group}
elif is_id "${group}" ; then
## accept unknown/random gid
:
else
env printf "%q: error: 'group' is malformed - neither an existing group nor a numeric id\\n" "${me}" >&2
exit 1
fi
fi
unset _user _gid _home _shell _group _gid _users
# handle username and working directory
username='__non_existent_user__'
init_groups='--clear-groups'
if [ ${_has_user} = 1 ] ; then
username="${user}"
init_groups='--init-groups'
fi
unset __exec__
_runas() {
${__exec__:+ exec } \
setpriv \
--reuid="${user}" \
--regid="${group}" \
${init_groups} \
${inh_caps} \
env \
-u __exec__ \
-C "${cwd}" \
USER="${username}" \
LOGNAME="${username}" \
HOME="${homedir}" \
SHELL="${shell}" \
"$@"
}
## TODO: support somehow flags for "setpriv"
## Ubuntu 20.04 "Focal":
## setpriv: libcap-ng is too old for "all" caps
cwd=/
inh_caps='--inh-caps=-all'
_runas true >/dev/null 2>&1 || inh_caps=
cwd=
for i in "${PWD}" "${homedir}" / ; do
[ -d "$i" ] || continue
cwd=/
if _runas ls -l "$i" >/dev/null ; then
if [ "$i" != "${PWD}" ] ; then
env printf "%q: info: working directory will be changed to %q\\n" "${me}" "$i" >&2
fi
cwd="$i"
break
else
env printf "%q: warning: user %q can't access directory %q\\n" "${me}" "${user}" "$i" >&2
fi
cwd=
done
if [ -z "${cwd}" ] ; then
env printf "%q: error: unable to find appropriate working directory\\n" "${me}" >&2
exit 1
fi
__exec__=1
if [ $# = 0 ] ; then
_runas id
else
_runas "$@"
fi
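Usage sketch for run-as: the specifier is <user>[:<group>], purely numeric ids are accepted even if unknown to the system, and with no command it falls back to id:
run-as www-data                                # prints the effective ids
run-as 65534:nogroup /bin/sh -c 'umask; pwd'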


@@ -0,0 +1 @@
_perl-wrapper.sh


@@ -0,0 +1,89 @@
#!/bin/sh
set +e ; set -f
: "${GZIP_COMPLEVEL:=1}"
: "${BROTLI_COMPLEVEL:=1}"
: "${ZSTD_COMPLEVEL:=1}"
has_gzip=1 has_brotli=1 has_zstd=1
check-bin-pkg gzip || has_gzip=
check-bin-pkg brotli || has_brotli=
check-bin-pkg zstd || has_zstd=
if [ -z "${has_gzip}${has_brotli}${has_zstd}" ] ; then
env printf "%q: suitable compressors are missing\\n" "${0##*/}" >&2
exit 1
fi
do_gzip() {
[ "$1" -nt "$1.gz" ] || return 0
gzip "-${GZIP_COMPLEVEL}kf" "$1" || return $?
comp_fixup "$1" "$1.gz" || rm -f "$1.gz"
}
do_brotli() {
[ "$1" -nt "$1.br" ] || return 0
brotli "-${BROTLI_COMPLEVEL}kf" "$1" || return $?
comp_fixup "$1" "$1.br" || rm -f "$1.br"
}
do_zstd() {
[ "$1" -nt "$1.zst" ] || return 0
zstd "-${ZSTD_COMPLEVEL}kfq" "$1" || return $?
comp_fixup "$1" "$1.zst" || rm -f "$1.zst"
}
have_cmd() { command -v "$1" </dev/null >/dev/null 2>&1 ; }
unset has_mawk
if have_cmd mawk ; then has_mawk=1 ; fi
mawk_then_awk() {
if [ -n "${has_mawk}" ]
then mawk "$@"
else awk "$@"
fi
}
float_div() {
mawk_then_awk -v "a=$1" -v "b=$2" 'BEGIN{print a/b;exit;}' </dev/null
}
comp_fixup() {
[ -f "$1" ] || return 1
size1=$(env stat -Lc '%s' "$1") || return 1
[ -n "${size1}" ] || return 1
[ "${size1}" != 0 ] || return 1
[ -f "$2" ] || return 1
size2=$(env stat -c '%s' "$2") || return 1
[ -n "${size2}" ] || return 1
[ "${size2}" != 0 ] || return 1
ratio=$(float_div "${size2}" "${size1}") || return 1
case "${ratio}" in
[0-9]*e-[0-9]* )
## doubtful but okay (c) Oleg Tinkov
;;
0.[0-8]* | 0.9[0-5]* )
## compression ratio below 95% is fine enough
;;
* ) return 1 ;;
esac
return 0
}
for i ; do
[ -n "$i" ] || continue
case "$i" in
*.br | *.gz | *.zst ) continue ;;
esac
[ -f "$i" ] || continue
[ -s "$i" ] || continue
[ -z "${has_gzip}" ] || do_gzip "$i"
[ -z "${has_brotli}" ] || do_brotli "$i"
[ -z "${has_zstd}" ] || do_zstd "$i"
done
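A sketch of how the compressor above is meant to be driven (its installed name is not shown in this diff): files already ending in .br/.gz/.zst are skipped, stale outputs are rebuilt, and results with a poor compression ratio are discarded:
GZIP_COMPLEVEL=9 BROTLI_COMPLEVEL=11 ZSTD_COMPLEVEL=19 compress-static /srv/www/assets/*.css /srv/www/assets/*.js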

.config/dotfiles/scripts/to Symbolic link

@@ -0,0 +1 @@
_rsync-wrapper.sh


@@ -0,0 +1,42 @@
#!/bin/sh
set +e ; set -f
_timelimit='15s'
me="${0##*/}"
usage() {
cat >&2 <<-EOF
# usage: ${me} [<timeout>]
# if <timeout> is not specified, the default '${_timelimit}' is used
# <timeout> format: [1-9][0-9]*[smh]
EOF
exit "${1:-0}"
}
case "$1" in
-h | --help ) usage ;;
esac
_timeout() { timeout --verbose --kill-after=3s --foreground "$@" ; }
tlimit="${1:-${_timelimit}}"
case "${tlimit}" in
*s | *m | *h )
printf '%s' "${tlimit}" | grep -zEq '^[1-9][0-9]*[smh]$' || {
env printf "%q: broken <timeout> format: %q\\n" "${me}" "${tlimit}" >&2
usage 1
}
;;
* )
env printf "%q: unsupported <timeout> format: %q\\n" "${me}" "${tlimit}" >&2
usage 1
;;
esac
## verify tlimit
_timeout "${tlimit}" /bin/true || usage 1
_dd='dd conv=notrunc status=none'
_timeout "${tlimit}" ${_dd} bs=1 count=1 || exit 1
exec ${_dd}
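In effect the script copies stdin to stdout but gives up unless the first byte arrives within <timeout>; a hedged example, assuming it is installed as read-timeout:
slow-producer | read-timeout 30s > capture.bin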


@@ -0,0 +1,8 @@
#!/bin/sh
set +e ; set -f
find "$@" -follow -type f -print0 \
| xargs -0 -r -n 128 stat -L --printf='%d:%i|%n\0' \
| sort -z -u -t '|' -k1,1 \
| cut -z -d '|' -f 2 \
| tr '\0' '\n'


@@ -0,0 +1,30 @@
#!/bin/sh
set +e ; set -f
if [ -z "${ZAP_TREE}" ] ; then
export ZAP_TREE=1
## intrusive parallelism
jobs=$(nproc)
jobs=$(( jobs + (jobs + 1)/2 ))
for i ; do
[ -d "$i" ] || continue
find "$i/" -mindepth 1 -maxdepth 1 -type d -print0
done \
| xargs -0 -r readlink -ze \
| sort -zuV \
| xargs -0 -r -n 1 -P "${jobs}" "$0"
exit
fi
find_fast() {
find "$@" -printf . -quit | grep -Fq .
}
for i ; do
[ -d "$i" ] || continue
find_fast "$i/" ! -type d || rm -rfv "$i"
done
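The ZAP_TREE guard above makes the script re-invoke itself once per first-level subdirectory (slightly oversubscribed via nproc); each child removes its directory only when nothing but directories is left inside. A sketch, assuming the script is named zap-tree:
zap-tree /var/cache/builds /tmp/work   # prunes immediate subdirectories that contain no files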


@@ -0,0 +1,34 @@
steps:
- name: check
image: docker.io/debian:trixie-slim
environment:
DEBCONF_NONINTERACTIVE_SEEN: 'true'
DEBIAN_FRONTEND: 'noninteractive'
DEBIAN_PRIORITY: 'critical'
TERM: 'linux'
MALLOC_ARENA_MAX: '4'
commands:
- |
: # install required packages
apt-get -y update
apt-get -y install file findutils git shellcheck zsh
apt-get -y clean
- |
: # verify that repo doesn't contain "garbage"
find_fast() {
find "$@" -printf . -quit | grep -Fq .
}
if find_fast ./ -type f -name '*.zwc' ; then
echo 'found *.zwc' >&2
find ./ -type f -name '*.zwc' | sort -V
exit 1
fi
- |
: # try zsh compile
git ls-files | sort -uV | xargs -r file -N -i | grep text/x-shellscript | cut -d: -f1 \
| xargs -r -n1 zsh -efc 'zcompile -UR "$@"' --
find ./ -type f -name '*.zwc' -delete
- |
: # try shellcheck (except for *.zsh)
git ls-files | grep -Fv '.zsh' | sort -uV | xargs -r file -N -i | grep text/x-shellscript | cut -d: -f1 \
| xargs -r -n1 shellcheck


@@ -1,6 +1,6 @@
# Beware! This file is rewritten by htop when settings are changed in the interface.
# The parser is also very primitive, and not human-friendly.
htop_version=3.2.1
htop_version=3.3.0
config_reader_min_version=3
fields=0 48 17 18 38 39 40 2 46 47 49 1
hide_kernel_threads=1
@@ -28,20 +28,18 @@ show_cpu_frequency=1
show_cpu_temperature=1
degree_fahrenheit=0
update_process_names=0
account_guest_in_cpu_meter=0
account_guest_in_cpu_meter=1
color_scheme=0
enable_mouse=1
delay=11
hide_function_bar=0
header_layout=four_25_25_25_25
column_meters_0=Hostname System DateTime Blank LeftCPUs2
column_meter_modes_0=2 2 2 2 1
column_meters_1=Uptime Tasks LoadAverage Blank RightCPUs2
column_meter_modes_1=2 2 2 2 1
column_meters_2=Memory Swap PressureStallMemorySome PressureStallMemoryFull
column_meter_modes_2=1 1 2 2
column_meters_3=DiskIO NetworkIO PressureStallIOSome PressureStallIOFull Blank Battery
column_meter_modes_3=2 2 2 2 2 2
header_layout=three_30_40_30
column_meters_0=Hostname System DateTime Uptime Blank Tasks LoadAverage FileDescriptors
column_meter_modes_0=2 2 2 2 2 2 2 2
column_meters_1=Memory HugePages Swap Blank AllCPUs2
column_meter_modes_1=1 1 1 2 1
column_meters_2=DiskIO NetworkIO Battery Blank PressureStallCPUSome PressureStallMemorySome PressureStallMemoryFull PressureStallIOSome PressureStallIOFull
column_meter_modes_2=2 2 2 2 2 2 2 2 2
tree_view=1
sort_key=46
tree_sort_key=0
@@ -52,16 +50,16 @@ all_branches_collapsed=0
screen:Main=PID USER PRIORITY NICE M_VIRT M_RESIDENT M_SHARE STATE PERCENT_CPU PERCENT_MEM TIME Command
.sort_key=PERCENT_CPU
.tree_sort_key=PID
.tree_view=1
.tree_view_always_by_pid=1
.tree_view=1
.sort_direction=-1
.tree_sort_direction=1
.all_branches_collapsed=0
screen:I/O=PID USER IO_PRIORITY IO_RATE IO_READ_RATE IO_WRITE_RATE PERCENT_SWAP_DELAY PERCENT_IO_DELAY Command
.sort_key=PID
.tree_sort_key=PID
.tree_view=1
.tree_view_always_by_pid=1
.tree_view=1
.sort_direction=-1
.tree_sort_direction=1
.all_branches_collapsed=0


@@ -0,0 +1,21 @@
$chroot_mode = "schroot";
$build_path = '';
$pgp_options = [ '-us', '-uc', '-ui' ];
$build_arch_all = 1;
$build_arch_any = 1;
$check_space = 0;
$enable_network = 1;
$run_autopkgtest = 0;
$run_lintian = 0;
$run_piuparts = 0;
$apt_distupgrade = 1;
$apt_upgrade = 1;
$purge_build_directory = 'successful';
# don't remove this, Perl needs it:
1;

.config/zsh.dots/.zshenv Normal file

@@ -0,0 +1,6 @@
#!/bin/zsh
if [[ -o interactive ]] ; then
## early redirect
ZDOTDIR="${ZDOTDIR%/${ZDOTDIR:t2}}"
source "${ZDOTDIR}/.zshenv"
fi


@@ -1,33 +1,29 @@
#!/bin/zsh
: "${ZDOTDIR:=${HOME}}"
## early load modules
zmodload zsh/mathfunc zsh/datetime zsh/zprof
typeset -gA ZSHU
__z_unsupported() { echo "not supported" >&2 ; }
ZSHU[t_begin]=${(%):-%D{%s.%6.}}
ZSHU[d_zdot]="${ZDOTDIR}"
ZSHU[d_cache]="${ZDOTDIR}/.cache/zsh"
ZSHU[d_dotfiles]="${ZDOTDIR}/.config/dotfiles"
ZSHU[d_conf]="${ZDOTDIR}/.config/zsh"
ZSHU[d_cache]="${ZDOTDIR}/.cache/zsh"
ZSHU[d_var]="${ZSHU[d_conf]}/var"
ZSHU[d_bin]="${ZDOTDIR}/.config/dotfiles/bin"
ZSHU[d_scripts]="${ZDOTDIR}/.config/dotfiles/scripts"
for i ( d_zdot d_cache d_conf d_bin d_scripts d_var ) ; do
d=${ZSHU[$i]}
[ -d "$d" ] || mkdir -p "$d"
done ; unset i d
ZSHU[d_bin]="${ZSHU[d_dotfiles]}/bin"
ZSHU[d_scripts]="${ZSHU[d_dotfiles]}/scripts"
## early escape
unsetopt global_rcs
## safety measure:
## redirect all following activity within ZDOTDIR to cache
## (probably) these files are safe to remove
ZDOTDIR="${ZSHU[d_cache]}"
rm -f "${ZDOTDIR}/.zshrc" "${ZDOTDIR}/.zlogin"
export ZDOTDIR="${ZDOTDIR}/.config/zsh.dots"
## cleanup: start from scratch
for i ( a s f d ) ; do unhash -$i -m '*' ; done ; unset i
@@ -38,23 +34,24 @@ umask 0022
zshu_parts=( env opt lib rc alias local )
for n ( ${zshu_parts} ) ; do
f="${ZSHU[d_conf]}/$n.zsh"
[ -s "$f" ] && source "$f"
done ; unset n f
[ -s "${ZSHU[d_conf]}/$n.zsh" ] || continue
source "${ZSHU[d_conf]}/$n.zsh"
done ; unset n
for n ( ${zshu_parts} ) ; do
d="${ZSHU[d_conf]}/$n"
[ -d "$d" ] || continue
for i ( $d/*.zsh(N.r) ) ; do
[ -d "${ZSHU[d_conf]}/$n" ] || continue
for i ( "${ZSHU[d_conf]}/$n"/*.zsh(N.r) ) ; do
source "$i"
done ; unset i
done ; unset n d
done
done ; unset i n
unset zshu_parts
ZSHU[t_end]=${(%):-%D{%s.%6.}}
hash -f
ZSHU[t_load]=$[ ZSHU[t_end] - ZSHU[t_begin] ]
ZSHU[t_load]=${ZSHU[t_load]:0:6}
unset 'ZSHU[t_begin]' 'ZSHU[t_end]'
t=${(%):-%D{%s.%6.}}
t=$[ t - ZSHU[t_begin] ]
unset 'ZSHU[t_begin]'
n=${t#*.}
ZSHU[t_load]=${t%.*}.${n:0:4}
unset n t


@@ -1,45 +1,197 @@
#!/bin/zsh
alias bud='buildah bud --isolation chroot --network host --format docker -f '
typeset -Uga ZSHU_CNTR_SHELLS=( /bin/bash /bin/sh /bin/ash )
typeset -ga ZSHU_CNTR_FALLBACK_SHELL=( /busybox/busybox sh )
alias pod-run='podman run -e "TERM=${TERM:-linux}" --network host --rm -it '
alias pod-run-sh="pod-run --entrypoint='[\"/bin/sh\"]' --user=0:0 "
alias pod-ps='podman ps '
alias pod-images='podman images --format "table {{.ID}} {{.Repository}}:{{.Tag}} {{.Size}} {{.Created}} |{{.CreatedAt}}" '
alias pod-inspect='podman inspect '
alias pod-logs='podman logs '
alias bud='buildah bud --network=host -f '
function {
local i
for i ( run images ps top inspect logs ) ; do
alias "pod-$i"="podman $i "
done
}
z-pod() { command podman "$@" ; }
alias podman='z-podman '
z-podman() {
case "${1:-}" in
run ) shift ; z-pod-run "$@" ;;
images ) shift ; z-pod-images "$@" ;;
ps ) shift ; z-pod-ps "$@" ;;
top ) shift ; z-pod-top "$@" ;;
* ) z-pod "$@" ;;
esac
}
z-pod-run() {
z-pod run -e "TERM=${TERM:-linux}" --rm -it "$@"
}
z-pod-images() {
local have_flags=0
case "$1" in
-* ) have_flags=1 ;;
esac
if [ ${have_flags} = 1 ] ; then
z-pod images "$@"
return $?
fi
z-pod images --format 'table {{.ID}} {{.Repository}}:{{.Tag}} {{.Size}} {{.Created}} |{{.CreatedAt}}' "$@"
}
z-pod-ps() {
local have_flags=0
case "$1" in
-* ) have_flags=1 ;;
esac
if [ ${have_flags} = 1 ] ; then
z-pod ps "$@"
return $?
fi
z-pod ps -a --sort names --format 'table {{.ID}} {{.Names}} {{.Image}} {{.CreatedHuman}} {{.Status}}' "$@"
}
z-pod-top() {
local have_flags=0
case "$1" in
-* ) have_flags=1 ;;
esac
if [ ${have_flags} = 1 ] ; then
z-pod top "$@"
return $?
fi
if [ $# -eq 1 ] ; then
z-pod top "$1" 'pid,ppid,user,args,pcpu,time,stime,etime,state,nice,rss,vsz'
else
z-pod top "$@"
fi
}
pod-images-grep() {
z-pod-images \
| {
if [ -z "$1" ] ; then
head
else
sed -En "1{p;D};\\${ZSHU_XSED}$1${ZSHU_XSED}p"
fi
}
}
## NB: naive. rewrite!
pod-run-sh() {
local -a cntr_opts=( --network=host --entrypoint='[]' --user=0:0 )
local i
local -a shell
for i ( ${ZSHU_CNTR_SHELLS} ) ; do
echo "pod-run-sh: trying $i as shell" >&2
z-pod-run ${cntr_opts[@]} "$@" "$i" -c ':' 2>/dev/null || continue
shell=($i) ; break
done
while [ -z "${shell}" ] ; do
echo "pod-run-sh: trying '${ZSHU_CNTR_FALLBACK_SHELL[*]}' as last-resort shell" >&2
z-pod-run ${cntr_opts[@]} "$@" ${ZSHU_CNTR_FALLBACK_SHELL[@]} -c ':' 2>/dev/null || break
shell=(${ZSHU_CNTR_FALLBACK_SHELL})
break
done
if [ -z "${shell}" ] ; then
echo "unable to run: $*"
return 1
fi
z-pod-run ${cntr_opts[@]} "$@" ${shell[@]}
}
## NB: naive. rewrite!
sko-inspect() {
command skopeo inspect "docker://${1:?}"
local i
i="${1:?}" ; shift
command skopeo inspect "$@" "docker://$i"
}
## NB: naive. rewrite!
sko-list-tags() {
command skopeo list-tags "docker://${1:?}"
local i
i="${1:?}" ; shift
command skopeo list-tags "$@" "docker://$i"
}
## NB: naive. rewrite!
pod-dive() {
command dive "podman://${1:?}"
local i
i="${1:?}" ; shift
command dive "$@" "podman://$i"
}
jq-visual() {
jq -C | less
}
jq-visual() { jq -C | "${PAGER:-cat}" ; }
jq-config() { jq '.[].Config' ; }
jq-tags() { jq -r '.Tags[]' ; }
jq-config() {
jq '.[].Config'
}
jq-tags() {
jq -r '.Tags[]'
}
if [ ${UID} -ne 0 ] ; then
alias docker='sudo docker '
fi
alias dkr='docker '
alias dkr-run='dkr run -e "TERM=${TERM:-linux}" --network host --rm -it '
alias dkr-run-sh="dkr-run --entrypoint='' --user=0:0 "
alias dkr-run='dkr run -e "TERM=${TERM:-linux}" --rm -it '
alias dkr-ps='dkr ps '
alias dkr-images='dkr images --format "table {{.ID}}\\t{{.Repository}}:{{.Tag}}\\t{{.Size}}\\t{{.CreatedAt}}" '
alias dkr-inspect='dkr inspect '
alias dkr-logs='dkr logs '
z-dkr() { command docker "$@" ; }
z-dkr-run() {
z-dkr run -e "TERM=${TERM:-linux}" --rm -it "$@"
}
## NB: naive. rewrite!
dkr-run-sh() {
local -a cntr_opts=( --network=host --entrypoint='' --user=0:0 )
local i
local -a shell
for i ( ${ZSHU_CNTR_SHELLS} ) ; do
echo "dkr-run-sh: trying $i as shell" >&2
z-dkr-run ${cntr_opts[@]} "$@" "$i" -c ':' 2>/dev/null || continue
shell=($i) ; break
done
while [ -z "${shell}" ] ; do
echo "dkr-run-sh: trying '${ZSHU_CNTR_FALLBACK_SHELL[*]}' as last-resort shell" >&2
z-dkr-run ${cntr_opts[@]} "$@" ${ZSHU_CNTR_FALLBACK_SHELL[@]} -c ':' 2>/dev/null || break
shell=(${ZSHU_CNTR_FALLBACK_SHELL})
break
done
if [ -z "${shell}" ] ; then
echo "unable to run: $*"
return 1
fi
z-dkr-run ${cntr_opts[@]} "$@" ${shell[@]}
}
## NB: naive. rewrite!
dkr-dive() {
local i
i="${1:?}" ; shift
command dive "$@" "docker://$i"
}
typeset -g ZSHU_GRP_DOCKER=docker
z-adjust-docker() {
[ ${UID} -eq 0 ] && return 0
getent group "${ZSHU_GRP_DOCKER}" >/dev/null || return 1
(( ${+commands[docker]} )) || return 127
local _users=$(getent group "${ZSHU_GRP_DOCKER}" | cut -d: -f4)
local -a users=("${(@s[,])_users}")
local i found
for i ( ${users}) ; do
if [ "$i" = "${USERNAME}" ] ; then
found=1
break
fi
done
[ -n "${found}" ] && return 0
(( ${+commands[sudo]} )) || return 127
alias docker='sudo docker '
z-dkr() { command sudo docker "$@" ; }
return 0
}


@@ -28,4 +28,4 @@ gd() {
cd "$1"
;;
esac
}
}


@@ -1,5 +1,13 @@
#!/bin/zsh
alias gar='git-archive-ref '
alias gbr='git-br '
alias gds='git diff -p --stat=200 '
alias gdu='git-dir-usage '
alias ggc='git-gc '
alias ggcf='git-gc-force '
alias gst='git status -s '
git-dir-usage() {
local gitdir x topdir
gitdir=$(__z_git rev-parse --git-dir) || return $?
@@ -16,39 +24,57 @@ git-dir-usage() {
esac
local -a subdirs
subdirs+="${gitdir}/logs/refs"
subdirs+="${gitdir}/objects/info"
subdirs+="${gitdir}/objects/pack"
if [ -n "${topdir}" ] ; then
env -C "${topdir}" du -d1 "${gitdir}"
env -C "${topdir}" du -d1 "${subdirs[@]}"
else
for x ( logs/refs objects/info objects/pack ) ; do
[ -d "${gitdir}/$x" ] || continue
subdirs+="${gitdir}/$x"
done
(
[ -n "${topdir}" ] && cd "${topdir}/"
if [ ${#subdirs} -gt 0 ] ; then
du -d1 "${subdirs[@]}"
fi
du -d1 "${gitdir}"
du -d1 "${subdirs[@]}"
fi | grep -Ev '^[0-9]\s' | sort -Vk2
) | grep -Ev '^[0-9]K?\s' | sort -Vk2
}
git-gc() {
git-dir-usage || return $?
echo
idle git gc "$@"
echo "# git gc $*" >&2
z-time idle git gc "$@"
echo
git-dir-usage
}
git-gc-force() {
git-gc --aggressive --force
git-dir-usage || return $?
echo
echo "# git gc --aggressive --force $*" >&2
z-time idle git gc --aggressive --force "$@"
echo
echo "# git repack -Ad" >&2
z-time idle git repack -Ad
echo
git-dir-usage
}
git-archive-ref() {
local name ver gitref topdir c_hash c_time out
name="${1:?}" ver="${2:?}" gitref="${3:?}"
topdir=$(__z_git rev-parse --show-toplevel) || return $?
c_hash=$(__z_git log -n 1 --format='%h' --abbrev=8 "${gitref}") || return $?
c_hash=$(__z_git log -n 1 --format='%h' --abbrev=12 "${gitref}") || return $?
c_time=$(__z_git log -n 1 --format='%cd' --date='format:%Y%m%d.%H%M%S' "${gitref}") || return $?
out="${name}_${ver}+git.${c_time}.${c_hash}.tar"
topdir=${topdir:h}
git archive --format=tar -o "${topdir}/${out}" --prefix="${name}-${ver}-git.${c_hash}/" "${gitref}" || return $?
echo "archived to ${out} in ${topdir}/" >&2
}
}
git-br() {
__z_git -c core.pager='cat' branch --no-abbrev "$@"
}
git-rebase-log() {
git log --format='pick %h # %s' --reverse "$@"
}


@@ -1,14 +0,0 @@
#!/bin/zsh
gpg-warmup() {
(( ${+commands[gpg]} )) || return 1
local t r
t=$(mktemp)
command gpg -abs "$t"
r=$?
rm -f "$t" "$t.asc"
return "$r"
}


@@ -1,7 +1,7 @@
#!/bin/zsh
idle() {
[ -n "${1:?}" ] || return 1
[ -n "${1:?}" ]
local f
@@ -17,7 +17,7 @@ idle() {
}
z-idle-ext() {
[ -n "${1:?}" ] || return 1
[ -n "${1:?}" ]
local -a s
@@ -28,7 +28,7 @@ z-idle-ext() {
}
z-idle-int() {
[ -n "${1:?}" ] || return 1
[ -n "${1:?}" ]
## execute in subshell
(


@@ -1,24 +1,22 @@
#!/bin/zsh
kconf-set() {
[ -n "${1:?}" ] || return 1
local n=$1 v=$2
local n v
n="${1:?}" v=$2
shift 2
[ $# -gt 0 ] || return 2
[ $# -gt 0 ] || return 1
command grep -ElZ "^((CONFIG_)?$n=|# (CONFIG_)?$n is not set)" "$@" \
| xargs -0 -r sed -i -E -e "s/^(((CONFIG_)?$n)=.+|# ((CONFIG_)?$n) is not set)\$/\\2\\4=$v/"
}
kconf-unset() {
[ -n "${1:?}" ] || return 1
local n=$1
local n
n="${1:?}"
shift
[ $# -gt 0 ] || return 2
[ $# -gt 0 ] || return 1
command grep -ElZ "^(CONFIG_)?$n=" "$@" \
| xargs -0 -r sed -i -E -e "s/^((CONFIG_)?$n)=.+\$/# \\1 is not set/"


@@ -0,0 +1,42 @@
#!/bin/zsh
openwrt-ssh() {
ssh -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o CheckHostIP=no -o StrictHostKeyChecking=no "$@"
}
openwrt-apk-list() {
(( ${+commands[openwrt-apk]} )) || {
echo 'missing "openwrt-apk"' >&2
return 127
}
local i w
w=$(mktemp -d) ; : "${w:?}"
for i ; do
[ -n "$i" ] || continue
find "$w/" -mindepth 1 -maxdepth 1 -exec rm -rf {} +
find "$w/" -mindepth 1 -maxdepth 1 -exec rm -rf {} + || return
openwrt-apk extract --no-cache --no-logfile --no-network --no-check-certificate --allow-untrusted --no-chown --destination "$w" "$i"
env -C "$w" find ./ -mindepth 1 -exec ls -ldgG --color {} +
done
rm -rf "$w"
}
openwrt-ipk-list() {
local i m o
for i ; do
[ -n "$i" ] || continue
o=0
for m ( './data.tar.gz' 'data.tar.gz' ) ; do
tar -tf "$i" "$m" 2>/dev/null || continue
o=1
env printf '%q:\n' "$i"
tar -Oxf "$i" "$m" | tar -ztvf -
break
done
if [ "$o" = '0' ] ; then
env printf '%q: missing data.tar.gz\n' "$i"
continue
fi
done
}


@@ -1,79 +0,0 @@
#!/bin/zsh
quilt-series-strip-comments() {
sed -E '/^[[:space:]]*(#|$)/d' "$@"
}
quilt-series-auto() {
[ -n "${1:?}" ] || return 1
find "$1/" -follow -type f -printf '%P\0' \
| sed -zEn '/\.(diff|patch)$/p' \
| sort -zuV | xargs -0r -n1
}
krd-quilt() {
[ -n "${1:?}" ] || return 1
(( $+commands[quilt] )) || return 2
local patchdir series tmp_series
if [ -d "$1" ] ; then
patchdir="$1/debian/patches"
if [ -d "${patchdir}" ] ; then
[ -f "${patchdir}/series" ] || return 3
else
patchdir="$1"
fi
series="${patchdir}/series"
if ! [ -f "${series}" ] ; then
tmp_series=1
series=$(mktemp)
quilt-series-auto "${patchdir}" > "${series}"
fi
elif [ -f "$1" ] ; then
[ -s "$1" ] || return 3
series="$1"
patchdir=${series:h}
else
return 3
fi
local r
(
z-quilt-default-env
set -a
QUILT_SERIES="${series}"
QUILT_PATCHES="${patchdir}"
set +a
command quilt pop -a ; echo
r=0
while read -r i ; do
[ -n "$i" ] || continue
k="${patchdir}/$i"
command quilt --fuzz=0 push "$k"
r=$? ; [ $r -eq 0 ] || exit $r
command quilt refresh "$k"
r=$? ; [ $r -eq 0 ] || exit $r
sed -E -i \
-e 's#^(-{3} )[^/][^/]*/(.*)$#\1a/\2#;' \
-e 's#^(\+{3} )[^/][^/]*/(.*)$#\1b/\2#' \
"$k"
rm -f "$k"'~'
done <<< $(quilt-series-strip-comments "${series}")
exit $r
)
r=$?
[ -z "${tmp_series}" ] || rm -f "${series}"
return $r
}


@@ -1,67 +0,0 @@
#!/bin/zsh
krd-debsrc() {
[ -n "${1:?}" ] || return 1
local dstdir
case "$1" in
*/* ) dstdir="$1/src" ;;
* ) dstdir="/tmp/$1/src" ;;
esac
rm -rf "${dstdir}"
deb-src-export "${dstdir}"
}
krd-sbuild() {
[ -n "${1:?}" ] || return 1
[ -n "${2:?}" ] || return 1
(( $+commands[sbuild] )) || return 2
(( $+commands[xz] )) || return 2
local topdir
case "$1" in
*/* ) topdir="$1" ;;
* ) topdir="/tmp/$1" ;;
esac
[ -d "${topdir}" ] || return 3
local srcdir="${topdir}/src"
[ -d "${srcdir}" ] || return 3
arch="$2"
## done with args
shift 2
local -a sbuild_env sbuild_args
local i
for i ; do
## naive splitting args and env
case "$i" in
*=* ) sbuild_env+=( $i ) ;;
*) sbuild_args+=( $i ) ;;
esac
done
(
for i ( ${sbuild_env} ) ; do
export "$i"
done
z-set-tmpdir /tmp
builddir="${topdir}/${arch}"
mkdir -p "${topdir}/all" "${builddir}" "${builddir}-debug"
cd "${builddir}"
for i ( "${srcdir}"/*.dsc(N.r) ) ; do
idle sbuild --arch-all --arch-any --arch=${arch} ${sbuild_args[@]} "$i"
find -name '*.build' -type l -exec rm -f {} +
find -name '*.build' -type f -exec xz -9vv {} +
done
find -name '*dbgsym*.deb' -type f -exec mv -nvt "../${arch}-debug" {} +
find -name '*_all.deb' -type f -exec mv -nvt '../all' {} +
)
}


@@ -7,4 +7,4 @@ function {
fi
alias sudo-i="$c"
alias sudoi="$c"
}
}


@@ -1,11 +1,11 @@
#!/bin/zsh
## sort-n-fill PATH
function {
z-sort-path() {
local -a p
local -aU t npath games
p=( $path )
p=( ${path} )
## strip "games" first :)
t=( ${(@)p:#*games*} )
@@ -14,7 +14,7 @@ function {
## process in-home part
t=( ${(@)p:#${HOME}/*} )
npath+=( "${ZSHU[d_scripts]}" "${ZSHU[d_bin]}" "${HOME}/bin" )
npath+=( "${HOME}/bin" "${ZSHU[d_bin]}" "${ZSHU[d_scripts]}" )
npath+=( ${(@)p:|t} )
p=( $t )
@@ -31,16 +31,20 @@ function {
p=( $t )
## now we're with /sbin and /bin... probably :)
npath+=( /sbin /bin )
## in case of merged /usr
[ -h /sbin ] || npath+=( /sbin )
[ -h /bin ] || npath+=( /bin )
npath+=( $p )
## finally... games! xD
npath+=( /usr/local/games /usr/games )
npath+=( $games )
npath+=( ${games} )
path=( $npath )
path=( ${npath} )
hash -f
}
z-sort-path
unset GREP_OPTIONS
unset LS_OPTIONS
@@ -86,3 +90,7 @@ function {
## last resort
ZSHU[host]=${ZSHU[host_name]}
}
case "${ZSHU[os_family]}" in
linux ) ZSHU[procfs]=/proc ;;
esac


@@ -1,6 +1,8 @@
#!/bin/zsh
export NO_AT_BRIDGE=1
export QT_ACCESSIBILITY=0
set -a
NO_AT_BRIDGE=1
QT_ACCESSIBILITY=0
export MENUCONFIG_COLOR=blackbg
MENUCONFIG_COLOR=blackbg
set +a


@@ -1,5 +1,10 @@
#!/bin/zsh
BUILDAH_FORMAT=docker
BUILDAH_ISOLATION=chroot
typeset -x -m 'BUILDAH_*'
BUILD_IMAGE_NETWORK=host
BUILD_IMAGE_PUSH=0

.config/zsh/env/gopath.zsh vendored Normal file

@@ -0,0 +1,21 @@
#!/bin/zsh
z-gobin-fixup() {
(( ${+commands[go]} )) || return 0
local gobin
gobin=$(command go env GOBIN)
if [ -z "${gobin}" ] ; then
local gopath
gopath=$(command go env GOPATH)
[ -n "${gopath}" ] || return 1
[ -d "${gopath}" ] || return 0
gobin="${gopath}/bin"
fi
[ -d "${gobin}" ] || mkdir "${gobin}" || return 1
## already in PATH?
[ "${path[(I)${gobin}]}" = 0 ] || return 0
path=( "${gobin}" ${path} )
}
z-gobin-fixup

.config/zsh/env/sed.zsh vendored Normal file

@@ -0,0 +1,3 @@
#!/bin/zsh
typeset -r ZSHU_XSED=$'\027'


@@ -7,8 +7,7 @@ z-time() {
"$@" ; r=$?
a=$[ EPOCHREALTIME - a ]
a=$(z-ts-to-human "$a" 6)
echo >&2
echo "time took: $a" >&2
printf '\n# time took: %s\n' "$a" >&2
return $r
}
@@ -49,5 +48,7 @@ add-zsh-hook precmd __z_cmdtime_measure
add-zsh-hook preexec __z_cmdtime_set
else
echo "cmd time measurement is disabled due to missing hook support" >&2
echo "cmd time measurement is disabled due to missing hook support" >&2
fi


@@ -3,10 +3,12 @@
typeset -gA ZSHU_COMP_FORCE
ZSHU[f_compdump]="${ZSHU[d_cache]}/compdump"
ZSHU[d_completion]="${ZSHU[d_cache]}/completion"
ZSHU[d_compzwc]="${ZSHU[d_cache]}/compzwc"
ZSHU[d_compcache]="${ZSHU[d_cache]}/compcache"
[ -d "${ZSHU[d_compcache]}" ] || mkdir -p "${ZSHU[d_compcache]}"
fpath=( "${ZSHU[d_cache]}/completion" $fpath )
typeset -a ZSHU_SYS_FPATH=( ${fpath} )
fpath=( "${ZSHU[d_compzwc]}" "${ZSHU[d_completion]}" ${fpath} )
__z_compdump_print() { printf '#zshu %s %s\n' "$1" "${(P)1}" ; }
@@ -46,11 +48,12 @@ __z_compdump_finalize() {
## TODO: refactor (e.g. buildah completion is a "bit" broken)
__z_comp_bash() {
# (( ${+commands[$1]} )) || return 127
local f p x
(( ${+commands[$1]} )) || return 1
(( ${+_comps[$1]} )) && return 2
(( ${+ZSHU[compdump_bash]} )) || return 3
(( ${+_comps[$1]} )) && return 1
(( ${+ZSHU[compdump_bash]} )) || return 2
(( ${+2} )) && return 0
f=0
@@ -58,7 +61,7 @@ __z_comp_bash() {
x="_$1" ; [ -s "$p/$x" ] && f=1 && break
x="$1" ; [ -s "$p/$x" ] && f=1 && break
done
[ "$f" = 0 ] && return 4
[ "$f" = 0 ] && return 3
complete -C "$x" "$1"
return 0
@@ -68,55 +71,64 @@ __z_comp_external() {
local c f
c="$1" ; shift
(( ${+commands[$c]} )) || return 127
[ $# -gt 0 ] || return 1
(( ${+commands[$c]} )) || return 2
if ! (( ${+ZSHU_COMP_FORCE[$c]} )) ; then
(( ${+_comps[$c]} )) && return
(( ${+_comps[$c]} )) && return 0
fi
f="${ZSHU[d_cache]}/completion/_$c"
f="${ZSHU[d_completion]}/_$c"
if ! [ -s "$f" ] ; then
if ! "$@" > "$f" ; then
rm -f "$f"
return 3
return 2
fi
fi
# zcompile -zR "$f"
# mv -f "$f.zwc" "${ZSHU[d_compzwc]}/$c.zwc"
# emulate zsh -c "autoload -Uz _$c"
autoload -Uz "_$c"
return 0
}
__z_comp_system() {
# (( ${+commands[$1]} )) || return 127
local d
(( ${+commands[$1]} )) || return 1
(( ${+_comps[$1]} )) && return 2
(( ${+_comps[$1]} )) && return 1
for d ( $fpath ) ; do
(( ${+ZSHU_COMP_FORCE[$c]} )) && return 0
local -a _fpath
_fpath=( ${fpath} )
fpath=( ${ZSHU_SYS_FPATH} )
for d ( ${fpath} ) ; do
[ -s "$d/_$1" ] || continue
# emulate zsh -c "autoload -Uz _$1"
autoload -Uz "_$1"
fpath=( ${_fpath} )
return 0
done
return 3
fpath=( ${_fpath} )
return 2
}
## reload or new session are required to regenerate compcache
z-comp-invalidate() {
local f
[ -n "${1:?}" ]
[ -n "$1" ] || return 1
f="${ZSHU[d_cache]}/completion/_$1"
[ -f "$f" ] || return 2
rm -f "$f" || return 3
# rm -f "${ZSHU[d_completion]}/_$1" "${ZSHU[d_compzwc]}/_$1.zwc" "${ZSHU[d_compzwc]}/$1.zwc"
rm -f "${ZSHU[d_completion]}/_$1"
}
## reload or new session are required to regenerate completions
z-comp-flush() {
find "${ZSHU[d_cache]}/completion/" -xdev -type f '!' -name '.keep' -delete
find "${ZSHU[d_completion]}/" "${ZSHU[d_compzwc]}/" -xdev -type f '!' -name '.keep' -delete
}
z-comp-auto() {
@@ -130,4 +142,4 @@ z-comp-auto() {
c=${f#__z_comp_ext__}
__z_comp_external $c $f && unset -f "$f"
done
}
}

.config/zsh/lib/csv.zsh Normal file

@@ -0,0 +1,33 @@
#!/bin/zsh
## NB: set IFS manually
z-csv-select() {
local field value
field="${1:?}" value="${2:?}"
local line
local -a ary
while IFS='' read -rs line ; do
[ -n "${line}" ] || continue
ary=()
read -rs -A ary <<< "${line}"
[ "${ary[${field}]}" = "${value}" ] || continue
printf '%s' "${line}"
return 0
done
return 1
}
z-csv-field() {
local field
field="${1:?}"
local -a ary
read -rs -A ary
printf '%s' "${ary[${field}]}"
}
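These are the colon-separated field helpers that the gpg.zsh snippet below builds on; for instance:
getent passwd | IFS=':' z-csv-select 1 root | IFS=':' z-csv-field 6   # -> /root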


@@ -21,7 +21,7 @@ __z_git_desc_tag() { __z_git describe --tags "$@" ; }
z-git-test() {
[ "${ZSHU_PS[git]}" = '1' ] || return 1
__z_git_avail || return 2
__z_git_avail || return $?
__z_git_is_repo || return 3
@@ -29,15 +29,12 @@ z-git-test() {
}
__z_git_pwd() {
local x
unset 'ZSHU_PS[git_ref]' 'ZSHU_PS[git_changes]' 'ZSHU_PS[git_tag]'
unset 'ZSHU_GIT[path_root]' 'ZSHU_GIT[path_mid]' 'ZSHU_GIT[path_last]'
unset 'ZSHU_GIT[commit]' 'ZSHU_GIT[detached]' 'ZSHU_GIT[ref]' 'ZSHU_GIT[remote]' 'ZSHU_GIT[tag]'
unset 'ZSHU_GIT[ref_behind]' 'ZSHU_GIT[ref_ahead]' 'ZSHU_GIT[ref_changes]'
ZSHU_GIT=()
z-git-test || return
local x
x=$(__z_git rev-parse --short HEAD 2>/dev/null)
[ -n "$x" ] || return
ZSHU_GIT[commit]=$x

.config/zsh/lib/gpg.zsh Normal file

@@ -0,0 +1,45 @@
#!/bin/zsh
z-gpgconf-comp-avail() {
(( ${+commands[gpgconf]} )) || return 127
local comp
comp="${1:?}"
local csv
csv=$(command gpgconf --list-components | IFS=':' z-csv-select 1 "${comp}")
[ -n "${csv}" ]
}
z-gpgconf-comp-opt-avail() {
(( ${+commands[gpgconf]} )) || return 127
local comp opt
comp="${1:?}" opt="${2:?}"
z-gpgconf-comp-avail "${comp}" || return $?
local csv
csv=$(command gpgconf --list-options "${comp}" | IFS=':' z-csv-select 1 "${opt}")
[ -n "${csv}" ]
}
## merely that command:
## gpgconf --list-options "$1" | awk -F: "/^$2:/{ print \$10 }"
z-gpgconf-getopt() {
(( ${+commands[gpgconf]} )) || return 127
local comp opt
comp="${1:?}" opt="${2:?}"
## not really necessary here
# z-gpgconf-comp-opt-avail "${comp}" "${opt}" || return $?
local csv
csv=$(command gpgconf --list-options "${comp}" | IFS=':' z-csv-select 1 "${opt}")
[ -n "${csv}" ] || return 1
local v
v=$(IFS=':' z-csv-field 10 <<< "${csv}")
printf '%s' "$v"
}
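For example, the running agent's cache TTL can be read with (component and option names as reported by gpgconf itself):
z-gpgconf-getopt gpg-agent default-cache-ttl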


@@ -4,7 +4,7 @@ z-history() {
local list
zparseopts -E l=list
if [[ -n "$list" ]]; then
if [ -n "${list}" ]; then
builtin fc "$@"
else
[[ ${@[-1]-} = *[0-9]* ]] && builtin fc -il "$@" || builtin fc -il "$@" 1


@@ -47,5 +47,7 @@ __z_pwd_hook() {
add-zsh-hook precmd __z_pwd_hook
else
echo "shiny pwd's are disabled due to missing hook support" >&2
echo "shiny pwd's are disabled due to missing hook support" >&2
fi


@@ -1,28 +1,65 @@
#!/bin/zsh
z-proc-exists() {
[ -n "${1:?}" ]
while [ -n "${ZSHU[procfs]}" ] ; do
[ -d "${ZSHU[procfs]}" ] || return 1
[ -f "${ZSHU[procfs]}/$1/status" ]
return $?
done
ps -o 'pid=' -p "$1" >/dev/null 2>&1
}
typeset -Uga ZSHU_PARENTS_PID
typeset -ga ZSHU_PARENTS_NAME
function {
local i c
local procfs
while [ -n "${ZSHU[procfs]}" ] ; do
[ -d "${ZSHU[procfs]}" ] || break
procfs=1 ; break
done
i=$$ ; while : ; do
i=$(ps -o ppid= -p $i 2>/dev/null) || :
i=${i//[^0-9]}
[[ "$i" =~ '^[1-9][0-9]*$' ]] || break
local i c x _unused
i=${PPID}
while : ; do
[ -n "$i" ] || break
## don't deal with PID1
[ "$i" = 1 ] && continue
[ "$i" = 1 ] && break
ZSHU_PARENTS_PID+=( $i )
c=
while [ "${procfs}" = 1 ] ; do
[ -f "${ZSHU[procfs]}/$i/cmdline" ] || break
read -d $'\0' -rs c <<< $(cat "${ZSHU[procfs]}/$i/cmdline")
break
done
if [ -z "$c" ] ; then
read -rs c _unused <<< "$(ps -o 'comm=' -p "$i" 2>/dev/null)"
fi
[ -n "$c" ] && ZSHU_PARENTS_NAME+=( "${c:t}" )
x=
while [ "${procfs}" = 1 ] ; do
[ -f "${ZSHU[procfs]}/$i/status" ] || break
# read -rs _unused x <<< "$(cat "${ZSHU[procfs]}/$i/status" | grep -F 'PPid:')"
while read -rs _unused c ; do
[ "${_unused}" = 'PPid:' ] || continue
x=$c ; break
done < "${ZSHU[procfs]}/$i/status"
break
done
if [ -z "$x" ] ; then
read -rs x _unused <<< "$(ps -o 'ppid=' -p "$i" 2>/dev/null)"
fi
i=$x
done
for i ( ${ZSHU_PARENTS_PID} ) ; do
c=$(ps -o comm= -p $i 2>/dev/null) || :
[ -n "$c" ] || continue
ZSHU_PARENTS_NAME+=( "${c:t}" )
done
typeset -r ZSHU_PARENTS_PID
typeset -r ZSHU_PARENTS_NAME
typeset -r ZSHU_PARENTS_PID ZSHU_PARENTS_NAME
}
typeset -gA ZSHU_RUN
@@ -41,6 +78,6 @@ z-run-test() {
}
z-run-test gui konsole xterm x-terminal-emulator
z-run-test nested screen tmux mc
z-run-test nested SCREEN screen tmux mc
z-run-test nested1L mc
z-run-test elevated sudo su


@@ -1,15 +1,17 @@
#!/bin/zsh
say_my_name() {
set -a
GIT_COMMITTER_NAME="$1"
GIT_AUTHOR_NAME="$1"
DEBFULLNAME="$1"
export GIT_COMMITTER_NAME GIT_AUTHOR_NAME DEBFULLNAME
set +a
}
say_my_email() {
set -a
GIT_COMMITTER_EMAIL="$1"
GIT_AUTHOR_EMAIL="$1"
DEBEMAIL="$1"
export GIT_COMMITTER_EMAIL GIT_AUTHOR_EMAIL DEBEMAIL
set +a
}


@@ -1,21 +1,47 @@
#!/bin/zsh
dotfiles-update() {
"${ZSHU[d_zdot]}/.config/dotfiles/install.sh"
"${ZSHU[d_dotfiles]}/install.sh" "$@" || return $?
}
dotfiles-git() { (
cd "${ZSHU[d_zdot]}/"
export GIT_DIR="${ZSHU[d_zdot]}/.config/dotfiles/repo.git"
export GIT_WORK_TREE="${ZSHU[d_zdot]}"
zsh -i
set -a
GIT_DIR="${ZSHU[d_dotfiles]}/repo.git"
GIT_WORK_TREE="${ZSHU[d_zdot]}"
set +a
z-reload
) }
dotfiles-gen-gitignore() {
local x='.config/dotfiles/gen-gitignore.sh'
[ -x "$x" ] || {
echo "${x:t} is somewhere else" >&2
return 1
}
if [ -d .config/dotfiles/repo.git ] ; then
echo "NOT going to change dotfiles installation" >&2
return 2
fi
"$x" "$@"
}
z-zwc-gen() {
local i
for i ( "${ZSHU[d_conf]}"/**/*.zsh(N.r) ) ; do
zcompile -U "$i"
zcompile -UR "$i"
done
# for i ( "${ZSHU[d_completion]}"/*(N.r) ) ; do
# case "$i" in
# *.zwc )
# # likely a remnant
# rm -f "$i"
# continue
# ;;
# esac
# zcompile -UR "$i"
# mv -f "$i.zwc" "${ZSHU[d_compzwc]}/"
# done
}
z-zwc-flush() {
@@ -23,20 +49,23 @@ z-zwc-flush() {
}
z-update() {
dotfiles-update
z-zwc-flush
z-zwc-gen
dotfiles-update || return $?
z-cache-flush
}
z-reload() {
export ZDOTDIR="${ZSHU[d_zdot]}"
local r
exec -a "${ZSH_ARGZERO}" "${ZSH_NAME}" "${argv[@]}"
echo "unable to reload (something went wrong), code $?" 1>&2
return 1
r=$?
echo "unable to reload (something went wrong), code $r" >&2
return $r
}
## reload or new session are required to regenerate compcache
z-cache-flush() {
find "${ZSHU[d_cache]}/" "${ZSHU[d_compcache]}/" -xdev -type f '!' -name '.keep' -delete
find "${ZSHU[d_cache]}/" -xdev -type f '!' -name '.keep' -delete
find "${ZSHU[d_zdot]}/.config/zsh.dots/" -xdev -type f '!' -name '.zshenv' -delete
z-zwc-flush
z-zwc-gen
}


@@ -1,205 +0,0 @@
#!/bin/zsh
## inspired by 'https://starship.rs/install.sh' as of 2021-03-07
ZSHU[starship_baseurl]='https://github.com/starship/starship/releases'
## ZSHU[starship_target] is auto-detected
## ZSHU[starship_path] defaults to ZSHU[d_bin] which is in PATH already
# export STARSHIP_CONFIG="$HOME/.config/starship.toml"
# export STARSHIP_CACHE="$HOME/.cache/starship"
__z_starship_auto_path() {
echo "${ZSHU[starship_path]:-${ZSHU[d_bin]}}"
}
__z_starship() {
local x=$(__z_starship_auto_path)
x="$x/starship"
[ -x "$x" ] || x=starship
[ -x "$x" ] || return 127
"$x" "$@"
}
__z_starship_test() { __z_starship -V &>/dev/null ; }
## NB: supply TARGET environment variable to call
__z_starship_url_latest() {
printf '%s/latest/download/starship-%s.tar.gz' \
"${ZSHU[starship_baseurl]}" "${TARGET}"
}
## NB: supply TARGET environment variable to call
## $1 - version (semver like '0.50.0')
__z_starship_url_versioned() {
printf '%s/download/v%s/starship-%s.tar.gz' \
"${ZSHU[starship_baseurl]}" "$1" "${TARGET}"
}
## NB: install starship somewhere in PATH ;)
__z_starship_ver_installed() {
__z_starship -V 2>/dev/null \
| sed -En '/^starship v?(\S.+)$/{s//\1/;p;}'
}
## NB: supply TARGET environment variable to call
__z_starship_ver_latest() {
local x=$(__z_starship_url_latest)
local y=$(__z_curl_location "$x")
## hackish strip, e.g.:
## from: https://github.com/starship/starship/releases/download/v0.50.0/starship-x86_64-unknown-linux-musl.tar.gz
## to: v0.50.0
y=${y:h:t}
[ "${y:0:1}" = 'v' ] && y=${y:1}
echo "$y"
}
__z_starship_detect_arch() {
local arch=${ZSHU[mach]}
case "${arch}" in
x86_64) [ "$(getconf LONG_BIT)" -eq 32 ] && arch=i686 ;;
aarch64) [ "$(getconf LONG_BIT)" -eq 32 ] && arch=arm ;;
esac
echo "${arch}"
}
__z_starship_detect_platform() {
local platform=${ZSHU[uname]}
case "${ZSHU[uname]}" in
msys_nt*) platform=pc-windows-msvc ;;
cygwin_nt*) platform=pc-windows-msvc ;;
mingw*) platform=pc-windows-msvc ;;
linux) platform=unknown-linux-musl ;; ## static builds
darwin) platform=apple-darwin ;;
freebsd) platform=unknown-freebsd ;;
esac
echo "${platform}"
}
## $1 - arch
## $2 - platform
__z_starship_detect_target() {
local target="$1-$2"
case "${target}" in
arm-unknown-linux-musl) target="${target}eabihf" ;;
esac
echo "${target}"
}
__z_starship_auto_target() {
[ -n "${ZSHU[starship_target]}" ] && echo "${ZSHU[starship_target]}" && return
local arch=$(__z_starship_detect_arch)
local platform=$(__z_starship_detect_platform)
local target=$(__z_starship_detect_target "${arch}" "${platform}")
echo "${target}"
}
__z_starship_install() {
local ver=${1:-latest}
local target url resp
target=$(__z_starship_auto_target)
if [ "${ver}" = 'latest' ] ; then
url=$(TARGET=${target} __z_starship_url_latest)
resp=$(__z_curl_response "${url}")
resp=${resp:-400}
[ ${resp} -ge 400 ] && return 1
else
url=$(TARGET=${target} __z_starship_url_versioned "${ver}")
resp=$(__z_curl_response "${url}")
resp=${resp:-400}
if [ ${resp} -ge 400 ] ; then
## last resort: try messing with version ;D
if [ "${ver:0:1}" = 'v' ] ; then
ver=${ver:1}
else
ver="v${ver}"
fi
url=$(TARGET=${target} __z_starship_url_versioned "${ver}")
resp=$(__z_curl_response "${url}")
resp=${resp:-400}
[ ${resp} -ge 400 ] && return 1
fi
fi
local t=$(mktemp -d)
local f="$t/starship.tar.gz"
command curl -sqL "${url}" > "$f"
command tar -C "$t" -xf "$f" starship &>/dev/null
if [ $? -ne 0 ] ; then
## last resort
command tar -C "$t" --strip-components=1 --wildcards -xf "$f" '*/starship' &>/dev/null
if [ $? -ne 0 ] ; then
rm -rf "$t"
return 1
fi
fi
local d=$(__z_starship_auto_path)
mv "$t/starship" "$d/"
local r=$?
if [ $r -eq 0 ] ; then
[ "${ver:0:1}" = 'v' ] && ver=${ver:1}
echo "starship: installed ${ver} version in $d/" 1>&2
fi
rm -rf "$t"
return $r
}
z-starship-target-available() {
local target url resp
target=$(__z_starship_auto_target)
url=$(TARGET=${target} __z_starship_url_latest)
resp=$(__z_curl_response "${url}")
resp=${resp:-400}
if [ ${resp} -lt 400 ] ; then
echo "starship: available for ${target}" 1>&2
return 0
else
echo "starship: NOT available for ${target}" 1>&2
return 1
fi
}
z-starship-update-available() {
local target=$(__z_starship_auto_target)
local installed=$(__z_starship_ver_installed)
local latest=$(TARGET=${target} __z_starship_ver_latest)
if [ -z "${latest}" ] ; then
echo "starship: update is NOT available" 1>&2
return 1
fi
if [ -z "${installed}" ] ; then
echo "starship: NOT installed, install it 1st" 1>&2
return 0
fi
local tailver=$(printf '%s\n' "${installed}" "${latest}" | sort -Vu | tail -n 1)
if [ "${installed}" = "${tailver}" ] ; then
if [ "${installed}" = "${latest}" ] ; then
echo "starship: local version is up to date" 1>&2
else
echo "starship: local version is newer! o_O" 1>&2
fi
return 1
else
echo "starship: update is available (${installed} -> ${latest})" 1>&2
return 0
fi
}
z-starship-init() {
[ -n "${ZSHU[starship_init]}" ] && return
__z_starship_test || return
eval "$(__z_starship init zsh)"
ZSHU[starship_init]=1
}
z-starship-install() {
z-starship-target-available || return
__z_starship_install || \
echo "starship: unable to install" 1>&2
}
z-starship-update() {
z-starship-update-available || return 0
__z_starship_install || \
echo "starship: unable to update" 1>&2
}


@@ -0,0 +1,18 @@
#!/bin/zsh
z-systemctl() {
command systemctl --quiet --no-pager --lines=0 --no-ask-password "$@"
}
z-systemctl-status-rc() {
z-systemctl status "$@" >/dev/null 2>&1
}
z-systemctl-exists() {
z-systemctl-status-rc "$@"
case "$?" in
0 | 1 | 3 ) return 0 ;;
## also 4 = "no such unit"
* ) return 1 ;;
esac
}


@@ -1,5 +1,6 @@
#!/bin/zsh
typeset -gA ZSHU_TERM
typeset -Uga ZSHU_TERM_MISSING
z-ti-test() {
@@ -16,3 +17,136 @@ z-ti-test() {
return $r
}
case "${TERM}" in
xterm* | putty* | rxvt* | konsole* | mlterm* | alacritty* | foot* | contour* )
ZSHU_TERM[has_title_tab]=1
ZSHU_TERM[has_title_wnd]=1
ZSHU_TERM[want_cwd]=1
ZSHU_TERM[title_tab]=term
ZSHU_TERM[title_wnd]=term
;;
st* | wezterm* )
ZSHU_TERM[has_title_tab]=1
ZSHU_TERM[has_title_wnd]=1
ZSHU_TERM[title_tab]=term
ZSHU_TERM[title_wnd]=term
;;
cygwin | ansi )
ZSHU_TERM[has_title_tab]=1
ZSHU_TERM[has_title_wnd]=1
ZSHU_TERM[title_tab]=term
ZSHU_TERM[title_wnd]=term
;;
screen* | tmux* )
ZSHU_TERM[has_title_tab]=1
ZSHU_TERM[want_cwd]=1
ZSHU_TERM[title_tab]=mux
;;
* )
if z-ti-test fsl tsl ; then
ZSHU_TERM[has_title_tab]=1
ZSHU_TERM[want_cwd]=1
ZSHU_TERM[title_tab]=terminfo
fi
;;
esac
z-term-title-tab() {
# [ "${ZSHU_TERM[has_title_tab]}" = 1 ] || return 1
case "${ZSHU_TERM[title_tab]:-}" in
term )
print -Pn "\e]1;${1:q}\a"
;;
mux )
## screen/tmux: hardstatus
print -Pn "\ek${1:q}\e\\"
;;
terminfo )
echoti tsl
print -Pn "$1"
echoti fsl
;;
esac
}
z-term-title-window() {
# [ "${ZSHU_TERM[has_title_wnd]}" = 1 ] || return 1
case "${ZSHU_TERM[title_wnd]:-}" in
term )
print -Pn "\e]2;${1:q}\a"
;;
esac
}
z-term-title() {
## if $2 is unset use $1 as default
## if it is set and empty, leave it as is
: ${2=$1}
z-term-title-tab "$1"
z-term-title-window "$2"
}
z-term-cwd() {
[ "${ZSHU_TERM[want_cwd]}" = 1 ] || return 1
local host path
host=${HOST:-localhost}
path=${PWD}
## Konsole doesn't want ${host}
while : ; do
[ -n "${KONSOLE_DBUS_SERVICE}" ] || break
[ -n "${KONSOLE_DBUS_SESSION}" ] || break
[ -n "${KONSOLE_DBUS_WINDOW}" ] || break
[ -n "${KONSOLE_PROFILE_NAME}" ] || break
host=
break ; done
printf "\e]7;file://%s%s\e\\" "${host}" "${path}"
}
if autoload -Uz add-zsh-hook ; then
ZSHU[term_title]=1
z-term-title-enable() { ZSHU[term_title]=1 ; }
z-term-title-disable() { ZSHU[term_title]=0 ; }
ZSHU[title_tab]='%15<..<%~%<<'
ZSHU[title_wnd]='%n@%m:%~'
__z_term_title_precmd() {
[ "${ZSHU[term_title]}" = 1 ] || return
z-term-title "${ZSHU[title_tab]}" "${ZSHU[title_wnd]}"
}
add-zsh-hook precmd __z_term_title_precmd
ZSHU[term_cwd]=1
z-term-cwd-enable() { ZSHU[term_cwd]=1 ; }
z-term-cwd-disable() { ZSHU[term_cwd]=0 ; }
__z_term_cwd_precmd() {
[ "${ZSHU[term_cwd]}" = 1 ] || return
z-term-cwd
}
## "chpwd" doesn't always hook pwd changes
add-zsh-hook precmd __z_term_cwd_precmd
else
echo "current working directory and tab/window title handling is disabled due to missing hook support" >&2
z-term-title-enable() { __z_unsupported ; }
z-term-title-disable() { __z_unsupported ; }
z-term-cwd-enable() { __z_unsupported ; }
z-term-cwd-disable() { __z_unsupported ; }
fi


@@ -1,58 +0,0 @@
#!/bin/zsh
ZSHU[title_tab]='%15<..<%~%<<'
ZSHU[title_window]='%n@%m:%~'
z-title-tab() {
emulate -L zsh
case "${TERM}" in
cygwin | xterm* | putty* | rxvt* | konsole* | ansi | mlterm* | alacritty | st* )
print -Pn "\e]1;${1:q}\a"
;;
screen* | tmux* )
## hardstatus
print -Pn "\ek${1:q}\e\\"
;;
* )
z-ti-test fsl tsl || return
echoti tsl
print -Pn "$1"
echoti fsl
;;
esac
}
z-title-window() {
emulate -L zsh
case "${TERM}" in
cygwin | xterm* | putty* | rxvt* | konsole* | ansi | mlterm* | alacritty | st* )
print -Pn "\e]2;${1:q}\a"
;;
esac
}
z-title() {
emulate -L zsh
## if $2 is unset use $1 as default
## if it is set and empty, leave it as is
: ${2=$1}
z-title-tab "$1"
z-title-window "$2"
}
if autoload -Uz add-zsh-hook ; then
__z_title_precmd() {
z-title "${ZSHU[title_tab]}" "${ZSHU[title_window]}"
}
add-zsh-hook precmd __z_title_precmd
else
echo "tab/window title handling is disabled due to missing hook support" 1>&2
fi


@@ -2,9 +2,3 @@
## skip system-wide podman completion and produce own
ZSHU_COMP_FORCE[podman]=1
## systemctl status podman.socket
export DOCKER_HOST=unix:///run/podman/podman.sock
## systemctl --user status podman.socket
export DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/podman/podman.sock



@@ -0,0 +1,11 @@
#!/bin/zsh
z-ssh-agent
z-gpg-agent
z-adjust-docker
## systemctl status podman.socket
export DOCKER_HOST=unix:///run/podman/podman.sock
## systemctl --user status podman.socket
export DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/podman/podman.sock


@@ -1,13 +1,6 @@
#!/bin/zsh
unsetopt err_exit
unsetopt err_return
unsetopt multios
unsetopt err_exit err_return multios
setopt bsd_echo
setopt interactive_comments
setopt long_list_jobs
setopt monitor
setopt prompt_subst
setopt zle
setopt bsd_echo interactive_comments long_list_jobs monitor prompt_subst zle
# setopt magic_equal_subst


@@ -1,5 +1,5 @@
#!/bin/zsh
chase() { setopt chase_dots chase_links ; }
nochase() { unsetopt chase_dots chase_links ; }
nochase
z-chase() { setopt chase_dots chase_links ; }
z-nochase() { unsetopt chase_dots chase_links ; }
z-nochase


@@ -1,8 +1,5 @@
#!/bin/zsh
-unsetopt flow_control
-unsetopt menu_complete
+unsetopt flow_control menu_complete
-setopt always_to_end
-setopt auto_menu
-setopt complete_in_word
+setopt always_to_end auto_menu complete_aliases complete_in_word

@@ -1,7 +1,3 @@
#!/bin/zsh
-setopt auto_cd
-setopt auto_pushd
-setopt cdable_vars
-setopt pushd_ignore_dups
-setopt pushd_minus
+setopt auto_cd auto_pushd cdable_vars pushd_ignore_dups pushd_minus

@@ -1,11 +1,3 @@
#!/bin/zsh
-setopt append_history
-setopt extended_history
-setopt hist_expire_dups_first
-setopt hist_ignore_all_dups
-setopt hist_ignore_dups
-setopt hist_ignore_space
-setopt hist_verify
-setopt inc_append_history
-setopt share_history
+setopt append_history extended_history hist_expire_dups_first hist_ignore_all_dups hist_ignore_dups hist_ignore_space hist_verify inc_append_history share_history

@@ -1,25 +1,50 @@
#!/bin/zsh
typeset -Ua zshu_modules
zshu_modules+=(
typeset -a zshu_modules
## DEBUG module load order
# typeset -a zshu_m0 zshu_m1
zshu_modules=(
clone
langinfo
parameter
sched
termcap
terminfo
watch
zpty
zle
zleparameter
deltochar
complete
complist
computil
datetime
langinfo
main
mathfunc
parameter
stat
system
terminfo
zle
zutil
compctl
)
for i ( ${zshu_modules} ) ; do
[[ "$i" != */* ]] && i="zsh/$i"
zmodload -i $i
done ; unset i
unset zshu_modules
i="zsh/$i"
## DEBUG module load order
# zshu_m0=( $(zmodload) )
# if ((${zshu_m0[(Ie)${i}]})); then
# echo "# already loaded: $i" >&2
# continue
# fi
zmodload "$i"
## DEBUG module load order
# zshu_m1=( $(zmodload) )
# for k ( ${zshu_m1} ) ; do
# if [ "$k" = "$i" ] ; then continue ; fi
# if ((${zshu_m0[(Ie)${k}]})); then
# continue
# fi
# echo "# new module loaded (with $i): $k" >&2
# done
done
unset i zshu_modules
## DEBUG module load order
# unset zshu_m0 zshu_m1
autoload -Uz +X colors && colors
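As an aside to the commented-out bookkeeping above: when only a yes/no answer is needed, zmodload -e tests whether a module is already loaded (illustrative, not part of the diff):
if zmodload -e zsh/stat ; then
  print -u2 "# already loaded: zsh/stat"
else
  zmodload zsh/stat
fi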

@@ -19,21 +19,22 @@ zstyle ':completion:*:cd:*' tag-order local-directories directory-stack path-dir
zstyle ':completion:*:*:*:users' ignored-patterns adm amanda apache at avahi avahi-autoipd backup beaglidx bin bind cacti canna clamav colord daemon dbus distcache dnsmasq dovecot fax ftp games gdm gkrellmd gnats gopher hacluster haldaemon halt hplip hsqldb ident irc junkbust kdm ldap list lp mail mailman mailnull man messagebus mldonkey mysql nagios named netdump news nfsnobody nginx nobody nscd ntp ntpsec nut nx obsrun openvpn operator pcap polkitd postfix postgres privoxy proxy pulse pvm quagga radvd redsocks rpc rpcuser rpm rtkit saned sbuild sbws scard sddm shutdown speech-dispatcher squid sshd statd svn sync sys tcpdump tftp tss usbmux uucp uuidd vcsa wwwrun www-data x2gouser xfs '_*' 'systemd-*' 'debian-*' 'Debian-*'
zstyle '*' single-ignored show
-zstyle ':completion:*:*:*:*:processes' command "ps -u $USER -o pid,user,comm -w -w"
+zstyle ':completion:*:*:*:*:processes' command "ps -u ${USER} -o pid,user,comm -w -w"
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#) ([0-9a-z-]#)*=01;34=0=01'
-zstyle ':completion:*:kill:*' command 'ps -u $USER -o pid,%cpu,tty,cputime,cmd'
-if autoload -Uz +X bashcompinit ; then
-bashcompinit && ZSHU[compdump_bash]=1
-fi
+zstyle ':completion:*:kill:*' command 'ps -u ${USER} -o pid,%cpu,tty,cputime,cmd'
autoload -Uz +X compinit && \
compinit -i -C -d "${ZSHU[f_compdump]}"
-for i ( ${ZSHU[d_conf]}/completion/*.zsh(N.r) ) ; do
+unset 'ZSHU[compdump_bash]'
+if autoload -Uz +X bashcompinit ; then
+bashcompinit && ZSHU[compdump_bash]=1
+fi
+for i ( "${ZSHU[d_conf]}"/completion/*.zsh(N.r) ) ; do
source "$i"
done ; unset i
-for i ( ${ZSHU[d_conf]}/local/completion/*.zsh(N.r) ) ; do
+for i ( "${ZSHU[d_conf]}"/local/completion/*.zsh(N.r) ) ; do
source "$i"
done ; unset i
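Anything dropped into the directories scanned above is simply sourced; a hypothetical ${ZSHU[d_conf]}/local/completion/mytool.zsh (the tool names are made up) could look like:
## zsh-native: generic --long-option completion for a command lacking a dedicated function
compdef _gnu_generic mytool
## bash-style registration also works here, provided bashcompinit loaded above
complete -W 'start stop status' mytool-ctl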

@@ -0,0 +1,38 @@
#!/bin/zsh
z-gpg-agent() {
## don't bother with the gpg agent socket if it is already set
[ -z "${GPG_AGENT_INFO}" ] || return 0
(( ${+commands[gpg-agent]} )) || return 127
if (( ${+commands[systemctl]} )) ; then
local u s
for u in gpg-agent.{service,socket} ; do
s=$(z-systemctl --user is-enabled $u)
case "$s" in
disabled ) ;;
* ) continue ;;
esac
z-systemctl --user --now enable $u
done
fi
(( ${+commands[gpgconf]} )) || return 127
local agent_sock
agent_sock=$(command gpgconf --list-dirs agent-socket) || return $?
[ -n "${agent_sock}" ] || return 3
export GPG_AGENT_INFO="${agent_sock}:0:1"
## don't bother with the ssh agent socket if it is already set
[ -z "${SSH_AUTH_SOCK}" ] || return 0
local want_ssh_agent ssh_auth_sock
want_ssh_agent=$(z-gpgconf-getopt gpg-agent enable-ssh-support)
if [ "${want_ssh_agent}" = 1 ] ; then
ssh_auth_sock=$(command gpgconf --list-dirs agent-ssh-socket) || return $?
[ -n "${ssh_auth_sock}" ] || return 5
export SSH_AUTH_SOCK="${ssh_auth_sock}"
fi
}
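A usage sketch for the function above: call it once from an interactive startup file, then verify the exported sockets with stock gnupg/openssh tools (the checks below are illustrative, not part of the diff):
z-gpg-agent || print -u2 "z-gpg-agent returned $?"
## the agent socket should now answer
gpg-connect-agent /bye
## if enable-ssh-support is on, the ssh agent socket is exported too
[ -n "${SSH_AUTH_SOCK}" ] && ssh-add -l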
