Compare commits
No commits in common. "master" and "master" have entirely different histories.
README.md
@@ -1,65 +0,0 @@
Spark autobuilder
=================

To use the build scripts, grab the latest Spark rootfs in one of the following ways:

```
curl -sL https://spark.fleshless.org/rootfs/latest.tar.xz | podman import - localhost/spark
# or
git clone https://code.fleshless.org/Spark/spark-rootfs.git; cd spark-rootfs; podman build --rm -t localhost/spark .
```

Then build a builder image from `./autobuilder`:

```
cd ./autobuilder; podman build --rm -t localhost/spark:autobuilder .
```

Example for Arch images:

```
cd autobuilder
podman build --build-arg BASEIMAGE=docker.io/archlinux:base-devel --rm -t localhost/arch:autobuilder .

PODMAN_IMAGE=localhost/arch:autobuilder makepkg-ci /tmp/pkg https://code.fleshless.org/PKGBUILDs/ssm-git
```

makepkg-podman
--------------

Builds Arch packages using `podman`. Takes the image name from the `PODMAN_IMAGE` env var.

Usage:
```
[PODMAN_IMAGE=<image>] makepkg-podman [makepkg_args]
```

Example:
```
git clone https://code.fleshless.org/PKGBUILDs/ssm-git /tmp/ssm-git
cd /tmp/ssm-git
makepkg-podman -sL
```
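The image can also be chosen per invocation via `PODMAN_IMAGE`; for instance, reusing the `localhost/arch:autobuilder` image from the Arch example above:

```
PODMAN_IMAGE=localhost/arch:autobuilder makepkg-podman -sL
```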
makepkg-ci
----------

Builds Arch packages from a git repo using `makepkg-podman` and puts the resulting packages into `dest`.

Usage:
```
[PODMAN_IMAGE=<image>] makepkg-ci <dest> <url>
```

Example:
```
makepkg-ci /tmp/artifacts https://code.fleshless.org/PKGBUILDs/ssm-git
```

pkgbuilder
----------

Tracks the given git repos for changes and builds packages on updates.
`pkgbuilder.conf` goes into `$XDG_CONFIG_HOME/pkgbuilder/config`.

Usage:
```
pkgbuilder
```
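The config is a plain bash snippet; a minimal sketch, following the example config shipped in this repository (only the `spark` destination and a single repo shown):

```
spark_repos="$HOME/public"

pkg_dests=(
	[spark]="$spark_repos/spark/import"
)

repos=(
	spark "https://code.fleshless.org/pkgbuilds/sinit-spark"
)
```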
@@ -1,11 +0,0 @@
ARG BASEIMAGE=localhost/spark
FROM $BASEIMAGE

COPY ./mirrorlist /etc/pacman.d/mirrorlist
COPY gnupg /.gnupg
COPY ./build /build

RUN pacman -Sy --noconfirm archlinux-keyring
RUN pacman -Suy --noconfirm base-devel nodejs git

RUN chmod 700 /.gnupg
@@ -1,9 +0,0 @@
autobuilder image
=================

For some packages you might need to import PGP keys into the image. Use the `./gnupg` directory to do so, then rebuild the image:

```
GNUPGHOME=./gnupg gpg --recv-keys <keyid>
podman build --rm -t localhost/spark:autobuilder .
```
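If you want to confirm which keys ended up in the keyring before rebuilding, something like the following should work (gpg reads the same `./gnupg` directory via `GNUPGHOME`):

```
GNUPGHOME=./gnupg gpg --list-keys
```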
@@ -1,35 +0,0 @@
#!/usr/bin/env bash
# This script is copied into the autobuilder image as /build (see the Dockerfile)
# and executed inside the container by makepkg-podman.
shopt -s nullglob

cleanup() {
	chown -R root:root /buildroot
}

# Upgrade
pacman -Suy --noconfirm

# Create a builder user
useradd -U builder
builder_uid=$(id -u builder)
builder_gid=$(id -g builder)

# dir permissions
chown -R "$builder_uid:$builder_gid" /.gnupg /buildroot

# Return the permissions on exit
trap cleanup INT TERM EXIT

# Add sudo permissions for pacman to install dependencies
printf '%s ALL=(ALL) NOPASSWD: /usr/bin/pacman\n' 'builder' > /etc/sudoers.d/builduser

# Workdir
cd /buildroot

# Install all additional packages
deps=( ./deps/* )
(( ${#deps[@]} )) && pacman -U "${deps[@]}"

# Build the damn thing
sudo -u builder GNUPGHOME="/.gnupg" makepkg "$@"

chown -R root:root /buildroot/
@@ -1,3 +0,0 @@
#!/usr/bin/env bash

podman build . -t 'spark:autobuilder'
@@ -1,7 +0,0 @@
# Fleshless
Server = https://mirror.fleshless.org/arch/$repo/os/$arch

# Backup
Server = https://mirror.pkgbuild.com/$repo/os/$arch
Server = https://mirror.pseudoform.org/$repo/os/$arch
Server = https://ftp.halifax.rwth-aachen.de/archlinux/$repo/os/$arch
lxf (Executable file)
@@ -0,0 +1,70 @@
#!/usr/bin/env bash
# Usage: lxf [-i <include>]... [-r <rootfs>] [--cont-dir <dir>] [--conf-dir <dir>] <action> <container_name>
# Actions: create|new (requires -r), mount, umount

mkconf() {
	for i in "${includes[@]}"; do
		echo "lxc.include = $lxf_conf_dir/$i.conf"
	done

	echo "lxc.rootfs.path = $lxf_cont_dir/$cont_name/rootfs"
	echo "lxc.uts.name = $cont_name"
}

mount_cont() {
	ov-mount -n "$cont_name" "$rootfs" "$cont_dir/rootfs"
}

lxf_conf_dir='/etc/lxf/conf'
lxf_cont_dir='/var/lib/lxf/cont'
lxf_rootfs_dir='/var/lib/lxf/fs'

[[ -f '/etc/lxf.conf' ]] && source '/etc/lxf.conf'

while (( $# )); do
	case $1 in
		(-i) includes+=( "$2" ); shift;;
		(-r) rootfs="$2"; shift;;

		(--cont-dir) lxf_cont_dir=$2; shift;;
		(--conf-dir) lxf_conf_dir=$2; shift;;

		(--) shift; break;;
		(-*) echo "Unknown key: $1" >&2; exit 1;;
		(*) break;;
	esac

	shift
done

action=$1; shift; [[ $action ]] || exit 1
cont_name=$1; shift; [[ $cont_name ]] || exit 1

cont_dir="$lxf_cont_dir/$cont_name"

case $action in
	(create|new)
		[[ "$rootfs" ]] || exit 1
		rootfs="$lxf_rootfs_dir/$rootfs"

		[[ -d "$cont_dir" ]] && {
			printf 'Container already exists: %s\n' "$cont_dir" >&2
			exit 1
		}

		mkdir -p "$cont_dir" || exit $?

		mkconf > "$cont_dir/config"
		mount_cont
	;;

	(mount)
		mountpoint -q "$cont_dir/rootfs" && {
			printf 'Container already mounted: %s\n' "$cont_dir/rootfs" >&2
			exit 1
		}

		mount_cont
	;;

	(umount) umount "$cont_dir/rootfs";;
esac
makepkg-ci
@@ -1,32 +0,0 @@
#!/usr/bin/env bash
shopt -s nullglob

cleanup() { rm -rf "$tmpdir"; }
trap 'cleanup' EXIT

did="pkg-build-$$.$SRANDOM"
tmpdir="/tmp/$did"
mkdir "$tmpdir"

pkg_dest=$1
pkg_url=$2

mkdir -p "$pkg_dest" || exit 1

git clone "$pkg_url" "$tmpdir"
cd "$tmpdir" || exit 1
makepkg-podman --noconfirm -sL

artifacts=( "$tmpdir/"*.pkg.* )
for i in "${artifacts[@]}"; do
	i_name="${i##*/}"

	printf 'Found artifact: %s\n' "$i_name"

	if [[ -f "$pkg_dest/$i_name" ]]; then
		printf '%s/%s already exists, not overwriting.\n' "$pkg_dest" "$i_name"
	else
		printf 'Copying %s to %s.\n' "$i_name" "$pkg_dest"
		cp -vn "$i" "$pkg_dest"
	fi
done
makepkg-overlay (Executable file)
@@ -0,0 +1,68 @@
#!/usr/bin/env bash
# Depends on lxf
shopt -s nullglob

cleanup() { lxf umount "$cnt"; }

buildscript() {
	cat <<- EOF
		#!/usr/bin/env bash
		# The builder user is already created in the rootfs

		export PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
		export LC_ALL=en_US.UTF-8

		# Network
		dhcpcd eth0

		# Upgrade
		pacman -Suy --noconfirm

		# Build dir
		mkdir -m777 /buildroot

		# Build the damn thing
		cd /buildroot
		sudo -u builder git clone "$pkg_url" .
		sudo -u builder makepkg -s --noconfirm -L
	EOF
}

# Config
wrk_dir='/home/lxc'

# Parameters
pkg_url=$1
pkg_dest=$2
cnt="_makepkg.$$"
cnt_dir="$wrk_dir/containers/$cnt"

# Create new container
lxf -r builder -i base new "$cnt" || exit $?

# Unmount the thing in any case
trap 'cleanup' INT TERM EXIT

# Add the build script
buildscript > "$cnt_dir/rootfs/init"
chmod 755 "$cnt_dir/rootfs/init"

# Start the container
lxc-start -n "$cnt" -F /init || exit $?

# Put the artifacts where asked to
[[ "$pkg_dest" ]] && {
	artifacts=( "$cnt_dir/rootfs/buildroot/"*.pkg.* )

	for i in "${artifacts[@]}"; do
		i_name="${i##*/}"

		printf 'Found artifact: %s\n' "$i_name"

		if [[ -f "$pkg_dest/$i_name" ]]; then
			echo "$pkg_dest/$i_name already exists, not overwriting."
		else
			cp -vn "$i" "$pkg_dest"
		fi
	done
}
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

image=${PODMAN_IMAGE:-localhost/spark:autobuilder}

podman_cmd=(
	podman run --rm -it -u root
	-v "$PWD:/buildroot"
	--workdir /buildroot
	--name "autobuilder.$$"
	--env "BUILDUSER=$USER"
	"$image" /build "$@"
)

"${podman_cmd[@]}"
pkgbuilder
@@ -2,7 +2,7 @@
 
 err() { printf '%s\n' "$*" >&2; }
 
-build_remote() { makepkg-ci "$@"; }
+build_remote() { sudo makepkg-overlay "$@"; }
 cleanup() { rm -f "$lockfile"; }
 
 lock() {
@@ -21,8 +21,57 @@ lockfile="$HOME/.cache/pkgbuilder/.lock"
 lock || exit 1
 trap cleanup EXIT
 
+spark_repos="$HOME/public"
+
 declare -A pkg_dests
-source "$HOME/.config/pkgbuilder/config"
+pkg_dests=(
+	[spark]="$spark_repos/spark/import"
+	[spark-extra]="$spark_repos/spark-extra/import"
+	[spark-testing]="$spark_repos/spark-testing/import"
+	[spark-updates]="$spark_repos/spark-updates/spark/import"
+)
+
+repos=(
+	spark "https://code.fleshless.org/pkgbuilds/sinit-spark"
+	spark "https://code.fleshless.org/pkgbuilds/sinit-sysvcompat"
+	spark "https://code.fleshless.org/pkgbuilds/smdev"
+	spark "https://code.fleshless.org/pkgbuilds/spark-etc"
+	spark "https://code.fleshless.org/pkgbuilds/spark-rc"
+	spark "https://code.fleshless.org/pkgbuilds/ssm"
+	spark "https://code.fleshless.org/pkgbuilds/ssm-services-git"
+	spark "https://code.fleshless.org/pkgbuilds/systemd-dummy"
+	spark "https://code.fleshless.org/pkgbuilds/ubase-extras"
+	spark "https://code.fleshless.org/pkgbuilds/udev-dummy"
+	spark "https://code.fleshless.org/pkgbuilds/xorg-noudev-conf"
+	spark-extra "https://code.fleshless.org/pkgbuilds/anope-services"
+	spark-extra "https://code.fleshless.org/pkgbuilds/chef-client"
+	spark-extra "https://code.fleshless.org/pkgbuilds/dmenu-q-xywh-xft-git"
+	spark-extra "https://code.fleshless.org/pkgbuilds/dxvk-bin"
+	spark-extra "https://code.fleshless.org/pkgbuilds/eudev"
+	spark-extra "https://code.fleshless.org/pkgbuilds/fake"
+	spark-extra "https://code.fleshless.org/pkgbuilds/graphicsmagick-webp"
+	spark-extra "https://code.fleshless.org/pkgbuilds/ircd-ratbox"
+	spark-extra "https://code.fleshless.org/pkgbuilds/lemonbar"
+	spark-extra "https://code.fleshless.org/pkgbuilds/libsystemd-standalone"
+	spark-extra "https://code.fleshless.org/pkgbuilds/loksh"
+	spark-extra "https://code.fleshless.org/pkgbuilds/mlmmj"
+	spark-extra "https://code.fleshless.org/pkgbuilds/nginx-mod-rtmp"
+	spark-extra "https://code.fleshless.org/pkgbuilds/posh"
+	spark-extra "https://code.fleshless.org/pkgbuilds/prm"
+	spark-extra "https://code.fleshless.org/pkgbuilds/qstat"
+	spark-extra "https://code.fleshless.org/pkgbuilds/scron"
+	spark-extra "https://code.fleshless.org/pkgbuilds/sx-utils"
+	spark-extra "https://code.fleshless.org/pkgbuilds/systemd-libs-systemd"
+	spark-extra "https://code.fleshless.org/pkgbuilds/ufw-tools"
+	spark-extra "https://code.fleshless.org/pkgbuilds/xkb-layout-ducky-mini"
+	spark-extra "https://code.fleshless.org/pkgbuilds/xsct-git"
+	spark-testing "https://code.fleshless.org/pkgbuilds/compton-git"
+	spark-testing "https://code.fleshless.org/pkgbuilds/fskit-git"
+	spark-testing "https://code.fleshless.org/pkgbuilds/ssm-git"
+	spark-testing "https://code.fleshless.org/pkgbuilds/xtitle-git"
+)
+
+
 
 wrkdir="$HOME/.cache/pkgbuilder/repositories"
 mkdir -p "$wrkdir"
@@ -44,11 +93,11 @@ while (($#>1)); do
 		mkdir -p "$wrkdir/${repo_path%$repo_name}"
 		git clone "$pkg_repo" "$repo_local"
 
-		build_remote "$pkg_dest" "$pkg_repo"
+		build_remote "$pkg_repo" "$pkg_dest"
 	else
 		git -C "$repo_local" remote update
 		if git -C "$repo_local" status --porcelain -bu | grep -q behind; then
-			build_remote "$pkg_dest" "$pkg_repo"
+			build_remote "$pkg_repo" "$pkg_dest"
 			git -C "$repo_local" pull
 		fi
 	fi
@@ -1,16 +0,0 @@
# Example config for pkgbuilder

spark_repos="$HOME/public"

pkg_dests=(
	[spark]="$spark_repos/spark/import"
	[spark-extra]="$spark_repos/spark-extra/import"
	[spark-testing]="$spark_repos/spark-testing/import"
	[spark-updates]="$spark_repos/spark-updates/spark/import"
)

repos=(
	spark "https://code.fleshless.org/pkgbuilds/sinit-spark"
	spark-extra "https://code.fleshless.org/pkgbuilds/anope-services"
	spark-testing "https://code.fleshless.org/pkgbuilds/ssm-git"
)
repo-clean (Executable file)
@@ -0,0 +1,83 @@
#!/usr/bin/bash
shopt -s nullglob

parse_pkg_filename() {
	declare filename=$1
	declare -a filename_array

	IFS='-' read -r -a filename_array <<< "$filename"

	# Print the dash-separated fields in reverse order:
	# <arch>.pkg.tar.* <pkgrel> <pkgver> <name parts...>
	for (( i=${#filename_array[@]}-1; i>=0; i-- )); do
		printf '%s ' "${filename_array[$i]}"
	done
}

is_latest() {
	declare filename=$1 p

	for p in "${latest[@]}"; do
		if [[ "$filename" == "$p" ]]; then
			return 0
		fi
	done

	return 1
}

_pacsort() {
	declare i

	for i in *.pkg.tar.xz; do
		printf '%s\n' "$i"
	done | pacsort -f
}

while (( $# )); do
	case "$1" in
		-r) flag_rm=1;;
		-d) flag_debug=1;;
	esac
	shift
done

if ! (( flag_rm )); then
	if ! [[ -d old ]]; then
		mkdir -p old || exit 1
	fi
fi

read -d '' -r -a package_files < <( _pacsort )

# Map each package name to its newest filename (pacsort output is ascending, so later entries win)
declare -A latest

for f in "${package_files[@]}"; do
	unset revision version end package_name

	read -r end revision version _ < <(parse_pkg_filename "$f")
	package_name=${f%%-$version-$revision*}

	latest["$package_name"]="$package_name-$version-$revision-$end"
done

for f in "${package_files[@]}"; do
	if is_latest "$f"; then
		(( flag_debug )) && printf '%s is the latest version available\n' "$f"
	else
		printf 'Removing: %s\n' "$f"

		if (( flag_rm )); then
			rm -f "$f" "${f}.sig"
		else
			mv "$f" old/

			if [[ -f "${f}.sig" ]]; then
				mv "${f}.sig" old/
			fi
		fi
	fi
done

if [[ -d old ]]; then
	cd old
	exec bash "$0" -r
fi
repo-maintenance (Executable file)
@@ -0,0 +1,48 @@
#!/usr/bin/env bash
shopt -s nullglob

lockfile='.maintenance.lock'

usage() { echo "$0 <repo_name>"; }

spark_repo=$1
[[ "$spark_repo" ]] || {
	usage
	exit 1
}

cleanup() { rm -f "$lockfile"; }

lock() {
	[[ -f "$lockfile" ]] && {
		printf 'Lockfile found: %s\n' "$lockfile" >&2
		return 1
	}

	echo $$ > "$lockfile"
	lock_pid=$(<"$lockfile")
	[[ $$ == "$lock_pid" ]] || return 1
}

cd "$HOME/public/$spark_repo" || exit $?

lock || exit 1

trap cleanup EXIT

for f in import/*; do
	f_name="${f##*/}"
	if [[ -f "$f_name" ]]; then
		printf 'Package already exists: %s, not overwriting\n' "$f_name" >&2
	else
		mv -vf "$f" ./
		new_pkgs+=( "$f_name" )
	fi
done

repo-sign
repo-clean

for p in "${new_pkgs[@]}"; do
	repo-add -n "$spark_repo.db.tar.gz" "$p"
done
repo-sign (Executable file)
@@ -0,0 +1,15 @@
#!/usr/bin/bash
shopt -s nullglob

# Collect packages that do not have a detached signature yet (skipping .sig files themselves)
for i in *.pkg.*; do
	if ! [[ "$i" == "${i%.*}.sig" ]]; then
		if ! [[ -f "${i}.sig" ]]; then
			unsigned_pkgs+=( "$i" )
		fi
	fi
done

for i in "${unsigned_pkgs[@]}"; do
	echo "$i"
	gpg --pinentry-mode loopback --passphrase-fd 0 --batch --no-tty --detach-sign --no-armor "$i" < ~/.gnupg/passphrase
done
syntax (Executable file)
@@ -0,0 +1,49 @@
#!/usr/bin/env bash
# cgit source filter: guess a syntax type from the file name, extension or
# shebang, then pipe the file through highlight(1); markdown goes through cmark.

_highlight() {
	declare -n syn=$1

	if [[ "$syn" ]]; then
		exec highlight --force --inline-css -f -I -O xhtml -S "$syn" 2>/dev/null
	else
		# echo " :cgit: No syntax type provided."
		exec cat
	fi
}

file_name=$1
file_extension="${file_name##*.}"

if ! [[ "$file_name" == "$file_extension" ]]; then
	case "$file_extension" in
		(md) exec cmark;;
		(bash|zsh) syntax='sh';;
		(*) syntax=$file_extension;;
	esac
fi

case "$file_name" in
	(Makefile) syntax='makefile';;
	(PKGBUILD) syntax='sh';;
esac

# Read and output the first line
read -r

# Set syntax if the first line is a shebang
if [[ "$REPLY" =~ ^'#!' ]]; then
	case "$REPLY" in
		(*sh) syntax='sh';;
		(*ruby*) syntax='ruby';;
	esac
fi

{
	# Print the first line
	printf '%s\n' "$REPLY"

	# Read and output everything else
	while read -r; do
		printf '%s\n' "$REPLY"
	done
} | _highlight syntax