Compare commits

...

15 Commits

Author     SHA1        Message                            Date                         Checks
Your Name  9d4e5f76ce  'Generic Commit'                   2025-06-15 21:37:21 +12:00   Dropshell Test / Build_and_Test (push): Failing after 3m12s
Your Name  366f5c2d0e  'Generic Commit'                   2025-06-02 00:44:23 +12:00   Dropshell Test / Build_and_Test (push): Failing after 2m57s
Your Name  0b0f3df59c  'Generic Commit'                   2025-06-01 23:38:58 +12:00   Dropshell Test / Build_and_Test (push): Failing after 3m20s
Your Name  f48302c05e  'Generic Commit'                   2025-06-01 23:37:34 +12:00   Dropshell Test / Build_and_Test (push): Has been cancelled
Your Name  7f341699c1  'Generic Commit'                   2025-06-01 23:34:31 +12:00   Dropshell Test / Build_and_Test (push): Failing after 2m52s
Your Name  18c53acd71  'Generic Commit'                   2025-06-01 23:16:42 +12:00   Dropshell Test / Build_and_Test (push): Failing after 3m21s
Your Name  eb632c010c  'Generic Commit'                   2025-06-01 23:06:14 +12:00   Dropshell Test / Build_and_Test (push): Failing after 3m27s
Your Name  964e8598b1  'Generic Commit'                   2025-06-01 18:33:37 +12:00   Dropshell Test / Build_and_Test (push): Failing after 3m27s
Your Name  a5cf9313e9  dropshell release 2025.0601.1821   2025-06-01 18:21:32 +12:00   Dropshell Test / Build_and_Test (push): Has been cancelled
Your Name  ab73a47751  dropshell release 2025.0601.1754   2025-06-01 17:54:13 +12:00   Dropshell Test / Build_and_Test (push): Failing after 3m31s
Your Name  1da7dc7951  dropshell release 2025.0601.1752   2025-06-01 17:53:06 +12:00   Dropshell Test / Build_and_Test (push): Failing after 16m46s
Your Name  49d61f0da0  'Generic Commit'                   2025-06-01 15:57:52 +12:00   Dropshell Test / Build_and_Test (push): Failing after 16m26s
Your Name  27c0abcb9f  'Generic Commit'                   2025-06-01 15:38:57 +12:00   Dropshell Test / Build_and_Test (push): Has been cancelled
Your Name  483ee4e3ef  :-'Generic Commit'                 2025-05-30 00:14:24 +12:00   Dropshell Test / Build_and_Test (push): Failing after 13m16s
Your Name  f7294e01e4  :-'Generic Commit'                 2025-05-28 20:40:24 +12:00   Dropshell Test / Build_and_Test (push): Failing after 2m40s
28 changed files with 397 additions and 272 deletions

View File

@@ -14,14 +14,26 @@ jobs:
         uses: actions/checkout@v4
       - name: Install build dependencies
         run: |
-          cd ${{ gitea.workspace }}/source
+          cd source
           ./install_build_prerequisites.sh
-      - name: Build
+      - name: Build Native
        run: |
-          cd ${{ gitea.workspace }}/source
-          ./multibuild.sh
+          cd source
+          ./build_native.sh
       - name: Test
         run: |
-          cd ${{ gitea.workspace }}/source/output
-          ./dropshell_x86_64 list
-          ./dropshell_x86_64 help
+          cd source
+          ./test.sh
+      - name: Build Production
+        run: |
+          cd source
+          ./build_production.sh
+      - name: Test
+        run: |
+          cd source
+          ./test.sh
+      - name: Publish
+        run: |
+          cd source
+          ./publish.sh

dropshell-install.sh (Executable file, 22 lines)
View File

@@ -0,0 +1,22 @@
#!/bin/bash
set -euo pipefail
# download and install dropshell
# 1. Determine architecture
# -----------------------------------------------------------------------------
ARCH=$(uname -m)
TARGET_PATH="${HOME}/.local/bin/dropshell"
[ ! -f "${TARGET_PATH}" ] || rm -f "${TARGET_PATH}"
mkdir -p "$(dirname "${TARGET_PATH}")"
curl -L -s -o "${TARGET_PATH}" "https://getbin.xyz/dropshell.${ARCH}" || die "Failed to download dropshell for ${ARCH}"
chmod +x "${TARGET_PATH}"
echo "dropshell installed successfully to $TARGET_PATH"
echo " "
echo "Please:"
echo "1. run '${TARGET_PATH} edit' to edit the configuration."
echo "2. run '${TARGET_PATH} install' to install dropshell components on this computer."
echo "3. run 'source ~/.bashrc' to add to your path and autocomplete for the current shell."

View File

@@ -1,49 +0,0 @@
#!/bin/bash
set -e
# download and install dropshell
# 1. Determine architecture
# -----------------------------------------------------------------------------
ARCH=$(uname -m)
if [[ "$ARCH" == "x86_64" ]]; then
BIN=dropshell.x86_64
elif [[ "$ARCH" == "aarch64" || "$ARCH" == "arm64" ]]; then
BIN=dropshell.aarch64
else
echo "Unsupported architecture: $ARCH" >&2
exit 1
fi
# 2. Download the appropriate binary to a temp directory
# -----------------------------------------------------------------------------
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
URL="https://gitea.jde.nz/public/dropshell/releases/download/latest/$BIN"
echo "Downloading $BIN from $URL..."
TARGET_PATH="${HOME}/.local/bin/dropshell"
mkdir -p "${HOME}/.local/bin"
curl -fsSL -o "$TARGET_PATH" "$URL"
if [ ! -f "$TARGET_PATH" ]; then
echo "Failed to download dropshell" >&2
exit 1
fi
chmod +x "$TARGET_PATH"
if [ ! -f "${HOME}/.local/bin/ds" ]; then
ln -s "$TARGET_PATH" "${HOME}/.local/bin/ds"
fi
echo "dropshell installed successfully to $TARGET_PATH"
echo "Please:"
echo "1. run '${TARGET_PATH} edit' to edit the configuration."
echo "2. run '${TARGET_PATH} install' to install dropshell components on this computer."
echo "3. run 'source ~/.bashrc' to add to your path and autocomplete for the current shell."

View File

@@ -38,6 +38,7 @@ CURRENT_EXIT_CODE=0
 load_dotenv(){
     local file_path=$1
     if [ -f "${file_path}" ]; then
+        # shellcheck source=/dev/null
         source "${file_path}"
     fi
 }
@@ -69,9 +70,9 @@ function run_command() {
     load_dotenv "${service_path}/config/.template_info.env"
     # update the main variables.
-    CONFIG_PATH="${service_path}/config"
-    SERVICE="${SERVICE_NAME}"
-    DOCKER_CLI_HINTS=false
+    export CONFIG_PATH="${service_path}/config"
+    export SERVICE="${SERVICE_NAME}"
+    export DOCKER_CLI_HINTS=false
     set +a

View File

@@ -21,16 +21,13 @@ fi
 _check_required_env_vars "AGENT_PATH"
 function install_bb64() {
-    curl -fsSL "https://gitea.jde.nz/public/bb64/releases/download/latest/install.sh" | bash -s -- "$AGENT_PATH" "$(id -u $USER):$(id -g $USER)"
-    # test result code from curl
-    if [ $? -ne 0 ]; then
+    if ! curl -fsSL "https://gitea.jde.nz/public/bb64/releases/download/latest/install.sh" | \
+        bash -s -- "$AGENT_PATH" "$(id -u "$USER"):$(id -g "$USER")"; then
         _die "Failed to install bb64. Curl returned non-zero exit code."
     fi
     # test if bb64 is installed
-    VER=$("$AGENT_PATH/bb64" -v)
-    if [ $? -ne 0 ]; then
+    if ! VER=$("$AGENT_PATH/bb64" -v); then
         _die "bb64 did not install correctly."
     fi

View File

@@ -41,18 +41,18 @@ _create_and_start_container() {
     local run_cmd="$1"
     local container_name="$2"
-    if _is_container_exists $container_name; then
-        _is_container_running $container_name && return 0
-        _start_container $container_name
+    if _is_container_exists "$container_name"; then
+        _is_container_running "$container_name" && return 0
+        _start_container "$container_name"
     else
         $run_cmd
     fi
-    if ! _is_container_running $container_name; then
+    if ! _is_container_running "$container_name"; then
         _die "Container ${container_name} failed to start"
     fi
-    ID=$(_get_container_id $container_name)
+    ID=$(_get_container_id "$container_name")
     echo "Container ${container_name} is running with ID ${ID}"
 }
@@ -93,6 +93,7 @@ _check_docker_installed() {
 # Checks if a container (any state) exists. Returns 1 if not found.
 _is_container_exists() {
+    [ -n "${1:-}" ] || { echo "_is_container_exists: Container name is empty" >&2; return 1; }
     if ! docker ps -a --format "{{.Names}}" | grep -q "^$1$"; then
         return 1
     fi
@@ -101,6 +102,7 @@ _is_container_exists() {
 # Checks if a container is currently running. Returns 1 if not running.
 _is_container_running() {
+    [ -n "${1:-}" ] || { echo "_is_container_running: Container name is empty" >&2; return 1; }
     if ! docker ps --format "{{.Names}}" | grep -q "^$1$"; then
         return 1
     fi
@@ -119,39 +121,39 @@ _get_container_status() {
 # Starts an existing, stopped container.
 _start_container() {
-    _is_container_exists $1 || return 1
-    _is_container_running $1 && return 0
-    docker start $1
+    _is_container_exists "$1" || return 1
+    _is_container_running "$1" && return 0
+    docker start "$1"
 }
 # Stops a running container.
 _stop_container() {
-    _is_container_running $1 || return 0;
-    docker stop $1
+    _is_container_running "$1" || return 0;
+    docker stop "$1"
 }
 # Stops (if needed) and removes a container.
 _remove_container() {
-    _stop_container $1
-    _is_container_exists $1 || return 0;
-    docker rm $1
+    _stop_container "$1"
+    _is_container_exists "$1" || return 0;
+    docker rm "$1"
 }
 # Prints the logs for a container.
 _get_container_logs() {
-    if ! _is_container_exists $1; then
+    if ! _is_container_exists "$1"; then
         echo "Container $1 does not exist"
         return 1
     fi
-    docker logs $1
+    docker logs "$1"
 }
 # Checks if listed environment variables are set; calls _die() if any are missing.
 _check_required_env_vars() {
     local required_vars=("$@")
     for var in "${required_vars[@]}"; do
-        if [ -z "${!var}" ]; then
+        if [ -z "${!var:-}" ]; then
             _die "Required environment variable $var is not set"
         fi
     done

View File

@@ -12,26 +12,26 @@ _autocommandrun_volume() {
     case "$command" in
         create)
-            if docker volume ls | grep -q ${volume_name}; then
+            if docker volume ls | grep -q "${volume_name}"; then
                 echo "Volume ${volume_name} already exists - leaving unchanged"
                 return
             fi
             echo "Creating volume ${volume_name}"
-            docker volume create ${volume_name}
+            docker volume create "${volume_name}"
             ;;
         destroy)
             echo "Destroying volume ${volume_name}"
-            docker volume rm ${volume_name}
+            docker volume rm "${volume_name}"
             ;;
         backup)
             echo "Backing up volume ${volume_name}"
-            docker run --rm -v ${volume_name}:/volume -v ${backup_folder}:/backup debian bash -c "tar -czvf /backup/backup.tgz -C /volume . && chown -R $MYID:$MYGRP /backup"
+            docker run --rm -v "${volume_name}":/volume -v "${backup_folder}":/backup debian bash -c "tar -czvf /backup/backup.tgz -C /volume . && chown -R $MYID:$MYGRP /backup"
             ;;
         restore)
             echo "Restoring volume ${volume_name}"
-            docker volume rm ${volume_name}
-            docker volume create ${volume_name}
-            docker run --rm -v ${volume_name}:/volume -v ${backup_folder}:/backup debian bash -c "tar -xzvf /backup/backup.tgz -C /volume --strip-components=1"
+            docker volume rm "${volume_name}"
+            docker volume create "${volume_name}"
+            docker run --rm -v "${volume_name}":/volume -v "${backup_folder}":/backup debian bash -c "tar -xzvf /backup/backup.tgz -C /volume --strip-components=1"
             ;;
     esac
 }
@@ -48,14 +48,16 @@ _autocommandrun_path() {
             return
         fi
         echo "Creating path ${path}"
-        mkdir -p ${path}
+        mkdir -p "${path}"
         ;;
     destroy)
         echo "Destroying path ${path}"
-        local path_parent=$(dirname ${path})
-        local path_child=$(basename ${path})
+        local path_parent;
+        path_parent=$(dirname "${path}")
+        local path_child;
+        path_child=$(basename "${path}")
         if [ -d "${path_parent}/${path_child}" ]; then
-            docker run --rm -v ${path_parent}:/volume debian bash -c "rm -rfv /volume/${path_child}" || echo "Failed to destroy path ${path}"
+            docker run --rm -v "${path_parent}":/volume debian bash -c "rm -rfv /volume/${path_child}" || echo "Failed to destroy path ${path}"
         else
             echo "Path ${path} does not exist - nothing to destroy"
         fi
@@ -63,7 +65,7 @@ _autocommandrun_path() {
     backup)
         echo "Backing up path ${path}"
         if [ -d "${path}" ]; then
-            docker run --rm -v ${path}:/path -v ${backup_folder}:/backup debian bash -c "tar -czvf /backup/backup.tgz -C /path . && chown -R $MYID:$MYGRP /backup"
+            docker run --rm -v "${path}":/path -v "${backup_folder}":/backup debian bash -c "tar -czvf /backup/backup.tgz -C /path . && chown -R $MYID:$MYGRP /backup"
         else
             echo "Path ${path} does not exist - nothing to backup"
         fi
@@ -73,9 +75,9 @@ _autocommandrun_path() {
             echo "Backup file ${backup_folder}/backup.tgz does not exist - nothing to restore"
         else
             echo "Clearing existing data in path ${path}"
-            docker run --rm -v ${path}:/path debian bash -c "rm -rfv /path/{*,.*}"
+            docker run --rm -v "${path}":/path debian bash -c "rm -rfv /path/{*,.*}"
             echo "Restoring path ${path} from backup file ${backup_folder}/backup.tgz"
-            tar -xzvf ${backup_folder}/backup.tgz -C ${path} --strip-components=1
+            tar -xzvf "${backup_folder}/backup.tgz" -C "${path}" --strip-components=1
         fi
         ;;
     esac
@@ -88,31 +90,36 @@ _autocommandrun_file() {
     case "$command" in
         create)
-            filepath_parent=$(dirname ${filepath})
-            filepath_child=$(basename ${filepath})
-            if [ ! -d "${filepath_parent}" ]; then
-                echo "Parent directory ${filepath_parent} of ${filepath_child} does not exist - creating"
-                mkdir -p ${filepath_parent}
+            local file_parent;
+            file_parent=$(dirname "${filepath}")
+            local file_name;
+            file_name=$(basename "${filepath}")
+            if [ ! -d "${file_parent}" ]; then
+                echo "Parent directory ${file_parent} of ${file_name} does not exist - creating"
+                mkdir -p "${file_parent}"
             fi
             ;;
         destroy)
-            rm -f ${filepath}
+            rm -f "${filepath}"
            ;;
        backup)
             echo "Backing up file ${filepath}"
-            local file_parent=$(dirname ${filepath})
-            local file_name=$(basename ${filepath})
+            local file_parent;
+            file_parent=$(dirname "${filepath}")
+            local file_name;
+            file_name=$(basename "${filepath}")
             if [ -f "${file_parent}/${file_name}" ]; then
-                docker run --rm -v ${file_parent}:/volume -v ${backup_folder}:/backup debian bash -c "cp /volume/${file_name} /backup/${file_name} && chown -R $MYID:$MYGRP /backup"
+                docker run --rm -v "${file_parent}":/volume -v "${backup_folder}":/backup debian bash -c "cp /volume/${file_name} /backup/${file_name} && chown -R $MYID:$MYGRP /backup"
             else
                 echo "File ${filepath} does not exist - nothing to backup"
             fi
             ;;
         restore)
             echo "Restoring file ${filepath}"
-            local file_name=$(basename ${filepath})
-            rm -f ${filepath} || die "Unable to remove existing file ${filepath}, restore failed."
-            cp ${backup_folder}/${file_name} ${filepath} || die "Unable to copy file ${backup_folder}/${file_name} to ${filepath}, restore failed."
+            local file_name;
+            file_name=$(basename "${filepath}")
+            rm -f "${filepath}" || return_die "Unable to remove existing file ${filepath}, restore failed."
+            cp "${backup_folder}/${file_name}" "${filepath}" || return_die "Unable to copy file ${backup_folder}/${file_name} to ${filepath}, restore failed."
             ;;
     esac
 }
@@ -153,9 +160,10 @@ _autocommandparse() {
         local value="${pair#*=}"
         # create backup folder unique to key/value.
-        local bfolder=$(echo "${key}_${value}" | tr -cd '[:alnum:]_-')
+        local bfolder;
+        bfolder=$(echo "${key}_${value}" | tr -cd '[:alnum:]_-')
         local targetpath="${backup_temp_path}/${bfolder}"
-        mkdir -p ${targetpath}
+        mkdir -p "${targetpath}"
         # Key must be one of volume, path or file
         case "$key" in
@@ -191,7 +199,7 @@ databackup() {
     mkdir -p "$BACKUP_TEMP_PATH"
-    echo "_autocommandparse [backup] [$BACKUP_TEMP_PATH] [$@]"
+    echo "_autocommandparse [backup] [$BACKUP_TEMP_PATH]" "$@"
     _autocommandparse backup "$BACKUP_TEMP_PATH" "$@"
     tar zcvf "$BACKUP_FILE" -C "$BACKUP_TEMP_PATH" .
@@ -201,7 +209,7 @@ datarestore() {
     _check_required_env_vars "BACKUP_FILE" "TEMP_DIR"
     BACKUP_TEMP_PATH="$TEMP_DIR/restore"
-    echo "_autocommandparse [restore] [$BACKUP_TEMP_PATH] [$@]"
+    echo "_autocommandparse [restore] [$BACKUP_TEMP_PATH]" "$@"
     mkdir -p "$BACKUP_TEMP_PATH"
     tar zxvf "$BACKUP_FILE" -C "$BACKUP_TEMP_PATH" --strip-components=1

View File

@@ -1,45 +0,0 @@
#!/bin/bash
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
OUTPUT_DIR=${SCRIPT_DIR}/output
INSTALL_DIR=${HOME}/.local/bin
mkdir -p ${OUTPUT_DIR}
# Exit on error
set -e
function build_native() {
local BUILDDIR=${SCRIPT_DIR}/build/native
local PREVDIR=$PWD
local JOBS=$(nproc) # Set JOBS to the number of available CPU cores
mkdir -p ${BUILDDIR}
cd ${SCRIPT_DIR}
CC="${HOME}/.musl-cross/x86_64-linux-musl-native/bin/x86_64-linux-musl-gcc"
CXX="${HOME}/.musl-cross/x86_64-linux-musl-native/bin/x86_64-linux-musl-g++"
cmake -B ${BUILDDIR} -G Ninja \
-DCMAKE_BUILD_TYPE=Debug \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DCMAKE_LINKER=mold \
-DCMAKE_C_COMPILER=${CC} \
-DCMAKE_CXX_COMPILER=${CXX}
cd ${BUILDDIR}
ninja -j"$JOBS"
#upx ${BUILDDIR}/dropshell
cp ${BUILDDIR}/dropshell ${OUTPUT_DIR}/dropshell.native
cd ${PREVDIR}
}
build_native
echo "Auto-installing dropshell locally..."
mkdir -p "${INSTALL_DIR}"
cp "${OUTPUT_DIR}/dropshell.native" "${INSTALL_DIR}/dropshell"
ds version
echo "Build process completed!"

source/build_native.sh (Executable file, 53 lines)
View File

@@ -0,0 +1,53 @@
#!/bin/bash
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
OUTPUT_DIR=${SCRIPT_DIR}/output
INSTALL_DIR=${HOME}/.local/bin
mkdir -p "${OUTPUT_DIR}"
# Exit on error
set -euo pipefail
ARCH=$(uname -m)
if [ "$ARCH" != "x86_64" ] && [ "$ARCH" != "aarch64" ]; then
echo "Unsupported architecture: $ARCH"
exit 1
fi
function build_native() {
local BUILDDIR=${SCRIPT_DIR}/build/native
local PREVDIR=$PWD
local JOBS;
JOBS=$(nproc) # Set JOBS to the number of available CPU cores
mkdir -p "${BUILDDIR}"
cd "${SCRIPT_DIR}" || exit 1
CC="${HOME}/.musl-cross/${ARCH}-linux-musl-native/bin/${ARCH}-linux-musl-gcc"
CXX="${HOME}/.musl-cross/${ARCH}-linux-musl-native/bin/${ARCH}-linux-musl-g++"
cmake -B "${BUILDDIR}" -G Ninja \
-DCMAKE_BUILD_TYPE=Debug \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DCMAKE_LINKER=mold \
-DCMAKE_C_COMPILER="${CC}" \
-DCMAKE_CXX_COMPILER="${CXX}"
cd "${BUILDDIR}" || exit 1
ninja -j"$JOBS"
#upx ${BUILDDIR}/dropshell
cp "${BUILDDIR}/dropshell" "${OUTPUT_DIR}/dropshell.${ARCH}"
cd "${PREVDIR}" || exit 1
}
build_native
echo "Auto-installing dropshell locally..."
mkdir -p "${INSTALL_DIR}"
cp "${OUTPUT_DIR}/dropshell.${ARCH}" "${INSTALL_DIR}/dropshell"
echo "Build process completed!"

View File

@@ -1,15 +1,15 @@
 #!/bin/bash
+set -euo pipefail
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
 # Create output directory
-mkdir -p ${SCRIPT_DIR}/output
+mkdir -p "${SCRIPT_DIR}/output"
+PREV_DIR=$(pwd)
+cd "${SCRIPT_DIR}"
+trap 'cd "${PREV_DIR}"' EXIT
 function build_arch() {
     local arch=$1
-    local PREVDIR=$PWD
-    cd ${SCRIPT_DIR}
     if [ ! -f "${HOME}/.musl-cross/${arch}-linux-musl-cross/bin/${arch}-linux-musl-c++" ]; then
         echo "Musl cross compiler for ${arch} not found. Please run install_build_prerequisites.sh first."
@@ -20,16 +20,14 @@ function build_arch() {
     CC="${HOME}/.musl-cross/${arch}-linux-musl-cross/bin/${arch}-linux-musl-gcc"
     CXX="${HOME}/.musl-cross/${arch}-linux-musl-cross/bin/${arch}-linux-musl-g++"
-    BUILDDIR=${SCRIPT_DIR}/build/${arch}
-    mkdir -p ${BUILDDIR}
-    cmake -B ${BUILDDIR} -G Ninja -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} -DCMAKE_C_COMPILER=${CC} -DCMAKE_CXX_COMPILER=${CXX}
-    cmake --build ${BUILDDIR}
-    upx ${BUILDDIR}/dropshell
-    cp ${BUILDDIR}/dropshell ${SCRIPT_DIR}/output/dropshell.${arch}
-    cd ${PREVDIR}
+    BUILDDIR="${SCRIPT_DIR}/build/${arch}"
+    mkdir -p "${BUILDDIR}"
+    cmake -B "${BUILDDIR}" -G Ninja -DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" -DCMAKE_C_COMPILER="${CC}" -DCMAKE_CXX_COMPILER="${CXX}"
+    cmake --build "${BUILDDIR}"
+    upx "${BUILDDIR}/dropshell"
+    cp "${BUILDDIR}/dropshell" "${SCRIPT_DIR}/output/dropshell.${arch}"
 }
 build_arch x86_64

View File

@@ -46,7 +46,14 @@ print_status "Detected OS: $OS $VER"
 case $OS in
     "Ubuntu"|"Debian GNU/Linux")
         # Common packages for both Ubuntu and Debian
-        PACKAGES="cmake make g++ devscripts debhelper build-essential upx musl-tools wget tar ccache ninja-build"
+        PACKAGES="bash cmake make g++ devscripts debhelper build-essential upx musl-tools wget tar ccache ninja-build"
+        INSTALLCMD="apt-get install -y"
+        UPDATECMD="apt-get update"
+        ;;
+    "Alpine Linux")
+        PACKAGES="bash build-base cmake git nlohmann-json wget tar curl ninja mold nodejs npm"
+        INSTALLCMD="apk add --no-cache"
+        UPDATECMD="apk update"
         ;;
     *)
         print_error "Unsupported distribution: $OS"
@@ -56,19 +63,29 @@ esac
 # Function to check if a package is installed
 is_package_installed() {
-    dpkg -l "$1" 2>/dev/null | grep -q "^ii"
+    if [ "$OS" = "Alpine Linux" ]; then
+        # Use apk info <pkg> and check exit status
+        apk info "$1" >/dev/null 2>&1
+        return $?
+    else
+        dpkg -l "$1" 2>/dev/null | grep -q "^ii"
+    fi
 }
-# Update package lists
-print_status "Updating package lists..."
-apt-get update
+UPDATED=false
 # Install missing packages
 print_status "Checking and installing required packages..."
 for pkg in $PACKAGES; do
     if ! is_package_installed "$pkg"; then
+        if [ "$UPDATED" = false ]; then
+            print_status "Updating package lists..."
+            $UPDATECMD
+            UPDATED=true
+        fi
         print_status "Installing $pkg..."
-        apt-get install -y "$pkg"
+        $INSTALLCMD "$pkg"
         if [ $? -ne 0 ]; then
             print_error "Failed to install $pkg"
             exit 1
@@ -84,7 +101,7 @@ done
 # Set install directory
 if [ -n "$SUDO_USER" ] && [ "$SUDO_USER" != "root" ]; then
-    USER_HOME=$(eval echo ~$SUDO_USER)
+    USER_HOME=$(eval echo "~$SUDO_USER")
 else
     USER_HOME="$HOME"
 fi
@@ -99,7 +116,7 @@ function install_musl_cross() {
     local MUSL_CC_URL="https://musl.cc"
     if [ ! -d "$INSTALL_DIR/$TOOLCHAIN" ]; then
         echo "Downloading $TOOLCHAIN musl cross toolchain..."
-        wget -nc -O "$TMPDIR/$TOOLCHAIN.tgz" $MUSL_CC_URL/$TOOLCHAIN.tgz
+        wget -nc -O "$TMPDIR/$TOOLCHAIN.tgz" "$MUSL_CC_URL/$TOOLCHAIN.tgz"
         tar -C "$INSTALL_DIR" -xvf "$TMPDIR/$TOOLCHAIN.tgz"
     fi
 }

View File

@@ -9,74 +9,21 @@ echo "Script directory: $SCRIPT_DIR"
 TOKEN="${GITEA_TOKEN_DEPLOY:-${GITEA_TOKEN}}"
 [ -z "$TOKEN" ] && { echo "Neither GITEA_TOKEN_DEPLOY nor GITEA_TOKEN environment variable set!" >&2; exit 1; }
-$SCRIPT_DIR/multibuild.sh
-BUILD_DIR=$SCRIPT_DIR/build
-OLD_PWD=$PWD
-cd $SCRIPT_DIR
-# Check for required binaries
-REQUIRED_BINARIES=("dropshell.x86_64" "dropshell.aarch64")
-for binary in "${REQUIRED_BINARIES[@]}"; do
-    if [ ! -f "output/$binary" ]; then
-        echo "output/$binary not found!" >&2
-        echo "Please run multibuild.sh first." >&2
-        exit 1
-    fi
-done
-TAG=$("$SCRIPT_DIR/output/dropshell.x86_64" --version)
-[ -z "$TAG" ] && echo "Failed to get version from dropshell.x86_64" >&2 && exit 1
+OLD_PWD="$PWD"
+cd "$SCRIPT_DIR" || exit 1
+TEMP_DIR=$(mktemp -d)
+trap 'rm -rf "$TEMP_DIR" && cd "$OLD_PWD"' EXIT
+ARCH=$(uname -m)
+TAG=$("$SCRIPT_DIR/output/dropshell.${ARCH}" --version)
+[ -z "$TAG" ] && echo "Failed to get version from dropshell.${ARCH}" >&2 && exit 1
 echo "Publishing dropshell version $TAG"
-# make sure we've commited.
-git add "$SCRIPT_DIR/../" && git commit -m "dropshell release $TAG" && git push
-# Find repo info from .git/config
-REPO_URL=$(git config --get remote.origin.url)
-if [[ ! $REPO_URL =~ gitea ]]; then
-    echo "Remote origin is not a Gitea repository: $REPO_URL" >&2
-    exit 1
-fi
-# Extract base URL, owner, and repo
-# Example: https://gitea.example.com/username/reponame.git
-BASE_URL=$(echo "$REPO_URL" | sed -E 's#(https?://[^/]+)/.*#\1#')
-OWNER=$(echo "$REPO_URL" | sed -E 's#.*/([^/]+)/[^/]+(\.git)?$#\1#')
-REPO=$(echo "$REPO_URL" | sed -E 's#.*/([^/]+)(\.git)?$#\1#')
-API_URL="$BASE_URL/api/v1/repos/$OWNER/$REPO"
-# Create release
-RELEASE_DATA=$(cat <<EOF
-{
-    "tag_name": "$TAG",
-    "name": "$TAG",
-    "body": "dropshell release $TAG",
-    "draft": false,
-    "prerelease": false
-}
-EOF
-)
-# Capture stdout and stderr of the curl command
-CURL_OUTPUT=$(curl -X POST "$API_URL/releases" \
-    -H "Content-Type: application/json" \
-    -H "Authorization: token $TOKEN" \
-    -d "$RELEASE_DATA" 2>&1)
-# Extract the release ID from the captured output
-RELEASE_ID=$(echo "$CURL_OUTPUT" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
-if [ -z "$RELEASE_ID" ]; then
-    echo "Failed to create release on Gitea." >&2
-    echo "Release ID returned: $RELEASE_ID" >&2
-    echo "Curl Output/Error:" >&2
-    echo "$CURL_OUTPUT" >&2
-    exit 1
-fi
+function die() {
+    echo "$@" >&2
+    exit 1
+}
 # Function to find file in specified locations
 find_file() {
@@ -94,25 +41,17 @@ find_file() {
     return 1
 }
+curl -L -s -o "${TEMP_DIR}/sos" "https://getbin.xyz/sos" || die "Failed to download sos"
+chmod +x "${TEMP_DIR}/sos"
 # Upload binaries and install.sh
-for FILE in dropshell.x86_64 dropshell.aarch64 install.sh server_autosetup.sh; do
+for FILE in dropshell.x86_64 dropshell.aarch64 dropshell-install.sh dropshell-server-autosetup.sh; do
     # Pass the locations directly to the find_file function
     filetoupload=$(find_file "$FILE" "output" "../" ".")
-    if [ -z "$filetoupload" ]; then
-        echo "File $FILE not found in expected locations!" >&2
-        continue
-    fi
-    # Auto-detect content type
-    ctype=$(file --mime-type -b "$filetoupload")
-    curl -X POST "$API_URL/releases/$RELEASE_ID/assets?name=$FILE" \
-        -H "Content-Type: $ctype" \
-        -H "Authorization: token $TOKEN" \
-        --data-binary @"$filetoupload"
-    echo "Uploaded $FILE to release $TAG as $ctype."
+    [ -z "$filetoupload" ] && die "File $FILE not found in expected locations!"
+    "${TEMP_DIR}/sos" upload getbin.xyz "$filetoupload" "$FILE:latest" "$FILE:TAG"
 done
-echo "Published dropshell version $TAG to $REPO_URL (tag $TAG) with binaries."
-cd $OLD_PWD
+echo "Published dropshell $TAG to getbin.xyz"

View File

@@ -103,6 +103,11 @@ namespace dropshell
        if (server_name.empty() || template_name.empty() || service_name.empty())
            return false;
+       if (!legal_service_name(service_name)) {
+           error << "Service name contains illegal characters: " << service_name << std::endl;
+           return false;
+       }
        ServerConfig server_info(server_name);
        if (!server_info.is_valid())
        {

View File

@@ -94,11 +94,11 @@ int edit_config()
     std::string config_file = localfile::dropshell_json();
     if (!edit_file(config_file, false) || !std::filesystem::exists(config_file))
-        return die("Failed to edit config file.");
+        return return_die("Failed to edit config file.");
     gConfig().load_config();
     if (!gConfig().is_config_set())
-        return die("Failed to load and parse edited config file!");
+        return return_die("Failed to load and parse edited config file!");
     gConfig().save_config(true);

View File

@@ -0,0 +1,83 @@
#include "command_registry.hpp"
#include "config.hpp"
#include "utils/utils.hpp"
#include "utils/directories.hpp"
#include "shared_commands.hpp"
#include "version.hpp"
#include "hash.hpp"
#include <unistd.h>
#include <cstring>
#include <iostream>
#include <sstream>
#include <filesystem>
#include <libassert/assert.hpp>
namespace dropshell {
void hash_autocomplete(const CommandContext& ctx);
int hash_handler(const CommandContext& ctx);
static std::vector<std::string> hash_name_list={"hash"};
// Static registration
struct HashCommandRegister {
HashCommandRegister() {
CommandRegistry::instance().register_command({
hash_name_list,
hash_handler,
hash_autocomplete,
false, // hidden
false, // requires_config
false, // requires_install
0, // min_args (after command)
1, // max_args (after command)
"hash [FILE|DIRECTORY]",
"Hash a file or directory.",
// heredoc
R"(
Hash a file or directory recursively.
)"
});
}
} hash_command_register;
void hash_autocomplete(const CommandContext& ctx) {
if (ctx.args.size() == 0) {
// list all files and directories in the current directory
for (const auto& entry : std::filesystem::directory_iterator(".")) {
rawout << entry.path().string() << std::endl;
}
}
return;
}
int hash_handler(const CommandContext& ctx) {
std::filesystem::path path = safearg(ctx.args, 0);
if (path.empty())
path=std::filesystem::current_path();
if (!std::filesystem::exists(path))
{
error << "Does not exist: " << path.string() << std::endl;
return 1;
}
if (std::filesystem::is_directory(path))
{
// hash the directory recursively
uint64_t hash = hash_directory_recursive(path.string());
std::cout << hash << std::endl;
}
else
{
// hash the file
uint64_t hash = hash_file(path.string());
std::cout << hash << std::endl;
}
return 0;
}
} // namespace dropshell

View File

@@ -78,10 +78,13 @@ namespace dropshell
        if (!server_env.is_valid())
            return false; // should never hit this.
-       std::string user = server_env.get_user_for_service(service);
+       std::string user = service_info.user;
        std::string remote_service_path = remotepath(server,user).service(service);
+       ASSERT(!remote_service_path.empty(), "Install_Service: Remote service path is empty for " + service + " on " + server);
+       ASSERT(!user.empty(), "Install_Service: User is empty for " + service + " on " + server);
        if (server_env.check_remote_dir_exists(remote_service_path, user))
        { // uninstall the old service before we update the config or template!
            info << "Service " << service << " is already installed on " << server << std::endl;
@@ -149,7 +152,13 @@ namespace dropshell
        // Run install script
        {
            info << "Running " << service_info.template_name << " install script on " << server << "..." << std::endl;
-           server_env.run_remote_template_command(service, "install", {}, false, {});
+           shared_commands::cRemoteTempFolder remote_temp_folder(server_env, user);
+           if (!server_env.run_remote_template_command(service, "install", {}, false, {{"TEMP_DIR", remote_temp_folder.path()}}))
+           {
+               error << "Failed to run install script on " << server << std::endl;
+               return false;
+           }
        }
        // print health tick
@@ -301,7 +310,7 @@ complete -F _dropshell_completions ds
        std::filesystem::remove(exe_path.parent_path() / "dropshell.old");
        // execute the new version
-       execlp("bash", "bash", "-c", (parent_path / "dropshell").c_str(), "install", (char *)nullptr);
+       execlp("bash", "bash", "-c", (exe_path.parent_path() / "dropshell").string() + "install", (char *)nullptr);
        error << "Failed to execute new version of dropshell." << std::endl;
        return -1;
    }

View File

@@ -75,9 +75,9 @@ namespace dropshell
        // determine the architecture of the system
        std::string arch;
 #ifdef __aarch64__
-       arch = "arm64";
+       arch = "aarch64";
 #elif __x86_64__
-       arch = "amd64";
+       arch = "x86_64";
 #endif
        return arch;
    }

View File

@@ -61,6 +61,12 @@ namespace dropshell
            return false;
        }
+       if (!legal_service_name(service))
+       {
+           error << "Service name contains illegal characters: " << service << std::endl;
+           return false;
+       }
        LocalServiceInfo sinfo = get_service_info(server, service);
        if (!SIvalid(sinfo))
        {

View File

@@ -51,6 +51,12 @@ namespace dropshell
            return false;
        }
+       if (!legal_service_name(service))
+       {
+           error << "Service name contains illegal characters: " << service << std::endl;
+           return false;
+       }
        // run the start script.
        bool started = server_env.run_remote_template_command(service, "start", {}, false, {});

View File

@@ -51,6 +51,12 @@ namespace dropshell
            return false;
        }
+       if (!legal_service_name(service))
+       {
+           error << "Service name contains illegal characters: " << service << std::endl;
+           return false;
+       }
        // run the stop script.
        bool stopped = server_env.run_remote_template_command(service, "stop", {}, false, {});

View File

@@ -6,8 +6,8 @@ _dropshell_completions() {
     cur="${COMP_WORDS[COMP_CWORD]}"
     # call dropshell to get the list of possiblities for the current argument. Supply all previous arguments.
-    local completions=($(dropshell autocomplete "${COMP_WORDS[@]:1:${COMP_CWORD}-1}"))
-    COMPREPLY=( $(compgen -W "${completions[*]}" -- ${cur}) )
+    mapfile -t completions < <(dropshell autocomplete "${COMP_WORDS[@]:1:${COMP_CWORD}-1}")
+    mapfile -t COMPREPLY < <(compgen -W "${completions[*]}" -- "$cur")
     return 0
 }

View File

@@ -149,12 +149,20 @@ namespace dropshell
    std::string get_user_for_service(const std::string &server, const std::string &service)
    {
+       if (!legal_service_name(service))
+       {
+           error << "Service name contains illegal characters: " + service << std::endl;
+           return "";
+       }
        auto services_info = get_server_services_info(server);
        auto it = std::find_if(services_info.begin(), services_info.end(),
                               [&service](const LocalServiceInfo &si)
                               { return si.service_name == service; });
        if (it != services_info.end() && SIvalid(*it))
            return it->user;
+       debug << "Couldn't find user for service \"" << service << "\" on server \"" << server << "\"" << std::endl;
        return "";
    }

View File

@@ -80,6 +80,9 @@ namespace dropshell
        if (server_name.empty() || service_name.empty())
            return LocalServiceInfo();
+       if (!legal_service_name(service_name))
+           return LocalServiceInfo();
        service.service_name = service_name;
        service.local_service_path = localpath::service(server_name, service_name);
@@ -230,6 +233,13 @@ namespace dropshell
            warning << "Expected environment file not found: " << file << std::endl;
        };
+       // add in some simple variables first, as others below may depend on/use these in bash.
+       // if we change these, we also need to update agent/_allservicesstatus.sh
+       all_env_vars["SERVER"] = server_name;
+       all_env_vars["SERVICE"] = service_name;
+       all_env_vars["DOCKER_CLI_HINTS"] = "false"; // turn off docker junk.
        // Load environment files
        load_env_file(localfile::service_env(server_name, service_name));
        load_env_file(localfile::template_info_env(server_name, service_name));
@@ -243,13 +253,10 @@ namespace dropshell
            return false;
        }
-       // add in some handy variables.
-       // if we change these, we also need to update agent/_allservicesstatus.sh
+       // more additional, these depend on others above.
        all_env_vars["CONFIG_PATH"] = remotepath(server_name, user).service_config(service_name);
-       all_env_vars["SERVER"] = server_name;
-       all_env_vars["SERVICE"] = service_name;
        all_env_vars["AGENT_PATH"] = remotepath(server_name, user).agent();
-       all_env_vars["DOCKER_CLI_HINTS"] = "false"; // turn off docker junk.
        // determine template name.
        auto it = all_env_vars.find("TEMPLATE");

View File

@@ -171,6 +171,11 @@
    bool template_manager::create_template(const std::string &template_name) const
    {
+       if (!legal_service_name(template_name)) {
+           error << "Template name contains illegal characters: " << template_name << std::endl;
+           return false;
+       }
        // 1. Create a new directory in the user templates directory
        std::vector<std::string> local_server_definition_paths = gConfig().get_local_server_definition_paths();

View File

@@ -312,7 +312,7 @@ std::string requote(std::string str) {
 }
-int die(const std::string & msg) {
+int return_die(const std::string & msg) {
    error << "Fatal error:" << std::endl;
    error << msg << std::endl;
    return 1;
@@ -650,4 +650,24 @@ bool file_replace_or_add_segment(std::string filepath, std::string segment)
    return true;
 }
+bool legal_service_name(const std::string &service_name) {
+    static bool initialized = false;
+    static bool legal_chars[256] = {false}; // Initialize all to false
+    // One-time initialization
+    if (!initialized) {
+        // Set true for valid characters
+        for (unsigned char c : "0123456789"
+                               "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+                               "abcdefghijklmnopqrstuvwxyz"
+                               "._-") {
+            legal_chars[c] = true;
+        }
+        initialized = true;
+    }
+    return std::all_of(service_name.begin(), service_name.end(),
+                       [](unsigned char c) { return legal_chars[c]; });
+}
 } // namespace dropshell

View File

@@ -45,7 +45,7 @@ int count_substring(const std::string &substring, const std::string &text);
 std::string random_alphanumeric_string(int length);
-int die(const std::string & msg);
+int return_die(const std::string & msg);
 std::string safearg(int argc, char *argv[], int index);
 std::string safearg(const std::vector<std::string> & args, int index);
@@ -75,4 +75,7 @@ constexpr unsigned int switchhash(const char *s, int off = 0)
     return !s[off] ? 5381 : (switchhash(s, off + 1) * 33) ^ s[off];
 }
+bool legal_service_name(const std::string & service_name);
 } // namespace dropshell

source/test.sh (Executable file, 12 lines)
View File

@@ -0,0 +1,12 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
ARCH=$(uname -m)
PREV_DIR=$(pwd)
trap 'cd "$PREV_DIR"' EXIT
"$SCRIPT_DIR/output/dropshell.${ARCH}" hash "${SCRIPT_DIR}/test.sh"
"$SCRIPT_DIR/output/dropshell.${ARCH}" help