Compare commits

...

42 Commits

Author SHA1 Message Date
Your Name
9d4e5f76ce 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m12s
2025-06-15 21:37:21 +12:00
Your Name
366f5c2d0e 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m57s
2025-06-02 00:44:23 +12:00
Your Name
0b0f3df59c 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m20s
2025-06-01 23:38:58 +12:00
Your Name
f48302c05e 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Has been cancelled
2025-06-01 23:37:34 +12:00
Your Name
7f341699c1 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m52s
2025-06-01 23:34:31 +12:00
Your Name
18c53acd71 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m21s
2025-06-01 23:16:42 +12:00
Your Name
eb632c010c 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m27s
2025-06-01 23:06:14 +12:00
Your Name
964e8598b1 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m27s
2025-06-01 18:33:37 +12:00
Your Name
a5cf9313e9 dropshell release 2025.0601.1821
Some checks failed
Dropshell Test / Build_and_Test (push) Has been cancelled
2025-06-01 18:21:32 +12:00
Your Name
ab73a47751 dropshell release 2025.0601.1754
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m31s
2025-06-01 17:54:13 +12:00
Your Name
1da7dc7951 dropshell release 2025.0601.1752
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 16m46s
2025-06-01 17:53:06 +12:00
Your Name
49d61f0da0 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 16m26s
2025-06-01 15:57:52 +12:00
Your Name
27c0abcb9f 'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Has been cancelled
2025-06-01 15:38:57 +12:00
Your Name
483ee4e3ef :-'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 13m16s
2025-05-30 00:14:24 +12:00
Your Name
f7294e01e4 :-'Generic Commit'
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m40s
2025-05-28 20:40:24 +12:00
Your Name
c836b26657 dropshell release 2025.0527.2201
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m44s
2025-05-27 22:01:51 +12:00
Your Name
7bf624589f tidying
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m46s
2025-05-26 23:39:40 +12:00
Your Name
a5e339a358 tidying
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m37s
2025-05-26 23:26:37 +12:00
Your Name
029823a6b4 tidying
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m52s
2025-05-26 23:22:38 +12:00
Your Name
f79abd346e Tidying
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m2s
2025-05-26 23:19:10 +12:00
Your Name
940c2a12a1 dropshell release 2025.0526.2310
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m58s
2025-05-26 23:12:28 +12:00
Your Name
6ac651d4f0 Tidying
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m40s
2025-05-26 22:49:37 +12:00
Your Name
84fd96e74e dropshell release 2025.0526.2234
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m47s
2025-05-26 22:34:52 +12:00
Your Name
8eb652b84e dropshell release 2025.0526.2224
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 3m0s
2025-05-26 22:25:03 +12:00
Your Name
c8c0c3289c dropshell release 2025.0526.2223
Some checks failed
Dropshell Test / Build_and_Test (push) Has been cancelled
2025-05-26 22:23:35 +12:00
Your Name
3dc82c682c dropshell release 2025.0526.2221
Some checks failed
Dropshell Test / Build_and_Test (push) Has been cancelled
2025-05-26 22:21:39 +12:00
Your Name
4035538ba5 GPT4.5 is having a go
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m59s
2025-05-26 00:13:32 +12:00
Your Name
8ec4976cc0 Playing with static still
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m30s
2025-05-25 23:59:50 +12:00
Your Name
1b35f74bfe Seems to be fully statically built now.
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m15s
2025-05-25 23:47:12 +12:00
Your Name
08794e6480 LOL Zig
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m43s
2025-05-25 23:04:39 +12:00
Your Name
3b51a511a6 zlib not static
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m28s
2025-05-25 22:40:46 +12:00
Your Name
e1be3dff8d Tidying
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m17s
2025-05-25 22:32:56 +12:00
Your Name
9375acafa9 Ninja and ccache - faster builds!
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m28s
2025-05-25 22:18:58 +12:00
Your Name
f45baa8362 Craziness with multiarch
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m18s
2025-05-25 22:12:14 +12:00
Your Name
d3ceb3f4be Add vscode stuff.
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m28s
2025-05-25 19:46:14 +12:00
Your Name
27a2d25fb2 Fixed.
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m33s
2025-05-25 19:44:10 +12:00
Your Name
e7558be416 Not better.
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m29s
2025-05-25 19:31:43 +12:00
Your Name
8f06fc31ae Debugging
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 2m10s
2025-05-25 19:17:51 +12:00
Your Name
1502d6e3d2 Fix test.yaml
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 1m26s
2025-05-25 18:16:19 +12:00
Your Name
d71ba38754 ds
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 12s
2025-05-25 18:14:15 +12:00
Your Name
f45d9a33ed yaml
Some checks failed
Dropshell Test / Build_and_Test (push) Failing after 12s
2025-05-25 10:10:07 +12:00
Your Name
73f85769a2 yaml
Some checks failed
Dropshell Test / Build_and_Test (push) Has been cancelled
2025-05-25 10:02:45 +12:00
52 changed files with 11872 additions and 26542 deletions

View File

@ -12,12 +12,28 @@ jobs:
sudo apt-get install -y openssh-server
- name: Check out repository code
uses: actions/checkout@v4
- name: Build
- name: Install build dependencies
run: |
cd ${{ gitea.workspace }}/source
./multibuild.sh
cd source
./install_build_prerequisites.sh
- name: Build Native
run: |
cd source
./build_native.sh
- name: Test
run: |
cd ${{ gitea.workspace }}/source/output
./dropshell_x86_64 list
./dropshell_x86_64 help
cd source
./test.sh
- name: Build Production
run: |
cd source
./build_production.sh
- name: Test
run: |
cd source
./test.sh
- name: Publish
run: |
cd source
./publish.sh

1
.gitignore vendored
View File

@ -39,7 +39,6 @@ cmake_install.cmake
Makefile
# IDE specific files
.vscode/
.idea/
*.swp
*.swo

17
.vscode/c_cpp_properties.json vendored Normal file
View File

@ -0,0 +1,17 @@
{
"configurations": [
{
"name": "Linux",
"includePath": [
"${workspaceFolder}/**",
"${workspaceFolder}/source/build/src/autogen"
],
"defines": [],
"compilerPath": "/usr/bin/g++",
"cStandard": "c23",
"cppStandard": "c++23",
"intelliSenseMode": "linux-gcc-x64"
}
],
"version": 4
}

103
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,103 @@
{
"files.associations": {
"*.inja": "jinja-html",
"*.ipp": "cpp",
"random": "cpp",
"ostream": "cpp",
"cctype": "cpp",
"clocale": "cpp",
"cmath": "cpp",
"csetjmp": "cpp",
"csignal": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"any": "cpp",
"array": "cpp",
"atomic": "cpp",
"strstream": "cpp",
"bit": "cpp",
"*.tcc": "cpp",
"bitset": "cpp",
"cfenv": "cpp",
"charconv": "cpp",
"chrono": "cpp",
"cinttypes": "cpp",
"codecvt": "cpp",
"compare": "cpp",
"complex": "cpp",
"concepts": "cpp",
"condition_variable": "cpp",
"cstdint": "cpp",
"deque": "cpp",
"forward_list": "cpp",
"list": "cpp",
"map": "cpp",
"set": "cpp",
"string": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"vector": "cpp",
"exception": "cpp",
"expected": "cpp",
"algorithm": "cpp",
"functional": "cpp",
"iterator": "cpp",
"memory": "cpp",
"memory_resource": "cpp",
"numeric": "cpp",
"optional": "cpp",
"ratio": "cpp",
"regex": "cpp",
"source_location": "cpp",
"string_view": "cpp",
"system_error": "cpp",
"tuple": "cpp",
"type_traits": "cpp",
"utility": "cpp",
"fstream": "cpp",
"future": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"iostream": "cpp",
"istream": "cpp",
"limits": "cpp",
"mutex": "cpp",
"new": "cpp",
"numbers": "cpp",
"ranges": "cpp",
"scoped_allocator": "cpp",
"semaphore": "cpp",
"shared_mutex": "cpp",
"span": "cpp",
"sstream": "cpp",
"stdexcept": "cpp",
"stop_token": "cpp",
"streambuf": "cpp",
"thread": "cpp",
"typeindex": "cpp",
"typeinfo": "cpp",
"valarray": "cpp",
"variant": "cpp",
"format": "cpp",
"stdfloat": "cpp",
"__nullptr": "cpp",
"__node_handle": "cpp",
"__split_buffer": "cpp",
"filesystem": "cpp",
"queue": "cpp",
"stack": "cpp",
"__bit_reference": "cpp",
"__functional_base": "cpp",
"__memory": "cpp",
"locale": "cpp",
"stacktrace": "cpp",
"__locale": "cpp"
}
}

View File

@ -5,15 +5,23 @@ A system management tool for server operations, written in C++.
## Installation
```
curl -fsSL https://gitea.jde.nz/public/dropshell/releases/download/latest/install.sh | sudo bash
curl -fsSL https://gitea.jde.nz/public/dropshell/releases/download/latest/install.sh | bash
```
This installs as dropshell, with a symlink ds if the ds command does not already exist.
This installs as dropshell for the local user, with a symbolic link ds.
You'll need to run:
```
~/.local/bin/dropshell edit
~/.local/bin/dropshell install
source ~/.bashrc
```
to configure dropshell and install the local components.
## Installation of Agent
## Remote Server Setup
Install the Agent on each server you wish to manage. Supports amd64 (x86_64) and arm64 (aarch64) architectures.
### Initial setup
Auto setup script which creates a dropshell user, and includes installing docker if not already present:
```
curl -fsSL https://gitea.jde.nz/public/dropshell/releases/download/latest/server_autosetup.sh | sudo bash
```
@ -26,7 +34,21 @@ Manual steps:
1. Test ssh'ing into the server.
## Install Services
### Configure and Use Remote Server
Set up a server and install a service:
1. `ds create-server SERVERNAME`
#### Add to local dropshell configuration, and install remote agent
Back on the dropshell host:
1. `dropshell create-server SERVERNAME`
1. `dropshell edit SERVERNAME`
1. `dropshell install SERVERNAME`
#### Install Services
Create and install a service:
1. `ds template list` -- see what templates are available to install.
1. `ds create-service SERVERNAME SERVICENAME TEMPLATE`
1. `ds edit SERVERNAME SERVICENAME`
1. Edit other config files if needed.
1. `ds install SERVERNAME SERVICENAME`
1. `ds list`
The service should now be seen to be running.

22
dropshell-install.sh Executable file
View File

@ -0,0 +1,22 @@
#!/bin/bash
set -euo pipefail

# download and install dropshell

# Print an error to stderr and abort.
# BUGFIX: this script called `die` on curl failure, but never defined it —
# a failed download would have produced "die: command not found" instead of
# the intended message.
die() {
    echo "ERROR: $*" >&2
    exit 1
}

# 1. Determine architecture
# -----------------------------------------------------------------------------
ARCH=$(uname -m)

# Install per-user (no root required); remove any previous binary first so
# curl writes a fresh file rather than truncating one that may be running.
TARGET_PATH="${HOME}/.local/bin/dropshell"

[ ! -f "${TARGET_PATH}" ] || rm -f "${TARGET_PATH}"
mkdir -p "$(dirname "${TARGET_PATH}")"

curl -L -s -o "${TARGET_PATH}" "https://getbin.xyz/dropshell.${ARCH}" || die "Failed to download dropshell for ${ARCH}"
chmod +x "${TARGET_PATH}"

echo "dropshell installed successfully to $TARGET_PATH"
echo " "
echo "Please:"
echo "1. run '${TARGET_PATH} edit' to edit the configuration."
echo "2. run '${TARGET_PATH} install' to install dropshell components on this computer."
echo "3. run 'source ~/.bashrc' to add to your path and autocomplete for the current shell."

View File

@ -1,4 +1,6 @@
# TODO: fall back to sh if bash is not installed.
#!/bin/bash
# set up a remote server for use with dropshell.
# check if we are running as root
if [ "$(id -u)" -ne 0 ]; then

View File

@ -1,50 +0,0 @@
#!/bin/bash
set -e
# download and install dropshell
# Check if running as root
if [ "$EUID" -ne 0 ]; then
echo "Please run this script as root (use sudo)"
exit 1
fi
# 1. Determine architecture
# -----------------------------------------------------------------------------
ARCH=$(uname -m)
if [[ "$ARCH" == "x86_64" ]]; then
BIN=dropshell.amd64
elif [[ "$ARCH" == "aarch64" || "$ARCH" == "arm64" ]]; then
BIN=dropshell.arm64
else
echo "Unsupported architecture: $ARCH" >&2
exit 1
fi
# 2. Download the appropriate binary to a temp directory
# -----------------------------------------------------------------------------
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
URL="https://gitea.jde.nz/public/dropshell/releases/download/latest/$BIN"
echo "Downloading $BIN from $URL..."
curl -fsSL -o "$TMPDIR/dropshell" "$URL"
if [ ! -f "$TMPDIR/dropshell" ]; then
echo "Failed to download dropshell" >&2
exit 1
fi
chmod +x "$TMPDIR/dropshell"
cp "$TMPDIR/dropshell" /usr/local/bin/dropshell
if [ -f /usr/local/bin/ds ]; then
rm -f /usr/local/bin/ds
fi
ln -s /usr/local/bin/dropshell /usr/local/bin/ds
rm -rf "$TMPDIR"
echo "dropshell installed successfully to /usr/local/bin/dropshell"

View File

@ -1,6 +1,15 @@
cmake_minimum_required(VERSION 3.10)
project(dropshell VERSION 1.0.0 LANGUAGES CXX)
# Force static linking globally
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static")
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
set(BUILD_SHARED_LIBS OFF CACHE BOOL "Build shared libraries" FORCE)
set(CMAKE_POSITION_INDEPENDENT_CODE OFF)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -static")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -static")
set(ZLIB_USE_STATIC_LIBS "ON")
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_C_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
@ -37,17 +46,16 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
file(GLOB_RECURSE SOURCES "src/*.cpp")
file(GLOB_RECURSE HEADERS "src/*.hpp")
# Add custom target to run make_createagent.sh at the start of the build process
add_custom_target(run_createagent ALL
COMMAND ${CMAKE_COMMAND} -E echo "Running make_createagent.sh..."
COMMAND ${CMAKE_COMMAND} -E env bash ${CMAKE_CURRENT_SOURCE_DIR}/make_createagent.sh
# Add custom target to run cmake_prebuild.sh at the start of the build process
add_custom_target(run_prebuild_script ALL
COMMAND ${CMAKE_COMMAND} -E echo "Running cmake_prebuild.sh..."
COMMAND ${CMAKE_COMMAND} -E env bash ${CMAKE_CURRENT_SOURCE_DIR}/cmake_prebuild.sh
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
# BUGFIX: COMMENT still referenced the old make_createagent.sh script name
# after the target was renamed; updated to match the command actually run.
COMMENT "Running cmake_prebuild.sh before build"
)
# Add executable
add_executable(dropshell ${SOURCES})
add_dependencies(dropshell run_createagent)
add_dependencies(dropshell run_prebuild_script)
# Mark the generated files as GENERATED so CMake knows they'll be created during build
set_source_files_properties(
@ -89,7 +97,8 @@ FetchContent_Declare(
GIT_TAG v2.1.5
)
FetchContent_MakeAvailable(libassert)
include(FetchContent)
# Add cpptrace
FetchContent_Declare(
cpptrace
GIT_REPOSITORY https://github.com/jeremy-rifkin/cpptrace.git
@ -97,81 +106,28 @@ FetchContent_Declare(
)
FetchContent_MakeAvailable(cpptrace)
# Add nlohmann/json
FetchContent_Declare(
nlohmann_json
GIT_REPOSITORY https://github.com/nlohmann/json.git
GIT_TAG v3.11.3
)
FetchContent_MakeAvailable(nlohmann_json)
# Link libraries
target_link_libraries(dropshell PRIVATE
libassert::assert
cpptrace::cpptrace
nlohmann_json::nlohmann_json
)
# Set static linking flags
set_target_properties(dropshell PROPERTIES
LINK_FLAGS "-static"
)
# Install targets
install(TARGETS dropshell
RUNTIME DESTINATION bin
RUNTIME DESTINATION $ENV{HOME}/.local/bin
)
# Create symbolic link 'ds' pointing to 'dropshell'
install(CODE "
message(STATUS \"Checking if 'ds' command already exists...\")
execute_process(
COMMAND which ds
RESULT_VARIABLE DS_NOT_EXISTS
OUTPUT_QUIET
ERROR_QUIET
)
if(DS_NOT_EXISTS)
message(STATUS \"Command 'ds' does not exist. Creating symlink.\")
execute_process(
COMMAND ${CMAKE_COMMAND} -E create_symlink
\${CMAKE_INSTALL_PREFIX}/bin/dropshell
\${CMAKE_INSTALL_PREFIX}/bin/ds
)
else()
message(STATUS \"Command 'ds' already exists. Skipping symlink creation.\")
endif()
")
# Install completion script
install(FILES src/dropshell-completion.bash
DESTINATION /etc/bash_completion.d
RENAME dropshell
)
# Create a symlink for the completion script to work with 'ds' command
install(CODE "
# First check if 'ds' command exists after our installation
execute_process(
COMMAND which ds
RESULT_VARIABLE DS_NOT_EXISTS
OUTPUT_VARIABLE DS_PATH
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE
)
# Only proceed if 'ds' exists
if(NOT DS_NOT_EXISTS)
# Check if 'ds' is a symlink pointing to dropshell
execute_process(
COMMAND readlink -f \${DS_PATH}
RESULT_VARIABLE READLINK_FAILED
OUTPUT_VARIABLE REAL_PATH
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE
)
# Get the path to our dropshell binary
set(DROPSHELL_PATH \${CMAKE_INSTALL_PREFIX}/bin/dropshell)
# Check if the real path is our dropshell binary
if(NOT READLINK_FAILED AND \"\${REAL_PATH}\" STREQUAL \"\${DROPSHELL_PATH}\")
message(STATUS \"Command 'ds' exists and points to dropshell. Creating completion script symlink.\")
execute_process(
COMMAND ${CMAKE_COMMAND} -E create_symlink
/etc/bash_completion.d/dropshell
/etc/bash_completion.d/ds
)
else()
message(STATUS \"Command 'ds' exists but doesn't point to dropshell. Skipping completion symlink.\")
endif()
else()
message(STATUS \"Command 'ds' not found. Skipping completion symlink.\")
endif()
")

65
source/Dockerfile Normal file
View File

@ -0,0 +1,65 @@
# Multi-stage build: compile dropshell statically in an Alpine builder stage,
# then copy only the resulting binary into an empty (scratch) final stage.
FROM --platform=$BUILDPLATFORM alpine:latest AS builder
# Add build arguments for platform
ARG TARGETPLATFORM
ARG BUILDPLATFORM
# Install build dependencies
RUN apk add --no-cache \
build-base \
cmake \
git \
musl-dev \
curl \
bash \
musl \
g++ \
ninja \
linux-headers
# Install cross-compilation tools for ARM64
# NOTE(review): crossbuild-essential-arm64 / gcc-aarch64-linux-gnu /
# g++-aarch64-linux-gnu look like Debian/Ubuntu package names — confirm
# these exist in Alpine's apk repositories, otherwise this step fails
# for linux/arm64 targets.
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
apk add --no-cache \
crossbuild-essential-arm64 \
gcc-aarch64-linux-gnu \
g++-aarch64-linux-gnu; \
fi
# Set working directory
WORKDIR /build
# Copy source files
COPY . .
# Configure and build
RUN mkdir -p build_static
# Set up cross-compilation environment for ARM64
# NOTE(review): variables exported inside one RUN step do not persist to
# later RUN steps, so CC/CXX/CMAKE_TOOLCHAIN_FILE set here are gone by the
# cmake invocation below — confirm whether this step has any effect.
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
export CC=aarch64-linux-gnu-gcc \
export CXX=aarch64-linux-gnu-g++ \
export CMAKE_TOOLCHAIN_FILE=/build/toolchain.cmake; \
fi
# Create toolchain file for ARM64
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
echo "set(CMAKE_SYSTEM_NAME Linux)" > toolchain.cmake && \
echo "set(CMAKE_SYSTEM_PROCESSOR aarch64)" >> toolchain.cmake && \
echo "set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)" >> toolchain.cmake && \
echo "set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)" >> toolchain.cmake && \
echo "set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)" >> toolchain.cmake && \
echo "set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)" >> toolchain.cmake && \
echo "set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)" >> toolchain.cmake; \
fi
# NOTE(review): CMAKE_TOOLCHAIN_FILE is expanded in a fresh shell here, so
# per the note above it is unset and the toolchain file is never passed —
# arm64 builds presumably use the native compilers. Verify intended behavior.
RUN cmake -G Ninja -B build_static -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_EXE_LINKER_FLAGS="-static" \
-DCMAKE_FIND_LIBRARY_SUFFIXES=".a" \
-DBUILD_SHARED_LIBS=OFF \
${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
RUN cmake --build build_static
# Final stage that only contains the binary
FROM scratch AS dropshell
COPY --from=builder /build/build_static/dropshell /dropshell

View File

@ -1,5 +1,8 @@
#!/bin/bash
# install the dropshell host agent on this computer.
# (not for remote servers)
SCRIPT_DIR=$(dirname "$0")
echo "Installing dropshell host agent on this computer..."
@ -22,27 +25,6 @@ _check_required_env_vars() {
done
}
# Checks if Docker is installed, running, and user has permission. Returns 1 on failure.
_check_docker_installed() {
if ! command -v docker &> /dev/null; then
echo "Docker is not installed"
return 1
fi
# check if docker daemon is running
if ! docker info &> /dev/null; then
echo "Docker daemon is not running"
return 1
fi
# check if user has permission to run docker
if ! docker run --rm hello-world &> /dev/null; then
echo "User does not have permission to run docker"
return 1
fi
return 0
}
function install_bb64() {
# check curl installed
@ -77,7 +59,5 @@ set +a
_check_required_env_vars "AGENT_LOCAL_PATH"
echo "Installing host agent into $AGENT_LOCAL_PATH"
_check_docker_installed || _die "Docker is required."
install_bb64

View File

@ -38,6 +38,7 @@ CURRENT_EXIT_CODE=0
load_dotenv(){
local file_path=$1
if [ -f "${file_path}" ]; then
# shellcheck source=/dev/null
source "${file_path}"
fi
}
@ -69,9 +70,9 @@ function run_command() {
load_dotenv "${service_path}/config/.template_info.env"
# update the main variables.
CONFIG_PATH="${service_path}/config"
SERVICE="${SERVICE_NAME}"
DOCKER_CLI_HINTS=false
export CONFIG_PATH="${service_path}/config"
export SERVICE="${SERVICE_NAME}"
export DOCKER_CLI_HINTS=false
set +a

View File

@ -21,16 +21,13 @@ fi
_check_required_env_vars "AGENT_PATH"
function install_bb64() {
curl -fsSL "https://gitea.jde.nz/public/bb64/releases/download/latest/install.sh" | bash -s -- "$AGENT_PATH" "$(id -u $USER):$(id -g $USER)"
# test result code from curl
if [ $? -ne 0 ]; then
if ! curl -fsSL "https://gitea.jde.nz/public/bb64/releases/download/latest/install.sh" | \
bash -s -- "$AGENT_PATH" "$(id -u "$USER"):$(id -g "$USER")"; then
_die "Failed to install bb64. Curl returned non-zero exit code."
fi
# test if bb64 is installed
VER=$("$AGENT_PATH/bb64" -v)
if [ $? -ne 0 ]; then
if ! VER=$("$AGENT_PATH/bb64" -v); then
_die "bb64 did not install correctly."
fi
@ -53,6 +50,12 @@ if ! command -v docker &> /dev/null; then
exit 1
fi
# check rsync installation
if ! command -v rsync &> /dev/null; then
echo "Rsync is not installed. Rsync is required for agent installation."
exit 1
fi
#-------------------------------------------------------------------------

View File

@ -41,18 +41,18 @@ _create_and_start_container() {
local run_cmd="$1"
local container_name="$2"
if _is_container_exists $container_name; then
_is_container_running $container_name && return 0
_start_container $container_name
if _is_container_exists "$container_name"; then
_is_container_running "$container_name" && return 0
_start_container "$container_name"
else
$run_cmd
fi
if ! _is_container_running $container_name; then
if ! _is_container_running "$container_name"; then
_die "Container ${container_name} failed to start"
fi
ID=$(_get_container_id $container_name)
ID=$(_get_container_id "$container_name")
echo "Container ${container_name} is running with ID ${ID}"
}
@ -93,6 +93,7 @@ _check_docker_installed() {
# Checks if a container (any state) exists. Returns 1 if not found.
_is_container_exists() {
[ -n "${1:-}" ] || { echo "_is_container_exists: Container name is empty" >&2; return 1; }
if ! docker ps -a --format "{{.Names}}" | grep -q "^$1$"; then
return 1
fi
@ -101,6 +102,7 @@ _is_container_exists() {
# Checks if a container is currently running. Returns 1 if not running.
_is_container_running() {
[ -n "${1:-}" ] || { echo "_is_container_running: Container name is empty" >&2; return 1; }
if ! docker ps --format "{{.Names}}" | grep -q "^$1$"; then
return 1
fi
@ -119,39 +121,39 @@ _get_container_status() {
# Starts an existing, stopped container.
_start_container() {
_is_container_exists $1 || return 1
_is_container_running $1 && return 0
docker start $1
_is_container_exists "$1" || return 1
_is_container_running "$1" && return 0
docker start "$1"
}
# Stops a running container.
_stop_container() {
_is_container_running $1 || return 0;
docker stop $1
_is_container_running "$1" || return 0;
docker stop "$1"
}
# Stops (if needed) and removes a container.
_remove_container() {
_stop_container $1
_is_container_exists $1 || return 0;
docker rm $1
_stop_container "$1"
_is_container_exists "$1" || return 0;
docker rm "$1"
}
# Prints the logs for a container.
_get_container_logs() {
if ! _is_container_exists $1; then
if ! _is_container_exists "$1"; then
echo "Container $1 does not exist"
return 1
fi
docker logs $1
docker logs "$1"
}
# Checks if listed environment variables are set; calls _die() if any are missing.
_check_required_env_vars() {
local required_vars=("$@")
for var in "${required_vars[@]}"; do
if [ -z "${!var}" ]; then
if [ -z "${!var:-}" ]; then
_die "Required environment variable $var is not set"
fi
done

View File

@ -12,26 +12,26 @@ _autocommandrun_volume() {
case "$command" in
create)
if docker volume ls | grep -q ${volume_name}; then
if docker volume ls | grep -q "${volume_name}"; then
echo "Volume ${volume_name} already exists - leaving unchanged"
return
fi
echo "Creating volume ${volume_name}"
docker volume create ${volume_name}
docker volume create "${volume_name}"
;;
destroy)
echo "Destroying volume ${volume_name}"
docker volume rm ${volume_name}
docker volume rm "${volume_name}"
;;
backup)
echo "Backing up volume ${volume_name}"
docker run --rm -v ${volume_name}:/volume -v ${backup_folder}:/backup debian bash -c "tar -czvf /backup/backup.tgz -C /volume . && chown -R $MYID:$MYGRP /backup"
docker run --rm -v "${volume_name}":/volume -v "${backup_folder}":/backup debian bash -c "tar -czvf /backup/backup.tgz -C /volume . && chown -R $MYID:$MYGRP /backup"
;;
restore)
echo "Restoring volume ${volume_name}"
docker volume rm ${volume_name}
docker volume create ${volume_name}
docker run --rm -v ${volume_name}:/volume -v ${backup_folder}:/backup debian bash -c "tar -xzvf /backup/backup.tgz -C /volume --strip-components=1"
docker volume rm "${volume_name}"
docker volume create "${volume_name}"
docker run --rm -v "${volume_name}":/volume -v "${backup_folder}":/backup debian bash -c "tar -xzvf /backup/backup.tgz -C /volume --strip-components=1"
;;
esac
}
@ -48,14 +48,16 @@ _autocommandrun_path() {
return
fi
echo "Creating path ${path}"
mkdir -p ${path}
mkdir -p "${path}"
;;
destroy)
echo "Destroying path ${path}"
local path_parent=$(dirname ${path})
local path_child=$(basename ${path})
local path_parent;
path_parent=$(dirname "${path}")
local path_child;
path_child=$(basename "${path}")
if [ -d "${path_parent}/${path_child}" ]; then
docker run --rm -v ${path_parent}:/volume debian bash -c "rm -rfv /volume/${path_child}" || echo "Failed to destroy path ${path}"
docker run --rm -v "${path_parent}":/volume debian bash -c "rm -rfv /volume/${path_child}" || echo "Failed to destroy path ${path}"
else
echo "Path ${path} does not exist - nothing to destroy"
fi
@ -63,7 +65,7 @@ _autocommandrun_path() {
backup)
echo "Backing up path ${path}"
if [ -d "${path}" ]; then
docker run --rm -v ${path}:/path -v ${backup_folder}:/backup debian bash -c "tar -czvf /backup/backup.tgz -C /path . && chown -R $MYID:$MYGRP /backup"
docker run --rm -v "${path}":/path -v "${backup_folder}":/backup debian bash -c "tar -czvf /backup/backup.tgz -C /path . && chown -R $MYID:$MYGRP /backup"
else
echo "Path ${path} does not exist - nothing to backup"
fi
@ -73,9 +75,9 @@ _autocommandrun_path() {
echo "Backup file ${backup_folder}/backup.tgz does not exist - nothing to restore"
else
echo "Clearing existing data in path ${path}"
docker run --rm -v ${path}:/path debian bash -c "rm -rfv /path/{*,.*}"
docker run --rm -v "${path}":/path debian bash -c "rm -rfv /path/{*,.*}"
echo "Restoring path ${path} from backup file ${backup_folder}/backup.tgz"
tar -xzvf ${backup_folder}/backup.tgz -C ${path} --strip-components=1
tar -xzvf "${backup_folder}/backup.tgz" -C "${path}" --strip-components=1
fi
;;
esac
@ -88,31 +90,36 @@ _autocommandrun_file() {
case "$command" in
create)
filepath_parent=$(dirname ${filepath})
filepath_child=$(basename ${filepath})
if [ ! -d "${filepath_parent}" ]; then
echo "Parent directory ${filepath_parent} of ${filepath_child} does not exist - creating"
mkdir -p ${filepath_parent}
local file_parent;
file_parent=$(dirname "${filepath}")
local file_name;
file_name=$(basename "${filepath}")
if [ ! -d "${file_parent}" ]; then
echo "Parent directory ${file_parent} of ${file_name} does not exist - creating"
mkdir -p "${file_parent}"
fi
;;
destroy)
rm -f ${filepath}
rm -f "${filepath}"
;;
backup)
echo "Backing up file ${filepath}"
local file_parent=$(dirname ${filepath})
local file_name=$(basename ${filepath})
local file_parent;
file_parent=$(dirname "${filepath}")
local file_name;
file_name=$(basename "${filepath}")
if [ -f "${file_parent}/${file_name}" ]; then
docker run --rm -v ${file_parent}:/volume -v ${backup_folder}:/backup debian bash -c "cp /volume/${file_name} /backup/${file_name} && chown -R $MYID:$MYGRP /backup"
docker run --rm -v "${file_parent}":/volume -v "${backup_folder}":/backup debian bash -c "cp /volume/${file_name} /backup/${file_name} && chown -R $MYID:$MYGRP /backup"
else
echo "File ${filepath} does not exist - nothing to backup"
fi
;;
restore)
echo "Restoring file ${filepath}"
local file_name=$(basename ${filepath})
rm -f ${filepath} || die "Unable to remove existing file ${filepath}, restore failed."
cp ${backup_folder}/${file_name} ${filepath} || die "Unable to copy file ${backup_folder}/${file_name} to ${filepath}, restore failed."
local file_name;
file_name=$(basename "${filepath}")
rm -f "${filepath}" || return_die "Unable to remove existing file ${filepath}, restore failed."
cp "${backup_folder}/${file_name}" "${filepath}" || return_die "Unable to copy file ${backup_folder}/${file_name} to ${filepath}, restore failed."
;;
esac
}
@ -153,9 +160,10 @@ _autocommandparse() {
local value="${pair#*=}"
# create backup folder unique to key/value.
local bfolder=$(echo "${key}_${value}" | tr -cd '[:alnum:]_-')
local bfolder;
bfolder=$(echo "${key}_${value}" | tr -cd '[:alnum:]_-')
local targetpath="${backup_temp_path}/${bfolder}"
mkdir -p ${targetpath}
mkdir -p "${targetpath}"
# Key must be one of volume, path or file
case "$key" in
@ -191,7 +199,7 @@ databackup() {
mkdir -p "$BACKUP_TEMP_PATH"
echo "_autocommandparse [backup] [$BACKUP_TEMP_PATH] [$@]"
echo "_autocommandparse [backup] [$BACKUP_TEMP_PATH]" "$@"
_autocommandparse backup "$BACKUP_TEMP_PATH" "$@"
tar zcvf "$BACKUP_FILE" -C "$BACKUP_TEMP_PATH" .
@ -201,7 +209,7 @@ datarestore() {
_check_required_env_vars "BACKUP_FILE" "TEMP_DIR"
BACKUP_TEMP_PATH="$TEMP_DIR/restore"
echo "_autocommandparse [restore] [$BACKUP_TEMP_PATH] [$@]"
echo "_autocommandparse [restore] [$BACKUP_TEMP_PATH]" "$@"
mkdir -p "$BACKUP_TEMP_PATH"
tar zxvf "$BACKUP_FILE" -C "$BACKUP_TEMP_PATH" --strip-components=1

View File

@ -1,105 +0,0 @@
#!/bin/bash
# Build script for dropshell: verifies tooling, configures with CMake,
# builds with make, and installs via `sudo make install`.

# Exit on error
set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Determine number of CPU cores for parallel build (fallback: 4)
JOBS=4
if command -v nproc >/dev/null 2>&1; then
    JOBS=$(nproc)
fi

# Helpers to print coloured status messages
print_status() {
    echo -e "${GREEN}[*] $1${NC}"
}

print_error() {
    echo -e "${RED}[!] $1${NC}"
}

print_warning() {
    echo -e "${YELLOW}[!] $1${NC}"
}

# Verify required tools BEFORE creating/entering the build directory, so a
# missing dependency does not leave a half-initialised build/ folder behind.
if ! command -v cmake &> /dev/null; then
    print_error "CMake is not installed. Please install CMake first."
    exit 1
fi

if ! command -v make &> /dev/null; then
    print_error "Make is not installed. Please install Make first."
    exit 1
fi

if ! command -v pkg-config &> /dev/null; then
    print_error "pkg-config is not installed. Please install pkg-config first."
    print_warning "On Ubuntu/Debian: sudo apt-get install pkg-config"
    print_warning "On Fedora: sudo dnf install pkg-config"
    print_warning "On Arch: sudo pacman -S pkg-config"
    exit 1
fi

if ! pkg-config --exists ncurses; then
    print_error "ncurses is not installed. Please install ncurses first."
    print_warning "On Ubuntu/Debian: sudo apt-get install libncurses-dev"
    print_warning "On Fedora: sudo dnf install ncurses-devel"
    print_warning "On Arch: sudo pacman -S ncurses"
    exit 1
fi

# ensure we have latest dehydrate.
dehydrate -u

# Create build directory if needed, then enter it
if [ ! -d "build" ]; then
    print_status "Creating build directory..."
    mkdir build
fi
cd build

# Configure with CMake
print_status "Configuring with CMake..."
cmake .. -DCMAKE_BUILD_TYPE=Debug
#cmake .. -DCMAKE_BUILD_TYPE=Release

# Build the project.
# NOTE: under `set -e` the old `make; if [ $? -eq 0 ]` pattern could never
# reach its failure branch (the shell exits first). Testing the command
# directly keeps the error message reachable.
print_status "Building project..."
if make -j"$JOBS"; then
    print_status "Build successful!"
    print_status "Binary location: $(pwd)/dropshell"
else
    print_error "Build failed!"
    exit 1
fi

print_status "Auto-installing dropshell..."
if sudo make install; then
    print_status "Installation successful!"
else
    print_error "Installation failed!"
    exit 1
fi

# Return to original directory
cd ..
print_status "Build process completed!"

53
source/build_native.sh Executable file
View File

@ -0,0 +1,53 @@
#!/bin/bash
# Build dropshell natively with the musl toolchain (ccache + ninja + mold)
# and install the resulting binary to ~/.local/bin.

# Exit on error / unset variables / pipeline failures.
# Set FIRST so the directory setup below is also covered (previously this
# came after several commands, which silently ignored their failures).
set -euo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
OUTPUT_DIR="${SCRIPT_DIR}/output"
INSTALL_DIR="${HOME}/.local/bin"

mkdir -p "${OUTPUT_DIR}"

# Only native x86_64 and aarch64 builds are supported.
ARCH=$(uname -m)
if [ "$ARCH" != "x86_64" ] && [ "$ARCH" != "aarch64" ]; then
    echo "Unsupported architecture: $ARCH"
    exit 1
fi

function build_native() {
    local BUILDDIR="${SCRIPT_DIR}/build/native"
    local PREVDIR="$PWD"
    local JOBS
    JOBS=$(nproc) # Set JOBS to the number of available CPU cores

    mkdir -p "${BUILDDIR}"
    cd "${SCRIPT_DIR}" || exit 1

    # Native musl toolchain (installed by install_build_prerequisites.sh)
    CC="${HOME}/.musl-cross/${ARCH}-linux-musl-native/bin/${ARCH}-linux-musl-gcc"
    CXX="${HOME}/.musl-cross/${ARCH}-linux-musl-native/bin/${ARCH}-linux-musl-g++"

    cmake -B "${BUILDDIR}" -G Ninja \
        -DCMAKE_BUILD_TYPE=Debug \
        -DCMAKE_C_COMPILER_LAUNCHER=ccache \
        -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
        -DCMAKE_LINKER=mold \
        -DCMAKE_C_COMPILER="${CC}" \
        -DCMAKE_CXX_COMPILER="${CXX}"

    cd "${BUILDDIR}" || exit 1
    ninja -j"$JOBS"

    #upx ${BUILDDIR}/dropshell
    cp "${BUILDDIR}/dropshell" "${OUTPUT_DIR}/dropshell.${ARCH}"
    cd "${PREVDIR}" || exit 1
}

build_native

echo "Auto-installing dropshell locally..."
mkdir -p "${INSTALL_DIR}"
cp "${OUTPUT_DIR}/dropshell.${ARCH}" "${INSTALL_DIR}/dropshell"

echo "Build process completed!"

37
source/build_production.sh Executable file
View File

@ -0,0 +1,37 @@
#!/bin/bash
# Cross-compile static production (Release) binaries of dropshell for
# x86_64 and aarch64 using the musl cross toolchains, compress them with
# upx, and place them in ./output.
set -euo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"

# Create output directory
mkdir -p "${SCRIPT_DIR}/output"

# Always return to the caller's directory, even on failure.
PREV_DIR=$(pwd)
cd "${SCRIPT_DIR}"
trap 'cd "${PREV_DIR}"' EXIT

function build_arch() {
    # All build state is local so repeated calls cannot leak globals into
    # the rest of the script.
    local arch=$1
    local CMAKE_BUILD_TYPE=Release
    local CC="${HOME}/.musl-cross/${arch}-linux-musl-cross/bin/${arch}-linux-musl-gcc"
    local CXX="${HOME}/.musl-cross/${arch}-linux-musl-cross/bin/${arch}-linux-musl-g++"
    local BUILDDIR="${SCRIPT_DIR}/build/${arch}"

    if [ ! -f "${HOME}/.musl-cross/${arch}-linux-musl-cross/bin/${arch}-linux-musl-c++" ]; then
        echo "Musl cross compiler for ${arch} not found. Please run install_build_prerequisites.sh first."
        exit 1
    fi

    mkdir -p "${BUILDDIR}"
    cmake -B "${BUILDDIR}" -G Ninja -DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" -DCMAKE_C_COMPILER="${CC}" -DCMAKE_CXX_COMPILER="${CXX}"
    cmake --build "${BUILDDIR}"

    # Shrink the static binary, then copy it to the output folder.
    upx "${BUILDDIR}/dropshell"
    cp "${BUILDDIR}/dropshell" "${SCRIPT_DIR}/output/dropshell.${arch}"
}

build_arch x86_64
build_arch aarch64

echo "Static binaries have been created:"
ls -la output

View File

@ -1,20 +1,24 @@
#!/bin/bash
set -e
# CMake pre-build script.
# Runs before the build process.
# This script creates two files:
# src/utils/createagent.hpp
# src/utils/createagent.cpp
#
SCRIPT_DIR=$(dirname "$0")
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# check if dehydrate is installed
if ! command -v dehydrate &> /dev/null; then
echo "dehydrate could not be found - installing"
curl -fsSL https://gitea.jde.nz/public/dehydrate/releases/download/latest/install.sh | bash
else
# ensure we have latest dehydrate.
dehydrate -u
fi
mkdir -p "${SCRIPT_DIR}/src/autogen"
dehydrate "${SCRIPT_DIR}/agent-remote" "${SCRIPT_DIR}/src/autogen"
dehydrate "${SCRIPT_DIR}/agent-local" "${SCRIPT_DIR}/src/autogen"

View File

@ -37,11 +37,23 @@ fi
print_status "Detected OS: $OS $VER"
#----------------------------------------------------------------------------------------------------------
# INSTALL PREREQUISITE PACKAGES
#----------------------------------------------------------------------------------------------------------
# Define packages based on distribution
case $OS in
"Ubuntu"|"Debian GNU/Linux")
# Common packages for both Ubuntu and Debian
PACKAGES="cmake make g++ devscripts debhelper"
PACKAGES="bash cmake make g++ devscripts debhelper build-essential upx musl-tools wget tar ccache ninja-build"
INSTALLCMD="apt-get install -y"
UPDATECMD="apt-get update"
;;
"Alpine Linux")
PACKAGES="bash build-base cmake git nlohmann-json wget tar curl ninja mold nodejs npm"
INSTALLCMD="apk add --no-cache"
UPDATECMD="apk update"
;;
*)
print_error "Unsupported distribution: $OS"
@ -51,19 +63,29 @@ esac
# Function to check if a package is installed
is_package_installed() {
dpkg -l "$1" 2>/dev/null | grep -q "^ii"
if [ "$OS" = "Alpine Linux" ]; then
# Use apk info <pkg> and check exit status
apk info "$1" >/dev/null 2>&1
return $?
else
dpkg -l "$1" 2>/dev/null | grep -q "^ii"
fi
}
# Update package lists
print_status "Updating package lists..."
apt-get update
UPDATED=false
# Install missing packages
print_status "Checking and installing required packages..."
for pkg in $PACKAGES; do
if ! is_package_installed "$pkg"; then
if [ "$UPDATED" = false ]; then
print_status "Updating package lists..."
$UPDATECMD
UPDATED=true
fi
print_status "Installing $pkg..."
apt-get install -y "$pkg"
$INSTALLCMD "$pkg"
if [ $? -ne 0 ]; then
print_error "Failed to install $pkg"
exit 1
@ -73,82 +95,71 @@ for pkg in $PACKAGES; do
fi
done
# Verify all required tools are installed
print_status "Verifying installation..."
for tool in cmake make g++; do
if ! command -v "$tool" &> /dev/null; then
print_error "$tool is not installed properly"
exit 1
fi
done
# Install other required packages
apt install -y musl-tools wget tar
# ----------------------------------------------------------------------------------------------------------
# MUSL CROSS COMPILERS
# ----------------------------------------------------------------------------------------------------------
# Set install directory
if [ -n "$SUDO_USER" ] && [ "$SUDO_USER" != "root" ]; then
USER_HOME=$(eval echo ~$SUDO_USER)
USER_HOME=$(eval echo "~$SUDO_USER")
else
USER_HOME="$HOME"
fi
INSTALL_DIR="$USER_HOME/.musl-cross"
mkdir -p "$INSTALL_DIR"
MUSL_CC_URL="https://musl.cc"
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
# x86_64
if [ ! -d "$INSTALL_DIR/x86_64-linux-musl-cross" ]; then
echo "Downloading x86_64 musl cross toolchain..."
wget -nc -O "$TMPDIR/x86_64-linux-musl-cross.tgz" $MUSL_CC_URL/x86_64-linux-musl-cross.tgz
tar -C "$INSTALL_DIR" -xvf "$TMPDIR/x86_64-linux-musl-cross.tgz"
fi
function install_musl_cross() {
local TOOLCHAIN="$1"
local MUSL_CC_URL="https://musl.cc"
if [ ! -d "$INSTALL_DIR/$TOOLCHAIN" ]; then
echo "Downloading $TOOLCHAIN musl cross toolchain..."
wget -nc -O "$TMPDIR/$TOOLCHAIN.tgz" "$MUSL_CC_URL/$TOOLCHAIN.tgz"
tar -C "$INSTALL_DIR" -xvf "$TMPDIR/$TOOLCHAIN.tgz"
fi
}
# aarch64
if [ ! -d "$INSTALL_DIR/aarch64-linux-musl-cross" ]; then
echo "Downloading aarch64 musl cross toolchain..."
wget -nc -O "$TMPDIR/aarch64-linux-musl-cross.tgz" $MUSL_CC_URL/aarch64-linux-musl-cross.tgz
tar -C "$INSTALL_DIR" -xvf "$TMPDIR/aarch64-linux-musl-cross.tgz"
fi
function check_path() {
if [ -n "$SUDO_USER" ] && [ "$SUDO_USER" != "root" ]; then
local BASHRC="$USER_HOME/.bashrc"
local TOOLCHAIN="$1"
local MUSL_PATH="$INSTALL_DIR/$TOOLCHAIN/bin"
if ! echo "$PATH" | grep -q "$MUSL_PATH"; then
echo "Adding $MUSL_PATH to PATH in $BASHRC"
PATH_LINE="export PATH=\"$MUSL_PATH:\$PATH\""
if ! grep -Fxq "$PATH_LINE" "$BASHRC"; then
echo "" >> "$BASHRC"
echo "# Add musl cross compilers to PATH for dropshell" >> "$BASHRC"
echo "$PATH_LINE" >> "$BASHRC"
echo "Added musl cross compilers to $BASHRC"
echo "You should run 'source ~/.bashrc' to update your PATH"
else
echo "You should run 'source ~/.bashrc' to update your PATH"
fi
fi
fi
}
# Print instructions for adding to PATH
# cat <<EOF
# To use the musl cross compilers, add the following to your shell:
# export PATH="$INSTALL_DIR/x86_64-linux-musl-cross/bin:$INSTALL_DIR/aarch64-linux-musl-cross/bin:$PATH"
TOOLCHAIN_LIST=(
"aarch64-linux-musl-cross"
"x86_64-linux-musl-cross"
"x86_64-linux-musl-native"
)
# Or run:
# export PATH="$INSTALL_DIR/x86_64-linux-musl-cross/bin:$INSTALL_DIR/aarch64-linux-musl-cross/bin:\$PATH"
# EOF
for TOOLCHAIN in "${TOOLCHAIN_LIST[@]}"; do
install_musl_cross "$TOOLCHAIN"
check_path "$TOOLCHAIN"
done
# Clean up
rm -rf "$TMPDIR"
# If run with sudo, add to invoking user's ~/.bashrc
if [ -n "$SUDO_USER" ] && [ "$SUDO_USER" != "root" ]; then
BASHRC="$USER_HOME/.bashrc"
EXPORT_LINE="export PATH=\"$INSTALL_DIR/x86_64-linux-musl-cross/bin:$INSTALL_DIR/aarch64-linux-musl-cross/bin:\$PATH\""
if ! grep -Fxq "$EXPORT_LINE" "$BASHRC"; then
echo "" >> "$BASHRC"
echo "# Add musl cross compilers to PATH for bb64" >> "$BASHRC"
echo "$EXPORT_LINE" >> "$BASHRC"
echo "Added musl cross compilers to $BASHRC"
else
echo "musl cross compiler PATH already present in $BASHRC"
fi
fi
# check if dehydrate command is installed
if ! command -v dehydrate &> /dev/null; then
curl -fsSL https://gitea.jde.nz/public/dehydrate/releases/download/latest/install.sh | bash
fi
dehydrate -u
# ----------------------------------------------------------------------------------------------------------
# COMPLETE
# ----------------------------------------------------------------------------------------------------------
print_status "All dependencies installed successfully!"
print_status "You can now run ./build.sh to build the project"

View File

@ -1,59 +0,0 @@
#!/bin/bash
# build amd64 and arm64 versions of dropshell, to:
#   output/dropshell.amd64
#   output/dropshell.arm64
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Remove stale binaries so a failed build cannot masquerade as a fresh one.
# (Paths are quoted: an unquoted $SCRIPT_DIR breaks on spaces.)
rm -f "$SCRIPT_DIR/build_amd64/dropshell" "$SCRIPT_DIR/build_arm64/dropshell" \
      "$SCRIPT_DIR/output/dropshell.amd64" "$SCRIPT_DIR/output/dropshell.arm64"

# Determine number of CPU cores for parallel build
if command -v nproc >/dev/null 2>&1; then
    JOBS=$(nproc)
else
    JOBS=4 # fallback default
fi

PREV_PWD="$PWD"
cd "$SCRIPT_DIR"

mkdir -p output

# Build for amd64 (musl)
echo "Building for amd64 (musl)..."
cmake -B build_amd64 -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_C_COMPILER=x86_64-linux-musl-gcc \
    -DCMAKE_CXX_COMPILER=x86_64-linux-musl-g++ \
    -DCMAKE_EXE_LINKER_FLAGS="-static" \
    -DCMAKE_CXX_FLAGS="-march=x86-64" .
cmake --build build_amd64 --target dropshell --config Release -j"$JOBS"
cp build_amd64/dropshell output/dropshell.amd64

# Build for arm64 (musl)
echo "Building for arm64 (musl)..."
cmake -B build_arm64 -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_C_COMPILER=aarch64-linux-musl-gcc \
    -DCMAKE_CXX_COMPILER=aarch64-linux-musl-g++ \
    -DCMAKE_EXE_LINKER_FLAGS="-static" \
    -DCMAKE_CXX_FLAGS="-march=armv8-a" \
    -DCMAKE_SYSTEM_PROCESSOR=aarch64 .
cmake --build build_arm64 --target dropshell --config Release -j"$JOBS"
cp build_arm64/dropshell output/dropshell.arm64

# Sanity-check that both binaries exist.
if [ ! -f output/dropshell.amd64 ]; then
    echo "output/dropshell.amd64 not found!" >&2
    exit 1
fi

if [ ! -f output/dropshell.arm64 ]; then
    echo "output/dropshell.arm64 not found!" >&2
    exit 1
fi

echo "Builds complete:"
ls -lh output/dropshell.*
cd "$PREV_PWD"

View File

@ -6,103 +6,52 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "Script directory: $SCRIPT_DIR"
# Check for GITEA_TOKEN_DEPLOY or GITEA_TOKEN
if [ -n "$GITEA_TOKEN_DEPLOY" ]; then
TOKEN="$GITEA_TOKEN_DEPLOY"
elif [ -n "$GITEA_TOKEN" ]; then
TOKEN="$GITEA_TOKEN"
else
echo "GITEA_TOKEN_DEPLOY or GITEA_TOKEN environment variable not set!" >&2
exit 1
fi
TOKEN="${GITEA_TOKEN_DEPLOY:-${GITEA_TOKEN}}"
[ -z "$TOKEN" ] && { echo "Neither GITEA_TOKEN_DEPLOY nor GITEA_TOKEN environment variable set!" >&2; exit 1; }
$SCRIPT_DIR/multibuild.sh
BUILD_DIR=$SCRIPT_DIR/build
OLD_PWD=$PWD
cd $SCRIPT_DIR
if [ ! -f "output/dropshell.amd64" ]; then
echo "output/dropshell.amd64 not found!" >&2
echo "Please run multibuild.sh first." >&2
exit 1
fi
if [ ! -f "output/dropshell.arm64" ]; then
echo "output/dropshell.arm64 not found!" >&2
echo "Please run multibuild.sh first." >&2
exit 1
fi
TAG=$("$SCRIPT_DIR/output/dropshell.amd64" --version)
[ -z "$TAG" ] && echo "Failed to get version from dropshell.amd64" >&2 && exit 1
OLD_PWD="$PWD"
cd "$SCRIPT_DIR" || exit 1
TEMP_DIR=$(mktemp -d)
trap 'rm -rf "$TEMP_DIR" && cd "$OLD_PWD"' EXIT
ARCH=$(uname -m)
TAG=$("$SCRIPT_DIR/output/dropshell.${ARCH}" --version)
[ -z "$TAG" ] && echo "Failed to get version from dropshell.${ARCH}" >&2 && exit 1
echo "Publishing dropshell version $TAG"
# make sure we've commited.
git add "$SCRIPT_DIR/../" && git commit -m "dropshell release $TAG" && git push
# Find repo info from .git/config
REPO_URL=$(git config --get remote.origin.url)
if [[ ! $REPO_URL =~ gitea ]]; then
echo "Remote origin is not a Gitea repository: $REPO_URL" >&2
function die() {
echo "$@" >&2
exit 1
fi
# Extract base URL, owner, and repo
# Example: https://gitea.example.com/username/reponame.git
BASE_URL=$(echo "$REPO_URL" | sed -E 's#(https?://[^/]+)/.*#\1#')
OWNER=$(echo "$REPO_URL" | sed -E 's#.*/([^/]+)/[^/]+(\.git)?$#\1#')
REPO=$(echo "$REPO_URL" | sed -E 's#.*/([^/]+)(\.git)?$#\1#')
API_URL="$BASE_URL/api/v1/repos/$OWNER/$REPO"
# Create release
RELEASE_DATA=$(cat <<EOF
{
"tag_name": "$TAG",
"name": "$TAG",
"body": "dropshell release $TAG",
"draft": false,
"prerelease": false
}
EOF
)
RELEASE_ID=$(curl -s -X POST "$API_URL/releases" \
-H "Content-Type: application/json" \
-H "Authorization: token $TOKEN" \
-d "$RELEASE_DATA" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
# Function to find file in specified locations
find_file() {
local filename="$1"
shift # remove filename from args
local locations=("$@") # grab the rest of the args as locations
for loc in "${locations[@]}"; do
if [ -f "$loc/$filename" ]; then
echo "$loc/$filename"
return 0 # Found the file, return success
fi
done
echo "" # Return empty string if not found
return 1
}
curl -L -s -o "${TEMP_DIR}/sos" "https://getbin.xyz/sos" || die "Failed to download sos"
chmod +x "${TEMP_DIR}/sos"
if [ -z "$RELEASE_ID" ]; then
echo "Failed to create release on Gitea." >&2
exit 1
fi
# Upload binaries and install.sh
for FILE in dropshell.amd64 dropshell.arm64 install.sh server_autosetup.sh; do
if [ -f "output/$FILE" ]; then
filetoupload="output/$FILE"
elif [ -f "../$FILE" ]; then
filetoupload="../$FILE"
elif [ -f "$FILE" ]; then
filetoupload="$FILE"
else
echo "File $FILE not found!" >&2
continue
fi
for FILE in dropshell.x86_64 dropshell.aarch64 dropshell-install.sh dropshell-server-autosetup.sh; do
# Pass the locations directly to the find_file function
filetoupload=$(find_file "$FILE" "output" "../" ".")
[ -z "$filetoupload" ] && die "File $FILE not found in expected locations!"
# Auto-detect content type
ctype=$(file --mime-type -b "$filetoupload")
curl -s -X POST "$API_URL/releases/$RELEASE_ID/assets?name=$FILE" \
-H "Content-Type: $ctype" \
-H "Authorization: token $TOKEN" \
--data-binary @"$filetoupload"
echo "Uploaded $FILE to release $TAG as $ctype."
"${TEMP_DIR}/sos" upload getbin.xyz "$filetoupload" "$FILE:latest" "$FILE:TAG"
done
echo "Published dropshell version $TAG to $REPO_URL (tag $TAG) with binaries."
cd $OLD_PWD
echo "Published dropshell $TAG to getbin.xyz"

View File

@ -84,7 +84,7 @@ namespace dropshell
remote_command_script_file,
remotefile(server, user).service_env(service)}, user))
{
error << "Error: Required service directories not found on remote server" << std::endl;
error << "Required service directories not found on remote server" << std::endl;
info << "Is the service installed?" << std::endl;
return false;
}
@ -103,7 +103,7 @@ namespace dropshell
std::string local_backups_dir = localpath::backups();
if (local_backups_dir.empty())
{
error << "Error: Local backups directory not found" << std::endl;
error << "Local backups directory not found" << std::endl;
info << "Run 'dropshell edit' to configure DropShell" << std::endl;
return false;
}

View File

@ -103,6 +103,11 @@ namespace dropshell
if (server_name.empty() || template_name.empty() || service_name.empty())
return false;
if (!legal_service_name(service_name)) {
error << "Service name contains illegal characters: " << service_name << std::endl;
return false;
}
ServerConfig server_info(server_name);
if (!server_info.is_valid())
{

View File

@ -94,11 +94,11 @@ int edit_config()
std::string config_file = localfile::dropshell_json();
if (!edit_file(config_file, false) || !std::filesystem::exists(config_file))
return die("Error: Failed to edit config file.");
return return_die("Failed to edit config file.");
gConfig().load_config();
if (!gConfig().is_config_set())
return die("Error: Failed to load and parse edited config file!");
return return_die("Failed to load and parse edited config file!");
gConfig().save_config(true);
@ -112,7 +112,7 @@ int edit_config()
int edit_server(const std::string &server_name)
{
if (localpath::server(server_name).empty()) {
std::cerr << "Error: Server not found: " << server_name << std::endl;
error << "Server not found: " << server_name << std::endl;
return -1;
}
@ -130,6 +130,28 @@ int edit_server(const std::string &server_name)
return 0;
}
// Recursively list the contents of 'dir' on the info stream, depth-first.
// 'msg' is printed once before the first entry (so an empty directory
// prints nothing); files are listed at this level, then each subdirectory
// is recursed into with no header message.
void list_directory(std::string dir, std::string msg)
{
    bool printed_header = false;
    std::vector<std::string> subdirectories;

    for (const auto &entry : std::filesystem::directory_iterator(dir))
    {
        if (!printed_header)
        {
            if (!msg.empty())
                info << msg << std::endl;
            printed_header = true;
        }

        if (std::filesystem::is_directory(entry.path()))
            subdirectories.push_back(entry.path());
        else
            info << " " << entry.path() << std::endl;
    }

    // Recurse into subdirectories after this level's files.
    // (Loop variable renamed so it no longer shadows the 'dir' parameter.)
    for (const auto &subdir : subdirectories)
        list_directory(subdir, "");
}
// ------------------------------------------------------------------------------------------------
// edit service config
// ------------------------------------------------------------------------------------------------
@ -143,7 +165,14 @@ int edit_service_config(const std::string &server, const std::string &service)
}
if (edit_file(config_file, true) && std::filesystem::exists(config_file))
info << "To apply your changes, run:\n dropshell install " + server + " " + service << std::endl;
info << "Successfully edited service config file at " << config_file << std::endl;
std::string service_dir = localpath::service(server, service);
list_directory(service_dir, "You may wish to edit the other files in " + service_dir);
info << "Then to apply your changes, run:" << std::endl;
info << " dropshell uninstall " + server + " " + service << std::endl;
info << " dropshell install " + server + " " + service << std::endl;
return 0;
}

View File

@ -0,0 +1,83 @@
#include "command_registry.hpp"
#include "config.hpp"
#include "utils/utils.hpp"
#include "utils/directories.hpp"
#include "shared_commands.hpp"
#include "version.hpp"
#include "hash.hpp"
#include <unistd.h>
#include <cstring>
#include <iostream>
#include <sstream>
#include <filesystem>
#include <libassert/assert.hpp>
namespace dropshell {
void hash_autocomplete(const CommandContext& ctx);
int hash_handler(const CommandContext& ctx);
static std::vector<std::string> hash_name_list={"hash"};
// Static registration
// Registers the "hash" command with the global CommandRegistry at
// static-initialization time, via the constructor side effect of the
// file-local 'hash_command_register' instance below.
struct HashCommandRegister {
HashCommandRegister() {
CommandRegistry::instance().register_command({
hash_name_list,
hash_handler,
hash_autocomplete,
false, // hidden
false, // requires_config
false, // requires_install
0, // min_args (after command)
1, // max_args (after command)
"hash [FILE|DIRECTORY]",
"Hash a file or directory.",
// heredoc
R"(
Hash a file or directory recursively.
)"
});
}
} hash_command_register;
// Autocomplete for the "hash" command: when no argument has been typed yet,
// offer every entry of the current working directory as a candidate.
// With any argument already present, offers nothing.
void hash_autocomplete(const CommandContext& ctx) {
    if (!ctx.args.empty())
        return;

    // Emit all files and directories in the current directory, one per line.
    for (const auto& entry : std::filesystem::directory_iterator("."))
        rawout << entry.path().string() << std::endl;
}
int hash_handler(const CommandContext& ctx) {
std::filesystem::path path = safearg(ctx.args, 0);
if (path.empty())
path=std::filesystem::current_path();
if (!std::filesystem::exists(path))
{
error << "Does not exist: " << path.string() << std::endl;
return 1;
}
if (std::filesystem::is_directory(path))
{
// hash the directory recursively
uint64_t hash = hash_directory_recursive(path.string());
std::cout << hash << std::endl;
}
else
{
// hash the file
uint64_t hash = hash_file(path.string());
std::cout << hash << std::endl;
}
return 0;
}
} // namespace dropshell

View File

@ -18,6 +18,7 @@
#include <filesystem>
#include <libassert/assert.hpp>
#include "servers.hpp"
#include <sys/stat.h>
namespace dropshell
{
@ -68,7 +69,29 @@ namespace dropshell
std::string server = server_env.get_server_name();
LocalServiceInfo service_info = get_service_info(server_env.get_server_name(), service);
if (!SIvalid(service_info) || !service_info.service_template_hash_match)
if (!SIvalid(service_info))
{
error << "Failed to install - service information not valid." << std::endl;
return false;
}
if (!server_env.is_valid())
return false; // should never hit this.
std::string user = service_info.user;
std::string remote_service_path = remotepath(server,user).service(service);
ASSERT(!remote_service_path.empty(), "Install_Service: Remote service path is empty for " + service + " on " + server);
ASSERT(!user.empty(), "Install_Service: User is empty for " + service + " on " + server);
if (server_env.check_remote_dir_exists(remote_service_path, user))
{ // uninstall the old service before we update the config or template!
info << "Service " << service << " is already installed on " << server << std::endl;
shared_commands::uninstall_service(server_env, service);
}
if (!service_info.service_template_hash_match)
{
warning << "Service " << service << " is using an old template. Updating. " << std::endl;
if (!merge_updated_service_template(server_env.get_server_name(), service))
@ -87,9 +110,6 @@ namespace dropshell
maketitle("Installing " + service + " (" + service_info.template_name + ") on " + server);
if (!server_env.is_valid())
return false; // should never hit this.
// Check if template exists
template_info tinfo = gTemplateManager().get_template_info(service_info.template_name);
if (!tinfo.is_set())
@ -102,8 +122,6 @@ namespace dropshell
}
// Create service directory
std::string user = server_env.get_user_for_service(service);
std::string remote_service_path = remotepath(server,user).service(service);
std::string mkdir_cmd = "mkdir -p " + quote(remote_service_path);
if (!execute_ssh_command(server_env.get_SSH_INFO(user), sCommand("", mkdir_cmd, {}), cMode::Silent))
{
@ -111,14 +129,6 @@ namespace dropshell
return false;
}
// Check if rsync is installed on remote host
std::string check_rsync_cmd = "which rsync";
if (!execute_ssh_command(server_env.get_SSH_INFO(user), sCommand("", check_rsync_cmd, {}), cMode::Silent))
{
std::cerr << "rsync is not installed on the remote host" << std::endl;
return false;
}
// Copy template files
debug << "Copying: [LOCAL] " << tinfo.local_template_path() << std::endl
<< std::string(8, ' ') << "[REMOTE] " << remotepath(server,user).service_template(service) << "/" << std::endl;
@ -142,7 +152,13 @@ namespace dropshell
// Run install script
{
info << "Running " << service_info.template_name << " install script on " << server << "..." << std::endl;
server_env.run_remote_template_command(service, "install", {}, false, {});
shared_commands::cRemoteTempFolder remote_temp_folder(server_env, user);
if (!server_env.run_remote_template_command(service, "install", {}, false, {{"TEMP_DIR", remote_temp_folder.path()}}))
{
error << "Failed to run install script on " << server << std::endl;
return false;
}
}
// print health tick
@ -176,49 +192,107 @@ namespace dropshell
return trim(result);
}
// Install the dropshell bash tab-completion hook into the current user's
// ~/.bashrc. The segment is delimited by START/END marker lines;
// file_replace_or_add_segment presumably replaces an existing segment
// in-place (making repeated runs idempotent) — TODO confirm against its
// implementation. Returns 0 always.
int configure_autocomplete()
{
debug << "Ensuring dropshell autocomplete is registered in ~/.bashrc..." << std::endl;
std::filesystem::path bashrc = localpath::current_user_home() +"/.bashrc";
// NOTE: the raw string below is written verbatim into ~/.bashrc — its
// contents are runtime data, not C++ comments.
std::string autocomplete_script = R"(
#---DROPSHELL AUTOCOMPLETE START---
_dropshell_completions() {
local cur
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
# call dropshell to get the list of possiblities for the current argument. Supply all previous arguments.
local completions=($(dropshell autocomplete "${COMP_WORDS[@]:1:${COMP_CWORD}-1}"))
COMPREPLY=( $(compgen -W "${completions[*]}" -- ${cur}) )
return 0
}
# Register the completion function
complete -F _dropshell_completions dropshell
complete -F _dropshell_completions ds
#---DROPSHELL AUTOCOMPLETE END---
)";
file_replace_or_add_segment(bashrc.string(), autocomplete_script);
return 0;
}
// Ensure ~/.local/bin exists and, if it is not already on the current PATH,
// append a marker-delimited export line to ~/.bashrc. Returns 0 always.
int configure_localbin()
{
debug << "Ensuring ~/.local/bin is in the ~/.bashrc path..." << std::endl;
std::filesystem::path bashrc = localpath::current_user_home() +"/.bashrc";
std::filesystem::path localbin = localpath::current_user_home() + "/.local/bin";
std::filesystem::create_directories(localbin);
// check if already in path
// NOTE(review): this is a plain substring search of $PATH, so any PATH
// entry that merely contains this directory as a prefix counts as a
// match — confirm this is acceptable.
const char* env_p = std::getenv("PATH");
if (env_p) {
std::string path_str = env_p;
if (path_str.find(localbin.string()) == std::string::npos) {
std::string pathstr="#---DROPSHELL PATH START---\nexport PATH=\""+localbin.string()+":$PATH\"\n#---DROPSHELL PATH END---\n";
file_replace_or_add_segment(bashrc.string(), pathstr);
}
}
return 0;
}
int update_dropshell()
{
maketitle("Updating dropshell on this computer...");
configure_localbin();
configure_autocomplete();
// determine path to this executable
std::filesystem::path dropshell_path = std::filesystem::canonical("/proc/self/exe");
std::filesystem::path parent_path = dropshell_path.parent_path();
std::filesystem::path exe_path = std::filesystem::canonical("/proc/self/exe");
std::filesystem::path parent_path = exe_path.parent_path();
// determine the architecture of the system
std::string arch = shared_commands::get_arch();
std::string url = "https://gitea.jde.nz/public/dropshell/releases/download/latest/dropshell." + arch;
// download new version, preserve permissions and ownership
std::string bash_script;
bash_script += "docker run --rm -v " + parent_path.string() + ":/target";
bash_script += " gitea.jde.nz/public/debian-curl:latest";
bash_script += " sh -c \"";
bash_script += " curl -fsSL " + url + " -o /target/dropshell_temp &&";
bash_script += " chmod --reference=/target/dropshell /target/dropshell_temp &&";
bash_script += " chown --reference=/target/dropshell /target/dropshell_temp";
bash_script += "\"";
std::string cmd = "bash -c '" + bash_script + "'";
int rval = system(cmd.c_str());
if (rval != 0)
{
std::cerr << "Failed to download new version of dropshell." << std::endl;
// check that the user that owns the exe is the current user this process is running as.
struct stat st;
if (stat(exe_path.c_str(), &st) != 0) {
error << "Failed to stat dropshell executable: " << strerror(errno) << std::endl;
return -1;
}
uid_t current_uid = getuid();
if (st.st_uid != current_uid) {
warning << "Current user does not own the dropshell executable. Please run as the owner to update." << std::endl;
return -1;
}
shared_commands::cLocalTempFolder local_temp_folder;
std::filesystem::path temp_file = local_temp_folder.path() / "dropshell";
bool download_okay = download_file(url, temp_file);
if (!download_okay)
{
error << "Failed to download new version of dropshell." << std::endl;
return -1;
}
// make executable
chmod(temp_file.c_str(), 0755);
// check if the new version is the same as the old version
uint64_t new_hash = hash_file(parent_path / "dropshell_temp");
uint64_t old_hash = hash_file(parent_path / "dropshell");
uint64_t new_hash = hash_file(temp_file);
uint64_t old_hash = hash_file(exe_path);
if (new_hash == old_hash)
{
std::cout << "Confirmed dropshell is the latest version." << std::endl;
info << "Confirmed dropshell is the latest version." << std::endl;
return 0;
}
std::string runvercmd = (parent_path / "dropshell").string() + " version";
std::string runvercmd = exe_path.string() + " version";
std::string currentver = _exec(runvercmd.c_str());
runvercmd = (parent_path / "dropshell_temp").string() + " version";
runvercmd = temp_file.string() + " version";
std::string newver = _exec(runvercmd.c_str());
if (currentver >= newver)
@ -228,21 +302,15 @@ namespace dropshell
return 0;
}
return 0;
// move the new version to the old version.
std::filesystem::rename(exe_path, exe_path.parent_path() / "dropshell.old");
std::filesystem::rename(temp_file, exe_path);
std::string bash_script_2 = "docker run --rm -v " + parent_path.string() + ":/target gitea.jde.nz/public/debian-curl:latest " +
"sh -c \"mv /target/dropshell_temp /target/dropshell\"";
rval = system(bash_script_2.c_str());
if (rval != 0)
{
error << "Failed to install new version of dropshell." << std::endl;
return -1;
}
info << "Successfully updated " << dropshell_path << " to the latest " << arch << " version." << std::endl;
// remove the old version.
std::filesystem::remove(exe_path.parent_path() / "dropshell.old");
// execute the new version
execlp("bash", "bash", "-c", (parent_path / "dropshell").c_str(), "install", (char *)nullptr);
execlp("bash", "bash", "-c", (exe_path.parent_path() / "dropshell").string() + "install", (char *)nullptr);
error << "Failed to execute new version of dropshell." << std::endl;
return -1;
}
@ -281,7 +349,7 @@ namespace dropshell
info << "Installing agent for user " << user.user << " on " << server.get_server_name() << std::endl;
std::string agent_path = remotepath(server.get_server_name(),user.user).agent();
ASSERT(agent_path == user.dir+"/agent", "Agent path does not match user directory for "+user.user+"@" + server.get_server_name() + " : " + agent_path + " != " + user.dir);
ASSERT(agent_path == user.dir+"/agent", "Remote agent path does not match user directory for "+user.user+"@" + server.get_server_name() + " : " + agent_path + " != " + user.dir);
ASSERT(!agent_path.empty(), "Agent path is empty for " + user.user + "@" + server.get_server_name());
// now create the agent.
@ -295,7 +363,7 @@ namespace dropshell
bool okay = execute_ssh_command(server.get_SSH_INFO(user.user), sCommand(agent_path, "agent-install.sh",{}), cMode::Defaults | cMode::NoBB64, nullptr);
if (!okay)
{
error << "ERROR: Failed to install remote agent on " << server.get_server_name() << std::endl;
error << "Failed to install remote agent on " << server.get_server_name() << std::endl;
return 1;
}

View File

@ -147,7 +147,7 @@ void list_servers() {
void show_server_details(const std::string& server_name) {
ServerConfig env(server_name);
if (!env.is_valid()) {
error << "Error: Invalid server environment file: " << server_name << std::endl;
error << "Invalid server environment file: " << server_name << std::endl;
return;
}

View File

@ -60,7 +60,7 @@ namespace dropshell
std::string local_backups_dir = localpath::backups();
if (local_backups_dir.empty() || !std::filesystem::exists(local_backups_dir))
{
error << "Error: Local backups directory not found: " << local_backups_dir << std::endl;
error << "Local backups directory not found: " << local_backups_dir << std::endl;
return {};
}
@ -150,19 +150,19 @@ namespace dropshell
std::string local_backups_dir = localpath::backups();
if (local_backups_dir.empty() || !std::filesystem::exists(local_backups_dir))
{
error << "Error: Local backups directory not found: " << local_backups_dir << std::endl;
error << "Local backups directory not found: " << local_backups_dir << std::endl;
return 1;
}
std::string local_backup_file_path = (std::filesystem::path(local_backups_dir) / backup_details->get_filename()).string();
if (!std::filesystem::exists(local_backup_file_path))
{
error << "Error: Backup file not found at " << local_backup_file_path << std::endl;
error << "Backup file not found at " << local_backup_file_path << std::endl;
return 1;
}
if (backup_details->get_template_name() != service_info.template_name)
{
error << "Error: Backup template does not match service template. Can't restore." << std::endl;
error << "Backup template does not match service template. Can't restore." << std::endl;
info << "Backup template: " << backup_details->get_template_name() << std::endl;
info << "Service template: " << service_info.template_name << std::endl;
return 1;

View File

@ -75,9 +75,9 @@ namespace dropshell
// determine the architecture of the system
std::string arch;
#ifdef __aarch64__
arch = "arm64";
arch = "aarch64";
#elif __x86_64__
arch = "amd64";
arch = "x86_64";
#endif
return arch;
}
@ -107,6 +107,22 @@ namespace dropshell
return mPath;
}
// Creates a uniquely-named temporary folder under the system temp directory.
// The leaf name is a 10-character random alphanumeric string
// (random_alphanumeric_string is a project helper — presumably seeded per
// process; TODO confirm).
// NOTE(review): both temp_directory_path() and create_directories() use the
// throwing overloads, so construction can throw std::filesystem::filesystem_error.
cLocalTempFolder::cLocalTempFolder()
{
    mPath = std::filesystem::temp_directory_path() / random_alphanumeric_string(10);
    std::filesystem::create_directories(mPath);
}
// Recursively deletes the temporary folder created by the constructor.
// Uses the non-throwing remove_all overload: the throwing overload can raise
// std::filesystem::filesystem_error, and a destructor must never throw
// (doing so during stack unwinding terminates the program).
cLocalTempFolder::~cLocalTempFolder()
{
    std::error_code ec;
    std::filesystem::remove_all(mPath, ec); // best-effort cleanup; errors deliberately ignored
}
// Returns the absolute path of the managed temporary folder.
// The path is only valid for the lifetime of this object — the destructor
// removes the folder.
std::filesystem::path cLocalTempFolder::path() const
{
    return mPath;
}
// ------------------------------------------------------------------------------------------------
// get_all_services_status : SHARED COMMAND
// ------------------------------------------------------------------------------------------------

View File

@ -1,6 +1,8 @@
#ifndef SHARED_COMMANDS_HPP
#define SHARED_COMMANDS_HPP
#include <filesystem>
#include "servers.hpp"
#include "command_registry.hpp"
#include "servers.hpp"
@ -40,6 +42,16 @@ namespace dropshell
std::string mUser;
};
// RAII wrapper for a uniquely-named temporary folder on the local machine:
// created on construction, recursively deleted on destruction.
class cLocalTempFolder
{
public:
    cLocalTempFolder();  // create a temp folder on the local machine
    ~cLocalTempFolder(); // delete the temp folder on the local machine

    // Non-copyable: a copy would remove_all() the same folder twice and
    // leave the survivor pointing at a deleted path.
    cLocalTempFolder(const cLocalTempFolder &) = delete;
    cLocalTempFolder &operator=(const cLocalTempFolder &) = delete;

    std::filesystem::path path() const; // get the path to the temp folder on the local machine

private:
    std::filesystem::path mPath;
};
bool rsync_tree_to_remote(
const std::string &local_path,
const std::string &remote_path,

View File

@ -61,6 +61,12 @@ namespace dropshell
return false;
}
if (!legal_service_name(service))
{
error << "Service name contains illegal characters: " << service << std::endl;
return false;
}
LocalServiceInfo sinfo = get_service_info(server, service);
if (!SIvalid(sinfo))
{

View File

@ -51,6 +51,12 @@ namespace dropshell
return false;
}
if (!legal_service_name(service))
{
error << "Service name contains illegal characters: " << service << std::endl;
return false;
}
// run the start script.
bool started = server_env.run_remote_template_command(service, "start", {}, false, {});
@ -67,7 +73,7 @@ namespace dropshell
{
if (ctx.args.size() < 2)
{
std::cerr << "Error: Server name and service name are both required" << std::endl;
error << "Server name and service name are both required" << std::endl;
return 1;
}

View File

@ -51,6 +51,12 @@ namespace dropshell
return false;
}
if (!legal_service_name(service))
{
error << "Service name contains illegal characters: " << service << std::endl;
return false;
}
// run the stop script.
bool stopped = server_env.run_remote_template_command(service, "stop", {}, false, {});

View File

@ -37,7 +37,7 @@ bool config::load_config() { // load json config file.
}
catch (nlohmann::json::parse_error& ex)
{
std::cerr << "Error: Failed to parse config file: " << ex.what() << std::endl;
error << "Failed to parse config file: " << ex.what() << std::endl;
return false;
}
@ -69,7 +69,7 @@ bool config::save_config(bool create_aux_directories)
if (!mIsConfigSet)
{
std::string homedir = localpath::current_user_home();
std::string dropshell_base = homedir + "/.local/dropshell_files";
std::string dropshell_base = homedir + "/.dropshell";
mConfig["server_definition_paths"] = {
dropshell_base + "/servers"
@ -81,6 +81,10 @@ bool config::save_config(bool create_aux_directories)
"https://templates.dropshell.app"
};
mConfig["template_upload_token"] = "SECRETTOKEN";
mConfig["backups_path"] = {
dropshell_base + "/backups"
};
}
config_file << mConfig.dump(4);
@ -116,14 +120,14 @@ bool config::is_agent_installed()
return std::filesystem::exists(localfile::bb64());
}
std::vector<std::string> config::get_template_registry_urls() {
nlohmann::json template_registry_urls = mConfig["template_registry_URLs"];
std::vector<std::string> urls;
for (auto &url : template_registry_urls) {
if (url.is_string() && !url.empty())
urls.push_back(url);
std::vector<tRegistryEntry> config::get_template_registry_urls() {
nlohmann::json template_registries = mConfig["template_registries"];
std::vector<tRegistryEntry> registries;
for (auto &registry : template_registries) {
if (registry.is_object() && !registry.empty())
registries.push_back(tRegistryEntry(registry));
}
return urls;
return registries;
}
std::vector<std::string> config::get_local_template_paths()
@ -163,16 +167,43 @@ std::string config::get_template_create_path()
return paths[0];
}
std::string config::get_template_upload_url()
std::string config::get_backups_path()
{
std::vector<std::string> urls = get_template_registry_urls();
if (urls.empty())
nlohmann::json backups_path = mConfig["backups_path"];
if (backups_path.empty())
return "";
return urls[0];
if (backups_path.is_string())
return backups_path;
warning << "backups_path is not a string: " << backups_path << std::endl;
return "";
}
std::string config::get_template_upload_token() {
return mConfig["template_upload_token"];
// Parses one registry entry from a JSON object with string fields
// "name", "url" and "token"; unknown keys are ignored.
// Sets valid == true only when both url and name end up non-empty.
dropshell::tRegistryEntry::tRegistryEntry(nlohmann::json json)
{
    valid = false;
    if (json.is_object() && !json.empty()) {
        for (auto &[key, value] : json.items()) {
            // NB: this brace-less if guards the entire switch below —
            // non-string or empty values are skipped for every key.
            if (value.is_string() && !value.empty())
                // switchhash: project helper, presumably a constexpr string
                // hash enabling switch-on-string — TODO confirm collision-free
                // for these keys.
                switch (switchhash(key.c_str())) {
                case switchhash("name"):
                    name = value;
                    break;
                case switchhash("url"):
                    url = value;
                    break;
                case switchhash("token"):
                    token = value;
                    break;
                default:
                    break;
                }
        }
        valid = (!url.empty()&&!name.empty()); // token can be empty.
    }
}
// Out-of-line defaulted destructor (declared in config.hpp); nothing to
// release — all members clean up themselves.
tRegistryEntry::~tRegistryEntry() = default;
} // namespace dropshell

View File

@ -4,10 +4,23 @@
#include <vector>
#define JSON_INLINE_ALL
#include "json.hpp"
#include <nlohmann/json.hpp>
namespace dropshell {
// One template-registry entry from the "template_registries" array in the
// dropshell config JSON. Constructed from a JSON object; callers should
// check `valid` before using the entry.
class tRegistryEntry {
public:
    tRegistryEntry(nlohmann::json json); // parses {"name","url","token"} string fields
    ~tRegistryEntry();

public:
    std::string name;  // registry display name
    std::string url;   // registry base URL
    std::string token; // auth/upload token; may legitimately be empty
    bool valid;        // true when both name and url are non-empty
};
class config {
public:
config();
@ -19,14 +32,13 @@ class config {
bool is_config_set() const;
static bool is_agent_installed();
std::vector<std::string> get_template_registry_urls();
std::vector<tRegistryEntry> get_template_registry_urls();
std::vector<std::string> get_local_template_paths();
std::vector<std::string> get_local_server_definition_paths();
std::string get_server_create_path();
std::string get_template_create_path();
std::string get_template_upload_url();
std::string get_template_upload_token();
std::string get_backups_path();
private:
nlohmann::json mConfig;

10509
source/src/contrib/httplib.hpp Normal file

File diff suppressed because it is too large Load Diff

View File

@ -6,8 +6,8 @@ _dropshell_completions() {
cur="${COMP_WORDS[COMP_CWORD]}"
# call dropshell to get the list of possibilities for the current argument. Supply all previous arguments.
local completions=($(dropshell autocomplete "${COMP_WORDS[@]:1:${COMP_CWORD}-1}"))
COMPREPLY=( $(compgen -W "${completions[*]}" -- ${cur}) )
mapfile -t completions < <(dropshell autocomplete "${COMP_WORDS[@]:1:${COMP_CWORD}-1}")
mapfile -t COMPREPLY < <(compgen -W "${completions[*]}" -- "$cur")
return 0
}

View File

@ -80,7 +80,7 @@ int main(int argc, char* argv[]) {
}
catch (const std::exception& e) {
std::cerr << "Error: " << e.what() << std::endl;
error << "Uncaught Exception: " << e.what() << std::endl;
return 1;
}
}

View File

@ -104,7 +104,7 @@ namespace dropshell
catch (const std::exception &e)
{
error << "Failed to parse " << server_json_path << std::endl;
error << "Error: " << e.what() << std::endl;
error << "Exception: " << e.what() << std::endl;
mValid = false;
}
@ -149,12 +149,20 @@ namespace dropshell
std::string get_user_for_service(const std::string &server, const std::string &service)
{
if (!legal_service_name(service))
{
error << "Service name contains illegal characters: " + service << std::endl;
return "";
}
auto services_info = get_server_services_info(server);
auto it = std::find_if(services_info.begin(), services_info.end(),
[&service](const LocalServiceInfo &si)
{ return si.service_name == service; });
if (it != services_info.end() && SIvalid(*it))
return it->user;
debug << "Couldn't find user for service \"" << service << "\" on server \"" << server << "\"" << std::endl;
return "";
}
@ -241,7 +249,7 @@ namespace dropshell
bool okay = execute_ssh_command(sshinfo, scommand, cMode::Silent);
if (!okay)
{
std::cerr << "Error: Required items not found on remote server: " << file_names_str << std::endl;
error << "Required items not found on remote server: " << file_names_str << std::endl;
return false;
}
return true;
@ -341,10 +349,12 @@ namespace dropshell
std::map<std::string, std::string> env_vars;
if (!get_all_service_env_vars(mServerName, service_name, env_vars))
{
std::cerr << "Error: Failed to get all service env vars for " << service_name << std::endl;
error << "Failed to get all service env vars for " << service_name << std::endl;
return std::nullopt;
}
env_vars["HOST_NAME"] = get_SSH_HOST();
std::string argstr = "";
for (const auto &arg : args)
{
@ -358,7 +368,7 @@ namespace dropshell
if (sc.empty())
{
std::cerr << "Error: Failed to construct command for " << service_name << " " << command << std::endl;
error << "Failed to construct command for " << service_name << " " << command << std::endl;
return std::nullopt;
}
return sc;
@ -388,7 +398,7 @@ namespace dropshell
ServerConfig env(server_name);
if (!env.is_valid())
{
std::cerr << "Error: Invalid server environment file: " << entry.path().string() << std::endl;
error << "Invalid server environment file: " << entry.path().string() << std::endl;
continue;
}
servers.push_back(env);
@ -406,7 +416,7 @@ namespace dropshell
std::string server_existing_dir = localpath::server(server_name);
if (!server_existing_dir.empty())
{
error << "Error: Server name already exists: " << server_name << std::endl;
error << "Server name already exists: " << server_name << std::endl;
info << "Current server path: " << server_existing_dir << std::endl;
return false;
}
@ -415,7 +425,7 @@ namespace dropshell
auto lsdp = gConfig().get_local_server_definition_paths();
if (lsdp.empty() || lsdp[0].empty())
{
error << "Error: Local server definition path not found" << std::endl;
error << "Local server definition path not found" << std::endl;
info << "Run 'dropshell edit' to configure DropShell" << std::endl;
return false;
}
@ -434,7 +444,9 @@ namespace dropshell
server_env_file << " \"USER\": \"" << user << "\"," << std::endl;
server_env_file << " \"DIR\": \"" << "/home/" + user << "/.dropshell\"" << std::endl;
server_env_file << " }" << std::endl;
server_env_file << " ]" << std::endl;
server_env_file << " ]," << std::endl;
server_env_file << " \"HAS_DOCKER\": \"true\"," << std::endl;
server_env_file << " \"DOCKER_ROOTLESS\": \"false\"" << std::endl;
server_env_file << "}" << std::endl;
server_env_file.close();

View File

@ -34,8 +34,8 @@ namespace dropshell
std::vector<std::string> local_server_definition_paths = gConfig().get_local_server_definition_paths();
if (local_server_definition_paths.empty())
{
std::cerr << "Error: No local server definition paths found" << std::endl;
std::cerr << "Run 'dropshell edit' to configure DropShell" << std::endl;
error << "No local server definition paths found" << std::endl;
info << "Run 'dropshell edit' to configure DropShell" << std::endl;
return services;
}
@ -80,6 +80,9 @@ namespace dropshell
if (server_name.empty() || service_name.empty())
return LocalServiceInfo();
if (!legal_service_name(service_name))
return LocalServiceInfo();
service.service_name = service_name;
service.local_service_path = localpath::service(server_name, service_name);
@ -158,7 +161,7 @@ namespace dropshell
auto service_info = get_service_info(server_name, service_name);
if (service_info.local_template_path.empty())
{
std::cerr << "Error: Service not found: " << service_name << std::endl;
error << "Service not found: " << service_name << std::endl;
return commands;
}
@ -183,7 +186,7 @@ namespace dropshell
auto service_info = get_service_info(server_name, service_name);
if (service_info.local_template_path.empty())
{
std::cerr << "Error: Service not found: " << service_name << std::endl;
error << "Service not found: " << service_name << std::endl;
return backups;
}
@ -211,7 +214,7 @@ namespace dropshell
if (localpath::service(server_name, service_name).empty() || !fs::exists(localpath::service(server_name, service_name)))
{
std::cerr << "Error: Service not found: " << service_name << " on server " << server_name << std::endl;
error << "Service not found: " << service_name << " on server " << server_name << std::endl;
return false;
}
@ -230,6 +233,13 @@ namespace dropshell
warning << "Expected environment file not found: " << file << std::endl;
};
// add in some simple variables first, as others below may depend on/use these in bash.
// if we change these, we also need to update agent/_allservicesstatus.sh
all_env_vars["SERVER"] = server_name;
all_env_vars["SERVICE"] = service_name;
all_env_vars["DOCKER_CLI_HINTS"] = "false"; // turn off docker junk.
// Load environment files
load_env_file(localfile::service_env(server_name, service_name));
load_env_file(localfile::template_info_env(server_name, service_name));
@ -243,13 +253,10 @@ namespace dropshell
return false;
}
// add in some handy variables.
// if we change these, we also need to update agent/_allservicesstatus.sh
// more additional, these depend on others above.
all_env_vars["CONFIG_PATH"] = remotepath(server_name, user).service_config(service_name);
all_env_vars["SERVER"] = server_name;
all_env_vars["SERVICE"] = service_name;
all_env_vars["AGENT_PATH"] = remotepath(server_name, user).agent();
all_env_vars["DOCKER_CLI_HINTS"] = "false"; // turn off docker junk.
// determine template name.
auto it = all_env_vars.find("TEMPLATE");
@ -265,7 +272,7 @@ namespace dropshell
template_info tinfo = gTemplateManager().get_template_info(it->second);
if (!tinfo.is_set())
{
std::cerr << "Error: Template '" << it->second << "' not found" << std::endl;
error << "Template '" << it->second << "' not found" << std::endl;
return false;
}

View File

@ -163,7 +163,7 @@
ASSERT(mLoaded && mSources.size() > 0, "Template manager not loaded, or no template sources found.");
template_source_interface* source = get_source(template_name);
if (!source) {
std::cerr << "Error: Template '" << template_name << "' not found" << std::endl;
error << "Template '" << template_name << "' not found" << std::endl;
return false;
}
return source->template_command_exists(template_name, command);
@ -171,25 +171,30 @@
bool template_manager::create_template(const std::string &template_name) const
{
if (!legal_service_name(template_name)) {
error << "Template name contains illegal characters: " << template_name << std::endl;
return false;
}
// 1. Create a new directory in the user templates directory
std::vector<std::string> local_server_definition_paths = gConfig().get_local_server_definition_paths();
if (local_server_definition_paths.empty()) {
std::cerr << "Error: No local server definition paths found" << std::endl;
std::cerr << "Run 'dropshell edit' to configure DropShell" << std::endl;
error << "No local server definition paths found" << std::endl;
info << "Run 'dropshell edit' to configure DropShell" << std::endl;
return false;
}
auto info = get_template_info(template_name);
if (info.is_set()) {
std::cerr << "Error: Template '" << template_name << "' already exists at " << info.locationID() << std::endl;
auto tinfo = get_template_info(template_name);
if (tinfo.is_set()) {
error << "Template '" << template_name << "' already exists at " << tinfo.locationID() << std::endl;
return false;
}
auto local_template_paths = gConfig().get_local_template_paths();
if (local_template_paths.empty()) {
std::cerr << "Error: No local template paths found" << std::endl;
std::cerr << "Run 'dropshell edit' to add one to the DropShell config" << std::endl;
error << "No local template paths found" << std::endl;
info << "Run 'dropshell edit' to add one to the DropShell config" << std::endl;
return false;
}
std::string new_template_path = local_template_paths[0] + "/" + template_name;
@ -200,7 +205,7 @@
// 2. Copy the example template from the system templates directory
auto example_info = gTemplateManager().get_template_info("example-nginx");
if (!example_info.is_set()) {
std::cerr << "Error: Example template not found" << std::endl;
error << "Example template not found" << std::endl;
return false;
}
std::string example_template_path = example_info.local_template_path();
@ -222,7 +227,7 @@
std::string replacement_line = "TEMPLATE=" + template_name;
std::string service_env_path = new_template_path + "/config/" + filenames::template_info_env;
if (!replace_line_in_file(service_env_path, search_string, replacement_line)) {
std::cerr << "Error: Failed to replace TEMPLATE= line in the " << filenames::template_info_env <<" file" << std::endl;
error << "Failed to replace TEMPLATE= line in the " << filenames::template_info_env <<" file" << std::endl;
return false;
}
@ -278,7 +283,7 @@
bool template_manager::required_file(std::string path, std::string template_name)
{
if (!std::filesystem::exists(path)) {
std::cerr << "Error: " << path << " file not found in template - REQUIRED." << template_name << std::endl;
error << path << " file not found in template - REQUIRED." << template_name << std::endl;
return false;
}
return true;
@ -323,7 +328,7 @@
std::filesystem::path path = template_path + "/" + file;
auto perms = std::filesystem::status(path).permissions();
if ((perms & std::filesystem::perms::owner_exec) == std::filesystem::perms::none)
std::cerr << "Error: " << file << " is not executable" << std::endl;
error << file << " is not executable" << std::endl;
}
}
@ -347,18 +352,18 @@
// determine template name.
auto it = all_env_vars.find("TEMPLATE");
if (it == all_env_vars.end()) {
std::cerr << "Error: TEMPLATE variable not found in " << template_path << std::endl;
error << "TEMPLATE variable not found in " << template_path << std::endl;
return false;
}
std::string env_template_name = it->second;
if (env_template_name.empty()) {
std::cerr << "Error: TEMPLATE variable is empty in " << template_path << std::endl;
error << "TEMPLATE variable is empty in " << template_path << std::endl;
return false;
}
if (env_template_name != template_name) {
std::cerr << "Error: TEMPLATE variable is wrong in " << template_path << std::endl;
error << "TEMPLATE variable is wrong in " << template_path << std::endl;
return false;
}

View File

@ -4,8 +4,9 @@
#include <memory>
#include <set>
#include "config.hpp"
#define JSON_INLINE_ALL
#include "json.hpp"
#include <nlohmann/json.hpp>
namespace dropshell {
@ -50,7 +51,7 @@ class template_source_interface {
class template_source_registry : public template_source_interface {
public:
template_source_registry(std::string URL) : mURL(URL) {}
template_source_registry(tRegistryEntry registry) : mRegistry(registry) {}
~template_source_registry() {}
@ -59,11 +60,11 @@ class template_source_registry : public template_source_interface {
template_info get_template_info(const std::string& template_name);
bool template_command_exists(const std::string& template_name,const std::string& command);
std::string get_description() { return "Registry: " + mURL; }
std::string get_description() { return "Registry: " + mRegistry.name + " (" + mRegistry.url + ")"; }
private:
std::filesystem::path get_cache_dir();
private:
std::string mURL;
tRegistryEntry mRegistry;
std::vector<nlohmann::json> mTemplates; // cached list.
};

View File

@ -7,86 +7,85 @@
#include <string>
#include <filesystem>
namespace fs = std::filesystem;
namespace dropshell {
namespace dropshell
{
namespace localfile
{
namespace localfile {
std::string dropshell_json() {
// Try ~/.config/dropshell/dropshell.json
std::string homedir = localpath::current_user_home();
if (!homedir.empty()) {
fs::path user_path = fs::path(homedir) / ".config" / "dropshell" / filenames::dropshell_json;
return user_path.string();
std::string dropshell_json()
{
return localpath::dropshell_dir() + "/" + filenames::dropshell_json;
}
return std::string();
}
std::string server_json(const std::string &server_name) {
std::string serverpath = localpath::server(server_name);
return (serverpath.empty() ? "" : (fs::path(serverpath) / filenames::server_json).string());
}
std::string server_json(const std::string &server_name)
{
std::string serverpath = localpath::server(server_name);
return (serverpath.empty() ? "" : (fs::path(serverpath) / filenames::server_json).string());
}
std::string service_env(const std::string &server_name, const std::string &service_name) {
std::string servicepath = localpath::service(server_name, service_name);
return (servicepath.empty() ? "" : (fs::path(servicepath) / filenames::service_env).string());
}
std::string service_env(const std::string &server_name, const std::string &service_name)
{
std::string servicepath = localpath::service(server_name, service_name);
return (servicepath.empty() ? "" : (fs::path(servicepath) / filenames::service_env).string());
}
std::string template_info_env(const std::string &server_name, const std::string &service_name)
std::string template_info_env(const std::string &server_name, const std::string &service_name)
{
std::string servicepath = localpath::service(server_name, service_name);
return (servicepath.empty() ? "" : (fs::path(servicepath) / filenames::template_info_env).string());
}
std::string template_example()
{
return localpath::agent_local() + "/template_example";
}
std::string bb64()
{
return localpath::agent_local() + "/bb64";
}
} // namespace localfile
// ------------------------------------------------------------------------------------------
namespace localpath
{
std::string servicepath = localpath::service(server_name, service_name);
return (servicepath.empty() ? "" : (fs::path(servicepath) / filenames::template_info_env).string());
}
std::string template_example()
{
return localpath::agent_local() + "/template_example";
}
std::string dropshell_dir()
{
return current_user_home() + "/.dropshell";
}
std::string bb64()
{
return localpath::agent_local() + "/bb64";
}
} // namespace localfile
// ------------------------------------------------------------------------------------------
namespace localpath {
std::string server(const std::string &server_name) {
for (std::filesystem::path dir : gConfig().get_local_server_definition_paths())
std::string server(const std::string &server_name)
{
for (std::filesystem::path dir : gConfig().get_local_server_definition_paths())
if (fs::exists(dir / server_name))
return dir / server_name;
return "";
}
std::string service(const std::string &server_name, const std::string &service_name) {
std::string service(const std::string &server_name, const std::string &service_name)
{
std::string serverpath = localpath::server(server_name);
return ((serverpath.empty() || service_name.empty()) ? "" : (serverpath+"/"+service_name));
return ((serverpath.empty() || service_name.empty()) ? "" : (serverpath + "/" + service_name));
}
std::string remote_versions(const std::string &server_name, const std::string &service_name)
{
std::string template_cache_path = localpath::template_cache();
return ((template_cache_path.empty() || service_name.empty()) ? "" :
(template_cache_path+"/remote_versions/"+service_name+".json"));
}
std::string agent_local()
{
return current_user_home()+"/.local/dropshell_agent/agent-local";
return dropshell_dir() + "/agent-local";
}
std::string agent_remote()
{
return current_user_home() + "/.local/dropshell_agent/agent-remote";
return dropshell_dir() + "/agent-remote";
}
std::string current_user_home()
{
char * homedir = std::getenv("HOME");
char *homedir = std::getenv("HOME");
if (homedir)
{
std::filesystem::path homedir_path(homedir);
@ -96,37 +95,32 @@ namespace localpath {
return std::string();
}
std::string dropshell_files()
{
return current_user_home() + "/.local/dropshell_files";
return std::string();
}
std::string backups()
{
return dropshell_files() + "/backups";
if (!gConfig().is_config_set())
return "";
return gConfig().get_backups_path();
}
std::string temp_files()
{
return dropshell_files() + "/temp_files";
return dropshell_dir() + "/temp_files";
}
std::string template_cache()
{
return dropshell_files() + "template_cache";
return dropshell_dir() + "/template_cache";
}
bool create_directories()
{
std::vector<std::filesystem::path> paths = {
dropshell_files(),
dropshell_dir(),
agent_local(),
agent_remote(),
template_cache(),
backups(),
temp_files()
};
temp_files()};
for (auto &p : gConfig().get_local_server_definition_paths())
paths.push_back(p);
@ -139,7 +133,7 @@ namespace localpath {
return false;
}
} // namespace localpath
} // namespace localpath
//------------------------------------------------------------------------------------------------
// remote paths
@ -159,21 +153,26 @@ namespace localpath {
// |-- service.env (default service config)
// |-- (other config files for specific server&service)
remotefile::remotefile(const std::string &server_name, const std::string &user) :
mServer_name(server_name), mUser(user) {}
remotefile::remotefile(const std::string &server_name, const std::string &user) : mServer_name(server_name), mUser(user) {}
std::string remotefile::service_env(const std::string &service_name) const
{
return remotepath(mServer_name,mUser).service_config(service_name) + "/" + filenames::service_env;
return remotepath(mServer_name, mUser).service_config(service_name) + "/" + filenames::service_env;
}
remotepath::remotepath(const std::string &server_name, const std::string &user) : mServer_name(server_name), mUser(user) {}
std::string remotepath::DROPSHELL_DIR() const
{
return ServerConfig(mServer_name).get_user_dir(mUser);
{
try
{
return ServerConfig(mServer_name).get_user_dir(mUser);
} catch (const std::exception &e)
{
error << "Failed to get remote dropshell directory for " << mServer_name << "@" << mUser << std::endl;
error << "Exception: " << e.what() << std::endl;
return "";
}
}
std::string remotepath::services() const
@ -218,22 +217,21 @@ namespace localpath {
return (dsp.empty() ? "" : (dsp + "/agent"));
}
// ------------------------------------------------------------------------------------------
// Utility functions
// ------------------------------------------------------------------------------------------
// Utility functions
std::string get_parent(const std::filesystem::path path)
{
if (path.empty())
return std::string();
return path.parent_path().string();
}
std::string get_parent(const std::filesystem::path path)
{
if (path.empty())
return std::string();
return path.parent_path().string();
}
std::string get_child(const std::filesystem::path path)
{
if (path.empty())
return std::string();
return path.filename().string();
}
std::string get_child(const std::filesystem::path path)
{
if (path.empty())
return std::string();
return path.filename().string();
}
} // namespace dropshell

View File

@ -10,20 +10,15 @@ namespace dropshell {
//------------------------------------------------------------------------------------------------
// local user config directories
// ~/.config/dropshell/dropshell.json
// ~/.local/dropshell_agent
// ~/.dropshell
// |-- dropshell.json
// |-- agent-local
// |-- agent-install.sh
// |-- bb64 (only used locally, as it's for the local machine's architecture!)
// |-- template_example
// |-- agent-remote
// |-- (remote agent files, including _allservicesstatus.sh)
// ~/.local/dropshell_files
// |-- backups
// |-- katie-_-squashkiwi-_-squashkiwi-test-_-2025-04-28_21-23-59.tgz
// |-- temp_files
// |-- template_cache
// |-- templates
@ -35,6 +30,10 @@ namespace dropshell {
// | |-- .template_info.env
// | |-- (...other service config files...)
// backups_path
// |-- katie-_-squashkiwi-_-squashkiwi-test-_-2025-04-28_21-23-59.tgz
// server_definition_path
// |-- <server_name>
// |-- server.json
@ -53,7 +52,6 @@ namespace dropshell {
} // namespace filenames.
namespace localfile {
// ~/.config/dropshell/dropshell.json
std::string dropshell_json();
std::string server_json(const std::string &server_name);
std::string service_env(const std::string &server_name, const std::string &service_name);
@ -63,16 +61,15 @@ namespace dropshell {
} // namespace localfile
namespace localpath {
std::string dropshell_dir();
std::string server(const std::string &server_name);
std::string service(const std::string &server_name, const std::string &service_name);
std::string remote_versions(const std::string &server_name, const std::string &service_name);
std::string agent_local();
std::string agent_remote();
std::string current_user_home();
std::string dropshell_files();
std::string backups();
std::string temp_files();
std::string template_cache();

View File

@ -183,7 +183,7 @@ namespace dropshell
if (!rval && !hasFlag(mode, cMode::Silent))
{
error << "Error: Failed to execute ssh command:" << std::endl;
error << "Failed to execute ssh command:" << std::endl;
debug << ssh_cmd.str() + " " + remote_command.construct_cmd(remote_bb64_path) << std::endl;
}
return rval;

View File

@ -3,6 +3,8 @@
#define XXH_INLINE_ALL
#include "contrib/xxhash.hpp"
#include "output.hpp"
#include <fstream>
#include <filesystem>
#include <iostream>
@ -13,7 +15,7 @@ uint64_t hash_file(const std::string &path) {
// Create hash state
XXH64_state_t* const state = XXH64_createState();
if (state == nullptr) {
std::cerr << "Failed to create hash state" << std::endl;
error << "Failed to create hash state" << std::endl;
return 0;
}
@ -24,7 +26,7 @@ uint64_t hash_file(const std::string &path) {
// Open file
std::ifstream file(path, std::ios::binary);
if (!file.is_open()) {
std::cerr << "Failed to open file: " << path << std::endl;
error << "Failed to open file: " << path << std::endl;
XXH64_freeState(state);
return 0;
}
@ -34,7 +36,7 @@ uint64_t hash_file(const std::string &path) {
char buffer[buffer_size];
while (file.read(buffer, buffer_size)) {
if (XXH64_update(state, buffer, file.gcount()) == XXH_ERROR) {
std::cerr << "Failed to update hash" << std::endl;
error << "Failed to update hash" << std::endl;
XXH64_freeState(state);
return 0;
}
@ -43,7 +45,7 @@ uint64_t hash_file(const std::string &path) {
// Handle any remaining bytes
if (file.gcount() > 0) {
if (XXH64_update(state, buffer, file.gcount()) == XXH_ERROR) {
std::cerr << "Failed to update hash" << std::endl;
error << "Failed to update hash" << std::endl;
XXH64_freeState(state);
return 0;
}
@ -59,14 +61,14 @@ uint64_t hash_directory_recursive(const std::string &path) {
// Create hash state
XXH64_state_t* const state = XXH64_createState();
if (state == nullptr) {
std::cerr << "Failed to create hash state" << std::endl;
error << "Failed to create hash state" << std::endl;
return 0;
}
// Initialize state with seed 0
XXH64_hash_t const seed = 0; /* or any other value */
if (XXH64_reset(state, seed) == XXH_ERROR) {
std::cerr << "Failed to reset hash state" << std::endl;
error << "Failed to reset hash state" << std::endl;
XXH64_freeState(state);
return 0;
}
@ -81,7 +83,7 @@ uint64_t hash_directory_recursive(const std::string &path) {
}
}
} catch (const std::filesystem::filesystem_error& e) {
std::cerr << "Filesystem error: " << e.what() << std::endl;
error << "Filesystem error: " << e.what() << std::endl;
XXH64_freeState(state);
return 0;
}
@ -94,7 +96,7 @@ uint64_t hash_directory_recursive(const std::string &path) {
uint64_t hash_path(const std::string &path) {
if (!std::filesystem::exists(path)) {
std::cerr << "Path does not exist: " << path << std::endl;
error << "Path does not exist: " << path << std::endl;
return 0;
}
@ -103,28 +105,28 @@ uint64_t hash_path(const std::string &path) {
} else if (std::filesystem::is_regular_file(path)) {
return hash_file(path);
} else {
std::cerr << "Path is neither a file nor a directory: " << path << std::endl;
error << "Path is neither a file nor a directory: " << path << std::endl;
return 0;
}
}
// Hash a file or directory tree and report the result and elapsed time
// on the info stream. Demo/diagnostic helper; errors inside hash_path
// surface as a hash of 0.
// NOTE: this span contained both the superseded std::cout lines and
// their info-stream replacements (diff residue); only the current
// (info) versions are kept, otherwise every message would print twice.
void hash_demo(const std::string & path)
{
    info << "Hashing path: " << path << std::endl;
    auto start = std::chrono::high_resolution_clock::now();
    XXH64_hash_t hash = hash_path(path);
    auto end = std::chrono::high_resolution_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
    info << "Hash: " << hash << " (took " << duration.count() << "ms)" << std::endl;
}
// Print only the raw hash value of |path| (machine-readable output).
// Prints 0 and returns 1 if the path does not exist; otherwise prints
// the XXH64 hash and returns 0.
// NOTE: superseded std::cout duplicates from the diff residue removed;
// only the current (info) output lines are kept.
int hash_demo_raw(const std::string & path)
{
    if (!std::filesystem::exists(path)) {
        info << 0 << std::endl;
        return 1;
    }
    XXH64_hash_t hash = hash_path(path);
    info << hash << std::endl;
    return 0;
}

File diff suppressed because it is too large Load Diff

View File

@ -1,187 +0,0 @@
// __ _____ _____ _____
// __| | __| | | | JSON for Modern C++
// | | |__ | | | | | | version 3.12.0
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
//
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
// SPDX-License-Identifier: MIT
#ifndef INCLUDE_NLOHMANN_JSON_FWD_HPP_
#define INCLUDE_NLOHMANN_JSON_FWD_HPP_
#include <cstdint> // int64_t, uint64_t
#include <map> // map
#include <memory> // allocator
#include <string> // string
#include <vector> // vector
// #include <nlohmann/detail/abi_macros.hpp>
// __ _____ _____ _____
// __| | __| | | | JSON for Modern C++
// | | |__ | | | | | | version 3.12.0
// |_____|_____|_____|_|___| https://github.com/nlohmann/json
//
// SPDX-FileCopyrightText: 2013 - 2025 Niels Lohmann <https://nlohmann.me>
// SPDX-License-Identifier: MIT
// This file contains all macro definitions affecting or depending on the ABI
#ifndef JSON_SKIP_LIBRARY_VERSION_CHECK
#if defined(NLOHMANN_JSON_VERSION_MAJOR) && defined(NLOHMANN_JSON_VERSION_MINOR) && defined(NLOHMANN_JSON_VERSION_PATCH)
#if NLOHMANN_JSON_VERSION_MAJOR != 3 || NLOHMANN_JSON_VERSION_MINOR != 12 || NLOHMANN_JSON_VERSION_PATCH != 0
#warning "Already included a different version of the library!"
#endif
#endif
#endif
#define NLOHMANN_JSON_VERSION_MAJOR 3 // NOLINT(modernize-macro-to-enum)
#define NLOHMANN_JSON_VERSION_MINOR 12 // NOLINT(modernize-macro-to-enum)
#define NLOHMANN_JSON_VERSION_PATCH 0 // NOLINT(modernize-macro-to-enum)
#ifndef JSON_DIAGNOSTICS
#define JSON_DIAGNOSTICS 0
#endif
#ifndef JSON_DIAGNOSTIC_POSITIONS
#define JSON_DIAGNOSTIC_POSITIONS 0
#endif
#ifndef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON
#define JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON 0
#endif
#if JSON_DIAGNOSTICS
#define NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS _diag
#else
#define NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS
#endif
#if JSON_DIAGNOSTIC_POSITIONS
#define NLOHMANN_JSON_ABI_TAG_DIAGNOSTIC_POSITIONS _dp
#else
#define NLOHMANN_JSON_ABI_TAG_DIAGNOSTIC_POSITIONS
#endif
#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON
#define NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON _ldvcmp
#else
#define NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON
#endif
#ifndef NLOHMANN_JSON_NAMESPACE_NO_VERSION
#define NLOHMANN_JSON_NAMESPACE_NO_VERSION 0
#endif
// Construct the namespace ABI tags component
#define NLOHMANN_JSON_ABI_TAGS_CONCAT_EX(a, b, c) json_abi ## a ## b ## c
#define NLOHMANN_JSON_ABI_TAGS_CONCAT(a, b, c) \
NLOHMANN_JSON_ABI_TAGS_CONCAT_EX(a, b, c)
#define NLOHMANN_JSON_ABI_TAGS \
NLOHMANN_JSON_ABI_TAGS_CONCAT( \
NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS, \
NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON, \
NLOHMANN_JSON_ABI_TAG_DIAGNOSTIC_POSITIONS)
// Construct the namespace version component
#define NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT_EX(major, minor, patch) \
_v ## major ## _ ## minor ## _ ## patch
#define NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT(major, minor, patch) \
NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT_EX(major, minor, patch)
#if NLOHMANN_JSON_NAMESPACE_NO_VERSION
#define NLOHMANN_JSON_NAMESPACE_VERSION
#else
#define NLOHMANN_JSON_NAMESPACE_VERSION \
NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT(NLOHMANN_JSON_VERSION_MAJOR, \
NLOHMANN_JSON_VERSION_MINOR, \
NLOHMANN_JSON_VERSION_PATCH)
#endif
// Combine namespace components
#define NLOHMANN_JSON_NAMESPACE_CONCAT_EX(a, b) a ## b
#define NLOHMANN_JSON_NAMESPACE_CONCAT(a, b) \
NLOHMANN_JSON_NAMESPACE_CONCAT_EX(a, b)
#ifndef NLOHMANN_JSON_NAMESPACE
#define NLOHMANN_JSON_NAMESPACE \
nlohmann::NLOHMANN_JSON_NAMESPACE_CONCAT( \
NLOHMANN_JSON_ABI_TAGS, \
NLOHMANN_JSON_NAMESPACE_VERSION)
#endif
#ifndef NLOHMANN_JSON_NAMESPACE_BEGIN
#define NLOHMANN_JSON_NAMESPACE_BEGIN \
namespace nlohmann \
{ \
inline namespace NLOHMANN_JSON_NAMESPACE_CONCAT( \
NLOHMANN_JSON_ABI_TAGS, \
NLOHMANN_JSON_NAMESPACE_VERSION) \
{
#endif
#ifndef NLOHMANN_JSON_NAMESPACE_END
#define NLOHMANN_JSON_NAMESPACE_END \
} /* namespace (inline namespace) NOLINT(readability/namespace) */ \
} // namespace nlohmann
#endif
/*!
@brief namespace for Niels Lohmann
@see https://github.com/nlohmann
@since version 1.0.0
*/
NLOHMANN_JSON_NAMESPACE_BEGIN
/*!
@brief default JSONSerializer template argument
This serializer ignores the template arguments and uses ADL
([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl))
for serialization.
*/
template<typename T = void, typename SFINAE = void>
struct adl_serializer;
/// a class to store JSON values
/// @sa https://json.nlohmann.me/api/basic_json/
template<template<typename U, typename V, typename... Args> class ObjectType =
std::map,
template<typename U, typename... Args> class ArrayType = std::vector,
class StringType = std::string, class BooleanType = bool,
class NumberIntegerType = std::int64_t,
class NumberUnsignedType = std::uint64_t,
class NumberFloatType = double,
template<typename U> class AllocatorType = std::allocator,
template<typename T, typename SFINAE = void> class JSONSerializer =
adl_serializer,
class BinaryType = std::vector<std::uint8_t>, // cppcheck-suppress syntaxError
class CustomBaseClass = void>
class basic_json;
/// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document
/// @sa https://json.nlohmann.me/api/json_pointer/
template<typename RefStringType>
class json_pointer;
/*!
@brief default specialization
@sa https://json.nlohmann.me/api/json/
*/
using json = basic_json<>;
/// @brief a minimal map-like container that preserves insertion order
/// @sa https://json.nlohmann.me/api/ordered_map/
template<class Key, class T, class IgnoredLess, class Allocator>
struct ordered_map;
/// @brief specialization that maintains the insertion order of object keys
/// @sa https://json.nlohmann.me/api/ordered_json/
using ordered_json = basic_json<nlohmann::ordered_map>;
NLOHMANN_JSON_NAMESPACE_END
#endif // INCLUDE_NLOHMANN_JSON_FWD_HPP_

View File

@ -1,4 +1,8 @@
#include "utils.hpp"
#include "httplib.hpp"
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>
#include <fstream>
@ -10,6 +14,7 @@
#include <sys/ioctl.h>
#include <unistd.h>
#include <cctype>
#include <sstream>
namespace dropshell {
@ -35,7 +40,7 @@ bool replace_line_in_file(const std::string& file_path, const std::string& searc
std::string line;
if (!input_file.is_open()) {
std::cerr << "Error: Unable to open file: " << file_path << std::endl;
error << "Unable to open file: " << file_path << std::endl;
return false;
}
@ -50,7 +55,7 @@ bool replace_line_in_file(const std::string& file_path, const std::string& searc
std::ofstream output_file(file_path);
if (!output_file.is_open())
{
std::cerr << "Error: Unable to open file: " << file_path << std::endl;
error << "Unable to open file: " << file_path << std::endl;
return false;
}
for (const auto& modified_line : file_lines)
@ -151,7 +156,7 @@ int str2int(const std::string &str)
try {
return std::stoi(str);
} catch (const std::exception& e) {
std::cerr << "Error: Invalid integer string: [" << str << "]" << std::endl;
error << "Invalid integer string: [" << str << "]" << std::endl;
return 0;
}
}
@ -307,8 +312,9 @@ std::string requote(std::string str) {
}
int die(const std::string & msg) {
std::cerr << msg << std::endl;
// Report a fatal error on the error stream and return exit code 1.
// Unlike a classic die(), this does NOT terminate the process -- the
// caller is expected to propagate the returned 1 up to main() as the
// process exit status.
int return_die(const std::string & msg) {
    error << "Fatal error:" << std::endl;
    error << msg << std::endl;
    return 1;
}
@ -445,4 +451,223 @@ std::string tolower(const std::string& str) {
return result;
}
// Common utility function to make HTTP requests
// Outcome of one HTTP GET performed by make_http_request().
struct HttpResult {
    bool success;       // true only when the request connected AND returned HTTP 200
    int status;         // HTTP status code; 0 if the connection itself failed
    std::string body;   // response body (empty unless success)
    std::string error;  // human-readable failure description (empty on success)
};
// Perform a blocking HTTP(S) GET on |url| and return the outcome.
// Only a 200 response counts as success; connection failures and
// non-200 statuses are reported through HttpResult::error.
HttpResult make_http_request(const std::string& url) {
    try {
        // Split the URL into "scheme://host[:port]" and the request path.
        size_t protocol_end = url.find("://");
        if (protocol_end == std::string::npos) {
            return {false, 0, "", "Invalid URL format"};
        }
        size_t host_start = protocol_end + 3;
        size_t path_start = url.find('/', host_start);

        // BUGFIX: keep the scheme in the client base URL. httplib enables
        // TLS for "https://..." bases; the old code passed a bare host,
        // which silently downgraded https URLs to plain HTTP on port 80.
        std::string base = (path_start == std::string::npos)
                               ? url
                               : url.substr(0, path_start);
        std::string path = (path_start == std::string::npos)
                               ? "/"
                               : url.substr(path_start);

        httplib::Client cli(base);
        cli.set_connection_timeout(10); // seconds

        auto res = cli.Get(path);
        if (!res) {
            return {false, 0, "", "Failed to connect to server"};
        }
        if (res->status != 200) {
            return {false, res->status, "",
                    "HTTP request failed with status " + std::to_string(res->status)};
        }
        return {true, res->status, res->body, ""};
    } catch (const std::exception& e) {
        return {false, 0, "", std::string("Exception: ") + e.what()};
    }
}
// Download |url| and write the body to |destination| in binary mode.
// Returns false on any network or filesystem failure; diagnostics go
// to the warning stream.
bool download_file(const std::string &url, const std::string &destination) {
    auto result = make_http_request(url);
    if (!result.success) {
        warning << "Failed to download file from URL: " << url << std::endl;
        return false;
    }
    try {
        std::ofstream out_file(destination, std::ios::binary);
        if (!out_file) {
            warning << "Failed to open file for writing: " << destination << std::endl;
            return false;
        }
        out_file.write(result.body.c_str(),
                       static_cast<std::streamsize>(result.body.size()));
        out_file.close();
        // BUGFIX: write()/close() report disk-full and similar errors via
        // the stream state, not exceptions -- previously a failed write
        // still returned true. Check the state explicitly.
        if (!out_file) {
            warning << "Failed to write file: " << destination << std::endl;
            return false;
        }
        return true;
    } catch (const std::exception& e) {
        warning << "Failed to download file from URL: " << url << std::endl;
        warning << "Exception: " << e.what() << std::endl;
        return false;
    }
}
// Fetch |url| and parse the response body as JSON.
// Returns a default-constructed (null) json value when the request
// fails or the body is not valid JSON; diagnostics go to warning.
nlohmann::json get_json_from_url(const std::string &url) {
    const auto response = make_http_request(url);
    if (!response.success) {
        warning << "Failed to get JSON from URL: " << url << std::endl;
        return nlohmann::json();
    }
    try {
        return nlohmann::json::parse(response.body);
    } catch (const nlohmann::json::parse_error& e) {
        warning << "Failed to parse JSON from URL: " << url << std::endl;
        warning << "JSON: " << response.body << std::endl;
        return nlohmann::json();
    }
}
// Fetch |url| and return the raw response body.
// Returns an empty string (and logs a warning) on any failure.
std::string get_string_from_url(const std::string &url) {
    const auto response = make_http_request(url);
    if (response.success)
        return response.body;
    warning << "Failed to get string from URL: " << url << std::endl;
    return std::string();
}
// Two lines "match" when they are identical after stripping leading
// and trailing whitespace from both.
bool match_line(const std::string &line, const std::string &pattern) {
    const std::string lhs = trim(line);
    const std::string rhs = trim(pattern);
    return lhs == rhs;
}
// replace or append a block of text to a file, matching first and last lines if replacing.
// edits file in-place.
bool file_replace_or_add_segment(std::string filepath, std::string segment)
{
// Create a backup of the original file
std::string backup_path = filepath + ".bak";
try {
std::filesystem::copy_file(filepath, backup_path, std::filesystem::copy_options::overwrite_existing);
} catch (const std::exception& e) {
std::cerr << "Error creating backup file: " << e.what() << std::endl;
return false;
}
// Handle empty segment
if (segment.empty()) {
error << "Empty segment provided" << std::endl;
return false;
}
// split the segment into lines
std::vector<std::string> segment_lines = split(segment, "\n");
// remove empty lines
segment_lines.erase(std::remove_if(segment_lines.begin(), segment_lines.end(), [](const std::string& line) {
return trim(line).empty();
}), segment_lines.end());
// remove any lines that are just whitespace
segment_lines.erase(std::remove_if(segment_lines.begin(), segment_lines.end(), [](const std::string& line) { return trim(line).empty(); }), segment_lines.end());
// check that the segment has at least two lines
if (segment_lines.size() < 2) {
error << "Segment must contain at least two non-empty lines" << std::endl;
return false;
}
// Read the entire file into memory
std::ifstream input_file(filepath);
if (!input_file.is_open()) {
error << "Unable to open file: " << filepath << std::endl;
return false;
}
std::vector<std::string> file_lines;
std::string line;
while (std::getline(input_file, line)) {
file_lines.push_back(line);
}
input_file.close();
// Store original file size for verification
size_t original_size = file_lines.size();
if (original_size == 0) {
warning << "File is empty" << std::endl;
}
// Try to find the matching block
bool found_match = false;
for (size_t i = 0; i < file_lines.size(); i++) {
if (match_line(file_lines[i], segment_lines[0])) {
// Found potential start, look for end
for (size_t j = i + 1; j < file_lines.size(); j++) {
if (match_line(file_lines[j], segment_lines[segment_lines.size() - 1])) {
// Found matching block, replace it
file_lines.erase(file_lines.begin() + i, file_lines.begin() + j + 1);
file_lines.insert(file_lines.begin() + i, segment_lines.begin(), segment_lines.end());
found_match = true;
break;
}
}
if (found_match) break;
}
}
// If no match found, append the segment
if (!found_match) {
file_lines.insert(file_lines.end(), segment_lines.begin(), segment_lines.end());
}
// Write back to file
std::ofstream output_file(filepath);
if (!output_file.is_open()) {
error << "Unable to open file for writing: " << filepath << std::endl;
return false;
}
for (const auto& line : file_lines) {
output_file << line << "\n";
}
output_file.close();
// If everything succeeded, remove the backup
try {
std::filesystem::remove(backup_path);
} catch (const std::exception& e) {
warning << "Could not remove backup file: " << e.what() << std::endl;
}
return true;
}
// Check whether |service_name| contains only characters that are legal
// in a service name: ASCII alphanumerics plus '.', '_' and '-'.
// An empty name is vacuously legal (unchanged from the old behaviour).
bool legal_service_name(const std::string &service_name) {
    // A function-local static is initialised exactly once and is
    // thread-safe since C++11. The previous hand-rolled
    // "static bool initialized" flag was a data race on concurrent
    // first calls, and iterating the string LITERAL also marked the
    // trailing '\0' as legal, so a name with an embedded NUL byte
    // passed validation. Both issues are fixed here.
    static const std::string legal_chars =
        "0123456789"
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "abcdefghijklmnopqrstuvwxyz"
        "._-";
    return std::all_of(service_name.begin(), service_name.end(),
                       [](char c) { return legal_chars.find(c) != std::string::npos; });
}
} // namespace dropshell

View File

@ -5,6 +5,7 @@
#include <map>
#include "output.hpp"
#include <nlohmann/json.hpp>
namespace dropshell {
@ -44,7 +45,7 @@ int count_substring(const std::string &substring, const std::string &text);
std::string random_alphanumeric_string(int length);
int die(const std::string & msg);
int return_die(const std::string & msg);
std::string safearg(int argc, char *argv[], int index);
std::string safearg(const std::vector<std::string> & args, int index);
@ -61,4 +62,20 @@ std::string get_line_wrap(std::string & src, int maxchars);
std::string tolower(const std::string& str);
bool download_file(const std::string& url, const std::string& destination);
nlohmann::json get_json_from_url(const std::string& url);
std::string get_string_from_url(const std::string& url);
// replace or append a block of text to a file, matching first and last lines if replacing.
bool file_replace_or_add_segment(std::string filepath, std::string segment);
// Compile-time string hash (djb2-style, folded right-to-left) intended
// for switch statements over string commands:
//     switch (switchhash(cmd)) { case switchhash("help"): ... }
// NOTE(review): s[off] is a plain (possibly signed) char, so bytes
// >= 0x80 sign-extend before the XOR. Harmless as long as every call
// site uses ASCII keys -- confirm before relying on non-ASCII input.
constexpr unsigned int switchhash(const char *s, int off = 0)
{
    return !s[off] ? 5381 : (switchhash(s, off + 1) * 33) ^ s[off];
}
bool legal_service_name(const std::string & service_name);
} // namespace dropshell

12
source/test.sh Executable file
View File

@ -0,0 +1,12 @@
#!/bin/bash
# Smoke-test the freshly built dropshell binary for this machine's
# architecture: hash this script, then show the help screen.
# Aborts on the first failing command (set -e) or unset variable.
set -euo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
ARCH=$(uname -m)

# Whatever happens, restore the caller's working directory on exit.
PREV_DIR=$(pwd)
trap 'cd "$PREV_DIR"' EXIT

DROPSHELL_BIN="$SCRIPT_DIR/output/dropshell.${ARCH}"

"$DROPSHELL_BIN" hash "${SCRIPT_DIR}/test.sh"
"$DROPSHELL_BIN" help