Compare commits
v2025.0625 ... main
35 commits:

3eb78acf70
3d21d1da7d
344d62034c
78e41214d7
512ba200c2
5f04bd23a1
67bb7f747f
e55fe1a17c
2f056b8500
fe3c5d2ad9
2ab38fd053
9dda4e1649
d8883c4419
4c4257eebe
4bb85c63b8
e5f3569b2a
de200a5bb6
0f1cfdcc28
7f937c1090
d7964d3a78
719475e29f
70cb5c1b3a
facc6b73b0
9a24576e37
3f68f44e3d
dbe88a7121
00d1e86157
3388a46bf3
0f5421630a
50fb5f9da6
8e2611e362
a1b12fe177
902e68069a
0aafc2cc1e
2067caf253
CI workflow:

@@ -15,6 +15,8 @@ jobs:
       - linux/amd64
       - linux/arm64
     runs-on: ${{ matrix.platform }}
+    container:
+      image: gitea.jde.nz/public/dropshell-build-base:latest
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -26,7 +28,10 @@ jobs:
           password: ${{ secrets.DOCKER_PUSH_TOKEN }}
       - name: Build Test Publish All
         run: |
-          SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh
+          SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} \
+          RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} \
+          GITEA_CONTAINER_NAME=${{ env.JOB_CONTAINER_NAME }} \
+          ./buildtestpublish_all.sh
 
   test-install-from-scratch:
     needs: [build]
README.md (16 lines changed)

@@ -60,6 +60,8 @@ getpkg version
 
 ### Information
 
+- **`getpkg list`** - List all available packages with status
+- **`getpkg clean`** - Clean up orphaned configs and symlinks
 - **`getpkg version`** - Show getpkg version
 - **`getpkg help`** - Show detailed help
 - **`getpkg autocomplete`** - Show available commands for completion
@@ -99,14 +101,14 @@ Tools are automatically downloaded for your architecture, with fallback to universal
 ### Installing Popular Tools
 
 ```bash
-# Install development tools
-getpkg whatsdirty          # Fast grep alternative
-getpkg fd                  # Fast find alternative
-getpkg bat                 # Cat with syntax highlighting
+# Install available tools
+getpkg install dehydrate   # File to C++ code generator
+getpkg install bb64        # Bash base64 encoder/decoder
 
-# Install system utilities
-getpkg whatsdirty          # Check git repo status
-getpkg sos                 # Simple object storage client
+# Development tools (for repository development)
+getpkg install whatsdirty  # Check git repo status
+getpkg install sos         # Simple object storage client
+getpkg install gp          # Git push utility
 ```
 
 ### Publishing Your Own Tools
bb64/README.md:

@@ -26,6 +26,8 @@ Usage:
   bb64 -[i|d] BASE64COMMAND   Displays the decoded command
   bb64 -e COMMAND             Encodes the command and prints the result
   bb64 -u                     Updates bb64 to the latest version (uses docker)
+  bb64 -v                     Prints the version number
+  bb64 version                Prints the version number
 ```
 
 # Implementation Notes
bb64/build.sh:

@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest
 
+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
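Usage note: the NO_CACHE toggle above is read straight from the environment; a minimal invocation sketch (paths assume the repository root):

```bash
# Cached build (default)
./bb64/build.sh

# Force a full rebuild by passing --no-cache through to docker build
NO_CACHE=true ./bb64/build.sh
```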
bb64/clean.sh (new executable file, 24 lines)

@@ -0,0 +1,24 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="bb64"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output directory
+if [ -d "${SCRIPT_DIR}/output" ]; then
+    echo "Removing output directory..."
+    rm -rf "${SCRIPT_DIR}/output"
+fi
+
+# Remove Docker images related to this project
+echo "Removing Docker images..."
+docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
+
+# Remove Docker build cache
+echo "Pruning Docker build cache..."
+docker builder prune -f
+
+echo "✓ ${PROJECT} cleaned successfully"
bb64/publish.sh:

@@ -20,7 +20,14 @@ echo "Building version $VERSION" >&2
 # build release version
 export CMAKE_BUILD_TYPE="Release"
 
+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
@@ -84,9 +91,11 @@ if git rev-parse "$TAG" >/dev/null 2>&1; then
 fi
 
 # Check if tag exists on remote
+TAG_EXISTS_ON_REMOTE=false
 if git ls-remote --tags origin | grep -q "refs/tags/$TAG"; then
     echo "Tag $TAG already exists on remote - this is expected for multi-architecture builds"
     echo "Skipping tag creation and proceeding with release attachment..."
+    TAG_EXISTS_ON_REMOTE=true
 else
     echo "Creating new tag $TAG..."
     git tag -a "$TAG" -m "Release $TAG"
@@ -105,12 +114,20 @@ echo "Getting or creating release $TAG on Gitea..."
 EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
     -H "Authorization: token $RELEASE_WRITE_TOKEN")
 
+echo "Existing release check response: $EXISTING_RELEASE" >&2
+
 if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
     # Release already exists, get its ID
     RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
     echo "Release $TAG already exists with ID: $RELEASE_ID"
 else
-    # Create new release
+    # Create new release only if tag was just created
+    if [ "$TAG_EXISTS_ON_REMOTE" = true ]; then
+        echo "Tag exists on remote but no release found - this shouldn't happen" >&2
+        echo "API response was: $EXISTING_RELEASE" >&2
+        exit 1
+    fi
+
     echo "Creating new release $TAG on Gitea..."
     RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
         -H "Content-Type: application/json" \
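The tag-existence guard added above can be read in isolation; a minimal sketch, assuming a git checkout with a remote named origin and a placeholder TAG value:

```bash
#!/bin/bash
# Sketch of the remote-tag probe used by publish.sh (TAG is a placeholder).
TAG="v2025.0625"
TAG_EXISTS_ON_REMOTE=false
if git ls-remote --tags origin | grep -q "refs/tags/$TAG"; then
    TAG_EXISTS_ON_REMOTE=true
fi
echo "TAG_EXISTS_ON_REMOTE=$TAG_EXISTS_ON_REMOTE"
```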
bb64 C++ source:

@@ -150,6 +150,7 @@ Usage:
   bb64 -u                     Updates bb64 to the latest version (uses docker)
 
   bb64 -v                     Prints the version number
+  bb64 version                Prints the version number
 
 )" << std::endl;
     return -1;
@@ -161,7 +162,7 @@ Usage:
 {
     if (mode == "-u")
         return update_bb64();
-    else if (mode == "-v")
+    else if (mode == "-v" || mode == "version")
     {
         std::cout << VERSION << std::endl;
         return 0;
bb64/test.sh (new executable file, 135 lines)

@@ -0,0 +1,135 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+PROJECT="bb64"
+BB64="$SCRIPT_DIR/output/$PROJECT"
+TEST_DIR="$SCRIPT_DIR/test_temp"
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+# Test counters
+TESTS_PASSED=0
+TESTS_FAILED=0
+
+# Function to print test results
+print_test_result() {
+    local test_name="$1"
+    local result="$2"
+    if [ "$result" -eq 0 ]; then
+        echo -e "${GREEN}✓${NC} $test_name"
+        TESTS_PASSED=$((TESTS_PASSED + 1))
+    else
+        echo -e "${RED}✗${NC} $test_name"
+        TESTS_FAILED=$((TESTS_FAILED + 1))
+    fi
+}
+
+# Function to cleanup test artifacts
+cleanup() {
+    echo -e "\n${YELLOW}Cleaning up test artifacts...${NC}"
+    rm -rf "$TEST_DIR"
+}
+
+# Set up trap to ensure cleanup runs
+trap cleanup EXIT
+
+# Create test directory
+mkdir -p "$TEST_DIR"
+
+echo -e "${YELLOW}Running bb64 tests...${NC}\n"
+
+# Check if bb64 binary exists
+if [ ! -f "$BB64" ]; then
+    echo -e "${RED}Error: bb64 binary not found at $BB64${NC}"
+    echo "Please run ./build.sh first to build bb64"
+    exit 1
+fi
+
+if [ ! -x "$BB64" ]; then
+    echo -e "${RED}Error: bb64 binary is not executable${NC}"
+    exit 1
+fi
+
+echo "Using bb64 binary: $BB64"
+
+# Test 1: Version command with -v flag
+echo "Test 1: Version command (-v flag)"
+VERSION_OUTPUT=$("$BB64" -v 2>&1 || true)
+# Version output should be just the version number
+VERSION=$(echo "$VERSION_OUTPUT" | head -n 1)
+if [[ "$VERSION" =~ ^[0-9]{4}\.[0-9]{4}\.[0-9]{4}$ ]]; then
+    print_test_result "Version format with -v flag (YYYY.MMDD.HHMM)" 0
+else
+    print_test_result "Version format with -v flag (YYYY.MMDD.HHMM)" 1
+    echo "  Expected: YYYY.MMDD.HHMM format, got: '$VERSION'"
+fi
+
+# Test 2: Version command with 'version' argument
+printf "\nTest 2: Version command (version argument)\n"
+VERSION_OUTPUT2=$("$BB64" version 2>&1 || true)
+# Version output should be just the version number
+VERSION2=$(echo "$VERSION_OUTPUT2" | head -n 1)
+if [[ "$VERSION2" =~ ^[0-9]{4}\.[0-9]{4}\.[0-9]{4}$ ]]; then
+    print_test_result "Version format with 'version' argument (YYYY.MMDD.HHMM)" 0
+else
+    print_test_result "Version format with 'version' argument (YYYY.MMDD.HHMM)" 1
+    echo "  Expected: YYYY.MMDD.HHMM format, got: '$VERSION2'"
+fi
+
+# Test 3: Both version commands should return the same version
+printf "\nTest 3: Version consistency\n"
+if [ "$VERSION" = "$VERSION2" ]; then
+    print_test_result "Both -v and version return same version" 0
+else
+    print_test_result "Both -v and version return same version" 1
+    echo "  -v returned: '$VERSION'"
+    echo "  version returned: '$VERSION2'"
+fi
+
+# Test 4: Basic encoding test
+echo -e "\nTest 4: Basic encoding test"
+TEST_STRING="hello world"
+ENCODED_OUTPUT=$("$BB64" -e <<< "$TEST_STRING" 2>&1 || true)
+if [ -n "$ENCODED_OUTPUT" ]; then
+    print_test_result "Basic encoding produces output" 0
+else
+    print_test_result "Basic encoding produces output" 1
+fi
+
+# Test 5: Basic decoding test (using -d flag)
+echo -e "\nTest 5: Basic decoding test"
+# Encode "echo hello" and then decode it
+ENCODED_ECHO=$(echo "echo hello" | "$BB64" -e)
+if [ -n "$ENCODED_ECHO" ]; then
+    DECODED_OUTPUT=$("$BB64" -d "$ENCODED_ECHO" 2>&1 || true)
+    if [[ "$DECODED_OUTPUT" == *"echo hello"* ]]; then
+        print_test_result "Basic decoding works correctly" 0
+    else
+        print_test_result "Basic decoding works correctly" 1
+        echo "  Expected to contain 'echo hello', got: '$DECODED_OUTPUT'"
+    fi
+else
+    print_test_result "Basic decoding works correctly" 1
+    echo "  Failed to encode test string"
+fi
+
+cleanup
+
+# Print summary
+echo -e "\n${YELLOW}Test Summary:${NC}"
+echo -e "Tests passed: ${GREEN}${TESTS_PASSED}${NC}"
+echo -e "Tests failed: ${RED}${TESTS_FAILED}${NC}"
+
+if [ "$TESTS_FAILED" -eq 0 ]; then
+    echo -e "\n${GREEN}All tests passed!${NC}"
+    exit 0
+else
+    echo -e "\n${RED}Some tests failed!${NC}"
+    exit 1
+fi
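A usage sketch for the new test script (it expects the binary at bb64/output/bb64, so build first):

```bash
cd bb64
./build.sh    # produces output/bb64
./test.sh     # exit status 0 when all tests pass
```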
buildtestpublish_all.sh:

@@ -2,9 +2,6 @@
 set -uo pipefail # Remove -e to handle errors manually
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
 
-docker builder prune -f
-
-
 # Colors for output
 RED='\033[0;31m'
 GREEN='\033[0;32m'
@@ -237,15 +234,17 @@ function print_summary() {
     echo
 }
 
-title "🔨 BUILDING ALL TOOLS 🔨"
+title "🔨 BUILDING GETPKG 🔨"
 
-getpkg/build.sh
+"${SCRIPT_DIR}/getpkg/build.sh"
 export GETPKG="${SCRIPT_DIR}/getpkg/output/getpkg"
 if [ ! -f "$GETPKG" ]; then
     echo "Build failed."
     exit 1
 fi
 
+title "🔨 BUILDING ALL TOOLS 🔨"
+
 buildtestpublish_all
 
 print_summary
clean.sh (new executable file, 44 lines)

@@ -0,0 +1,44 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+
+echo "🧹 CLEANING ALL PROJECTS 🧹"
+echo
+
+# Get all project directories
+PROJECT_DIRS=$(find "$SCRIPT_DIR" -maxdepth 1 -type d \
+    -not -name ".*" \
+    -not -path "$SCRIPT_DIR" | sort)
+
+for dir in $PROJECT_DIRS; do
+    PROJECT_NAME=$(basename "$dir")
+
+    if [ -f "$dir/clean.sh" ]; then
+        echo "Cleaning $PROJECT_NAME..."
+        cd "$dir"
+        ./clean.sh
+        echo
+    else
+        echo "⚠️  No clean.sh found for $PROJECT_NAME, skipping..."
+        echo
+    fi
+done
+
+# Global Docker cleanup
+echo "🐳 Global Docker cleanup..."
+echo "Removing unused Docker images..."
+docker image prune -f
+
+echo "Removing unused Docker containers..."
+docker container prune -f
+
+echo "Removing unused Docker networks..."
+docker network prune -f
+
+echo "Removing unused Docker volumes..."
+docker volume prune -f
+
+echo
+echo "✅ All projects cleaned successfully!"
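A usage sketch (run from the repository root; note that the script also prunes unused Docker images, containers, networks, and volumes globally):

```bash
# Clean every project that ships a clean.sh, then prune Docker leftovers
./clean.sh

# Individual projects can be cleaned on their own, e.g.
./bb64/clean.sh
```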
dehydrate/Dockerfile.dropshell-build (deleted, 65 lines)

@@ -1,65 +0,0 @@
-ARG IMAGE_TAG
-FROM gitea.jde.nz/public/dropshell-build-base:latest AS builder
-
-ARG PROJECT
-ARG CMAKE_BUILD_TYPE=Debug
-
-# Set working directory
-WORKDIR /app
-
-SHELL ["/bin/bash", "-c"]
-
-# Create cache directories
-RUN mkdir -p /ccache
-
-# Set up ccache
-ENV CCACHE_DIR=/ccache
-ENV CCACHE_COMPILERCHECK=content
-ENV CCACHE_MAXSIZE=2G
-
-# Copy build files
-COPY CMakeLists.txt ./
-COPY src/version.hpp.in src/
-
-# Copy source files
-COPY src/ src/
-COPY contrib/ contrib/
-
-# Configure project
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    mkdir -p /build && \
-    cmake -G Ninja -S /app -B /build \
-    -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-    -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-    -DCMAKE_C_COMPILER_LAUNCHER=ccache \
-    -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
-    -DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
-    -DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
-    -DPROJECT_NAME="${PROJECT}" \
-    -DCMAKE_STRIP=OFF \
-    ${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
-
-# Build project
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    cmake --build /build
-
-# Copy the built executable to a regular directory for the final stage
-RUN --mount=type=cache,target=/build \
-    mkdir -p /output && \
-    find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
-    find /build -type f -executable -exec cp {} /output/${PROJECT} \;
-
-# if we're a release build, then run upx on the binary.
-RUN if [ "${CMAKE_BUILD_TYPE}" = "Release" ]; then \
-    upx /output/${PROJECT}; \
-    fi
-
-# Final stage that only contains the binary
-FROM scratch AS project
-
-ARG PROJECT
-
-# Copy the actual binary from the regular directory
-COPY --from=builder /output/${PROJECT} /${PROJECT}
dehydrate/build.sh:

@@ -1,22 +1,56 @@
 #!/bin/bash
 
 set -euo pipefail
 
+# Get script directory - handle different execution contexts
+if [ -n "${BASH_SOURCE[0]}" ]; then
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
-PROJECT="dehydrate"
+else
+SCRIPT_DIR="$( cd "$( dirname "$0" )" &> /dev/null && pwd )"
+fi
+PROJECT="$(basename "$(dirname "${SCRIPT_DIR}")")"
 
-export CMAKE_BUILD_TYPE="Debug"
+# Debug output for CI
+echo "${PROJECT} build script running from: ${SCRIPT_DIR}"
 
-rm -rf "${SCRIPT_DIR}/output"
+mkdir -p "${SCRIPT_DIR}/build"
 mkdir -p "${SCRIPT_DIR}/output"
 
-# make sure we have the latest base image.
-docker pull gitea.jde.nz/public/dropshell-build-base:latest
+# Run build in container with mounted directories
+COMMAND_TO_RUN="
+cmake -G Ninja -S . -B ./build \
+    -DCMAKE_BUILD_TYPE=\${CMAKE_BUILD_TYPE} \
+    -DPROJECT_NAME=${PROJECT} && \
+cmake --build ./build && \
+cp ./build/${PROJECT} ./output/
+"
 
-docker build \
-    -t "${PROJECT}-build" \
-    -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
-    --build-arg PROJECT="${PROJECT}" \
-    --build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
-    --output "${SCRIPT_DIR}/output" \
-    "${SCRIPT_DIR}"
+if [ -n "${GITEA_CONTAINER_NAME:-}" ]; then
+    echo "We're in a gitea container: ${GITEA_CONTAINER_NAME}"
+    echo "Checking cmake availability..."
+    if which cmake >/dev/null 2>&1; then
+        echo "cmake found, building directly"
+        cd "${SCRIPT_DIR}" && ${COMMAND_TO_RUN}
+    else
+        echo "cmake not found in CI environment, using docker build instead"
+        docker run --rm \
+            -v "${SCRIPT_DIR}:/app:ro" \
+            -v "${SCRIPT_DIR}/build:/app/build" \
+            -v "${SCRIPT_DIR}/output:/app/output" \
+            -e CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE:-Debug}" \
+            gitea.jde.nz/public/dropshell-build-base:latest \
+            bash -c "cd /app && ${COMMAND_TO_RUN}"
+    fi
+else
+    echo "Building in new docker container"
+    docker run --rm \
+        --user "$(id -u):$(id -g)" \
+        -v "${SCRIPT_DIR}:/app:ro" \
+        -v "${SCRIPT_DIR}/build:/app/build" \
+        -v "${SCRIPT_DIR}/output:/app/output" \
+        -e CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE:-Debug}" \
+        gitea.jde.nz/public/dropshell-build-base:latest \
+        bash -c "cd /app && ${COMMAND_TO_RUN}"
+fi
+
+echo "Build complete"
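A sketch of the two execution paths the rewritten script distinguishes (GITEA_CONTAINER_NAME is exported by the workflow above; the value below is a placeholder):

```bash
# Local machine: GITEA_CONTAINER_NAME is unset, so the build runs inside a
# fresh dropshell-build-base container via docker run.
./dehydrate/build.sh

# CI: the workflow sets GITEA_CONTAINER_NAME, so the script builds directly
# with cmake when the job container already provides it.
GITEA_CONTAINER_NAME=job-container ./dehydrate/build.sh
```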
dehydrate/clean.sh (new executable file, 18 lines)

@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="$(basename "$(dirname "${SCRIPT_DIR}")")"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output and build directories
+for dir in "output" "build"; do
+    if [ -d "${SCRIPT_DIR}/${dir}" ]; then
+        echo "Removing ${dir} directory..."
+        rm -rf "${SCRIPT_DIR:?}/${dir}"
+    fi
+done
+
+echo "✓ ${PROJECT} cleaned successfully"
dehydrate/publish.sh:

@@ -35,14 +35,7 @@ heading "Building ${PROJECT}"
 
 # build release version
 export CMAKE_BUILD_TYPE="Release"
+"${SCRIPT_DIR}/build.sh"
 
-docker build \
-    -t "${PROJECT}-build" \
-    -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
-    --build-arg PROJECT="${PROJECT}" \
-    --build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
-    --output "${OUTPUT}" \
-    "${SCRIPT_DIR}"
-
 [ -f "${OUTPUT}/${PROJECT}" ] || die "Build failed."
 
dehydrate test script:

@@ -4,7 +4,7 @@
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 PROJECT_DIR="$( cd "$SCRIPT_DIR/.." && pwd )"
 
-cd "$SCRIPT_DIR"
+cd "$SCRIPT_DIR" || exit 1
 
 # Clean up old test data and any existing binaries
 # Force removal with chmod to handle permission issues
getpkg/CMakeLists.txt:

@@ -36,13 +36,16 @@ target_include_directories(${PROJECT_NAME} PRIVATE
     src/common)
 
 # Find packages
-find_package(OpenSSL REQUIRED)
-find_package(Drogon CONFIG REQUIRED)
 find_package(nlohmann_json REQUIRED)
 
+# Add module path for FindCPRStatic
+list(APPEND CMAKE_MODULE_PATH "/usr/local/share/cmake/Modules")
+
+# Find packages
+find_package(nlohmann_json REQUIRED)
+find_package(CPRStatic REQUIRED)
+
 # Link libraries
 target_link_libraries(${PROJECT_NAME} PRIVATE
-    nlohmann_json::nlohmann_json Drogon::Drogon
-    /usr/local/lib/libpgcommon.a /usr/local/lib/libpgport.a
-    lzma dl)
+    nlohmann_json::nlohmann_json
+    cpr::cpr_static)
getpkg/Dockerfile.dropshell-build (deleted, 83 lines)

@@ -1,83 +0,0 @@
-ARG IMAGE_TAG
-FROM gitea.jde.nz/public/dropshell-build-base:latest AS builder
-
-ARG PROJECT
-ARG CMAKE_BUILD_TYPE=Debug
-
-# Set working directory
-WORKDIR /app
-
-SHELL ["/bin/bash", "-c"]
-
-# Create cache directories
-RUN mkdir -p /ccache
-
-# Set up ccache
-ENV CCACHE_DIR=/ccache
-ENV CCACHE_COMPILERCHECK=content
-ENV CCACHE_MAXSIZE=2G
-
-# Copy only build files first (for better layer caching)
-COPY CMakeLists.txt cmake_prebuild.sh ./
-COPY src/version.hpp.in src/
-
-# Run prebuild script early (this rarely changes)
-RUN bash cmake_prebuild.sh
-
-# Copy source files (this invalidates cache when source changes)
-COPY src/ src/
-
-# Configure project (this step is cached unless CMakeLists.txt changes)
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    mkdir -p /build && \
-    SSL_LIB=$(find /usr/local -name "libssl.a" | head -1) && \
-    CRYPTO_LIB=$(find /usr/local -name "libcrypto.a" | head -1) && \
-    echo "Found SSL: $SSL_LIB, Crypto: $CRYPTO_LIB" && \
-    cmake -G Ninja -S /app -B /build \
-    -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-    -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-    -DCMAKE_C_COMPILER_LAUNCHER=ccache \
-    -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
-    -DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
-    -DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
-    -DPROJECT_NAME="${PROJECT}" \
-    -DCMAKE_STRIP=OFF \
-    -DOPENSSL_SSL_LIBRARY="$SSL_LIB" \
-    -DOPENSSL_CRYPTO_LIBRARY="$CRYPTO_LIB" \
-    -DOPENSSL_INCLUDE_DIR=/usr/local/include \
-    ${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
-
-# Run prebuild script
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    cmake --build /build --target run_prebuild_script
-
-# Build project (ccache will help here when only some files change)
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    cmake --build /build
-
-# Copy the built executable to a regular directory for the final stage
-RUN --mount=type=cache,target=/build \
-    mkdir -p /output && \
-    find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
-    find /build -type f -executable -exec cp {} /output/${PROJECT} \;
-
-# if we're a release build, then run upx on the binary.
-RUN if [ "${CMAKE_BUILD_TYPE}" = "Release" ]; then \
-    upx /output/${PROJECT}; \
-    fi
-
-# Final stage that only contains the binary
-FROM scratch AS project
-
-ARG PROJECT
-
-# Copy CA certificates for SSL validation
-#COPY --from=builder /etc/ssl/certs/ /etc/ssl/certs/
-
-# Copy the actual binary from the regular directory
-COPY --from=builder /output/${PROJECT} /${PROJECT}
getpkg/build.sh:

@@ -1,25 +1,57 @@
 #!/bin/bash
 
 set -euo pipefail
 
+# Get script directory - handle different execution contexts
+if [ -n "${BASH_SOURCE[0]}" ]; then
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+else
+SCRIPT_DIR="$( cd "$( dirname "$0" )" &> /dev/null && pwd )"
+fi
+PROJECT="$(basename "$(dirname "${SCRIPT_DIR}")")"
 
-export CMAKE_BUILD_TYPE="Debug"
+# Debug output for CI
+echo "${PROJECT} build script running from: ${SCRIPT_DIR}"
 
-rm -rf "${SCRIPT_DIR}/output"
+mkdir -p "${SCRIPT_DIR}/build"
 mkdir -p "${SCRIPT_DIR}/output"
 
-PROJECT="getpkg"
+# Run build in container with mounted directories
+COMMAND_TO_RUN="cmake -G Ninja -S . -B ./build \
+    -DCMAKE_BUILD_TYPE=\${CMAKE_BUILD_TYPE} \
+    -DPROJECT_NAME=${PROJECT} && \
+    cmake --build ./build && \
+    cp ./build/${PROJECT} ./output/"
 
-# make sure we have the latest base image.
-docker pull gitea.jde.nz/public/dropshell-build-base:latest
+if [ -n "${GITEA_CONTAINER_NAME:-}" ]; then
+    echo "We're in a gitea container: ${GITEA_CONTAINER_NAME}"
+    echo "=== ENVIRONMENT DEBUG ==="
+    echo "Image info:"
+    cat /etc/os-release 2>/dev/null || cat /etc/alpine-release 2>/dev/null || echo "Unknown OS"
+    echo "Current PATH: $PATH"
+    echo "cmake location: $(which cmake 2>/dev/null || echo 'not found')"
+    echo "========================="
 
-docker build \
-    -t "${PROJECT}-build" \
-    -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
-    --build-arg PROJECT="${PROJECT}" \
-    --build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
-    --output "${SCRIPT_DIR}/output" \
-    "${SCRIPT_DIR}"
+    # Should now be running in Alpine build-base image with cmake available
+    if which cmake >/dev/null 2>&1; then
+        echo "cmake found at: $(which cmake)"
+        echo "Building directly in Alpine CI environment"
+        cd "${SCRIPT_DIR}" && ${COMMAND_TO_RUN}
+    else
+        echo "ERROR: cmake not found in build-base image!"
+        echo "This indicates the container directive isn't working"
+        exit 1
+    fi
+else
+    echo "Building in new docker container"
+    docker run --rm \
+        --user "$(id -u):$(id -g)" \
+        -v "${SCRIPT_DIR}:/app:ro" \
+        -v "${SCRIPT_DIR}/build:/app/build" \
+        -v "${SCRIPT_DIR}/output:/app/output" \
+        -e CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE:-Debug}" \
+        gitea.jde.nz/public/dropshell-build-base:latest \
+        bash -c "cd /app && ${COMMAND_TO_RUN}"
+fi
+
+echo "Build complete"
getpkg/clean.sh (new executable file, 18 lines)

@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="$(basename "$(dirname "${SCRIPT_DIR}")")"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output and build directories
+for dir in "output" "build"; do
+    if [ -d "${SCRIPT_DIR}/${dir}" ]; then
+        echo "Removing ${dir} directory..."
+        rm -rf "${SCRIPT_DIR:?}/${dir}"
+    fi
+done
+
+echo "✓ ${PROJECT} cleaned successfully"
getpkg/debug_test.txt (new file, 1 line)

@@ -0,0 +1 @@
+Debug content
getpkg/publish.sh:

@@ -34,15 +34,7 @@ heading "Building ${PROJECT}"
 
 # build release version
 export CMAKE_BUILD_TYPE="Release"
+"${SCRIPT_DIR}/build.sh"
 
-docker build \
-    -t "${PROJECT}-build" \
-    -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
-    --build-arg PROJECT="${PROJECT}" \
-    --build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
-    --output "${OUTPUT}" \
-    "${SCRIPT_DIR}"
-
 [ -f "${OUTPUT}/${PROJECT}" ] || die "Build failed."
 
 #--------------------------------------------------------------------------------
@ -1,530 +1,317 @@
|
|||||||
#include "GetbinClient.hpp"
|
#include "GetbinClient.hpp"
|
||||||
#include <drogon/HttpClient.h>
|
#include <cpr/cpr.h>
|
||||||
#include <trantor/net/EventLoop.h>
|
|
||||||
#include <openssl/ssl.h>
|
|
||||||
#include <openssl/opensslconf.h>
|
|
||||||
#include <fstream>
|
|
||||||
#include <sstream>
|
|
||||||
#include <nlohmann/json.hpp>
|
#include <nlohmann/json.hpp>
|
||||||
#include <string>
|
#include <fstream>
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
#include <thread>
|
#include <filesystem>
|
||||||
#include <chrono>
|
#include <sstream>
|
||||||
#include <cstdio>
|
|
||||||
#include <map>
|
|
||||||
#include <atomic>
|
|
||||||
#include <mutex>
|
|
||||||
#include <condition_variable>
|
|
||||||
#include <vector>
|
|
||||||
#include <ctime>
|
|
||||||
#include <algorithm>
|
|
||||||
#include <set>
|
#include <set>
|
||||||
|
#include <algorithm>
|
||||||
|
|
||||||
using json = nlohmann::json;
|
using json = nlohmann::json;
|
||||||
|
|
||||||
static constexpr const char* SERVER_HOST = "getpkg.xyz";
|
const std::string GetbinClient::SERVER_HOST = "getpkg.xyz";
|
||||||
|
|
||||||
// Initialize SSL to use only secure protocols
|
GetbinClient::GetbinClient() {
|
||||||
static class SSLInitializer {
|
// Initialize CPR (done automatically, but we could add global config here)
|
||||||
public:
|
|
||||||
SSLInitializer() {
|
|
||||||
// Disable SSL 2.0, 3.0, TLS 1.0, and TLS 1.1
|
|
||||||
SSL_load_error_strings();
|
|
||||||
SSL_library_init();
|
|
||||||
// Note: This doesn't completely silence the warning but ensures we're using secure protocols
|
|
||||||
}
|
|
||||||
} ssl_init;
|
|
||||||
|
|
||||||
static std::string find_ca_certificates() {
|
|
||||||
// Common CA certificate locations across different Linux distributions
|
|
||||||
const std::vector<std::string> ca_paths = {
|
|
||||||
"/etc/ssl/certs/ca-certificates.crt", // Debian/Ubuntu/Raspbian
|
|
||||||
"/etc/pki/tls/certs/ca-bundle.crt", // Fedora/RHEL/CentOS
|
|
||||||
"/etc/ssl/ca-bundle.pem", // OpenSUSE
|
|
||||||
"/etc/pki/tls/cert.pem", // Fedora/RHEL alternative
|
|
||||||
"/etc/ssl/certs/ca-bundle.crt", // Some distros
|
|
||||||
"/etc/ssl/cert.pem", // Alpine Linux
|
|
||||||
"/usr/local/share/certs/ca-root-nss.crt", // FreeBSD
|
|
||||||
"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", // CentOS/RHEL 7+
|
|
||||||
"/etc/ca-certificates/extracted/tls-ca-bundle.pem" // Arch Linux
|
|
||||||
};
|
|
||||||
|
|
||||||
for (const auto& path : ca_paths) {
|
|
||||||
std::ifstream file(path);
|
|
||||||
if (file.good()) {
|
|
||||||
file.close();
|
|
||||||
return path;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return "";
|
std::string GetbinClient::getUserAgent() const {
|
||||||
|
return "getpkg/1.0";
|
||||||
}
|
}
|
||||||
|
|
||||||
GetbinClient::GetbinClient() {}
|
bool GetbinClient::download(const std::string& toolName, const std::string& arch, const std::string& outPath,
|
||||||
|
ProgressCallback progressCallback) {
|
||||||
|
try {
|
||||||
|
std::string url = "https://" + SERVER_HOST + "/object/" + toolName + ":" + arch;
|
||||||
|
|
||||||
bool GetbinClient::download(const std::string& toolName, const std::string& arch, const std::string& outPath) {
|
cpr::Session session;
|
||||||
bool success = false;
|
session.SetUrl(cpr::Url{url});
|
||||||
bool done = false;
|
session.SetHeader(cpr::Header{{"User-Agent", getUserAgent()}});
|
||||||
std::mutex mtx;
|
session.SetTimeout(cpr::Timeout{30000}); // 30 seconds
|
||||||
std::condition_variable cv;
|
session.SetVerifySsl(cpr::VerifySsl{true});
|
||||||
|
|
||||||
std::thread worker([&]() {
|
// Add progress callback if provided
|
||||||
trantor::EventLoop loop;
|
if (progressCallback) {
|
||||||
|
session.SetProgressCallback(cpr::ProgressCallback{[progressCallback](cpr::cpr_off_t downloadTotal, cpr::cpr_off_t downloadNow,
|
||||||
auto client = drogon::HttpClient::newHttpClient(
|
cpr::cpr_off_t uploadTotal, cpr::cpr_off_t uploadNow,
|
||||||
"https://" + std::string(SERVER_HOST),
|
intptr_t userdata) -> bool {
|
||||||
&loop,
|
return progressCallback(static_cast<size_t>(downloadNow), static_cast<size_t>(downloadTotal));
|
||||||
false, // useOldTLS = false (disable TLS 1.0/1.1)
|
}});
|
||||||
true // validateCert = true
|
|
||||||
);
|
|
||||||
|
|
||||||
// Configure SSL certificates for HTTPS
|
|
||||||
std::string ca_path = find_ca_certificates();
|
|
||||||
if (!ca_path.empty()) {
|
|
||||||
// Use addSSLConfigs with proper parameter names for OpenSSL
|
|
||||||
std::vector<std::pair<std::string, std::string>> sslConfigs;
|
|
||||||
sslConfigs.push_back({"VerifyCAFile", ca_path});
|
|
||||||
client->addSSLConfigs(sslConfigs);
|
|
||||||
} else {
|
|
||||||
// If no CA certificates found, print warning but continue
|
|
||||||
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
client->enableCookies();
|
auto response = session.Get();
|
||||||
client->setUserAgent("getpkg/1.0");
|
|
||||||
|
|
||||||
std::string object_path = "/object/" + toolName + ":" + arch;
|
if (response.status_code == 200) {
|
||||||
|
|
||||||
auto req = drogon::HttpRequest::newHttpRequest();
|
|
||||||
req->setMethod(drogon::Get);
|
|
||||||
req->setPath(object_path);
|
|
||||||
|
|
||||||
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
|
|
||||||
std::lock_guard<std::mutex> lock(mtx);
|
|
||||||
if (result == drogon::ReqResult::Ok && response && response->getStatusCode() == drogon::k200OK) {
|
|
||||||
std::ofstream ofs(outPath, std::ios::binary);
|
std::ofstream ofs(outPath, std::ios::binary);
|
||||||
if (ofs) {
|
if (ofs) {
|
||||||
const auto& body = response->getBody();
|
ofs.write(response.text.data(), response.text.size());
|
||||||
ofs.write(body.data(), body.size());
|
return ofs.good();
|
||||||
success = ofs.good();
|
|
||||||
}
|
}
|
||||||
|
} else if (response.status_code == 404) {
|
||||||
|
// Not found - this is expected for arch fallback
|
||||||
|
return false;
|
||||||
} else {
|
} else {
|
||||||
std::cerr << "[GetbinClient::download] HTTP request failed." << std::endl;
|
std::cerr << "[GetbinClient::download] HTTP " << response.status_code << ": " << response.error.message << std::endl;
|
||||||
}
|
|
||||||
done = true;
|
|
||||||
cv.notify_one();
|
|
||||||
loop.quit();
|
|
||||||
}, 30.0); // 30 second timeout
|
|
||||||
|
|
||||||
loop.loop();
|
|
||||||
});
|
|
||||||
|
|
||||||
// Wait for completion
|
|
||||||
{
|
|
||||||
std::unique_lock<std::mutex> lock(mtx);
|
|
||||||
cv.wait(lock, [&] { return done; });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
worker.join();
|
return false;
|
||||||
return success;
|
} catch (const std::exception& e) {
|
||||||
}
|
std::cerr << "[GetbinClient::download] Exception: " << e.what() << std::endl;
|
||||||
|
|
||||||
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token) {
|
|
||||||
// Read file first
|
|
||||||
std::ifstream ifs(archivePath, std::ios::binary);
|
|
||||||
if (!ifs) {
|
|
||||||
std::cerr << "[GetbinClient::upload] Failed to open archive file: " << archivePath << std::endl;
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
std::string file_content((std::istreambuf_iterator<char>(ifs)), std::istreambuf_iterator<char>());
|
|
||||||
|
|
||||||
// Compose metadata
|
|
||||||
json metadata = { {"labeltags", json::array()} };
|
|
||||||
std::string filename = archivePath.substr(archivePath.find_last_of("/\\") + 1);
|
|
||||||
size_t dot = filename.find('.');
|
|
||||||
std::string labeltag = dot != std::string::npos ? filename.substr(0, dot) : filename;
|
|
||||||
metadata["labeltags"].push_back(labeltag);
|
|
||||||
|
|
||||||
bool success = false;
|
|
||||||
bool done = false;
|
|
||||||
std::mutex mtx;
|
|
||||||
std::condition_variable cv;
|
|
||||||
|
|
||||||
std::thread worker([&]() {
|
|
||||||
trantor::EventLoop loop;
|
|
||||||
|
|
||||||
auto client = drogon::HttpClient::newHttpClient(
|
|
||||||
"https://" + std::string(SERVER_HOST),
|
|
||||||
&loop,
|
|
||||||
false, // useOldTLS = false (disable TLS 1.0/1.1)
|
|
||||||
true // validateCert = true
|
|
||||||
);
|
|
||||||
|
|
||||||
// Configure SSL certificates
|
|
||||||
std::string ca_path = find_ca_certificates();
|
|
||||||
std::vector<std::pair<std::string, std::string>> sslConfigs;
|
|
||||||
if (!ca_path.empty()) {
|
|
||||||
sslConfigs.push_back({"VerifyCAFile", ca_path});
|
|
||||||
}
|
|
||||||
// Configure SSL for secure connections
|
|
||||||
client->addSSLConfigs(sslConfigs);
|
|
||||||
|
|
||||||
if (ca_path.empty()) {
|
|
||||||
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
client->enableCookies();
|
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash,
|
||||||
client->setUserAgent("getpkg/1.0");
|
const std::string& token, ProgressCallback progressCallback) {
|
||||||
|
|
||||||
// Create upload file from memory content
|
|
||||||
// First save content to a temporary file since UploadFile expects a file path
|
|
||||||
std::string temp_file = "/tmp/getpkg_upload_" + std::to_string(std::time(nullptr)) + ".tgz";
|
|
||||||
std::ofstream temp_ofs(temp_file, std::ios::binary);
|
|
||||||
if (!temp_ofs) {
|
|
||||||
std::cerr << "[GetbinClient::upload] Failed to create temporary file: " << temp_file << std::endl;
|
|
||||||
success = false;
|
|
||||||
done = true;
|
|
||||||
cv.notify_one();
|
|
||||||
loop.quit();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
temp_ofs.write(file_content.data(), file_content.size());
|
|
||||||
temp_ofs.close();
|
|
||||||
|
|
||||||
// Create upload request with file
|
|
||||||
drogon::UploadFile upload_file(temp_file);
|
|
||||||
|
|
||||||
auto req = drogon::HttpRequest::newFileUploadRequest({upload_file});
|
|
||||||
req->setMethod(drogon::Put);
|
|
||||||
req->setPath("/upload");
|
|
||||||
req->addHeader("Authorization", "Bearer " + token);
|
|
||||||
|
|
||||||
// Add metadata as form parameter
|
|
||||||
req->setParameter("metadata", metadata.dump());
|
|
||||||
|
|
||||||
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
|
|
||||||
std::lock_guard<std::mutex> lock(mtx);
|
|
||||||
if (result == drogon::ReqResult::Ok && response) {
|
|
||||||
int status_code = static_cast<int>(response->getStatusCode());
|
|
||||||
std::string response_body(response->getBody());
|
|
||||||
|
|
||||||
if (status_code == 200 || status_code == 201) {
|
|
||||||
try {
|
try {
|
||||||
auto resp_json = json::parse(response_body);
|
std::string url = "https://" + SERVER_HOST + "/upload";
|
||||||
if (resp_json.contains("url")) outUrl = resp_json["url"].get<std::string>();
|
|
||||||
if (resp_json.contains("hash")) outHash = resp_json["hash"].get<std::string>();
|
|
||||||
success = true;
|
|
||||||
} catch (const std::exception& e) {
|
|
||||||
std::cerr << "[GetbinClient::upload] Failed to parse JSON response: " << e.what() << std::endl;
|
|
||||||
std::cerr << "[GetbinClient::upload] Response body: " << response_body << std::endl;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
std::cerr << "[GetbinClient::upload] HTTP error: status code " << status_code << std::endl;
|
|
||||||
std::cerr << "[GetbinClient::upload] Response body: " << response_body << std::endl;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
std::cerr << "[GetbinClient::upload] HTTP /upload request failed." << std::endl;
|
|
||||||
}
|
|
||||||
done = true;
|
|
||||||
cv.notify_one();
|
|
||||||
loop.quit();
|
|
||||||
}, 60.0); // 60 second timeout
|
|
||||||
|
|
||||||
loop.loop();
|
cpr::Session session;
|
||||||
|
session.SetUrl(cpr::Url{url});
|
||||||
|
session.SetHeader(cpr::Header{
|
||||||
|
{"User-Agent", getUserAgent()},
|
||||||
|
{"Authorization", "Bearer " + token}
|
||||||
|
});
|
||||||
|
session.SetTimeout(cpr::Timeout{300000}); // 5 minutes for uploads
|
||||||
|
session.SetVerifySsl(cpr::VerifySsl{true});
|
||||||
|
|
||||||
// Clean up temporary file
|
|
||||||
std::remove(temp_file.c_str());
|
// Extract tool name and arch from archive path for labeltags
|
||||||
|
// Archive path format: /path/to/tool-name:arch.tgz or similar
|
||||||
|
std::string archiveName = std::filesystem::path(archivePath).filename().string();
|
||||||
|
std::string toolNameArch = archiveName;
|
||||||
|
if (toolNameArch.ends_with(".tgz")) {
|
||||||
|
toolNameArch = toolNameArch.substr(0, toolNameArch.length() - 4);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create metadata JSON with labeltags
|
||||||
|
json metadata;
|
||||||
|
metadata["labeltags"] = json::array({toolNameArch});
|
||||||
|
|
||||||
|
// Set up multipart form with file and metadata
|
||||||
|
session.SetMultipart(cpr::Multipart{
|
||||||
|
cpr::Part{"file", cpr::File{archivePath}},
|
||||||
|
cpr::Part{"metadata", metadata.dump(), "application/json"}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Wait for completion
|
// Add progress callback if provided
|
||||||
{
|
if (progressCallback) {
|
||||||
std::unique_lock<std::mutex> lock(mtx);
|
session.SetProgressCallback(cpr::ProgressCallback{[progressCallback](cpr::cpr_off_t downloadTotal, cpr::cpr_off_t downloadNow,
|
||||||
cv.wait(lock, [&] { return done; });
|
cpr::cpr_off_t uploadTotal, cpr::cpr_off_t uploadNow,
|
||||||
|
intptr_t userdata) -> bool {
|
||||||
|
return progressCallback(static_cast<size_t>(uploadNow), static_cast<size_t>(uploadTotal));
|
||||||
|
}});
|
||||||
}
|
}
|
||||||
|
|
||||||
worker.join();
|
auto response = session.Put();
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool GetbinClient::getHash(const std::string& toolName, const std::string& arch, std::string& outHash) {
|
if (response.status_code == 200) {
|
||||||
bool success = false;
|
|
||||||
bool done = false;
|
|
||||||
std::mutex mtx;
|
|
||||||
std::condition_variable cv;
|
|
||||||
|
|
||||||
std::thread worker([&]() {
|
|
||||||
trantor::EventLoop loop;
|
|
||||||
|
|
||||||
auto client = drogon::HttpClient::newHttpClient(
|
|
||||||
"https://" + std::string(SERVER_HOST),
|
|
||||||
&loop,
|
|
||||||
false, // useOldTLS = false (disable TLS 1.0/1.1)
|
|
||||||
true // validateCert = true
|
|
||||||
);
|
|
||||||
|
|
||||||
// Configure SSL certificates
|
|
||||||
std::string ca_path = find_ca_certificates();
|
|
||||||
std::vector<std::pair<std::string, std::string>> sslConfigs;
|
|
||||||
if (!ca_path.empty()) {
|
|
||||||
sslConfigs.push_back({"VerifyCAFile", ca_path});
|
|
||||||
}
|
|
||||||
// Configure SSL for secure connections
|
|
||||||
client->addSSLConfigs(sslConfigs);
|
|
||||||
|
|
||||||
if (ca_path.empty()) {
|
|
||||||
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
|
|
||||||
}
|
|
||||||
|
|
||||||
client->enableCookies();
|
|
||||||
client->setUserAgent("getpkg/1.0");
|
|
||||||
|
|
||||||
std::string hash_path = "/hash/" + toolName + ":" + arch;
|
|
||||||
|
|
||||||
auto req = drogon::HttpRequest::newHttpRequest();
|
|
||||||
req->setMethod(drogon::Get);
|
|
||||||
req->setPath(hash_path);
|
|
||||||
|
|
||||||
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
|
|
||||||
std::lock_guard<std::mutex> lock(mtx);
|
|
||||||
if (result == drogon::ReqResult::Ok && response && response->getStatusCode() == drogon::k200OK) {
|
|
||||||
std::string response_body(response->getBody());
|
|
||||||
|
|
||||||
// Try to parse hash from response body
|
|
||||||
try {
|
try {
|
||||||
// Try JSON first
|
auto resp_json = json::parse(response.text);
|
||||||
auto resp_json = json::parse(response_body);
|
if (resp_json.contains("hash") && resp_json.contains("result") && resp_json["result"] == "success") {
|
||||||
if (resp_json.contains("hash")) {
|
outUrl = "https://" + SERVER_HOST + "/object/" + resp_json["hash"].get<std::string>();
|
||||||
outHash = resp_json["hash"].get<std::string>();
|
outHash = resp_json["hash"].get<std::string>();
|
||||||
success = true;
|
return true;
|
||||||
}
|
}
|
||||||
} catch (...) {
|
} catch (const json::exception& e) {
|
||||||
// Not JSON, treat as plain text
|
// Try to extract from plain text response
|
||||||
outHash = response_body;
|
outUrl = "";
|
||||||
|
outHash = response.text;
|
||||||
// Remove trailing newline if present
|
// Remove trailing newline if present
|
||||||
if (!outHash.empty() && outHash.back() == '\n') {
|
if (!outHash.empty() && outHash.back() == '\n') {
|
||||||
outHash.pop_back();
|
outHash.pop_back();
|
||||||
}
|
}
|
||||||
success = !outHash.empty();
|
return !outHash.empty();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
std::cerr << "[GetbinClient::upload] HTTP " << response.status_code << ": " << response.error.message << std::endl;
|
||||||
|
if (!response.text.empty()) {
|
||||||
|
std::cerr << "[GetbinClient::upload] Response: " << response.text << std::endl;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
done = true;
|
|
||||||
cv.notify_one();
|
|
||||||
loop.quit();
|
|
||||||
}, 10.0); // 10 second timeout
|
|
||||||
|
|
||||||
loop.loop();
|
return false;
|
||||||
});
|
} catch (const std::exception& e) {
|
||||||
|
std::cerr << "[GetbinClient::upload] Exception: " << e.what() << std::endl;
|
||||||
// Wait for completion
|
return false;
|
||||||
{
|
}
|
||||||
std::unique_lock<std::mutex> lock(mtx);
|
|
||||||
cv.wait(lock, [&] { return done; });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
worker.join();
|
bool GetbinClient::getHash(const std::string& toolName, const std::string& arch, std::string& outHash) {
|
||||||
return success;
|
try {
|
||||||
|
std::string url = "https://" + SERVER_HOST + "/hash/" + toolName + ":" + arch;
|
||||||
|
|
||||||
|
auto response = cpr::Get(cpr::Url{url},
|
||||||
|
cpr::Header{{"User-Agent", getUserAgent()}},
|
||||||
|
cpr::Timeout{10000}, // 10 seconds
|
||||||
|
cpr::VerifySsl{true});
|
||||||
|
|
||||||
|
if (response.status_code == 200) {
|
||||||
|
try {
|
||||||
|
// Try JSON first
|
||||||
|
auto resp_json = json::parse(response.text);
|
||||||
|
if (resp_json.contains("hash")) {
|
||||||
|
outHash = resp_json["hash"].get<std::string>();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
} catch (const json::exception&) {
|
||||||
|
// Not JSON, treat as plain text
|
||||||
|
outHash = response.text;
|
||||||
|
// Remove trailing newline if present
|
||||||
|
if (!outHash.empty() && outHash.back() == '\n') {
|
||||||
|
outHash.pop_back();
|
||||||
|
}
|
||||||
|
return !outHash.empty();
|
||||||
|
}
|
||||||
|
} else if (response.status_code == 404) {
|
||||||
|
// Not found - this is expected for non-existent tools/archs
|
||||||
|
return false;
|
||||||
|
} else {
|
||||||
|
std::cerr << "[GetbinClient::getHash] HTTP " << response.status_code << ": " << response.error.message << std::endl;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
} catch (const std::exception& e) {
|
||||||
|
std::cerr << "[GetbinClient::getHash] Exception: " << e.what() << std::endl;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
 bool GetbinClient::deleteObject(const std::string& hash, const std::string& token) {
-    bool success = false;
-    bool done = false;
-    std::mutex mtx;
-    std::condition_variable cv;
-
-    std::thread worker([&]() {
-        trantor::EventLoop loop;
-
-        auto client = drogon::HttpClient::newHttpClient(
-            "https://" + std::string(SERVER_HOST),
-            &loop,
-            false, // useOldTLS = false (disable TLS 1.0/1.1)
-            true   // validateCert = true
-        );
-
-        // Configure SSL certificates
-        std::string ca_path = find_ca_certificates();
-        std::vector<std::pair<std::string, std::string>> sslConfigs;
-        if (!ca_path.empty()) {
-            sslConfigs.push_back({"VerifyCAFile", ca_path});
-        }
-        // Configure SSL for secure connections
-        client->addSSLConfigs(sslConfigs);
-
-        if (ca_path.empty()) {
-            std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
-        }
-
-        client->enableCookies();
-        client->setUserAgent("getpkg/1.0");
-
-        std::string delete_path = "/deleteobject?hash=" + hash;
-
-        auto req = drogon::HttpRequest::newHttpRequest();
-        req->setMethod(drogon::Get);
-        req->setPath(delete_path);
-        req->addHeader("Authorization", "Bearer " + token);
-
-        client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
-            std::lock_guard<std::mutex> lock(mtx);
-            if (result == drogon::ReqResult::Ok && response) {
-                int status_code = static_cast<int>(response->getStatusCode());
-                std::string response_body(response->getBody());
-
-                if (status_code == 200) {
-                    // Check if the response indicates success
-                    try {
-                        auto resp_json = json::parse(response_body);
-                        if (resp_json.contains("result") && resp_json["result"] == "success") {
-                            success = true;
-                        }
-                    } catch (...) {
-                        // If not JSON, assume success if 200 OK
-                        success = true;
-                    }
-                } else {
-                    std::cerr << "[GetbinClient::deleteObject] HTTP error: status code " << status_code << std::endl;
-                    std::cerr << "[GetbinClient::deleteObject] Response body: " << response_body << std::endl;
-                }
-            } else {
-                std::cerr << "[GetbinClient::deleteObject] HTTP request failed." << std::endl;
-            }
-            done = true;
-            cv.notify_one();
-            loop.quit();
-        }, 10.0); // 10 second timeout
-
-        loop.loop();
-    });
-
-    // Wait for completion
-    {
-        std::unique_lock<std::mutex> lock(mtx);
-        cv.wait(lock, [&] { return done; });
-    }
-
-    worker.join();
-    return success;
+    try {
+        std::string url = "https://" + SERVER_HOST + "/deleteobject?hash=" + hash;
+
+        auto response = cpr::Get(cpr::Url{url},
+                                 cpr::Header{
+                                     {"User-Agent", getUserAgent()},
+                                     {"Authorization", "Bearer " + token}
+                                 },
+                                 cpr::Timeout{30000}, // 30 seconds
+                                 cpr::VerifySsl{true});
+
+        if (response.status_code == 200) {
+            return true;
+        } else {
+            std::cerr << "[GetbinClient::deleteObject] HTTP " << response.status_code << ": " << response.error.message << std::endl;
+            if (!response.text.empty()) {
+                std::cerr << "[GetbinClient::deleteObject] Response: " << response.text << std::endl;
+            }
+        }
+        return false;
+    } catch (const std::exception& e) {
+        std::cerr << "[GetbinClient::deleteObject] Exception: " << e.what() << std::endl;
+        return false;
+    }
 }

 bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
-    outPackages.clear();
-
-    // Set up SSL configuration
-    std::string ca_path = find_ca_certificates();
-
-    bool success = false;
-    bool done = false;
-    std::mutex mtx;
-    std::condition_variable cv;
-
-    std::thread worker([&]() {
-        trantor::EventLoop loop;
-
-        auto client = drogon::HttpClient::newHttpClient(
-            "https://" + std::string(SERVER_HOST),
-            &loop,
-            false, // useOldTLS = false (disable TLS 1.0/1.1)
-            true   // validateCert = true
-        );
-        std::vector<std::pair<std::string, std::string>> sslConfigs;
-        if (!ca_path.empty()) {
-            sslConfigs.push_back({"VerifyCAFile", ca_path});
-        }
-        // Configure SSL for secure connections
-        client->addSSLConfigs(sslConfigs);
-
-        auto req = drogon::HttpRequest::newHttpRequest();
-        req->setMethod(drogon::Get);
-        req->setPath("/dir");
-
-        client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
-            if (result == drogon::ReqResult::Ok) {
-                int status_code = response->getStatusCode();
-                std::string response_body = std::string(response->getBody());
-
-                if (status_code == 200) {
-                    try {
-                        json json_response = json::parse(response_body);
-
-                        if (json_response.contains("entries") && json_response["entries"].is_array()) {
-                            for (const auto& entry : json_response["entries"]) {
-                                if (entry.contains("labeltags") && entry["labeltags"].is_array()) {
-                                    for (const auto& labeltag : entry["labeltags"]) {
-                                        if (labeltag.is_string()) {
-                                            std::string name = labeltag.get<std::string>();
-                                            // Extract tool name (remove architecture suffix if present)
-                                            size_t colon_pos = name.find(":");
-                                            if (colon_pos != std::string::npos) {
-                                                name = name.substr(0, colon_pos);
-                                            }
-
-                                            // Skip empty names
-                                            if (name.empty()) continue;
-
-                                            // Add to list if not already present
-                                            if (std::find(outPackages.begin(), outPackages.end(), name) == outPackages.end()) {
-                                                outPackages.push_back(name);
-                                            }
-                                        }
-                                    }
-                                }
-                            }
-                            success = true;
-                        }
-                    } catch (const std::exception& e) {
-                        std::cerr << "[GetbinClient::listPackages] JSON parse error: " << e.what() << std::endl;
-                    }
-                } else {
-                    std::cerr << "[GetbinClient::listPackages] HTTP error: status code " << status_code << std::endl;
-                }
-            } else {
-                std::cerr << "[GetbinClient::listPackages] HTTP request failed." << std::endl;
-            }
-            done = true;
-            cv.notify_one();
-            loop.quit();
-        }, 10.0);
-
-        loop.loop();
-    });
-
-    // Wait for completion
-    {
-        std::unique_lock<std::mutex> lock(mtx);
-        cv.wait(lock, [&] { return done; });
-    }
-
-    worker.join();
-
-    // Filter out duplicates where we have both toolname and toolname-noarch
-    // Keep the base name and remove the -noarch variant
-    std::vector<std::string> filteredPackages;
-    std::set<std::string> baseNames;
-
-    // First pass: collect all base names (without -noarch)
-    for (const auto& pkg : outPackages) {
-        const std::string suffix = "-noarch";
-        if (pkg.length() < suffix.length() || pkg.substr(pkg.length() - suffix.length()) != suffix) {
-            baseNames.insert(pkg);
-        }
-    }
-
-    // Second pass: add packages, skipping -noarch variants if base exists
-    for (const auto& pkg : outPackages) {
-        const std::string suffix = "-noarch";
-        if (pkg.length() >= suffix.length() && pkg.substr(pkg.length() - suffix.length()) == suffix) {
-            std::string baseName = pkg.substr(0, pkg.length() - suffix.length());
-            if (baseNames.find(baseName) == baseNames.end()) {
-                filteredPackages.push_back(pkg); // Keep -noarch only if no base version
-            }
-        } else {
-            filteredPackages.push_back(pkg); // Always keep base versions
-        }
-    }
-
-    outPackages = std::move(filteredPackages);
-
-    // Sort the packages for better display
-    std::sort(outPackages.begin(), outPackages.end());
-
-    return success;
+    try {
+        std::string url = "https://" + SERVER_HOST + "/dir";
+
+        auto response = cpr::Get(cpr::Url{url},
+                                 cpr::Header{{"User-Agent", getUserAgent()}},
+                                 cpr::Timeout{30000}, // 30 seconds
+                                 cpr::VerifySsl{true});
+
+        if (response.status_code == 200) {
+            try {
+                auto resp_json = json::parse(response.text);
+                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
+                    outPackages.clear();
+                    std::set<std::string> uniqueTools;
+
+                    for (const auto& entry : resp_json["entries"]) {
+                        if (entry.contains("labeltags") && entry["labeltags"].is_array()) {
+                            for (const auto& labeltag : entry["labeltags"]) {
+                                if (labeltag.is_string()) {
+                                    std::string tag = labeltag.get<std::string>();
+                                    // Extract tool name from "tool:arch" format
+                                    size_t colonPos = tag.find(":");
+                                    if (colonPos != std::string::npos) {
+                                        std::string toolName = tag.substr(0, colonPos);
+                                        if (!toolName.empty()) {
+                                            uniqueTools.insert(toolName);
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+
+                    // Convert set to vector
+                    for (const auto& tool : uniqueTools) {
+                        outPackages.push_back(tool);
+                    }
+                    return true;
+                }
+            } catch (const json::exception&) {
+                // Try to parse as newline-separated list
+                outPackages.clear();
+                std::istringstream stream(response.text);
+                std::string line;
+                while (std::getline(stream, line)) {
+                    if (!line.empty()) {
+                        outPackages.push_back(line);
+                    }
+                }
+                return !outPackages.empty();
+            }
+        } else {
+            std::cerr << "[GetbinClient::listPackages] HTTP " << response.status_code << ": " << response.error.message << std::endl;
+        }
+
+        return false;
+    } catch (const std::exception& e) {
+        std::cerr << "[GetbinClient::listPackages] Exception: " << e.what() << std::endl;
+        return false;
+    }
 }
+
+bool GetbinClient::listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries) {
+    try {
+        std::string url = "https://" + SERVER_HOST + "/dir";
+
+        auto response = cpr::Get(cpr::Url{url},
+                                 cpr::Header{{"User-Agent", getUserAgent()}},
+                                 cpr::Timeout{30000}, // 30 seconds
+                                 cpr::VerifySsl{true});
+
+        if (response.status_code == 200) {
+            try {
+                auto resp_json = json::parse(response.text);
+                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
+                    outEntries.clear();
+
+                    for (const auto& entry : resp_json["entries"]) {
+                        if (entry.contains("hash") && entry.contains("labeltags") &&
+                            entry["hash"].is_string() && entry["labeltags"].is_array()) {
+
+                            std::string hash = entry["hash"].get<std::string>();
+                            std::vector<std::string> labeltags;
+
+                            for (const auto& tag : entry["labeltags"]) {
+                                if (tag.is_string()) {
+                                    labeltags.push_back(tag.get<std::string>());
+                                }
+                            }
+
+                            outEntries.push_back({hash, labeltags});
+                        }
+                    }
+                    return true;
+                }
+            } catch (const json::exception& e) {
+                std::cerr << "[GetbinClient::listAllEntries] JSON parse error: " << e.what() << std::endl;
+            }
+        } else {
+            std::cerr << "[GetbinClient::listAllEntries] HTTP " << response.status_code << ": " << response.error.message << std::endl;
+        }
+
+        return false;
+    } catch (const std::exception& e) {
+        std::cerr << "[GetbinClient::listAllEntries] Exception: " << e.what() << std::endl;
+        return false;
+    }
+}
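For orientation, the parsing above assumes the /dir endpoint returns a JSON object with an "entries" array, where each entry carries a "hash" and a "labeltags" array of "tool:arch" strings. A minimal self-contained sketch of the same extraction (the sample payload below is illustrative only, not real server output):

#include <iostream>
#include <set>
#include <string>
#include <nlohmann/json.hpp>

int main() {
    // Illustrative payload only: real entries come from the server's /dir endpoint.
    const std::string body = R"({
        "entries": [
            {"hash": "123456", "labeltags": ["dehydrate:x86_64", "dehydrate:universal"]},
            {"hash": "789012", "labeltags": ["bb64:aarch64"]}
        ]
    })";

    auto resp_json = nlohmann::json::parse(body);
    std::set<std::string> uniqueTools;
    for (const auto& entry : resp_json["entries"])
        for (const auto& tag : entry["labeltags"]) {
            std::string t = tag.get<std::string>();
            size_t colon = t.find(':');          // labeltags use "tool:arch"
            if (colon != std::string::npos)
                uniqueTools.insert(t.substr(0, colon));
        }
    for (const auto& name : uniqueTools)
        std::cout << name << "\n";               // prints: bb64, dehydrate
}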
@ -1,13 +1,25 @@
 #pragma once
 #include <string>
 #include <vector>
+#include <functional>

 class GetbinClient {
 public:
     GetbinClient();
-    bool download(const std::string& toolName, const std::string& arch, const std::string& outPath);
-    bool upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token);
+    // Progress callback: (downloaded_bytes, total_bytes) -> should_continue
+    using ProgressCallback = std::function<bool(size_t, size_t)>;
+
+    bool download(const std::string& toolName, const std::string& arch, const std::string& outPath,
+                  ProgressCallback progressCallback = nullptr);
+    bool upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token,
+                ProgressCallback progressCallback = nullptr);
     bool getHash(const std::string& toolName, const std::string& arch, std::string& outHash);
     bool deleteObject(const std::string& hash, const std::string& token);
     bool listPackages(std::vector<std::string>& outPackages);
+    bool listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries);
+
+private:
+    static const std::string SERVER_HOST;
+    std::string getUserAgent() const;
 };
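The new optional ProgressCallback receives (bytes_so_far, total_bytes) and cancels the transfer when it returns false; total may be 0 when the server does not report a length. A minimal caller sketch (the header filename GetbinClient.hpp is assumed here, and the lambda body is illustrative):

#include <cstddef>
#include <iostream>
#include "GetbinClient.hpp"  // assumed header name for the class above

void example(GetbinClient& client) {
    GetbinClient::ProgressCallback progress = [](size_t done, size_t total) -> bool {
        if (total > 0)
            std::cout << "\r" << (done * 100) / total << "%" << std::flush;
        else
            std::cout << "\r" << done << " bytes" << std::flush;
        return true;  // returning false would abort the transfer
    };
    client.download("dehydrate", "x86_64", "/tmp/dehydrate.tgz", progress);
}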
@ -76,6 +76,17 @@
 namespace {
 using json = nlohmann::json;
+
+// Clear current line and reset cursor to beginning
+void clearLine() {
+    std::cout << "\r\033[K" << std::flush;
+}
+
+// Clear current line and print message
+void clearAndPrint(const std::string& message) {
+    clearLine();
+    std::cout << message << std::flush;
+}

 // Compare versions (returns true if v1 < v2)
 bool isVersionOlder(const std::string& v1, const std::string& v2) {
     // Simple version comparison - assumes versions are in YYYY.MMDD.HHMM format
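clearLine relies on "\r" (return the cursor to column 0) plus the ANSI erase-to-end-of-line control "\033[K", which is what lets the progress messages below overwrite themselves in place. A tiny standalone sketch of the effect:

#include <chrono>
#include <iostream>
#include <thread>

int main() {
    for (int pct = 0; pct <= 100; pct += 25) {
        // "\r" moves to column 0; "\033[K" erases from the cursor to end of line.
        std::cout << "\r\033[K" << "Downloading... " << pct << "%" << std::flush;
        std::this_thread::sleep_for(std::chrono::milliseconds(200));
    }
    std::cout << "\r\033[K" << "Downloading... done\n";
}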
@ -200,27 +211,43 @@ int install_tool(int argc, char* argv[]) {
     // Download tool - try arch-specific version first, then universal fallback
     GetbinClient getbin2;
     std::string downloadArch = arch;
-    //std::cout << "Downloading " << toolName << ":" << arch << "..." << std::endl;
-    if (!getbin2.download(toolName, arch, archivePath.string())) {
+
+    // Progress callback for downloads
+    auto progressCallback = [&toolName](size_t downloaded, size_t total) -> bool {
+        if (total > 0) {
+            int percent = (downloaded * 100) / total;
+            std::cout << "\rDownloading " << toolName << "... " << percent << "%" << std::flush;
+        } else {
+            std::cout << "\rDownloading " << toolName << "... " << downloaded << " bytes" << std::flush;
+        }
+        return true; // Continue download
+    };
+
+    std::cout << "Downloading " << toolName << "..." << std::flush;
+    if (!getbin2.download(toolName, arch, archivePath.string(), progressCallback)) {
         // Try universal version as fallback
-        //std::cout << "Arch-specific version not found, trying universal version..." << std::endl;
-        //std::cout << "Downloading " << toolName << ":universal..." << std::endl;
-        if (!getbin2.download(toolName, "universal", archivePath.string())) {
-            std::cerr << "Failed to download tool archive (tried both " << arch << " and universal)." << std::endl;
+        clearAndPrint("Arch-specific version not found, trying universal...\n");
+        if (!getbin2.download(toolName, "universal", archivePath.string(), progressCallback)) {
+            std::cerr << "\rFailed to download tool archive (tried both " << arch << " and universal)." << std::endl;
             return 1;
         }
         downloadArch = "universal";
     }
+    clearAndPrint("Downloading " + toolName + "... done\n");

     // Unpack tool
+    std::cout << "Unpacking..." << std::flush;
     if (!common::unpack_tgz(archivePath.string(), binDir.string())) {
-        std::cerr << "Failed to unpack tool archive." << std::endl;
+        std::cerr << "\rFailed to unpack tool archive." << std::endl;
         return 1;
     }
+    clearAndPrint("Unpacking... done\n");

     // Add to PATH and autocomplete
+    std::cout << "Configuring..." << std::flush;
     scriptManager.addToolEntry(toolName, binDir.string());
     scriptManager.addAutocomplete(toolName);
+    clearAndPrint("Configuring... done\n");

     // Get tool info
     std::string hash;
@ -314,10 +341,24 @@ int publish_tool(int argc, char* argv[]) {
     }
     GetbinClient getbin;
     std::string url, hash;
-    if (!getbin.upload(archivePath.string(), url, hash, token)) {
-        std::cerr << "Failed to upload archive." << std::endl;
+
+    // Progress callback for upload
+    auto uploadProgressCallback = [](size_t uploaded, size_t total) -> bool {
+        if (total > 0) {
+            int percent = (uploaded * 100) / total;
+            std::cout << "\rUploading... " << percent << "%" << std::flush;
+        } else {
+            std::cout << "\rUploading... " << uploaded << " bytes" << std::flush;
+        }
+        return true; // Continue upload
+    };
+
+    std::cout << "Uploading..." << std::flush;
+    if (!getbin.upload(archivePath.string(), url, hash, token, uploadProgressCallback)) {
+        std::cerr << "\rFailed to upload archive." << std::endl;
         return 1;
     }
+    clearAndPrint("Uploading... done\n");
     std::cout << "Published! URL: " << url << "\nHash: " << hash << std::endl;
     return 0;
 }
@ -326,73 +367,161 @@ int update_tool(int argc, char* argv[]) {
     std::string home = get_home();
     std::filesystem::path configDir = std::filesystem::path(home) / ".config/getpkg";

-    // Collect all installed tools
-    std::vector<std::tuple<std::string, std::string, std::string>> updateResults; // name, status, version
-
-    // Capture stdout to process install_tool output
-    auto processToolUpdate = [&](const std::string& toolName) -> std::tuple<std::string, std::string> {
-        // Redirect stdout and stderr to capture output
-        std::stringstream buffer;
-        std::stringstream errBuffer;
-        std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
-        std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
-
-        char* toolArgv[] = {argv[0], (char*)"install", (char*)toolName.c_str()};
-        int result = install_tool(3, toolArgv);
-
-        // Restore stdout and stderr
-        std::cout.rdbuf(oldOut);
-        std::cerr.rdbuf(oldErr);
-
-        std::string output = buffer.str();
-        std::string status = "Failed";
-        std::string version = "-";
-
-        if (result == 0) {
-            if (output.find("is already up to date") != std::string::npos) {
-                status = "Up to date";
-            } else if (output.find("Installed " + toolName + " successfully") != std::string::npos) {
-                // Check if it was an update or fresh install
-                if (output.find("Updating " + toolName) != std::string::npos) {
-                    status = "Updated";
-                } else {
-                    status = "Installed";
-                }
-            }
-
-            // Try to get version from config
-            std::filesystem::path toolInfoPath = configDir / (toolName + ".json");
-            if (std::filesystem::exists(toolInfoPath)) {
-                std::ifstream tfile(toolInfoPath);
-                json toolInfo;
-                tfile >> toolInfo;
-                version = toolInfo.value("version", "-");
-                if (!version.empty() && version.back() == '\n') version.pop_back();
-                // If version is empty, try to show something useful
-                if (version.empty() || version == "-") {
-                    version = "installed";
-                }
-            }
-        }
-
-        return std::make_tuple(status, version);
-    };
-
-    // First update getpkg itself
-    auto [getpkgStatus, getpkgVersion] = processToolUpdate("getpkg");
-    updateResults.push_back(std::make_tuple("getpkg", getpkgStatus, getpkgVersion));
-
-    // Then update all other installed tools
-    if (std::filesystem::exists(configDir)) {
-        for (const auto& entry : std::filesystem::directory_iterator(configDir)) {
-            if (entry.path().extension() == ".json") {
-                std::string tname = entry.path().stem();
-                if (tname != "getpkg") { // Skip getpkg since we already did it
-                    auto [status, version] = processToolUpdate(tname);
-                    updateResults.push_back(std::make_tuple(tname, status, version));
-                }
-            }
-        }
-    }
+    // Structure to hold tool information
+    struct ToolInfo {
+        std::string name;
+        std::string localHash;
+        std::string remoteHash;
+        std::string arch;
+        std::string version;
+        bool needsUpdate = false;
+        std::string status = "Up to date";
+    };
+
+    std::vector<ToolInfo> tools;
+
+    // Collect all installed tools
+    if (std::filesystem::exists(configDir)) {
+        for (const auto& entry : std::filesystem::directory_iterator(configDir)) {
+            if (entry.path().extension() == ".json") {
+                std::string tname = entry.path().stem();
+                ToolInfo tool;
+                tool.name = tname;
+
+                // Read local tool info
+                std::ifstream tfile(entry.path());
+                if (tfile.good()) {
+                    json toolInfo;
+                    tfile >> toolInfo;
+                    tool.localHash = toolInfo.value("hash", "");
+                    tool.arch = toolInfo.value("arch", get_arch());
+                    tool.version = toolInfo.value("version", "-");
+                    if (!tool.version.empty() && tool.version.back() == '\n') {
+                        tool.version.pop_back();
+                    }
+                    if (tool.version.empty() || tool.version == "-") {
+                        tool.version = "installed";
+                    }
+                }
+
+                tools.push_back(tool);
+            }
+        }
+    }
+
+    if (tools.empty()) {
+        std::cout << "No tools installed." << std::endl;
+        return 0;
+    }
+
+    // Step 1: Check for updates (with progress)
+    std::cout << "Checking " << tools.size() << " tools for updates..." << std::endl;
+
+    GetbinClient getbin;
+    for (size_t i = 0; i < tools.size(); ++i) {
+        auto& tool = tools[i];
+
+        // Show progress
+        std::cout << "\r[" << (i + 1) << "/" << tools.size() << "] Checking " << tool.name << "..." << std::flush;
+
+        // Check remote hash
+        std::string remoteHash;
+        if (getbin.getHash(tool.name, tool.arch, remoteHash) && !remoteHash.empty()) {
+            tool.remoteHash = remoteHash;
+            if (tool.localHash != remoteHash) {
+                tool.needsUpdate = true;
+                tool.status = "Needs update";
+            }
+        } else {
+            tool.status = "Check failed";
+        }
+    }
+    clearLine(); // Clear progress line
+
+    // Step 2: Update tools that need updating
+    std::vector<std::tuple<std::string, std::string, std::string>> updateResults;
+
+    // First update getpkg if it needs updating
+    auto getpkgIt = std::find_if(tools.begin(), tools.end(),
+                                 [](const ToolInfo& t) { return t.name == "getpkg"; });
+
+    if (getpkgIt != tools.end() && getpkgIt->needsUpdate) {
+        std::cout << "Updating getpkg..." << std::flush;
+
+        // Use install_tool for actual update
+        std::stringstream buffer, errBuffer;
+        std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
+        std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
+
+        char* toolArgv[] = {argv[0], (char*)"install", (char*)"getpkg"};
+        int result = install_tool(3, toolArgv);
+
+        std::cout.rdbuf(oldOut);
+        std::cerr.rdbuf(oldErr);
+
+        if (result == 0) {
+            getpkgIt->status = "Updated";
+            std::cout << " Updated" << std::endl;
+        } else {
+            getpkgIt->status = "Failed";
+            std::cout << " Failed" << std::endl;
+        }
+    }
+
+    // Update other tools
+    int toolsToUpdate = std::count_if(tools.begin(), tools.end(),
+                                      [](const ToolInfo& t) { return t.needsUpdate && t.name != "getpkg"; });
+
+    if (toolsToUpdate > 0) {
+        std::cout << "Updating " << toolsToUpdate << " tools..." << std::endl;
+
+        int updatedCount = 0;
+        for (auto& tool : tools) {
+            if (tool.needsUpdate && tool.name != "getpkg") {
+                updatedCount++;
+                std::cout << "[" << updatedCount << "/" << toolsToUpdate << "] Updating " << tool.name << "..." << std::flush;
+
+                // Use install_tool for actual update
+                std::stringstream buffer, errBuffer;
+                std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
+                std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
+
+                char* toolArgv[] = {argv[0], (char*)"install", (char*)tool.name.c_str()};
+                int result = install_tool(3, toolArgv);
+
+                std::cout.rdbuf(oldOut);
+                std::cerr.rdbuf(oldErr);
+
+                if (result == 0) {
+                    tool.status = "Updated";
+                    clearAndPrint("Updated\n");
+
+                    // Re-read version after update
+                    std::filesystem::path toolInfoPath = configDir / (tool.name + ".json");
+                    if (std::filesystem::exists(toolInfoPath)) {
+                        std::ifstream tfile(toolInfoPath);
+                        json toolInfo;
+                        tfile >> toolInfo;
+                        tool.version = toolInfo.value("version", tool.version);
+                        if (!tool.version.empty() && tool.version.back() == '\n') {
+                            tool.version.pop_back();
+                        }
+                        if (tool.version.empty() || tool.version == "-") {
+                            tool.version = "installed";
+                        }
+                    }
+                } else {
+                    tool.status = "Failed";
+                    clearAndPrint("Failed\n");
+                }
+            }
+        }
+    }
+
+    // Prepare results for display
+    for (const auto& tool : tools) {
+        updateResults.push_back(std::make_tuple(tool.name, tool.status, tool.version));
+    }

     // Display results in a table
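The update path silences install_tool by swapping std::cout's stream buffer via rdbuf and restoring it afterwards. As a design note, the same dance can be wrapped in a small RAII guard (a hypothetical helper sketched below, not part of this change) so the buffer is restored even on early returns or exceptions:

#include <iostream>
#include <sstream>

// Hypothetical helper: restores the original cout buffer in its destructor.
class CoutCapture {
    std::stringstream buffer_;
    std::streambuf* old_;
public:
    CoutCapture() : old_(std::cout.rdbuf(buffer_.rdbuf())) {}
    ~CoutCapture() { std::cout.rdbuf(old_); }
    std::string text() const { return buffer_.str(); }
};

int main() {
    std::string captured;
    {
        CoutCapture cap;
        std::cout << "hidden from the terminal";
        captured = cap.text();
    } // original buffer restored here
    std::cout << "captured: " << captured << "\n";
}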
@ -583,35 +712,34 @@ int unpublish_tool(int argc, char* argv[]) {
             return 1;
         }
     } else {
-        // No specific architecture - unpublish all architectures
-        std::vector<std::string> allArchitectures = {"x86_64", "aarch64", "universal"};
-        std::vector<std::pair<std::string, std::string>> foundPackages;
+        // No specific architecture - unpublish ALL entries with this tool name
+        std::vector<std::pair<std::string, std::vector<std::string>>> allEntries;
+        std::vector<std::pair<std::string, std::string>> foundPackages; // (tag, hash)

-        std::cout << "Searching for " << toolName << " across all architectures..." << std::endl;
+        std::cout << "Searching for all entries with label '" << toolName << "'..." << std::endl;

-        // Find all existing versions
-        for (const auto& arch : allArchitectures) {
-            std::string archHash;
-            if (getbin.getHash(toolName, arch, archHash) && !archHash.empty()) {
-                // Validate hash
-                bool validHash = true;
-                for (char c : archHash) {
-                    if (!std::isdigit(c)) {
-                        validHash = false;
-                        break;
-                    }
-                }
-
-                if (validHash) {
-                    foundPackages.push_back({arch, archHash});
-                    std::cout << "  Found " << toolName << ":" << arch << " (hash: " << archHash << ")" << std::endl;
-                }
-            }
-        }
+        if (!getbin.listAllEntries(allEntries)) {
+            std::cerr << "Failed to get directory listing from server" << std::endl;
+            return 1;
+        }
+
+        // Find all entries with labeltags starting with toolName:
+        for (const auto& entry : allEntries) {
+            const std::string& hash = entry.first;
+            const std::vector<std::string>& labeltags = entry.second;
+
+            for (const std::string& tag : labeltags) {
+                if (tag.find(toolName + ":") == 0) {
+                    // Found a matching labeltag
+                    foundPackages.push_back({tag, hash});
+                    std::cout << "  Found " << tag << " (hash: " << hash << ")" << std::endl;
+                    break; // Only count each hash once even if it has multiple matching tags
+                }
+            }
+        }

         if (foundPackages.empty()) {
             std::cerr << "No packages found for " << toolName << std::endl;
-            std::cerr << "Searched architectures: x86_64, aarch64, universal" << std::endl;
             return 1;
         }

@ -623,7 +751,7 @@ int unpublish_tool(int argc, char* argv[]) {
         int failCount = 0;

         for (const auto& [arch, archHash] : foundPackages) {
-            std::cout << "  Unpublishing " << toolName << ":" << arch << "... ";
+            std::cout << "  Unpublishing " << arch << "... ";
             if (getbin.deleteObject(archHash, token)) {
                 std::cout << "OK" << std::endl;
                 successCount++;
@ -706,7 +834,7 @@ int list_packages(int argc, char* argv[]) {
     for (const auto& packageName : availablePackages) {
         std::string status = "Available";
         std::string localVersion = "-";
-        std::string remoteStatus = "✓";
+        std::string remoteStatus = "-";

         auto it = installedPackages.find(packageName);
         if (it != installedPackages.end()) {
getpkg/test.sh (127 changed lines)
@ -455,12 +455,13 @@ EOF
 CONFIG_EXISTS=false
 TOOL_DIR_EXISTS=false
 SYMLINK_EXISTS=false
-HELPER_SYMLINK_EXISTS=false
+# HELPER_SYMLINK_EXISTS=false

 [ -f ~/.config/getpkg/"${TEST_UNINSTALL_TOOL}.json" ] && CONFIG_EXISTS=true
 [ -d ~/.getpkg/"$TEST_UNINSTALL_TOOL" ] && TOOL_DIR_EXISTS=true
 [ -L ~/.local/bin/getpkg/"$TEST_UNINSTALL_TOOL" ] && SYMLINK_EXISTS=true
-[ -L ~/.local/bin/getpkg/"${TEST_UNINSTALL_TOOL}-helper" ] && HELPER_SYMLINK_EXISTS=true
+# Check if helper symlink exists (not currently used in validation)
+# [ -L ~/.local/bin/getpkg/"${TEST_UNINSTALL_TOOL}-helper" ] && HELPER_SYMLINK_EXISTS=true

 if $CONFIG_EXISTS && $TOOL_DIR_EXISTS && $SYMLINK_EXISTS; then
     # Now uninstall
@ -528,6 +529,128 @@ EOF
     fi
 fi

+
+# Test 13.5: Comprehensive unpublish functionality
+echo -e "\nTest 13.5: Comprehensive unpublish functionality"
+
+# Only run unpublish tests if SOS_WRITE_TOKEN is available
+if [ -n "${SOS_WRITE_TOKEN:-}" ]; then
+    # Create unique test names for unpublish tests
+    UNPUBLISH_TOOL_BASE="test-unpublish-$RANDOM"
+    UNPUBLISH_TOOL_MULTI="${UNPUBLISH_TOOL_BASE}-multi"
+    UNPUBLISH_TOOL_CUSTOM="${UNPUBLISH_TOOL_BASE}-custom"
+    UNPUBLISH_TEST_DIR="${TEST_DIR}/unpublish_tests"
+
+    # Create test directory structure
+    mkdir -p "$UNPUBLISH_TEST_DIR"
+
+    # Test 13.5a: Create and publish tool with multiple architectures
+    echo "Test 13.5a: Unpublish tool with multiple architectures"
+    echo '#!/bin/bash
+echo "Multi-arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
+
+    # Publish to multiple architectures
+    PUBLISH_x86_64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
+    PUBLISH_aarch64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:aarch64" "$UNPUBLISH_TEST_DIR" 2>&1)
+    PUBLISH_universal_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_x86_64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_aarch64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_universal_OUTPUT" =~ Published! ]]; then
+        # Test robust unpublish - should remove ALL architectures
+        sleep 1  # Give server time to process all publishes
+        UNPUBLISH_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_MULTI" 2>&1)
+        UNPUBLISH_EXIT_CODE=$?
+
+        # Check that unpublish found and removed packages
+        if [ $UNPUBLISH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_OUTPUT" =~ "Found" ]] && [[ "$UNPUBLISH_OUTPUT" =~ "Successfully unpublished" ]]; then
+            print_test_result "Unpublish removes all architectures" 0
+        else
+            print_test_result "Unpublish removes all architectures" 1
+            echo "  Unpublish failed: $UNPUBLISH_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish removes all architectures" 1
+        echo "  Failed to publish test tool to multiple architectures"
+        echo "  x86_64: $PUBLISH_x86_64_OUTPUT"
+        echo "  aarch64: $PUBLISH_aarch64_OUTPUT"
+        echo "  universal: $PUBLISH_universal_OUTPUT"
+    fi
+
+    # Test 13.5b: Unpublish tool with universal architecture
+    echo "Test 13.5b: Unpublish tool with universal architecture"
+    echo '#!/bin/bash
+echo "Universal arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
+
+    # Publish with universal architecture
+    PUBLISH_CUSTOM_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_CUSTOM}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_CUSTOM_OUTPUT" =~ Published! ]]; then
+        # Test that unpublish can find and remove custom tags
+        UNPUBLISH_CUSTOM_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_CUSTOM" 2>&1)
+        UNPUBLISH_CUSTOM_EXIT_CODE=$?
+
+        if [ $UNPUBLISH_CUSTOM_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_CUSTOM_OUTPUT" =~ Found\ ${UNPUBLISH_TOOL_CUSTOM}:universal ]]; then
+            print_test_result "Unpublish finds universal architecture" 0
+        else
+            print_test_result "Unpublish finds universal architecture" 1
+            echo "  Failed to find or unpublish custom tag: $UNPUBLISH_CUSTOM_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish finds universal architecture" 1
+        echo "  Failed to publish tool with custom tag: $PUBLISH_CUSTOM_OUTPUT"
+    fi
+
+    # Test 13.5c: Unpublish non-existent tool
+    echo "Test 13.5c: Unpublish non-existent tool"
+    NON_EXISTENT_TOOL="non-existent-tool-$RANDOM"
+    UNPUBLISH_MISSING_OUTPUT=$("$GETPKG" unpublish "$NON_EXISTENT_TOOL" 2>&1)
+    UNPUBLISH_MISSING_EXIT_CODE=$?
+
+    if [ $UNPUBLISH_MISSING_EXIT_CODE -ne 0 ] && [[ "$UNPUBLISH_MISSING_OUTPUT" =~ "No packages found" ]]; then
+        print_test_result "Unpublish handles missing tools gracefully" 0
+    else
+        print_test_result "Unpublish handles missing tools gracefully" 1
+        echo "  Expected failure for non-existent tool, got: $UNPUBLISH_MISSING_OUTPUT"
+    fi
+
+    # Test 13.5d: Unpublish by hash
+    echo "Test 13.5d: Unpublish by hash"
+    UNPUBLISH_TOOL_HASH="${UNPUBLISH_TOOL_BASE}-hash"
+    echo '#!/bin/bash
+echo "Hash unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
+
+    PUBLISH_HASH_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_HASH}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_HASH_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
+        EXTRACTED_HASH="${BASH_REMATCH[1]}"
+
+        # Test unpublish by hash
+        UNPUBLISH_HASH_OUTPUT=$("$GETPKG" unpublish "$EXTRACTED_HASH" 2>&1)
+        UNPUBLISH_HASH_EXIT_CODE=$?
+
+        if [ $UNPUBLISH_HASH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_HASH_OUTPUT" =~ "Successfully unpublished hash" ]]; then
+            print_test_result "Unpublish by hash works" 0
+        else
+            print_test_result "Unpublish by hash works" 1
+            echo "  Failed to unpublish by hash: $UNPUBLISH_HASH_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish by hash works" 1
+        echo "  Could not extract hash from publish output"
+    fi
+
+    # Cleanup unpublish test directory
+    rm -rf "$UNPUBLISH_TEST_DIR"
+
+else
+    echo "  Skipping unpublish tests (SOS_WRITE_TOKEN not set)"
+    print_test_result "Unpublish removes all architectures" 0  # Pass as skipped
+    print_test_result "Unpublish finds universal architecture" 0
+    print_test_result "Unpublish handles missing tools gracefully" 0
+    print_test_result "Unpublish by hash works" 0
+fi
+
 # Test 14: Invalid tool name validation
 echo -e "\nTest 14: Invalid tool name validation"
 INVALID_OUTPUT=$(timeout 3 "$GETPKG" install "../evil-tool" 2>&1)
getpkg/test_debug/debug-test (new executable file)
@ -0,0 +1 @@
+#!/bin/bash\necho debug

getpkg/test_debug2/debug-test2 (new executable file)
@ -0,0 +1 @@
+#!/bin/bash\necho debug2

getpkg/test_display/test-display (new executable file)
@ -0,0 +1 @@
+#!/bin/bash\necho display test

getpkg/test_multi/test-multi (new executable file)
@ -0,0 +1 @@
+#!/bin/bash\necho multi arch

getpkg/test_robust/test-robust (new executable file)
@ -0,0 +1 @@
+#!/bin/bash\necho robust test

getpkg/test_upload.txt (new file)
@ -0,0 +1 @@
+test content
sos/clean.sh (new executable file)
@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="sos"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output directory (if it exists)
+if [ -d "${SCRIPT_DIR}/output" ]; then
+    echo "Removing output directory..."
+    rm -rf "${SCRIPT_DIR}/output"
+fi
+
+# Remove any temporary files
+echo "Removing temporary files..."
+find "${SCRIPT_DIR}" -name "*.tmp" -o -name "*.temp" -o -name "*~" | xargs -r rm -f
+
+echo "✓ ${PROJECT} cleaned successfully"