16 Commits

Author SHA1 Message Date
8e2611e362 Modify getpkg/src/GetbinClient.cpp.bak
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m14s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m5s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Failing after 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Failing after 6s
2025-06-29 19:02:48 +12:00
a1b12fe177 docs: Update 4 files
Some checks failed
Build-Test-Publish / build (linux/arm64) (push) Has been cancelled
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been cancelled
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been cancelled
Build-Test-Publish / build (linux/amd64) (push) Has been cancelled
2025-06-29 19:02:09 +12:00
902e68069a Modify getpkg/src/main.cpp
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m18s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m13s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 8s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-29 11:53:32 +12:00
0aafc2cc1e docs: Update 3 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m21s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m15s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-26 21:23:10 +12:00
2067caf253 Modify bb64/src/bb64.cpp
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m18s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m15s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 8s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-26 21:09:06 +12:00
4d500cbddd Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m14s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-25 22:47:45 +12:00
884609f661 Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m14s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:42:52 +12:00
a5a36c179b Modify dehydrate/test/build_dehydrate_test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m17s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m15s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:41:01 +12:00
42b51ef0be test: Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m8s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m57s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:38:12 +12:00
f094d532cf Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m27s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m41s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:33:09 +12:00
fffa88482a Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/arm64) (push) Failing after 30s
Build-Test-Publish / build (linux/amd64) (push) Failing after 32s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:32:04 +12:00
54af706032 Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 25s
Build-Test-Publish / build (linux/arm64) (push) Failing after 28s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:25:21 +12:00
ef7470dcce Modify dehydrate/src/argparse.cpp
Some checks failed
Build-Test-Publish / build (linux/arm64) (push) Failing after 40s
Build-Test-Publish / build (linux/amd64) (push) Failing after 39s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:22:24 +12:00
d18f875c0e docs: Update 4 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 32s
Build-Test-Publish / build (linux/arm64) (push) Failing after 39s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:18:43 +12:00
6f525b4f6c Modify bb64/publish.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 24s
Build-Test-Publish / build (linux/arm64) (push) Failing after 27s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:14:08 +12:00
ed13bdb5b5 Modify bb64/publish.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 24s
Build-Test-Publish / build (linux/arm64) (push) Failing after 26s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:11:44 +12:00
20 changed files with 737 additions and 621 deletions

View File

@ -60,6 +60,8 @@ getpkg version
### Information
- **`getpkg list`** - List all available packages with status
- **`getpkg clean`** - Clean up orphaned configs and symlinks
- **`getpkg version`** - Show getpkg version
- **`getpkg help`** - Show detailed help
- **`getpkg autocomplete`** - Show available commands for completion
@ -99,14 +101,14 @@ Tools are automatically downloaded for your architecture, with fallback to unive
### Installing Popular Tools
```bash
# Install development tools
getpkg whatsdirty # Fast grep alternative
getpkg fd # Fast find alternative
getpkg bat # Cat with syntax highlighting
# Install available tools
getpkg install dehydrate # File to C++ code generator
getpkg install bb64 # Bash base64 encoder/decoder
# Install system utilities
getpkg whatsdirty # Check git repo status
getpkg sos # Simple object storage client
# Development tools (for repository development)
getpkg install whatsdirty # Check git repo status
getpkg install sos # Simple object storage client
getpkg install gp # Git push utility
```
### Publishing Your Own Tools

View File

@ -5,7 +5,8 @@ if(NOT DEFINED PROJECT_NAME)
message(FATAL_ERROR "PROJECT_NAME is not defined. Pass it via -DPROJECT_NAME=<name>")
endif()
project(${PROJECT_NAME})
string(TIMESTAMP PROJECT_VERSION "%Y.%m%d.%H%M")
project(${PROJECT_NAME} VERSION ${PROJECT_VERSION} LANGUAGES CXX)
# Build configuration
set(CMAKE_CXX_STANDARD 23)
@ -21,6 +22,8 @@ add_executable(${PROJECT_NAME}
src/b64ed.cpp
)
# Configure version.hpp
configure_file("src/version.hpp.in" "src/autogen/version.hpp" @ONLY)
# Include directories
target_include_directories(${PROJECT_NAME} PRIVATE

View File

@ -26,6 +26,8 @@ Usage:
bb64 -[i|d] BASE64COMMAND Displays the decoded command
bb64 -e COMMAND Encodes the command and prints the result
bb64 -u Updates bb64 to the latest version (uses docker)
bb64 -v Prints the version number
bb64 version Prints the version number
```
# Implementation Notes

View File

@ -12,19 +12,10 @@ if [ "$ARCH" = "aarch64" ]; then
ARCH_ALIAS="arm64"
fi
# Increment version
VERSION_FILE="${SCRIPT_DIR}/src/version.h"
if [ ! -f "${VERSION_FILE}" ]; then
echo "${VERSION_FILE} not found!" >&2
exit 1
else
v=$(cat "${VERSION_FILE}" | grep -o 'static const char \*VERSION = "[0-9.]*";' | cut -d'"' -f2)
oldv=$v
v=$((v+1))
echo "Incrementing version from $oldv to $v" >&2
echo "static const char *VERSION = \"$v\";" > "${VERSION_FILE}"
fi
TAG="v$v"
# Get version from CMake timestamp
VERSION=$(date +"%Y.%m%d.%H%M")
TAG="v$VERSION"
echo "Building version $VERSION" >&2
# build release version
export CMAKE_BUILD_TYPE="Release"
@ -77,24 +68,68 @@ if [ -z "$RELEASE_WRITE_TOKEN" ]; then
exit 1
fi
echo "Creating release $TAG on Gitea..."
RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
# Create and push git tag
echo "Creating git tag $TAG..."
# Configure git identity if not set (for CI environments)
if ! git config user.email >/dev/null 2>&1; then
git config user.email "ci@gitea.jde.nz"
git config user.name "CI Bot"
fi
# Check if tag already exists locally
if git rev-parse "$TAG" >/dev/null 2>&1; then
echo "Tag $TAG already exists locally, deleting it first..."
git tag -d "$TAG"
fi
# Check if tag exists on remote
if git ls-remote --tags origin | grep -q "refs/tags/$TAG"; then
echo "Tag $TAG already exists on remote - this is expected for multi-architecture builds"
echo "Skipping tag creation and proceeding with release attachment..."
else
echo "Creating new tag $TAG..."
git tag -a "$TAG" -m "Release $TAG"
if ! git push origin "$TAG"; then
echo "Failed to push tag $TAG to origin" >&2
# Try to delete local tag if push failed
git tag -d "$TAG"
exit 1
fi
echo "Successfully created and pushed tag $TAG"
fi
echo "Getting or creating release $TAG on Gitea..."
# First try to get existing release
EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
-H "Authorization: token $RELEASE_WRITE_TOKEN")
if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
# Release already exists, get its ID
RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
echo "Release $TAG already exists with ID: $RELEASE_ID"
else
# Create new release
echo "Creating new release $TAG on Gitea..."
RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
-H "Content-Type: application/json" \
-H "Authorization: token $RELEASE_WRITE_TOKEN" \
-d "$RELEASE_DATA")
echo "Release API response: $RELEASE_RESPONSE"
echo "Release API response: $RELEASE_RESPONSE"
RELEASE_ID=$(echo "$RELEASE_RESPONSE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
RELEASE_ID=$(echo "$RELEASE_RESPONSE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
if [ -z "$RELEASE_ID" ]; then
if [ -z "$RELEASE_ID" ]; then
echo "Failed to create release on Gitea." >&2
echo "API URL: $API_URL/releases" >&2
echo "Release data: $RELEASE_DATA" >&2
exit 1
fi
fi
echo "Created release with ID: $RELEASE_ID"
echo "Created new release with ID: $RELEASE_ID"
fi
# Upload binaries and install.sh
echo "Uploading assets to release..."
@ -125,7 +160,7 @@ for FILE in ${PROJECT}.${ARCH_ALIAS} ${PROJECT}.${ARCH} install.sh; do
fi
done
echo "Published bb64 version $v to $REPO_URL (tag $TAG) with binaries for $ARCH_ALIAS / $ARCH."
echo "Published bb64 version $VERSION to $REPO_URL (tag $TAG) with binaries for $ARCH_ALIAS / $ARCH."
#--------------------------------------------------------------------------------

View File

@ -5,7 +5,7 @@
#include <cstring>
#include <sstream>
#include <filesystem>
#include "version.h"
#include "version.hpp"
#include "b64ed.hpp"
// Recursively decode and print if nested bb64 command is found
@ -150,6 +150,7 @@ Usage:
bb64 -u Updates bb64 to the latest version (uses docker)
bb64 -v Prints the version number
bb64 version Prints the version number
)" << std::endl;
return -1;
@ -161,7 +162,7 @@ Usage:
{
if (mode == "-u")
return update_bb64();
else if (mode == "-v")
else if (mode == "-v" || mode == "version")
{
std::cout << VERSION << std::endl;
return 0;

View File

@ -1 +0,0 @@
static const char *VERSION = "39";

1
bb64/src/version.hpp.in Normal file
View File

@ -0,0 +1 @@
// Template consumed by CMake's configure_file(); @PROJECT_VERSION@ is replaced
// with the timestamp-based PROJECT_VERSION (YYYY.MMDD.HHMM) at configure time.
static const char *VERSION = "@PROJECT_VERSION@";

135
bb64/test.sh Executable file
View File

@ -0,0 +1,135 @@
#!/bin/bash
# Test suite for the bb64 binary: verifies version reporting (-v / version),
# basic encoding, and encode->decode round-tripping.
# Requires ./build.sh to have produced output/bb64 first.
set -euo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT="bb64"
BB64="$SCRIPT_DIR/output/$PROJECT"
TEST_DIR="$SCRIPT_DIR/test_temp"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Test counters
TESTS_PASSED=0
TESTS_FAILED=0

# Print a colored pass/fail marker for one test and update the counters.
#   $1 = test name, $2 = result code (0 = pass, non-zero = fail)
print_test_result() {
    local test_name="$1"
    local result="$2"
    if [ "$result" -eq 0 ]; then
        echo -e "${GREEN}✓${NC} $test_name"
        TESTS_PASSED=$((TESTS_PASSED + 1))
    else
        echo -e "${RED}✗${NC} $test_name"
        TESTS_FAILED=$((TESTS_FAILED + 1))
    fi
}

# Remove any temporary test artifacts; registered on EXIT so it also runs
# when the script aborts early (set -e).
cleanup() {
    echo -e "\n${YELLOW}Cleaning up test artifacts...${NC}"
    rm -rf "$TEST_DIR"
}

# Set up trap to ensure cleanup runs
trap cleanup EXIT

# Create test directory
mkdir -p "$TEST_DIR"

echo -e "${YELLOW}Running bb64 tests...${NC}\n"

# Check if bb64 binary exists and is runnable before attempting any tests
if [ ! -f "$BB64" ]; then
    echo -e "${RED}Error: bb64 binary not found at $BB64${NC}"
    echo "Please run ./build.sh first to build bb64"
    exit 1
fi

if [ ! -x "$BB64" ]; then
    echo -e "${RED}Error: bb64 binary is not executable${NC}"
    exit 1
fi

echo "Using bb64 binary: $BB64"

# Test 1: Version command with -v flag
# The CMake build stamps the version as YYYY.MMDD.HHMM, so validate that shape.
echo "Test 1: Version command (-v flag)"
VERSION_OUTPUT=$("$BB64" -v 2>&1 || true)
VERSION=$(echo "$VERSION_OUTPUT" | head -n 1)
if [[ "$VERSION" =~ ^[0-9]{4}\.[0-9]{4}\.[0-9]{4}$ ]]; then
    print_test_result "Version format with -v flag (YYYY.MMDD.HHMM)" 0
else
    print_test_result "Version format with -v flag (YYYY.MMDD.HHMM)" 1
    echo "  Expected: YYYY.MMDD.HHMM format, got: '$VERSION'"
fi

# Test 2: Version command with 'version' argument (alias for -v)
printf "\nTest 2: Version command (version argument)\n"
VERSION_OUTPUT2=$("$BB64" version 2>&1 || true)
VERSION2=$(echo "$VERSION_OUTPUT2" | head -n 1)
if [[ "$VERSION2" =~ ^[0-9]{4}\.[0-9]{4}\.[0-9]{4}$ ]]; then
    print_test_result "Version format with 'version' argument (YYYY.MMDD.HHMM)" 0
else
    print_test_result "Version format with 'version' argument (YYYY.MMDD.HHMM)" 1
    echo "  Expected: YYYY.MMDD.HHMM format, got: '$VERSION2'"
fi

# Test 3: Both version commands should report the identical version string
printf "\nTest 3: Version consistency\n"
if [ "$VERSION" = "$VERSION2" ]; then
    print_test_result "Both -v and version return same version" 0
else
    print_test_result "Both -v and version return same version" 1
    echo "  -v returned: '$VERSION'"
    echo "  version returned: '$VERSION2'"
fi

# Test 4: Basic encoding test — encoding any input should produce output
echo -e "\nTest 4: Basic encoding test"
TEST_STRING="hello world"
ENCODED_OUTPUT=$("$BB64" -e <<< "$TEST_STRING" 2>&1 || true)
if [ -n "$ENCODED_OUTPUT" ]; then
    print_test_result "Basic encoding produces output" 0
else
    print_test_result "Basic encoding produces output" 1
fi

# Test 5: Round trip — encode "echo hello", then decode with -d and check
# the original command text is recovered.
echo -e "\nTest 5: Basic decoding test"
ENCODED_ECHO=$(echo "echo hello" | "$BB64" -e)
if [ -n "$ENCODED_ECHO" ]; then
    DECODED_OUTPUT=$("$BB64" -d "$ENCODED_ECHO" 2>&1 || true)
    if [[ "$DECODED_OUTPUT" == *"echo hello"* ]]; then
        print_test_result "Basic decoding works correctly" 0
    else
        print_test_result "Basic decoding works correctly" 1
        echo "  Expected to contain 'echo hello', got: '$DECODED_OUTPUT'"
    fi
else
    print_test_result "Basic decoding works correctly" 1
    echo "  Failed to encode test string"
fi

cleanup

# Print summary
echo -e "\n${YELLOW}Test Summary:${NC}"
echo -e "Tests passed: ${GREEN}${TESTS_PASSED}${NC}"
echo -e "Tests failed: ${RED}${TESTS_FAILED}${NC}"

if [ "$TESTS_FAILED" -eq 0 ]; then
    echo -e "\n${GREEN}All tests passed!${NC}"
    exit 0
else
    echo -e "\n${RED}Some tests failed!${NC}"
    exit 1
fi

View File

@ -2,6 +2,15 @@
set -uo pipefail # Remove -e to handle errors manually
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
docker builder prune -f
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Arrays to track results
declare -A BUILD_RESULTS
declare -A TEST_RESULTS
@ -106,13 +115,35 @@ function buildtestpublish() {
cd "$dir" || echo "Failed to cd to $dir"
subtitle "🔨 BUILDING $TOOLNAME_UPPER 🔨"
dothis build "$dir" "$TOOLNAME"
if dothis build "$dir" "$TOOLNAME"; then
BUILD_SUCCESS=true
else
BUILD_SUCCESS=false
fi
subtitle "🔍 TESTING $TOOLNAME_UPPER 🔍"
dothis test "$dir" "$TOOLNAME"
if [ "$BUILD_SUCCESS" = true ]; then
if dothis test "$dir" "$TOOLNAME"; then
TEST_SUCCESS=true
else
TEST_SUCCESS=false
fi
else
echo "Skipping tests - build failed"
TEST_RESULTS["$TOOLNAME"]="SKIP"
TEST_SUCCESS=false
fi
subtitle "📦 PUBLISHING $TOOLNAME_UPPER 📦"
if [ "$BUILD_SUCCESS" = true ] && [ "$TEST_SUCCESS" = true ]; then
dothis publish "$dir" "$TOOLNAME"
elif [ "$BUILD_SUCCESS" = true ] && [ "${TEST_RESULTS[$TOOLNAME]}" = "SKIP" ]; then
# If tests are skipped (no test script), allow publish if build succeeded
dothis publish "$dir" "$TOOLNAME"
else
echo "Skipping publish - build or tests failed"
PUBLISH_RESULTS["$TOOLNAME"]="SKIP"
fi
echo "Done"
}
@ -166,9 +197,36 @@ function print_summary() {
local test_status="${TEST_RESULTS[$project]:-'-'}"
local publish_status="${PUBLISH_RESULTS[$project]:-'-'}"
printf "│ %-*s │ %-7s │ %-7s │ %-7s │\n" \
# Format status with proper spacing and colors for Unicode characters
local build_col test_col publish_col
# Format build status with colors
case "$build_status" in
"✓") build_col=$(printf " %s✓%s " "$GREEN" "$NC") ;;
"✗") build_col=$(printf " %s✗%s " "$RED" "$NC") ;;
"SKIP") build_col=$(printf " %s-%s " "$YELLOW" "$NC") ;;
*) build_col=" - " ;;
esac
# Format test status with colors
case "$test_status" in
"✓") test_col=$(printf " %s✓%s " "$GREEN" "$NC") ;;
"✗") test_col=$(printf " %s✗%s " "$RED" "$NC") ;;
"SKIP") test_col=$(printf " %s-%s " "$YELLOW" "$NC") ;;
*) test_col=" - " ;;
esac
# Format publish status with colors
case "$publish_status" in
"✓") publish_col=$(printf " %s✓%s " "$GREEN" "$NC") ;;
"✗") publish_col=$(printf " %s✗%s " "$RED" "$NC") ;;
"SKIP") publish_col=$(printf " %s-%s " "$YELLOW" "$NC") ;;
*) publish_col=" - " ;;
esac
printf "│ %-*s │%b│%b│%b│\n" \
$max_project_width "$project" \
"$build_status" "$test_status" "$publish_status"
"$build_col" "$test_col" "$publish_col"
done
# Print bottom border

View File

@ -17,12 +17,19 @@ Examples:
dehydrate src/ output/ Creates _src.cpp and _src.hpp in output/
dehydrate -u Updates dehydrate to the latest version
dehydrate -v Shows version number
dehydrate version Shows version number
)";
Args parse_args(int argc, char* argv[]) {
Args args;
int idx = 1;
// Check for "version" as first argument (no dash)
if (argc > 1 && std::string(argv[1]) == "version") {
args.version = true;
return args;
}
// Parse flags
while (idx < argc && argv[idx][0] == '-') {
std::string flag = argv[idx];

View File

@ -16,39 +16,48 @@ rm -f dehydrate_test
# Build the test program using Docker
# The Docker container supports both amd64 and arm64 architectures
docker run --rm \
-v "$PROJECT_DIR":/workdir \
-w /workdir/test \
gitea.jde.nz/public/dropshell-build-base:latest \
bash -c "
# Verify we can find the source file
if [ ! -f dehydrate_test.cpp ]; then
echo 'ERROR: dehydrate_test.cpp not found in current directory'
echo 'Working directory:' && pwd
echo 'Available files:' && ls -la
exit 1
fi
echo "Building dehydrate test executable..."
# Clean any existing binary and compile
rm -f dehydrate_test
if ! g++ -std=c++23 -static dehydrate_test.cpp -o dehydrate_test; then
# Use docker cp approach since volume mounting may not work in CI
CONTAINER_NAME="dehydrate-test-build-$$"
# Start container in detached mode
docker run -d --name "$CONTAINER_NAME" \
gitea.jde.nz/public/dropshell-build-base:latest \
sleep 60
# Copy source file into container
docker cp dehydrate_test.cpp "$CONTAINER_NAME":/dehydrate_test.cpp
# Compile in container
docker exec "$CONTAINER_NAME" bash -c "
echo 'Compiling dehydrate test...'
if ! g++ -std=c++23 -static /dehydrate_test.cpp -o /dehydrate_test; then
echo 'ERROR: Compilation failed'
exit 1
fi
# Verify binary was created and is executable
if [ ! -f dehydrate_test ]; then
# Verify binary was created
if [ ! -f /dehydrate_test ]; then
echo 'ERROR: Binary was not created'
exit 1
fi
# Quick architecture check - just verify the binary format
if ! file dehydrate_test | grep -q 'executable'; then
# Quick architecture check
if ! file /dehydrate_test | grep -q 'executable'; then
echo 'ERROR: Generated file is not an executable'
file dehydrate_test
file /dehydrate_test
exit 1
fi
"
echo 'Compilation successful'
"
# Copy binary back to host
docker cp "$CONTAINER_NAME":/dehydrate_test ./dehydrate_test
# Clean up container
docker rm -f "$CONTAINER_NAME"
# Check if compilation succeeded
if [ ! -f "./dehydrate_test" ]; then

View File

@ -36,13 +36,16 @@ target_include_directories(${PROJECT_NAME} PRIVATE
src/common)
# Find packages
find_package(OpenSSL REQUIRED)
find_package(Drogon CONFIG REQUIRED)
find_package(nlohmann_json REQUIRED)
# Add module path for FindCPRStatic
list(APPEND CMAKE_MODULE_PATH "/usr/local/share/cmake/Modules")
# Find packages
find_package(nlohmann_json REQUIRED)
find_package(CPRStatic REQUIRED)
# Link libraries
target_link_libraries(${PROJECT_NAME} PRIVATE
nlohmann_json::nlohmann_json Drogon::Drogon
/usr/local/lib/libpgcommon.a /usr/local/lib/libpgport.a
lzma dl)
nlohmann_json::nlohmann_json
cpr::cpr_static)

1
getpkg/debug_test.txt Normal file
View File

@ -0,0 +1 @@
Debug content

View File

@ -1,530 +1,258 @@
#include "GetbinClient.hpp"
#include <drogon/HttpClient.h>
#include <trantor/net/EventLoop.h>
#include <openssl/ssl.h>
#include <openssl/opensslconf.h>
#include <fstream>
#include <sstream>
#include <cpr/cpr.h>
#include <nlohmann/json.hpp>
#include <string>
#include <fstream>
#include <iostream>
#include <thread>
#include <chrono>
#include <cstdio>
#include <map>
#include <atomic>
#include <mutex>
#include <condition_variable>
#include <vector>
#include <ctime>
#include <filesystem>
#include <sstream>
#include <algorithm>
#include <set>
using json = nlohmann::json;
static constexpr const char* SERVER_HOST = "getpkg.xyz";
const std::string GetbinClient::SERVER_HOST = "getpkg.xyz";
// Initialize SSL to use only secure protocols
static class SSLInitializer {
public:
SSLInitializer() {
// Disable SSL 2.0, 3.0, TLS 1.0, and TLS 1.1
SSL_load_error_strings();
SSL_library_init();
// Note: This doesn't completely silence the warning but ensures we're using secure protocols
}
} ssl_init;
static std::string find_ca_certificates() {
// Common CA certificate locations across different Linux distributions
const std::vector<std::string> ca_paths = {
"/etc/ssl/certs/ca-certificates.crt", // Debian/Ubuntu/Raspbian
"/etc/pki/tls/certs/ca-bundle.crt", // Fedora/RHEL/CentOS
"/etc/ssl/ca-bundle.pem", // OpenSUSE
"/etc/pki/tls/cert.pem", // Fedora/RHEL alternative
"/etc/ssl/certs/ca-bundle.crt", // Some distros
"/etc/ssl/cert.pem", // Alpine Linux
"/usr/local/share/certs/ca-root-nss.crt", // FreeBSD
"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", // CentOS/RHEL 7+
"/etc/ca-certificates/extracted/tls-ca-bundle.pem" // Arch Linux
};
for (const auto& path : ca_paths) {
std::ifstream file(path);
if (file.good()) {
file.close();
return path;
}
}
return "";
GetbinClient::GetbinClient() {
// Initialize CPR (done automatically, but we could add global config here)
}
GetbinClient::GetbinClient() {}
std::string GetbinClient::getUserAgent() const {
return "getpkg/1.0";
}
bool GetbinClient::download(const std::string& toolName, const std::string& arch, const std::string& outPath) {
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
bool GetbinClient::download(const std::string& toolName, const std::string& arch, const std::string& outPath,
ProgressCallback progressCallback) {
try {
std::string url = "https://" + SERVER_HOST + "/object/" + toolName + ":" + arch;
std::thread worker([&]() {
trantor::EventLoop loop;
cpr::Session session;
session.SetUrl(cpr::Url{url});
session.SetHeader(cpr::Header{{"User-Agent", getUserAgent()}});
session.SetTimeout(cpr::Timeout{30000}); // 30 seconds
session.SetVerifySsl(cpr::VerifySsl{true});
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates for HTTPS
std::string ca_path = find_ca_certificates();
if (!ca_path.empty()) {
// Use addSSLConfigs with proper parameter names for OpenSSL
std::vector<std::pair<std::string, std::string>> sslConfigs;
sslConfigs.push_back({"VerifyCAFile", ca_path});
client->addSSLConfigs(sslConfigs);
} else {
// If no CA certificates found, print warning but continue
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
// Add progress callback if provided
if (progressCallback) {
session.SetProgressCallback(cpr::ProgressCallback{[progressCallback](cpr::cpr_off_t downloadTotal, cpr::cpr_off_t downloadNow,
cpr::cpr_off_t uploadTotal, cpr::cpr_off_t uploadNow,
intptr_t userdata) -> bool {
return progressCallback(static_cast<size_t>(downloadNow), static_cast<size_t>(downloadTotal));
}});
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
auto response = session.Get();
std::string object_path = "/object/" + toolName + ":" + arch;
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath(object_path);
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response && response->getStatusCode() == drogon::k200OK) {
if (response.status_code == 200) {
std::ofstream ofs(outPath, std::ios::binary);
if (ofs) {
const auto& body = response->getBody();
ofs.write(body.data(), body.size());
success = ofs.good();
ofs.write(response.text.data(), response.text.size());
return ofs.good();
}
} else if (response.status_code == 404) {
// Not found - this is expected for arch fallback
return false;
} else {
std::cerr << "[GetbinClient::download] HTTP request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 30.0); // 30 second timeout
loop.loop();
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
std::cerr << "[GetbinClient::download] HTTP " << response.status_code << ": " << response.error.message << std::endl;
}
worker.join();
return success;
}
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token) {
// Read file first
std::ifstream ifs(archivePath, std::ios::binary);
if (!ifs) {
std::cerr << "[GetbinClient::upload] Failed to open archive file: " << archivePath << std::endl;
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::download] Exception: " << e.what() << std::endl;
return false;
}
std::string file_content((std::istreambuf_iterator<char>(ifs)), std::istreambuf_iterator<char>());
// Compose metadata
json metadata = { {"labeltags", json::array()} };
std::string filename = archivePath.substr(archivePath.find_last_of("/\\") + 1);
size_t dot = filename.find('.');
std::string labeltag = dot != std::string::npos ? filename.substr(0, dot) : filename;
metadata["labeltags"].push_back(labeltag);
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates
std::string ca_path = find_ca_certificates();
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
if (ca_path.empty()) {
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
// Create upload file from memory content
// First save content to a temporary file since UploadFile expects a file path
std::string temp_file = "/tmp/getpkg_upload_" + std::to_string(std::time(nullptr)) + ".tgz";
std::ofstream temp_ofs(temp_file, std::ios::binary);
if (!temp_ofs) {
std::cerr << "[GetbinClient::upload] Failed to create temporary file: " << temp_file << std::endl;
success = false;
done = true;
cv.notify_one();
loop.quit();
return;
}
temp_ofs.write(file_content.data(), file_content.size());
temp_ofs.close();
// Create upload request with file
drogon::UploadFile upload_file(temp_file);
auto req = drogon::HttpRequest::newFileUploadRequest({upload_file});
req->setMethod(drogon::Put);
req->setPath("/upload");
req->addHeader("Authorization", "Bearer " + token);
// Add metadata as form parameter
req->setParameter("metadata", metadata.dump());
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response) {
int status_code = static_cast<int>(response->getStatusCode());
std::string response_body(response->getBody());
if (status_code == 200 || status_code == 201) {
try {
auto resp_json = json::parse(response_body);
if (resp_json.contains("url")) outUrl = resp_json["url"].get<std::string>();
if (resp_json.contains("hash")) outHash = resp_json["hash"].get<std::string>();
success = true;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::upload] Failed to parse JSON response: " << e.what() << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << response_body << std::endl;
}
} else {
std::cerr << "[GetbinClient::upload] HTTP error: status code " << status_code << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << response_body << std::endl;
}
} else {
std::cerr << "[GetbinClient::upload] HTTP /upload request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 60.0); // 60 second timeout
loop.loop();
// Clean up temporary file
std::remove(temp_file.c_str());
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
}
worker.join();
return success;
}
bool GetbinClient::getHash(const std::string& toolName, const std::string& arch, std::string& outHash) {
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates
std::string ca_path = find_ca_certificates();
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
if (ca_path.empty()) {
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
std::string hash_path = "/hash/" + toolName + ":" + arch;
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath(hash_path);
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response && response->getStatusCode() == drogon::k200OK) {
std::string response_body(response->getBody());
// Try to parse hash from response body
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash,
const std::string& token, ProgressCallback progressCallback) {
try {
// Try JSON first
auto resp_json = json::parse(response_body);
if (resp_json.contains("hash")) {
outHash = resp_json["hash"].get<std::string>();
success = true;
std::string url = "https://" + SERVER_HOST + "/upload";
cpr::Session session;
session.SetUrl(cpr::Url{url});
session.SetHeader(cpr::Header{
{"User-Agent", getUserAgent()},
{"Authorization", "Bearer " + token}
});
session.SetTimeout(cpr::Timeout{300000}); // 5 minutes for uploads
session.SetVerifySsl(cpr::VerifySsl{true});
// Extract tool name and arch from archive path for labeltags
// Archive path format: /path/to/tool-name:arch.tgz or similar
std::string archiveName = std::filesystem::path(archivePath).filename().string();
std::string toolNameArch = archiveName;
if (toolNameArch.ends_with(".tgz")) {
toolNameArch = toolNameArch.substr(0, toolNameArch.length() - 4);
}
} catch (...) {
// Not JSON, treat as plain text
outHash = response_body;
// Create metadata JSON with labeltags
json metadata;
metadata["labeltags"] = json::array({toolNameArch});
// Set up multipart form with file and metadata
session.SetMultipart(cpr::Multipart{
cpr::Part{"file", cpr::File{archivePath}},
cpr::Part{"metadata", metadata.dump(), "application/json"}
});
// Add progress callback if provided
if (progressCallback) {
session.SetProgressCallback(cpr::ProgressCallback{[progressCallback](cpr::cpr_off_t downloadTotal, cpr::cpr_off_t downloadNow,
cpr::cpr_off_t uploadTotal, cpr::cpr_off_t uploadNow,
intptr_t userdata) -> bool {
return progressCallback(static_cast<size_t>(uploadNow), static_cast<size_t>(uploadTotal));
}});
}
auto response = session.Put();
if (response.status_code == 200) {
try {
auto resp_json = json::parse(response.text);
if (resp_json.contains("hash") && resp_json.contains("result") && resp_json["result"] == "success") {
outUrl = "https://" + SERVER_HOST + "/object/" + resp_json["hash"].get<std::string>();
outHash = resp_json["hash"].get<std::string>();
return true;
}
} catch (const json::exception& e) {
// Try to extract from plain text response
outUrl = "";
outHash = response.text;
// Remove trailing newline if present
if (!outHash.empty() && outHash.back() == '\n') {
outHash.pop_back();
}
success = !outHash.empty();
return !outHash.empty();
}
} else {
std::cerr << "[GetbinClient::upload] HTTP " << response.status_code << ": " << response.error.message << std::endl;
if (!response.text.empty()) {
std::cerr << "[GetbinClient::upload] Response: " << response.text << std::endl;
}
}
done = true;
cv.notify_one();
loop.quit();
}, 10.0); // 10 second timeout
loop.loop();
});
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::upload] Exception: " << e.what() << std::endl;
return false;
}
}
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
bool GetbinClient::getHash(const std::string& toolName, const std::string& arch, std::string& outHash) {
try {
std::string url = "https://" + SERVER_HOST + "/hash/" + toolName + ":" + arch;
auto response = cpr::Get(cpr::Url{url},
cpr::Header{{"User-Agent", getUserAgent()}},
cpr::Timeout{10000}, // 10 seconds
cpr::VerifySsl{true});
if (response.status_code == 200) {
try {
// Try JSON first
auto resp_json = json::parse(response.text);
if (resp_json.contains("hash")) {
outHash = resp_json["hash"].get<std::string>();
return true;
}
} catch (const json::exception&) {
// Not JSON, treat as plain text
outHash = response.text;
// Remove trailing newline if present
if (!outHash.empty() && outHash.back() == '\n') {
outHash.pop_back();
}
return !outHash.empty();
}
} else if (response.status_code == 404) {
// Not found - this is expected for non-existent tools/archs
return false;
} else {
std::cerr << "[GetbinClient::getHash] HTTP " << response.status_code << ": " << response.error.message << std::endl;
}
worker.join();
return success;
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::getHash] Exception: " << e.what() << std::endl;
return false;
}
}
bool GetbinClient::deleteObject(const std::string& hash, const std::string& token) {
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates
std::string ca_path = find_ca_certificates();
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
if (ca_path.empty()) {
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
std::string delete_path = "/deleteobject?hash=" + hash;
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath(delete_path);
req->addHeader("Authorization", "Bearer " + token);
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response) {
int status_code = static_cast<int>(response->getStatusCode());
std::string response_body(response->getBody());
if (status_code == 200) {
// Check if the response indicates success
try {
auto resp_json = json::parse(response_body);
if (resp_json.contains("result") && resp_json["result"] == "success") {
success = true;
}
} catch (...) {
// If not JSON, assume success if 200 OK
success = true;
}
std::string url = "https://" + SERVER_HOST + "/deleteobject?hash=" + hash;
auto response = cpr::Get(cpr::Url{url},
cpr::Header{
{"User-Agent", getUserAgent()},
{"Authorization", "Bearer " + token}
},
cpr::Timeout{30000}, // 30 seconds
cpr::VerifySsl{true});
if (response.status_code == 200) {
return true;
} else {
std::cerr << "[GetbinClient::deleteObject] HTTP error: status code " << status_code << std::endl;
std::cerr << "[GetbinClient::deleteObject] Response body: " << response_body << std::endl;
}
} else {
std::cerr << "[GetbinClient::deleteObject] HTTP request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 10.0); // 10 second timeout
loop.loop();
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
}
worker.join();
return success;
}
bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
outPackages.clear();
// Set up SSL configuration
std::string ca_path = find_ca_certificates();
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath("/dir");
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
if (result == drogon::ReqResult::Ok) {
int status_code = response->getStatusCode();
std::string response_body = std::string(response->getBody());
if (status_code == 200) {
try {
json json_response = json::parse(response_body);
if (json_response.contains("entries") && json_response["entries"].is_array()) {
for (const auto& entry : json_response["entries"]) {
if (entry.contains("labeltags") && entry["labeltags"].is_array()) {
for (const auto& labeltag : entry["labeltags"]) {
if (labeltag.is_string()) {
std::string name = labeltag.get<std::string>();
// Extract tool name (remove architecture suffix if present)
size_t colon_pos = name.find(":");
if (colon_pos != std::string::npos) {
name = name.substr(0, colon_pos);
}
// Skip empty names
if (name.empty()) continue;
// Add to list if not already present
if (std::find(outPackages.begin(), outPackages.end(), name) == outPackages.end()) {
outPackages.push_back(name);
std::cerr << "[GetbinClient::deleteObject] HTTP " << response.status_code << ": " << response.error.message << std::endl;
if (!response.text.empty()) {
std::cerr << "[GetbinClient::deleteObject] Response: " << response.text << std::endl;
}
}
}
}
}
success = true;
}
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::listPackages] JSON parse error: " << e.what() << std::endl;
std::cerr << "[GetbinClient::deleteObject] Exception: " << e.what() << std::endl;
return false;
}
}
bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
try {
std::string url = "https://" + SERVER_HOST + "/packages";
auto response = cpr::Get(cpr::Url{url},
cpr::Header{{"User-Agent", getUserAgent()}},
cpr::Timeout{30000}, // 30 seconds
cpr::VerifySsl{true});
if (response.status_code == 200) {
try {
auto resp_json = json::parse(response.text);
if (resp_json.is_array()) {
outPackages.clear();
for (const auto& item : resp_json) {
if (item.is_string()) {
outPackages.push_back(item.get<std::string>());
}
}
return true;
} else if (resp_json.contains("packages") && resp_json["packages"].is_array()) {
outPackages.clear();
for (const auto& item : resp_json["packages"]) {
if (item.is_string()) {
outPackages.push_back(item.get<std::string>());
}
}
return true;
}
} catch (const json::exception&) {
// Try to parse as newline-separated list
outPackages.clear();
std::istringstream stream(response.text);
std::string line;
while (std::getline(stream, line)) {
if (!line.empty()) {
outPackages.push_back(line);
}
}
return !outPackages.empty();
}
} else {
std::cerr << "[GetbinClient::listPackages] HTTP " << response.status_code << ": " << response.error.message << std::endl;
}
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::listPackages] Exception: " << e.what() << std::endl;
return false;
}
} else {
std::cerr << "[GetbinClient::listPackages] HTTP error: status code " << status_code << std::endl;
}
} else {
std::cerr << "[GetbinClient::listPackages] HTTP request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 10.0);
loop.loop();
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
}
worker.join();
// Filter out duplicates where we have both toolname and toolname-noarch
// Keep the base name and remove the -noarch variant
std::vector<std::string> filteredPackages;
std::set<std::string> baseNames;
// First pass: collect all base names (without -noarch)
for (const auto& pkg : outPackages) {
const std::string suffix = "-noarch";
if (pkg.length() < suffix.length() || pkg.substr(pkg.length() - suffix.length()) != suffix) {
baseNames.insert(pkg);
}
}
// Second pass: add packages, skipping -noarch variants if base exists
for (const auto& pkg : outPackages) {
const std::string suffix = "-noarch";
if (pkg.length() >= suffix.length() && pkg.substr(pkg.length() - suffix.length()) == suffix) {
std::string baseName = pkg.substr(0, pkg.length() - suffix.length());
if (baseNames.find(baseName) == baseNames.end()) {
filteredPackages.push_back(pkg); // Keep -noarch only if no base version
}
} else {
filteredPackages.push_back(pkg); // Always keep base versions
}
}
outPackages = std::move(filteredPackages);
// Sort the packages for better display
std::sort(outPackages.begin(), outPackages.end());
return success;
}

View File

@ -1,13 +1,24 @@
#pragma once
#include <string>
#include <vector>
#include <functional>
// HTTP client for the getbin package server: download/upload tool archives,
// query hashes, delete objects, and list published packages.
class GetbinClient {
public:
    GetbinClient();

    // Progress callback: (transferred_bytes, total_bytes) -> return true to
    // continue the transfer, false to abort.
    using ProgressCallback = std::function<bool(size_t, size_t)>;

    // The superseded pre-ProgressCallback overloads were removed: keeping them
    // alongside these defaulted-parameter versions makes calls ambiguous.
    bool download(const std::string& toolName, const std::string& arch, const std::string& outPath,
                  ProgressCallback progressCallback = nullptr);
    bool upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token,
                ProgressCallback progressCallback = nullptr);
    bool getHash(const std::string& toolName, const std::string& arch, std::string& outHash);
    bool deleteObject(const std::string& hash, const std::string& token);
    bool listPackages(std::vector<std::string>& outPackages);

private:
    // Server hostname (scheme is added by the implementation).
    static const std::string SERVER_HOST;
    // User-Agent string sent with every request.
    std::string getUserAgent() const;
};

View File

@ -200,27 +200,43 @@ int install_tool(int argc, char* argv[]) {
// Download tool - try arch-specific version first, then universal fallback
GetbinClient getbin2;
std::string downloadArch = arch;
//std::cout << "Downloading " << toolName << ":" << arch << "..." << std::endl;
if (!getbin2.download(toolName, arch, archivePath.string())) {
// Progress callback for downloads
auto progressCallback = [&toolName](size_t downloaded, size_t total) -> bool {
if (total > 0) {
int percent = (downloaded * 100) / total;
std::cout << "\rDownloading " << toolName << "... " << percent << "%" << std::flush;
} else {
std::cout << "\rDownloading " << toolName << "... " << downloaded << " bytes" << std::flush;
}
return true; // Continue download
};
std::cout << "Downloading " << toolName << "..." << std::flush;
if (!getbin2.download(toolName, arch, archivePath.string(), progressCallback)) {
// Try universal version as fallback
//std::cout << "Arch-specific version not found, trying universal version..." << std::endl;
//std::cout << "Downloading " << toolName << ":universal..." << std::endl;
if (!getbin2.download(toolName, "universal", archivePath.string())) {
std::cerr << "Failed to download tool archive (tried both " << arch << " and universal)." << std::endl;
std::cout << "\rArch-specific version not found, trying universal..." << std::endl;
if (!getbin2.download(toolName, "universal", archivePath.string(), progressCallback)) {
std::cerr << "\rFailed to download tool archive (tried both " << arch << " and universal)." << std::endl;
return 1;
}
downloadArch = "universal";
}
std::cout << "\rDownloading " << toolName << "... done" << std::endl;
// Unpack tool
std::cout << "Unpacking..." << std::flush;
if (!common::unpack_tgz(archivePath.string(), binDir.string())) {
std::cerr << "Failed to unpack tool archive." << std::endl;
std::cerr << "\rFailed to unpack tool archive." << std::endl;
return 1;
}
std::cout << "\rUnpacking... done" << std::endl;
// Add to PATH and autocomplete
std::cout << "Configuring..." << std::flush;
scriptManager.addToolEntry(toolName, binDir.string());
scriptManager.addAutocomplete(toolName);
std::cout << "\rConfiguring... done" << std::endl;
// Get tool info
std::string hash;
@ -314,10 +330,24 @@ int publish_tool(int argc, char* argv[]) {
}
GetbinClient getbin;
std::string url, hash;
if (!getbin.upload(archivePath.string(), url, hash, token)) {
std::cerr << "Failed to upload archive." << std::endl;
// Progress callback for upload
auto uploadProgressCallback = [](size_t uploaded, size_t total) -> bool {
if (total > 0) {
int percent = (uploaded * 100) / total;
std::cout << "\rUploading... " << percent << "%" << std::flush;
} else {
std::cout << "\rUploading... " << uploaded << " bytes" << std::flush;
}
return true; // Continue upload
};
std::cout << "Uploading..." << std::flush;
if (!getbin.upload(archivePath.string(), url, hash, token, uploadProgressCallback)) {
std::cerr << "\rFailed to upload archive." << std::endl;
return 1;
}
std::cout << "\rUploading... done" << std::endl;
std::cout << "Published! URL: " << url << "\nHash: " << hash << std::endl;
return 0;
}
@ -326,73 +356,161 @@ int update_tool(int argc, char* argv[]) {
std::string home = get_home();
std::filesystem::path configDir = std::filesystem::path(home) / ".config/getpkg";
// Collect all installed tools
std::vector<std::tuple<std::string, std::string, std::string>> updateResults; // name, status, version
// Capture stdout to process install_tool output
auto processToolUpdate = [&](const std::string& toolName) -> std::tuple<std::string, std::string> {
// Redirect stdout and stderr to capture output
std::stringstream buffer;
std::stringstream errBuffer;
std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
char* toolArgv[] = {argv[0], (char*)"install", (char*)toolName.c_str()};
int result = install_tool(3, toolArgv);
// Restore stdout and stderr
std::cout.rdbuf(oldOut);
std::cerr.rdbuf(oldErr);
std::string output = buffer.str();
std::string status = "Failed";
std::string version = "-";
if (result == 0) {
if (output.find("is already up to date") != std::string::npos) {
status = "Up to date";
} else if (output.find("Installed " + toolName + " successfully") != std::string::npos) {
// Check if it was an update or fresh install
if (output.find("Updating " + toolName) != std::string::npos) {
status = "Updated";
} else {
status = "Installed";
}
}
// Try to get version from config
std::filesystem::path toolInfoPath = configDir / (toolName + ".json");
if (std::filesystem::exists(toolInfoPath)) {
std::ifstream tfile(toolInfoPath);
json toolInfo;
tfile >> toolInfo;
version = toolInfo.value("version", "-");
if (!version.empty() && version.back() == '\n') version.pop_back();
// If version is empty, try to show something useful
if (version.empty() || version == "-") {
version = "installed";
}
}
}
return std::make_tuple(status, version);
// Structure to hold tool information
struct ToolInfo {
std::string name;
std::string localHash;
std::string remoteHash;
std::string arch;
std::string version;
bool needsUpdate = false;
std::string status = "Up to date";
};
// First update getpkg itself
auto [getpkgStatus, getpkgVersion] = processToolUpdate("getpkg");
updateResults.push_back(std::make_tuple("getpkg", getpkgStatus, getpkgVersion));
std::vector<ToolInfo> tools;
// Then update all other installed tools
// Collect all installed tools
if (std::filesystem::exists(configDir)) {
for (const auto& entry : std::filesystem::directory_iterator(configDir)) {
if (entry.path().extension() == ".json") {
std::string tname = entry.path().stem();
if (tname != "getpkg") { // Skip getpkg since we already did it
auto [status, version] = processToolUpdate(tname);
updateResults.push_back(std::make_tuple(tname, status, version));
ToolInfo tool;
tool.name = tname;
// Read local tool info
std::ifstream tfile(entry.path());
if (tfile.good()) {
json toolInfo;
tfile >> toolInfo;
tool.localHash = toolInfo.value("hash", "");
tool.arch = toolInfo.value("arch", get_arch());
tool.version = toolInfo.value("version", "-");
if (!tool.version.empty() && tool.version.back() == '\n') {
tool.version.pop_back();
}
if (tool.version.empty() || tool.version == "-") {
tool.version = "installed";
}
}
tools.push_back(tool);
}
}
}
if (tools.empty()) {
std::cout << "No tools installed." << std::endl;
return 0;
}
// Step 1: Check for updates (with progress)
std::cout << "Checking " << tools.size() << " tools for updates..." << std::endl;
GetbinClient getbin;
for (size_t i = 0; i < tools.size(); ++i) {
auto& tool = tools[i];
// Show progress
std::cout << "\r[" << (i + 1) << "/" << tools.size() << "] Checking " << tool.name << "..." << std::flush;
// Check remote hash
std::string remoteHash;
if (getbin.getHash(tool.name, tool.arch, remoteHash) && !remoteHash.empty()) {
tool.remoteHash = remoteHash;
if (tool.localHash != remoteHash) {
tool.needsUpdate = true;
tool.status = "Needs update";
}
} else {
tool.status = "Check failed";
}
}
std::cout << "\r" << std::string(50, ' ') << "\r" << std::flush; // Clear progress line
// Step 2: Update tools that need updating
std::vector<std::tuple<std::string, std::string, std::string>> updateResults;
// First update getpkg if it needs updating
auto getpkgIt = std::find_if(tools.begin(), tools.end(),
[](const ToolInfo& t) { return t.name == "getpkg"; });
if (getpkgIt != tools.end() && getpkgIt->needsUpdate) {
std::cout << "Updating getpkg..." << std::flush;
// Use install_tool for actual update
std::stringstream buffer, errBuffer;
std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
char* toolArgv[] = {argv[0], (char*)"install", (char*)"getpkg"};
int result = install_tool(3, toolArgv);
std::cout.rdbuf(oldOut);
std::cerr.rdbuf(oldErr);
if (result == 0) {
getpkgIt->status = "Updated";
std::cout << " Updated" << std::endl;
} else {
getpkgIt->status = "Failed";
std::cout << " Failed" << std::endl;
}
}
// Update other tools
int toolsToUpdate = std::count_if(tools.begin(), tools.end(),
[](const ToolInfo& t) { return t.needsUpdate && t.name != "getpkg"; });
if (toolsToUpdate > 0) {
std::cout << "Updating " << toolsToUpdate << " tools..." << std::endl;
int updatedCount = 0;
for (auto& tool : tools) {
if (tool.needsUpdate && tool.name != "getpkg") {
updatedCount++;
std::cout << "[" << updatedCount << "/" << toolsToUpdate << "] Updating " << tool.name << "..." << std::flush;
// Use install_tool for actual update
std::stringstream buffer, errBuffer;
std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
char* toolArgv[] = {argv[0], (char*)"install", (char*)tool.name.c_str()};
int result = install_tool(3, toolArgv);
std::cout.rdbuf(oldOut);
std::cerr.rdbuf(oldErr);
if (result == 0) {
tool.status = "Updated";
std::cout << " Updated" << std::endl;
// Re-read version after update
std::filesystem::path toolInfoPath = configDir / (tool.name + ".json");
if (std::filesystem::exists(toolInfoPath)) {
std::ifstream tfile(toolInfoPath);
json toolInfo;
tfile >> toolInfo;
tool.version = toolInfo.value("version", tool.version);
if (!tool.version.empty() && tool.version.back() == '\n') {
tool.version.pop_back();
}
if (tool.version.empty() || tool.version == "-") {
tool.version = "installed";
}
}
} else {
tool.status = "Failed";
std::cout << " Failed" << std::endl;
}
}
}
}
// Prepare results for display
for (const auto& tool : tools) {
updateResults.push_back(std::make_tuple(tool.name, tool.status, tool.version));
}
// Display results in a table

1
getpkg/test_debug/debug-test Executable file
View File

@ -0,0 +1 @@
#!/bin/bash
echo debug

1
getpkg/test_debug2/debug-test2 Executable file
View File

@ -0,0 +1 @@
#!/bin/bash
echo debug2

1
getpkg/test_upload.txt Normal file
View File

@ -0,0 +1 @@
test content

2
gp/gp
View File

@ -350,7 +350,7 @@ case "${1:-}" in
exit 0
;;
version)
echo "gp version 2.0.0"
echo "2.0.1"
exit 0
;;
esac