Compare commits
9 commits: v2025.0629 ... v2025.0629

SHA1:
70cb5c1b3a
facc6b73b0
9a24576e37
3f68f44e3d
dbe88a7121
00d1e86157
3388a46bf3
0f5421630a
50fb5f9da6
@@ -26,7 +26,7 @@ jobs:
           password: ${{ secrets.DOCKER_PUSH_TOKEN }}
       - name: Build Test Publish All
         run: |
-          SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh
+          SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh --no-cache

   test-install-from-scratch:
     needs: [build]
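
The only change here is the --no-cache flag appended to the pipeline invocation. The same cache bypass can be exercised in a local run; a minimal sketch (token values are placeholders, not real secrets):

    # Run the full pipeline without Docker layer cache; tokens are placeholders.
    SOS_WRITE_TOKEN=xxx RELEASE_WRITE_TOKEN=xxx ./buildtestpublish_all.sh --no-cache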
@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest
 
+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
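
Note that ${CACHE_FLAG} is expanded unquoted on purpose: when NO_CACHE is unset the variable is empty and the expansion disappears entirely, whereas a quoted "$CACHE_FLAG" would pass an empty argument that docker build rejects. A minimal illustration of the pattern:

    CACHE_FLAG=""
    [ "${NO_CACHE:-false}" = "true" ] && CACHE_FLAG="--no-cache"
    docker build ${CACHE_FLAG} -t demo .    # empty CACHE_FLAG expands to nothing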
bb64/clean.sh (new executable file, +24 lines)

@@ -0,0 +1,24 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="bb64"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output directory
+if [ -d "${SCRIPT_DIR}/output" ]; then
+    echo "Removing output directory..."
+    rm -rf "${SCRIPT_DIR}/output"
+fi
+
+# Remove Docker images related to this project
+echo "Removing Docker images..."
+docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
+
+# Remove Docker build cache
+echo "Pruning Docker build cache..."
+docker builder prune -f
+
+echo "✓ ${PROJECT} cleaned successfully"
@@ -20,7 +20,14 @@ echo "Building version $VERSION" >&2
 # build release version
 export CMAKE_BUILD_TYPE="Release"
 
+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \

@@ -84,9 +91,11 @@ if git rev-parse "$TAG" >/dev/null 2>&1; then
 fi
 
 # Check if tag exists on remote
+TAG_EXISTS_ON_REMOTE=false
 if git ls-remote --tags origin | grep -q "refs/tags/$TAG"; then
     echo "Tag $TAG already exists on remote - this is expected for multi-architecture builds"
     echo "Skipping tag creation and proceeding with release attachment..."
+    TAG_EXISTS_ON_REMOTE=true
 else
     echo "Creating new tag $TAG..."
     git tag -a "$TAG" -m "Release $TAG"

@@ -105,12 +114,20 @@ echo "Getting or creating release $TAG on Gitea..."
 EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
     -H "Authorization: token $RELEASE_WRITE_TOKEN")
 
+echo "Existing release check response: $EXISTING_RELEASE" >&2
+
 if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
     # Release already exists, get its ID
     RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
     echo "Release $TAG already exists with ID: $RELEASE_ID"
 else
-    # Create new release
+    # Create new release only if tag was just created
+    if [ "$TAG_EXISTS_ON_REMOTE" = true ]; then
+        echo "Tag exists on remote but no release found - this shouldn't happen" >&2
+        echo "API response was: $EXISTING_RELEASE" >&2
+        exit 1
+    fi
+
     echo "Creating new release $TAG on Gitea..."
     RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
         -H "Content-Type: application/json" \
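
These curl calls appear to target Gitea's standard release endpoints under /repos/{owner}/{repo}/releases. The POST body is cut off in this diff; a sketch of the usual create-release payload, with the field values as assumptions rather than what the script actually sends:

    # Hypothetical payload; the real one is truncated in this diff.
    curl -s -X POST "$API_URL/releases" \
        -H "Content-Type: application/json" \
        -H "Authorization: token $RELEASE_WRITE_TOKEN" \
        -d "{\"tag_name\":\"$TAG\",\"name\":\"$TAG\",\"draft\":false,\"prerelease\":false}"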
@@ -2,6 +2,13 @@
 set -uo pipefail # Remove -e to handle errors manually
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
 
+# Parse command line arguments
+NO_CACHE=false
+if [[ "$*" == *"--no-cache"* ]]; then
+    NO_CACHE=true
+    export NO_CACHE
+fi
+
 docker builder prune -f
 
 
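
The [[ "$*" == *"--no-cache"* ]] test is a substring match over all arguments joined, so any argument merely containing --no-cache would also trigger it. A stricter per-argument variant, if exact matching is wanted (a sketch, not in the diff):

    NO_CACHE=false
    for arg in "$@"; do
        if [ "$arg" = "--no-cache" ]; then
            NO_CACHE=true
            export NO_CACHE
        fi
    done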
clean.sh (new executable file, +44 lines)

@@ -0,0 +1,44 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+
+echo "🧹 CLEANING ALL PROJECTS 🧹"
+echo
+
+# Get all project directories
+PROJECT_DIRS=$(find "$SCRIPT_DIR" -maxdepth 1 -type d \
+    -not -name ".*" \
+    -not -path "$SCRIPT_DIR" | sort)
+
+for dir in $PROJECT_DIRS; do
+    PROJECT_NAME=$(basename "$dir")
+
+    if [ -f "$dir/clean.sh" ]; then
+        echo "Cleaning $PROJECT_NAME..."
+        cd "$dir"
+        ./clean.sh
+        echo
+    else
+        echo "⚠️  No clean.sh found for $PROJECT_NAME, skipping..."
+        echo
+    fi
+done
+
+# Global Docker cleanup
+echo "🐳 Global Docker cleanup..."
+echo "Removing unused Docker images..."
+docker image prune -f
+
+echo "Removing unused Docker containers..."
+docker container prune -f
+
+echo "Removing unused Docker networks..."
+docker network prune -f
+
+echo "Removing unused Docker volumes..."
+docker volume prune -f
+
+echo
+echo "✅ All projects cleaned successfully!"
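
Because each loop iteration cds into the project directory and never returns, the script relies on find emitting absolute paths. Running each clean in a subshell removes that dependence; a sketch of the alternative:

    for dir in $PROJECT_DIRS; do
        # Subshell keeps the caller's working directory intact.
        [ -f "$dir/clean.sh" ] && ( cd "$dir" && ./clean.sh )
    done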
@@ -1,65 +0,0 @@
-ARG IMAGE_TAG
-FROM gitea.jde.nz/public/dropshell-build-base:latest AS builder
-
-ARG PROJECT
-ARG CMAKE_BUILD_TYPE=Debug
-
-# Set working directory
-WORKDIR /app
-
-SHELL ["/bin/bash", "-c"]
-
-# Create cache directories
-RUN mkdir -p /ccache
-
-# Set up ccache
-ENV CCACHE_DIR=/ccache
-ENV CCACHE_COMPILERCHECK=content
-ENV CCACHE_MAXSIZE=2G
-
-# Copy build files
-COPY CMakeLists.txt ./
-COPY src/version.hpp.in src/
-
-# Copy source files
-COPY src/ src/
-COPY contrib/ contrib/
-
-# Configure project
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    mkdir -p /build && \
-    cmake -G Ninja -S /app -B /build \
-        -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-        -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-        -DCMAKE_C_COMPILER_LAUNCHER=ccache \
-        -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
-        -DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
-        -DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
-        -DPROJECT_NAME="${PROJECT}" \
-        -DCMAKE_STRIP=OFF \
-        ${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
-
-# Build project
-RUN --mount=type=cache,target=/ccache \
-    --mount=type=cache,target=/build \
-    cmake --build /build
-
-# Copy the built executable to a regular directory for the final stage
-RUN --mount=type=cache,target=/build \
-    mkdir -p /output && \
-    find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
-    find /build -type f -executable -exec cp {} /output/${PROJECT} \;
-
-# if we're a release build, then run upx on the binary.
-RUN if [ "${CMAKE_BUILD_TYPE}" = "Release" ]; then \
-        upx /output/${PROJECT}; \
-    fi
-
-# Final stage that only contains the binary
-FROM scratch AS project
-
-ARG PROJECT
-
-# Copy the actual binary from the regular directory
-COPY --from=builder /output/${PROJECT} /${PROJECT}
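
The removed Dockerfile leaned on the ${VAR:+...} expansion to emit the toolchain flag only when CMAKE_TOOLCHAIN_FILE is set. A shell illustration of that idiom:

    unset CMAKE_TOOLCHAIN_FILE
    echo cmake ${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
    # -> cmake
    CMAKE_TOOLCHAIN_FILE=/tc.cmake
    echo cmake ${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
    # -> cmake -DCMAKE_TOOLCHAIN_FILE=/tc.cmake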
@@ -1,22 +1,26 @@
 #!/bin/bash
 
 set -euo pipefail
+# build.sh using docker run approach
 
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
 PROJECT="dehydrate"
 
 export CMAKE_BUILD_TYPE="Debug"
 
 rm -rf "${SCRIPT_DIR}/output"
+# Create persistent build directory
+mkdir -p "${SCRIPT_DIR}/build"
 mkdir -p "${SCRIPT_DIR}/output"
 
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest
 
-docker build \
-    -t "${PROJECT}-build" \
-    -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
-    --build-arg PROJECT="${PROJECT}" \
-    --build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
-    --output "${SCRIPT_DIR}/output" \
-    "${SCRIPT_DIR}"
+# Run build in container with mounted directories
+docker run --rm \
+    --user "$(id -u):$(id -g)" \
+    -v "${SCRIPT_DIR}:/src:ro" \
+    -v "${SCRIPT_DIR}/build:/build" \
+    -v "${SCRIPT_DIR}/output:/output" \
+    -e CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE:-Debug}" \
+    gitea.jde.nz/public/dropshell-build-base:latest \
+    bash -c "
+        cd /build && \
+        cmake -G Ninja -S /src -B . \
+            -DCMAKE_BUILD_TYPE=\${CMAKE_BUILD_TYPE} \
+            -DPROJECT_NAME=${PROJECT} && \
+        cmake --build . && \
+        cp ${PROJECT} /output/
+    "
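
The switch from docker build to docker run trades BuildKit layer caching for a persistent host-side ./build directory, so incremental rebuilds work the ordinary CMake/Ninja way. A usage sketch (the touched file name is hypothetical):

    ./build.sh               # first run: full configure + build into ./build
    touch src/dehydrate.cpp  # hypothetical source file
    ./build.sh               # second run: only the changed unit recompiles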
dehydrate/clean.sh (new executable file, +18 lines)

@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="dehydrate"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output and build directories
+for dir in "output" "build"; do
+    if [ -d "${SCRIPT_DIR}/${dir}" ]; then
+        echo "Removing ${dir} directory..."
+        rm -rf "${SCRIPT_DIR:?}/${dir}"
+    fi
+done
+
+echo "✓ ${PROJECT} cleaned successfully"
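
The ${SCRIPT_DIR:?} expansion is a guard: if SCRIPT_DIR were ever unset or empty, the expansion aborts the command instead of letting rm -rf operate on a path at the filesystem root. Illustration:

    unset SCRIPT_DIR
    rm -rf "${SCRIPT_DIR:?}/output"
    # bash: SCRIPT_DIR: parameter null or not set   (rm never runs)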
@@ -35,14 +35,7 @@ heading "Building ${PROJECT}"
 
 # build release version
 export CMAKE_BUILD_TYPE="Release"
 
-docker build \
-    -t "${PROJECT}-build" \
-    -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
-    --build-arg PROJECT="${PROJECT}" \
-    --build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
-    --output "${OUTPUT}" \
-    "${SCRIPT_DIR}"
+"${SCRIPT_DIR}/build.sh"
 
 [ -f "${OUTPUT}/${PROJECT}" ] || die "Build failed."
 
@@ -4,7 +4,7 @@
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 PROJECT_DIR="$( cd "$SCRIPT_DIR/.." && pwd )"
 
-cd "$SCRIPT_DIR"
+cd "$SCRIPT_DIR" || exit 1
 
 # Clean up old test data and any existing binaries
 # Force removal with chmod to handle permission issues
@@ -31,21 +31,14 @@ COPY src/ src/
 RUN --mount=type=cache,target=/ccache \
     --mount=type=cache,target=/build \
     mkdir -p /build && \
-    SSL_LIB=$(find /usr/local -name "libssl.a" | head -1) && \
-    CRYPTO_LIB=$(find /usr/local -name "libcrypto.a" | head -1) && \
-    echo "Found SSL: $SSL_LIB, Crypto: $CRYPTO_LIB" && \
     cmake -G Ninja -S /app -B /build \
         -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
         -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
        -DCMAKE_C_COMPILER_LAUNCHER=ccache \
        -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
        -DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
        -DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
        -DPROJECT_NAME="${PROJECT}" \
        -DCMAKE_STRIP=OFF \
-       -DOPENSSL_SSL_LIBRARY="$SSL_LIB" \
-       -DOPENSSL_CRYPTO_LIBRARY="$CRYPTO_LIB" \
-       -DOPENSSL_INCLUDE_DIR=/usr/local/include \
        ${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
 
 # Run prebuild script
@@ -15,7 +15,14 @@ PROJECT="getpkg"
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest
 
+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
getpkg/clean.sh (new executable file, +24 lines)

@@ -0,0 +1,24 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="getpkg"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output directory
+if [ -d "${SCRIPT_DIR}/output" ]; then
+    echo "Removing output directory..."
+    rm -rf "${SCRIPT_DIR}/output"
+fi
+
+# Remove Docker images related to this project
+echo "Removing Docker images..."
+docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
+
+# Remove Docker build cache
+echo "Pruning Docker build cache..."
+docker builder prune -f
+
+echo "✓ ${PROJECT} cleaned successfully"
@@ -35,7 +35,14 @@ heading "Building ${PROJECT}"
 # build release version
 export CMAKE_BUILD_TYPE="Release"
 
+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
@@ -5,6 +5,7 @@
 #include <iostream>
 #include <filesystem>
 #include <sstream>
+#include <set>
 #include <algorithm>
 
 using json = nlohmann::json;

@@ -207,7 +208,7 @@ bool GetbinClient::deleteObject(const std::string& hash, const std::string& token) {
 
 bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
     try {
-        std::string url = "https://" + SERVER_HOST + "/packages";
+        std::string url = "https://" + SERVER_HOST + "/dir";
 
         auto response = cpr::Get(cpr::Url{url},
             cpr::Header{{"User-Agent", getUserAgent()}},

@@ -217,20 +218,31 @@ bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
         if (response.status_code == 200) {
             try {
                 auto resp_json = json::parse(response.text);
-                if (resp_json.is_array()) {
+                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
                     outPackages.clear();
-                    for (const auto& item : resp_json) {
-                        if (item.is_string()) {
-                            outPackages.push_back(item.get<std::string>());
+                    std::set<std::string> uniqueTools;
+
+                    for (const auto& entry : resp_json["entries"]) {
+                        if (entry.contains("labeltags") && entry["labeltags"].is_array()) {
+                            for (const auto& labeltag : entry["labeltags"]) {
+                                if (labeltag.is_string()) {
+                                    std::string tag = labeltag.get<std::string>();
+                                    // Extract tool name from "tool:arch" format
+                                    size_t colonPos = tag.find(":");
+                                    if (colonPos != std::string::npos) {
+                                        std::string toolName = tag.substr(0, colonPos);
+                                        if (!toolName.empty()) {
+                                            uniqueTools.insert(toolName);
+                                        }
+                                    }
+                                }
+                            }
                         }
                     }
-                    return true;
-                } else if (resp_json.contains("packages") && resp_json["packages"].is_array()) {
-                    outPackages.clear();
-                    for (const auto& item : resp_json["packages"]) {
-                        if (item.is_string()) {
-                            outPackages.push_back(item.get<std::string>());
-                        }
+
+                    // Convert set to vector
+                    for (const auto& tool : uniqueTools) {
+                        outPackages.push_back(tool);
                     }
                     return true;
                 }

@@ -255,4 +267,51 @@ bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
         std::cerr << "[GetbinClient::listPackages] Exception: " << e.what() << std::endl;
         return false;
     }
 }
+
+bool GetbinClient::listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries) {
+    try {
+        std::string url = "https://" + SERVER_HOST + "/dir";
+
+        auto response = cpr::Get(cpr::Url{url},
+            cpr::Header{{"User-Agent", getUserAgent()}},
+            cpr::Timeout{30000}, // 30 seconds
+            cpr::VerifySsl{true});
+
+        if (response.status_code == 200) {
+            try {
+                auto resp_json = json::parse(response.text);
+                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
+                    outEntries.clear();
+
+                    for (const auto& entry : resp_json["entries"]) {
+                        if (entry.contains("hash") && entry.contains("labeltags") &&
+                            entry["hash"].is_string() && entry["labeltags"].is_array()) {
+
+                            std::string hash = entry["hash"].get<std::string>();
+                            std::vector<std::string> labeltags;
+
+                            for (const auto& tag : entry["labeltags"]) {
+                                if (tag.is_string()) {
+                                    labeltags.push_back(tag.get<std::string>());
+                                }
+                            }
+
+                            outEntries.push_back({hash, labeltags});
+                        }
+                    }
+                    return true;
+                }
+            } catch (const json::exception& e) {
+                std::cerr << "[GetbinClient::listAllEntries] JSON parse error: " << e.what() << std::endl;
+            }
+        } else {
+            std::cerr << "[GetbinClient::listAllEntries] HTTP " << response.status_code << ": " << response.error.message << std::endl;
+        }
+
+        return false;
+    } catch (const std::exception& e) {
+        std::cerr << "[GetbinClient::listAllEntries] Exception: " << e.what() << std::endl;
+        return false;
+    }
+}
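
Both listPackages and the new listAllEntries assume the /dir endpoint returns an object with an entries array of {hash, labeltags} records, where labeltags are "tool:arch" strings. A sketch of the shape implied by the parsing code (values are illustrative, not taken from a real server):

    curl -s "https://$SERVER_HOST/dir"
    # Implied response shape:
    # {
    #   "entries": [
    #     { "hash": "123456789",
    #       "labeltags": ["mytool:x86_64", "mytool:universal"] }
    #   ]
    # }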
@@ -17,8 +17,9 @@ public:
     bool getHash(const std::string& toolName, const std::string& arch, std::string& outHash);
     bool deleteObject(const std::string& hash, const std::string& token);
     bool listPackages(std::vector<std::string>& outPackages);
+    bool listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries);
 
 private:
     static const std::string SERVER_HOST;
     std::string getUserAgent() const;
 };
@@ -76,6 +76,17 @@
 namespace {
 using json = nlohmann::json;
 
+// Clear current line and reset cursor to beginning
+void clearLine() {
+    std::cout << "\r\033[K" << std::flush;
+}
+
+// Clear current line and print message
+void clearAndPrint(const std::string& message) {
+    clearLine();
+    std::cout << message << std::flush;
+}
+
 // Compare versions (returns true if v1 < v2)
 bool isVersionOlder(const std::string& v1, const std::string& v2) {
     // Simple version comparison - assumes versions are in YYYY.MMDD.HHMM format
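
Here \r\033[K is carriage return plus the ANSI erase-in-line sequence, so clearLine() wipes the current line rather than overwriting it with padding spaces (compare the update_tool hunk below, where the old 50-space trick is replaced). Terminal demo:

    printf 'Downloading...'; sleep 1; printf '\r\033[Kdone\n'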
@@ -215,14 +226,14 @@ int install_tool(int argc, char* argv[]) {
     std::cout << "Downloading " << toolName << "..." << std::flush;
     if (!getbin2.download(toolName, arch, archivePath.string(), progressCallback)) {
         // Try universal version as fallback
-        std::cout << "\rArch-specific version not found, trying universal..." << std::endl;
+        clearAndPrint("Arch-specific version not found, trying universal...\n");
         if (!getbin2.download(toolName, "universal", archivePath.string(), progressCallback)) {
             std::cerr << "\rFailed to download tool archive (tried both " << arch << " and universal)." << std::endl;
             return 1;
         }
         downloadArch = "universal";
     }
-    std::cout << "\rDownloading " << toolName << "... done" << std::endl;
+    clearAndPrint("Downloading " + toolName + "... done\n");
 
     // Unpack tool
     std::cout << "Unpacking..." << std::flush;

@@ -230,13 +241,13 @@
         std::cerr << "\rFailed to unpack tool archive." << std::endl;
         return 1;
     }
-    std::cout << "\rUnpacking... done" << std::endl;
+    clearAndPrint("Unpacking... done\n");
 
     // Add to PATH and autocomplete
     std::cout << "Configuring..." << std::flush;
     scriptManager.addToolEntry(toolName, binDir.string());
     scriptManager.addAutocomplete(toolName);
-    std::cout << "\rConfiguring... done" << std::endl;
+    clearAndPrint("Configuring... done\n");
 
     // Get tool info
     std::string hash;

@@ -347,7 +358,7 @@ int publish_tool(int argc, char* argv[]) {
         std::cerr << "\rFailed to upload archive." << std::endl;
         return 1;
     }
-    std::cout << "\rUploading... done" << std::endl;
+    clearAndPrint("Uploading... done\n");
     std::cout << "Published! URL: " << url << "\nHash: " << hash << std::endl;
     return 0;
 }
@@ -426,7 +437,7 @@ int update_tool(int argc, char* argv[]) {
             tool.status = "Check failed";
         }
     }
-    std::cout << "\r" << std::string(50, ' ') << "\r" << std::flush; // Clear progress line
+    clearLine(); // Clear progress line
 
     // Step 2: Update tools that need updating
     std::vector<std::tuple<std::string, std::string, std::string>> updateResults;

@@ -484,7 +495,7 @@ int update_tool(int argc, char* argv[]) {
 
         if (result == 0) {
             tool.status = "Updated";
-            std::cout << " Updated" << std::endl;
+            clearAndPrint("Updated\n");
 
             // Re-read version after update
             std::filesystem::path toolInfoPath = configDir / (tool.name + ".json");

@@ -502,7 +513,7 @@ int update_tool(int argc, char* argv[]) {
             }
         } else {
             tool.status = "Failed";
-            std::cout << " Failed" << std::endl;
+            clearAndPrint("Failed\n");
         }
     }
 }
@@ -701,35 +712,34 @@ int unpublish_tool(int argc, char* argv[]) {
             return 1;
         }
     } else {
-        // No specific architecture - unpublish all architectures
-        std::vector<std::string> allArchitectures = {"x86_64", "aarch64", "universal"};
-        std::vector<std::pair<std::string, std::string>> foundPackages;
+        // No specific architecture - unpublish ALL entries with this tool name
+        std::vector<std::pair<std::string, std::vector<std::string>>> allEntries;
+        std::vector<std::pair<std::string, std::string>> foundPackages; // (tag, hash)
 
-        std::cout << "Searching for " << toolName << " across all architectures..." << std::endl;
+        std::cout << "Searching for all entries with label '" << toolName << "'..." << std::endl;
 
-        // Find all existing versions
-        for (const auto& arch : allArchitectures) {
-            std::string archHash;
-            if (getbin.getHash(toolName, arch, archHash) && !archHash.empty()) {
-                // Validate hash
-                bool validHash = true;
-                for (char c : archHash) {
-                    if (!std::isdigit(c)) {
-                        validHash = false;
-                        break;
-                    }
-                }
-
-                if (validHash) {
-                    foundPackages.push_back({arch, archHash});
-                    std::cout << "  Found " << toolName << ":" << arch << " (hash: " << archHash << ")" << std::endl;
+        if (!getbin.listAllEntries(allEntries)) {
+            std::cerr << "Failed to get directory listing from server" << std::endl;
+            return 1;
+        }
+
+        // Find all entries with labeltags starting with toolName:
+        for (const auto& entry : allEntries) {
+            const std::string& hash = entry.first;
+            const std::vector<std::string>& labeltags = entry.second;
+
+            for (const std::string& tag : labeltags) {
+                if (tag.find(toolName + ":") == 0) {
+                    // Found a matching labeltag
+                    foundPackages.push_back({tag, hash});
+                    std::cout << "  Found " << tag << " (hash: " << hash << ")" << std::endl;
+                    break; // Only count each hash once even if it has multiple matching tags
                 }
             }
         }
 
         if (foundPackages.empty()) {
             std::cerr << "No packages found for " << toolName << std::endl;
-            std::cerr << "Searched architectures: x86_64, aarch64, universal" << std::endl;
             return 1;
         }
 
@@ -741,7 +751,7 @@ int unpublish_tool(int argc, char* argv[]) {
     int failCount = 0;
 
     for (const auto& [arch, archHash] : foundPackages) {
-        std::cout << "  Unpublishing " << toolName << ":" << arch << "... ";
+        std::cout << "  Unpublishing " << arch << "... ";
         if (getbin.deleteObject(archHash, token)) {
            std::cout << "OK" << std::endl;
            successCount++;

@@ -824,7 +834,7 @@ int list_packages(int argc, char* argv[]) {
     for (const auto& packageName : availablePackages) {
         std::string status = "Available";
         std::string localVersion = "-";
-        std::string remoteStatus = "✓";
+        std::string remoteStatus = "-";
 
         auto it = installedPackages.find(packageName);
         if (it != installedPackages.end()) {
getpkg/test.sh (+122 lines)

@@ -528,6 +528,128 @@ EOF
     fi
 fi
 
+
+# Test 13.5: Comprehensive unpublish functionality
+echo -e "\nTest 13.5: Comprehensive unpublish functionality"
+
+# Only run unpublish tests if SOS_WRITE_TOKEN is available
+if [ -n "${SOS_WRITE_TOKEN:-}" ]; then
+    # Create unique test names for unpublish tests
+    UNPUBLISH_TOOL_BASE="test-unpublish-$RANDOM"
+    UNPUBLISH_TOOL_MULTI="${UNPUBLISH_TOOL_BASE}-multi"
+    UNPUBLISH_TOOL_CUSTOM="${UNPUBLISH_TOOL_BASE}-custom"
+    UNPUBLISH_TEST_DIR="${TEST_DIR}/unpublish_tests"
+
+    # Create test directory structure
+    mkdir -p "$UNPUBLISH_TEST_DIR"
+
+    # Test 13.5a: Create and publish tool with multiple architectures
+    echo "Test 13.5a: Unpublish tool with multiple architectures"
+    echo '#!/bin/bash
+echo "Multi-arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
+
+    # Publish to multiple architectures
+    PUBLISH_x86_64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
+    PUBLISH_aarch64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:aarch64" "$UNPUBLISH_TEST_DIR" 2>&1)
+    PUBLISH_universal_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_x86_64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_aarch64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_universal_OUTPUT" =~ Published! ]]; then
+        # Test robust unpublish - should remove ALL architectures
+        sleep 1 # Give server time to process all publishes
+        UNPUBLISH_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_MULTI" 2>&1)
+        UNPUBLISH_EXIT_CODE=$?
+
+        # Check that unpublish found and removed packages
+        if [ $UNPUBLISH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_OUTPUT" =~ "Found" ]] && [[ "$UNPUBLISH_OUTPUT" =~ "Successfully unpublished" ]]; then
+            print_test_result "Unpublish removes all architectures" 0
+        else
+            print_test_result "Unpublish removes all architectures" 1
+            echo "  Unpublish failed: $UNPUBLISH_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish removes all architectures" 1
+        echo "  Failed to publish test tool to multiple architectures"
+        echo "  x86_64: $PUBLISH_x86_64_OUTPUT"
+        echo "  aarch64: $PUBLISH_aarch64_OUTPUT"
+        echo "  universal: $PUBLISH_universal_OUTPUT"
+    fi
+
+    # Test 13.5b: Unpublish tool with universal architecture
+    echo "Test 13.5b: Unpublish tool with universal architecture"
+    echo '#!/bin/bash
+echo "Universal arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
+
+    # Publish with universal architecture
+    PUBLISH_CUSTOM_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_CUSTOM}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_CUSTOM_OUTPUT" =~ Published! ]]; then
+        # Test that unpublish can find and remove custom tags
+        UNPUBLISH_CUSTOM_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_CUSTOM" 2>&1)
+        UNPUBLISH_CUSTOM_EXIT_CODE=$?
+
+        if [ $UNPUBLISH_CUSTOM_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_CUSTOM_OUTPUT" =~ "Found ${UNPUBLISH_TOOL_CUSTOM}:universal" ]]; then
+            print_test_result "Unpublish finds universal architecture" 0
+        else
+            print_test_result "Unpublish finds universal architecture" 1
+            echo "  Failed to find or unpublish custom tag: $UNPUBLISH_CUSTOM_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish finds universal architecture" 1
+        echo "  Failed to publish tool with custom tag: $PUBLISH_CUSTOM_OUTPUT"
+    fi
+
+    # Test 13.5c: Unpublish non-existent tool
+    echo "Test 13.5c: Unpublish non-existent tool"
+    NON_EXISTENT_TOOL="non-existent-tool-$RANDOM"
+    UNPUBLISH_MISSING_OUTPUT=$("$GETPKG" unpublish "$NON_EXISTENT_TOOL" 2>&1)
+    UNPUBLISH_MISSING_EXIT_CODE=$?
+
+    if [ $UNPUBLISH_MISSING_EXIT_CODE -ne 0 ] && [[ "$UNPUBLISH_MISSING_OUTPUT" =~ "No packages found" ]]; then
+        print_test_result "Unpublish handles missing tools gracefully" 0
+    else
+        print_test_result "Unpublish handles missing tools gracefully" 1
+        echo "  Expected failure for non-existent tool, got: $UNPUBLISH_MISSING_OUTPUT"
+    fi
+
+    # Test 13.5d: Unpublish by hash
+    echo "Test 13.5d: Unpublish by hash"
+    UNPUBLISH_TOOL_HASH="${UNPUBLISH_TOOL_BASE}-hash"
+    echo '#!/bin/bash
+echo "Hash unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
+
+    PUBLISH_HASH_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_HASH}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_HASH_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
+        EXTRACTED_HASH="${BASH_REMATCH[1]}"
+
+        # Test unpublish by hash
+        UNPUBLISH_HASH_OUTPUT=$("$GETPKG" unpublish "$EXTRACTED_HASH" 2>&1)
+        UNPUBLISH_HASH_EXIT_CODE=$?
+
+        if [ $UNPUBLISH_HASH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_HASH_OUTPUT" =~ "Successfully unpublished hash" ]]; then
+            print_test_result "Unpublish by hash works" 0
+        else
+            print_test_result "Unpublish by hash works" 1
+            echo "  Failed to unpublish by hash: $UNPUBLISH_HASH_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish by hash works" 1
+        echo "  Could not extract hash from publish output"
+    fi
+
+    # Cleanup unpublish test directory
+    rm -rf "$UNPUBLISH_TEST_DIR"
+
+else
+    echo "  Skipping unpublish tests (SOS_WRITE_TOKEN not set)"
+    print_test_result "Unpublish removes all architectures" 0 # Pass as skipped
+    print_test_result "Unpublish finds universal architecture" 0
+    print_test_result "Unpublish handles missing tools gracefully" 0
+    print_test_result "Unpublish by hash works" 0
+fi
 # Test 14: Invalid tool name validation
 echo -e "\nTest 14: Invalid tool name validation"
 INVALID_OUTPUT=$(timeout 3 "$GETPKG" install "../evil-tool" 2>&1)
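
Test 13.5d depends on bash's =~ operator filling BASH_REMATCH with capture groups; a minimal illustration of the extraction (sample output text is made up):

    out='Published! URL: https://example.invalid/x
    Hash: 123456789'
    [[ "$out" =~ Hash:\ ([0-9]+) ]] && echo "${BASH_REMATCH[1]}"   # -> 123456789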
getpkg/test_display/test-display (new executable file, 1 line)

@@ -0,0 +1 @@
+#!/bin/bash\necho display test

getpkg/test_multi/test-multi (new executable file, 1 line)

@@ -0,0 +1 @@
+#!/bin/bash\necho multi arch

getpkg/test_robust/test-robust (new executable file, 1 line)

@@ -0,0 +1 @@
+#!/bin/bash\necho robust test
sos/clean.sh (new executable file, +20 lines)

@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+PROJECT="sos"
+
+echo "Cleaning ${PROJECT}..."
+
+# Remove output directory (if it exists)
+if [ -d "${SCRIPT_DIR}/output" ]; then
+    echo "Removing output directory..."
+    rm -rf "${SCRIPT_DIR}/output"
+fi
+
+# Remove any temporary files
+echo "Removing temporary files..."
+find "${SCRIPT_DIR}" -name "*.tmp" -o -name "*.temp" -o -name "*~" | xargs -r rm -f
+
+echo "✓ ${PROJECT} cleaned successfully"
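
One caveat: the find output is passed to xargs as whitespace-separated text, so a temp file with a space in its name would be mangled. A NUL-delimited variant (a sketch, not in the diff) that also groups the -o alternatives explicitly:

    find "${SCRIPT_DIR}" \( -name "*.tmp" -o -name "*.temp" -o -name "*~" \) -print0 \
        | xargs -r0 rm -f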