Compare commits
5 commits: v2025.0629 ... v2025.0629

| SHA1 |
|---|
| dbe88a7121 |
| 00d1e86157 |
| 3388a46bf3 |
| 0f5421630a |
| 50fb5f9da6 |
```diff
@@ -26,7 +26,7 @@ jobs:
           password: ${{ secrets.DOCKER_PUSH_TOKEN }}
       - name: Build Test Publish All
         run: |
-          SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh
+          SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh --no-cache

   test-install-from-scratch:
     needs: [build]
```
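The CI job simply forwards the new flag to the wrapper script. Run outside CI, the equivalent invocation looks like this (the token values are placeholders, not real secrets):

```bash
# Local equivalent of the CI step; substitute real tokens for the placeholders.
SOS_WRITE_TOKEN=<token> RELEASE_WRITE_TOKEN=<token> ./buildtestpublish_all.sh --no-cache
```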
```diff
@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest

+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
```
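Note that `${CACHE_FLAG}` is expanded unquoted, so when `NO_CACHE` is unset or not `true` it contributes nothing to the `docker build` command line. A minimal sketch of both invocations (the script name `build.sh` is an assumption; the hunk does not name the file):

```bash
./build.sh                  # default: reuses the Docker layer cache
NO_CACHE=true ./build.sh    # sets CACHE_FLAG="--no-cache" for a clean rebuild
```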
```diff
@@ -105,12 +105,20 @@ echo "Getting or creating release $TAG on Gitea..."
 EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
     -H "Authorization: token $RELEASE_WRITE_TOKEN")

+echo "Existing release check response: $EXISTING_RELEASE" >&2
+
 if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
     # Release already exists, get its ID
     RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
     echo "Release $TAG already exists with ID: $RELEASE_ID"
 else
-    # Create new release
+    # Create new release only if tag was just created
+    if [ "$TAG_EXISTS_ON_REMOTE" = true ]; then
+        echo "Tag exists on remote but no release found - this shouldn't happen" >&2
+        echo "API response was: $EXISTING_RELEASE" >&2
+        exit 1
+    fi
+
     echo "Creating new release $TAG on Gitea..."
     RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
         -H "Content-Type: application/json" \
```
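The POST body is truncated in this hunk. For orientation only, a Gitea release-creation request generally carries a JSON payload along these lines; the fields shown are the standard Gitea release fields, and the values are placeholders, not necessarily what this script sends:

```bash
# Illustrative sketch of a Gitea "create release" call; not copied from the script.
curl -s -X POST "$API_URL/releases" \
    -H "Content-Type: application/json" \
    -H "Authorization: token $RELEASE_WRITE_TOKEN" \
    -d "{\"tag_name\":\"$TAG\",\"name\":\"$TAG\",\"draft\":false,\"prerelease\":false}"
```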
```diff
@@ -2,6 +2,13 @@
 set -uo pipefail # Remove -e to handle errors manually
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"

+# Parse command line arguments
+NO_CACHE=false
+if [[ "$*" == *"--no-cache"* ]]; then
+    NO_CACHE=true
+    export NO_CACHE
+fi
+
 docker builder prune -f

```
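Because `NO_CACHE` is exported here, it is visible to every child build script the wrapper invokes, and those scripts translate it into `--no-cache` as shown in the surrounding hunks. A tiny sketch of that propagation:

```bash
# Exported variables propagate to child processes:
export NO_CACHE=true
bash -c 'echo "child sees NO_CACHE=${NO_CACHE:-false}"'   # prints: child sees NO_CACHE=true
```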
```diff
@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest

+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
```
```diff
@@ -15,7 +15,14 @@ PROJECT="getpkg"
 # make sure we have the latest base image.
 docker pull gitea.jde.nz/public/dropshell-build-base:latest

+# Build with or without cache based on NO_CACHE environment variable
+CACHE_FLAG=""
+if [ "${NO_CACHE:-false}" = "true" ]; then
+    CACHE_FLAG="--no-cache"
+fi
+
 docker build \
+    ${CACHE_FLAG} \
     -t "${PROJECT}-build" \
     -f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
     --build-arg PROJECT="${PROJECT}" \
```
```diff
@@ -5,6 +5,7 @@
 #include <iostream>
 #include <filesystem>
 #include <sstream>
+#include <set>
 #include <algorithm>

 using json = nlohmann::json;
```
```diff
@@ -207,7 +208,7 @@ bool GetbinClient::deleteObject(const std::string& hash, const std::string& token)

 bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
     try {
-        std::string url = "https://" + SERVER_HOST + "/packages";
+        std::string url = "https://" + SERVER_HOST + "/dir";

         auto response = cpr::Get(cpr::Url{url},
             cpr::Header{{"User-Agent", getUserAgent()}},
```
```diff
@@ -217,20 +218,31 @@ bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
         if (response.status_code == 200) {
             try {
                 auto resp_json = json::parse(response.text);
-                if (resp_json.is_array()) {
+                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
                     outPackages.clear();
-                    for (const auto& item : resp_json) {
-                        if (item.is_string()) {
-                            outPackages.push_back(item.get<std::string>());
+                    std::set<std::string> uniqueTools;
+
+                    for (const auto& entry : resp_json["entries"]) {
+                        if (entry.contains("labeltags") && entry["labeltags"].is_array()) {
+                            for (const auto& labeltag : entry["labeltags"]) {
+                                if (labeltag.is_string()) {
+                                    std::string tag = labeltag.get<std::string>();
+                                    // Extract tool name from "tool:arch" format
+                                    size_t colonPos = tag.find(":");
+                                    if (colonPos != std::string::npos) {
+                                        std::string toolName = tag.substr(0, colonPos);
+                                        if (!toolName.empty()) {
+                                            uniqueTools.insert(toolName);
+                                        }
+                                    }
+                                }
+                            }
                         }
                     }
                     return true;
-                } else if (resp_json.contains("packages") && resp_json["packages"].is_array()) {
-                    outPackages.clear();
-                    for (const auto& item : resp_json["packages"]) {
-                        if (item.is_string()) {
-                            outPackages.push_back(item.get<std::string>());
-                        }
+
+                    // Convert set to vector
+                    for (const auto& tool : uniqueTools) {
+                        outPackages.push_back(tool);
                     }
                     return true;
                 }
```
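The new parser assumes the `/dir` endpoint returns an object whose `entries` array holds objects carrying a `hash` and a `labeltags` array of `"tool:arch"` strings; the tool name is the part before the colon, de-duplicated. An equivalent extraction with curl and jq, under that assumed response shape (`$SERVER_HOST` is a placeholder for the real server host):

```bash
# Assumed shape: {"entries":[{"hash":"123...","labeltags":["mytool:x86_64","mytool:aarch64"]}]}
curl -s "https://$SERVER_HOST/dir" \
  | jq -r '.entries[]?.labeltags[]? | select(contains(":")) | split(":")[0]' \
  | sort -u
```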
```diff
@@ -255,4 +267,51 @@ bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
         std::cerr << "[GetbinClient::listPackages] Exception: " << e.what() << std::endl;
         return false;
     }
 }

+bool GetbinClient::listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries) {
+    try {
+        std::string url = "https://" + SERVER_HOST + "/dir";
+
+        auto response = cpr::Get(cpr::Url{url},
+            cpr::Header{{"User-Agent", getUserAgent()}},
+            cpr::Timeout{30000}, // 30 seconds
+            cpr::VerifySsl{true});
+
+        if (response.status_code == 200) {
+            try {
+                auto resp_json = json::parse(response.text);
+                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
+                    outEntries.clear();
+
+                    for (const auto& entry : resp_json["entries"]) {
+                        if (entry.contains("hash") && entry.contains("labeltags") &&
+                            entry["hash"].is_string() && entry["labeltags"].is_array()) {
+
+                            std::string hash = entry["hash"].get<std::string>();
+                            std::vector<std::string> labeltags;
+
+                            for (const auto& tag : entry["labeltags"]) {
+                                if (tag.is_string()) {
+                                    labeltags.push_back(tag.get<std::string>());
+                                }
+                            }
+
+                            outEntries.push_back({hash, labeltags});
+                        }
+                    }
+                    return true;
+                }
+            } catch (const json::exception& e) {
+                std::cerr << "[GetbinClient::listAllEntries] JSON parse error: " << e.what() << std::endl;
+            }
+        } else {
+            std::cerr << "[GetbinClient::listAllEntries] HTTP " << response.status_code << ": " << response.error.message << std::endl;
+        }
+
+        return false;
+    } catch (const std::exception& e) {
+        std::cerr << "[GetbinClient::listAllEntries] Exception: " << e.what() << std::endl;
+        return false;
+    }
+}
```
```diff
@@ -17,8 +17,9 @@ public:
     bool getHash(const std::string& toolName, const std::string& arch, std::string& outHash);
     bool deleteObject(const std::string& hash, const std::string& token);
     bool listPackages(std::vector<std::string>& outPackages);
+    bool listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries);

 private:
     static const std::string SERVER_HOST;
     std::string getUserAgent() const;
 };
```
```diff
@@ -701,35 +701,34 @@ int unpublish_tool(int argc, char* argv[]) {
             return 1;
         }
     } else {
-        // No specific architecture - unpublish all architectures
-        std::vector<std::string> allArchitectures = {"x86_64", "aarch64", "universal"};
-        std::vector<std::pair<std::string, std::string>> foundPackages;
+        // No specific architecture - unpublish ALL entries with this tool name
+        std::vector<std::pair<std::string, std::vector<std::string>>> allEntries;
+        std::vector<std::pair<std::string, std::string>> foundPackages; // (tag, hash)

-        std::cout << "Searching for " << toolName << " across all architectures..." << std::endl;
+        std::cout << "Searching for all entries with label '" << toolName << "'..." << std::endl;

-        // Find all existing versions
-        for (const auto& arch : allArchitectures) {
-            std::string archHash;
-            if (getbin.getHash(toolName, arch, archHash) && !archHash.empty()) {
-                // Validate hash
-                bool validHash = true;
-                for (char c : archHash) {
-                    if (!std::isdigit(c)) {
-                        validHash = false;
-                        break;
-                    }
-                }
-
-                if (validHash) {
-                    foundPackages.push_back({arch, archHash});
-                    std::cout << " Found " << toolName << ":" << arch << " (hash: " << archHash << ")" << std::endl;
+        if (!getbin.listAllEntries(allEntries)) {
+            std::cerr << "Failed to get directory listing from server" << std::endl;
+            return 1;
+        }
+
+        // Find all entries with labeltags starting with toolName:
+        for (const auto& entry : allEntries) {
+            const std::string& hash = entry.first;
+            const std::vector<std::string>& labeltags = entry.second;
+
+            for (const std::string& tag : labeltags) {
+                if (tag.find(toolName + ":") == 0) {
+                    // Found a matching labeltag
+                    foundPackages.push_back({tag, hash});
+                    std::cout << " Found " << tag << " (hash: " << hash << ")" << std::endl;
+                    break; // Only count each hash once even if it has multiple matching tags
                 }
             }
         }

         if (foundPackages.empty()) {
             std::cerr << "No packages found for " << toolName << std::endl;
             std::cerr << "Searched architectures: x86_64, aarch64, universal" << std::endl;
             return 1;
         }

```
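In user-facing terms, `unpublish <tool>` now removes every entry whose labeltag starts with `<tool>:` instead of probing a fixed list of architectures, while `unpublish <hash>` still targets a single entry; both paths are exercised by the new tests later in this diff. A sketch of the invocations used there (tool name and hash are illustrative):

```bash
# As exercised by the new tests in getpkg/test.sh:
getpkg publish "mytool:x86_64" ./dir   # publish one architecture
getpkg unpublish "mytool"              # removes ALL entries labelled mytool:<arch>
getpkg unpublish "1234567890"          # removes the single entry with that hash
```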
```diff
@@ -741,7 +740,7 @@ int unpublish_tool(int argc, char* argv[]) {
     int failCount = 0;

     for (const auto& [arch, archHash] : foundPackages) {
-        std::cout << " Unpublishing " << toolName << ":" << arch << "... ";
+        std::cout << " Unpublishing " << arch << "... ";
         if (getbin.deleteObject(archHash, token)) {
             std::cout << "OK" << std::endl;
             successCount++;
```
```diff
@@ -824,7 +823,7 @@ int list_packages(int argc, char* argv[]) {
     for (const auto& packageName : availablePackages) {
         std::string status = "Available";
         std::string localVersion = "-";
-        std::string remoteStatus = "✓";
+        std::string remoteStatus = "-";

         auto it = installedPackages.find(packageName);
         if (it != installedPackages.end()) {
```
getpkg/test.sh (+122)
```diff
@@ -528,6 +528,128 @@ EOF
     fi
 fi

+
+# Test 13.5: Comprehensive unpublish functionality
+echo -e "\nTest 13.5: Comprehensive unpublish functionality"
+
+# Only run unpublish tests if SOS_WRITE_TOKEN is available
+if [ -n "${SOS_WRITE_TOKEN:-}" ]; then
+    # Create unique test names for unpublish tests
+    UNPUBLISH_TOOL_BASE="test-unpublish-$RANDOM"
+    UNPUBLISH_TOOL_MULTI="${UNPUBLISH_TOOL_BASE}-multi"
+    UNPUBLISH_TOOL_CUSTOM="${UNPUBLISH_TOOL_BASE}-custom"
+    UNPUBLISH_TEST_DIR="${TEST_DIR}/unpublish_tests"
+
+    # Create test directory structure
+    mkdir -p "$UNPUBLISH_TEST_DIR"
+
+    # Test 13.5a: Create and publish tool with multiple architectures
+    echo "Test 13.5a: Unpublish tool with multiple architectures"
+    echo '#!/bin/bash
+echo "Multi-arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
+
+    # Publish to multiple architectures
+    PUBLISH_x86_64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
+    PUBLISH_aarch64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:aarch64" "$UNPUBLISH_TEST_DIR" 2>&1)
+    PUBLISH_universal_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_x86_64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_aarch64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_universal_OUTPUT" =~ Published! ]]; then
+        # Test robust unpublish - should remove ALL architectures
+        sleep 1 # Give server time to process all publishes
+        UNPUBLISH_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_MULTI" 2>&1)
+        UNPUBLISH_EXIT_CODE=$?
+
+        # Check that unpublish found and removed packages
+        if [ $UNPUBLISH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_OUTPUT" =~ "Found" ]] && [[ "$UNPUBLISH_OUTPUT" =~ "Successfully unpublished" ]]; then
+            print_test_result "Unpublish removes all architectures" 0
+        else
+            print_test_result "Unpublish removes all architectures" 1
+            echo " Unpublish failed: $UNPUBLISH_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish removes all architectures" 1
+        echo " Failed to publish test tool to multiple architectures"
+        echo " x86_64: $PUBLISH_x86_64_OUTPUT"
+        echo " aarch64: $PUBLISH_aarch64_OUTPUT"
+        echo " universal: $PUBLISH_universal_OUTPUT"
+    fi
+
+    # Test 13.5b: Unpublish tool with universal architecture
+    echo "Test 13.5b: Unpublish tool with universal architecture"
+    echo '#!/bin/bash
+echo "Universal arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
+
+    # Publish with universal architecture
+    PUBLISH_CUSTOM_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_CUSTOM}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_CUSTOM_OUTPUT" =~ Published! ]]; then
+        # Test that unpublish can find and remove custom tags
+        UNPUBLISH_CUSTOM_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_CUSTOM" 2>&1)
+        UNPUBLISH_CUSTOM_EXIT_CODE=$?
+
+        if [ $UNPUBLISH_CUSTOM_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_CUSTOM_OUTPUT" =~ "Found ${UNPUBLISH_TOOL_CUSTOM}:universal" ]]; then
+            print_test_result "Unpublish finds universal architecture" 0
+        else
+            print_test_result "Unpublish finds universal architecture" 1
+            echo " Failed to find or unpublish custom tag: $UNPUBLISH_CUSTOM_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish finds universal architecture" 1
+        echo " Failed to publish tool with custom tag: $PUBLISH_CUSTOM_OUTPUT"
+    fi
+
+    # Test 13.5c: Unpublish non-existent tool
+    echo "Test 13.5c: Unpublish non-existent tool"
+    NON_EXISTENT_TOOL="non-existent-tool-$RANDOM"
+    UNPUBLISH_MISSING_OUTPUT=$("$GETPKG" unpublish "$NON_EXISTENT_TOOL" 2>&1)
+    UNPUBLISH_MISSING_EXIT_CODE=$?
+
+    if [ $UNPUBLISH_MISSING_EXIT_CODE -ne 0 ] && [[ "$UNPUBLISH_MISSING_OUTPUT" =~ "No packages found" ]]; then
+        print_test_result "Unpublish handles missing tools gracefully" 0
+    else
+        print_test_result "Unpublish handles missing tools gracefully" 1
+        echo " Expected failure for non-existent tool, got: $UNPUBLISH_MISSING_OUTPUT"
+    fi
+
+    # Test 13.5d: Unpublish by hash
+    echo "Test 13.5d: Unpublish by hash"
+    UNPUBLISH_TOOL_HASH="${UNPUBLISH_TOOL_BASE}-hash"
+    echo '#!/bin/bash
+echo "Hash unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
+    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
+
+    PUBLISH_HASH_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_HASH}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
+
+    if [[ "$PUBLISH_HASH_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
+        EXTRACTED_HASH="${BASH_REMATCH[1]}"
+
+        # Test unpublish by hash
+        UNPUBLISH_HASH_OUTPUT=$("$GETPKG" unpublish "$EXTRACTED_HASH" 2>&1)
+        UNPUBLISH_HASH_EXIT_CODE=$?
+
+        if [ $UNPUBLISH_HASH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_HASH_OUTPUT" =~ "Successfully unpublished hash" ]]; then
+            print_test_result "Unpublish by hash works" 0
+        else
+            print_test_result "Unpublish by hash works" 1
+            echo " Failed to unpublish by hash: $UNPUBLISH_HASH_OUTPUT"
+        fi
+    else
+        print_test_result "Unpublish by hash works" 1
+        echo " Could not extract hash from publish output"
+    fi
+
+    # Cleanup unpublish test directory
+    rm -rf "$UNPUBLISH_TEST_DIR"
+
+else
+    echo " Skipping unpublish tests (SOS_WRITE_TOKEN not set)"
+    print_test_result "Unpublish removes all architectures" 0 # Pass as skipped
+    print_test_result "Unpublish finds universal architecture" 0
+    print_test_result "Unpublish handles missing tools gracefully" 0
+    print_test_result "Unpublish by hash works" 0
+fi
 # Test 14: Invalid tool name validation
 echo -e "\nTest 14: Invalid tool name validation"
 INVALID_OUTPUT=$(timeout 3 "$GETPKG" install "../evil-tool" 2>&1)
```
getpkg/test_display/test-display (new executable file, +1)
```diff
@@ -0,0 +1 @@
+#!/bin/bash\necho display test
```
getpkg/test_multi/test-multi (new executable file, +1)
```diff
@@ -0,0 +1 @@
+#!/bin/bash\necho multi arch
```
getpkg/test_robust/test-robust (new executable file, +1)
```diff
@@ -0,0 +1 @@
+#!/bin/bash\necho robust test
```