3 Commits

Author SHA1 Message Date
00d1e86157 Modify bb64/publish.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 9s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m10s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:18:42 +12:00
3388a46bf3 Modify getpkg/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m12s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:02:47 +12:00
0f5421630a feat: Update 3 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m16s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 19:55:07 +12:00
8 changed files with 205 additions and 25 deletions

View File

@@ -105,12 +105,20 @@ echo "Getting or creating release $TAG on Gitea..."
EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
    -H "Authorization: token $RELEASE_WRITE_TOKEN")
echo "Existing release check response: $EXISTING_RELEASE" >&2

if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
    # Release already exists, get its ID
    RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
    echo "Release $TAG already exists with ID: $RELEASE_ID"
else
    # Create new release
    # Create new release only if tag was just created
    if [ "$TAG_EXISTS_ON_REMOTE" = true ]; then
        echo "Tag exists on remote but no release found - this shouldn't happen" >&2
        echo "API response was: $EXISTING_RELEASE" >&2
        exit 1
    fi

    echo "Creating new release $TAG on Gitea..."
    RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
        -H "Content-Type: application/json" \

View File

@@ -267,4 +267,51 @@ bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
        std::cerr << "[GetbinClient::listPackages] Exception: " << e.what() << std::endl;
        return false;
    }
}
}

bool GetbinClient::listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries) {
    try {
        std::string url = "https://" + SERVER_HOST + "/dir";
        auto response = cpr::Get(cpr::Url{url},
                                 cpr::Header{{"User-Agent", getUserAgent()}},
                                 cpr::Timeout{30000}, // 30 seconds
                                 cpr::VerifySsl{true});
        if (response.status_code == 200) {
            try {
                auto resp_json = json::parse(response.text);
                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
                    outEntries.clear();
                    for (const auto& entry : resp_json["entries"]) {
                        if (entry.contains("hash") && entry.contains("labeltags") &&
                            entry["hash"].is_string() && entry["labeltags"].is_array()) {
                            std::string hash = entry["hash"].get<std::string>();
                            std::vector<std::string> labeltags;
                            for (const auto& tag : entry["labeltags"]) {
                                if (tag.is_string()) {
                                    labeltags.push_back(tag.get<std::string>());
                                }
                            }
                            outEntries.push_back({hash, labeltags});
                        }
                    }
                    return true;
                }
            } catch (const json::exception& e) {
                std::cerr << "[GetbinClient::listAllEntries] JSON parse error: " << e.what() << std::endl;
            }
        } else {
            std::cerr << "[GetbinClient::listAllEntries] HTTP " << response.status_code << ": " << response.error.message << std::endl;
        }
        return false;
    } catch (const std::exception& e) {
        std::cerr << "[GetbinClient::listAllEntries] Exception: " << e.what() << std::endl;
        return false;
    }
}
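
The parser above expects /dir to return a JSON object of the form {"entries": [{"hash": "...", "labeltags": ["tool:arch", ...]}, ...]}. A quick way to eyeball that payload from the shell (a sketch only; SERVER_HOST below stands in for the GetbinClient::SERVER_HOST constant, which this hunk does not show, and jq is assumed to be installed):

    # Sketch only: print hash + labeltags pairs from the /dir endpoint.
    # SERVER_HOST is a placeholder for the host in GetbinClient::SERVER_HOST.
    curl -s "https://$SERVER_HOST/dir" |
        jq -r '.entries[] | select(.hash and .labeltags) | "\(.hash)  \(.labeltags | join(","))"'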

View File

@@ -17,8 +17,9 @@ public:
    bool getHash(const std::string& toolName, const std::string& arch, std::string& outHash);
    bool deleteObject(const std::string& hash, const std::string& token);
    bool listPackages(std::vector<std::string>& outPackages);
    bool listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries);

private:
    static const std::string SERVER_HOST;
    std::string getUserAgent() const;
};
};

View File

@@ -701,35 +701,34 @@ int unpublish_tool(int argc, char* argv[]) {
            return 1;
        }
    } else {
        // No specific architecture - unpublish all architectures
        std::vector<std::string> allArchitectures = {"x86_64", "aarch64", "universal"};
        std::vector<std::pair<std::string, std::string>> foundPackages;
        // No specific architecture - unpublish ALL entries with this tool name
        std::vector<std::pair<std::string, std::vector<std::string>>> allEntries;
        std::vector<std::pair<std::string, std::string>> foundPackages; // (tag, hash)
        std::cout << "Searching for " << toolName << " across all architectures..." << std::endl;
        std::cout << "Searching for all entries with label '" << toolName << "'..." << std::endl;
        // Find all existing versions
        for (const auto& arch : allArchitectures) {
            std::string archHash;
            if (getbin.getHash(toolName, arch, archHash) && !archHash.empty()) {
                // Validate hash
                bool validHash = true;
                for (char c : archHash) {
                    if (!std::isdigit(c)) {
                        validHash = false;
                        break;
                    }
                }
                if (validHash) {
                    foundPackages.push_back({arch, archHash});
                    std::cout << " Found " << toolName << ":" << arch << " (hash: " << archHash << ")" << std::endl;
        if (!getbin.listAllEntries(allEntries)) {
            std::cerr << "Failed to get directory listing from server" << std::endl;
            return 1;
        }
        // Find all entries with labeltags starting with toolName:
        for (const auto& entry : allEntries) {
            const std::string& hash = entry.first;
            const std::vector<std::string>& labeltags = entry.second;
            for (const std::string& tag : labeltags) {
                if (tag.find(toolName + ":") == 0) {
                    // Found a matching labeltag
                    foundPackages.push_back({tag, hash});
                    std::cout << " Found " << tag << " (hash: " << hash << ")" << std::endl;
                    break; // Only count each hash once even if it has multiple matching tags
                }
            }
        }
        if (foundPackages.empty()) {
            std::cerr << "No packages found for " << toolName << std::endl;
            std::cerr << "Searched architectures: x86_64, aarch64, universal" << std::endl;
            return 1;
        }
@@ -741,7 +740,7 @@ int unpublish_tool(int argc, char* argv[]) {
        int failCount = 0;
        for (const auto& [arch, archHash] : foundPackages) {
            std::cout << " Unpublishing " << toolName << ":" << arch << "... ";
            std::cout << " Unpublishing " << arch << "... ";
            if (getbin.deleteObject(archHash, token)) {
                std::cout << "OK" << std::endl;
                successCount++;
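
The practical effect is that a bare unpublish now follows whatever labeltags actually exist on the server rather than probing a fixed x86_64/aarch64/universal list. A usage sketch (tool name, directory, and the exact output wording are illustrative, not taken from this diff):

    # Sketch only: publish the same tool for two architectures, then remove both at once.
    getpkg publish mytool:x86_64 ./build
    getpkg publish mytool:aarch64 ./build
    getpkg unpublish mytool   # matches every "mytool:<arch>" labeltag returned by /dir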

View File

@@ -528,6 +528,128 @@ EOF
fi
fi
# Test 13.5: Comprehensive unpublish functionality
echo -e "\nTest 13.5: Comprehensive unpublish functionality"
# Only run unpublish tests if SOS_WRITE_TOKEN is available
if [ -n "${SOS_WRITE_TOKEN:-}" ]; then
    # Create unique test names for unpublish tests
    UNPUBLISH_TOOL_BASE="test-unpublish-$RANDOM"
    UNPUBLISH_TOOL_MULTI="${UNPUBLISH_TOOL_BASE}-multi"
    UNPUBLISH_TOOL_CUSTOM="${UNPUBLISH_TOOL_BASE}-custom"
    UNPUBLISH_TEST_DIR="${TEST_DIR}/unpublish_tests"
    # Create test directory structure
    mkdir -p "$UNPUBLISH_TEST_DIR"

    # Test 13.5a: Create and publish tool with multiple architectures
    echo "Test 13.5a: Unpublish tool with multiple architectures"
    echo '#!/bin/bash
echo "Multi-arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
    # Publish to multiple architectures
    PUBLISH_x86_64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
    PUBLISH_aarch64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:aarch64" "$UNPUBLISH_TEST_DIR" 2>&1)
    PUBLISH_universal_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
    if [[ "$PUBLISH_x86_64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_aarch64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_universal_OUTPUT" =~ Published! ]]; then
        # Test robust unpublish - should remove ALL architectures
        sleep 1 # Give server time to process all publishes
        UNPUBLISH_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_MULTI" 2>&1)
        UNPUBLISH_EXIT_CODE=$?
        # Check that unpublish found and removed packages
        if [ $UNPUBLISH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_OUTPUT" =~ "Found" ]] && [[ "$UNPUBLISH_OUTPUT" =~ "Successfully unpublished" ]]; then
            print_test_result "Unpublish removes all architectures" 0
        else
            print_test_result "Unpublish removes all architectures" 1
            echo " Unpublish failed: $UNPUBLISH_OUTPUT"
        fi
    else
        print_test_result "Unpublish removes all architectures" 1
        echo " Failed to publish test tool to multiple architectures"
        echo " x86_64: $PUBLISH_x86_64_OUTPUT"
        echo " aarch64: $PUBLISH_aarch64_OUTPUT"
        echo " universal: $PUBLISH_universal_OUTPUT"
    fi

    # Test 13.5b: Unpublish tool with universal architecture
    echo "Test 13.5b: Unpublish tool with universal architecture"
    echo '#!/bin/bash
echo "Universal arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
    # Publish with universal architecture
    PUBLISH_CUSTOM_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_CUSTOM}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
    if [[ "$PUBLISH_CUSTOM_OUTPUT" =~ Published! ]]; then
        # Test that unpublish can find and remove custom tags
        UNPUBLISH_CUSTOM_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_CUSTOM" 2>&1)
        UNPUBLISH_CUSTOM_EXIT_CODE=$?
        if [ $UNPUBLISH_CUSTOM_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_CUSTOM_OUTPUT" =~ "Found ${UNPUBLISH_TOOL_CUSTOM}:universal" ]]; then
            print_test_result "Unpublish finds universal architecture" 0
        else
            print_test_result "Unpublish finds universal architecture" 1
            echo " Failed to find or unpublish custom tag: $UNPUBLISH_CUSTOM_OUTPUT"
        fi
    else
        print_test_result "Unpublish finds universal architecture" 1
        echo " Failed to publish tool with custom tag: $PUBLISH_CUSTOM_OUTPUT"
    fi

    # Test 13.5c: Unpublish non-existent tool
    echo "Test 13.5c: Unpublish non-existent tool"
    NON_EXISTENT_TOOL="non-existent-tool-$RANDOM"
    UNPUBLISH_MISSING_OUTPUT=$("$GETPKG" unpublish "$NON_EXISTENT_TOOL" 2>&1)
    UNPUBLISH_MISSING_EXIT_CODE=$?
    if [ $UNPUBLISH_MISSING_EXIT_CODE -ne 0 ] && [[ "$UNPUBLISH_MISSING_OUTPUT" =~ "No packages found" ]]; then
        print_test_result "Unpublish handles missing tools gracefully" 0
    else
        print_test_result "Unpublish handles missing tools gracefully" 1
        echo " Expected failure for non-existent tool, got: $UNPUBLISH_MISSING_OUTPUT"
    fi

    # Test 13.5d: Unpublish by hash
    echo "Test 13.5d: Unpublish by hash"
    UNPUBLISH_TOOL_HASH="${UNPUBLISH_TOOL_BASE}-hash"
    echo '#!/bin/bash
echo "Hash unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
    chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
    PUBLISH_HASH_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_HASH}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
    if [[ "$PUBLISH_HASH_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
        EXTRACTED_HASH="${BASH_REMATCH[1]}"
        # Test unpublish by hash
        UNPUBLISH_HASH_OUTPUT=$("$GETPKG" unpublish "$EXTRACTED_HASH" 2>&1)
        UNPUBLISH_HASH_EXIT_CODE=$?
        if [ $UNPUBLISH_HASH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_HASH_OUTPUT" =~ "Successfully unpublished hash" ]]; then
            print_test_result "Unpublish by hash works" 0
        else
            print_test_result "Unpublish by hash works" 1
            echo " Failed to unpublish by hash: $UNPUBLISH_HASH_OUTPUT"
        fi
    else
        print_test_result "Unpublish by hash works" 1
        echo " Could not extract hash from publish output"
    fi

    # Cleanup unpublish test directory
    rm -rf "$UNPUBLISH_TEST_DIR"
else
    echo " Skipping unpublish tests (SOS_WRITE_TOKEN not set)"
    print_test_result "Unpublish removes all architectures" 0 # Pass as skipped
    print_test_result "Unpublish finds universal architecture" 0
    print_test_result "Unpublish handles missing tools gracefully" 0
    print_test_result "Unpublish by hash works" 0
fi
# Test 14: Invalid tool name validation
echo -e "\nTest 14: Invalid tool name validation"
INVALID_OUTPUT=$(timeout 3 "$GETPKG" install "../evil-tool" 2>&1)

View File

@@ -0,0 +1 @@
#!/bin/bash\necho display test

getpkg/test_multi/test-multi Executable file
View File

@@ -0,0 +1 @@
#!/bin/bash\necho multi arch

getpkg/test_robust/test-robust Executable file
View File

@@ -0,0 +1 @@
#!/bin/bash\necho robust test