6 Commits

Author SHA1 Message Date
facc6b73b0 feat: Update 4 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m30s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m33s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-29 20:52:40 +12:00
9a24576e37 Modify clean.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m28s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m28s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:31:18 +12:00
3f68f44e3d Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m29s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m32s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-29 20:28:25 +12:00
dbe88a7121 test: Update 5 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m28s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m32s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:24:57 +12:00
00d1e86157 Modify bb64/publish.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 9s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m10s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:18:42 +12:00
3388a46bf3 Modify getpkg/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m12s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:02:47 +12:00
16 changed files with 338 additions and 17 deletions


@@ -26,7 +26,7 @@ jobs:
password: ${{ secrets.DOCKER_PUSH_TOKEN }}
- name: Build Test Publish All
run: |
SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh
SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh --no-cache
test-install-from-scratch:
needs: [build]


@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
# make sure we have the latest base image.
docker pull gitea.jde.nz/public/dropshell-build-base:latest
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \

bb64/clean.sh (new executable file, +24 lines)

@@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="bb64"
echo "Cleaning ${PROJECT}..."
# Remove output directory
if [ -d "${SCRIPT_DIR}/output" ]; then
echo "Removing output directory..."
rm -rf "${SCRIPT_DIR}/output"
fi
# Remove Docker images related to this project
echo "Removing Docker images..."
docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
# Remove Docker build cache
echo "Pruning Docker build cache..."
docker builder prune -f
echo "${PROJECT} cleaned successfully"


@@ -20,7 +20,14 @@ echo "Building version $VERSION" >&2
# build release version
export CMAKE_BUILD_TYPE="Release"
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \
@@ -84,9 +91,11 @@ if git rev-parse "$TAG" >/dev/null 2>&1; then
fi
# Check if tag exists on remote
TAG_EXISTS_ON_REMOTE=false
if git ls-remote --tags origin | grep -q "refs/tags/$TAG"; then
echo "Tag $TAG already exists on remote - this is expected for multi-architecture builds"
echo "Skipping tag creation and proceeding with release attachment..."
TAG_EXISTS_ON_REMOTE=true
else
echo "Creating new tag $TAG..."
git tag -a "$TAG" -m "Release $TAG"
@@ -105,12 +114,20 @@ echo "Getting or creating release $TAG on Gitea..."
EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
-H "Authorization: token $RELEASE_WRITE_TOKEN")
echo "Existing release check response: $EXISTING_RELEASE" >&2
if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
# Release already exists, get its ID
RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
echo "Release $TAG already exists with ID: $RELEASE_ID"
else
# Create new release
# Create new release only if tag was just created
if [ "$TAG_EXISTS_ON_REMOTE" = true ]; then
echo "Tag exists on remote but no release found - this shouldn't happen" >&2
echo "API response was: $EXISTING_RELEASE" >&2
exit 1
fi
echo "Creating new release $TAG on Gitea..."
RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
-H "Content-Type: application/json" \


@@ -2,6 +2,13 @@
set -uo pipefail # Remove -e to handle errors manually
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
# Parse command line arguments
NO_CACHE=false
if [[ "$*" == *"--no-cache"* ]]; then
NO_CACHE=true
export NO_CACHE
fi
docker builder prune -f
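
These lines tie the workflow change above to the per-project scripts: passing --no-cache makes buildtestpublish_all.sh export NO_CACHE=true, and each build script that gained the CACHE_FLAG block turns that into a --no-cache argument on its docker build. A minimal sketch of the two ways to trigger an uncached build (paths are illustrative):

# Whole pipeline, as the CI workflow now invokes it:
./buildtestpublish_all.sh --no-cache

# Single project, since the build scripts read NO_CACHE directly:
NO_CACHE=true ./build.sh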

clean.sh (new executable file, +44 lines)

@@ -0,0 +1,44 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
echo "🧹 CLEANING ALL PROJECTS 🧹"
echo
# Get all project directories
PROJECT_DIRS=$(find "$SCRIPT_DIR" -maxdepth 1 -type d \
-not -name ".*" \
-not -path "$SCRIPT_DIR" | sort)
for dir in $PROJECT_DIRS; do
PROJECT_NAME=$(basename "$dir")
if [ -f "$dir/clean.sh" ]; then
echo "Cleaning $PROJECT_NAME..."
cd "$dir"
./clean.sh
echo
else
echo "⚠️ No clean.sh found for $PROJECT_NAME, skipping..."
echo
fi
done
# Global Docker cleanup
echo "🐳 Global Docker cleanup..."
echo "Removing unused Docker images..."
docker image prune -f
echo "Removing unused Docker containers..."
docker container prune -f
echo "Removing unused Docker networks..."
docker network prune -f
echo "Removing unused Docker volumes..."
docker volume prune -f
echo
echo "✅ All projects cleaned successfully!"


@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
# make sure we have the latest base image.
docker pull gitea.jde.nz/public/dropshell-build-base:latest
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \

dehydrate/clean.sh (new executable file, +24 lines)

@@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="dehydrate"
echo "Cleaning ${PROJECT}..."
# Remove output directory
if [ -d "${SCRIPT_DIR}/output" ]; then
echo "Removing output directory..."
rm -rf "${SCRIPT_DIR}/output"
fi
# Remove Docker images related to this project
echo "Removing Docker images..."
docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
# Remove Docker build cache
echo "Pruning Docker build cache..."
docker builder prune -f
echo "${PROJECT} cleaned successfully"


@@ -36,7 +36,14 @@ heading "Building ${PROJECT}"
# build release version
export CMAKE_BUILD_TYPE="Release"
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \


@@ -31,21 +31,14 @@ COPY src/ src/
RUN --mount=type=cache,target=/ccache \
--mount=type=cache,target=/build \
mkdir -p /build && \
SSL_LIB=$(find /usr/local -name "libssl.a" | head -1) && \
CRYPTO_LIB=$(find /usr/local -name "libcrypto.a" | head -1) && \
echo "Found SSL: $SSL_LIB, Crypto: $CRYPTO_LIB" && \
cmake -G Ninja -S /app -B /build \
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
-DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
-DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
-DPROJECT_NAME="${PROJECT}" \
-DCMAKE_STRIP=OFF \
-DOPENSSL_SSL_LIBRARY="$SSL_LIB" \
-DOPENSSL_CRYPTO_LIBRARY="$CRYPTO_LIB" \
-DOPENSSL_INCLUDE_DIR=/usr/local/include \
${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
# Run prebuild script
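
This hunk drops the find-based discovery of libssl.a/libcrypto.a and the explicit -DOPENSSL_*_LIBRARY overrides, so the configure step presumably falls back on CMake's stock FindOpenSSL module. If the static libraries ever need to be pinned again, the standard hints could be passed instead of hard-coded find results; a hedged sketch (the /usr/local prefix is an assumption about the base image):

cmake -G Ninja -S /app -B /build \
  -DOPENSSL_ROOT_DIR=/usr/local \
  -DOPENSSL_USE_STATIC_LIBS=ON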


@@ -15,7 +15,14 @@ PROJECT="getpkg"
# make sure we have the latest base image.
docker pull gitea.jde.nz/public/dropshell-build-base:latest
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \

getpkg/clean.sh (new executable file, +24 lines)

@@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="getpkg"
echo "Cleaning ${PROJECT}..."
# Remove output directory
if [ -d "${SCRIPT_DIR}/output" ]; then
echo "Removing output directory..."
rm -rf "${SCRIPT_DIR}/output"
fi
# Remove Docker images related to this project
echo "Removing Docker images..."
docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
# Remove Docker build cache
echo "Pruning Docker build cache..."
docker builder prune -f
echo "${PROJECT} cleaned successfully"


@@ -35,7 +35,14 @@ heading "Building ${PROJECT}"
# build release version
export CMAKE_BUILD_TYPE="Release"
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \


@@ -76,6 +76,17 @@
namespace {
using json = nlohmann::json;
// Clear current line and reset cursor to beginning
void clearLine() {
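// "\r" returns the cursor to column 0; "\033[K" is the ANSI erase-to-end-of-line sequence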
std::cout << "\r\033[K" << std::flush;
}
// Clear current line and print message
void clearAndPrint(const std::string& message) {
clearLine();
std::cout << message << std::flush;
}
// Compare versions (returns true if v1 < v2)
bool isVersionOlder(const std::string& v1, const std::string& v2) {
// Simple version comparison - assumes versions are in YYYY.MMDD.HHMM format
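// e.g. isVersionOlder("2025.0628.1200", "2025.0629.2052") would be expected to return true (hypothetical values)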
@@ -215,14 +226,14 @@ int install_tool(int argc, char* argv[]) {
std::cout << "Downloading " << toolName << "..." << std::flush;
if (!getbin2.download(toolName, arch, archivePath.string(), progressCallback)) {
// Try universal version as fallback
std::cout << "\rArch-specific version not found, trying universal..." << std::endl;
clearAndPrint("Arch-specific version not found, trying universal...\n");
if (!getbin2.download(toolName, "universal", archivePath.string(), progressCallback)) {
std::cerr << "\rFailed to download tool archive (tried both " << arch << " and universal)." << std::endl;
return 1;
}
downloadArch = "universal";
}
std::cout << "\rDownloading " << toolName << "... done" << std::endl;
clearAndPrint("Downloading " + toolName + "... done\n");
// Unpack tool
std::cout << "Unpacking..." << std::flush;
@@ -230,13 +241,13 @@ int install_tool(int argc, char* argv[]) {
std::cerr << "\rFailed to unpack tool archive." << std::endl;
return 1;
}
std::cout << "\rUnpacking... done" << std::endl;
clearAndPrint("Unpacking... done\n");
// Add to PATH and autocomplete
std::cout << "Configuring..." << std::flush;
scriptManager.addToolEntry(toolName, binDir.string());
scriptManager.addAutocomplete(toolName);
std::cout << "\rConfiguring... done" << std::endl;
clearAndPrint("Configuring... done\n");
// Get tool info
std::string hash;
@@ -347,7 +358,7 @@ int publish_tool(int argc, char* argv[]) {
std::cerr << "\rFailed to upload archive." << std::endl;
return 1;
}
std::cout << "\rUploading... done" << std::endl;
clearAndPrint("Uploading... done\n");
std::cout << "Published! URL: " << url << "\nHash: " << hash << std::endl;
return 0;
}
@@ -426,7 +437,7 @@ int update_tool(int argc, char* argv[]) {
tool.status = "Check failed";
}
}
std::cout << "\r" << std::string(50, ' ') << "\r" << std::flush; // Clear progress line
clearLine(); // Clear progress line
// Step 2: Update tools that need updating
std::vector<std::tuple<std::string, std::string, std::string>> updateResults;
@ -484,7 +495,7 @@ int update_tool(int argc, char* argv[]) {
if (result == 0) {
tool.status = "Updated";
std::cout << " Updated" << std::endl;
clearAndPrint("Updated\n");
// Re-read version after update
std::filesystem::path toolInfoPath = configDir / (tool.name + ".json");
@ -502,7 +513,7 @@ int update_tool(int argc, char* argv[]) {
}
} else {
tool.status = "Failed";
std::cout << " Failed" << std::endl;
clearAndPrint("Failed\n");
}
}
}


@@ -528,6 +528,128 @@ EOF
fi
fi
# Test 13.5: Comprehensive unpublish functionality
echo -e "\nTest 13.5: Comprehensive unpublish functionality"
# Only run unpublish tests if SOS_WRITE_TOKEN is available
if [ -n "${SOS_WRITE_TOKEN:-}" ]; then
# Create unique test names for unpublish tests
UNPUBLISH_TOOL_BASE="test-unpublish-$RANDOM"
UNPUBLISH_TOOL_MULTI="${UNPUBLISH_TOOL_BASE}-multi"
UNPUBLISH_TOOL_CUSTOM="${UNPUBLISH_TOOL_BASE}-custom"
UNPUBLISH_TEST_DIR="${TEST_DIR}/unpublish_tests"
# Create test directory structure
mkdir -p "$UNPUBLISH_TEST_DIR"
# Test 13.5a: Create and publish tool with multiple architectures
echo "Test 13.5a: Unpublish tool with multiple architectures"
echo '#!/bin/bash
echo "Multi-arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
# Publish to multiple architectures
PUBLISH_x86_64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
PUBLISH_aarch64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:aarch64" "$UNPUBLISH_TEST_DIR" 2>&1)
PUBLISH_universal_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
if [[ "$PUBLISH_x86_64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_aarch64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_universal_OUTPUT" =~ Published! ]]; then
# Test robust unpublish - should remove ALL architectures
sleep 1 # Give server time to process all publishes
UNPUBLISH_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_MULTI" 2>&1)
UNPUBLISH_EXIT_CODE=$?
# Check that unpublish found and removed packages
if [ $UNPUBLISH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_OUTPUT" =~ "Found" ]] && [[ "$UNPUBLISH_OUTPUT" =~ "Successfully unpublished" ]]; then
print_test_result "Unpublish removes all architectures" 0
else
print_test_result "Unpublish removes all architectures" 1
echo " Unpublish failed: $UNPUBLISH_OUTPUT"
fi
else
print_test_result "Unpublish removes all architectures" 1
echo " Failed to publish test tool to multiple architectures"
echo " x86_64: $PUBLISH_x86_64_OUTPUT"
echo " aarch64: $PUBLISH_aarch64_OUTPUT"
echo " universal: $PUBLISH_universal_OUTPUT"
fi
# Test 13.5b: Unpublish tool with universal architecture
echo "Test 13.5b: Unpublish tool with universal architecture"
echo '#!/bin/bash
echo "Universal arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
# Publish with universal architecture
PUBLISH_CUSTOM_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_CUSTOM}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
if [[ "$PUBLISH_CUSTOM_OUTPUT" =~ Published! ]]; then
# Test that unpublish can find and remove custom tags
UNPUBLISH_CUSTOM_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_CUSTOM" 2>&1)
UNPUBLISH_CUSTOM_EXIT_CODE=$?
if [ $UNPUBLISH_CUSTOM_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_CUSTOM_OUTPUT" =~ "Found ${UNPUBLISH_TOOL_CUSTOM}:universal" ]]; then
print_test_result "Unpublish finds universal architecture" 0
else
print_test_result "Unpublish finds universal architecture" 1
echo " Failed to find or unpublish custom tag: $UNPUBLISH_CUSTOM_OUTPUT"
fi
else
print_test_result "Unpublish finds universal architecture" 1
echo " Failed to publish tool with custom tag: $PUBLISH_CUSTOM_OUTPUT"
fi
# Test 13.5c: Unpublish non-existent tool
echo "Test 13.5c: Unpublish non-existent tool"
NON_EXISTENT_TOOL="non-existent-tool-$RANDOM"
UNPUBLISH_MISSING_OUTPUT=$("$GETPKG" unpublish "$NON_EXISTENT_TOOL" 2>&1)
UNPUBLISH_MISSING_EXIT_CODE=$?
if [ $UNPUBLISH_MISSING_EXIT_CODE -ne 0 ] && [[ "$UNPUBLISH_MISSING_OUTPUT" =~ "No packages found" ]]; then
print_test_result "Unpublish handles missing tools gracefully" 0
else
print_test_result "Unpublish handles missing tools gracefully" 1
echo " Expected failure for non-existent tool, got: $UNPUBLISH_MISSING_OUTPUT"
fi
# Test 13.5d: Unpublish by hash
echo "Test 13.5d: Unpublish by hash"
UNPUBLISH_TOOL_HASH="${UNPUBLISH_TOOL_BASE}-hash"
echo '#!/bin/bash
echo "Hash unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
PUBLISH_HASH_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_HASH}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
if [[ "$PUBLISH_HASH_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
EXTRACTED_HASH="${BASH_REMATCH[1]}"
# Test unpublish by hash
UNPUBLISH_HASH_OUTPUT=$("$GETPKG" unpublish "$EXTRACTED_HASH" 2>&1)
UNPUBLISH_HASH_EXIT_CODE=$?
if [ $UNPUBLISH_HASH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_HASH_OUTPUT" =~ "Successfully unpublished hash" ]]; then
print_test_result "Unpublish by hash works" 0
else
print_test_result "Unpublish by hash works" 1
echo " Failed to unpublish by hash: $UNPUBLISH_HASH_OUTPUT"
fi
else
print_test_result "Unpublish by hash works" 1
echo " Could not extract hash from publish output"
fi
# Cleanup unpublish test directory
rm -rf "$UNPUBLISH_TEST_DIR"
else
echo " Skipping unpublish tests (SOS_WRITE_TOKEN not set)"
print_test_result "Unpublish removes all architectures" 0 # Pass as skipped
print_test_result "Unpublish finds universal architecture" 0
print_test_result "Unpublish handles missing tools gracefully" 0
print_test_result "Unpublish by hash works" 0
fi
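
A minimal illustration of the hash capture used in Test 13.5d above, with an invented publish output:

SAMPLE_OUTPUT=$'Published! URL: https://example.invalid/test-tool\nHash: 1234567890'
if [[ "$SAMPLE_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
    echo "Extracted hash: ${BASH_REMATCH[1]}"   # prints 1234567890
fi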
# Test 14: Invalid tool name validation
echo -e "\nTest 14: Invalid tool name validation"
INVALID_OUTPUT=$(timeout 3 "$GETPKG" install "../evil-tool" 2>&1)

sos/clean.sh (new executable file, +20 lines)

@@ -0,0 +1,20 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="sos"
echo "Cleaning ${PROJECT}..."
# Remove output directory (if it exists)
if [ -d "${SCRIPT_DIR}/output" ]; then
echo "Removing output directory..."
rm -rf "${SCRIPT_DIR}/output"
fi
# Remove any temporary files
echo "Removing temporary files..."
find "${SCRIPT_DIR}" -name "*.tmp" -o -name "*.temp" -o -name "*~" | xargs -r rm -f
echo "${PROJECT} cleaned successfully"