17  testing/compose.yaml  Normal file
@@ -0,0 +1,17 @@
services:
  sos:
    image: gitea.jde.nz/public/simple-object-storage:test
    container_name: sos-test
    ports:
      - 7703:7703
    volumes:
      - ${LOCALCONFIG}:/data/sos_config.json:ro
    environment:
      - SOS_CONFIG=/data/sos_config.json
    restart: "no"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://127.0.0.1:7703/status"]
      interval: 1s
      timeout: 1s
      retries: 3
      start_period: 1s
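Note: this compose file can also be brought up by hand outside of test-docker.sh; the only input it needs is LOCALCONFIG pointing at a config file to mount. A minimal sketch, assuming you are in the repository root and the gitea.jde.nz/public/simple-object-storage:test image has already been built:

    export LOCALCONFIG="$(pwd)/testing/config.json"
    docker compose -f testing/compose.yaml up -d
    wget -qO- http://127.0.0.1:7703/status   # same probe the healthcheck uses
    docker compose -f testing/compose.yaml down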
12  testing/config.json  Normal file
@@ -0,0 +1,12 @@
{
    "write_tokens": [
        "fizzle1",
        "fizzle2",
        "fizzle3"
    ],
    "rate_limiting": {
        "auth_rate_limit": 5,
        "auth_window_seconds": 2
    },
    "port": 7703
}
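These fields are what the test scripts below exercise: any entry in write_tokens is accepted as a bearer token for uploads and deletes, and auth_rate_limit/auth_window_seconds mean that 5 failed authentication attempts within a 2 second window trip the rate limiter. A hand-run example against the test container (assumes it is listening on 127.0.0.1:7703; the uploaded file and label here are just placeholders):

    curl -X PUT \
      -H "Authorization: Bearer fizzle1" \
      -F "file=@testing/config.json" \
      -F 'metadata={"labeltags":["manual:check"],"description":"manual smoke test"}' \
      http://127.0.0.1:7703/upload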
95  testing/test-docker.sh  Executable file
@@ -0,0 +1,95 @@
#!/bin/bash
set -euo pipefail
set -x

SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
MAIN_DIR=$(cd "${SCRIPT_DIR}/.." && pwd)

# FUNCTIONS
function title() {
    echo "----------------------------------------"
    # Center the text
    local text="$1"
    local line_length=40
    local text_length=${#text}
    local padding=$(( (line_length - text_length) / 2 ))
    printf "%*s%s%*s\n" $padding "" "$text" $padding ""
    echo "----------------------------------------"
}

function die() {
    title "error: $1"
    exit 1
}

function wait_for_container {
    container_id="$1"
    container_name="$(docker inspect "${container_id}" --format '{{ .Name }}')"
    echo "Waiting for container: ${container_name} [${container_id}]"
    waiting_done="false"
    while [[ "${waiting_done}" != "true" ]]; do
        container_state="$(docker inspect "${container_id}" --format '{{ .State.Status }}')"
        if [[ "${container_state}" == "running" ]]; then
            health_status="$(docker inspect "${container_id}" --format '{{ .State.Health.Status }}')"
            echo "${container_name}: container_state=${container_state}, health_status=${health_status}"
            if [[ ${health_status} == "healthy" ]]; then
                waiting_done="true"
            fi
        else
            echo "${container_name}: container_state=${container_state}"
            health_status="${container_state}"
            waiting_done="true"
        fi
        sleep 1
    done
    [ "${health_status}" == "healthy" ]
}

#------------------------------------------------------------------------------------------------
# build the executable
title "Building amd64 executable"
"${MAIN_DIR}/build.sh" amd64

#------------------------------------------------------------------------------------------------
# build the docker image
title "Building docker image"
docker buildx build --no-cache --load -t gitea.jde.nz/public/simple-object-storage:test --platform linux/amd64 "${MAIN_DIR}"

#------------------------------------------------------------------------------------------------
# run the docker container
title "Running docker container"
export LOCALCONFIG="${SCRIPT_DIR}/config.json"
export COMPOSE_FILE="${SCRIPT_DIR}/compose.yaml"

[ -f "${LOCALCONFIG}" ] || die "Config file not found: ${LOCALCONFIG}"
[ -f "${COMPOSE_FILE}" ] || die "Compose file not found: ${COMPOSE_FILE}"

#------------------------------------------------------------------------------------------------
title "Running tests"

# clear any existing containers.
docker compose -f "${COMPOSE_FILE}" down
docker compose -f "${COMPOSE_FILE}" rm -fv

LOCALCONFIG=${LOCALCONFIG} docker compose -f "${COMPOSE_FILE}" up -d

# wait until healthy.
if ! wait_for_container "sos-test"; then
    echo "----------------------------------------"
    echo "Container sos-test is not healthy"
    echo "----------------------------------------"
    docker logs sos-test
    docker inspect sos-test
    die "Container sos-test is not healthy"
fi

# run the tests, capturing the result so cleanup still runs under set -e.
RESULT=0
"${SCRIPT_DIR}/test.sh" "http://localhost:7703" || RESULT=$?

# clean up.
docker compose -f "${COMPOSE_FILE}" down
docker compose -f "${COMPOSE_FILE}" rm -fv

exit $RESULT
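Typical invocation is just the script itself from anywhere in the checkout, since it resolves its own directory; it assumes docker with buildx and the repository's build.sh are available:

    ./testing/test-docker.sh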
404  testing/test.sh  Executable file
@@ -0,0 +1,404 @@
#!/bin/bash

set -euo pipefail

HOSTURL="${1:-http://127.0.0.1:7703}"

#------------------------------------------------------------------------------------------------
SCRIPT_DIR=$(dirname "$0")
SCRIPT_NAME=$(basename "$0")

# FUNCTIONS
function title() {
    echo "----------------------------------------"
    # Center the text
    local text="$1"
    local line_length=40
    local text_length=${#text}
    local padding=$(( (line_length - text_length) / 2 ))
    printf "%*s%s%*s\n" $padding "" "$text" $padding ""
    echo "----------------------------------------"
}

function die() {
    title "error: $1"
    exit 1
}

#------------------------------------------------------------------------------------------------
cat << EOF


EOF

title "TESTING ${HOSTURL}"

cat << EOF


EOF

# Test 0: Verify the server is running
title "0: Verify the server is running"

# test jq is installed
if ! command -v jq &> /dev/null; then
    echo "jq could not be found"
    echo "sudo apt-get install jq"
    exit 1
fi

# read the test config that sits next to this script
CONFIG_PATH="${SCRIPT_DIR}/config.json"
if [ ! -f "${CONFIG_PATH}" ]; then
    echo "config file not found at ${CONFIG_PATH}"
    exit 1
fi
CONFIG=$(cat "${CONFIG_PATH}")

# extract the first write token from the config
WRITE_TOKEN=$(echo "$CONFIG" | jq -r '.write_tokens[0]')

BASE_TAG="autotest"

# test if server is running
if ! curl -s "${HOSTURL}/status" | jq -r '.result' | grep -q 'success'; then
    die "server is not running on ${HOSTURL}"
fi

echo "Simple Object Storage server is running at ${HOSTURL}"

# test every action in the README.md file, leaving the system in the same state it was found,
# and print the output of each action

# Construct metadata JSON
METADATA_JSON=$(cat <<EOF
{
    "labeltags": ["${BASE_TAG}:test1"],
    "description": "Example file",
    "custom_field": "custom value"
}
EOF
)

#------------------------------------------------------------------------------------------------
# Test 1: Upload this script as an object
title "1: Upload script to ${BASE_TAG}:test1"

TEST_FILE="${SCRIPT_DIR}/${SCRIPT_NAME}"

# upload this script as an object
echo "uploading ${TEST_FILE} to ${BASE_TAG}:test1"
UPLOAD_RESPONSE=$(curl -X PUT \
    -H "Authorization: Bearer ${WRITE_TOKEN}" \
    -F "file=@${TEST_FILE}" \
    -F "metadata=${METADATA_JSON}" \
    "${HOSTURL}/upload")

echo "upload response: ${UPLOAD_RESPONSE}"

OBJECT_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')

#------------------------------------------------------------------------------------------------
title "2: Check sos hash matches"

# check the hash matches.
CMD="${HOSTURL}/hash/${BASE_TAG}:test1"
echo "checking hash via ${CMD}"
CHECK_HASH=$(curl -s "${CMD}" | jq -r '.hash')
[ "${OBJECT_HASH}" != "${CHECK_HASH}" ] && die "hash does not match: ${OBJECT_HASH} != ${CHECK_HASH}"

#------------------------------------------------------------------------------------------------
title "3: Check MD5Sum matches, for both label:tag and hash downloads"

# get md5sum of this file
MD5SUM=$(md5sum "${TEST_FILE}" | awk '{print $1}')
echo "md5sum of ${TEST_FILE} is ${MD5SUM}"

# download the object
DOWNLOAD_FILE="${SCRIPT_DIR}/_${SCRIPT_NAME}.downloaded"
echo "downloading ${OBJECT_HASH} to ${DOWNLOAD_FILE}1"
if ! curl -s "${HOSTURL}/${OBJECT_HASH}" -o "${DOWNLOAD_FILE}1"; then
    die "failed to download ${HOSTURL}/${OBJECT_HASH} to ${DOWNLOAD_FILE}1"
fi

# download the object again via the label:tag
echo "downloading ${BASE_TAG}:test1 to ${DOWNLOAD_FILE}2"
if ! curl -s "${HOSTURL}/${BASE_TAG}:test1" -o "${DOWNLOAD_FILE}2"; then
    die "failed to download ${BASE_TAG}:test1"
fi

# get md5sums of the downloaded files
MD5SUM_DOWNLOADED1=$(md5sum "${DOWNLOAD_FILE}1" | awk '{print $1}')
echo "md5sum of ${DOWNLOAD_FILE}1 is ${MD5SUM_DOWNLOADED1}"
[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED1}" ] && die "md5sums do not match"
MD5SUM_DOWNLOADED2=$(md5sum "${DOWNLOAD_FILE}2" | awk '{print $1}')
[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED2}" ] && die "md5sums do not match"

rm "${DOWNLOAD_FILE}1"
rm "${DOWNLOAD_FILE}2"

#------------------------------------------------------------------------------------------------
LABELTAG="finangle-wrangler:fuzzy_test"
title "3b: Upload script to ${LABELTAG}"

METADATA_JSON=$(cat <<EOF
{
    "labeltags": ["${LABELTAG}"],
    "description": "Test File for fuzzy test",
    "custom_field": "Upload2!!!111 Yay. This is a test file for the fuzzy test."
}
EOF
)
# upload this script as an object
echo "uploading ${TEST_FILE} to ${LABELTAG}"
UPLOAD_RESPONSE=$(curl -X PUT \
    -H "Authorization: Bearer ${WRITE_TOKEN}" \
    -F "file=@${TEST_FILE}" \
    -F "metadata=${METADATA_JSON}" \
    "${HOSTURL}/upload")

echo "upload response: ${UPLOAD_RESPONSE}"

OBJECT_HASH2=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')

if [ ! "${OBJECT_HASH}" == "${OBJECT_HASH2}" ]; then
    die "object hash does not match: ${OBJECT_HASH} != ${OBJECT_HASH2}"
fi

# test the description and custom field match our new ones.
METADATA_RESPONSE=$(curl "${HOSTURL}/meta/${OBJECT_HASH2}")

echo "metadata response: ${METADATA_RESPONSE}"

if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.description' | grep -q "Test File for fuzzy test"; then
    die "description does not match"
fi

if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.custom_field' | grep -q "Upload2!!!111 Yay. This is a test file for the fuzzy test."; then
    die "custom field does not match"
fi

# download via the label:tag
echo "downloading ${LABELTAG} to ${DOWNLOAD_FILE}3"
if ! curl -s "${HOSTURL}/${LABELTAG}" -o "${DOWNLOAD_FILE}3"; then
    die "failed to download ${LABELTAG}"
fi

# get md5sum of the downloaded file
MD5SUM_DOWNLOADED3=$(md5sum "${DOWNLOAD_FILE}3" | awk '{print $1}')
echo "md5sum of ${DOWNLOAD_FILE}3 is ${MD5SUM_DOWNLOADED3}"
[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED3}" ] && die "md5sums do not match"

rm "${DOWNLOAD_FILE}3"

#------------------------------------------------------------------------------------------------
title "4: Delete the object"

# delete the object
echo "deleting ${OBJECT_HASH}"
if ! curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${OBJECT_HASH}" | jq -r '.result' | grep -q 'success'; then
    die "failed to delete ${OBJECT_HASH}"
fi

# verify the object is deleted
echo "verifying ${OBJECT_HASH} is deleted"
DELETE_RESPONSE=$(curl -s "${HOSTURL}/${OBJECT_HASH}")
if ! echo "${DELETE_RESPONSE}" | jq -r '.result' | grep -q 'error'; then
    die "failed to verify ${OBJECT_HASH} is deleted"
fi

#------------------------------------------------------------------------------------------------
title "5: Test metadata field preservation"

# Upload with extra metadata fields
EXTRA_METADATA_JSON=$(cat <<EOF
{
    "labeltags": ["${BASE_TAG}:test2"],
    "description": "Test with extra fields",
    "custom_field": "custom value",
    "extra_field1": "value1",
    "extra_field2": "value2"
}
EOF
)

echo "uploading with extra metadata fields"
UPLOAD_RESPONSE=$(curl -X PUT \
    -H "Authorization: Bearer ${WRITE_TOKEN}" \
    -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" \
    -F "metadata=${EXTRA_METADATA_JSON}" \
    "${HOSTURL}/upload")
UPLOAD_EXIT_CODE=$?
echo "Upload response: ${UPLOAD_RESPONSE}"
echo "Upload exit code: ${UPLOAD_EXIT_CODE}"

if [ ${UPLOAD_EXIT_CODE} -ne 0 ]; then
    die "Failed to upload object: curl returned ${UPLOAD_EXIT_CODE}"
fi

if ! echo "${UPLOAD_RESPONSE}" | jq -e . >/dev/null 2>&1; then
    die "Invalid JSON response from upload: ${UPLOAD_RESPONSE}"
fi

OBJECT_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
echo "Received hash: ${OBJECT_HASH}"

# Verify the object exists
echo "Verifying object exists..."
EXISTS_RESPONSE=$(curl -s "${HOSTURL}/exists/${BASE_TAG}:test2")
echo "Exists response: ${EXISTS_RESPONSE}"

# Get metadata and verify extra fields are preserved
echo "Retrieving metadata for ${BASE_TAG}:test2"
METADATA_RESPONSE=$(curl -s "${HOSTURL}/meta/${BASE_TAG}:test2")
CURL_EXIT_CODE=$?
echo "Curl exit code: ${CURL_EXIT_CODE}"
echo "Full metadata response: ${METADATA_RESPONSE}"

if [ ${CURL_EXIT_CODE} -ne 0 ]; then
    die "Failed to retrieve metadata: curl returned ${CURL_EXIT_CODE}"
fi

if ! echo "${METADATA_RESPONSE}" | jq -e . >/dev/null 2>&1; then
    die "Invalid JSON response: ${METADATA_RESPONSE}"
fi

if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.extra_field1' | grep -q 'value1'; then
    die "extra_field1 not preserved in metadata"
fi
if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.extra_field2' | grep -q 'value2'; then
    die "extra_field2 not preserved in metadata"
fi

#------------------------------------------------------------------------------------------------
title "6: Test tag versioning behavior"

# Clean up
curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${OBJECT_HASH}" > /dev/null

# Upload first version with tag 'latest'
FIRST_METADATA_JSON=$(cat <<EOF
{
    "labeltags": ["${BASE_TAG}:latest", "${BASE_TAG}:v1"],
    "description": "First version"
}
EOF
)

echo "uploading first version with tag 'latest'"
UPLOAD_RESPONSE=$(curl -X PUT \
    -H "Authorization: Bearer ${WRITE_TOKEN}" \
    -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" \
    -F "metadata=${FIRST_METADATA_JSON}" \
    "${HOSTURL}/upload")

FIRST_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')

# Store first version's metadata before uploading second version
FIRST_METADATA=$(curl -s "${HOSTURL}/meta/${FIRST_HASH}")
echo "First version metadata response: ${FIRST_METADATA}"
if ! echo "${FIRST_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:v1"; then
    die "First version does not have v1 tag"
fi

# Upload second version with same tag 'latest'
SECOND_METADATA_JSON=$(cat <<EOF
{
    "labeltags": ["${BASE_TAG}:latest", "${BASE_TAG}:v2"],
    "description": "Second version"
}
EOF
)

if [ ! -f "${SCRIPT_DIR}/test_1GB_file_upload.sh" ]; then
    die "test_1GB_file_upload.sh not found"
fi

echo "uploading second version with tag 'latest'"
UPLOAD_RESPONSE=$(curl -X PUT \
    -H "Authorization: Bearer ${WRITE_TOKEN}" \
    -F "file=@${SCRIPT_DIR}/test_1GB_file_upload.sh" \
    -F "metadata=${SECOND_METADATA_JSON}" \
    "${HOSTURL}/upload")

SECOND_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')

# Verify first version's metadata still has v1 tag
FIRST_METADATA=$(curl -s "${HOSTURL}/meta/${FIRST_HASH}")
echo "First version metadata response: ${FIRST_METADATA}"
if ! echo "${FIRST_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:v1"; then
    die "First version does not have v1 tag"
fi

# Verify first version's metadata no longer has the latest tag
if echo "${FIRST_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:latest"; then
    die "First version still has latest tag"
fi

# Verify second version has the correct tags: v2 and latest
SECOND_METADATA=$(curl -s "${HOSTURL}/meta/${SECOND_HASH}")
echo "Second version metadata response: ${SECOND_METADATA}"
if ! echo "${SECOND_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:latest"; then
    die "Second version does not have latest tag"
fi
if ! echo "${SECOND_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:v2"; then
    die "Second version does not have v2 tag"
fi

#------------------------------------------------------------------------------------------------
title "7: Test rate limiting behavior"

# Clean up
curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${FIRST_HASH}" > /dev/null
curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${SECOND_HASH}" > /dev/null

# Use a known invalid token
INVALID_TOKEN="invalid_token"

# Make 5 requests with an invalid token
for i in {1..5}; do
    echo "Attempt $i with invalid token"
    RESPONSE=$(curl -s -X PUT -H "Authorization: Bearer ${INVALID_TOKEN}" -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" -F "metadata={\"labeltags\":[\"test:latest\"]}" "${HOSTURL}/upload")
    echo "Response: ${RESPONSE}"
done

# Now try a request with a valid token - should be rate limited
echo "Attempting request with valid token (should be rate limited)"
RESPONSE=$(curl -s -X PUT -H "Authorization: Bearer ${WRITE_TOKEN}" -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" -F "metadata={\"labeltags\":[\"test:latest\"]}" "${HOSTURL}/upload")
if ! echo "${RESPONSE}" | jq -r '.error' | grep -q "Too many authentication attempts"; then
    die "Expected rate limit error, got: ${RESPONSE}"
fi

echo "Sleeping for 3 seconds to allow rate limit to reset"
echo "(Normally 5 mins, but set to 2s for this test!)"

sleep 3
# Now try a request with a valid token - should NOT be rate limited
echo "Attempting request with valid token (should NOT be rate limited)"
RESPONSE=$(curl -s -X PUT -H "Authorization: Bearer ${WRITE_TOKEN}" -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" -F "metadata={\"labeltags\":[\"test:latest\"]}" "${HOSTURL}/upload")
if echo "${RESPONSE}" | jq -r '.error' | grep -q "Too many authentication attempts"; then
    die "Expected no rate limit error, got: ${RESPONSE}"
fi

# delete the object
TODELHASH=$(curl -s "${HOSTURL}/hash/test:latest" | jq -r '.hash')
echo "deleting test:latest ${TODELHASH}"

if ! curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${TODELHASH}" | jq -r '.result' | grep -q 'success'; then
    die "failed to delete ${TODELHASH}"
fi

#------------------------------------------------------------------------------------------------
title "ALL TESTS PASSED"
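test.sh can also be run against any already-running instance; the host URL is its only argument and defaults to http://127.0.0.1:7703, with the write token read from the config.json next to the script. For the compose setup above that is simply:

    ./testing/test.sh http://localhost:7703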
84  testing/test_1GB_file_upload.sh  Executable file
@@ -0,0 +1,84 @@
#!/bin/bash

# Create a temporary test file (1024MB!)
echo "Creating test file..."
dd if=/dev/urandom of=test_file.bin bs=1M count=1024

# Calculate original file hash
echo "Calculating original file hash..."
ORIGINAL_HASH=$(sha256sum test_file.bin | cut -d' ' -f1)
echo "Original hash: $ORIGINAL_HASH"

# read ~/.config/simple_object_storage/config.json
CONFIG_PATH="${HOME}/.config/simple_object_storage/config.json"
if [ ! -f "${CONFIG_PATH}" ]; then
    echo "config file not found at ${CONFIG_PATH}"
    exit 1
fi
CONFIG=$(cat "${CONFIG_PATH}")

# get the host and port from the config
HOST=$(echo "$CONFIG" | jq -r '.host')
PORT=$(echo "$CONFIG" | jq -r '.port')

# extract the first write token from the config
WRITE_TOKEN=$(echo "$CONFIG" | jq -r '.write_tokens[0]')

# Upload the file
echo "Uploading file..."
RESPONSE=$(curl -X PUT \
    -H "Authorization: Bearer ${WRITE_TOKEN}" \
    -F "file=@test_file.bin" \
    -F 'metadata={"labeltags":["test:latest","test:large"],"description":"Test file"}' \
    "http://${HOST}:${PORT}/upload")
echo "Upload response: $RESPONSE"

# Extract the hash from the response
HASH=$(echo "$RESPONSE" | jq -r '.hash')
echo "Hash: $HASH"

# Check if the file exists by hash
echo "Checking if file exists by hash..."
EXISTS_HASH=$(curl "http://${HOST}:${PORT}/exists/$HASH")
echo "Exists by hash response: $EXISTS_HASH"

# Check if the file exists by label:tag
echo "Checking if file exists by label:tag..."
EXISTS_TAG=$(curl "http://${HOST}:${PORT}/exists/test:latest")
echo "Exists by label:tag response: $EXISTS_TAG"

# Download the file by hash
echo "Downloading file by hash..."
curl -o downloaded_by_hash.bin "http://${HOST}:${PORT}/object/$HASH"

# Download the file by label:tag
echo "Downloading file by label:tag..."
curl -o downloaded_by_tag.bin "http://${HOST}:${PORT}/object/test:latest"

# Verify downloaded files
echo "Verifying downloaded files..."
HASH_BY_HASH=$(sha256sum downloaded_by_hash.bin | cut -d' ' -f1)
HASH_BY_TAG=$(sha256sum downloaded_by_tag.bin | cut -d' ' -f1)

echo "Original hash: $ORIGINAL_HASH"
echo "Hash of file downloaded by hash: $HASH_BY_HASH"
echo "Hash of file downloaded by tag: $HASH_BY_TAG"

if [ "$ORIGINAL_HASH" = "$HASH_BY_HASH" ] && [ "$ORIGINAL_HASH" = "$HASH_BY_TAG" ]; then
    echo "Hash verification successful!"
else
    echo "Hash verification failed!"
    exit 1
fi

# delete the file!
echo "Deleting file..."
DELETE_RESPONSE=$(curl -H "Authorization: Bearer ${WRITE_TOKEN}" \
    "http://${HOST}:${PORT}/deleteobject?hash=${HASH}")
echo "Delete response: $DELETE_RESPONSE"

# Clean up
echo "Cleaning up..."
rm test_file.bin downloaded_by_hash.bin downloaded_by_tag.bin

echo "Test completed."