From 085ace79148b2b53d8cef1486d27b8491bc94961 Mon Sep 17 00:00:00 2001
From: Your Name
Date: Sat, 3 May 2025 15:30:07 +1200
Subject: [PATCH] Fix lookup

Look up objects by label:tag first and fall back to treating the key as
a numeric hash, validating the hash before touching the object store.
Convert status and error responses to JSON with a "result" field
(object downloads still return the raw file content), and extend
test.sh to cover the new behaviour.
---
 src/server.cpp | 146 ++++++++++++++++++++++++++++++++-----------------
 test.sh        |  74 +++++++++++++++++++++-
 2 files changed, 164 insertions(+), 56 deletions(-)

diff --git a/src/server.cpp b/src/server.cpp
index dfeb98d..7cf71d9 100644
--- a/src/server.cpp
+++ b/src/server.cpp
@@ -7,7 +7,7 @@
 #include // For getAllKeys
 #include // For litecask values
 #include <stdexcept> // For std::runtime_error
-
+#include <nlohmann/json.hpp>
 #include "server.hpp"
 #include "hash.hpp"
 
@@ -59,7 +59,8 @@ bool Server::validate_write_request(const httplib::Request &req, httplib::Respon
     for (const auto& param : required_params) {
         if (!req.has_param(param)) {
             res.status = 400;
-            res.set_content("Missing required query parameter: " + param, "text/plain");
+            nlohmann::json response = {{"result", "error"}, {"error", "Missing required query parameter: " + param}};
+            res.set_content(response.dump(), "application/json");
             return false;
         }
     }
@@ -68,7 +69,8 @@ bool Server::validate_write_request(const httplib::Request &req, httplib::Respon
     bool write_token_valid = std::find(config_.write_tokens.begin(), config_.write_tokens.end(), params["token"]) != config_.write_tokens.end();
     if (!write_token_valid) {
         res.status = 403;
-        res.set_content("Invalid write token", "text/plain");
+        nlohmann::json response = {{"result", "error"}, {"error", "Invalid write token"}};
+        res.set_content(response.dump(), "application/json");
         return false;
     }
 
@@ -174,48 +176,57 @@ void Server::setup_routes() {
 
 void Server::handle_get_object(const httplib::Request& req, httplib::Response& res) {
     const auto& key = req.matches[1].str();
-    std::string hash_str;
+    std::string hash_str = key;
 
-    // Check if the key looks like a hash (numeric)
-    bool is_hash_lookup = true;
-    for (char c : key) {
-        if (!std::isdigit(c)) {
-            is_hash_lookup = false;
-            break;
-        }
-    }
-
-    if (!is_hash_lookup) {
-        // Lookup by label:tag in the database
-        dbEntry entry;
-        if (!db_->get(key, entry)) {
-            res.status = 404;
-            res.set_content("Object not found (label:tag)", "text/plain");
-            return;
-        }
+    // first check if the key matches a label:tag in the database.
+    dbEntry entry;
+    if (db_->get(key, entry)) {
+        // got it!
         hash_str = entry.hash;
-    } else {
-        // Lookup directly by hash
-        hash_str = key;
     }
 
     if (hash_str.empty()) {
         res.status = 404;
-        res.set_content("Object hash could not be determined", "text/plain");
+        nlohmann::json response = {{"result", "error"}, {"error", "Object hash could not be determined"}};
+        res.set_content(response.dump(), "application/json");
         return;
     }
 
+    // check valid hash.
+    uint64_t hash_value;
+    try {
+        hash_value = std::stoull(hash_str);
+    } catch (const std::exception& e) { // invalid_argument or out_of_range
+        res.status = 404;
+        nlohmann::json response = {{"result", "error"}, {"error", "Invalid hash: " + hash_str}};
+        res.set_content(response.dump(), "application/json");
+        return;
+    }
+
+    std::stringstream oss;
+    oss << hash_value;
+    if (oss.str() != hash_str) {
+        res.status = 404;
+        nlohmann::json response = {{"result", "error"}, {"error", "Invalid hash: " + hash_str}};
+        res.set_content(response.dump(), "application/json");
+        return;
+    }
+
+    // hash is valid, safe to look up!
+ // Construct the file path using the hash string - std::filesystem::path file_path = config_.object_store_path / hash_str; + std::filesystem::path file_path = config_.object_store_path / oss.str(); if (!std::filesystem::exists(file_path) || !std::filesystem::is_regular_file(file_path)) { res.status = 404; - res.set_content("Object file not found for hash: " + hash_str, "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Object file not found for hash: " + hash_str}}; + res.set_content(response.dump(), "application/json"); return; } // Send file using Response::set_file_content std::string content_type = "application/octet-stream"; // Basic default res.set_file_content(file_path.string(), content_type); + // No JSON response for file content } void Server::handle_get_hash(const httplib::Request& req, httplib::Response& res) { @@ -224,28 +235,32 @@ void Server::handle_get_hash(const httplib::Request& req, httplib::Response& res dbEntry entry; if (!db_->get(label_tag, entry)) { res.status = 404; - res.set_content("Label:tag not found", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Label:tag not found"}}; + res.set_content(response.dump(), "application/json"); return; } - res.set_content(entry.hash, "text/plain"); + nlohmann::json response = {{"result", "success"}, {"hash", entry.hash}}; + res.set_content(response.dump(), "application/json"); } void Server::handle_get_directory(const httplib::Request& /*req*/, httplib::Response& res) { - std::stringstream ss; std::vector entries; if (!db_->list(entries)) { res.status = 500; - res.set_content("Database error retrieving directory", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to retrieve directory listing"}}; + res.set_content(response.dump(), "application/json"); return; } + nlohmann::json entries_array = nlohmann::json::array(); for (const auto& entry : entries) { - ss << entry.label_tag << "," << entry.hash << "\n"; + entries_array.push_back({{"label_tag", entry.label_tag}, {"hash", entry.hash}}); } - res.set_content(ss.str(), "text/plain"); + nlohmann::json response = {{"result", "success"}, {"entries", entries_array}}; + res.set_content(response.dump(), "application/json"); } void Server::handle_put_object(const httplib::Request& req, httplib::Response& res) { @@ -259,14 +274,16 @@ void Server::handle_put_object(const httplib::Request& req, httplib::Response& r // 1. 
Check we're in the /upload path if (req.path != "/upload") { res.status = 404; - res.set_content("Not found - put requests must be to /upload", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Not found - put requests must be to /upload"}}; + res.set_content(response.dump(), "application/json"); return; } auto [label, tag] = parse_label_tag(params["labeltag"]); if (label.empty() || tag.empty()) { res.status = 400; - res.set_content("Invalid label:tag format", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Invalid label:tag format"}}; + res.set_content(response.dump(), "application/json"); return; } @@ -283,7 +300,8 @@ void Server::handle_put_object(const httplib::Request& req, httplib::Response& r std::ofstream temp_file(temp_path, std::ios::binary); if (!temp_file.is_open()) { res.status = 500; - res.set_content("Failed to create temporary file", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to create temporary file"}}; + res.set_content(response.dump(), "application/json"); return; } @@ -298,7 +316,8 @@ void Server::handle_put_object(const httplib::Request& req, httplib::Response& r uint64_t hash = hash_file(temp_path.string()); if (hash == 0) { res.status = 500; - res.set_content("Failed to calculate hash", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to calculate hash"}}; + res.set_content(response.dump(), "application/json"); return; } @@ -325,7 +344,8 @@ void Server::handle_put_object(const httplib::Request& req, httplib::Response& r } catch (const std::filesystem::filesystem_error& e) { std::cerr << "Error renaming temp file: " << e.what() << std::endl; res.status = 500; - res.set_content("Failed to store object file", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to store object file"}}; + res.set_content(response.dump(), "application/json"); return; } } @@ -338,13 +358,14 @@ void Server::handle_put_object(const httplib::Request& req, httplib::Response& r if (!db_->update_or_insert(entry)) { res.status = 500; - res.set_content("Failed to update database index", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to update database index"}}; + res.set_content(response.dump(), "application/json"); // Attempt to clean up the moved file if index fails try { if (std::filesystem::exists(final_path)) std::filesystem::remove(final_path); } catch(...) 
{}; return; } - res.set_content(std::to_string(hash), "text/plain"); + res.set_content(nlohmann::json({{"result", "success"}, {"hash", std::to_string(hash)}}).dump(), "application/json"); } void Server::handle_get_metadata(const httplib::Request& req, httplib::Response& res) { @@ -353,16 +374,19 @@ void Server::handle_get_metadata(const httplib::Request& req, httplib::Response& dbEntry entry; if (!db_->get(label_tag, entry)) { res.status = 404; - res.set_content("Metadata not found for label:tag: " + label_tag, "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Metadata not found for label:tag: " + label_tag}}; + res.set_content(response.dump(), "application/json"); return; } try { - res.set_content(entry.metadata.dump(), "application/json"); + nlohmann::json response = {{"result", "success"}, {"metadata", entry.metadata}}; + res.set_content(response.dump(), "application/json"); } catch (const nlohmann::json::exception& e) { std::cerr << "Error serializing metadata for " << label_tag << ": " << e.what() << std::endl; res.status = 500; - res.set_content("Internal server error: Failed to serialize metadata", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Internal server error: Failed to serialize metadata"}}; + res.set_content(response.dump(), "application/json"); } } @@ -398,18 +422,21 @@ void Server::handle_delete_tag(const httplib::Request& req, httplib::Response& r auto [label, tag] = parse_label_tag(params["labeltag"]); if (label.empty() || tag.empty()) { res.status = 400; - res.set_content("Invalid label:tag format", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Invalid label:tag format"}}; + res.set_content(response.dump(), "application/json"); return; } // Delete the label:tag from the database if (!db_->remove(params["labeltag"])) { res.status = 404; - res.set_content("Label:tag not found or deletion failed", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Label:tag not found or deletion failed"}}; + res.set_content(response.dump(), "application/json"); return; } - res.set_content("Label:tag deleted successfully", "text/plain"); + nlohmann::json response = {{"result", "success"}}; + res.set_content(response.dump(), "application/json"); } void Server::handle_delete_object(const httplib::Request& req, httplib::Response& res) { @@ -422,14 +449,16 @@ void Server::handle_delete_object(const httplib::Request& req, httplib::Response std::filesystem::path file_path = config_.object_store_path / params["hash"]; if (!std::filesystem::exists(file_path) || !std::filesystem::is_regular_file(file_path)) { res.status = 404; - res.set_content("Object not found for hash: " + params["hash"], "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Object not found for hash: " + params["hash"]}}; + res.set_content(response.dump(), "application/json"); return; } // Remove all tags that reference this hash if (!db_->remove_by_hash(params["hash"])) { res.status = 500; - res.set_content("Failed to remove some or all associated tags", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to remove some or all associated tags"}}; + res.set_content(response.dump(), "application/json"); return; } @@ -439,11 +468,13 @@ void Server::handle_delete_object(const httplib::Request& req, httplib::Response } catch (const std::filesystem::filesystem_error& e) { std::cerr << "Error deleting object file: " << e.what() << std::endl; res.status = 500; - 
res.set_content("Failed to delete object file: " + std::string(e.what()), "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to delete object file: " + std::string(e.what())}}; + res.set_content(response.dump(), "application/json"); return; } - res.set_content("Object and all associated tags deleted successfully", "text/plain"); + nlohmann::json response = {{"result", "success"}}; + res.set_content(response.dump(), "application/json"); } void Server::handle_append_tag(const httplib::Request& req, httplib::Response& res) { @@ -456,7 +487,8 @@ void Server::handle_append_tag(const httplib::Request& req, httplib::Response& r auto [label, tag] = parse_label_tag(params["labeltag"]); if (label.empty() || tag.empty()) { res.status = 400; - res.set_content("Invalid label:tag format", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Invalid label:tag format"}}; + res.set_content(response.dump(), "application/json"); return; } @@ -464,7 +496,8 @@ void Server::handle_append_tag(const httplib::Request& req, httplib::Response& r std::filesystem::path file_path = config_.object_store_path / params["hash"]; if (!std::filesystem::exists(file_path) || !std::filesystem::is_regular_file(file_path)) { res.status = 404; - res.set_content("Object not found for hash: " + params["hash"], "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Object not found for hash: " + params["hash"]}}; + res.set_content(response.dump(), "application/json"); return; } @@ -473,7 +506,8 @@ void Server::handle_append_tag(const httplib::Request& req, httplib::Response& r if (db_->get(params["labeltag"], existing_entry)) { if (existing_entry.hash == params["hash"]) { // Label:tag already points to this hash, nothing to do - res.set_content("Label:tag already points to this hash", "text/plain"); + nlohmann::json response = {{"result", "success"}, {"message", "Label:tag already points to this hash"}}; + res.set_content(response.dump(), "application/json"); return; } } @@ -486,11 +520,13 @@ void Server::handle_append_tag(const httplib::Request& req, httplib::Response& r if (!db_->update_or_insert(entry)) { res.status = 500; - res.set_content("Failed to append tag", "text/plain"); + nlohmann::json response = {{"result", "error"}, {"error", "Failed to append tag"}}; + res.set_content(response.dump(), "application/json"); return; } - res.set_content("Tag appended successfully", "text/plain"); + nlohmann::json response = {{"result", "success"}}; + res.set_content(response.dump(), "application/json"); } } // namespace simple_object_storage \ No newline at end of file diff --git a/test.sh b/test.sh index 8cb12b0..243dfee 100755 --- a/test.sh +++ b/test.sh @@ -3,6 +3,13 @@ SCRIPT_DIR=$(dirname $0) SCRIPT_NAME=$(basename $0) + +function die() { + echo "error: $1" + exit 1 +} + + # test jq is installed if ! command -v jq &> /dev/null; then echo "jq could not be found" @@ -34,6 +41,71 @@ BASE_TAG="autotest" # upload this script as an object echo "uploading ${SCRIPT_DIR}/${SCRIPT_NAME} to ${BASE_TAG}:test1" -OBJECT_HASH=$(curl "${BASE_URL}/upload?token=${WRITE_TOKEN}&labeltag=${BASE_TAG}:test1&filename=${SCRIPT_NAME}" -T ${SCRIPT_DIR}/${SCRIPT_NAME} | jq -r '.hash') +OBJECT_HASH=$(curl -s "${BASE_URL}/upload?token=${WRITE_TOKEN}&labeltag=${BASE_TAG}:test1&filename=${SCRIPT_NAME}" -T ${SCRIPT_DIR}/${SCRIPT_NAME} | jq -r '.hash') echo "received hash ${OBJECT_HASH}" +# check the hash matches. 
+CHECK_HASH=$(curl -s "${BASE_URL}/hash/${BASE_TAG}:test1" | jq -r '.hash')
+[ "${OBJECT_HASH}" != "${CHECK_HASH}" ] && die "hash does not match"
+
+# get md5sum of this file
+MD5SUM=$(md5sum ${SCRIPT_DIR}/${SCRIPT_NAME} | awk '{print $1}')
+echo "md5sum of ${SCRIPT_DIR}/${SCRIPT_NAME} is ${MD5SUM}"
+
+# download the object
+echo "downloading ${OBJECT_HASH} to ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded1"
+curl -s "${BASE_URL}/object/${OBJECT_HASH}" -o ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded1
+
+# download the object again via the label:tag
+echo "downloading ${BASE_TAG}:test1 to ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded2"
+curl -s "${BASE_URL}/object/${BASE_TAG}:test1" -o ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded2
+
+# get md5sum of the downloaded files
+MD5SUM_DOWNLOADED1=$(md5sum ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded1 | awk '{print $1}')
+echo "md5sum of ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded1 is ${MD5SUM_DOWNLOADED1}"
+[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED1}" ] && die "md5sums do not match"
+MD5SUM_DOWNLOADED2=$(md5sum ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded2 | awk '{print $1}')
+[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED2}" ] && die "md5sums do not match"
+
+rm ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded1
+rm ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded2
+
+# delete the object tags
+echo "deleting tag ${BASE_TAG}:test1"
+if ! curl -s "${BASE_URL}/deletetag?token=${WRITE_TOKEN}&labeltag=${BASE_TAG}:test1" | jq -r '.result' | grep -q 'success'; then
+    die "failed to delete tag ${BASE_TAG}:test1"
+fi
+
+# testing we CANT download via the label:tag
+echo "testing we CANT download via the label:tag"
+if curl -s "${BASE_URL}/object/${BASE_TAG}:test1" | jq -r '.result' | grep -q 'success'; then
+    die "downloaded via the label:tag"
+fi
+
+# testing we can still download via the hash (downloads return raw file content, so check the curl exit status)
+echo "testing we can still download via the hash"
+if ! curl -s -f "${BASE_URL}/object/${OBJECT_HASH}" -o ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded3; then
+    die "failed to download via the hash"
+fi
+
+MD5SUM_DOWNLOADED3=$(md5sum ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded3 | awk '{print $1}')
+[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED3}" ] && die "md5sums do not match"
+
+# delete the downloaded file
+rm ${SCRIPT_DIR}/${SCRIPT_NAME}.downloaded3
+
+# delete the object
+echo "deleting ${OBJECT_HASH}"
+if ! curl -s "${BASE_URL}/deleteobject?token=${WRITE_TOKEN}&hash=${OBJECT_HASH}" | jq -r '.result' | grep -q 'success'; then
+    die "failed to delete ${OBJECT_HASH}"
+fi
+
+# verify the object is deleted (the server should now return an error result)
+echo "verifying ${OBJECT_HASH} is deleted"
+DELETE_RESPONSE=$(curl -s "${BASE_URL}/object/${OBJECT_HASH}")
+echo "delete response: ${DELETE_RESPONSE}"
+if ! echo "${DELETE_RESPONSE}" | jq -r '.result' | grep -q 'error'; then
+    die "failed to verify ${OBJECT_HASH} is deleted"
+fi
+
+