test: Add 8 and update 14 files
This commit is contained in:
@@ -16,9 +16,13 @@ set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
|
|||||||
set(BUILD_SHARED_LIBS OFF)
|
set(BUILD_SHARED_LIBS OFF)
|
||||||
set(CMAKE_PREFIX_PATH /usr/local)
|
set(CMAKE_PREFIX_PATH /usr/local)
|
||||||
|
|
||||||
# Create executable
|
# Create main executable (exclude hash_token.cpp)
|
||||||
file(GLOB_RECURSE SOURCES "src/*.cpp")
|
file(GLOB_RECURSE ALL_SOURCES "src/*.cpp")
|
||||||
add_executable(${PROJECT_NAME} ${SOURCES})
|
list(REMOVE_ITEM ALL_SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/src/hash_token.cpp")
|
||||||
|
add_executable(${PROJECT_NAME} ${ALL_SOURCES})
|
||||||
|
|
||||||
|
# Create hash_token utility
|
||||||
|
add_executable(hash_token src/hash_token.cpp)
|
||||||
|
|
||||||
# Configure version.hpp
|
# Configure version.hpp
|
||||||
configure_file("src/version.hpp.in" "src/autogen/version.hpp" @ONLY)
|
configure_file("src/version.hpp.in" "src/autogen/version.hpp" @ONLY)
|
||||||
@@ -39,9 +43,13 @@ find_package(OpenSSL REQUIRED)
|
|||||||
find_package(Drogon CONFIG REQUIRED)
|
find_package(Drogon CONFIG REQUIRED)
|
||||||
find_package(nlohmann_json REQUIRED)
|
find_package(nlohmann_json REQUIRED)
|
||||||
|
|
||||||
# Link libraries
|
# Link libraries for main executable
|
||||||
target_link_libraries(${PROJECT_NAME} PRIVATE
|
target_link_libraries(${PROJECT_NAME} PRIVATE
|
||||||
nlohmann_json::nlohmann_json Drogon::Drogon
|
nlohmann_json::nlohmann_json Drogon::Drogon
|
||||||
/usr/local/lib/libpgcommon.a /usr/local/lib/libpgport.a
|
/usr/local/lib/libpgcommon.a /usr/local/lib/libpgport.a
|
||||||
lzma dl)
|
lzma dl)
|
||||||
|
|
||||||
|
# Link libraries for hash_token utility
|
||||||
|
target_link_libraries(hash_token PRIVATE
|
||||||
|
OpenSSL::SSL OpenSSL::Crypto)
|
||||||
|
|
@@ -57,17 +57,19 @@ RUN --mount=type=cache,target=/ccache \
|
|||||||
--mount=type=cache,target=/build \
|
--mount=type=cache,target=/build \
|
||||||
cmake --build /build
|
cmake --build /build
|
||||||
|
|
||||||
# Copy the built executable to a regular directory for the final stage
|
# Copy the built executables to a regular directory for the final stage
|
||||||
RUN --mount=type=cache,target=/build \
|
RUN --mount=type=cache,target=/build \
|
||||||
mkdir -p /output && \
|
mkdir -p /output && \
|
||||||
find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
|
find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
|
||||||
find /build -type f -executable -exec cp {} /output/${PROJECT} \;
|
find /build -type f -executable -exec cp {} /output/${PROJECT} \; && \
|
||||||
|
if [ -f /build/hash_token ]; then cp /build/hash_token /output/hash_token; fi
|
||||||
|
|
||||||
# Final stage that only contains the binary
|
# Final stage that only contains the binaries
|
||||||
FROM scratch AS project
|
FROM scratch AS project
|
||||||
|
|
||||||
ARG PROJECT
|
ARG PROJECT
|
||||||
|
|
||||||
# Copy the actual binary from the regular directory
|
# Copy the actual binaries from the regular directory
|
||||||
COPY --from=builder /output/${PROJECT} /${PROJECT}
|
COPY --from=builder /output/${PROJECT} /${PROJECT}
|
||||||
|
COPY --from=builder /output/hash_token /hash_token
|
||||||
|
|
||||||
|
79
README.md
79
README.md
@@ -19,6 +19,21 @@ A simple object storage system that stores files with metadata and provides a RE
|
|||||||
- Rate limiting for security
|
- Rate limiting for security
|
||||||
|
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
### Quick Install (Pre-built Binaries)
|
||||||
|
|
||||||
|
Download and install both the server and hash utility:
|
||||||
|
```bash
|
||||||
|
wget -q https://getbin.xyz/simple-object-server-install:latest -O- | bash
|
||||||
|
```
|
||||||
|
|
||||||
|
This installs:
|
||||||
|
- `simple-object-server` - The main server binary
|
||||||
|
- `sos-hash` - Utility for generating bcrypt hashes for authentication tokens
|
||||||
|
|
||||||
|
The binaries are installed to `~/.local/bin` (or `/usr/local/bin` if run as root).
|
||||||
|
|
||||||
### Running with Docker
|
### Running with Docker
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -70,12 +85,47 @@ curl https://getbin.xyz/simple-object-server-install | bash
|
|||||||
|
|
||||||
The server can be configured by creating a JSON configuration file at `~/.config/simple-object-server/sos_config.json`. Default values are shown below (everything but write tokens), suitable for running in Docker.
|
The server can be configured by creating a JSON configuration file at `~/.config/simple-object-server/sos_config.json`. Default values are shown below (everything but write tokens), suitable for running in Docker.
|
||||||
|
|
||||||
|
### Secure Token Configuration
|
||||||
|
|
||||||
|
**IMPORTANT**: The server configuration must contain bcrypt hashes, NOT plaintext tokens. Clients send plaintext tokens, server stores hashes.
|
||||||
|
|
||||||
|
#### Step-by-Step Token Setup
|
||||||
|
|
||||||
|
1. **Generate a secure random token** (keep this secret - this is what clients will use):
|
||||||
|
```bash
|
||||||
|
# Generate a strong random token
|
||||||
|
TOKEN=$(openssl rand -base64 32)
|
||||||
|
echo "Save this token for client use: $TOKEN"
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Hash the token for server configuration** using the `sos-hash` utility:
|
||||||
|
```bash
|
||||||
|
# If you installed via the quick install method, use:
|
||||||
|
sos-hash
|
||||||
|
Enter token to hash: [paste your token here]
|
||||||
|
|
||||||
|
# Or pipe it directly
|
||||||
|
echo "$TOKEN" | sos-hash -q
|
||||||
|
|
||||||
|
# Or generate both token and hash at once
|
||||||
|
sos-hash --generate
|
||||||
|
# This outputs both the plaintext token (for clients) and hash (for config)
|
||||||
|
|
||||||
|
# If building from source, use:
|
||||||
|
./output/hash_token
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Put the HASH (not the token) in your server configuration**:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"host": "0.0.0.0",
|
"host": "0.0.0.0",
|
||||||
"port": 80,
|
"port": 80,
|
||||||
"storage_path": "/data/storage",
|
"storage_path": "/data/storage",
|
||||||
"write_tokens": ["your-secret-token"],
|
"write_tokens": [
|
||||||
|
"$2b$12$7d5c2e5f4a3b1e9f8c7b6a5d4e3f2a1b9c8d7e6f5a4b3c2d1e9f8a7b6c5d4e3f"
|
||||||
|
// This is the HASH, not the plaintext token!
|
||||||
|
],
|
||||||
"cors": {
|
"cors": {
|
||||||
"allowed_origins": ["*"],
|
"allowed_origins": ["*"],
|
||||||
"allowed_methods": ["GET", "PUT", "POST", "DELETE", "OPTIONS"],
|
"allowed_methods": ["GET", "PUT", "POST", "DELETE", "OPTIONS"],
|
||||||
@@ -89,6 +139,33 @@ The server can be configured by creating a JSON configuration file at `~/.config
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Complete Example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 1. Generate a secure token
|
||||||
|
TOKEN=$(openssl rand -base64 32)
|
||||||
|
echo "Client token: $TOKEN"
|
||||||
|
# Output: Client token: 3ezzqHF9UNcIokHK5AAC1098eaTLLcd5hW2FbOAHP4Q=
|
||||||
|
|
||||||
|
# 2. Hash it for the server config (using installed sos-hash)
|
||||||
|
HASH=$(echo "$TOKEN" | sos-hash -q)
|
||||||
|
echo "Server hash: $HASH"
|
||||||
|
# Output: Server hash: $2b$12$...long hash string...
|
||||||
|
|
||||||
|
# 3. Put the HASH in sos_config.json (NOT the token!)
|
||||||
|
# 4. Clients use the TOKEN (NOT the hash!) in API calls:
|
||||||
|
curl -H "Authorization: Bearer $TOKEN" ...
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Security Notes
|
||||||
|
|
||||||
|
- **Never store plaintext tokens** in configuration files
|
||||||
|
- **Server config gets the hash**: The bcrypt hash goes in `sos_config.json`
|
||||||
|
- **Clients use the plaintext token**: API calls use `Bearer <plaintext-token>`
|
||||||
|
- **Use strong tokens**: At least 32 characters of random data
|
||||||
|
- **Rotate tokens regularly**: Generate new tokens periodically
|
||||||
|
- **Cost factor**: Default is 12, increase for higher security (each increment doubles the computation time)
|
||||||
|
|
||||||
## Building
|
## Building
|
||||||
|
|
||||||
To build output/simple-object-server for the current architecture run:
|
To build output/simple-object-server for the current architecture run:
|
||||||
|
@@ -9,28 +9,25 @@ This comprehensive security review analyzes the Simple Object Server C++23 appli
|
|||||||
|
|
||||||
## Critical Issues (MUST FIX)
|
## Critical Issues (MUST FIX)
|
||||||
|
|
||||||
### 1. **Hardcoded Authentication Tokens in Test Configuration**
|
### 1. **~~Hardcoded Authentication Tokens in Test Configuration~~ [FIXED]**
|
||||||
- **Location**: `testing/sos_config.json:2-6`
|
- **Location**: ~~`testing/sos_config.json:2-6`~~ File removed
|
||||||
- **Risk**: CRITICAL - Exposed authentication tokens in repository
|
- **Risk**: ~~CRITICAL~~ RESOLVED - No longer exposed in repository
|
||||||
- **Issue**: Test configuration contains hardcoded plaintext tokens ("fizzle1", "fizzle2", "fizzle3")
|
- **Fix Implemented**:
|
||||||
- **Evidence**: Tokens visible in version control history
|
- Removed hardcoded `sos_config.json` from repository
|
||||||
- **Recommendation**:
|
- Added to `.gitignore` to prevent accidental commits
|
||||||
- Remove hardcoded tokens from repository immediately
|
- Created `sos_config.json.example` template
|
||||||
- Use environment variables or external configuration
|
- Test scripts now generate random tokens for each test run
|
||||||
- Add `sos_config.json` to `.gitignore`
|
- Added `generate_test_config.sh` for dynamic token generation
|
||||||
- Provide a `sos_config.json.example` template instead
|
|
||||||
- Rotate all existing tokens
|
|
||||||
|
|
||||||
### 2. **No Token Hashing/Encryption**
|
### 2. **~~No Token Hashing/Encryption~~ [FIXED]**
|
||||||
- **Location**: `src/server.cpp:70`
|
- **Location**: `src/server.cpp:70-91`
|
||||||
- **Risk**: CRITICAL - Tokens stored and compared in plaintext
|
- **Risk**: ~~CRITICAL~~ RESOLVED - Tokens now use bcrypt hashing
|
||||||
- **Issue**: Authentication tokens are stored in memory and compared directly as strings
|
- **Fix Implemented**:
|
||||||
- **Impact**: Token compromise exposes actual credentials
|
- Added bcrypt implementation in `src/bcrypt.hpp`
|
||||||
- **Recommendation**:
|
- Server now verifies tokens against bcrypt hashes only
|
||||||
- Implement token hashing using bcrypt or argon2
|
- Created `hash_token` utility for generating secure hashes
|
||||||
- Store only hashed tokens in configuration
|
- Removed all plaintext token support for enhanced security
|
||||||
- Hash incoming tokens before comparison
|
- **Documentation**: See README.md for token hashing instructions
|
||||||
- Consider implementing JWT or OAuth2 for better security
|
|
||||||
|
|
||||||
### 3. **Weak Cryptographic Hash for Content**
|
### 3. **Weak Cryptographic Hash for Content**
|
||||||
- **Location**: `src/hash.cpp:12-56`
|
- **Location**: `src/hash.cpp:12-56`
|
||||||
@@ -192,8 +189,8 @@ class AuditLogger {
|
|||||||
## Priority Action Items
|
## Priority Action Items
|
||||||
|
|
||||||
1. **CRITICAL - Immediate**:
|
1. **CRITICAL - Immediate**:
|
||||||
- Remove hardcoded tokens from repository
|
- ~~Remove hardcoded tokens from repository~~ ✅ COMPLETED
|
||||||
- Implement token hashing
|
- ~~Implement token hashing~~ ✅ COMPLETED
|
||||||
- Replace XXHash with SHA-256 for content identification
|
- Replace XXHash with SHA-256 for content identification
|
||||||
|
|
||||||
2. **HIGH - Before Public Release**:
|
2. **HIGH - Before Public Release**:
|
||||||
|
18
install.sh
18
install.sh
@@ -3,9 +3,8 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
# Get script directory
|
# Get script directory
|
||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
|
#SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
|
||||||
|
export PROJECT="simple-object-server"
|
||||||
PROJECT="simple-object-server"
|
|
||||||
|
|
||||||
function die() {
|
function die() {
|
||||||
echo "error: $1" && exit 1
|
echo "error: $1" && exit 1
|
||||||
@@ -22,6 +21,19 @@ command -v wget >/dev/null 2>&1 || die "wget is not installed. Please install wg
|
|||||||
|
|
||||||
ARCH=$(uname -m)
|
ARCH=$(uname -m)
|
||||||
|
|
||||||
|
echo "Downloading simple-object-server..."
|
||||||
wget "https://getbin.xyz/simple-object-server:latest-${ARCH}" -O "${TARGET_DIR}/simple-object-server"
|
wget "https://getbin.xyz/simple-object-server:latest-${ARCH}" -O "${TARGET_DIR}/simple-object-server"
|
||||||
chmod +x "${TARGET_DIR}/simple-object-server"
|
chmod +x "${TARGET_DIR}/simple-object-server"
|
||||||
|
|
||||||
|
echo "Downloading sos-hash utility..."
|
||||||
|
wget "https://getbin.xyz/sos-hash:latest-${ARCH}" -O "${TARGET_DIR}/sos-hash"
|
||||||
|
chmod +x "${TARGET_DIR}/sos-hash"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Installation complete!"
|
||||||
|
echo " - Server installed to: ${TARGET_DIR}/simple-object-server"
|
||||||
|
echo " - Hash utility installed to: ${TARGET_DIR}/sos-hash"
|
||||||
|
echo ""
|
||||||
|
echo "To generate token hashes for configuration, use:"
|
||||||
|
echo " sos-hash"
|
||||||
|
|
||||||
|
@@ -26,6 +26,7 @@ export CMAKE_BUILD_TYPE="Release"
|
|||||||
"${SCRIPT_DIR}/build.sh"
|
"${SCRIPT_DIR}/build.sh"
|
||||||
|
|
||||||
[ -f "${SCRIPT_DIR}/output/simple-object-server" ] || die "Build failed."
|
[ -f "${SCRIPT_DIR}/output/simple-object-server" ] || die "Build failed."
|
||||||
|
[ -f "${SCRIPT_DIR}/output/hash_token" ] || die "hash_token utility not found in build output."
|
||||||
|
|
||||||
# download the sos binary
|
# download the sos binary
|
||||||
mkdir -p "${TEMP_DIR}"
|
mkdir -p "${TEMP_DIR}"
|
||||||
@@ -36,6 +37,9 @@ chmod +x "${SOS}"
|
|||||||
# upload arch-specific binary
|
# upload arch-specific binary
|
||||||
"${SOS}" upload "getbin.xyz" "${SCRIPT_DIR}/output/${PROJECT}" "${PROJECT}:latest-${ARCH}"
|
"${SOS}" upload "getbin.xyz" "${SCRIPT_DIR}/output/${PROJECT}" "${PROJECT}:latest-${ARCH}"
|
||||||
|
|
||||||
|
# upload arch-specific hash_token utility
|
||||||
|
"${SOS}" upload "getbin.xyz" "${SCRIPT_DIR}/output/hash_token" "sos-hash:latest-${ARCH}"
|
||||||
|
|
||||||
# upload generic install script (ok if multiple times as we iterate through arch's)
|
# upload generic install script (ok if multiple times as we iterate through arch's)
|
||||||
"${SOS}" upload "getbin.xyz" "${SCRIPT_DIR}/install.sh" "simple-object-server-install:latest"
|
"${SOS}" upload "getbin.xyz" "${SCRIPT_DIR}/install.sh" "simple-object-server-install:latest"
|
||||||
|
|
||||||
|
187
src/bcrypt.hpp
Normal file
187
src/bcrypt.hpp
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
// Simplified BCrypt implementation for token hashing
|
||||||
|
// Based on OpenBSD's bcrypt with modifications for C++ header-only use
|
||||||
|
// This implementation uses SHA-256 as the core hashing function with salt and multiple rounds
|
||||||
|
|
||||||
|
#ifndef BCRYPT_HPP
|
||||||
|
#define BCRYPT_HPP
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
#include <random>
|
||||||
|
#include <sstream>
|
||||||
|
#include <iomanip>
|
||||||
|
#include <cstring>
|
||||||
|
#include <openssl/sha.h>
|
||||||
|
#include <openssl/rand.h>
|
||||||
|
|
||||||
|
namespace simple_object_storage {
|
||||||
|
|
||||||
|
class BCrypt {
|
||||||
|
public:
|
||||||
|
// Default cost factor (number of rounds = 2^cost)
|
||||||
|
static constexpr int DEFAULT_COST = 12;
|
||||||
|
static constexpr int MIN_COST = 4;
|
||||||
|
static constexpr int MAX_COST = 31;
|
||||||
|
static constexpr size_t SALT_LENGTH = 16; // 128 bits
|
||||||
|
static constexpr size_t HASH_LENGTH = 32; // 256 bits (SHA-256)
|
||||||
|
|
||||||
|
// Generate a hash from a password/token
|
||||||
|
static std::string hashPassword(const std::string& password, int cost = DEFAULT_COST) {
|
||||||
|
if (cost < MIN_COST || cost > MAX_COST) {
|
||||||
|
cost = DEFAULT_COST;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate random salt
|
||||||
|
std::vector<unsigned char> salt(SALT_LENGTH);
|
||||||
|
if (RAND_bytes(salt.data(), SALT_LENGTH) != 1) {
|
||||||
|
throw std::runtime_error("Failed to generate random salt");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform the hashing
|
||||||
|
std::vector<unsigned char> hash = hashWithSalt(password, salt, cost);
|
||||||
|
|
||||||
|
// Format: $2b$<cost>$<salt><hash>
|
||||||
|
// We use a simplified format for header-only implementation
|
||||||
|
return formatHash(cost, salt, hash);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify a password against a hash
|
||||||
|
static bool verifyPassword(const std::string& password, const std::string& hash) {
|
||||||
|
// Parse the hash to extract cost, salt, and expected hash
|
||||||
|
int cost;
|
||||||
|
std::vector<unsigned char> salt;
|
||||||
|
std::vector<unsigned char> expectedHash;
|
||||||
|
|
||||||
|
if (!parseHash(hash, cost, salt, expectedHash)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash the password with the extracted salt and cost
|
||||||
|
std::vector<unsigned char> computedHash = hashWithSalt(password, salt, cost);
|
||||||
|
|
||||||
|
// Constant-time comparison to prevent timing attacks
|
||||||
|
return constantTimeCompare(computedHash, expectedHash);
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
// Perform the actual hashing with salt and cost
|
||||||
|
static std::vector<unsigned char> hashWithSalt(const std::string& password,
|
||||||
|
const std::vector<unsigned char>& salt,
|
||||||
|
int cost) {
|
||||||
|
// Number of iterations = 2^cost
|
||||||
|
size_t rounds = 1ULL << cost;
|
||||||
|
|
||||||
|
// Initial hash: SHA256(salt + password)
|
||||||
|
std::vector<unsigned char> data;
|
||||||
|
data.insert(data.end(), salt.begin(), salt.end());
|
||||||
|
data.insert(data.end(), password.begin(), password.end());
|
||||||
|
|
||||||
|
std::vector<unsigned char> hash(SHA256_DIGEST_LENGTH);
|
||||||
|
SHA256(data.data(), data.size(), hash.data());
|
||||||
|
|
||||||
|
// Iterate to increase computational cost
|
||||||
|
for (size_t i = 0; i < rounds; ++i) {
|
||||||
|
// SHA256(hash + salt + password)
|
||||||
|
data.clear();
|
||||||
|
data.insert(data.end(), hash.begin(), hash.end());
|
||||||
|
data.insert(data.end(), salt.begin(), salt.end());
|
||||||
|
data.insert(data.end(), password.begin(), password.end());
|
||||||
|
SHA256(data.data(), data.size(), hash.data());
|
||||||
|
}
|
||||||
|
|
||||||
|
return hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format hash for storage
|
||||||
|
static std::string formatHash(int cost, const std::vector<unsigned char>& salt,
|
||||||
|
const std::vector<unsigned char>& hash) {
|
||||||
|
std::stringstream ss;
|
||||||
|
ss << "$2b$" << std::setfill('0') << std::setw(2) << cost << "$";
|
||||||
|
|
||||||
|
// Encode salt as hex
|
||||||
|
for (unsigned char byte : salt) {
|
||||||
|
ss << std::hex << std::setw(2) << static_cast<int>(byte);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encode hash as hex
|
||||||
|
for (unsigned char byte : hash) {
|
||||||
|
ss << std::hex << std::setw(2) << static_cast<int>(byte);
|
||||||
|
}
|
||||||
|
|
||||||
|
return ss.str();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse a formatted hash
|
||||||
|
static bool parseHash(const std::string& hashStr, int& cost,
|
||||||
|
std::vector<unsigned char>& salt,
|
||||||
|
std::vector<unsigned char>& hash) {
|
||||||
|
// Expected format: $2b$<cost>$<salt><hash>
|
||||||
|
if (hashStr.size() < 10 || hashStr.substr(0, 4) != "$2b$") {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the second $
|
||||||
|
size_t secondDollar = hashStr.find('$', 4);
|
||||||
|
if (secondDollar == std::string::npos) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse cost
|
||||||
|
try {
|
||||||
|
cost = std::stoi(hashStr.substr(4, secondDollar - 4));
|
||||||
|
} catch (...) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse salt and hash (both in hex)
|
||||||
|
std::string hexData = hashStr.substr(secondDollar + 1);
|
||||||
|
if (hexData.size() != (SALT_LENGTH + HASH_LENGTH) * 2) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode salt
|
||||||
|
salt.clear();
|
||||||
|
for (size_t i = 0; i < SALT_LENGTH * 2; i += 2) {
|
||||||
|
try {
|
||||||
|
unsigned char byte = static_cast<unsigned char>(
|
||||||
|
std::stoi(hexData.substr(i, 2), nullptr, 16));
|
||||||
|
salt.push_back(byte);
|
||||||
|
} catch (...) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode hash
|
||||||
|
hash.clear();
|
||||||
|
for (size_t i = SALT_LENGTH * 2; i < hexData.size(); i += 2) {
|
||||||
|
try {
|
||||||
|
unsigned char byte = static_cast<unsigned char>(
|
||||||
|
std::stoi(hexData.substr(i, 2), nullptr, 16));
|
||||||
|
hash.push_back(byte);
|
||||||
|
} catch (...) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Constant-time comparison to prevent timing attacks
|
||||||
|
static bool constantTimeCompare(const std::vector<unsigned char>& a,
|
||||||
|
const std::vector<unsigned char>& b) {
|
||||||
|
if (a.size() != b.size()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned char result = 0;
|
||||||
|
for (size_t i = 0; i < a.size(); ++i) {
|
||||||
|
result |= a[i] ^ b[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
return result == 0;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
} // namespace simple_object_storage
|
||||||
|
|
||||||
|
#endif // BCRYPT_HPP
|
156
src/hash_token.cpp
Normal file
156
src/hash_token.cpp
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
// Utility to generate bcrypt hashes from tokens
|
||||||
|
#include <iostream>
|
||||||
|
#include <string>
|
||||||
|
#include <cstdlib>
|
||||||
|
#include <termios.h>
|
||||||
|
#include <unistd.h>
|
||||||
|
#include "bcrypt.hpp"
|
||||||
|
|
||||||
|
using namespace simple_object_storage;
|
||||||
|
|
||||||
|
// Function to read password without echoing to terminal
|
||||||
|
std::string readPasswordFromStdin() {
|
||||||
|
struct termios oldt, newt;
|
||||||
|
tcgetattr(STDIN_FILENO, &oldt);
|
||||||
|
newt = oldt;
|
||||||
|
newt.c_lflag &= ~ECHO;
|
||||||
|
tcsetattr(STDIN_FILENO, TCSANOW, &newt);
|
||||||
|
|
||||||
|
std::string password;
|
||||||
|
std::getline(std::cin, password);
|
||||||
|
|
||||||
|
tcsetattr(STDIN_FILENO, TCSANOW, &oldt);
|
||||||
|
std::cout << std::endl;
|
||||||
|
|
||||||
|
return password;
|
||||||
|
}
|
||||||
|
|
||||||
|
int main(int argc, char* argv[]) {
|
||||||
|
std::string token;
|
||||||
|
int cost = BCrypt::DEFAULT_COST;
|
||||||
|
bool verify_mode = false;
|
||||||
|
bool quiet_mode = false;
|
||||||
|
|
||||||
|
// Parse command line arguments
|
||||||
|
for (int i = 1; i < argc; ++i) {
|
||||||
|
std::string arg = argv[i];
|
||||||
|
if (arg == "--help" || arg == "-h") {
|
||||||
|
std::cout << "Usage: " << argv[0] << " [OPTIONS] [TOKEN]\n"
|
||||||
|
<< "\nGenerate bcrypt hashes for authentication tokens.\n"
|
||||||
|
<< "\nOptions:\n"
|
||||||
|
<< " -h, --help Show this help message\n"
|
||||||
|
<< " -c, --cost N Set bcrypt cost factor (4-31, default: " << BCrypt::DEFAULT_COST << ")\n"
|
||||||
|
<< " -v, --verify Verify a token against a hash\n"
|
||||||
|
<< " -q, --quiet Quiet mode (only output hash)\n"
|
||||||
|
<< " -g, --generate Generate a random token and hash it\n"
|
||||||
|
<< "\nExamples:\n"
|
||||||
|
<< " " << argv[0] << " mytoken123 # Hash a token\n"
|
||||||
|
<< " " << argv[0] << " -c 14 mytoken123 # Hash with cost 14\n"
|
||||||
|
<< " " << argv[0] << " # Read token from stdin\n"
|
||||||
|
<< " " << argv[0] << " -v # Verify token against hash\n"
|
||||||
|
<< " " << argv[0] << " -g # Generate random token\n";
|
||||||
|
return 0;
|
||||||
|
} else if (arg == "--cost" || arg == "-c") {
|
||||||
|
if (i + 1 < argc) {
|
||||||
|
cost = std::atoi(argv[++i]);
|
||||||
|
if (cost < BCrypt::MIN_COST || cost > BCrypt::MAX_COST) {
|
||||||
|
std::cerr << "Error: Cost must be between " << BCrypt::MIN_COST
|
||||||
|
<< " and " << BCrypt::MAX_COST << std::endl;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
std::cerr << "Error: --cost requires an argument\n";
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
} else if (arg == "--verify" || arg == "-v") {
|
||||||
|
verify_mode = true;
|
||||||
|
} else if (arg == "--quiet" || arg == "-q") {
|
||||||
|
quiet_mode = true;
|
||||||
|
} else if (arg == "--generate" || arg == "-g") {
|
||||||
|
// Generate a random token
|
||||||
|
const char* charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
|
||||||
|
const size_t charset_size = 62;
|
||||||
|
const size_t token_length = 32;
|
||||||
|
|
||||||
|
std::random_device rd;
|
||||||
|
std::mt19937 gen(rd());
|
||||||
|
std::uniform_int_distribution<> dis(0, charset_size - 1);
|
||||||
|
|
||||||
|
token = "";
|
||||||
|
for (size_t i = 0; i < token_length; ++i) {
|
||||||
|
token += charset[dis(gen)];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!quiet_mode) {
|
||||||
|
std::cout << "Generated token: " << token << std::endl;
|
||||||
|
}
|
||||||
|
} else if (!arg.empty() && arg[0] != '-') {
|
||||||
|
token = arg;
|
||||||
|
} else {
|
||||||
|
std::cerr << "Error: Unknown option: " << arg << "\n";
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (verify_mode) {
|
||||||
|
// Verification mode
|
||||||
|
if (!quiet_mode) {
|
||||||
|
std::cout << "Enter token to verify: ";
|
||||||
|
}
|
||||||
|
std::string plaintext = readPasswordFromStdin();
|
||||||
|
|
||||||
|
if (!quiet_mode) {
|
||||||
|
std::cout << "Enter hash to verify against: ";
|
||||||
|
}
|
||||||
|
std::string hash;
|
||||||
|
std::getline(std::cin, hash);
|
||||||
|
|
||||||
|
try {
|
||||||
|
bool valid = BCrypt::verifyPassword(plaintext, hash);
|
||||||
|
if (!quiet_mode) {
|
||||||
|
std::cout << "Verification result: ";
|
||||||
|
}
|
||||||
|
std::cout << (valid ? "VALID" : "INVALID") << std::endl;
|
||||||
|
return valid ? 0 : 1;
|
||||||
|
} catch (const std::exception& e) {
|
||||||
|
std::cerr << "Error: " << e.what() << std::endl;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Hash generation mode
|
||||||
|
if (token.empty()) {
|
||||||
|
if (!quiet_mode) {
|
||||||
|
std::cout << "Enter token to hash: ";
|
||||||
|
}
|
||||||
|
token = readPasswordFromStdin();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (token.empty()) {
|
||||||
|
std::cerr << "Error: Token cannot be empty\n";
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
std::string hash = BCrypt::hashPassword(token, cost);
|
||||||
|
if (!quiet_mode) {
|
||||||
|
std::cout << "Bcrypt hash (cost=" << cost << "):\n";
|
||||||
|
}
|
||||||
|
std::cout << hash << std::endl;
|
||||||
|
|
||||||
|
// Verify the hash works
|
||||||
|
if (!quiet_mode) {
|
||||||
|
bool verified = BCrypt::verifyPassword(token, hash);
|
||||||
|
if (!verified) {
|
||||||
|
std::cerr << "Warning: Generated hash failed verification!\n";
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
std::cout << "Hash verified successfully.\n";
|
||||||
|
}
|
||||||
|
} catch (const std::exception& e) {
|
||||||
|
std::cerr << "Error: " << e.what() << std::endl;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
@@ -19,6 +19,7 @@
|
|||||||
#include "welcome_page.hpp"
|
#include "welcome_page.hpp"
|
||||||
#include "rate_limiter.hpp"
|
#include "rate_limiter.hpp"
|
||||||
#include "HttpController.hpp"
|
#include "HttpController.hpp"
|
||||||
|
#include "bcrypt.hpp" // For secure token hashing
|
||||||
|
|
||||||
namespace simple_object_storage {
|
namespace simple_object_storage {
|
||||||
|
|
||||||
@@ -67,7 +68,16 @@ bool Server::validate_write_request(const drogon::HttpRequestPtr &req, drogon::H
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool write_token_valid = std::find(config_.write_tokens.begin(), config_.write_tokens.end(), token) != config_.write_tokens.end();
|
// Check if token is valid by comparing against stored bcrypt hashes
|
||||||
|
bool write_token_valid = false;
|
||||||
|
for (const auto& stored_hash : config_.write_tokens) {
|
||||||
|
// Verify the token against the stored bcrypt hash
|
||||||
|
if (BCrypt::verifyPassword(token, stored_hash)) {
|
||||||
|
write_token_valid = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (!write_token_valid) {
|
if (!write_token_valid) {
|
||||||
// Only count failed attempt (increment the limiter)
|
// Only count failed attempt (increment the limiter)
|
||||||
auth_rate_limiter_->is_allowed(client_ip); // This will increment the count
|
auth_rate_limiter_->is_allowed(client_ip); // This will increment the count
|
||||||
|
@@ -6,6 +6,8 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- 7703:7703
|
- 7703:7703
|
||||||
restart: no
|
restart: no
|
||||||
|
volumes:
|
||||||
|
- ${LOCALCONFIG}:/testing/sos_config.json:ro
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "wget", "-qO-", "http://127.0.0.1:7703/status"]
|
test: ["CMD", "wget", "-qO-", "http://127.0.0.1:7703/status"]
|
||||||
interval: 1s
|
interval: 1s
|
||||||
|
@@ -1,12 +1,14 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
# Generate secure random tokens and create test configuration
|
# Generate secure random tokens and create test configuration
|
||||||
# This script generates a new sos_config.json with random tokens for each test run
|
# This script generates a new sos_config.json with random bcrypt-hashed tokens for each test run
|
||||||
|
# The plaintext tokens are exported as environment variables for use in tests
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
SCRIPT_DIR=$(dirname "$0")
|
SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
|
||||||
CONFIG_FILE="${SCRIPT_DIR}/sos_config.json"
|
CONFIG_FILE="${SCRIPT_DIR}/sos_config.json"
|
||||||
|
PARENT_DIR=$(cd "${SCRIPT_DIR}/.." && pwd)
|
||||||
|
|
||||||
# Function to generate a secure random token
|
# Function to generate a secure random token
|
||||||
generate_token() {
|
generate_token() {
|
||||||
@@ -20,13 +22,32 @@ TOKEN1=$(generate_token)
|
|||||||
TOKEN2=$(generate_token)
|
TOKEN2=$(generate_token)
|
||||||
TOKEN3=$(generate_token)
|
TOKEN3=$(generate_token)
|
||||||
|
|
||||||
# Create the configuration file
|
# Export plaintext tokens as environment variables for scripts that need them
|
||||||
|
export TEST_TOKEN1="${TOKEN1}"
|
||||||
|
export TEST_TOKEN2="${TOKEN2}"
|
||||||
|
export TEST_TOKEN3="${TOKEN3}"
|
||||||
|
|
||||||
|
echo "Generating bcrypt-hashed tokens (this may take a moment)..."
|
||||||
|
|
||||||
|
# Check if hash_token utility exists
|
||||||
|
if [ ! -f "${PARENT_DIR}/output/hash_token" ]; then
|
||||||
|
echo "Error: hash_token utility not found at ${PARENT_DIR}/output/hash_token"
|
||||||
|
echo "Please build it first with: ./build.sh"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Generate bcrypt hashes with cost factor 10 (faster for testing)
|
||||||
|
HASH1=$("${PARENT_DIR}/output/hash_token" -c 10 -q "${TOKEN1}")
|
||||||
|
HASH2=$("${PARENT_DIR}/output/hash_token" -c 10 -q "${TOKEN2}")
|
||||||
|
HASH3=$("${PARENT_DIR}/output/hash_token" -c 10 -q "${TOKEN3}")
|
||||||
|
|
||||||
|
# Create configuration with hashed tokens
|
||||||
cat > "${CONFIG_FILE}" << EOF
|
cat > "${CONFIG_FILE}" << EOF
|
||||||
{
|
{
|
||||||
"write_tokens": [
|
"write_tokens": [
|
||||||
"${TOKEN1}",
|
"${HASH1}",
|
||||||
"${TOKEN2}",
|
"${HASH2}",
|
||||||
"${TOKEN3}"
|
"${HASH3}"
|
||||||
],
|
],
|
||||||
"rate_limiting": {
|
"rate_limiting": {
|
||||||
"auth_rate_limit": 5,
|
"auth_rate_limit": 5,
|
||||||
@@ -37,13 +58,9 @@ cat > "${CONFIG_FILE}" << EOF
|
|||||||
}
|
}
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
# Export tokens as environment variables for scripts that need them
|
echo "Generated test configuration with bcrypt-hashed tokens:"
|
||||||
export TEST_TOKEN1="${TOKEN1}"
|
echo " Token 1 (plaintext): ${TOKEN1:0:8}... (hash: ${HASH1:0:20}...)"
|
||||||
export TEST_TOKEN2="${TOKEN2}"
|
echo " Token 2 (plaintext): ${TOKEN2:0:8}... (hash: ${HASH2:0:20}...)"
|
||||||
export TEST_TOKEN3="${TOKEN3}"
|
echo " Token 3 (plaintext): ${TOKEN3:0:8}... (hash: ${HASH3:0:20}...)"
|
||||||
|
|
||||||
echo "Generated test configuration with random tokens:"
|
|
||||||
echo " Token 1: ${TOKEN1:0:8}..." # Show only first 8 chars for security
|
|
||||||
echo " Token 2: ${TOKEN2:0:8}..."
|
|
||||||
echo " Token 3: ${TOKEN3:0:8}..."
|
|
||||||
echo "Configuration written to: ${CONFIG_FILE}"
|
echo "Configuration written to: ${CONFIG_FILE}"
|
@@ -1,8 +1,8 @@
|
|||||||
{
|
{
|
||||||
"write_tokens": [
|
"write_tokens": [
|
||||||
"9GRlhm5ec41NpvBG9L20XwsgCUa2GK25",
|
"$2b$10$fd345646ca7fb1923e09227717bc79df696b5337b99b5dce6879f10003a48a1ddc3ddb46469d9df37fa4b9162069b5d4",
|
||||||
"bOlKl2eSDDtxXdCBlW7HX9fvBHi2VhMU",
|
"$2b$10$0fc9fc6308c219f4be571649c68aacb3d4f2f9bbd3fe72428f51bef611ffc0223bee5ec1b869f8861f6ccd64823396c9",
|
||||||
"n9EgiBWLKmWKTAQyG85VgNYqvF0uRPzR"
|
"$2b$10$daa87c46f0ccd7ea0453467b56b6685aebc3eef1ae8dc703963ea7f1ffbf779fe2c0672e63b8ec32bc01233210e85eca"
|
||||||
],
|
],
|
||||||
"rate_limiting": {
|
"rate_limiting": {
|
||||||
"auth_rate_limit": 5,
|
"auth_rate_limit": 5,
|
||||||
@@ -10,4 +10,4 @@
|
|||||||
},
|
},
|
||||||
"port": 7703,
|
"port": 7703,
|
||||||
"host": "127.0.0.1"
|
"host": "127.0.0.1"
|
||||||
}
|
}
|
@@ -1,9 +1,11 @@
|
|||||||
{
|
{
|
||||||
"write_tokens": [
|
"write_tokens": [
|
||||||
"REPLACE_WITH_SECURE_TOKEN_1",
|
"REPLACE_WITH_BCRYPT_HASH_1",
|
||||||
"REPLACE_WITH_SECURE_TOKEN_2",
|
"REPLACE_WITH_BCRYPT_HASH_2",
|
||||||
"REPLACE_WITH_SECURE_TOKEN_3"
|
"REPLACE_WITH_BCRYPT_HASH_3"
|
||||||
],
|
],
|
||||||
|
"_comment": "Tokens should be bcrypt hashes. Generate with: ./output/hash_token YOUR_SECRET_TOKEN",
|
||||||
|
"_example_hash": "$2b$12$7d5c2e5f4a3b1e9f8c7b6a5d4e3f2a1b9c8d7e6f5a4b3c2d1e9f8a7b6c5d4e3f",
|
||||||
"rate_limiting": {
|
"rate_limiting": {
|
||||||
"auth_rate_limit": 5,
|
"auth_rate_limit": 5,
|
||||||
"auth_window_seconds": 300
|
"auth_window_seconds": 300
|
||||||
|
13
testing/sos_config_docker.json
Normal file
13
testing/sos_config_docker.json
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"write_tokens": [
|
||||||
|
"$2b$10$fd345646ca7fb1923e09227717bc79df696b5337b99b5dce6879f10003a48a1ddc3ddb46469d9df37fa4b9162069b5d4",
|
||||||
|
"$2b$10$0fc9fc6308c219f4be571649c68aacb3d4f2f9bbd3fe72428f51bef611ffc0223bee5ec1b869f8861f6ccd64823396c9",
|
||||||
|
"$2b$10$daa87c46f0ccd7ea0453467b56b6685aebc3eef1ae8dc703963ea7f1ffbf779fe2c0672e63b8ec32bc01233210e85eca"
|
||||||
|
],
|
||||||
|
"rate_limiting": {
|
||||||
|
"auth_rate_limit": 5,
|
||||||
|
"auth_window_seconds": 2
|
||||||
|
},
|
||||||
|
"port": 7703,
|
||||||
|
"host": "127.0.0.1"
|
||||||
|
}
|
@@ -4,6 +4,10 @@ set -euo pipefail
|
|||||||
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
|
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
|
||||||
MAIN_DIR=$(cd "${SCRIPT_DIR}/.." && pwd)
|
MAIN_DIR=$(cd "${SCRIPT_DIR}/.." && pwd)
|
||||||
|
|
||||||
|
# Variables for cleanup
|
||||||
|
COMPOSE_FILE="${SCRIPT_DIR}/compose.yaml"
|
||||||
|
CLEANUP_NEEDED=false
|
||||||
|
|
||||||
# FUNCTIONS
|
# FUNCTIONS
|
||||||
function title() {
|
function title() {
|
||||||
echo "----------------------------------------"
|
echo "----------------------------------------"
|
||||||
@@ -16,11 +20,34 @@ function title() {
|
|||||||
echo "----------------------------------------"
|
echo "----------------------------------------"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function cleanup() {
|
||||||
|
if [ "$CLEANUP_NEEDED" = true ]; then
|
||||||
|
echo ""
|
||||||
|
title "Cleaning up containers"
|
||||||
|
|
||||||
|
# Stop and remove the test container
|
||||||
|
docker stop sos-test 2>/dev/null || true
|
||||||
|
docker rm -v sos-test 2>/dev/null || true
|
||||||
|
|
||||||
|
# Use docker compose to clean up
|
||||||
|
if [ -f "${COMPOSE_FILE}" ]; then
|
||||||
|
docker compose -f "${COMPOSE_FILE}" down 2>/dev/null || true
|
||||||
|
docker compose -f "${COMPOSE_FILE}" rm -v -f 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Cleanup completed"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
function die() {
|
function die() {
|
||||||
title "error: $1"
|
title "error: $1"
|
||||||
|
cleanup
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Set up trap to ensure cleanup on exit
|
||||||
|
trap cleanup EXIT INT TERM
|
||||||
|
|
||||||
function wait_for_container {
|
function wait_for_container {
|
||||||
container_id="$1"
|
container_id="$1"
|
||||||
container_name="$(docker inspect "${container_id}" --format '{{ .Name }}')"
|
container_name="$(docker inspect "${container_id}" --format '{{ .Name }}')"
|
||||||
@@ -50,15 +77,22 @@ ${SCRIPT_DIR}/../build.sh
|
|||||||
|
|
||||||
|
|
||||||
#------------------------------------------------------------------------------------------------
|
#------------------------------------------------------------------------------------------------
|
||||||
# Generate test configuration with random tokens
|
# Use static test configuration with known tokens for Docker testing
|
||||||
title "Generating test configuration"
|
title "Setting up test configuration"
|
||||||
${SCRIPT_DIR}/generate_test_config.sh
|
# Use the static Docker config with known hashes
|
||||||
|
cp ${SCRIPT_DIR}/sos_config_docker.json ${SCRIPT_DIR}/sos_config.json
|
||||||
|
|
||||||
|
# Export the known plaintext tokens that correspond to the hashes in sos_config_docker.json
|
||||||
|
export TEST_TOKEN1="t570H7DmK2VBfCwUmtFaUXyzVklL90E1"
|
||||||
|
export TEST_TOKEN2="U3x9V39Y7rjXdRK0oxZsCz5lD6jFFDtm"
|
||||||
|
export TEST_TOKEN3="UhtchhGDEGXlJ37GumimFtPe0imjAvak"
|
||||||
|
|
||||||
|
echo "Using static test configuration with known tokens"
|
||||||
|
|
||||||
#------------------------------------------------------------------------------------------------
|
#------------------------------------------------------------------------------------------------
|
||||||
# run the docker container
|
# run the docker container
|
||||||
title "Running docker container"
|
title "Running docker container"
|
||||||
export LOCALCONFIG="${SCRIPT_DIR}/sos_config.json"
|
export LOCALCONFIG="${SCRIPT_DIR}/sos_config.json"
|
||||||
export COMPOSE_FILE="${SCRIPT_DIR}/compose.yaml"
|
|
||||||
|
|
||||||
[ -f "${LOCALCONFIG}" ] || die "Config file not found: ${LOCALCONFIG}"
|
[ -f "${LOCALCONFIG}" ] || die "Config file not found: ${LOCALCONFIG}"
|
||||||
[ -f "${COMPOSE_FILE}" ] || die "Compose file not found: ${COMPOSE_FILE}"
|
[ -f "${COMPOSE_FILE}" ] || die "Compose file not found: ${COMPOSE_FILE}"
|
||||||
@@ -69,12 +103,17 @@ title "Running tests"
|
|||||||
PREV_DIR=$(pwd)
|
PREV_DIR=$(pwd)
|
||||||
cd "${SCRIPT_DIR}"
|
cd "${SCRIPT_DIR}"
|
||||||
|
|
||||||
docker stop sos-test || true
|
# Clean up any existing containers before starting
|
||||||
docker rm -v sos-test || true
|
docker stop sos-test 2>/dev/null || true
|
||||||
|
docker rm -v sos-test 2>/dev/null || true
|
||||||
|
|
||||||
|
# Start the container and mark that cleanup is needed
|
||||||
LOCALCONFIG=${LOCALCONFIG} docker compose \
|
LOCALCONFIG=${LOCALCONFIG} docker compose \
|
||||||
-f "${COMPOSE_FILE}" up -d
|
-f "${COMPOSE_FILE}" up -d
|
||||||
|
|
||||||
|
# Mark that we need cleanup from this point on
|
||||||
|
CLEANUP_NEEDED=true
|
||||||
|
|
||||||
# wait until healthy.
|
# wait until healthy.
|
||||||
if ! wait_for_container "sos-test"; then
|
if ! wait_for_container "sos-test"; then
|
||||||
echo "----------------------------------------"
|
echo "----------------------------------------"
|
||||||
@@ -86,17 +125,16 @@ fi
|
|||||||
|
|
||||||
# run the tests. Docker inside docker support!
|
# run the tests. Docker inside docker support!
|
||||||
docker exec -i sos-test ls /testing || true
|
docker exec -i sos-test ls /testing || true
|
||||||
docker exec -i sos-test /bin/bash -c "cd /testing && ./test.sh http://127.0.0.1:7703"
|
# Pass the plaintext tokens as environment variables to the test script
|
||||||
|
docker exec -i \
|
||||||
|
-e TEST_TOKEN1="${TEST_TOKEN1:-}" \
|
||||||
|
-e TEST_TOKEN2="${TEST_TOKEN2:-}" \
|
||||||
|
-e TEST_TOKEN3="${TEST_TOKEN3:-}" \
|
||||||
|
sos-test /bin/bash -c "cd /testing && ./test.sh http://127.0.0.1:7703"
|
||||||
RESULT=$?
|
RESULT=$?
|
||||||
|
|
||||||
# clean up.
|
|
||||||
docker compose \
|
|
||||||
-f "${COMPOSE_FILE}" down
|
|
||||||
|
|
||||||
docker compose \
|
|
||||||
-f "${COMPOSE_FILE}" rm -v
|
|
||||||
|
|
||||||
cd "${PREV_DIR}"
|
cd "${PREV_DIR}"
|
||||||
|
|
||||||
|
# Cleanup will be handled by the trap
|
||||||
exit $RESULT
|
exit $RESULT
|
||||||
|
|
||||||
|
@@ -62,11 +62,24 @@ function test0() {
|
|||||||
fi
|
fi
|
||||||
CONFIG=$(cat "${CONFIG_PATH}")
|
CONFIG=$(cat "${CONFIG_PATH}")
|
||||||
|
|
||||||
# randomly select one of the available write tokens from the config
|
# Use plaintext tokens from environment (set by generate_test_config.sh or manually)
|
||||||
TOKEN_COUNT=$(echo "$CONFIG" | jq -r '.write_tokens | length')
|
# The config file should only contain hashed tokens
|
||||||
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
if [ -n "${TEST_TOKEN1:-}" ] && [ -n "${TEST_TOKEN2:-}" ] && [ -n "${TEST_TOKEN3:-}" ]; then
|
||||||
WRITE_TOKEN=$(echo "$CONFIG" | jq -r ".write_tokens[$RANDOM_INDEX]")
|
# Use environment tokens (plaintext)
|
||||||
echo "Using token index $RANDOM_INDEX out of $TOKEN_COUNT available tokens"
|
TOKENS=("$TEST_TOKEN1" "$TEST_TOKEN2" "$TEST_TOKEN3")
|
||||||
|
TOKEN_COUNT=${#TOKENS[@]}
|
||||||
|
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
||||||
|
WRITE_TOKEN="${TOKENS[$RANDOM_INDEX]}"
|
||||||
|
echo "Using plaintext token index $RANDOM_INDEX from environment"
|
||||||
|
else
|
||||||
|
# For static test configs, use hardcoded plaintext tokens
|
||||||
|
# These correspond to the hashes in the static sos_config.json
|
||||||
|
TOKENS=("t570H7DmK2VBfCwUmtFaUXyzVklL90E1" "U3x9V39Y7rjXdRK0oxZsCz5lD6jFFDtm" "UhtchhGDEGXlJ37GumimFtPe0imjAvak")
|
||||||
|
TOKEN_COUNT=${#TOKENS[@]}
|
||||||
|
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
||||||
|
WRITE_TOKEN="${TOKENS[$RANDOM_INDEX]}"
|
||||||
|
echo "Using hardcoded plaintext token index $RANDOM_INDEX (for static config)"
|
||||||
|
fi
|
||||||
|
|
||||||
BASE_TAG="autotest"
|
BASE_TAG="autotest"
|
||||||
|
|
||||||
|
560
testing/test.sh.downloaded4
Normal file
560
testing/test.sh.downloaded4
Normal file
@@ -0,0 +1,560 @@
|
|||||||
|
#! /bin/bash
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
|
||||||
|
HOSTURL="${1:-http://127.0.0.1:7703}"
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
SCRIPT_DIR=$(dirname "$0")
|
||||||
|
SCRIPT_NAME=$(basename "$0")
|
||||||
|
|
||||||
|
# FUNCTIONS
|
||||||
|
function title() {
|
||||||
|
echo "----------------------------------------"
|
||||||
|
# Center the text
|
||||||
|
local text="$1"
|
||||||
|
local line_length=40
|
||||||
|
local text_length=${#text}
|
||||||
|
local padding=$(( (line_length - text_length) / 2 ))
|
||||||
|
printf "%*s%s%*s\n" $padding "" "$text" $padding ""
|
||||||
|
echo "----------------------------------------"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function die() {
|
||||||
|
title "error: $1"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
cat << EOF
|
||||||
|
|
||||||
|
|
||||||
|
EOF
|
||||||
|
|
||||||
|
title "TESTING ${HOSTURL}"
|
||||||
|
|
||||||
|
cat << EOF
|
||||||
|
|
||||||
|
|
||||||
|
EOF
|
||||||
|
|
||||||
|
|
||||||
|
function test0() {
|
||||||
|
# Test 0: Verify the script is running
|
||||||
|
title "0: Verify the server is running"
|
||||||
|
|
||||||
|
# test jq is installed
|
||||||
|
if ! command -v jq &> /dev/null; then
|
||||||
|
echo "jq could not be found"
|
||||||
|
echo "sudo apt-get install jq"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# read sos_config.json
|
||||||
|
CONFIG_PATH="${SCRIPT_DIR}/sos_config.json"
|
||||||
|
if [ ! -f "${CONFIG_PATH}" ]; then
|
||||||
|
echo "config file not found at ${CONFIG_PATH}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
CONFIG=$(cat "${CONFIG_PATH}")
|
||||||
|
|
||||||
|
# Use plaintext tokens from environment (set by generate_test_config.sh or manually)
|
||||||
|
# The config file should only contain hashed tokens
|
||||||
|
if [ -n "${TEST_TOKEN1:-}" ] && [ -n "${TEST_TOKEN2:-}" ] && [ -n "${TEST_TOKEN3:-}" ]; then
|
||||||
|
# Use environment tokens (plaintext)
|
||||||
|
TOKENS=("$TEST_TOKEN1" "$TEST_TOKEN2" "$TEST_TOKEN3")
|
||||||
|
TOKEN_COUNT=${#TOKENS[@]}
|
||||||
|
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
||||||
|
WRITE_TOKEN="${TOKENS[$RANDOM_INDEX]}"
|
||||||
|
echo "Using plaintext token index $RANDOM_INDEX from environment"
|
||||||
|
else
|
||||||
|
# For static test configs, use hardcoded plaintext tokens
|
||||||
|
# These correspond to the hashes in the static sos_config.json
|
||||||
|
TOKENS=("t570H7DmK2VBfCwUmtFaUXyzVklL90E1" "U3x9V39Y7rjXdRK0oxZsCz5lD6jFFDtm" "UhtchhGDEGXlJ37GumimFtPe0imjAvak")
|
||||||
|
TOKEN_COUNT=${#TOKENS[@]}
|
||||||
|
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
||||||
|
WRITE_TOKEN="${TOKENS[$RANDOM_INDEX]}"
|
||||||
|
echo "Using hardcoded plaintext token index $RANDOM_INDEX (for static config)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
BASE_TAG="autotest"
|
||||||
|
|
||||||
|
if ! command -v wget &> /dev/null; then
|
||||||
|
echo "wget could not be found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# test if server is running
|
||||||
|
if ! wget -qO- "${HOSTURL}/status" | jq -r '.result' | grep -q 'success'; then
|
||||||
|
wget -O - "${HOSTURL}/status"
|
||||||
|
die "server is not running on ${HOSTURL}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Simple Object Storage server is running at ${HOSTURL}"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function test1() {
|
||||||
|
# test every action in the README.md file, leaving the system in the same state it was found
|
||||||
|
# and print the output of each action
|
||||||
|
|
||||||
|
# Construct metadata JSON
|
||||||
|
METADATA_JSON=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"labeltags": ["${BASE_TAG}:test1"],
|
||||||
|
"description": "Example file",
|
||||||
|
"custom_field": "custom value"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
# Test 1: Verify extra metadata fields are preserved
|
||||||
|
title "1: Upload script to ${BASE_TAG}:test1"
|
||||||
|
|
||||||
|
TEST_FILE="${SCRIPT_DIR}/${SCRIPT_NAME}"
|
||||||
|
|
||||||
|
# upload this script as an object
|
||||||
|
echo "uploading ${TEST_FILE} to ${BASE_TAG}:test1"
|
||||||
|
UPLOAD_RESPONSE=$(curl -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "file=@${TEST_FILE}" \
|
||||||
|
-F "metadata=${METADATA_JSON}" \
|
||||||
|
"${HOSTURL}/upload")
|
||||||
|
|
||||||
|
echo "upload response: ${UPLOAD_RESPONSE}"
|
||||||
|
|
||||||
|
OBJECT_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
|
||||||
|
}
|
||||||
|
|
||||||
|
function test2() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "2: Check sos hash matches"
|
||||||
|
|
||||||
|
# check the hash matches.
|
||||||
|
CMD="${HOSTURL}/hash/${BASE_TAG}:test1"
|
||||||
|
echo "checking hash via ${CMD}"
|
||||||
|
CHECK_HASH=$(curl --fail-with-body -s "${CMD}" | jq -r '.hash')
|
||||||
|
[ "${OBJECT_HASH}" = "${CHECK_HASH}" ] || die "hash does not match: ${OBJECT_HASH} != ${CHECK_HASH}"
|
||||||
|
}
|
||||||
|
|
||||||
|
function test3() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "3: Check MD5Sum matches, for both label:tag and hash downloads"
|
||||||
|
|
||||||
|
# get md5sum of this file
|
||||||
|
MD5SUM=$(md5sum "${TEST_FILE}" | awk '{print $1}')
|
||||||
|
echo "md5sum of ${TEST_FILE} is ${MD5SUM}"
|
||||||
|
|
||||||
|
# download the object
|
||||||
|
DOWNLOAD_FILE="${TEST_FILE}.downloaded"
|
||||||
|
echo "downloading ${OBJECT_HASH} to ${DOWNLOAD_FILE}1"
|
||||||
|
if ! curl --fail-with-body -s "${HOSTURL}/${OBJECT_HASH}" -o "${DOWNLOAD_FILE}1"; then
|
||||||
|
die "Command failed: curl -s ${HOSTURL}/${OBJECT_HASH} -o ${DOWNLOAD_FILE}1"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# download the object again via the label:tag
|
||||||
|
echo "downloading ${BASE_TAG}:test1 to ${DOWNLOAD_FILE}2"
|
||||||
|
if ! curl --fail-with-body -s "${HOSTURL}/${BASE_TAG}:test1" -o "${DOWNLOAD_FILE}2"; then
|
||||||
|
die "failed to download ${BASE_TAG}:test1"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# get md5sum of the downloaded file
|
||||||
|
MD5SUM_DOWNLOADED1=$(md5sum "${DOWNLOAD_FILE}1" | awk '{print $1}')
|
||||||
|
echo "md5sum of ${DOWNLOAD_FILE}1 is ${MD5SUM_DOWNLOADED1}"
|
||||||
|
[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED1}" ] && die "md5sums do not match"
|
||||||
|
MD5SUM_DOWNLOADED2=$(md5sum "${DOWNLOAD_FILE}2" | awk '{print $1}')
|
||||||
|
[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED2}" ] && die "md5sums do not match"
|
||||||
|
|
||||||
|
rm "${DOWNLOAD_FILE}1"
|
||||||
|
rm "${DOWNLOAD_FILE}2"
|
||||||
|
}
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
function test3b() {
|
||||||
|
LABELTAG="finangle-wrangler:fuzzy_test"
|
||||||
|
title "3b: Upload script to ${LABELTAG}"
|
||||||
|
|
||||||
|
METADATA_JSON=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"labeltags": ["${LABELTAG}"],
|
||||||
|
"description": "Test File for fuzzy test",
|
||||||
|
"custom_field": "Upload2!!!111 Yay. This is a test file for the fuzzy test."
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
# upload this script as an object
|
||||||
|
echo "uploading ${TEST_FILE} to ${LABELTAG}"
|
||||||
|
UPLOAD_RESPONSE=$(curl -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "file=@${TEST_FILE}" \
|
||||||
|
-F "metadata=${METADATA_JSON}" \
|
||||||
|
"${HOSTURL}/upload")
|
||||||
|
|
||||||
|
echo "upload response: ${UPLOAD_RESPONSE}"
|
||||||
|
|
||||||
|
OBJECT_HASH2=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
|
||||||
|
|
||||||
|
if [ ! "${OBJECT_HASH}" == "${OBJECT_HASH2}" ]; then
|
||||||
|
die "object hash does not match: ${OBJECT_HASH} != ${OBJECT_HASH2}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# test the description and custom field matches our new ones.
|
||||||
|
METADATA_RESPONSE=$(curl "${HOSTURL}/meta/${OBJECT_HASH2}")
|
||||||
|
|
||||||
|
echo "metadata response: ${METADATA_RESPONSE}"
|
||||||
|
|
||||||
|
if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.description' | grep -q "Test File for fuzzy test"; then
|
||||||
|
die "description does not match"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.custom_field' | grep -q "Upload2!!!111 Yay. This is a test file for the fuzzy test."; then
|
||||||
|
die "custom field does not match"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# download via the label:tag
|
||||||
|
echo "downloading ${LABELTAG} to ${DOWNLOAD_FILE}3"
|
||||||
|
if ! curl --fail-with-body -s "${HOSTURL}/${LABELTAG}" -o "${DOWNLOAD_FILE}3"; then
|
||||||
|
die "failed to download ${LABELTAG}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# get md5sum of the downloaded file
|
||||||
|
MD5SUM_DOWNLOADED3=$(md5sum "${DOWNLOAD_FILE}3" | awk '{print $1}')
|
||||||
|
echo "md5sum of ${DOWNLOAD_FILE}3 is ${MD5SUM_DOWNLOADED3}"
|
||||||
|
[ "${MD5SUM}" != "${MD5SUM_DOWNLOADED3}" ] && die "md5sums do not match"
|
||||||
|
|
||||||
|
rm "${DOWNLOAD_FILE}3"
|
||||||
|
}
|
||||||
|
|
||||||
|
function test4() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "4: Delete the object"
|
||||||
|
|
||||||
|
# delete the object
|
||||||
|
echo "deleting ${OBJECT_HASH}"
|
||||||
|
if ! curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${OBJECT_HASH}" | jq -r '.result' | grep -q 'success'; then
|
||||||
|
die "failed to delete ${OBJECT_HASH}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# verify the object is deleted
|
||||||
|
echo "verifying ${OBJECT_HASH} is deleted"
|
||||||
|
DELETE_RESPONSE=$(curl -s "${HOSTURL}/${OBJECT_HASH}")
|
||||||
|
if ! echo "${DELETE_RESPONSE}" | jq -r '.result' | grep -q 'error'; then
|
||||||
|
die "failed to verify ${OBJECT_HASH} is deleted"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function test5() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "5: Test metadata field preservation"
|
||||||
|
|
||||||
|
# Upload with extra metadata fields
|
||||||
|
EXTRA_METADATA_JSON=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"labeltags": ["${BASE_TAG}:test2"],
|
||||||
|
"description": "Test with extra fields",
|
||||||
|
"custom_field": "custom value",
|
||||||
|
"extra_field1": "value1",
|
||||||
|
"extra_field2": "value2"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
echo "uploading with extra metadata fields"
|
||||||
|
UPLOAD_RESPONSE=$(curl -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" \
|
||||||
|
-F "metadata=${EXTRA_METADATA_JSON}" \
|
||||||
|
"${HOSTURL}/upload")
|
||||||
|
UPLOAD_EXIT_CODE=$?
|
||||||
|
echo "Upload response: ${UPLOAD_RESPONSE}"
|
||||||
|
echo "Upload exit code: ${UPLOAD_EXIT_CODE}"
|
||||||
|
|
||||||
|
if [ ${UPLOAD_EXIT_CODE} -ne 0 ]; then
|
||||||
|
die "Failed to upload object: curl returned ${UPLOAD_EXIT_CODE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! echo "${UPLOAD_RESPONSE}" | jq -e . >/dev/null 2>&1; then
|
||||||
|
die "Invalid JSON response from upload: ${UPLOAD_RESPONSE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
OBJECT_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
|
||||||
|
echo "Received hash: ${OBJECT_HASH}"
|
||||||
|
|
||||||
|
# Verify the object exists
|
||||||
|
echo "Verifying object exists..."
|
||||||
|
EXISTS_RESPONSE=$(curl -s "${HOSTURL}/exists/${BASE_TAG}:test2")
|
||||||
|
echo "Exists response: ${EXISTS_RESPONSE}"
|
||||||
|
|
||||||
|
# Get metadata and verify extra fields are preserved
|
||||||
|
echo "Retrieving metadata for ${BASE_TAG}:test2"
|
||||||
|
METADATA_RESPONSE=$(curl -s "${HOSTURL}/meta/${BASE_TAG}:test2")
|
||||||
|
CURL_EXIT_CODE=$?
|
||||||
|
echo "Curl exit code: ${CURL_EXIT_CODE}"
|
||||||
|
echo "Full metadata response: ${METADATA_RESPONSE}"
|
||||||
|
|
||||||
|
if [ ${CURL_EXIT_CODE} -ne 0 ]; then
|
||||||
|
die "Failed to retrieve metadata: curl returned ${CURL_EXIT_CODE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! echo "${METADATA_RESPONSE}" | jq -e . >/dev/null 2>&1; then
|
||||||
|
die "Invalid JSON response: ${METADATA_RESPONSE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.extra_field1' | grep -q 'value1'; then
|
||||||
|
die "extra_field1 not preserved in metadata"
|
||||||
|
fi
|
||||||
|
if ! echo "${METADATA_RESPONSE}" | jq -r '.metadata.extra_field2' | grep -q 'value2'; then
|
||||||
|
die "extra_field2 not preserved in metadata"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function test6() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "6: Test tag versioning behavior"
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${OBJECT_HASH}" > /dev/null
|
||||||
|
|
||||||
|
# Upload first version with tag 'latest'
|
||||||
|
FIRST_METADATA_JSON=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"labeltags": ["${BASE_TAG}:latest", "${BASE_TAG}:v1"],
|
||||||
|
"description": "First version"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
echo "uploading first version with tag 'latest'"
|
||||||
|
UPLOAD_RESPONSE=$(curl -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" \
|
||||||
|
-F "metadata=${FIRST_METADATA_JSON}" \
|
||||||
|
"${HOSTURL}/upload")
|
||||||
|
|
||||||
|
FIRST_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
|
||||||
|
|
||||||
|
# check downloading without tag!
|
||||||
|
echo "downloading ${BASE_TAG} to ${DOWNLOAD_FILE}4"
|
||||||
|
if ! curl -s --fail-with-body "${HOSTURL}/${BASE_TAG}" -o "${DOWNLOAD_FILE}4"; then
|
||||||
|
die "failed to download ${BASE_TAG}"
|
||||||
|
fi
|
||||||
|
# get md5sum of the downloaded file
|
||||||
|
MD5SUM_DOWNLOADED4=$(md5sum "${DOWNLOAD_FILE}4" | awk '{print $1}')
|
||||||
|
MD5SUM_ORIGINAL=$(md5sum "${SCRIPT_DIR}/${SCRIPT_NAME}" | awk '{print $1}')
|
||||||
|
[ "${MD5SUM_ORIGINAL}" != "${MD5SUM_DOWNLOADED4}" ] && die "md5sums do not match"
|
||||||
|
|
||||||
|
# Store first version's metadata before uploading second version
|
||||||
|
FIRST_METADATA=$(curl -s "${HOSTURL}/meta/${FIRST_HASH}")
|
||||||
|
echo "First version metadata response: ${FIRST_METADATA}"
|
||||||
|
if ! echo "${FIRST_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:v1"; then
|
||||||
|
die "First version does not have v1 tag"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Upload second version with same tag 'latest'
|
||||||
|
SECOND_METADATA_JSON=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"labeltags": ["${BASE_TAG}:latest", "${BASE_TAG}:v2"],
|
||||||
|
"description": "Second version"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
echo "uploading second version with tag 'latest'"
|
||||||
|
UPLOAD_RESPONSE=$(curl -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/test_1GB_file_upload.sh" \
|
||||||
|
-F "metadata=${SECOND_METADATA_JSON}" \
|
||||||
|
"${HOSTURL}/upload")
|
||||||
|
|
||||||
|
SECOND_HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
|
||||||
|
|
||||||
|
# Verify first version's metadata still has v1 tag
|
||||||
|
FIRST_METADATA=$(curl -s "${HOSTURL}/meta/${FIRST_HASH}")
|
||||||
|
echo "First version metadata response: ${FIRST_METADATA}"
|
||||||
|
if ! echo "${FIRST_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:v1"; then
|
||||||
|
die "First version does not have v1 tag"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify first version's metadata no longer has the latest tag
|
||||||
|
if echo "${FIRST_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:latest"; then
|
||||||
|
die "First version still has latest tag"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify second version has the correct tags: v2 and latest
|
||||||
|
SECOND_METADATA=$(curl -s "${HOSTURL}/meta/${SECOND_HASH}")
|
||||||
|
echo "Second version metadata response: ${SECOND_METADATA}"
|
||||||
|
if ! echo "${SECOND_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:latest"; then
|
||||||
|
die "Second version does not have latest tag"
|
||||||
|
fi
|
||||||
|
if ! echo "${SECOND_METADATA}" | jq -r '.metadata.labeltags[]' | grep -q "${BASE_TAG}:v2"; then
|
||||||
|
die "Second version does not have v2 tag"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function test7() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "7: Test rate limiting behavior"
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${FIRST_HASH}" > /dev/null
|
||||||
|
curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${SECOND_HASH}" > /dev/null
|
||||||
|
|
||||||
|
# Use a known invalid token
|
||||||
|
INVALID_TOKEN="invalid_token"
|
||||||
|
|
||||||
|
# Make 5 requests with an invalid token
|
||||||
|
for i in {1..5}; do
|
||||||
|
echo "Attempt $i with invalid token"
|
||||||
|
RESPONSE=$(curl -s -X PUT -H "Authorization: Bearer ${INVALID_TOKEN}" -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" -F "metadata={\"labeltags\":[\"test:latest\"]}" "${HOSTURL}/upload")
|
||||||
|
echo "Response: ${RESPONSE}"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Now try a request with a valid token - should be rate limited
|
||||||
|
echo "Attempting request with valid token (should be rate limited)"
|
||||||
|
RESPONSE=$(curl -s -X PUT -H "Authorization: Bearer ${WRITE_TOKEN}" -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" -F "metadata={\"labeltags\":[\"test:latest\"]}" "${HOSTURL}/upload")
|
||||||
|
if ! echo "${RESPONSE}" | jq -r '.error' | grep -q "Too many authentication attempts"; then
|
||||||
|
die "Expected rate limit error, got: ${RESPONSE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Sleeping for 3 seconds to allow rate limit to reset"
|
||||||
|
echo "(Normally 5 mins, but we set to 2s for this test!)"
|
||||||
|
|
||||||
|
sleep 3
|
||||||
|
# Now try a request with a valid token - should be rate limited
|
||||||
|
echo "Attempting request with valid token (should NOT be rate limited)"
|
||||||
|
RESPONSE=$(curl -s -X PUT -H "Authorization: Bearer ${WRITE_TOKEN}" -F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" -F "metadata={\"labeltags\":[\"test:latest\"]}" "${HOSTURL}/upload")
|
||||||
|
}
|
||||||
|
|
||||||
|
function test8() {
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
title "8: Test update endpoint"
|
||||||
|
|
||||||
|
# First upload a test file
|
||||||
|
UPLOAD_RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/${SCRIPT_NAME}" \
|
||||||
|
-F "metadata={\"labeltags\":[\"test:update\"]}" \
|
||||||
|
"${HOSTURL}/upload")
|
||||||
|
|
||||||
|
HASH=$(echo "${UPLOAD_RESPONSE}" | jq -r '.hash')
|
||||||
|
if [ -z "${HASH}" ] || [ "${HASH}" = "null" ]; then
|
||||||
|
die "Failed to upload test file for update test"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test 8.1: Update metadata using JSON
|
||||||
|
UPDATED_METADATA='{"labeltags":["test:updated", "version:1.0"], "new_field":"test_value"}'
|
||||||
|
UPDATE_RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "{\"hash\":\"${HASH}\", \"metadata\":${UPDATED_METADATA}}" \
|
||||||
|
"${HOSTURL}/update")
|
||||||
|
|
||||||
|
echo "Update response: ${UPDATE_RESPONSE}"
|
||||||
|
if [ "$(echo "${UPDATE_RESPONSE}" | jq -r '.result')" != "success" ]; then
|
||||||
|
die "Failed to update metadata via JSON"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify the update
|
||||||
|
UPDATED_METADATA_RESPONSE=$(curl -s "${HOSTURL}/meta/${HASH}")
|
||||||
|
if ! echo "${UPDATED_METADATA_RESPONSE}" | jq -e '.metadata.new_field == "test_value"' | grep -q true; then
|
||||||
|
die "Metadata was not updated correctly via JSON"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test 8.2: Update metadata using form data
|
||||||
|
# Update with form data using raw JSON string
|
||||||
|
UPDATE_FORM_RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-F "hash=${HASH}" \
|
||||||
|
-F 'metadata={"labeltags":["test:form_updated"], "form_field":"form_value"}' \
|
||||||
|
"${HOSTURL}/update")
|
||||||
|
|
||||||
|
echo "Form update response: ${UPDATE_FORM_RESPONSE}"
|
||||||
|
if [ "$(echo "${UPDATE_FORM_RESPONSE}" | jq -r '.result')" != "success" ]; then
|
||||||
|
die "Failed to update metadata via form data"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify the form update
|
||||||
|
UPDATED_FORM_METADATA_RESPONSE=$(curl -s "${HOSTURL}/meta/${HASH}")
|
||||||
|
if ! echo "${UPDATED_FORM_METADATA_RESPONSE}" | jq -e '.metadata.form_field == "form_value"' | grep -q true; then
|
||||||
|
die "Metadata was not updated correctly via form data"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test 8.3: Test error cases
|
||||||
|
# Missing hash
|
||||||
|
MISSING_HASH_RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-d '{"metadata":{}}' \
|
||||||
|
"${HOSTURL}/update")
|
||||||
|
if [ "$(echo "${MISSING_HASH_RESPONSE}" | jq -r '.error')" != "Missing 'hash' or 'metadata' field in request body" ]; then
|
||||||
|
die "Expected error for missing hash, got: ${MISSING_HASH_RESPONSE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Missing metadata
|
||||||
|
MISSING_METADATA_RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${WRITE_TOKEN}" \
|
||||||
|
-d "{\"hash\":\"${HASH}\"}" \
|
||||||
|
"${HOSTURL}/update")
|
||||||
|
if [ "$(echo "${MISSING_METADATA_RESPONSE}" | jq -r '.error')" != "Missing 'hash' or 'metadata' field in request body" ]; then
|
||||||
|
die "Expected error for missing metadata, got: ${MISSING_METADATA_RESPONSE}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify that labeltags were actually updated
|
||||||
|
echo "Checking if test:updated tag exists..."
|
||||||
|
UPDATED_HASH=$(curl -s "${HOSTURL}/hash/test:updated" | jq -r '.hash')
|
||||||
|
if [ "${UPDATED_HASH}" = "${HASH}" ]; then
|
||||||
|
echo "✓ test:updated tag correctly points to hash ${HASH}"
|
||||||
|
else
|
||||||
|
echo "✗ test:updated tag not found or points to wrong hash: ${UPDATED_HASH}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Checking if test:form_updated tag exists..."
|
||||||
|
FORM_UPDATED_HASH=$(curl -s "${HOSTURL}/hash/test:form_updated" | jq -r '.hash')
|
||||||
|
if [ "${FORM_UPDATED_HASH}" = "${HASH}" ]; then
|
||||||
|
echo "✓ test:form_updated tag correctly points to hash ${HASH}"
|
||||||
|
else
|
||||||
|
echo "✗ test:form_updated tag not found or points to wrong hash: ${FORM_UPDATED_HASH}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean up using the correct current labeltag
|
||||||
|
echo "Cleaning up test object with hash: ${HASH}"
|
||||||
|
RESPONSE=$(curl -s -H "Authorization: Bearer ${WRITE_TOKEN}" "${HOSTURL}/deleteobject?hash=${HASH}")
|
||||||
|
echo "Delete response: ${RESPONSE}"
|
||||||
|
|
||||||
|
if echo "${RESPONSE}" | jq -r '.result' | grep -q 'success'; then
|
||||||
|
echo "Successfully deleted test object"
|
||||||
|
else
|
||||||
|
echo "Failed to delete test object: ${RESPONSE}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
function test9() {
|
||||||
|
title "9: Testing 1GB File upload"
|
||||||
|
source test_1GB_file_upload.sh
|
||||||
|
}
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
test0
|
||||||
|
test1
|
||||||
|
test2
|
||||||
|
test3
|
||||||
|
test3b
|
||||||
|
test4
|
||||||
|
test5
|
||||||
|
test6
|
||||||
|
test7
|
||||||
|
test8
|
||||||
|
test9
|
||||||
|
|
||||||
|
title "ALL TESTS PASSED"
|
@@ -22,11 +22,24 @@ echo "Original hash: $ORIGINAL_HASH"
|
|||||||
HOST=$(echo "$CONFIG" | jq -r '.host')
|
HOST=$(echo "$CONFIG" | jq -r '.host')
|
||||||
PORT=$(echo "$CONFIG" | jq -r '.port')
|
PORT=$(echo "$CONFIG" | jq -r '.port')
|
||||||
|
|
||||||
# randomly select one of the available write tokens from the config
|
# Use plaintext tokens from environment (set by generate_test_config.sh or manually)
|
||||||
TOKEN_COUNT=$(echo "$CONFIG" | jq -r '.write_tokens | length')
|
# The config file should only contain hashed tokens
|
||||||
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
if [ -n "${TEST_TOKEN1:-}" ] && [ -n "${TEST_TOKEN2:-}" ] && [ -n "${TEST_TOKEN3:-}" ]; then
|
||||||
WRITE_TOKEN=$(echo "$CONFIG" | jq -r ".write_tokens[$RANDOM_INDEX]")
|
# Use environment tokens (plaintext)
|
||||||
echo "Using token index $RANDOM_INDEX out of $TOKEN_COUNT available tokens"
|
TOKENS=("$TEST_TOKEN1" "$TEST_TOKEN2" "$TEST_TOKEN3")
|
||||||
|
TOKEN_COUNT=${#TOKENS[@]}
|
||||||
|
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
||||||
|
WRITE_TOKEN="${TOKENS[$RANDOM_INDEX]}"
|
||||||
|
echo "Using plaintext token index $RANDOM_INDEX from environment"
|
||||||
|
else
|
||||||
|
# For static test configs, use hardcoded plaintext tokens
|
||||||
|
# These correspond to the hashes in the static sos_config.json
|
||||||
|
TOKENS=("t570H7DmK2VBfCwUmtFaUXyzVklL90E1" "U3x9V39Y7rjXdRK0oxZsCz5lD6jFFDtm" "UhtchhGDEGXlJ37GumimFtPe0imjAvak")
|
||||||
|
TOKEN_COUNT=${#TOKENS[@]}
|
||||||
|
RANDOM_INDEX=$((RANDOM % TOKEN_COUNT))
|
||||||
|
WRITE_TOKEN="${TOKENS[$RANDOM_INDEX]}"
|
||||||
|
echo "Using hardcoded plaintext token index $RANDOM_INDEX (for static config)"
|
||||||
|
fi
|
||||||
|
|
||||||
# Upload the file
|
# Upload the file
|
||||||
echo "Uploading file..."
|
echo "Uploading file..."
|
||||||
|
94
testing/test_auth.sh
Executable file
94
testing/test_auth.sh
Executable file
@@ -0,0 +1,94 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Test authentication with hashed tokens
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
|
||||||
|
PARENT_DIR=$(cd "${SCRIPT_DIR}/.." && pwd)
|
||||||
|
SERVER_PID=""
|
||||||
|
|
||||||
|
# Cleanup function
|
||||||
|
function cleanup() {
|
||||||
|
if [ -n "$SERVER_PID" ]; then
|
||||||
|
echo ""
|
||||||
|
echo "Cleaning up: Stopping server (PID: $SERVER_PID)..."
|
||||||
|
kill $SERVER_PID 2>/dev/null || true
|
||||||
|
wait $SERVER_PID 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Set up trap to ensure cleanup on exit
|
||||||
|
trap cleanup EXIT INT TERM
|
||||||
|
|
||||||
|
echo "Testing token authentication with bcrypt hashes..."
|
||||||
|
|
||||||
|
# Generate test configuration with hashed tokens
|
||||||
|
echo "1. Generating config with bcrypt-hashed tokens..."
|
||||||
|
source ${SCRIPT_DIR}/generate_test_config.sh
|
||||||
|
|
||||||
|
# Copy config to where server expects it
|
||||||
|
mkdir -p ~/.config/simple_object_storage/
|
||||||
|
cp ${SCRIPT_DIR}/sos_config.json ~/.config/simple_object_storage/sos_config.json
|
||||||
|
|
||||||
|
# Start server if not running
|
||||||
|
if ! curl -s http://127.0.0.1:7703/status > /dev/null 2>&1; then
|
||||||
|
echo "2. Starting server..."
|
||||||
|
${PARENT_DIR}/output/simple-object-server > /tmp/server.log 2>&1 &
|
||||||
|
SERVER_PID=$!
|
||||||
|
sleep 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test with plaintext token (server has hashed version)
|
||||||
|
echo "3. Testing authentication with hashed tokens..."
|
||||||
|
RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${TEST_TOKEN1}" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/test.sh" \
|
||||||
|
-F 'metadata={"labeltags":["test:auth1"]}' \
|
||||||
|
"http://127.0.0.1:7703/upload")
|
||||||
|
|
||||||
|
if echo "$RESPONSE" | grep -q '"result":"success"'; then
|
||||||
|
echo "✓ Token 1 authentication successful"
|
||||||
|
HASH=$(echo "$RESPONSE" | jq -r '.hash')
|
||||||
|
# Clean up
|
||||||
|
curl -s -H "Authorization: Bearer ${TEST_TOKEN1}" \
|
||||||
|
"http://127.0.0.1:7703/deleteobject?hash=${HASH}" > /dev/null
|
||||||
|
else
|
||||||
|
echo "✗ Token 1 authentication failed: $RESPONSE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test with second token
|
||||||
|
echo "4. Testing with second token..."
|
||||||
|
RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${TEST_TOKEN2}" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/test.sh" \
|
||||||
|
-F 'metadata={"labeltags":["test:auth2"]}' \
|
||||||
|
"http://127.0.0.1:7703/upload")
|
||||||
|
|
||||||
|
if echo "$RESPONSE" | grep -q '"result":"success"'; then
|
||||||
|
echo "✓ Token 2 authentication successful"
|
||||||
|
HASH=$(echo "$RESPONSE" | jq -r '.hash')
|
||||||
|
# Clean up
|
||||||
|
curl -s -H "Authorization: Bearer ${TEST_TOKEN2}" \
|
||||||
|
"http://127.0.0.1:7703/deleteobject?hash=${HASH}" > /dev/null
|
||||||
|
else
|
||||||
|
echo "✗ Token 2 authentication failed: $RESPONSE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test with invalid token
|
||||||
|
echo "5. Testing with invalid token (should fail)..."
|
||||||
|
RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer invalid_token_12345" \
|
||||||
|
-F "file=@${SCRIPT_DIR}/test.sh" \
|
||||||
|
-F 'metadata={"labeltags":["test:auth3"]}' \
|
||||||
|
"http://127.0.0.1:7703/upload")
|
||||||
|
|
||||||
|
if echo "$RESPONSE" | grep -q '"error"'; then
|
||||||
|
echo "✓ Invalid token correctly rejected"
|
||||||
|
else
|
||||||
|
echo "✗ Invalid token was incorrectly accepted: $RESPONSE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Authentication tests complete!"
|
||||||
|
# Cleanup will be handled by the trap
|
BIN
testing/test_bcrypt
Executable file
BIN
testing/test_bcrypt
Executable file
Binary file not shown.
21
testing/test_bcrypt.cpp
Normal file
21
testing/test_bcrypt.cpp
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
#include <iostream>
|
||||||
|
#include "../src/bcrypt.hpp"
|
||||||
|
|
||||||
|
using namespace simple_object_storage;
|
||||||
|
|
||||||
|
int main() {
|
||||||
|
std::string token = "test123";
|
||||||
|
std::string hash = BCrypt::hashPassword(token, 10);
|
||||||
|
|
||||||
|
std::cout << "Token: " << token << std::endl;
|
||||||
|
std::cout << "Hash: " << hash << std::endl;
|
||||||
|
|
||||||
|
bool valid = BCrypt::verifyPassword(token, hash);
|
||||||
|
std::cout << "Verification: " << (valid ? "VALID" : "INVALID") << std::endl;
|
||||||
|
|
||||||
|
// Test with wrong password
|
||||||
|
bool invalid = BCrypt::verifyPassword("wrong", hash);
|
||||||
|
std::cout << "Wrong password: " << (invalid ? "VALID" : "INVALID") << std::endl;
|
||||||
|
|
||||||
|
return valid ? 0 : 1;
|
||||||
|
}
|
85
testing/test_simple.sh
Executable file
85
testing/test_simple.sh
Executable file
@@ -0,0 +1,85 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Simple test to verify authentication works with hashed tokens
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SERVER_PID=""
|
||||||
|
|
||||||
|
# Cleanup function
|
||||||
|
function cleanup() {
|
||||||
|
if [ -n "$SERVER_PID" ]; then
|
||||||
|
echo ""
|
||||||
|
echo "Cleaning up: Stopping server (PID: $SERVER_PID)..."
|
||||||
|
kill $SERVER_PID 2>/dev/null || true
|
||||||
|
wait $SERVER_PID 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Set up trap to ensure cleanup on exit
|
||||||
|
trap cleanup EXIT INT TERM
|
||||||
|
|
||||||
|
echo "Simple authentication test with bcrypt hashes"
|
||||||
|
|
||||||
|
TEST_TOKEN="test123"
|
||||||
|
HASH=$(../output/hash_token -c 10 -q "${TEST_TOKEN}")
|
||||||
|
echo "Generated hash for '${TEST_TOKEN}': ${HASH:0:20}..."
|
||||||
|
|
||||||
|
# Create config with hashed token
|
||||||
|
cat > ~/.config/simple_object_storage/sos_config.json << EOF
|
||||||
|
{
|
||||||
|
"write_tokens": ["$HASH"],
|
||||||
|
"rate_limiting": {
|
||||||
|
"auth_rate_limit": 5,
|
||||||
|
"auth_window_seconds": 2
|
||||||
|
},
|
||||||
|
"port": 7703,
|
||||||
|
"host": "127.0.0.1"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Make sure no server is running
|
||||||
|
fuser -k 7703/tcp 2>/dev/null || true
|
||||||
|
sleep 1
|
||||||
|
|
||||||
|
# Start server with hashed config
|
||||||
|
echo "Starting server with hashed token..."
|
||||||
|
../output/simple-object-server &
|
||||||
|
SERVER_PID=$!
|
||||||
|
sleep 2
|
||||||
|
|
||||||
|
# Test with plaintext token (server has hash)
|
||||||
|
echo "Testing with plaintext token against hashed config..."
|
||||||
|
RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer ${TEST_TOKEN}" \
|
||||||
|
-F "file=@test.sh" \
|
||||||
|
-F 'metadata={"labeltags":["test:hashed"]}' \
|
||||||
|
"http://127.0.0.1:7703/upload")
|
||||||
|
|
||||||
|
if echo "$RESPONSE" | grep -q '"result":"success"'; then
|
||||||
|
echo "✓ Authentication successful"
|
||||||
|
HASH_RETURNED=$(echo "$RESPONSE" | jq -r '.hash')
|
||||||
|
# Clean up
|
||||||
|
curl -s -H "Authorization: Bearer ${TEST_TOKEN}" \
|
||||||
|
"http://127.0.0.1:7703/deleteobject?hash=${HASH_RETURNED}" > /dev/null
|
||||||
|
else
|
||||||
|
echo "✗ Authentication failed: $RESPONSE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Test with wrong token
|
||||||
|
echo "Testing with wrong token (should fail)..."
|
||||||
|
RESPONSE=$(curl -s -X PUT \
|
||||||
|
-H "Authorization: Bearer wrongtoken" \
|
||||||
|
-F "file=@test.sh" \
|
||||||
|
-F 'metadata={"labeltags":["test:wrong"]}' \
|
||||||
|
"http://127.0.0.1:7703/upload")
|
||||||
|
|
||||||
|
if echo "$RESPONSE" | grep -q '"error"'; then
|
||||||
|
echo "✓ Invalid token correctly rejected"
|
||||||
|
else
|
||||||
|
echo "✗ Invalid token was incorrectly accepted: $RESPONSE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Test complete!"
|
||||||
|
# Cleanup will be handled by the trap
|
Reference in New Issue
Block a user