:-'Generic Commit'

This commit is contained in:
Your Name 2025-05-28 22:26:47 +12:00
parent 3bc0c65c40
commit 03ce71ecbe
8 changed files with 191 additions and 295 deletions

17
.vscode/c_cpp_properties.json vendored Normal file
View File

@@ -0,0 +1,17 @@
{
"configurations": [
{
"name": "Linux",
"intelliSenseMode": "linux-gcc-x64",
"compilerPath": "/usr/bin/g++",
"cStandard": "c17",
"cppStandard": "c++23",
"includePath": [
"${workspaceFolder}/dropshell-tool/src",
"${workspaceFolder}/dropshell-tool/src/autogen"
]
// ... other settings ...
}
],
"version": 4
}

View File

@@ -10,6 +10,24 @@ _die() {
exit 1
}
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Function to print status messages
print_status() {
echo -e "${GREEN}[*] $1${NC}"
}
print_error() {
echo -e "${RED}[!] $1${NC}"
}
print_warning() {
echo -e "${YELLOW}[!] $1${NC}"
}
@@ -17,9 +35,65 @@ _die() {
# Headers on Host (Native)
#----------------------------------------------------------------------------------------------------------
function install_headers() {
sudo apt install -y nlohmann-json3-dev wget curl cmake ninja-build mold
# Function to check if a package is installed
function is_package_installed() {
dpkg -l "$1" 2>/dev/null | grep -q "^ii"
}
function install_packages() {
local PACKAGES
local HAVE_UPDATED=0
# Detect distribution
if [ -f /etc/os-release ]; then
. /etc/os-release
OS=$NAME
VER=$VERSION_ID
else
print_error "Could not detect distribution"
exit 1
fi
print_status "Detected OS: $OS $VER"
# Define packages based on distribution
case $OS in
"Ubuntu"|"Debian GNU/Linux")
# Common packages for both Ubuntu and Debian
PACKAGES="nlohmann-json3-dev wget curl cmake ninja-build mold"
;;
*)
print_error "Unsupported distribution: $OS"
exit 1
;;
esac
# Install missing packages
print_status "Checking and installing required packages..."
for pkg in $PACKAGES; do
if ! is_package_installed "$pkg"; then
print_status "Installing $pkg..."
# Update package lists
if [ "$HAVE_UPDATED" -eq 0 ]; then
print_status "Updating package lists..."
sudo apt-get update
HAVE_UPDATED=1
fi
if ! sudo apt-get install -y "$pkg"; then
print_error "Failed to install $pkg"
exit 1
fi
else
print_status "$pkg is already installed"
fi
done
}
function install_headers() {
# put libassert headers on the host.
if [ ! -f /usr/local/lib/libassert.a ]; then
git clone https://github.com/jeremy-rifkin/libassert.git
@@ -216,11 +290,15 @@ function installmusl() {
#----------------------------------------------------------------------------------------------------------
function main() {
install_packages
install_headers
installmusl
install_openssl_musl
}
main

View File

@@ -72,6 +72,7 @@ add_dependencies(${PROJECT_EXE_NAME} run_prebuild_script)
target_include_directories(${PROJECT_EXE_NAME} PRIVATE
$<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/src/autogen>
${CMAKE_CURRENT_SOURCE_DIR}/src
${CMAKE_CURRENT_SOURCE_DIR}/src/autogen
)
# Configure libassert

View File

@@ -2,211 +2,70 @@
#include <vector>
#include <fstream>
#include <filesystem>
#include <cstring>
#include <zlib.h>
#include <cstdlib>
#include <sstream>
#include <span>
#include <iostream>
#include <unistd.h>
#include "tar_to_stream.hpp"
#include "ArchiveManager.hpp"
namespace fs = std::filesystem;
namespace {
constexpr size_t TAR_BLOCK_SIZE = 512;
// --- Minimal tar extraction logic for unpacking and config extraction ---
struct TarHeader {
char name[100];
char mode[8];
char uid[8];
char gid[8];
char size[12];
char mtime[12];
char chksum[8];
char typeflag;
char linkname[100];
char magic[6];
char version[2];
char uname[32];
char gname[32];
char devmajor[8];
char devminor[8];
char prefix[155];
char pad[12];
};
size_t tarFileSize(const TarHeader* hdr) {
return std::strtol(hdr->size, nullptr, 8);
}
std::string tarFileName(const TarHeader* hdr) {
std::string name(hdr->name);
if (hdr->prefix[0]) {
std::string prefix(hdr->prefix);
return prefix + "/" + name;
}
return name;
}
bool readTar(const std::vector<uint8_t>& tarData, const std::string& outDir, std::string* configJson = nullptr) {
size_t pos = 0;
while (pos + sizeof(TarHeader) <= tarData.size()) {
const TarHeader* hdr = reinterpret_cast<const TarHeader*>(&tarData[pos]);
if (hdr->name[0] == '\0') break;
size_t filesize = tarFileSize(hdr);
std::string filename = tarFileName(hdr);
size_t fileStart = pos + sizeof(TarHeader);
if (configJson && filename == "dropshell-tool-config.json") {
*configJson = std::string(reinterpret_cast<const char*>(&tarData[fileStart]), filesize);
} else if (!outDir.empty()) {
fs::path outPath = fs::path(outDir) / filename;
fs::create_directories(outPath.parent_path());
std::ofstream ofs(outPath, std::ios::binary);
ofs.write(reinterpret_cast<const char*>(&tarData[fileStart]), filesize);
}
pos = fileStart + filesize;
pos += (TAR_BLOCK_SIZE - (filesize % TAR_BLOCK_SIZE)) % TAR_BLOCK_SIZE;
}
return true;
}
bool extractConfigJson(const std::vector<uint8_t>& tarData, std::string& outJson) {
return readTar(tarData, "", &outJson);
}
bool replaceConfigJson(std::vector<uint8_t>& tarData, const std::string& json) {
// Remove old config, add new one at end
std::vector<uint8_t> newTar;
size_t pos = 0;
bool replaced = false;
while (pos + sizeof(TarHeader) <= tarData.size()) {
const TarHeader* hdr = reinterpret_cast<const TarHeader*>(&tarData[pos]);
if (hdr->name[0] == '\0') break;
size_t filesize = tarFileSize(hdr);
std::string filename = tarFileName(hdr);
size_t fileStart = pos + sizeof(TarHeader);
if (filename != "dropshell-tool-config.json") {
newTar.insert(newTar.end(), &tarData[pos], &tarData[fileStart + filesize]);
size_t pad = (TAR_BLOCK_SIZE - (filesize % TAR_BLOCK_SIZE)) % TAR_BLOCK_SIZE;
newTar.insert(newTar.end(), pad, 0);
} else {
replaced = true;
}
pos = fileStart + filesize;
pos += (TAR_BLOCK_SIZE - (filesize % TAR_BLOCK_SIZE)) % TAR_BLOCK_SIZE;
}
// Add new config
// Use tar_to_stream to add the config file
std::ostringstream oss(std::ios::binary);
oss.write(reinterpret_cast<const char*>(newTar.data()), newTar.size());
std::string uname = "root";
std::string gname = "root";
tar_to_stream_properties props = {
"dropshell-tool-config.json",
std::as_bytes(std::span(json.data(), json.size())),
0u, "644", 0u, 0u, uname, gname
};
tar_to_stream(oss, std::move(props));
tar_to_stream_tail(oss);
std::string tarStr = oss.str();
tarData.assign(tarStr.begin(), tarStr.end());
return true;
}
// --- zlib helpers ---
bool gzipCompress(const std::string& in, std::vector<uint8_t>& out) {
z_stream strm{};
deflateInit2(&strm, Z_BEST_COMPRESSION, Z_DEFLATED, 15 + 16, 8, Z_DEFAULT_STRATEGY);
out.resize(compressBound(in.size()));
strm.next_in = reinterpret_cast<Bytef*>(const_cast<char*>(in.data()));
strm.avail_in = in.size();
strm.next_out = out.data();
strm.avail_out = out.size();
int ret = deflate(&strm, Z_FINISH);
if (ret != Z_STREAM_END) { deflateEnd(&strm); return false; }
out.resize(strm.total_out);
deflateEnd(&strm);
return true;
}
bool gzipDecompress(const std::string& inPath, std::vector<uint8_t>& out) {
std::ifstream ifs(inPath, std::ios::binary);
if (!ifs) return false;
ifs.seekg(0, std::ios::end);
size_t insize = ifs.tellg();
ifs.seekg(0, std::ios::beg);
std::vector<uint8_t> inbuf(insize);
ifs.read(reinterpret_cast<char*>(inbuf.data()), insize);
z_stream strm{};
inflateInit2(&strm, 15 + 16);
out.resize(insize * 10); // crude guess
strm.next_in = inbuf.data();
strm.avail_in = inbuf.size();
strm.next_out = out.data();
strm.avail_out = out.size();
int ret = inflate(&strm, Z_FINISH);
if (ret != Z_STREAM_END) { inflateEnd(&strm); return false; }
out.resize(strm.total_out);
inflateEnd(&strm);
return true;
}
bool gzipDecompressToTar(const std::string& inPath, std::vector<uint8_t>& tarData) {
return gzipDecompress(inPath, tarData);
}
bool gzipCompressToFile(const std::string& tarData, const std::string& outPath) {
std::vector<uint8_t> gz;
if (!gzipCompress(tarData, gz)) return false;
std::ofstream ofs(outPath, std::ios::binary);
if (!ofs) return false;
ofs.write(reinterpret_cast<const char*>(gz.data()), gz.size());
return ofs.good();
}
}
ArchiveManager::ArchiveManager() {}
bool ArchiveManager::pack(const std::string& folderPath, const std::string& archivePath) {
// Use tar_to_stream to create tar in memory
std::ostringstream tarStream(std::ios::binary);
for (auto& p : fs::recursive_directory_iterator(folderPath)) {
if (!fs::is_regular_file(p)) continue;
std::ifstream ifs(p.path(), std::ios::binary);
if (!ifs) return false;
std::vector<char> data((std::istreambuf_iterator<char>(ifs)), {});
std::string uname = "root";
std::string gname = "root";
tar_to_stream_properties props = {
fs::relative(p.path(), folderPath).generic_string(),
std::as_bytes(std::span(data.data(), data.size())),
0u, "644", 0u, 0u, uname, gname
};
tar_to_stream(tarStream, std::move(props));
}
tar_to_stream_tail(tarStream);
std::string tarData = tarStream.str();
return gzipCompressToFile(tarData, archivePath);
// Use system tar to create gzipped tarball
std::ostringstream cmd;
cmd << "tar -czf '" << archivePath << "' -C '" << folderPath << "' .";
int ret = std::system(cmd.str().c_str());
return ret == 0;
}
bool ArchiveManager::unpack(const std::string& archivePath, const std::string& outDir) {
std::vector<uint8_t> tarData;
if (!gzipDecompressToTar(archivePath, tarData)) return false;
return readTar(tarData, outDir);
fs::create_directories(outDir);
std::ostringstream cmd;
cmd << "tar -xzf '" << archivePath << "' -C '" << outDir << "'";
int ret = std::system(cmd.str().c_str());
return ret == 0;
}
bool ArchiveManager::readConfigJson(const std::string& archivePath, std::string& outJson) {
std::vector<uint8_t> tarData;
if (!gzipDecompressToTar(archivePath, tarData)) return false;
return extractConfigJson(tarData, outJson);
// Extract config json to stdout
std::ostringstream cmd;
cmd << "tar -xOzf '" << archivePath << "' dropshell-tool-config.json";
FILE* pipe = popen(cmd.str().c_str(), "r");
if (!pipe) return false;
char buffer[4096];
std::ostringstream ss;
size_t n;
while ((n = fread(buffer, 1, sizeof(buffer), pipe)) > 0) {
ss.write(buffer, n);
}
int ret = pclose(pipe);
if (ret != 0) return false;
outJson = ss.str();
return true;
}
bool ArchiveManager::writeConfigJson(const std::string& archivePath, const std::string& json) {
std::vector<uint8_t> tarData;
if (!gzipDecompressToTar(archivePath, tarData)) return false;
if (!replaceConfigJson(tarData, json)) return false;
// Write back to archivePath
std::string tarStr(reinterpret_cast<const char*>(tarData.data()), tarData.size());
return gzipCompressToFile(tarStr, archivePath);
// 1. Extract archive to temp dir
std::string tmpDir = "/tmp/dropshell_tool_tmp_" + std::to_string(::getpid());
fs::create_directories(tmpDir);
std::ostringstream extractCmd;
extractCmd << "tar -xzf '" << archivePath << "' -C '" << tmpDir << "'";
if (std::system(extractCmd.str().c_str()) != 0) return false;
// 2. Write new config json
std::ofstream ofs(tmpDir + "/dropshell-tool-config.json", std::ios::binary);
if (!ofs) return false;
ofs << json;
ofs.close();
// 3. Repack
std::ostringstream packCmd;
packCmd << "tar -czf '" << archivePath << "' -C '" << tmpDir << "' .";
int ret = std::system(packCmd.str().c_str());
// 4. Cleanup
std::ostringstream rmCmd;
rmCmd << "rm -rf '" << tmpDir << "'";
std::system(rmCmd.str().c_str());
return ret == 0;
}
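
For reference, a minimal sketch of how the ArchiveManager interface exercised above might be called. The member signatures (pack, unpack, readConfigJson, writeConfigJson) and the default constructor are taken from this file; the surrounding main(), the header name, and the paths are illustrative assumptions only.

#include <iostream>
#include <string>
#include "ArchiveManager.hpp" // header name assumed

int main() {
    ArchiveManager mgr;
    // Pack a folder into a gzipped tarball (the new code delegates to the system tar).
    if (!mgr.pack("./payload", "./payload.tar.gz")) {
        std::cerr << "pack failed\n";
        return 1;
    }
    // Read the embedded dropshell-tool-config.json, if present.
    std::string config;
    if (mgr.readConfigJson("./payload.tar.gz", config))
        std::cout << "embedded config: " << config << "\n";
    // Replace the embedded config, then unpack the archive.
    mgr.writeConfigJson("./payload.tar.gz", "{\"labeltags\":[]}");
    return mgr.unpack("./payload.tar.gz", "./extracted") ? 0 : 1;
}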

View File

@@ -27,11 +27,14 @@ bool GetbinClient::download(const std::string& toolName, const std::string& arch
}
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token) {
httplib::SSLClient cli(SERVER_HOST, 443);
httplib::SSLClient cli(SERVER_HOST);
httplib::MultipartFormDataItems items;
// Read file
std::ifstream ifs(archivePath, std::ios::binary);
if (!ifs) return false;
if (!ifs) {
std::cerr << "[GetbinClient::upload] Failed to open archive file: " << archivePath << std::endl;
return false;
}
std::string file_content((std::istreambuf_iterator<char>(ifs)), std::istreambuf_iterator<char>());
// Compose metadata (minimal, can be extended)
json metadata = { {"labeltags", json::array()} };
@@ -43,14 +46,31 @@ bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, s
items.push_back({"file", file_content, filename, "application/gzip"});
items.push_back({"metadata", metadata.dump(), "", "application/json"});
httplib::Headers headers = { {"Authorization", "Bearer " + token} };
auto res = cli.Put("/upload", headers, items);
if (!res || (res->status != 200 && res->status != 201)) return false;
if (!res) {
std::cerr << "[GetbinClient::upload] HTTP request failed (no response)." << std::endl;
return false;
}
if (res->status != 200 && res->status != 201) {
std::cerr << "[GetbinClient::upload] HTTP error: status code " << res->status << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << res->body << std::endl;
return false;
}
// Parse response for URL/hash
try {
auto resp_json = json::parse(res->body);
if (resp_json.contains("url")) outUrl = resp_json["url"].get<std::string>();
if (resp_json.contains("hash")) outHash = resp_json["hash"].get<std::string>();
} catch (...) { return false; }
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::upload] Failed to parse JSON response: " << e.what() << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << res->body << std::endl;
return false;
} catch (...) {
std::cerr << "[GetbinClient::upload] Unknown error while parsing JSON response." << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << res->body << std::endl;
return false;
}
return true;
}
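
A minimal usage sketch for the upload path above. The upload signature and its error reporting come from this diff; the GetbinClient header name, default construction, and the placeholder token are assumptions.

#include <iostream>
#include <string>
#include "GetbinClient.hpp" // header name assumed

int main() {
    GetbinClient client;                      // assumes a default constructor
    std::string url, hash;
    const std::string token = "<api-token>";  // placeholder, supply a real token
    if (!client.upload("./payload.tar.gz", url, hash, token)) {
        std::cerr << "upload failed (details already printed by upload())\n";
        return 1;
    }
    std::cout << "uploaded to " << url << " (hash " << hash << ")\n";
    return 0;
}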

View File

@@ -0,0 +1,21 @@
#pragma once
/*
version.hpp is automatically generated by the build system, from version.hpp.in.
DO NOT EDIT VERSION.HPP!
*/
#include <string>
namespace dropshell {
// Version information
const std::string VERSION = "@PROJECT_VERSION@";
const std::string RELEASE_DATE = "@RELEASE_DATE@";
const std::string AUTHOR = "j842";
const std::string LICENSE = "MIT";
} // namespace dropshell
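
Since version.hpp is generated from this template at build time (the @...@ placeholders are filled in by the build system), consuming it is a plain include. A small sketch, with the print formatting being illustrative:

#include <iostream>
#include "version.hpp" // generated from version.hpp.in by the build

int main() {
    std::cout << "dropshell-tool " << dropshell::VERSION
              << " (released " << dropshell::RELEASE_DATE << ", "
              << dropshell::LICENSE << " license)\n";
    return 0;
}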

View File

@@ -1,100 +0,0 @@
#pragma once
#include <span>
#include <string>
#include <string_view>
#include <cstring>
#include <cstdint>
struct tar_to_stream_properties {
/// Properties of the file to enter into the stream
std::string const &filename; /// name of the file to write
std::span<std::byte const> data; /// the location of the file's contents in memory
uint64_t mtime{0u}; /// file modification time, in seconds since epoch
std::string filemode{"644"}; /// file mode
unsigned int uid{0u}; /// file owner user ID
unsigned int gid{0u}; /// file owner group ID
std::string const &uname{"root"}; /// file owner username
std::string const &gname{"root"}; /// file owner group name
};
template<typename T>
void tar_to_stream(T &stream, /// stream to write to, e.g. ostream or ofstream
tar_to_stream_properties &&file) { /// properties of the file to enter into the stream
/// Read a "file" in memory, and write it as a TAR archive to the stream
struct { // offset
char name[100]{}; // 0 filename
char mode[8]{}; // 100 file mode: 0000644 etc
char uid[8]{}; // 108 user id, ascii representation of octal value: "0001750" (for UID 1000)
char gid[8]{}; // 116 group id, ascii representation of octal value: "0001750" (for GID 1000)
char size[12]{}; // 124 file size, ascii representation of octal value
char mtime[12]{"00000000000"}; // 136 modification time, seconds since epoch
char chksum[8]{' ', ' ', ' ', ' ', ' ', ' ', ' ', ' '}; // 148 checksum: six octal bytes followed by null and ' '. Checksum is the octal sum of all bytes in the header, with chksum field set to 8 spaces.
char typeflag{'0'}; // 156 '0'
char linkname[100]{}; // 157 null bytes when not a link
char magic[6]{'u', 's', 't', 'a', 'r', ' '}; // 257 format: Unix Standard TAR: "ustar ", not null-terminated
char version[2]{" "}; // 263 " "
char uname[32]{}; // 265 user name
char gname[32]{}; // 297 group name
char devmajor[8]{}; // 329 null bytes
char devminor[8]{}; // 337 null bytes
char prefix[155]{}; // 345 null bytes
char padding[12]{}; // 500 padding to reach 512 block size
} header; // 512
file.filemode.insert(file.filemode.begin(), 7 - file.filemode.length(), '0'); // zero-pad the file mode
std::strncpy(header.name, file.filename.c_str(), sizeof(header.name ) - 1); // leave one char for the final null
std::strncpy(header.mode, file.filemode.c_str(), sizeof(header.mode ) - 1);
std::strncpy(header.uname, file.uname.c_str(), sizeof(header.uname) - 1);
std::strncpy(header.gname, file.gname.c_str(), sizeof(header.gname) - 1);
sprintf(header.size, "%011lo", file.data.size());
sprintf(header.mtime, "%011llo", file.mtime);
sprintf(header.uid, "%07o", file.uid);
sprintf(header.gid, "%07o", file.gid);
{
unsigned int checksum_value = 0;
for(size_t i{0}; i != sizeof(header); ++i) {
checksum_value += reinterpret_cast<uint8_t*>(&header)[i];
}
sprintf(header.chksum, "%06o", checksum_value);
}
size_t const padding{(512u - file.data.size() % 512) & 511u};
stream << std::string_view{header.name, sizeof(header)}
<< std::string_view{reinterpret_cast<char const*>(file.data.data()), file.data.size()}
<< std::string(padding, '\0');
}
template<typename T>
[[deprecated("Use tar_to_stream_properties as argument: tar_to_stream(stream, {...}) - this allows use of designated initialisers and cleaner code. Refer to tar_to_stream's README for example usage")]]
void tar_to_stream(T &stream, /// stream to write to, e.g. ostream or ofstream
std::string const &filename, /// name of the file to write
char const *data_ptr, /// pointer to the data in this archive segment
size_t data_size, /// size of the data
uint64_t mtime = 0u, /// file modification time, in seconds since epoch
std::string filemode = "644", /// file mode
unsigned int uid = 0u, /// file owner user ID
unsigned int gid = 0u, /// file owner group ID
std::string const &uname = "root", /// file owner username
std::string const &gname = "root") { /// file owner group name
/// Explicit argument constructor, for backwards compatibility
tar_to_stream(stream, tar_to_stream_properties{
.filename{filename},
.data{std::as_bytes(std::span{data_ptr, data_size})},
.mtime{mtime},
.filemode{filemode},
.uid{uid},
.gid{gid},
.uname{uname},
.gname{gname},
});
}
template<typename T>
void tar_to_stream_tail(T &stream, unsigned int tail_length = 512u * 2u) {
/// TAR archives expect a tail of null bytes at the end - min of 512 * 2, but implementations often add more
stream << std::string(tail_length, '\0');
}
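
For context on the header removed above: the old ArchiveManager code invoked it roughly as follows, writing one file entry followed by the mandatory null-block tail. This sketch mirrors the removed call sites; the wrapper function and the payload are illustrative only.

#include <span>
#include <sstream>
#include <string>
#include "tar_to_stream.hpp" // the header deleted in this commit

// Build an uncompressed TAR stream containing a single file.
std::string make_single_file_tar(const std::string& name, const std::string& payload) {
    std::ostringstream tarStream(std::ios::binary);
    tar_to_stream(tarStream, tar_to_stream_properties{
        .filename{name},
        .data{std::as_bytes(std::span{payload.data(), payload.size()})},
        .filemode{"644"},
        .uname{"root"},
        .gname{"root"},
    });
    tar_to_stream_tail(tarStream); // TAR requires at least two 512-byte null blocks at the end
    return tarStream.str();
}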

Binary file not shown.