Compare commits


65 Commits

Author SHA1 Message Date
a39e46c6c6 docs: Add 2 and update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m11s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m54s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-20 15:36:51 +12:00
7c785e1a32 Update .kiro/specs/multi-server-support/tasks.md
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m0s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m34s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-20 15:18:58 +12:00
3e4f327426 docs: Update 3 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m1s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m46s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-20 15:15:04 +12:00
187f1a250d docs: Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m8s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m36s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-20 15:06:51 +12:00
52d8e5b95e docs: Update 3 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 53s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m34s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-20 15:03:37 +12:00
bfeaf4d0db Update gp/gp
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 54s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m21s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-20 13:56:04 +12:00
6a3ca6bc10 Modify test_gp_1752976117
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m1s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m23s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 6s
2025-07-20 13:50:50 +12:00
7f8312ed59 test: Update 2 files
Some checks failed
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been cancelled
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been cancelled
Build-Test-Publish / build (linux/amd64) (push) Has been cancelled
Build-Test-Publish / build (linux/arm64) (push) Has been cancelled
2025-07-20 13:50:04 +12:00
1b03087c02 test: Update 13 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 54s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been cancelled
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been cancelled
Build-Test-Publish / build (linux/arm64) (push) Has been cancelled
2025-07-20 13:49:08 +12:00
0ba6227412 Modify README.md
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 53s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m34s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-19 15:43:38 +12:00
f5ba2e719b Modify whatsdirty/publish.sh
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 54s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m23s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-07-19 13:30:51 +12:00
73c94f34f6 Modify getpkg/src/main.cpp
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 55s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m23s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-30 23:47:08 +12:00
af4cbbcab0 Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 54s
Build-Test-Publish / build (linux/arm64) (push) Successful in 1m23s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-30 23:13:51 +12:00
a415eb0f91 Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 53s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m6s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 23:09:07 +12:00
83d6cf1603 Modify dehydrate/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 52s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m6s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 23:03:59 +12:00
fbaa3a4089 Modify dehydrate/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 53s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m6s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 23:02:06 +12:00
0c767e065c Modify dehydrate/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 53s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m18s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 22:56:30 +12:00
f7d2001871 Modify dehydrate/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 52s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m18s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 22:37:31 +12:00
d13011a329 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m0s
Build-Test-Publish / build (linux/arm64) (push) Failing after 1m18s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 22:22:06 +12:00
d27904ec05 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 16s
Build-Test-Publish / build (linux/arm64) (push) Failing after 26s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 21:17:31 +12:00
decf16da7f Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 20:43:45 +12:00
aa04f5e71e Modify getpkg/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 15s
Build-Test-Publish / build (linux/arm64) (push) Failing after 25s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 19:39:59 +12:00
17224c4637 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-30 19:35:14 +12:00
4badce0ed4 Modify dehydrate/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 23:29:14 +12:00
fe83fc3d64 test: Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 23:28:49 +12:00
7149b8714e Modify getpkg/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 23:23:27 +12:00
af95d27964 config: Update 3 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 23:05:58 +12:00
3eb78acf70 config: Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 2s
Build-Test-Publish / build (linux/arm64) (push) Failing after 3s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:49:28 +12:00
3d21d1da7d Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:47:41 +12:00
344d62034c Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:45:36 +12:00
78e41214d7 Modify getpkg/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:44:02 +12:00
512ba200c2 Modify .gitea/workflows/BuildTestPublish.yaml
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:38:43 +12:00
5f04bd23a1 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:37:34 +12:00
67bb7f747f Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:15:23 +12:00
e55fe1a17c Modify dehydrate/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:14:51 +12:00
2f056b8500 config: Update 3 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:14:13 +12:00
fe3c5d2ad9 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 22:03:44 +12:00
2ab38fd053 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:59:50 +12:00
9dda4e1649 Modify getpkg/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:57:16 +12:00
d8883c4419 Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 6s
Build-Test-Publish / build (linux/arm64) (push) Failing after 7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:56:38 +12:00
4c4257eebe Modify getpkg/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 6s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:55:06 +12:00
4bb85c63b8 Update 4 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:49:57 +12:00
e5f3569b2a Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:40:27 +12:00
de200a5bb6 Modify getpkg/build.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:39:23 +12:00
0f1cfdcc28 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:38:37 +12:00
7f937c1090 Update 2 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:37:39 +12:00
d7964d3a78 test: Update 3 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 6s
Build-Test-Publish / build (linux/arm64) (push) Failing after 8s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:35:21 +12:00
719475e29f Update 3 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 7s
Build-Test-Publish / build (linux/arm64) (push) Failing after 9s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:32:18 +12:00
70cb5c1b3a test: Update 5 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m49s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m9s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 21:27:12 +12:00
facc6b73b0 feat: Update 4 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m30s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m33s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-29 20:52:40 +12:00
9a24576e37 Modify clean.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m28s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m28s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:31:18 +12:00
3f68f44e3d Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m29s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m32s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-29 20:28:25 +12:00
dbe88a7121 test: Update 5 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m28s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m32s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:24:57 +12:00
00d1e86157 Modify bb64/publish.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 9s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m10s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:18:42 +12:00
3388a46bf3 Modify getpkg/test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m12s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 20:02:47 +12:00
0f5421630a feat: Update 3 files
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m16s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m7s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-29 19:55:07 +12:00
50fb5f9da6 feat: Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m15s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m6s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 7s
2025-06-29 19:46:55 +12:00
8e2611e362 Modify getpkg/src/GetbinClient.cpp.bak
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m14s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m5s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Failing after 6s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Failing after 6s
2025-06-29 19:02:48 +12:00
a1b12fe177 docs: Update 4 files
Some checks failed
Build-Test-Publish / build (linux/arm64) (push) Has been cancelled
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been cancelled
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been cancelled
Build-Test-Publish / build (linux/amd64) (push) Has been cancelled
2025-06-29 19:02:09 +12:00
902e68069a Modify getpkg/src/main.cpp
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m18s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m13s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 8s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-29 11:53:32 +12:00
0aafc2cc1e docs: Update 3 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m21s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m15s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-26 21:23:10 +12:00
2067caf253 Modify bb64/src/bb64.cpp
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m18s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m15s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 8s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-26 21:09:06 +12:00
4d500cbddd Update 2 files
All checks were successful
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m14s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Successful in 7s
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Successful in 8s
2025-06-25 22:47:45 +12:00
884609f661 Modify buildtestpublish_all.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Successful in 1m19s
Build-Test-Publish / build (linux/arm64) (push) Failing after 2m14s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:42:52 +12:00
a5a36c179b Modify dehydrate/test/build_dehydrate_test.sh
Some checks failed
Build-Test-Publish / build (linux/amd64) (push) Failing after 1m17s
Build-Test-Publish / build (linux/arm64) (push) Successful in 2m15s
Build-Test-Publish / test-install-from-scratch (linux/amd64) (push) Has been skipped
Build-Test-Publish / test-install-from-scratch (linux/arm64) (push) Has been skipped
2025-06-25 22:41:01 +12:00
43 changed files with 3582 additions and 1099 deletions

View File

@@ -26,7 +26,10 @@ jobs:
password: ${{ secrets.DOCKER_PUSH_TOKEN }}
- name: Build Test Publish All
run: |
SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} ./buildtestpublish_all.sh
SOS_WRITE_TOKEN=${{ secrets.SOS_WRITE_TOKEN }} \
RELEASE_WRITE_TOKEN=${{ secrets.RELEASE_WRITE_TOKEN }} \
GITEA_CONTAINER_NAME=${{ env.JOB_CONTAINER_NAME }} \
./buildtestpublish_all.sh
test-install-from-scratch:
needs: [build]

View File

@@ -0,0 +1,325 @@
# Design Document
## Overview
This design extends getpkg to support multiple package servers while maintaining full backward compatibility. The solution introduces a server configuration system, updates the client architecture to handle multiple servers, and reorganizes package metadata storage. The design prioritizes minimal disruption to existing functionality while providing powerful multi-server capabilities.
## Architecture
### High-Level Architecture
```mermaid
graph TB
CLI[CLI Commands] --> SM[ServerManager]
CLI --> PM[PackageManager]
PM --> SM
PM --> GC[GetbinClient]
SM --> CF[servers.json]
PM --> PF[packages/*.json]
GC --> S1[Server 1]
GC --> S2[Server 2]
GC --> SN[Server N]
```
### Server Management Flow
```mermaid
sequenceDiagram
participant User
participant CLI
participant ServerManager
participant Config
User->>CLI: getpkg server add example.com
CLI->>ServerManager: addServer("example.com")
ServerManager->>Config: load servers.json
ServerManager->>ServerManager: validate URL
ServerManager->>Config: save updated servers.json
ServerManager->>CLI: success confirmation
CLI->>User: Server added successfully
```
### Package Installation Flow
```mermaid
sequenceDiagram
participant User
participant CLI
participant PackageManager
participant GetbinClient
participant Server1
participant Server2
User->>CLI: getpkg install tool
CLI->>PackageManager: install("tool")
PackageManager->>GetbinClient: download("tool", servers[0])
GetbinClient->>Server1: GET /object/tool:arch
alt Package found
Server1-->>GetbinClient: 200 + package data
GetbinClient-->>PackageManager: success
else Package not found
Server1-->>GetbinClient: 404
GetbinClient->>Server2: GET /object/tool:arch
Server2-->>GetbinClient: 200 + package data
GetbinClient-->>PackageManager: success
end
PackageManager->>PackageManager: install package
PackageManager->>CLI: installation complete
```
## Components and Interfaces
### ServerManager Class
**Purpose**: Manages server configuration, write tokens, and provides server list to other components.
**Interface**:
```cpp
class ServerManager {
public:
ServerManager();
// Server management
bool addServer(const std::string& serverUrl, const std::string& writeToken = "");
bool removeServer(const std::string& serverUrl);
std::vector<std::string> getServers() const;
std::string getDefaultServer() const;
std::string getDefaultPublishServer() const; // First server with write token
// Token management
bool setWriteToken(const std::string& serverUrl, const std::string& token);
std::string getWriteToken(const std::string& serverUrl) const;
bool hasWriteToken(const std::string& serverUrl) const;
std::vector<std::string> getServersWithTokens() const;
// Configuration
bool loadConfiguration();
bool saveConfiguration();
void ensureDefaultConfiguration();
// Migration
bool migrateFromLegacy();
private:
std::vector<ServerConfig> servers_;
std::filesystem::path configPath_;
bool validateServerUrl(const std::string& url) const;
bool isServerReachable(const std::string& url) const;
ServerConfig* findServer(const std::string& url);
};
```
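For illustration, a CLI handler might drive this interface as follows (a sketch only: `handleServerCommand`, its messages, and return codes are invented here, not taken from the codebase):
```cpp
// Sketch only: a CLI handler driving the ServerManager interface above.
// handleServerCommand, its messages, and return codes are illustrative.
#include <iostream>
#include <string>

int handleServerCommand(ServerManager& mgr, const std::string& action,
                        const std::string& url) {
    if (action == "add") {
        if (!mgr.addServer(url)) {              // validates URL, persists servers.json
            std::cerr << "Failed to add server: " << url << "\n";
            return 1;
        }
        std::cout << "Server added: " << url << "\n";
    } else if (action == "remove") {
        if (!mgr.removeServer(url)) {
            std::cerr << "Server not found: " << url << "\n";
            return 1;
        }
        std::cout << "Server removed: " << url << "\n";
    } else if (action == "list") {
        for (const auto& s : mgr.getServers())
            std::cout << s << (s == mgr.getDefaultServer() ? " (default)" : "") << "\n";
    }
    return 0;
}
```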
### Enhanced GetbinClient Class
**Purpose**: Extended to support multiple servers with fallback logic.
**Interface Changes**:
```cpp
class GetbinClient {
public:
GetbinClient(const std::vector<std::string>& servers);
// Existing methods with server selection
bool download(const std::string& toolName, const std::string& arch,
const std::string& outPath, ProgressCallback progressCallback = nullptr);
bool downloadFromServer(const std::string& serverUrl, const std::string& toolName,
const std::string& arch, const std::string& outPath,
ProgressCallback progressCallback = nullptr);
// Server-specific operations
bool upload(const std::string& serverUrl, const std::string& archivePath,
std::string& outUrl, std::string& outHash, const std::string& token,
ProgressCallback progressCallback = nullptr);
bool getHash(const std::string& serverUrl, const std::string& toolName,
const std::string& arch, std::string& outHash);
// Multi-server operations
bool findPackageServer(const std::string& toolName, const std::string& arch,
std::string& foundServer) const;
private:
std::vector<std::string> servers_;
std::string buildUrl(const std::string& serverUrl, const std::string& endpoint) const;
};
```
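The fallback logic could be layered on `downloadFromServer` roughly like this (a sketch under the interface above; the real implementation and its error reporting may differ):
```cpp
// Sketch: try each configured server in order; first success wins.
bool GetbinClient::download(const std::string& toolName, const std::string& arch,
                            const std::string& outPath,
                            ProgressCallback progressCallback) {
    for (const auto& server : servers_) {
        // downloadFromServer is assumed to return false on 404 or network error.
        if (downloadFromServer(server, toolName, arch, outPath, progressCallback))
            return true;    // do not consult the remaining servers
    }
    return false;           // package not found on any configured server
}
```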
### PackageMetadata Structure
**Purpose**: Enhanced metadata structure to track server source.
**Structure**:
```cpp
struct PackageMetadata {
std::string name;
std::string version;
std::string hash;
std::string arch;
std::string sourceServer; // New field
std::string installDate; // New field for better tracking
// Serialization
nlohmann::json toJson() const;
static PackageMetadata fromJson(const nlohmann::json& j);
// Migration support
static PackageMetadata fromLegacyJson(const nlohmann::json& j, const std::string& defaultServer);
};
```
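A minimal sketch of the serialization methods, assuming nlohmann::json as listed in the tech stack (field names follow the structure above; error handling is elided):
```cpp
// Sketch of the (de)serialization described above, using nlohmann::json.
#include <nlohmann/json.hpp>

nlohmann::json PackageMetadata::toJson() const {
    return {
        {"name", name},       {"version", version},
        {"hash", hash},       {"arch", arch},
        {"sourceServer", sourceServer},
        {"installDate", installDate},
    };
}

PackageMetadata PackageMetadata::fromJson(const nlohmann::json& j) {
    PackageMetadata m;
    m.name         = j.value("name", "");
    m.version      = j.value("version", "");
    m.hash         = j.value("hash", "");
    m.arch         = j.value("arch", "");
    m.sourceServer = j.value("sourceServer", "");
    m.installDate  = j.value("installDate", "");
    return m;
}

// Legacy files predate sourceServer/installDate; fill them with defaults.
PackageMetadata PackageMetadata::fromLegacyJson(const nlohmann::json& j,
                                                const std::string& defaultServer) {
    PackageMetadata m = fromJson(j);     // missing keys fall back to ""
    if (m.sourceServer.empty()) m.sourceServer = defaultServer;
    return m;
}
```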
### Migration Manager
**Purpose**: Handles migration from single-server to multi-server configuration.
**Interface**:
```cpp
class MigrationManager {
public:
MigrationManager();
bool needsMigration() const;
bool performMigration();
private:
bool migrateServerConfiguration();
bool migratePackageMetadata();
bool movePackageFiles();
bool updatePackageMetadata();
std::filesystem::path oldConfigDir_;
std::filesystem::path newConfigDir_;
std::filesystem::path packagesDir_;
};
```
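One plausible shape for the file-moving step, assuming the member paths declared above point at `~/.config/getpkg/` and its `packages/` subdirectory (illustrative, not the shipped code):
```cpp
// Sketch of the file-moving step: relocate legacy per-tool JSON files from
// ~/.config/getpkg/ into the new packages/ subdirectory.
#include <filesystem>

bool MigrationManager::movePackageFiles() {
    namespace fs = std::filesystem;
    std::error_code ec;
    fs::create_directories(packagesDir_, ec);
    if (ec) return false;

    for (const auto& entry : fs::directory_iterator(oldConfigDir_, ec)) {
        if (entry.path().extension() != ".json") continue;
        if (entry.path().filename() == "servers.json") continue;   // new-style config
        fs::rename(entry.path(), packagesDir_ / entry.path().filename(), ec);
        if (ec) return false;    // leave remaining files for a retry/rollback
    }
    // Tagging the moved metadata with sourceServer = "getpkg.xyz" is left to
    // updatePackageMetadata() (declared above, not sketched here).
    return true;
}
```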
## Data Models
### Server Configuration Format
**File**: `~/.config/getpkg/servers.json`
```json
{
"version": "1.0",
"servers": [
{
"url": "getpkg.xyz",
"name": "Official getpkg Registry",
"default": true,
"writeToken": "",
"added": "2024-01-15T10:30:00Z"
},
{
"url": "packages.example.com",
"name": "Example Corporate Registry",
"default": false,
"writeToken": "abc123token456",
"added": "2024-01-16T14:20:00Z"
}
],
"lastUpdated": "2024-01-16T14:20:00Z"
}
```
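A `ServerConfig` mirroring this schema might look like the following sketch (field names are taken from the JSON example above; the actual struct in the codebase is not shown in this document):
```cpp
// A ServerConfig mirroring the schema above. Field names come from the JSON
// example; the real struct in the codebase may differ.
#include <nlohmann/json.hpp>
#include <string>

struct ServerConfig {
    std::string url;
    std::string name;
    bool        isDefault = false;   // maps to the "default" key
    std::string writeToken;          // empty string means no publish access
    std::string added;

    static ServerConfig fromJson(const nlohmann::json& j) {
        ServerConfig c;
        c.url        = j.value("url", "");
        c.name       = j.value("name", "");
        c.isDefault  = j.value("default", false);
        c.writeToken = j.value("writeToken", "");
        c.added      = j.value("added", "");
        return c;
    }
};
```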
### Enhanced Package Metadata Format
**File**: `~/.config/getpkg/packages/<tool_name>.json`
```json
{
"name": "example-tool",
"version": "2024.0115.1430",
"hash": "1234567890123456",
"arch": "x86_64",
"sourceServer": "getpkg.xyz",
"installDate": "2024-01-15T14:30:00Z",
"lastUpdated": "2024-01-15T14:30:00Z"
}
```
### Directory Structure Changes
```
~/.config/getpkg/
├── servers.json # New: Server configuration with embedded tokens
├── packages/ # New: Package metadata directory
│ ├── tool1.json
│ ├── tool2.json
│ └── ...
└── getpkg.xyz/ # Legacy: Will be migrated to servers.json
└── write_token.txt # Legacy: Will be migrated
```
## Error Handling
### Server Connectivity Issues
1. **Network Failures**: Graceful fallback to next server in list
2. **Invalid Responses**: Clear error messages with server identification
3. **Authentication Failures**: Server-specific error handling with token guidance
### Configuration Corruption
1. **Invalid JSON**: Automatic backup and reset to default configuration
2. **Missing Files**: Automatic creation with default settings
3. **Permission Issues**: Clear error messages with resolution steps
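The "automatic backup and reset" behavior could be realized along these lines (a sketch: the `.bak` suffix and the non-throwing parse are assumptions, not confirmed behavior):
```cpp
// Sketch of backup-and-reset on corrupt servers.json.
#include <filesystem>
#include <fstream>
#include <nlohmann/json.hpp>

bool ServerManager::loadConfiguration() {
    std::ifstream in(configPath_);
    if (!in) { ensureDefaultConfiguration(); return true; }      // missing file

    nlohmann::json j = nlohmann::json::parse(in, nullptr, /*allow_exceptions=*/false);
    if (j.is_discarded()) {                                      // corrupted JSON
        std::error_code ec;
        std::filesystem::rename(configPath_,
                                configPath_.string() + ".bak", ec);  // keep a backup
        ensureDefaultConfiguration();
        return true;
    }
    servers_.clear();
    for (const auto& s : j.value("servers", nlohmann::json::array()))
        servers_.push_back(ServerConfig::fromJson(s));
    return true;
}
```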
### Migration Failures
1. **Partial Migration**: Rollback capability with clear status reporting
2. **File Conflicts**: Safe handling with backup creation
3. **Metadata Corruption**: Individual file recovery without breaking entire system
## Testing Strategy
### Unit Tests
1. **ServerManager**: Configuration loading, validation, server management
2. **GetbinClient**: Multi-server communication, fallback logic
3. **PackageMetadata**: Serialization, migration, validation
4. **MigrationManager**: Legacy data handling, file operations
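As a flavor of such tests, a framework-free check of the fallback order might look like this (illustrative: the repository's real tests are shell `test.sh` scripts, and `FakeGetbinClient` is invented for this sketch):
```cpp
// Minimal, framework-free check of the fallback order.
#include <cassert>
#include <string>
#include <vector>

struct FakeGetbinClient {
    std::vector<std::string> servers;
    std::string hasTool;               // only this "server" serves the tool
    std::vector<std::string> tried;    // records query order

    bool download(const std::string& /*tool*/) {
        for (const auto& s : servers) {
            tried.push_back(s);
            if (s == hasTool) return true;   // stop at first hit
        }
        return false;
    }
};

int main() {
    FakeGetbinClient c{{"getpkg.xyz", "packages.example.com"},
                       "packages.example.com", {}};
    assert(c.download("tool"));
    assert(c.tried.size() == 2);             // first server missed, second hit
    assert(c.tried.back() == "packages.example.com");
}
```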
### Integration Tests
1. **End-to-End Installation**: Multi-server package discovery and installation
2. **Server Management**: Add/remove servers with real configuration
3. **Migration Testing**: Legacy to new format conversion
4. **Publish/Unpublish**: Server-specific operations
### Compatibility Tests
1. **Backward Compatibility**: Existing installations continue working
2. **Legacy Format**: Old package files are properly migrated
3. **Default Behavior**: No configuration changes for existing users
## Implementation Phases
### Phase 1: Core Infrastructure
- Implement ServerManager class
- Create server configuration format
- Add basic server validation
### Phase 2: Client Enhancement
- Extend GetbinClient for multi-server support
- Implement fallback logic
- Add server-specific operations
### Phase 3: Package Management
- Update package metadata format
- Implement packages directory structure
- Add server tracking to installations
### Phase 4: Migration System
- Create MigrationManager
- Implement automatic migration
- Add backward compatibility layer
### Phase 5: CLI Integration
- Add server management commands
- Update existing commands for multi-server
- Implement server selection options
### Phase 6: Testing and Polish
- Comprehensive testing suite
- Error handling refinement
- Documentation updates

View File

@@ -0,0 +1,79 @@
# Requirements Document
## Introduction
This feature extends getpkg to support multiple package servers instead of being limited to only getpkg.xyz. Users will be able to add and remove package servers, with getpkg searching across all configured servers to find packages. The system will maintain backward compatibility while providing flexible server management capabilities.
## Requirements
### Requirement 1
**User Story:** As a developer, I want to configure multiple package servers, so that I can access packages from different repositories and have redundancy in case one server is unavailable.
#### Acceptance Criteria
1. WHEN I run `getpkg server add <server_url>` THEN the system SHALL add the server to the configuration and confirm the addition
2. WHEN I run `getpkg server remove <server_url>` THEN the system SHALL remove the server from the configuration and confirm the removal
3. WHEN I run `getpkg server list` THEN the system SHALL display all configured servers in the order they were added
4. WHEN no servers are configured THEN the system SHALL default to using getpkg.xyz as the primary server
5. WHEN I add the first custom server THEN getpkg.xyz SHALL remain as the default first server unless explicitly removed
### Requirement 2
**User Story:** As a user, I want getpkg to search across all configured servers when installing packages, so that I can access packages from any of my configured repositories.
#### Acceptance Criteria
1. WHEN I run `getpkg install <tool_name>` THEN the system SHALL search servers in the order they were configured
2. WHEN a package is found on the first server THEN the system SHALL install from that server and not check remaining servers
3. WHEN a package is not found on the first server THEN the system SHALL try the next server in order
4. WHEN a package is not found on any server THEN the system SHALL report that the package was not found
5. WHEN checking for updates THEN the system SHALL use the same server where the package was originally installed
### Requirement 3
**User Story:** As a package publisher, I want to specify which server to publish to and manage write tokens per server, so that I can control where my packages are distributed and authenticate appropriately.
#### Acceptance Criteria
1. WHEN I run `getpkg publish <tool_name> <folder>` without specifying a server THEN the system SHALL publish to the first configured server that has a write token
2. WHEN I run `getpkg publish --server <server_url> <tool_name> <folder>` THEN the system SHALL publish to the specified server using its stored write token
3. WHEN I run `getpkg unpublish <tool_name>` without specifying a server THEN the system SHALL unpublish from the first configured server that has a write token
4. WHEN I run `getpkg unpublish --server <server_url> <tool_name>` THEN the system SHALL unpublish from the specified server using its stored write token
5. WHEN no servers have write tokens THEN the system SHALL report an error and suggest adding a write token to a server
### Requirement 4
**User Story:** As a user, I want my package metadata to be organized by server, so that I can track which packages came from which servers and manage them appropriately.
#### Acceptance Criteria
1. WHEN a package is installed THEN the system SHALL store the package metadata in `~/.config/getpkg/packages/<tool_name>.json`
2. WHEN package metadata is stored THEN it SHALL include the source server URL in addition to existing fields
3. WHEN the packages directory doesn't exist THEN the system SHALL create it automatically
4. WHEN migrating from the old format THEN existing package JSON files SHALL be moved to the packages subdirectory
5. WHEN migrating from the old format THEN existing package metadata SHALL be updated to include getpkg.xyz as the source server
### Requirement 5
**User Story:** As a user, I want server configuration to be persistent and secure, so that my settings are maintained across sessions and my authentication tokens are protected.
#### Acceptance Criteria
1. WHEN server configuration is modified THEN it SHALL be stored in `~/.config/getpkg/servers.json`
2. WHEN the configuration file doesn't exist THEN the system SHALL create it with getpkg.xyz as the default server
3. WHEN reading server configuration THEN the system SHALL validate the JSON format and handle corruption gracefully
4. WHEN a server URL is invalid THEN the system SHALL reject the addition and provide a helpful error message
5. WHEN authentication tokens are needed THEN they SHALL continue to be stored per-server in the existing location pattern
### Requirement 6
**User Story:** As a user, I want the multi-server functionality to be backward compatible, so that existing installations continue to work without modification.
#### Acceptance Criteria
1. WHEN getpkg starts with no server configuration THEN it SHALL automatically configure getpkg.xyz as the default server
2. WHEN existing package JSON files are found in `~/.config/getpkg/` THEN they SHALL be automatically migrated to the packages subdirectory
3. WHEN migrated package files are processed THEN they SHALL be updated to include server source information
4. WHEN all existing functionality is used THEN it SHALL work exactly as before for users who don't configure additional servers
5. WHEN the migration process fails THEN the system SHALL provide clear error messages and not break existing functionality

View File

@@ -0,0 +1,109 @@
# Implementation Plan
Based on analysis of the current codebase, the multi-server support feature needs to be built from scratch. The current implementation has a hardcoded `SERVER_HOST = "getpkg.xyz"` in `GetbinClient` and no server management infrastructure.
## Core Infrastructure Tasks
- [x] 1. Create ServerManager class and server configuration system
- Implement ServerManager class with server add/remove/list functionality
- Create server configuration JSON format and file handling
- Add server URL validation and reachability checks
- Implement write token management per server
- _Requirements: 1.1, 1.2, 1.3, 5.1, 5.2, 5.4_
- [x] 2. Enhance GetbinClient for multi-server support
- Modify GetbinClient constructor to accept server list instead of hardcoded host
- Implement multi-server fallback logic for downloads
- Add server-specific upload and hash operations
- Create findPackageServer method for package discovery
- _Requirements: 2.1, 2.2, 2.3, 2.4_
- [x] 3. Create enhanced package metadata system
- Design PackageMetadata structure with server source tracking
- Implement packages directory structure (~/.config/getpkg/packages/)
- Add JSON serialization/deserialization for enhanced metadata
- Create package metadata validation and error handling
- _Requirements: 4.1, 4.2, 4.3_
## Migration and Compatibility Tasks
- [ ] 4. Implement migration system for existing installations
- Create MigrationManager class for legacy data handling
- Implement automatic migration from single-server to multi-server config
- Migrate existing package JSON files to packages subdirectory
- Update existing package metadata to include server source information
- Add migration error handling and rollback capabilities
- _Requirements: 4.4, 4.5, 6.1, 6.2, 6.3, 6.5_
- [ ] 5. Ensure backward compatibility
- Implement default server configuration (getpkg.xyz) when no config exists
- Maintain existing CLI behavior for users without custom server configuration
- Preserve existing token storage location compatibility
- Add graceful handling of missing or corrupted configuration files
- _Requirements: 6.1, 6.4, 5.3_
## CLI Integration Tasks
- [ ] 6. Add server management commands to main.cpp
- Implement `getpkg server add <url>` command
- Implement `getpkg server remove <url>` command
- Implement `getpkg server list` command
- Add server URL validation and user feedback
- _Requirements: 1.1, 1.2, 1.3_
- [ ] 7. Update existing commands for multi-server support
- Modify install command to use ServerManager and multi-server GetbinClient
- Update publish command to support --server option and default server selection
- Update unpublish command to support --server option and default server selection
- Ensure update command works with multi-server package tracking
- _Requirements: 2.1, 2.2, 2.3, 2.4, 3.1, 3.2, 3.3, 3.4, 3.5_
## Integration and Testing Tasks
- [ ] 8. Integrate all components in main application flow
- Initialize ServerManager in main.cpp startup
- Trigger migration process on first run with new version
- Update package installation flow to use enhanced metadata
- Ensure proper error handling and user messaging throughout
- _Requirements: 6.1, 6.2, 6.3, 6.4, 6.5_
- [ ] 9. Add comprehensive error handling and validation
- Implement network error handling with server fallback
- Add configuration file corruption recovery
- Create user-friendly error messages for server connectivity issues
- Add validation for server URLs and authentication tokens
- _Requirements: 5.3, 5.4, 5.5_
- [ ] 10. Create unit tests for new components
- Write unit tests for ServerManager class functionality
- Test GetbinClient multi-server operations and fallback logic
- Test PackageMetadata serialization and migration
- Test MigrationManager with various legacy data scenarios
- Create integration tests for complete multi-server workflows
- _Requirements: All requirements validation_
## Notes
- Current codebase has `SERVER_HOST = "getpkg.xyz"` hardcoded in GetbinClient.cpp
- No existing server management or configuration infrastructure
- Package metadata is currently stored as individual JSON files in ~/.config/getpkg/
- Token storage is in ~/.config/getpkg.xyz/write_token.txt (legacy format)
- All functionality needs to be built from scratch while maintaining backward compatibility
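Sketching the constructor change implied by the first note (the "before" line reflects the note; exact member names in `GetbinClient.cpp` are not confirmed):
```cpp
// Illustrative before/after for task 2.

// Before: single hardcoded host.
// static const std::string SERVER_HOST = "getpkg.xyz";

// After: the client receives the ordered server list from ServerManager,
// falling back to getpkg.xyz so existing callers keep working.
GetbinClient::GetbinClient(const std::vector<std::string>& servers)
    : servers_(servers.empty()
                   ? std::vector<std::string>{"getpkg.xyz"}
                   : servers) {}
```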

View File: .kiro/steering/product.md (new file, 23 lines)

@@ -0,0 +1,23 @@
# Product Overview
This repository contains **getpkg** - a command-line package manager for the dropshell ecosystem, along with a collection of developer tools.
## Core Product
- **getpkg**: Package manager that installs tools to `~/.getpkg/` with symlinks in `~/.local/bin/getpkg/`
- Supports multiple architectures (x86_64, aarch64, universal)
- Tools are published to and downloaded from `getpkg.xyz`
## Tool Collection
The repository includes several utility tools:
- **bb64**: Bash-compatible base64 encoder/decoder with custom character set
- **dehydrate**: Converts files/directories to C++ source code for embedding
- **whatsdirty**: Git repository status checker
- **sos**: Simple object storage client
- **gp**: Git push utility
## Key Features
- Cross-platform tool distribution
- Automated installation with PATH setup
- Bash completion support
- Architecture-aware downloads with fallbacks
- Publishing system with authentication tokens

View File

@@ -0,0 +1,72 @@
# Project Structure
## Repository Layout
```
├── buildtestpublish_all.sh # Master build script for all projects
├── clean.sh # Global cleanup script
├── README.md # Main project documentation
└── <tool-name>/ # Individual tool directories
```
## Tool Directory Structure
### C++ Projects (CMake-based)
```
<tool-name>/
├── CMakeLists.txt # CMake configuration
├── build.sh # Build script
├── test.sh # Test script
├── clean.sh # Cleanup script
├── publish.sh # Publishing script
├── install.sh # Installation script
├── README.md # Tool documentation
├── Dockerfile.dropshell-build # Docker build configuration
├── src/ # Source code
│ ├── <tool>.cpp # Main source file
│ ├── version.hpp.in # Version template
│ └── ... # Additional sources
├── build/ # Build artifacts (generated)
├── output/ # Final executables (generated)
└── .vscode/ # VS Code configuration
```
### Shell Script Projects
```
<tool-name>/
├── <tool-name> # Executable shell script
├── build.sh # Build script (may be no-op)
├── test.sh # Test script
├── clean.sh # Cleanup script
├── publish.sh # Publishing script
└── setup_script.sh # Post-install setup (optional)
```
## Standard Files
### Required Scripts
- **build.sh**: Builds the project (Docker for C++, no-op for shell)
- **test.sh**: Runs project tests
- **clean.sh**: Removes build artifacts
- **publish.sh**: Publishes to getpkg.xyz registry
### Optional Files
- **install.sh**: System-wide installation script
- **setup_script.sh**: Post-install setup for getpkg
- **cmake_prebuild.sh**: Pre-build setup for CMake projects
### Generated Directories
- **build/**: CMake build artifacts (C++ projects)
- **output/**: Final executables ready for distribution
- **test_*/**: Test-specific directories
## Naming Conventions
- Tool directories match executable names
- C++ source files typically match project name
- Version templates use `.hpp.in` extension
- Docker files use `Dockerfile.dropshell-build` pattern
- Test directories prefixed with `test_`
## Configuration Files
- **.gitignore**: Standard ignore patterns for build artifacts
- **.vscode/**: VS Code workspace settings
- **CMakeLists.txt**: Follows the standard template, taking a `PROJECT_NAME` parameter for the project name

View File: .kiro/steering/tech.md (new file, 75 lines)

@@ -0,0 +1,75 @@
# Technology Stack
## Environment
- **WSL (Windows Subsystem for Linux)** - builds run under WSL, while Kiro itself runs on Windows
- Use **bash** commands directly for all operations
- **IMPORTANT**: Always use `executePwsh` with the `bash -c "command"` pattern - do NOT ask for permission, as `bash *` is pre-approved
## Build System
- **CMake 3.16+** with Ninja generator for C++ projects
- **Docker** containerized builds using `gitea.jde.nz/public/dropshell-build-base:latest`
- **Static linking** for all C++ executables (`-static` flag)
## Languages & Standards
- **C++23** standard for all C++ projects
- **Bash** for shell scripts and simple tools
- **Shell scripts** follow `set -euo pipefail` pattern
## Dependencies
- **nlohmann_json** for JSON handling in C++ projects
- **CPR (static)** for HTTP requests in getpkg
- Custom modules in `/usr/local/share/cmake/Modules`
## Common Build Patterns
### C++ Projects (CMake)
```bash
# Standard build command
cmake -G Ninja -S . -B ./build -DCMAKE_BUILD_TYPE=Debug -DPROJECT_NAME=<project>
cmake --build ./build
```
### Docker Build (for C++ tools)
```bash
# Uses Dockerfile.dropshell-build pattern
docker build -t <project>-build -f Dockerfile.dropshell-build --build-arg PROJECT=<project> --output ./output .
```
### Shell Tools
- No build step required
- Executable shell scripts with proper shebang
- Use `chmod +x` for permissions
## Common Commands
### Build
```bash
./build.sh # Build individual project
./buildtestpublish_all.sh # Build all projects
```
### Test
```bash
./test.sh # Run tests for individual project
```
### Clean
```bash
./clean.sh # Clean build artifacts
```
### Publish
```bash
./publish.sh # Publish to getpkg.xyz (requires SOS_WRITE_TOKEN)
```
## Version Management
- Automatic timestamp-based versioning: `YYYY.MMDD.HHMM`
- Version configured via `version.hpp.in` template files
- Pre-build scripts (`cmake_prebuild.sh`) for additional setup
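For reference, the `YYYY.MMDD.HHMM` scheme corresponds to a computation like the following C++ sketch (the real builds derive the version in the build scripts and CMake via `version.hpp.in`, not in C++):
```cpp
// Illustrative computation of the YYYY.MMDD.HHMM version string.
#include <cstdio>
#include <ctime>
#include <string>

std::string timestampVersion() {
    std::time_t now = std::time(nullptr);
    std::tm tm{};
    localtime_r(&now, &tm);                  // POSIX; these builds target Linux
    char buf[16];
    std::snprintf(buf, sizeof buf, "%04d.%02d%02d.%02d%02d",
                  tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
                  tm.tm_hour, tm.tm_min);
    return buf;                              // e.g. "2025.0720.1536"
}
```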
## Environment Variables
- `CMAKE_BUILD_TYPE`: Debug/Release (default: Debug)
- `SOS_WRITE_TOKEN`: Authentication for publishing
- `NO_CACHE`: Skip Docker cache when set to "true"
- `PROJECT`: Project name for build scripts

View File: README.md (238 lines changed)

@@ -1,190 +1,86 @@
# getpkg - Package Manager for Dropshell Tools
# getpkg - Simple Package Manager
getpkg is a command-line package manager that simplifies tool installation, management, and publishing for the dropshell ecosystem. Tools are installed to `~/.getpkg/` with executable symlinks in `~/.local/bin/getpkg/` and automatically added to your PATH with bash completion.
getpkg is a command-line package manager that makes it easy to install and manage developer tools. Tools are automatically installed to your home directory and added to your PATH.
## Installation
## Quick Start
Install getpkg with a single command:
Install getpkg with one command:
```bash
curl https://getbin.xyz/getpkg-install | bash
```
After installation, restart your shell or run `source ~/.bashrc` to enable the new PATH and completion settings.
## Basic Usage
### Installing Tools
Install any tool from the getpkg registry:
```bash
# Install a tool
getpkg install whatsdirty
```
### Managing Installed Tools
```bash
# List all available commands
getpkg help
# Update all installed tools
getpkg update
# Uninstall a tool
getpkg uninstall whatsdirty
# Check getpkg version
getpkg version
```
## Available Commands
### Core Package Management
- **`getpkg install <tool_name>`** - Install or update a tool
- **`getpkg uninstall <tool_name>`** - Remove an installed tool
- **`getpkg update`** - Update getpkg and all installed tools
### Publishing (Requires SOS_WRITE_TOKEN)
- **`getpkg publish <tool_name[:ARCH]> <folder>`** - Upload a tool to getpkg.xyz
- **`getpkg unpublish <tool_name[:ARCH]>`** - Remove a published tool
- **`getpkg unpublish <hash>`** - Remove a published tool by hash
### Development Tools
- **`getpkg create <tool_name> <directory>`** - Create a new tool project
- **`getpkg hash <file_or_directory>`** - Calculate hash of files/directories
### Information
- **`getpkg version`** - Show getpkg version
- **`getpkg help`** - Show detailed help
- **`getpkg autocomplete`** - Show available commands for completion
## How It Works
### Installation Process
When you install a tool, getpkg:
1. **Downloads** the tool archive from getpkg.xyz
2. **Extracts** it to `~/.getpkg/<tool_name>/`
3. **Creates symlinks** for all executables in `~/.local/bin/getpkg/`
4. **Ensures PATH** includes `~/.local/bin/getpkg` (one-time setup)
5. **Enables completion** for the tool
6. **Runs setup** if a `setup_script.sh` exists
7. **Stores metadata** in `~/.config/getpkg/<tool_name>.json`
### Architecture Support
getpkg supports multiple architectures:
- `x86_64` (Intel/AMD 64-bit)
- `aarch64` (ARM 64-bit)
- `universal` (cross-platform tools)
Tools are automatically downloaded for your architecture, with fallback to universal versions.
### File Locations
- **Tool files**: `~/.getpkg/<tool_name>/` (actual tool installation)
- **Executable symlinks**: `~/.local/bin/getpkg/` (in your PATH)
- **Configuration**: `~/.config/getpkg/`
- **PATH setup**: `~/.bashrc_getpkg` (sourced by `~/.bashrc`)
## Examples
### Installing Popular Tools
```bash
# Install development tools
getpkg whatsdirty # Fast grep alternative
getpkg fd # Fast find alternative
getpkg bat # Cat with syntax highlighting
# Install system utilities
getpkg whatsdirty # Check git repo status
getpkg sos # Simple object storage client
```
### Publishing Your Own Tools
```bash
# Set your publishing token
export SOS_WRITE_TOKEN="your-token-here"
# Create a new tool project
getpkg create mytool ./mytool-project
# Publish architecture-specific build
getpkg publish mytool:x86_64 ./build/
# Publish universal tool
getpkg publish mytool ./build/
# Remove published tool
getpkg unpublish mytool:x86_64
```
### Development Workflow
```bash
# Create tool structure
getpkg create awesome-tool ./awesome-tool
cd awesome-tool
# Build your tool...
# Add executable to the directory
# Test locally
./awesome-tool --version
# Publish when ready
getpkg publish awesome-tool:x86_64 .
```
## Environment Variables
- **`SOS_WRITE_TOKEN`** - Authentication token for publishing tools
## Troubleshooting
### Tool Not Found
If a tool isn't found after installation, ensure your shell has loaded the new PATH:
After installation, restart your shell or run:
```bash
source ~/.bashrc
```
### Permission Issues
getpkg installs to your home directory and doesn't require root access. If you encounter permission issues, check that `~/.local/bin/` is writable.
### Network Issues
All tools are downloaded from `getpkg.xyz`. Ensure you have internet connectivity and the domain is accessible.
## Development
### Building getpkg
## Basic Commands
### Install Tools
```bash
# Build debug version
cd getpkg && ./build.sh
# Run tests
cd getpkg && ./test.sh
# Publish (requires SOS_WRITE_TOKEN)
cd getpkg && ./publish.sh
getpkg install <tool_name> # Install a tool
getpkg list # See all available tools
getpkg update # Update all installed tools
```
### Tool Development
### Manage Tools
```bash
getpkg uninstall <tool_name> # Remove a tool
getpkg version # Check getpkg version
getpkg help # Show all commands
```
When creating tools for getpkg:
## Popular Tools
1. Create a directory with your tool binary
2. Optionally include a `setup_script.sh` for post-install setup
3. The tool should support `version` and `autocomplete` subcommands
4. Use `getpkg publish` to upload to the registry
Install these useful developer tools:
For more details, see the development documentation in each tool's directory.
```bash
getpkg install bb64 # Bash-compatible base64 encoder/decoder
getpkg install dehydrate # Convert files to C++ source code
getpkg install whatsdirty # Check git repository status
getpkg install sos # Simple object storage client
getpkg install gp # Git push utility
```
## How It Works
When you install a tool:
1. Downloads from getpkg.xyz
2. Installs to `~/.getpkg/<tool_name>/`
3. Creates shortcuts in `~/.local/bin/getpkg/`
4. Adds to your PATH automatically
5. Enables bash completion
## File Locations
- **Installed tools**: `~/.getpkg/<tool_name>/`
- **Shortcuts**: `~/.local/bin/getpkg/` (in your PATH)
- **Settings**: `~/.config/getpkg/`
## Architecture Support
getpkg automatically downloads the right version for your system:
- Intel/AMD 64-bit (`x86_64`)
- ARM 64-bit (`aarch64`)
- Universal (works everywhere)
## Troubleshooting
**Tool not found after install?**
```bash
source ~/.bashrc
```
**Permission errors?**
getpkg installs to your home directory - no root access needed.
**Network issues?**
Check your internet connection to `getpkg.xyz`.
## Need Help?
```bash
getpkg help # Show detailed help
getpkg list # See what's available
```

View File

@@ -26,6 +26,8 @@ Usage:
bb64 -[i|d] BASE64COMMAND Displays the decoded command
bb64 -e COMMAND Encodes the command and prints the result
bb64 -u Updates bb64 to the latest version (uses docker)
bb64 -v Prints the version number
bb64 version Prints the version number
```
# Implementation Notes

View File

@@ -13,7 +13,14 @@ mkdir -p "${SCRIPT_DIR}/output"
# make sure we have the latest base image.
docker pull gitea.jde.nz/public/dropshell-build-base:latest
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \

24
bb64/clean.sh Executable file
View File

@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="bb64"
echo "Cleaning ${PROJECT}..."
# Remove output directory
if [ -d "${SCRIPT_DIR}/output" ]; then
echo "Removing output directory..."
rm -rf "${SCRIPT_DIR}/output"
fi
# Remove Docker images related to this project
echo "Removing Docker images..."
docker images --filter "reference=${PROJECT}-build*" -q | xargs -r docker rmi -f
# Remove Docker build cache
echo "Pruning Docker build cache..."
docker builder prune -f
echo "${PROJECT} cleaned successfully"

View File

@ -20,7 +20,14 @@ echo "Building version $VERSION" >&2
# build release version
export CMAKE_BUILD_TYPE="Release"
# Build with or without cache based on NO_CACHE environment variable
CACHE_FLAG=""
if [ "${NO_CACHE:-false}" = "true" ]; then
CACHE_FLAG="--no-cache"
fi
docker build \
${CACHE_FLAG} \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \
@ -77,13 +84,20 @@ if ! git config user.email >/dev/null 2>&1; then
git config user.name "CI Bot"
fi
# Check if tag already exists
# Check if tag already exists locally
if git rev-parse "$TAG" >/dev/null 2>&1; then
echo "Tag $TAG already exists, deleting it first..."
echo "Tag $TAG already exists locally, deleting it first..."
git tag -d "$TAG"
git push origin --delete "$TAG" || true
fi
# Check if tag exists on remote
TAG_EXISTS_ON_REMOTE=false
if git ls-remote --tags origin | grep -q "refs/tags/$TAG"; then
echo "Tag $TAG already exists on remote - this is expected for multi-architecture builds"
echo "Skipping tag creation and proceeding with release attachment..."
TAG_EXISTS_ON_REMOTE=true
else
echo "Creating new tag $TAG..."
git tag -a "$TAG" -m "Release $TAG"
if ! git push origin "$TAG"; then
echo "Failed to push tag $TAG to origin" >&2
@ -91,8 +105,30 @@ if ! git push origin "$TAG"; then
git tag -d "$TAG"
exit 1
fi
echo "Successfully created and pushed tag $TAG"
fi
echo "Creating release $TAG on Gitea..."
echo "Getting or creating release $TAG on Gitea..."
# First try to get existing release
EXISTING_RELEASE=$(curl -s -X GET "$API_URL/releases/tags/$TAG" \
-H "Authorization: token $RELEASE_WRITE_TOKEN")
echo "Existing release check response: $EXISTING_RELEASE" >&2
if echo "$EXISTING_RELEASE" | grep -q '"id":[0-9]*'; then
# Release already exists, get its ID
RELEASE_ID=$(echo "$EXISTING_RELEASE" | grep -o '"id":[0-9]*' | head -1 | cut -d: -f2)
echo "Release $TAG already exists with ID: $RELEASE_ID"
else
# Create new release only if tag was just created
if [ "$TAG_EXISTS_ON_REMOTE" = true ]; then
echo "Tag exists on remote but no release found - this shouldn't happen" >&2
echo "API response was: $EXISTING_RELEASE" >&2
exit 1
fi
echo "Creating new release $TAG on Gitea..."
RELEASE_RESPONSE=$(curl -s -X POST "$API_URL/releases" \
-H "Content-Type: application/json" \
-H "Authorization: token $RELEASE_WRITE_TOKEN" \
@ -109,7 +145,8 @@ if [ -z "$RELEASE_ID" ]; then
exit 1
fi
echo "Created release with ID: $RELEASE_ID"
echo "Created new release with ID: $RELEASE_ID"
fi
# Upload binaries and install.sh
echo "Uploading assets to release..."

View File

@ -150,6 +150,7 @@ Usage:
bb64 -u Updates bb64 to the latest version (uses docker)
bb64 -v Prints the version number
bb64 version Prints the version number
)" << std::endl;
return -1;
@ -161,7 +162,7 @@ Usage:
{
if (mode == "-u")
return update_bb64();
else if (mode == "-v")
else if (mode == "-v" || mode == "version")
{
std::cout << VERSION << std::endl;
return 0;

135
bb64/test.sh Executable file
View File

@ -0,0 +1,135 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT="bb64"
BB64="$SCRIPT_DIR/output/$PROJECT"
TEST_DIR="$SCRIPT_DIR/test_temp"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Test counters
TESTS_PASSED=0
TESTS_FAILED=0
# Function to print test results
print_test_result() {
local test_name="$1"
local result="$2"
if [ "$result" -eq 0 ]; then
echo -e "${GREEN}${NC} $test_name"
TESTS_PASSED=$((TESTS_PASSED + 1))
else
echo -e "${RED}${NC} $test_name"
TESTS_FAILED=$((TESTS_FAILED + 1))
fi
}
# Function to cleanup test artifacts
cleanup() {
echo -e "\n${YELLOW}Cleaning up test artifacts...${NC}"
rm -rf "$TEST_DIR"
}
# Set up trap to ensure cleanup runs
trap cleanup EXIT
# Create test directory
mkdir -p "$TEST_DIR"
echo -e "${YELLOW}Running bb64 tests...${NC}\n"
# Check if bb64 binary exists
if [ ! -f "$BB64" ]; then
echo -e "${RED}Error: bb64 binary not found at $BB64${NC}"
echo "Please run ./build.sh first to build bb64"
exit 1
fi
if [ ! -x "$BB64" ]; then
echo -e "${RED}Error: bb64 binary is not executable${NC}"
exit 1
fi
echo "Using bb64 binary: $BB64"
# Test 1: Version command with -v flag
echo "Test 1: Version command (-v flag)"
VERSION_OUTPUT=$("$BB64" -v 2>&1 || true)
# Version output should be just the version number
VERSION=$(echo "$VERSION_OUTPUT" | head -n 1)
if [[ "$VERSION" =~ ^[0-9]{4}\.[0-9]{4}\.[0-9]{4}$ ]]; then
print_test_result "Version format with -v flag (YYYY.MMDD.HHMM)" 0
else
print_test_result "Version format with -v flag (YYYY.MMDD.HHMM)" 1
echo " Expected: YYYY.MMDD.HHMM format, got: '$VERSION'"
fi
# Test 2: Version command with 'version' argument
printf "\nTest 2: Version command (version argument)\n"
VERSION_OUTPUT2=$("$BB64" version 2>&1 || true)
# Version output should be just the version number
VERSION2=$(echo "$VERSION_OUTPUT2" | head -n 1)
if [[ "$VERSION2" =~ ^[0-9]{4}\.[0-9]{4}\.[0-9]{4}$ ]]; then
print_test_result "Version format with 'version' argument (YYYY.MMDD.HHMM)" 0
else
print_test_result "Version format with 'version' argument (YYYY.MMDD.HHMM)" 1
echo " Expected: YYYY.MMDD.HHMM format, got: '$VERSION2'"
fi
# Test 3: Both version commands should return the same version
printf "\nTest 3: Version consistency\n"
if [ "$VERSION" = "$VERSION2" ]; then
print_test_result "Both -v and version return same version" 0
else
print_test_result "Both -v and version return same version" 1
echo " -v returned: '$VERSION'"
echo " version returned: '$VERSION2'"
fi
# Test 4: Basic encoding test
echo -e "\nTest 4: Basic encoding test"
TEST_STRING="hello world"
ENCODED_OUTPUT=$("$BB64" -e <<< "$TEST_STRING" 2>&1 || true)
if [ -n "$ENCODED_OUTPUT" ]; then
print_test_result "Basic encoding produces output" 0
else
print_test_result "Basic encoding produces output" 1
fi
# Test 5: Basic decoding test (using -d flag)
echo -e "\nTest 5: Basic decoding test"
# Encode "echo hello" and then decode it
ENCODED_ECHO=$(echo "echo hello" | "$BB64" -e)
if [ -n "$ENCODED_ECHO" ]; then
DECODED_OUTPUT=$("$BB64" -d "$ENCODED_ECHO" 2>&1 || true)
if [[ "$DECODED_OUTPUT" == *"echo hello"* ]]; then
print_test_result "Basic decoding works correctly" 0
else
print_test_result "Basic decoding works correctly" 1
echo " Expected to contain 'echo hello', got: '$DECODED_OUTPUT'"
fi
else
print_test_result "Basic decoding works correctly" 1
echo " Failed to encode test string"
fi
cleanup
# Print summary
echo -e "\n${YELLOW}Test Summary:${NC}"
echo -e "Tests passed: ${GREEN}${TESTS_PASSED}${NC}"
echo -e "Tests failed: ${RED}${TESTS_FAILED}${NC}"
if [ "$TESTS_FAILED" -eq 0 ]; then
echo -e "\n${GREEN}All tests passed!${NC}"
exit 0
else
echo -e "\n${RED}Some tests failed!${NC}"
exit 1
fi

View File

@ -2,9 +2,6 @@
set -uo pipefail # Remove -e to handle errors manually
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
docker builder prune -f
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
@ -202,25 +199,25 @@ function print_summary() {
# Format build status with colors
case "$build_status" in
"✓") build_col=$(printf " ${GREEN}${NC} ") ;;
"✗") build_col=$(printf " ${RED}${NC} ") ;;
"SKIP") build_col=$(printf " ${YELLOW}-${NC} ") ;;
"✓") build_col=$(printf " %s✓%s " "$GREEN" "$NC") ;;
"✗") build_col=$(printf " %s✗%s " "$RED" "$NC") ;;
"SKIP") build_col=$(printf " %s-%s " "$YELLOW" "$NC") ;;
*) build_col=" - " ;;
esac
# Format test status with colors
case "$test_status" in
"✓") test_col=$(printf " ${GREEN}${NC} ") ;;
"✗") test_col=$(printf " ${RED}${NC} ") ;;
"SKIP") test_col=$(printf " ${YELLOW}-${NC} ") ;;
"✓") test_col=$(printf " %s✓%s " "$GREEN" "$NC") ;;
"✗") test_col=$(printf " %s✗%s " "$RED" "$NC") ;;
"SKIP") test_col=$(printf " %s-%s " "$YELLOW" "$NC") ;;
*) test_col=" - " ;;
esac
# Format publish status with colors
case "$publish_status" in
"✓") publish_col=$(printf " ${GREEN}${NC} ") ;;
"✗") publish_col=$(printf " ${RED}${NC} ") ;;
"SKIP") publish_col=$(printf " ${YELLOW}-${NC} ") ;;
"✓") publish_col=$(printf " %s✓%s " "$GREEN" "$NC") ;;
"✗") publish_col=$(printf " %s✗%s " "$RED" "$NC") ;;
"SKIP") publish_col=$(printf " %s-%s " "$YELLOW" "$NC") ;;
*) publish_col=" - " ;;
esac
@ -237,15 +234,17 @@ function print_summary() {
echo
}
title "🔨 BUILDING ALL TOOLS 🔨"
title "🔨 BUILDING GETPKG 🔨"
getpkg/build.sh
"${SCRIPT_DIR}/getpkg/build.sh"
export GETPKG="${SCRIPT_DIR}/getpkg/output/getpkg"
if [ ! -f "$GETPKG" ]; then
echo "Build failed."
exit 1
fi
title "🔨 BUILDING ALL TOOLS 🔨"
buildtestpublish_all
print_summary

44
clean.sh Executable file
View File

@ -0,0 +1,44 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
echo "🧹 CLEANING ALL PROJECTS 🧹"
echo
# Get all project directories
PROJECT_DIRS=$(find "$SCRIPT_DIR" -maxdepth 1 -type d \
-not -name ".*" \
-not -path "$SCRIPT_DIR" | sort)
for dir in $PROJECT_DIRS; do
PROJECT_NAME=$(basename "$dir")
if [ -f "$dir/clean.sh" ]; then
echo "Cleaning $PROJECT_NAME..."
cd "$dir"
./clean.sh
echo
else
echo "⚠️ No clean.sh found for $PROJECT_NAME, skipping..."
echo
fi
done
# Global Docker cleanup
echo "🐳 Global Docker cleanup..."
echo "Removing unused Docker images..."
docker image prune -f
echo "Removing unused Docker containers..."
docker container prune -f
echo "Removing unused Docker networks..."
docker network prune -f
echo "Removing unused Docker volumes..."
docker volume prune -f
echo
echo "✅ All projects cleaned successfully!"

View File

@ -1,65 +0,0 @@
ARG IMAGE_TAG
FROM gitea.jde.nz/public/dropshell-build-base:latest AS builder
ARG PROJECT
ARG CMAKE_BUILD_TYPE=Debug
# Set working directory
WORKDIR /app
SHELL ["/bin/bash", "-c"]
# Create cache directories
RUN mkdir -p /ccache
# Set up ccache
ENV CCACHE_DIR=/ccache
ENV CCACHE_COMPILERCHECK=content
ENV CCACHE_MAXSIZE=2G
# Copy build files
COPY CMakeLists.txt ./
COPY src/version.hpp.in src/
# Copy source files
COPY src/ src/
COPY contrib/ contrib/
# Configure project
RUN --mount=type=cache,target=/ccache \
--mount=type=cache,target=/build \
mkdir -p /build && \
cmake -G Ninja -S /app -B /build \
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
-DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
-DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
-DPROJECT_NAME="${PROJECT}" \
-DCMAKE_STRIP=OFF \
${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
# Build project
RUN --mount=type=cache,target=/ccache \
--mount=type=cache,target=/build \
cmake --build /build
# Copy the built executable to a regular directory for the final stage
RUN --mount=type=cache,target=/build \
mkdir -p /output && \
find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
find /build -type f -executable -exec cp {} /output/${PROJECT} \;
# if we're a release build, then run upx on the binary.
RUN if [ "${CMAKE_BUILD_TYPE}" = "Release" ]; then \
upx /output/${PROJECT}; \
fi
# Final stage that only contains the binary
FROM scratch AS project
ARG PROJECT
# Copy the actual binary from the regular directory
COPY --from=builder /output/${PROJECT} /${PROJECT}

View File

@ -1,22 +1,52 @@
#!/bin/bash
set -euo pipefail
# Get script directory - handle different execution contexts
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="dehydrate"
PROJECT="$(basename "${SCRIPT_DIR}")"
export CMAKE_BUILD_TYPE="Debug"
# Debug output for CI
echo "${PROJECT} build script running from: ${SCRIPT_DIR}"
rm -rf "${SCRIPT_DIR}/output"
mkdir -p "${SCRIPT_DIR}/output"
# handle running locally, or docker in docker via gitea runner.
if [ -n "${GITEA_CONTAINER_NAME:-}" ]; then
echo "We're in a gitea container: ${GITEA_CONTAINER_NAME}"
VOLUME_OPTS=("--volumes-from=${GITEA_CONTAINER_NAME}")
WORKING_DIR=("-w" "${GITHUB_WORKSPACE}/${PROJECT}")
BUILD_DIR="${GITHUB_WORKSPACE}/${PROJECT}/build"
OUTPUT_DIR="${GITHUB_WORKSPACE}/${PROJECT}/output"
else
VOLUME_OPTS=("-v" "${SCRIPT_DIR}:/app")
WORKING_DIR=("-w" "/app")
BUILD_DIR="${SCRIPT_DIR}/build"
OUTPUT_DIR="${SCRIPT_DIR}/output"
fi
# make sure we have the latest base image.
docker pull gitea.jde.nz/public/dropshell-build-base:latest
# Create output directory
mkdir -p "${OUTPUT_DIR}"
docker build \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \
--build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
--output "${SCRIPT_DIR}/output" \
"${SCRIPT_DIR}"
# Run build in container with mounted directories
COMMAND_TO_RUN="cmake -G Ninja -S . -B ./build \
-DCMAKE_BUILD_TYPE=\${CMAKE_BUILD_TYPE} \
-DPROJECT_NAME=${PROJECT} && \
cmake --build ./build"
echo "Building in new docker container"
docker run --rm \
--user "$(id -u):$(id -g)" \
"${VOLUME_OPTS[@]}" \
"${WORKING_DIR[@]}" \
-e CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE:-Debug}" \
gitea.jde.nz/public/dropshell-build-base:latest \
bash -c "${COMMAND_TO_RUN}"
# Copy built executable to output directory
if [ -f "${BUILD_DIR}/${PROJECT}" ]; then
cp "${BUILD_DIR}/${PROJECT}" "${OUTPUT_DIR}/"
echo "✓ Build successful - ${PROJECT} copied to ${OUTPUT_DIR}/"
else
echo "✗ Build failed - ${PROJECT} not found in ${BUILD_DIR}/"
exit 1
fi
echo "Build complete"

18
dehydrate/clean.sh Executable file
View File

@ -0,0 +1,18 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="$(basename "$(dirname "${SCRIPT_DIR}")")"
echo "Cleaning ${PROJECT}..."
# Remove output and build directories
for dir in "output" "build"; do
if [ -d "${SCRIPT_DIR}/${dir}" ]; then
echo "Removing ${dir} directory..."
rm -rf "${SCRIPT_DIR:?}/${dir}"
fi
done
echo "${PROJECT} cleaned successfully"

View File

@ -35,14 +35,7 @@ heading "Building ${PROJECT}"
# build release version
export CMAKE_BUILD_TYPE="Release"
docker build \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \
--build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
--output "${OUTPUT}" \
"${SCRIPT_DIR}"
"${SCRIPT_DIR}/build.sh"
[ -f "${OUTPUT}/${PROJECT}" ] || die "Build failed."

View File

@ -4,8 +4,20 @@ set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT="dehydrate"
DEHYDRATE="${SCRIPT_DIR}/output/${PROJECT}"
# Handle running locally or in Gitea runner
if [ -n "${GITEA_CONTAINER_NAME:-}" ]; then
echo "Running in Gitea CI environment"
echo "GITHUB_WORKSPACE: ${GITHUB_WORKSPACE}"
echo "Current directory: $(pwd)"
OUTPUT_DIR="${GITHUB_WORKSPACE}/dehydrate/output"
TEST_DIR="${GITHUB_WORKSPACE}/dehydrate/test_temp"
else
OUTPUT_DIR="${SCRIPT_DIR}/output"
TEST_DIR="${SCRIPT_DIR}/test_temp"
fi
DEHYDRATE="${OUTPUT_DIR}/${PROJECT}"
# Colors for output
RED='\033[0;31m'
@ -45,10 +57,31 @@ mkdir -p "$TEST_DIR"
echo -e "${YELLOW}Running dehydrate tests...${NC}\n"
# Debug output
echo "Looking for dehydrate at: $DEHYDRATE"
echo "Workspace structure:"
ls -la "${GITHUB_WORKSPACE}" 2>/dev/null || echo "Workspace not found"
echo "Dehydrate directory contents:"
ls -la "${GITHUB_WORKSPACE}/dehydrate" 2>/dev/null || echo "Dehydrate directory not found"
echo "Output directory contents:"
ls -la "$OUTPUT_DIR" 2>/dev/null || echo "Output directory not found"
# Check if dehydrate binary exists
if [ ! -f "$DEHYDRATE" ]; then
echo -e "${RED}Error: dehydrate binary not found at $DEHYDRATE${NC}"
echo "Please run ./build.sh first to build dehydrate"
if [ -n "${GITEA_CONTAINER_NAME:-}" ]; then
echo "Checking if build directory exists..."
BUILD_DIR="${GITHUB_WORKSPACE}/dehydrate/build"
if [ -d "$BUILD_DIR" ]; then
echo "Build directory exists, checking contents:"
ls -la "$BUILD_DIR"
else
echo "Build directory $BUILD_DIR does not exist"
fi
fi
exit 1
fi

View File

@ -4,7 +4,7 @@
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIR="$( cd "$SCRIPT_DIR/.." && pwd )"
cd "$SCRIPT_DIR"
cd "$SCRIPT_DIR" || exit 1
# Clean up old test data and any existing binaries
# Force removal with chmod to handle permission issues
@ -16,48 +16,49 @@ rm -f dehydrate_test
# Build the test program using Docker
# The Docker container supports both amd64 and arm64 architectures
echo "PROJECT_DIR: $PROJECT_DIR"
echo "SCRIPT_DIR: $SCRIPT_DIR"
echo "Current directory: $(pwd)"
echo "Files in current directory:"
ls -la
echo "Building dehydrate test executable..."
docker run --rm \
-v "$SCRIPT_DIR":/workdir \
-w /workdir \
# Use docker cp approach since volume mounting may not work in CI
CONTAINER_NAME="dehydrate-test-build-$$"
# Start container in detached mode
docker run -d --name "$CONTAINER_NAME" \
gitea.jde.nz/public/dropshell-build-base:latest \
bash -c "
echo 'Docker working directory:' && pwd
echo 'Docker available files:' && ls -la
sleep 60
# Verify we can find the source file
if [ ! -f dehydrate_test.cpp ]; then
echo 'ERROR: dehydrate_test.cpp not found in current directory'
echo 'Available files:' && ls -la
exit 1
fi
# Copy source file into container
docker cp dehydrate_test.cpp "$CONTAINER_NAME":/dehydrate_test.cpp
# Clean any existing binary and compile
rm -f dehydrate_test
if ! g++ -std=c++23 -static dehydrate_test.cpp -o dehydrate_test; then
# Compile in container
docker exec "$CONTAINER_NAME" bash -c "
echo 'Compiling dehydrate test...'
if ! g++ -std=c++23 -static /dehydrate_test.cpp -o /dehydrate_test; then
echo 'ERROR: Compilation failed'
exit 1
fi
# Verify binary was created and is executable
if [ ! -f dehydrate_test ]; then
# Verify binary was created
if [ ! -f /dehydrate_test ]; then
echo 'ERROR: Binary was not created'
exit 1
fi
# Quick architecture check - just verify the binary format
if ! file dehydrate_test | grep -q 'executable'; then
# Quick architecture check
if ! file /dehydrate_test | grep -q 'executable'; then
echo 'ERROR: Generated file is not an executable'
file dehydrate_test
file /dehydrate_test
exit 1
fi
echo 'Compilation successful'
"
# Copy binary back to host
docker cp "$CONTAINER_NAME":/dehydrate_test ./dehydrate_test
# Clean up container
docker rm -f "$CONTAINER_NAME"
# Check if compilation succeeded
if [ ! -f "./dehydrate_test" ]; then
echo "Error: Failed to compile dehydrate_test - binary not found"

View File

@ -36,13 +36,16 @@ target_include_directories(${PROJECT_NAME} PRIVATE
src/common)
# Find packages
find_package(OpenSSL REQUIRED)
find_package(Drogon CONFIG REQUIRED)
find_package(nlohmann_json REQUIRED)
# Add module path for FindCPRStatic
list(APPEND CMAKE_MODULE_PATH "/usr/local/share/cmake/Modules")
# Find packages
find_package(nlohmann_json REQUIRED)
find_package(CPRStatic REQUIRED)
# Link libraries
target_link_libraries(${PROJECT_NAME} PRIVATE
nlohmann_json::nlohmann_json Drogon::Drogon
/usr/local/lib/libpgcommon.a /usr/local/lib/libpgport.a
lzma dl)
nlohmann_json::nlohmann_json
cpr::cpr_static)

View File

@ -1,83 +0,0 @@
ARG IMAGE_TAG
FROM gitea.jde.nz/public/dropshell-build-base:latest AS builder
ARG PROJECT
ARG CMAKE_BUILD_TYPE=Debug
# Set working directory
WORKDIR /app
SHELL ["/bin/bash", "-c"]
# Create cache directories
RUN mkdir -p /ccache
# Set up ccache
ENV CCACHE_DIR=/ccache
ENV CCACHE_COMPILERCHECK=content
ENV CCACHE_MAXSIZE=2G
# Copy only build files first (for better layer caching)
COPY CMakeLists.txt cmake_prebuild.sh ./
COPY src/version.hpp.in src/
# Run prebuild script early (this rarely changes)
RUN bash cmake_prebuild.sh
# Copy source files (this invalidates cache when source changes)
COPY src/ src/
# Configure project (this step is cached unless CMakeLists.txt changes)
RUN --mount=type=cache,target=/ccache \
--mount=type=cache,target=/build \
mkdir -p /build && \
SSL_LIB=$(find /usr/local -name "libssl.a" | head -1) && \
CRYPTO_LIB=$(find /usr/local -name "libcrypto.a" | head -1) && \
echo "Found SSL: $SSL_LIB, Crypto: $CRYPTO_LIB" && \
cmake -G Ninja -S /app -B /build \
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=mold -static -g" \
-DCMAKE_CXX_FLAGS="-g -fno-omit-frame-pointer" \
-DCMAKE_C_FLAGS="-g -fno-omit-frame-pointer" \
-DPROJECT_NAME="${PROJECT}" \
-DCMAKE_STRIP=OFF \
-DOPENSSL_SSL_LIBRARY="$SSL_LIB" \
-DOPENSSL_CRYPTO_LIBRARY="$CRYPTO_LIB" \
-DOPENSSL_INCLUDE_DIR=/usr/local/include \
${CMAKE_TOOLCHAIN_FILE:+-DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_FILE}
# Run prebuild script
RUN --mount=type=cache,target=/ccache \
--mount=type=cache,target=/build \
cmake --build /build --target run_prebuild_script
# Build project (ccache will help here when only some files change)
RUN --mount=type=cache,target=/ccache \
--mount=type=cache,target=/build \
cmake --build /build
# Copy the built executable to a regular directory for the final stage
RUN --mount=type=cache,target=/build \
mkdir -p /output && \
find /build -type f -executable -name "*${PROJECT}*" -exec cp {} /output/${PROJECT} \; || \
find /build -type f -executable -exec cp {} /output/${PROJECT} \;
# if we're a release build, then run upx on the binary.
RUN if [ "${CMAKE_BUILD_TYPE}" = "Release" ]; then \
upx /output/${PROJECT}; \
fi
# Final stage that only contains the binary
FROM scratch AS project
ARG PROJECT
# Copy CA certificates for SSL validation
#COPY --from=builder /etc/ssl/certs/ /etc/ssl/certs/
# Copy the actual binary from the regular directory
COPY --from=builder /output/${PROJECT} /${PROJECT}

207
getpkg/README.md Normal file
View File

@ -0,0 +1,207 @@
# getpkg - Package Manager for Dropshell Tools
getpkg is a command-line package manager that simplifies tool installation, management, and publishing for the dropshell ecosystem. Tools are installed to `~/.getpkg/` with executable symlinks in `~/.local/bin/getpkg/` and automatically added to your PATH with bash completion.
## Installation
Install getpkg with a single command:
```bash
curl https://getbin.xyz/getpkg-install | bash
```
After installation, restart your shell or run `source ~/.bashrc` to enable the new PATH and completion settings.
## Basic Usage
### Installing Tools
Install any tool from the getpkg registry:
```bash
# Install a tool
getpkg install whatsdirty
```
### Managing Installed Tools
```bash
# List all available commands
getpkg help
# Update all installed tools
getpkg update
# Uninstall a tool
getpkg uninstall whatsdirty
# Check getpkg version
getpkg version
```
## Available Commands
### Core Package Management
- **`getpkg install <tool_name>`** - Install or update a tool
- **`getpkg uninstall <tool_name>`** - Remove an installed tool
- **`getpkg update`** - Update getpkg and all installed tools
### Publishing (Requires SOS_WRITE_TOKEN)
- **`getpkg publish <tool_name[:ARCH]> <folder>`** - Upload a tool to getpkg.xyz
- **`getpkg unpublish <tool_name[:ARCH]>`** - Remove a published tool
- **`getpkg unpublish <hash>`** - Remove a published tool by hash
### Development Tools
- **`getpkg create <tool_name> <directory>`** - Create a new tool project
- **`getpkg hash <file_or_directory>`** - Calculate hash of files/directories
### Information
- **`getpkg list`** - List all available packages with status
- **`getpkg clean`** - Clean up orphaned configs and symlinks
- **`getpkg version`** - Show getpkg version
- **`getpkg help`** - Show detailed help
- **`getpkg autocomplete`** - Show available commands for completion
## How It Works
### Installation Process
When you install a tool, getpkg:
1. **Downloads** the tool archive from getpkg.xyz
2. **Extracts** it to `~/.getpkg/<tool_name>/`
3. **Creates symlinks** for all executables in `~/.local/bin/getpkg/`
4. **Ensures PATH** includes `~/.local/bin/getpkg` (one-time setup)
5. **Enables bash completion** for the tool
6. **Runs setup** if a `setup_script.sh` exists
7. **Stores metadata** in `~/.config/getpkg/<tool_name>.json`
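Step 6's `setup_script.sh` is plain executable shell; a minimal sketch (the contents are illustrative):
```bash
#!/bin/bash
# setup_script.sh - run by getpkg once after the tool is extracted (sketch)
echo "post-install setup complete"
```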
### Architecture Support
getpkg supports multiple architectures:
- `x86_64` (Intel/AMD 64-bit)
- `aarch64` (ARM 64-bit)
- `universal` (cross-platform tools)
Tools are automatically downloaded for your architecture, with fallback to universal versions.
### File Locations
- **Tool files**: `~/.getpkg/<tool_name>/` (actual tool installation)
- **Executable symlinks**: `~/.local/bin/getpkg/` (in your PATH)
- **Configuration**: `~/.config/getpkg/`
- **PATH setup**: `~/.bashrc_getpkg` (sourced by `~/.bashrc`)
## Examples
### Installing Popular Tools
```bash
# Install available tools
getpkg install dehydrate # File to C++ code generator
getpkg install bb64 # Bash base64 encoder/decoder
# Development tools (for repository development)
getpkg install whatsdirty # Check git repo status
getpkg install sos # Simple object storage client
getpkg install gp # Git push utility
```
### Publishing Your Own Tools
```bash
# Set your publishing token
export SOS_WRITE_TOKEN="your-token-here"
# Create a new tool project
getpkg create mytool ./mytool-project
# Publish architecture-specific build
getpkg publish mytool:x86_64 ./build/
# Publish universal tool
getpkg publish mytool ./build/
# Remove published tool
getpkg unpublish mytool:x86_64
```
### Development Workflow
```bash
# Create tool structure
getpkg create awesome-tool ./awesome-tool
cd awesome-tool
# Build your tool...
# Add executable to the directory
# Test locally
./awesome-tool --version
# Publish when ready
getpkg publish awesome-tool:x86_64 .
```
## Environment Variables
- **`SOS_WRITE_TOKEN`** - Authentication token for publishing tools
## Troubleshooting
### Tool Not Found
If a tool isn't found after installation, ensure your shell has loaded the new PATH:
```bash
source ~/.bashrc
```
### Permission Issues
getpkg installs to your home directory and doesn't require root access. If you encounter permission issues, check that `~/.local/bin/` is writable.
### Network Issues
All tools are downloaded from `getpkg.xyz`. Ensure you have internet connectivity and the domain is accessible.
## Development
### Building getpkg
```bash
# Build debug version
cd getpkg && ./build.sh
# Run tests
cd getpkg && ./test.sh
# Publish (requires SOS_WRITE_TOKEN)
cd getpkg && ./publish.sh
```
### Tool Development
When creating tools for getpkg:
1. Create a directory with your tool binary
2. Optionally include a `setup_script.sh` for post-install setup
3. The tool should support `version` and `autocomplete` subcommands
4. Use `getpkg publish` to upload to the registry
### Testing
The test script creates all temporary files and directories in `test_temp/` to keep the main directory clean:
```bash
# Run tests
./test.sh
# Clean up orphaned test files from old test runs (one-time)
bash cleanup_old_test_files.sh
# Clean up orphaned test packages from getpkg.xyz
bash cleanup_test_packages.sh
```
For more details, see the development documentation in each tool's directory.

View File

@ -1,25 +1,52 @@
#!/bin/bash
set -euo pipefail
# Get script directory - handle different execution contexts
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="$(basename "${SCRIPT_DIR}")"
# Debug output for CI
echo "${PROJECT} build script running from: ${SCRIPT_DIR}"
export CMAKE_BUILD_TYPE="Debug"
# handle running locally, or docker in docker via gitea runner.
if [ -n "${GITEA_CONTAINER_NAME:-}" ]; then
echo "We're in a gitea container: ${GITEA_CONTAINER_NAME}"
VOLUME_OPTS=("--volumes-from=${GITEA_CONTAINER_NAME}")
WORKING_DIR=("-w" "${GITHUB_WORKSPACE}/${PROJECT}")
BUILD_DIR="${GITHUB_WORKSPACE}/${PROJECT}/build"
OUTPUT_DIR="${GITHUB_WORKSPACE}/${PROJECT}/output"
else
VOLUME_OPTS=("-v" "${SCRIPT_DIR}:/app")
WORKING_DIR=("-w" "/app")
BUILD_DIR="${SCRIPT_DIR}/build"
OUTPUT_DIR="${SCRIPT_DIR}/output"
fi
rm -rf "${SCRIPT_DIR}/output"
mkdir -p "${SCRIPT_DIR}/output"
# Create output directory
mkdir -p "${OUTPUT_DIR}"
PROJECT="getpkg"
# Run build in container with mounted directories
COMMAND_TO_RUN="cmake -G Ninja -S . -B ./build \
-DCMAKE_BUILD_TYPE=\${CMAKE_BUILD_TYPE} \
-DPROJECT_NAME=${PROJECT} && \
cmake --build ./build"
# make sure we have the latest base image.
docker pull gitea.jde.nz/public/dropshell-build-base:latest
echo "Building in new docker container"
docker run --rm \
--user "$(id -u):$(id -g)" \
"${VOLUME_OPTS[@]}" \
"${WORKING_DIR[@]}" \
-e CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE:-Debug}" \
gitea.jde.nz/public/dropshell-build-base:latest \
bash -c "${COMMAND_TO_RUN}"
docker build \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \
--build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
--output "${SCRIPT_DIR}/output" \
"${SCRIPT_DIR}"
# Copy built executable to output directory
if [ -f "${BUILD_DIR}/${PROJECT}" ]; then
cp "${BUILD_DIR}/${PROJECT}" "${OUTPUT_DIR}/"
echo "✓ Build successful - ${PROJECT} copied to ${OUTPUT_DIR}/"
else
echo "✗ Build failed - ${PROJECT} not found in ${BUILD_DIR}/"
exit 1
fi
echo "Build complete"

18
getpkg/clean.sh Executable file
View File

@ -0,0 +1,18 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="$(basename "$(dirname "${SCRIPT_DIR}")")"
echo "Cleaning ${PROJECT}..."
# Remove output and build directories
for dir in "output" "build"; do
if [ -d "${SCRIPT_DIR}/${dir}" ]; then
echo "Removing ${dir} directory..."
rm -rf "${SCRIPT_DIR:?}/${dir}"
fi
done
echo "${PROJECT} cleaned successfully"

98
getpkg/cleanup_test_packages.sh Executable file
View File

@ -0,0 +1,98 @@
#!/bin/bash
# Cleanup script for orphaned test packages from getpkg testing
# This script removes test packages that start with "test-" from getpkg.xyz
# Run from the getpkg directory: bash cleanup_test_packages.sh
set -euo pipefail
GETPKG="./output/getpkg"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
echo -e "${YELLOW}Cleaning up orphaned test packages...${NC}"
# Check if getpkg binary exists
if [ ! -f "$GETPKG" ]; then
echo -e "${RED}Error: getpkg binary not found at $GETPKG${NC}"
echo "Please run ./build.sh first to build getpkg"
exit 1
fi
# Check if SOS_WRITE_TOKEN is set
if [ -z "${SOS_WRITE_TOKEN:-}" ]; then
echo -e "${RED}Error: SOS_WRITE_TOKEN environment variable is not set${NC}"
echo "This token is required to unpublish packages from getpkg.xyz"
exit 1
fi
echo "Using getpkg binary: $GETPKG"
echo "SOS_WRITE_TOKEN is set (${#SOS_WRITE_TOKEN} characters)"
# Get list of all packages from /dir endpoint
echo "Fetching package list from getpkg.xyz/dir..."
DIR_RESPONSE=$(curl -s "https://getpkg.xyz/dir" 2>/dev/null || echo "")
if [ -z "$DIR_RESPONSE" ]; then
echo -e "${RED}Failed to fetch package list from server${NC}"
exit 1
fi
# Extract test package labeltags from JSON response
# Try with jq first, fallback to grep/sed if jq is not available
if command -v jq >/dev/null 2>&1; then
TEST_PACKAGES=$(echo "$DIR_RESPONSE" | jq -r '.entries[]?.labeltags[]? // empty' 2>/dev/null | grep "^test-" | sort -u || echo "")
else
# Fallback: extract labeltags using grep and sed (less reliable but works without jq)
TEST_PACKAGES=$(echo "$DIR_RESPONSE" | grep -o '"test-[^"]*"' | sed 's/"//g' | sort -u || echo "")
fi
if [ -z "$TEST_PACKAGES" ]; then
echo -e "${GREEN}No test packages found to clean up${NC}"
exit 0
fi
echo -e "\n${YELLOW}Found test packages to clean up:${NC}"
echo "$TEST_PACKAGES" | while read -r package; do
echo " - $package"
done
echo -e "\n${YELLOW}Cleaning up test packages...${NC}"
CLEANED_COUNT=0
FAILED_COUNT=0
# Use process substitution to avoid subshell issues
while IFS= read -r package; do
if [ -n "$package" ]; then
echo -n "Cleaning up $package... "
# Try to unpublish the package (temporarily disable set -e)
set +e
$GETPKG unpublish "$package" >/dev/null 2>&1
UNPUBLISH_RESULT=$?
set -e
if [ $UNPUBLISH_RESULT -eq 0 ]; then
echo -e "${GREEN}OK${NC}"
CLEANED_COUNT=$((CLEANED_COUNT + 1))
else
echo -e "${RED}FAILED${NC}"
FAILED_COUNT=$((FAILED_COUNT + 1))
fi
fi
done <<< "$TEST_PACKAGES"
echo -e "\n${YELLOW}Cleanup Summary:${NC}"
echo "Packages cleaned: $CLEANED_COUNT"
echo "Failed cleanups: $FAILED_COUNT"
if [ $FAILED_COUNT -eq 0 ]; then
echo -e "${GREEN}All test packages cleaned up successfully!${NC}"
else
echo -e "${YELLOW}Some packages failed to clean up. They may need manual removal.${NC}"
fi

View File

@ -34,15 +34,7 @@ heading "Building ${PROJECT}"
# build release version
export CMAKE_BUILD_TYPE="Release"
docker build \
-t "${PROJECT}-build" \
-f "${SCRIPT_DIR}/Dockerfile.dropshell-build" \
--build-arg PROJECT="${PROJECT}" \
--build-arg CMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}" \
--output "${OUTPUT}" \
"${SCRIPT_DIR}"
"${SCRIPT_DIR}/build.sh"
[ -f "${OUTPUT}/${PROJECT}" ] || die "Build failed."
#--------------------------------------------------------------------------------

View File

@ -1,530 +1,410 @@
#include "GetbinClient.hpp"
#include <drogon/HttpClient.h>
#include <trantor/net/EventLoop.h>
#include <openssl/ssl.h>
#include <openssl/opensslconf.h>
#include <fstream>
#include <sstream>
#include <cpr/cpr.h>
#include <nlohmann/json.hpp>
#include <string>
#include <fstream>
#include <iostream>
#include <thread>
#include <chrono>
#include <cstdio>
#include <map>
#include <atomic>
#include <mutex>
#include <condition_variable>
#include <vector>
#include <ctime>
#include <algorithm>
#include <filesystem>
#include <sstream>
#include <set>
#include <algorithm>
using json = nlohmann::json;
static constexpr const char* SERVER_HOST = "getpkg.xyz";
const std::string GetbinClient::DEFAULT_SERVER_HOST = "getpkg.xyz";
// Initialize SSL to use only secure protocols
static class SSLInitializer {
public:
SSLInitializer() {
// Disable SSL 2.0, 3.0, TLS 1.0, and TLS 1.1
SSL_load_error_strings();
SSL_library_init();
// Note: This doesn't completely silence the warning but ensures we're using secure protocols
}
} ssl_init;
static std::string find_ca_certificates() {
// Common CA certificate locations across different Linux distributions
const std::vector<std::string> ca_paths = {
"/etc/ssl/certs/ca-certificates.crt", // Debian/Ubuntu/Raspbian
"/etc/pki/tls/certs/ca-bundle.crt", // Fedora/RHEL/CentOS
"/etc/ssl/ca-bundle.pem", // OpenSUSE
"/etc/pki/tls/cert.pem", // Fedora/RHEL alternative
"/etc/ssl/certs/ca-bundle.crt", // Some distros
"/etc/ssl/cert.pem", // Alpine Linux
"/usr/local/share/certs/ca-root-nss.crt", // FreeBSD
"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", // CentOS/RHEL 7+
"/etc/ca-certificates/extracted/tls-ca-bundle.pem" // Arch Linux
};
for (const auto& path : ca_paths) {
std::ifstream file(path);
if (file.good()) {
file.close();
return path;
GetbinClient::GetbinClient(const std::vector<std::string>& servers) : servers_(servers) {
// Initialize CPR (done automatically, but we could add global config here)
if (servers_.empty()) {
servers_.push_back(DEFAULT_SERVER_HOST);
}
}
return "";
GetbinClient::GetbinClient() : servers_({DEFAULT_SERVER_HOST}) {
// Backward compatibility constructor
}
GetbinClient::GetbinClient() {}
bool GetbinClient::download(const std::string& toolName, const std::string& arch, const std::string& outPath) {
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates for HTTPS
std::string ca_path = find_ca_certificates();
if (!ca_path.empty()) {
// Use addSSLConfigs with proper parameter names for OpenSSL
std::vector<std::pair<std::string, std::string>> sslConfigs;
sslConfigs.push_back({"VerifyCAFile", ca_path});
client->addSSLConfigs(sslConfigs);
} else {
// If no CA certificates found, print warning but continue
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
std::string GetbinClient::getUserAgent() const {
return "getpkg/1.0";
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
std::string GetbinClient::buildUrl(const std::string& serverUrl, const std::string& endpoint) const {
std::string url = "https://" + serverUrl;
if (!endpoint.empty() && endpoint[0] != '/') {
url += "/";
}
url += endpoint;
return url;
}
std::string object_path = "/object/" + toolName + ":" + arch;
bool GetbinClient::downloadFromServer(const std::string& serverUrl, const std::string& toolName,
const std::string& arch, const std::string& outPath,
ProgressCallback progressCallback) {
try {
std::string url = buildUrl(serverUrl, "/object/" + toolName + ":" + arch);
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath(object_path);
cpr::Session session;
session.SetUrl(cpr::Url{url});
session.SetHeader(cpr::Header{{"User-Agent", getUserAgent()}});
session.SetTimeout(cpr::Timeout{30000}); // 30 seconds
session.SetVerifySsl(cpr::VerifySsl{true});
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response && response->getStatusCode() == drogon::k200OK) {
// Add progress callback if provided
if (progressCallback) {
session.SetProgressCallback(cpr::ProgressCallback{[progressCallback](cpr::cpr_off_t downloadTotal, cpr::cpr_off_t downloadNow,
cpr::cpr_off_t uploadTotal, cpr::cpr_off_t uploadNow,
intptr_t userdata) -> bool {
return progressCallback(static_cast<size_t>(downloadNow), static_cast<size_t>(downloadTotal));
}});
}
auto response = session.Get();
if (response.status_code == 200) {
std::ofstream ofs(outPath, std::ios::binary);
if (ofs) {
const auto& body = response->getBody();
ofs.write(body.data(), body.size());
success = ofs.good();
ofs.write(response.text.data(), response.text.size());
return ofs.good();
}
} else if (response.status_code == 404) {
// Not found - this is expected for arch fallback
return false;
} else {
std::cerr << "[GetbinClient::download] HTTP request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 30.0); // 30 second timeout
loop.loop();
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
std::cerr << "[GetbinClient::downloadFromServer] HTTP " << response.status_code << " from " << serverUrl << ": " << response.error.message << std::endl;
}
worker.join();
return success;
}
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token) {
// Read file first
std::ifstream ifs(archivePath, std::ios::binary);
if (!ifs) {
std::cerr << "[GetbinClient::upload] Failed to open archive file: " << archivePath << std::endl;
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::downloadFromServer] Exception with " << serverUrl << ": " << e.what() << std::endl;
return false;
}
std::string file_content((std::istreambuf_iterator<char>(ifs)), std::istreambuf_iterator<char>());
// Compose metadata
json metadata = { {"labeltags", json::array()} };
std::string filename = archivePath.substr(archivePath.find_last_of("/\\") + 1);
size_t dot = filename.find('.');
std::string labeltag = dot != std::string::npos ? filename.substr(0, dot) : filename;
metadata["labeltags"].push_back(labeltag);
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates
std::string ca_path = find_ca_certificates();
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
if (ca_path.empty()) {
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
// Create upload file from memory content
// First save content to a temporary file since UploadFile expects a file path
std::string temp_file = "/tmp/getpkg_upload_" + std::to_string(std::time(nullptr)) + ".tgz";
std::ofstream temp_ofs(temp_file, std::ios::binary);
if (!temp_ofs) {
std::cerr << "[GetbinClient::upload] Failed to create temporary file: " << temp_file << std::endl;
success = false;
done = true;
cv.notify_one();
loop.quit();
return;
bool GetbinClient::download(const std::string& toolName, const std::string& arch, const std::string& outPath,
ProgressCallback progressCallback) {
// Multi-server fallback logic: try each server in order
for (const auto& server : servers_) {
if (downloadFromServer(server, toolName, arch, outPath, progressCallback)) {
return true;
}
}
temp_ofs.write(file_content.data(), file_content.size());
temp_ofs.close();
// Create upload request with file
drogon::UploadFile upload_file(temp_file);
// If we get here, no server had the package
return false;
}
auto req = drogon::HttpRequest::newFileUploadRequest({upload_file});
req->setMethod(drogon::Put);
req->setPath("/upload");
req->addHeader("Authorization", "Bearer " + token);
// Add metadata as form parameter
req->setParameter("metadata", metadata.dump());
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response) {
int status_code = static_cast<int>(response->getStatusCode());
std::string response_body(response->getBody());
if (status_code == 200 || status_code == 201) {
bool GetbinClient::upload(const std::string& serverUrl, const std::string& archivePath,
std::string& outUrl, std::string& outHash, const std::string& token,
ProgressCallback progressCallback) {
try {
auto resp_json = json::parse(response_body);
if (resp_json.contains("url")) outUrl = resp_json["url"].get<std::string>();
if (resp_json.contains("hash")) outHash = resp_json["hash"].get<std::string>();
success = true;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::upload] Failed to parse JSON response: " << e.what() << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << response_body << std::endl;
}
} else {
std::cerr << "[GetbinClient::upload] HTTP error: status code " << status_code << std::endl;
std::cerr << "[GetbinClient::upload] Response body: " << response_body << std::endl;
}
} else {
std::cerr << "[GetbinClient::upload] HTTP /upload request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 60.0); // 60 second timeout
std::string url = buildUrl(serverUrl, "/upload");
loop.loop();
cpr::Session session;
session.SetUrl(cpr::Url{url});
session.SetHeader(cpr::Header{
{"User-Agent", getUserAgent()},
{"Authorization", "Bearer " + token}
});
session.SetTimeout(cpr::Timeout{300000}); // 5 minutes for uploads
session.SetVerifySsl(cpr::VerifySsl{true});
// Clean up temporary file
std::remove(temp_file.c_str());
// Extract tool name and arch from archive path for labeltags
// Archive path format: /path/to/tool-name:arch.tgz or similar
std::string archiveName = std::filesystem::path(archivePath).filename().string();
std::string toolNameArch = archiveName;
if (toolNameArch.ends_with(".tgz")) {
toolNameArch = toolNameArch.substr(0, toolNameArch.length() - 4);
}
// Create metadata JSON with labeltags
json metadata;
metadata["labeltags"] = json::array({toolNameArch});
// Set up multipart form with file and metadata
session.SetMultipart(cpr::Multipart{
cpr::Part{"file", cpr::File{archivePath}},
cpr::Part{"metadata", metadata.dump(), "application/json"}
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
// Add progress callback if provided
if (progressCallback) {
session.SetProgressCallback(cpr::ProgressCallback{[progressCallback](cpr::cpr_off_t downloadTotal, cpr::cpr_off_t downloadNow,
cpr::cpr_off_t uploadTotal, cpr::cpr_off_t uploadNow,
intptr_t userdata) -> bool {
return progressCallback(static_cast<size_t>(uploadNow), static_cast<size_t>(uploadTotal));
}});
}
worker.join();
return success;
}
auto response = session.Put();
bool GetbinClient::getHash(const std::string& toolName, const std::string& arch, std::string& outHash) {
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates
std::string ca_path = find_ca_certificates();
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
if (ca_path.empty()) {
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
std::string hash_path = "/hash/" + toolName + ":" + arch;
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath(hash_path);
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response && response->getStatusCode() == drogon::k200OK) {
std::string response_body(response->getBody());
// Try to parse hash from response body
if (response.status_code == 200) {
try {
// Try JSON first
auto resp_json = json::parse(response_body);
if (resp_json.contains("hash")) {
auto resp_json = json::parse(response.text);
if (resp_json.contains("hash") && resp_json.contains("result") && resp_json["result"] == "success") {
outUrl = buildUrl(serverUrl, "/object/" + resp_json["hash"].get<std::string>());
outHash = resp_json["hash"].get<std::string>();
success = true;
return true;
}
} catch (...) {
// Not JSON, treat as plain text
outHash = response_body;
} catch (const json::exception& e) {
// Try to extract from plain text response
outUrl = "";
outHash = response.text;
// Remove trailing newline if present
if (!outHash.empty() && outHash.back() == '\n') {
outHash.pop_back();
}
success = !outHash.empty();
return !outHash.empty();
}
} else {
std::cerr << "[GetbinClient::upload] HTTP " << response.status_code << " to " << serverUrl << ": " << response.error.message << std::endl;
if (!response.text.empty()) {
std::cerr << "[GetbinClient::upload] Response: " << response.text << std::endl;
}
}
done = true;
cv.notify_one();
loop.quit();
}, 10.0); // 10 second timeout
loop.loop();
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::upload] Exception with " << serverUrl << ": " << e.what() << std::endl;
return false;
}
}
worker.join();
return success;
bool GetbinClient::upload(const std::string& archivePath, std::string& outUrl, std::string& outHash,
const std::string& token, ProgressCallback progressCallback) {
// Backward compatibility: use first server
if (servers_.empty()) {
return false;
}
return upload(servers_[0], archivePath, outUrl, outHash, token, progressCallback);
}
bool GetbinClient::getHash(const std::string& serverUrl, const std::string& toolName,
const std::string& arch, std::string& outHash) {
try {
std::string url = buildUrl(serverUrl, "/hash/" + toolName + ":" + arch);
auto response = cpr::Get(cpr::Url{url},
cpr::Header{{"User-Agent", getUserAgent()}},
cpr::Timeout{10000}, // 10 seconds
cpr::VerifySsl{true});
if (response.status_code == 200) {
try {
// Try JSON first
auto resp_json = json::parse(response.text);
if (resp_json.contains("hash")) {
outHash = resp_json["hash"].get<std::string>();
return true;
}
} catch (const json::exception&) {
// Not JSON, treat as plain text
outHash = response.text;
// Remove trailing newline if present
if (!outHash.empty() && outHash.back() == '\n') {
outHash.pop_back();
}
return !outHash.empty();
}
} else if (response.status_code == 404) {
// Not found - this is expected for non-existent tools/archs
return false;
} else {
std::cerr << "[GetbinClient::getHash] HTTP " << response.status_code << " from " << serverUrl << ": " << response.error.message << std::endl;
}
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::getHash] Exception with " << serverUrl << ": " << e.what() << std::endl;
return false;
}
}
bool GetbinClient::getHash(const std::string& toolName, const std::string& arch, std::string& outHash) {
// Multi-server fallback: try each server in order
for (const auto& server : servers_) {
if (getHash(server, toolName, arch, outHash)) {
return true;
}
}
// If we get here, no server had the package
return false;
}
bool GetbinClient::findPackageServer(const std::string& toolName, const std::string& arch,
std::string& foundServer) const {
// Check each server to see which one has the package
for (const auto& server : servers_) {
try {
std::string url = buildUrl(server, "/hash/" + toolName + ":" + arch);
auto response = cpr::Get(cpr::Url{url},
cpr::Header{{"User-Agent", getUserAgent()}},
cpr::Timeout{10000}, // 10 seconds
cpr::VerifySsl{true});
if (response.status_code == 200) {
// Package found on this server
foundServer = server;
return true;
}
// Continue to next server if 404 or other error
} catch (const std::exception& e) {
// Continue to next server on exception
std::cerr << "[GetbinClient::findPackageServer] Exception with " << server << ": " << e.what() << std::endl;
}
}
// Package not found on any server
return false;
}
bool GetbinClient::deleteObject(const std::string& hash, const std::string& token) {
bool success = false;
bool done = false;
std::mutex mtx;
std::condition_variable cv;
std::thread worker([&]() {
trantor::EventLoop loop;
auto client = drogon::HttpClient::newHttpClient(
"https://" + std::string(SERVER_HOST),
&loop,
false, // useOldTLS = false (disable TLS 1.0/1.1)
true // validateCert = true
);
// Configure SSL certificates
std::string ca_path = find_ca_certificates();
std::vector<std::pair<std::string, std::string>> sslConfigs;
if (!ca_path.empty()) {
sslConfigs.push_back({"VerifyCAFile", ca_path});
}
// Configure SSL for secure connections
client->addSSLConfigs(sslConfigs);
if (ca_path.empty()) {
std::cerr << "[GetbinClient] Warning: No system CA certificates found. SSL verification may fail." << std::endl;
}
client->enableCookies();
client->setUserAgent("getpkg/1.0");
std::string delete_path = "/deleteobject?hash=" + hash;
auto req = drogon::HttpRequest::newHttpRequest();
req->setMethod(drogon::Get);
req->setPath(delete_path);
req->addHeader("Authorization", "Bearer " + token);
client->sendRequest(req, [&](drogon::ReqResult result, const drogon::HttpResponsePtr& response) {
std::lock_guard<std::mutex> lock(mtx);
if (result == drogon::ReqResult::Ok && response) {
int status_code = static_cast<int>(response->getStatusCode());
std::string response_body(response->getBody());
if (status_code == 200) {
// Check if the response indicates success
try {
auto resp_json = json::parse(response_body);
if (resp_json.contains("result") && resp_json["result"] == "success") {
success = true;
}
} catch (...) {
// If not JSON, assume success if 200 OK
success = true;
// Use first server for backward compatibility
if (servers_.empty()) {
return false;
}
std::string url = buildUrl(servers_[0], "/deleteobject?hash=" + hash);
auto response = cpr::Get(cpr::Url{url},
cpr::Header{
{"User-Agent", getUserAgent()},
{"Authorization", "Bearer " + token}
},
cpr::Timeout{30000}, // 30 seconds
cpr::VerifySsl{true});
if (response.status_code == 200) {
return true;
} else {
std::cerr << "[GetbinClient::deleteObject] HTTP error: status code " << status_code << std::endl;
std::cerr << "[GetbinClient::deleteObject] Response body: " << response_body << std::endl;
std::cerr << "[GetbinClient::deleteObject] HTTP " << response.status_code << ": " << response.error.message << std::endl;
if (!response.text.empty()) {
std::cerr << "[GetbinClient::deleteObject] Response: " << response.text << std::endl;
}
} else {
std::cerr << "[GetbinClient::deleteObject] HTTP request failed." << std::endl;
}
done = true;
cv.notify_one();
loop.quit();
}, 10.0); // 10 second timeout
loop.loop();
});
// Wait for completion
{
std::unique_lock<std::mutex> lock(mtx);
cv.wait(lock, [&] { return done; });
}
worker.join();
return success;
return false;
} catch (const std::exception& e) {
std::cerr << "[GetbinClient::deleteObject] Exception: " << e.what() << std::endl;
return false;
}
}
bool GetbinClient::listPackages(std::vector<std::string>& outPackages) {
    outPackages.clear();
    try {
        // Use first server for backward compatibility
        if (servers_.empty()) {
            return false;
        }
        std::string url = buildUrl(servers_[0], "/dir");
        auto response = cpr::Get(cpr::Url{url},
                                 cpr::Header{{"User-Agent", getUserAgent()}},
                                 cpr::Timeout{30000}, // 30 seconds
                                 cpr::VerifySsl{true});
        if (response.status_code != 200) {
            std::cerr << "[GetbinClient::listPackages] HTTP " << response.status_code << ": " << response.error.message << std::endl;
            return false;
        }
        try {
            auto resp_json = json::parse(response.text);
            if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
                std::set<std::string> uniqueTools;
                for (const auto& entry : resp_json["entries"]) {
                    if (!entry.contains("labeltags") || !entry["labeltags"].is_array()) continue;
                    for (const auto& labeltag : entry["labeltags"]) {
                        if (!labeltag.is_string()) continue;
                        std::string tag = labeltag.get<std::string>();
                        // Extract tool name from "tool:arch" format (drop the architecture suffix if present)
                        size_t colonPos = tag.find(':');
                        std::string toolName = (colonPos != std::string::npos) ? tag.substr(0, colonPos) : tag;
                        if (!toolName.empty()) {
                            uniqueTools.insert(toolName);
                        }
                    }
                }
                // Convert set to vector (the set also deduplicates names)
                outPackages.assign(uniqueTools.begin(), uniqueTools.end());
            }
        } catch (const json::exception&) {
            // Not JSON - fall back to parsing the body as a newline-separated list
            outPackages.clear();
            std::istringstream stream(response.text);
            std::string line;
            while (std::getline(stream, line)) {
                if (!line.empty()) {
                    outPackages.push_back(line);
                }
            }
            if (outPackages.empty()) {
                return false;
            }
        }
    } catch (const std::exception& e) {
        std::cerr << "[GetbinClient::listPackages] Exception: " << e.what() << std::endl;
        return false;
    }
    // Filter out duplicates where we have both toolname and toolname-noarch:
    // keep the base name and drop the -noarch variant
    const std::string suffix = "-noarch";
    std::vector<std::string> filteredPackages;
    std::set<std::string> baseNames;
    // First pass: collect all base names (without -noarch)
    for (const auto& pkg : outPackages) {
        if (pkg.length() < suffix.length() || pkg.substr(pkg.length() - suffix.length()) != suffix) {
            baseNames.insert(pkg);
        }
    }
    // Second pass: add packages, skipping -noarch variants if the base exists
    for (const auto& pkg : outPackages) {
        if (pkg.length() >= suffix.length() && pkg.substr(pkg.length() - suffix.length()) == suffix) {
            std::string baseName = pkg.substr(0, pkg.length() - suffix.length());
            if (baseNames.find(baseName) == baseNames.end()) {
                filteredPackages.push_back(pkg); // Keep -noarch only if no base version exists
            }
        } else {
            filteredPackages.push_back(pkg); // Always keep base versions
        }
    }
    outPackages = std::move(filteredPackages);
    // Sort the packages for better display
    std::sort(outPackages.begin(), outPackages.end());
    return true;
}
bool GetbinClient::listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries) {
    try {
        // Use first server for backward compatibility
        if (servers_.empty()) {
            return false;
        }
        std::string url = buildUrl(servers_[0], "/dir");
        auto response = cpr::Get(cpr::Url{url},
                                 cpr::Header{{"User-Agent", getUserAgent()}},
                                 cpr::Timeout{30000}, // 30 seconds
                                 cpr::VerifySsl{true});
        if (response.status_code == 200) {
            try {
                auto resp_json = json::parse(response.text);
                if (resp_json.contains("entries") && resp_json["entries"].is_array()) {
                    outEntries.clear();
                    for (const auto& entry : resp_json["entries"]) {
                        if (entry.contains("hash") && entry.contains("labeltags") &&
                            entry["hash"].is_string() && entry["labeltags"].is_array()) {
                            std::string hash = entry["hash"].get<std::string>();
                            std::vector<std::string> labeltags;
                            for (const auto& tag : entry["labeltags"]) {
                                if (tag.is_string()) {
                                    labeltags.push_back(tag.get<std::string>());
                                }
                            }
                            outEntries.push_back({hash, labeltags});
                        }
                    }
                    return true;
                }
            } catch (const json::exception& e) {
                std::cerr << "[GetbinClient::listAllEntries] JSON parse error: " << e.what() << std::endl;
            }
        } else {
            std::cerr << "[GetbinClient::listAllEntries] HTTP " << response.status_code << ": " << response.error.message << std::endl;
        }
        return false;
    } catch (const std::exception& e) {
        std::cerr << "[GetbinClient::listAllEntries] Exception: " << e.what() << std::endl;
        return false;
    }
}
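
Both listing functions parse the same /dir payload. A sketch of the response shape they assume, reconstructed from the parsing code above (the hashes and labeltags here are invented):

{
  "entries": [
    { "hash": "1234567890", "labeltags": ["mytool:x86_64", "mytool:universal"] },
    { "hash": "9876543210", "labeltags": ["othertool-noarch:universal"] }
  ]
}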

View File

@ -1,13 +1,57 @@
#pragma once
#include <string>
#include <vector>
#include <functional>
class GetbinClient {
public:
// Constructor accepting server list for multi-server support
GetbinClient(const std::vector<std::string>& servers);
// Backward compatibility constructor (uses default server)
GetbinClient();
bool download(const std::string& toolName, const std::string& arch, const std::string& outPath);
bool upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token);
// Progress callback: (downloaded_bytes, total_bytes) -> should_continue
using ProgressCallback = std::function<bool(size_t, size_t)>;
// Multi-server download with fallback logic
bool download(const std::string& toolName, const std::string& arch, const std::string& outPath,
ProgressCallback progressCallback = nullptr);
// Server-specific download
bool downloadFromServer(const std::string& serverUrl, const std::string& toolName,
const std::string& arch, const std::string& outPath,
ProgressCallback progressCallback = nullptr);
// Server-specific upload
bool upload(const std::string& serverUrl, const std::string& archivePath,
std::string& outUrl, std::string& outHash, const std::string& token,
ProgressCallback progressCallback = nullptr);
// Backward compatibility upload (uses first server)
bool upload(const std::string& archivePath, std::string& outUrl, std::string& outHash, const std::string& token,
ProgressCallback progressCallback = nullptr);
// Server-specific hash retrieval
bool getHash(const std::string& serverUrl, const std::string& toolName,
const std::string& arch, std::string& outHash);
// Multi-server hash retrieval with fallback
bool getHash(const std::string& toolName, const std::string& arch, std::string& outHash);
// Find which server has a specific package
bool findPackageServer(const std::string& toolName, const std::string& arch,
std::string& foundServer) const;
// Legacy methods (use first server for backward compatibility)
bool deleteObject(const std::string& hash, const std::string& token);
bool listPackages(std::vector<std::string>& outPackages);
bool listAllEntries(std::vector<std::pair<std::string, std::vector<std::string>>>& outEntries);
private:
static const std::string DEFAULT_SERVER_HOST;
std::vector<std::string> servers_;
std::string getUserAgent() const;
std::string buildUrl(const std::string& serverUrl, const std::string& endpoint) const;
};
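
A minimal usage sketch of this interface (the server, tool name, and output path are illustrative, and error handling is trimmed):

#include "GetbinClient.hpp"
#include <iostream>
int main() {
    GetbinClient client({"getpkg.xyz"}); // first server in the list is tried first
    auto progress = [](size_t downloaded, size_t total) -> bool {
        if (total > 0) std::cout << "\r" << (downloaded * 100) / total << "%" << std::flush;
        return true; // returning false cancels, per the should_continue contract above
    };
    if (!client.download("mytool", "x86_64", "/tmp/mytool.tgz", progress)) {
        std::cerr << "download failed on all configured servers" << std::endl;
        return 1;
    }
    return 0;
}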

View File

@ -0,0 +1,463 @@
#include "PackageMetadata.hpp"
#include <fstream>
#include <iostream>
#include <chrono>
#include <iomanip>
#include <sstream>
#include <regex>
#include <cstdlib>
// PackageMetadata implementation
PackageMetadata::PackageMetadata(const std::string& name, const std::string& version,
const std::string& hash, const std::string& arch,
const std::string& sourceServer, const std::string& installDate)
: name(name), version(version), hash(hash), arch(arch), sourceServer(sourceServer) {
if (installDate.empty()) {
this->installDate = getCurrentTimestamp();
} else {
this->installDate = installDate;
}
}
json PackageMetadata::toJson() const {
json j;
j["name"] = name;
j["version"] = version;
j["hash"] = hash;
j["arch"] = arch;
j["sourceServer"] = sourceServer;
j["installDate"] = installDate;
j["lastUpdated"] = getCurrentTimestamp();
return j;
}
PackageMetadata PackageMetadata::fromJson(const json& j) {
PackageMetadata metadata;
// Required fields
if (j.contains("name") && j["name"].is_string()) {
metadata.name = j["name"].get<std::string>();
}
if (j.contains("version") && j["version"].is_string()) {
metadata.version = j["version"].get<std::string>();
}
if (j.contains("hash") && j["hash"].is_string()) {
metadata.hash = j["hash"].get<std::string>();
}
if (j.contains("arch") && j["arch"].is_string()) {
metadata.arch = j["arch"].get<std::string>();
}
// New fields with defaults
if (j.contains("sourceServer") && j["sourceServer"].is_string()) {
metadata.sourceServer = j["sourceServer"].get<std::string>();
} else {
metadata.sourceServer = "getpkg.xyz"; // Default fallback
}
if (j.contains("installDate") && j["installDate"].is_string()) {
metadata.installDate = j["installDate"].get<std::string>();
} else {
metadata.installDate = metadata.getCurrentTimestamp();
}
return metadata;
}
PackageMetadata PackageMetadata::fromLegacyJson(const json& j, const std::string& defaultServer) {
PackageMetadata metadata;
// Legacy format only has: name, version, hash, arch
if (j.contains("name") && j["name"].is_string()) {
metadata.name = j["name"].get<std::string>();
}
if (j.contains("version") && j["version"].is_string()) {
metadata.version = j["version"].get<std::string>();
}
if (j.contains("hash") && j["hash"].is_string()) {
metadata.hash = j["hash"].get<std::string>();
}
if (j.contains("arch") && j["arch"].is_string()) {
metadata.arch = j["arch"].get<std::string>();
}
// Set defaults for new fields
metadata.sourceServer = defaultServer;
metadata.installDate = metadata.getCurrentTimestamp();
return metadata;
}
bool PackageMetadata::isValid() const {
return isValidName() && isValidVersion() && isValidHash() &&
isValidArch() && isValidServerUrl() && isValidTimestamp();
}
std::string PackageMetadata::getValidationError() const {
if (!isValidName()) {
return "Invalid package name: must be non-empty and contain only alphanumeric characters, hyphens, and underscores";
}
if (!isValidVersion()) {
return "Invalid version: must be non-empty";
}
if (!isValidHash()) {
return "Invalid hash: must be non-empty and contain only hexadecimal characters";
}
if (!isValidArch()) {
return "Invalid architecture: must be non-empty";
}
if (!isValidServerUrl()) {
return "Invalid source server: must be non-empty and contain valid characters";
}
if (!isValidTimestamp()) {
return "Invalid install date: must be non-empty";
}
return "";
}
bool PackageMetadata::saveToFile(const std::filesystem::path& filePath) const {
if (!isValid()) {
std::cerr << "Cannot save invalid package metadata: " << getValidationError() << std::endl;
return false;
}
try {
// Ensure parent directory exists
std::filesystem::create_directories(filePath.parent_path());
std::ofstream file(filePath);
if (!file.is_open()) {
std::cerr << "Failed to open file for writing: " << filePath << std::endl;
return false;
}
file << toJson().dump(2);
file.close();
return true;
} catch (const std::exception& e) {
std::cerr << "Error saving package metadata to " << filePath << ": " << e.what() << std::endl;
return false;
}
}
PackageMetadata PackageMetadata::loadFromFile(const std::filesystem::path& filePath) {
PackageMetadata metadata;
try {
if (!std::filesystem::exists(filePath)) {
std::cerr << "Package metadata file does not exist: " << filePath << std::endl;
return metadata;
}
std::ifstream file(filePath);
if (!file.is_open()) {
std::cerr << "Failed to open file for reading: " << filePath << std::endl;
return metadata;
}
json j;
file >> j;
file.close();
metadata = fromJson(j);
if (!metadata.isValid()) {
std::cerr << "Loaded package metadata is invalid: " << metadata.getValidationError() << std::endl;
}
} catch (const std::exception& e) {
std::cerr << "Error loading package metadata from " << filePath << ": " << e.what() << std::endl;
}
return metadata;
}
std::string PackageMetadata::getCurrentTimestamp() const {
auto now = std::chrono::system_clock::now();
auto time_t = std::chrono::system_clock::to_time_t(now);
std::stringstream ss;
ss << std::put_time(std::gmtime(&time_t), "%Y-%m-%dT%H:%M:%SZ");
return ss.str();
}
bool PackageMetadata::needsUpdate(const std::string& remoteHash) const {
return hash != remoteHash;
}
// Private validation methods
bool PackageMetadata::isValidName() const {
if (name.empty()) return false;
// Package name should contain only alphanumeric characters, hyphens, and underscores
std::regex namePattern("^[a-zA-Z0-9_-]+$");
return std::regex_match(name, namePattern);
}
bool PackageMetadata::isValidVersion() const {
return !version.empty();
}
bool PackageMetadata::isValidHash() const {
if (hash.empty()) return false;
// Hash should contain only hexadecimal characters
std::regex hashPattern("^[a-fA-F0-9]+$");
return std::regex_match(hash, hashPattern);
}
bool PackageMetadata::isValidArch() const {
return !arch.empty();
}
bool PackageMetadata::isValidServerUrl() const {
if (sourceServer.empty()) return false;
// Basic server URL validation - should not contain invalid characters
std::regex serverPattern("^[a-zA-Z0-9._-]+$");
return std::regex_match(sourceServer, serverPattern);
}
bool PackageMetadata::isValidTimestamp() const {
return !installDate.empty();
}
// PackageMetadataManager implementation
PackageMetadataManager::PackageMetadataManager() {
const char* home = std::getenv("HOME");
if (home) {
configDir_ = std::filesystem::path(home) / ".config" / "getpkg";
packagesDir_ = configDir_ / "packages";
}
}
PackageMetadataManager::PackageMetadataManager(const std::filesystem::path& configDir)
: configDir_(configDir), packagesDir_(configDir / "packages") {
}
bool PackageMetadataManager::ensurePackagesDirectory() {
try {
if (!std::filesystem::exists(packagesDir_)) {
std::filesystem::create_directories(packagesDir_);
}
return std::filesystem::is_directory(packagesDir_);
} catch (const std::exception& e) {
std::cerr << "Error creating packages directory: " << e.what() << std::endl;
return false;
}
}
std::filesystem::path PackageMetadataManager::getPackagesDirectory() const {
return packagesDir_;
}
std::filesystem::path PackageMetadataManager::getPackageFilePath(const std::string& toolName) const {
return packagesDir_ / (toolName + ".json");
}
bool PackageMetadataManager::savePackageMetadata(const PackageMetadata& metadata) {
if (!ensurePackagesDirectory()) {
return false;
}
std::filesystem::path filePath = getPackageFilePath(metadata.name);
return metadata.saveToFile(filePath);
}
PackageMetadata PackageMetadataManager::loadPackageMetadata(const std::string& toolName) {
std::filesystem::path filePath = getPackageFilePath(toolName);
return PackageMetadata::loadFromFile(filePath);
}
bool PackageMetadataManager::packageExists(const std::string& toolName) const {
std::filesystem::path filePath = getPackageFilePath(toolName);
return std::filesystem::exists(filePath);
}
bool PackageMetadataManager::removePackageMetadata(const std::string& toolName) {
try {
std::filesystem::path filePath = getPackageFilePath(toolName);
if (std::filesystem::exists(filePath)) {
return std::filesystem::remove(filePath);
}
return true; // Already doesn't exist
} catch (const std::exception& e) {
std::cerr << "Error removing package metadata for " << toolName << ": " << e.what() << std::endl;
return false;
}
}
bool PackageMetadataManager::migrateFromLegacyFormat() {
try {
std::vector<std::string> legacyFiles = findLegacyPackageFiles();
if (legacyFiles.empty()) {
return true; // Nothing to migrate
}
if (!ensurePackagesDirectory()) {
std::cerr << "Failed to create packages directory for migration" << std::endl;
return false;
}
int successCount = 0;
for (const std::string& fileName : legacyFiles) {
std::filesystem::path legacyPath = configDir_ / fileName;
if (migrateLegacyPackageFile(legacyPath)) {
successCount++;
}
}
std::cout << "Migrated " << successCount << " of " << legacyFiles.size() << " legacy package files" << std::endl;
return successCount == legacyFiles.size();
} catch (const std::exception& e) {
std::cerr << "Error during migration: " << e.what() << std::endl;
return false;
}
}
std::vector<std::string> PackageMetadataManager::findLegacyPackageFiles() const {
std::vector<std::string> legacyFiles;
try {
if (!std::filesystem::exists(configDir_)) {
return legacyFiles;
}
for (const auto& entry : std::filesystem::directory_iterator(configDir_)) {
if (entry.is_regular_file() && entry.path().extension() == ".json") {
std::string fileName = entry.path().filename().string();
// servers.json is the server configuration, not package metadata;
// this scan is non-recursive, so files already under packages/ are not visited
if (fileName != "servers.json") {
legacyFiles.push_back(fileName);
}
}
}
} catch (const std::exception& e) {
std::cerr << "Error finding legacy package files: " << e.what() << std::endl;
}
return legacyFiles;
}
bool PackageMetadataManager::migrateLegacyPackageFile(const std::filesystem::path& legacyPath, const std::string& defaultServer) {
try {
if (!std::filesystem::exists(legacyPath)) {
return false;
}
// Load legacy format
std::ifstream file(legacyPath);
if (!file.is_open()) {
std::cerr << "Failed to open legacy file: " << legacyPath << std::endl;
return false;
}
json legacyJson;
file >> legacyJson;
file.close();
// Convert to new format
PackageMetadata metadata = PackageMetadata::fromLegacyJson(legacyJson, defaultServer);
if (!metadata.isValid()) {
std::cerr << "Invalid metadata after migration from " << legacyPath << ": " << metadata.getValidationError() << std::endl;
return false;
}
// Save in new location
if (!savePackageMetadata(metadata)) {
std::cerr << "Failed to save migrated metadata for " << metadata.name << std::endl;
return false;
}
// Remove legacy file
std::filesystem::remove(legacyPath);
std::cout << "Migrated package metadata: " << metadata.name << " from " << defaultServer << std::endl;
return true;
} catch (const std::exception& e) {
std::cerr << "Error migrating legacy file " << legacyPath << ": " << e.what() << std::endl;
return false;
}
}
std::vector<std::string> PackageMetadataManager::listInstalledPackages() const {
std::vector<std::string> packages;
try {
if (!std::filesystem::exists(packagesDir_)) {
return packages;
}
for (const auto& entry : std::filesystem::directory_iterator(packagesDir_)) {
if (entry.is_regular_file() && entry.path().extension() == ".json") {
std::string toolName = entry.path().stem().string();
packages.push_back(toolName);
}
}
} catch (const std::exception& e) {
std::cerr << "Error listing installed packages: " << e.what() << std::endl;
}
return packages;
}
std::vector<PackageMetadata> PackageMetadataManager::getAllPackageMetadata() const {
std::vector<PackageMetadata> allMetadata;
std::vector<std::string> packages = listInstalledPackages();
for (const std::string& packageName : packages) {
PackageMetadata metadata = PackageMetadata::loadFromFile(getPackageFilePath(packageName));
if (metadata.isValid()) {
allMetadata.push_back(metadata);
}
}
return allMetadata;
}
bool PackageMetadataManager::validateAllPackageMetadata() const {
std::vector<std::string> packages = listInstalledPackages();
for (const std::string& packageName : packages) {
PackageMetadata metadata = PackageMetadata::loadFromFile(getPackageFilePath(packageName));
if (!metadata.isValid()) {
std::cerr << "Invalid metadata for package " << packageName << ": " << metadata.getValidationError() << std::endl;
return false;
}
}
return true;
}
int PackageMetadataManager::cleanupInvalidMetadata() {
int removedCount = 0;
std::vector<std::string> packages = listInstalledPackages();
for (const std::string& packageName : packages) {
PackageMetadata metadata = loadPackageMetadata(packageName);
if (!metadata.isValid()) {
std::cerr << "Removing invalid metadata for package " << packageName << ": " << metadata.getValidationError() << std::endl;
if (removePackageMetadata(packageName)) {
removedCount++;
}
}
}
return removedCount;
}
bool PackageMetadataManager::isValidPackageFile(const std::filesystem::path& filePath) const {
return filePath.extension() == ".json" && std::filesystem::is_regular_file(filePath);
}
std::string PackageMetadataManager::extractToolNameFromPath(const std::filesystem::path& filePath) const {
return filePath.stem().string();
}
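
For reference, saveToFile() writes toJson() with two-space indentation, so a typical ~/.config/getpkg/packages/mytool.json would look roughly like this (all values invented; the version string follows the YYYY.MMDD.HHMM convention assumed elsewhere in this codebase):

{
  "name": "mytool",
  "version": "2025.0720.1030",
  "hash": "1234567890",
  "arch": "x86_64",
  "sourceServer": "getpkg.xyz",
  "installDate": "2025-07-20T03:06:51Z",
  "lastUpdated": "2025-07-20T03:06:51Z"
}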

View File

@ -0,0 +1,97 @@
#pragma once
#include <string>
#include <filesystem>
#include <nlohmann/json.hpp>
using json = nlohmann::json;
/**
* Enhanced package metadata structure with server source tracking
* Supports both new multi-server format and legacy single-server migration
*/
struct PackageMetadata {
std::string name;
std::string version;
std::string hash;
std::string arch;
std::string sourceServer; // New field for server tracking
std::string installDate; // New field for installation tracking
// Default constructor
PackageMetadata() = default;
// Constructor with all fields
PackageMetadata(const std::string& name, const std::string& version,
const std::string& hash, const std::string& arch,
const std::string& sourceServer, const std::string& installDate = "");
// Serialization methods
json toJson() const;
static PackageMetadata fromJson(const json& j);
// Migration support - convert from legacy format
static PackageMetadata fromLegacyJson(const json& j, const std::string& defaultServer = "getpkg.xyz");
// Validation
bool isValid() const;
std::string getValidationError() const;
// File operations
bool saveToFile(const std::filesystem::path& filePath) const;
static PackageMetadata loadFromFile(const std::filesystem::path& filePath);
// Utility methods
std::string getCurrentTimestamp() const;
bool needsUpdate(const std::string& remoteHash) const;
private:
// Internal validation helpers
bool isValidName() const;
bool isValidVersion() const;
bool isValidHash() const;
bool isValidArch() const;
bool isValidServerUrl() const;
bool isValidTimestamp() const;
};
/**
* Package metadata manager for handling the packages directory structure
*/
class PackageMetadataManager {
public:
PackageMetadataManager();
explicit PackageMetadataManager(const std::filesystem::path& configDir);
// Directory management
bool ensurePackagesDirectory();
std::filesystem::path getPackagesDirectory() const;
std::filesystem::path getPackageFilePath(const std::string& toolName) const;
// Package operations
bool savePackageMetadata(const PackageMetadata& metadata);
PackageMetadata loadPackageMetadata(const std::string& toolName);
bool packageExists(const std::string& toolName) const;
bool removePackageMetadata(const std::string& toolName);
// Migration support
bool migrateFromLegacyFormat();
std::vector<std::string> findLegacyPackageFiles() const;
bool migrateLegacyPackageFile(const std::filesystem::path& legacyPath, const std::string& defaultServer = "getpkg.xyz");
// Listing and enumeration
std::vector<std::string> listInstalledPackages() const;
std::vector<PackageMetadata> getAllPackageMetadata() const;
// Validation and cleanup
bool validateAllPackageMetadata() const;
int cleanupInvalidMetadata();
private:
std::filesystem::path configDir_;
std::filesystem::path packagesDir_;
// Helper methods
bool isValidPackageFile(const std::filesystem::path& filePath) const;
std::string extractToolNameFromPath(const std::filesystem::path& filePath) const;
};
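
A short sketch of how the manager is meant to be driven (assuming the default HOME-based constructor; the printed format is invented):

#include "PackageMetadata.hpp"
#include <iostream>
int main() {
    PackageMetadataManager manager;      // defaults to ~/.config/getpkg
    manager.migrateFromLegacyFormat();   // moves flat <tool>.json files into packages/
    for (const auto& name : manager.listInstalledPackages()) {
        PackageMetadata md = manager.loadPackageMetadata(name);
        if (md.isValid()) {
            std::cout << md.name << " " << md.version << " (from " << md.sourceServer << ")" << std::endl;
        }
    }
    return 0;
}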

View File

@ -0,0 +1,353 @@
#include "ServerManager.hpp"
#include <fstream>
#include <iostream>
#include <chrono>
#include <iomanip>
#include <sstream>
#include <regex>
#include <cpr/cpr.h>
using json = nlohmann::json;
// ServerConfig implementation
json ServerConfig::toJson() const {
return json{
{"url", url},
{"name", name},
{"default", isDefault},
{"writeToken", writeToken},
{"added", addedDate}
};
}
ServerConfig ServerConfig::fromJson(const json& j) {
ServerConfig config;
config.url = j.value("url", "");
config.name = j.value("name", "");
config.isDefault = j.value("default", false);
config.writeToken = j.value("writeToken", "");
config.addedDate = j.value("added", "");
return config;
}
// ServerManager implementation
ServerManager::ServerManager() {
const char* home = getenv("HOME");
if (home) {
configPath_ = std::filesystem::path(home) / ".config" / "getpkg" / "servers.json";
}
}
bool ServerManager::addServer(const std::string& serverUrl, const std::string& writeToken) {
if (!validateServerUrl(serverUrl)) {
std::cerr << "Invalid server URL: " << serverUrl << std::endl;
return false;
}
// Check if server already exists
if (findServer(serverUrl) != nullptr) {
std::cerr << "Server already exists: " << serverUrl << std::endl;
return false;
}
// Check if server is reachable
if (!isServerReachable(serverUrl)) {
std::cerr << "Warning: Server may not be reachable: " << serverUrl << std::endl;
// Continue anyway - server might be temporarily down
}
ServerConfig config;
config.url = serverUrl;
config.name = serverUrl; // Use URL as default name
config.isDefault = servers_.empty(); // First server becomes default
config.writeToken = writeToken;
config.addedDate = getCurrentTimestamp();
servers_.push_back(config);
return saveConfiguration();
}
bool ServerManager::removeServer(const std::string& serverUrl) {
auto it = std::find_if(servers_.begin(), servers_.end(),
[&serverUrl](const ServerConfig& config) {
return config.url == serverUrl;
});
if (it == servers_.end()) {
std::cerr << "Server not found: " << serverUrl << std::endl;
return false;
}
// Don't allow removing the last server
if (servers_.size() == 1) {
std::cerr << "Cannot remove the last server. Add another server first." << std::endl;
return false;
}
bool wasDefault = it->isDefault;
servers_.erase(it);
// If we removed the default server, make the first remaining server default
if (wasDefault && !servers_.empty()) {
servers_[0].isDefault = true;
}
return saveConfiguration();
}
std::vector<std::string> ServerManager::getServers() const {
std::vector<std::string> urls;
for (const auto& server : servers_) {
urls.push_back(server.url);
}
return urls;
}
std::string ServerManager::getDefaultServer() const {
for (const auto& server : servers_) {
if (server.isDefault) {
return server.url;
}
}
// If no default is set, return the first server
if (!servers_.empty()) {
return servers_[0].url;
}
return "getpkg.xyz"; // Fallback to original default
}
std::string ServerManager::getDefaultPublishServer() const {
// Return first server with a write token
for (const auto& server : servers_) {
if (!server.writeToken.empty()) {
return server.url;
}
}
// If no server has a token, return the default server
return getDefaultServer();
}
bool ServerManager::setWriteToken(const std::string& serverUrl, const std::string& token) {
ServerConfig* server = findServer(serverUrl);
if (server == nullptr) {
std::cerr << "Server not found: " << serverUrl << std::endl;
return false;
}
server->writeToken = token;
return saveConfiguration();
}
std::string ServerManager::getWriteToken(const std::string& serverUrl) const {
const ServerConfig* server = findServer(serverUrl);
if (server != nullptr) {
return server->writeToken;
}
return "";
}
bool ServerManager::hasWriteToken(const std::string& serverUrl) const {
const ServerConfig* server = findServer(serverUrl);
return server != nullptr && !server->writeToken.empty();
}
std::vector<std::string> ServerManager::getServersWithTokens() const {
std::vector<std::string> serversWithTokens;
for (const auto& server : servers_) {
if (!server.writeToken.empty()) {
serversWithTokens.push_back(server.url);
}
}
return serversWithTokens;
}
bool ServerManager::loadConfiguration() {
if (!std::filesystem::exists(configPath_)) {
ensureDefaultConfiguration();
return true;
}
try {
std::ifstream file(configPath_);
if (!file.is_open()) {
std::cerr << "Failed to open server configuration file: " << configPath_ << std::endl;
ensureDefaultConfiguration();
return true;
}
json config;
file >> config;
if (!config.contains("servers") || !config["servers"].is_array()) {
std::cerr << "Invalid server configuration format" << std::endl;
ensureDefaultConfiguration();
return true;
}
servers_.clear();
for (const auto& serverJson : config["servers"]) {
try {
servers_.push_back(ServerConfig::fromJson(serverJson));
} catch (const std::exception& e) {
std::cerr << "Warning: Skipping invalid server config: " << e.what() << std::endl;
}
}
// Ensure we have at least one server
if (servers_.empty()) {
ensureDefaultConfiguration();
}
return true;
} catch (const std::exception& e) {
std::cerr << "Error loading server configuration: " << e.what() << std::endl;
ensureDefaultConfiguration();
return true;
}
}
bool ServerManager::saveConfiguration() {
try {
// Ensure directory exists
std::filesystem::create_directories(configPath_.parent_path());
json config;
config["version"] = "1.0";
config["lastUpdated"] = getCurrentTimestamp();
json serversArray = json::array();
for (const auto& server : servers_) {
serversArray.push_back(server.toJson());
}
config["servers"] = serversArray;
std::ofstream file(configPath_);
if (!file.is_open()) {
std::cerr << "Failed to open server configuration file for writing: " << configPath_ << std::endl;
return false;
}
file << config.dump(2);
return file.good();
} catch (const std::exception& e) {
std::cerr << "Error saving server configuration: " << e.what() << std::endl;
return false;
}
}
void ServerManager::ensureDefaultConfiguration() {
servers_.clear();
ServerConfig defaultServer;
defaultServer.url = "getpkg.xyz";
defaultServer.name = "Official getpkg Registry";
defaultServer.isDefault = true;
defaultServer.writeToken = "";
defaultServer.addedDate = getCurrentTimestamp();
servers_.push_back(defaultServer);
saveConfiguration();
}
bool ServerManager::migrateFromLegacy() {
const char* home = getenv("HOME");
if (!home) {
return false;
}
std::filesystem::path legacyTokenPath = std::filesystem::path(home) / ".config" / "getpkg.xyz" / "write_token.txt";
if (std::filesystem::exists(legacyTokenPath)) {
try {
std::ifstream tokenFile(legacyTokenPath);
std::string token;
std::getline(tokenFile, token);
if (!token.empty()) {
// Set the token for getpkg.xyz server
setWriteToken("getpkg.xyz", token);
// Optionally remove the legacy token file
// std::filesystem::remove(legacyTokenPath);
std::cout << "Migrated legacy write token for getpkg.xyz" << std::endl;
return true;
}
} catch (const std::exception& e) {
std::cerr << "Warning: Failed to migrate legacy token: " << e.what() << std::endl;
}
}
return false;
}
bool ServerManager::validateServerUrl(const std::string& url) const {
if (url.empty() || url.length() > 253) { // DNS name length limit
return false;
}
// Basic URL validation - should be a valid hostname or IP
// Allow formats like: example.com, sub.example.com, 192.168.1.1, localhost
std::regex urlPattern(R"(^[a-zA-Z0-9]([a-zA-Z0-9\-\.]*[a-zA-Z0-9])?$)");
if (!std::regex_match(url, urlPattern)) {
return false;
}
// Additional checks
if (url.find("..") != std::string::npos) {
return false;
}
if (url.front() == '.' || url.back() == '.') {
return false;
}
return true;
}
bool ServerManager::isServerReachable(const std::string& url) const {
try {
std::string testUrl = "https://" + url + "/";
auto response = cpr::Head(cpr::Url{testUrl},
cpr::Timeout{5000}, // 5 seconds
cpr::VerifySsl{true});
// Accept any response that indicates the server is reachable
// (200, 404, 403, etc. - as long as we get a response)
return response.status_code > 0;
} catch (const std::exception& e) {
return false;
}
}
ServerConfig* ServerManager::findServer(const std::string& url) {
auto it = std::find_if(servers_.begin(), servers_.end(),
[&url](const ServerConfig& config) {
return config.url == url;
});
return (it != servers_.end()) ? &(*it) : nullptr;
}
const ServerConfig* ServerManager::findServer(const std::string& url) const {
auto it = std::find_if(servers_.begin(), servers_.end(),
[&url](const ServerConfig& config) {
return config.url == url;
});
return (it != servers_.end()) ? &(*it) : nullptr;
}
std::string ServerManager::getCurrentTimestamp() const {
auto now = std::chrono::system_clock::now();
auto time_t = std::chrono::system_clock::to_time_t(now);
std::stringstream ss;
ss << std::put_time(std::gmtime(&time_t), "%Y-%m-%dT%H:%M:%SZ");
return ss.str();
}
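
Putting toJson() and saveConfiguration() together, a freshly created ~/.config/getpkg/servers.json should look roughly like this (timestamps invented):

{
  "version": "1.0",
  "lastUpdated": "2025-07-20T03:06:51Z",
  "servers": [
    {
      "url": "getpkg.xyz",
      "name": "Official getpkg Registry",
      "default": true,
      "writeToken": "",
      "added": "2025-07-20T03:06:51Z"
    }
  ]
}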

View File

@ -0,0 +1,53 @@
#pragma once
#include <string>
#include <vector>
#include <filesystem>
#include <nlohmann/json.hpp>
struct ServerConfig {
std::string url;
std::string name;
bool isDefault = false;
std::string writeToken;
std::string addedDate;
// JSON serialization
nlohmann::json toJson() const;
static ServerConfig fromJson(const nlohmann::json& j);
};
class ServerManager {
public:
ServerManager();
// Server management
bool addServer(const std::string& serverUrl, const std::string& writeToken = "");
bool removeServer(const std::string& serverUrl);
std::vector<std::string> getServers() const;
std::string getDefaultServer() const;
std::string getDefaultPublishServer() const; // First server with write token
// Token management
bool setWriteToken(const std::string& serverUrl, const std::string& token);
std::string getWriteToken(const std::string& serverUrl) const;
bool hasWriteToken(const std::string& serverUrl) const;
std::vector<std::string> getServersWithTokens() const;
// Configuration
bool loadConfiguration();
bool saveConfiguration();
void ensureDefaultConfiguration();
// Migration
bool migrateFromLegacy();
private:
std::vector<ServerConfig> servers_;
std::filesystem::path configPath_;
bool validateServerUrl(const std::string& url) const;
bool isServerReachable(const std::string& url) const;
ServerConfig* findServer(const std::string& url);
const ServerConfig* findServer(const std::string& url) const;
std::string getCurrentTimestamp() const;
};
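
A minimal driving sketch (the added hostname is illustrative; note that servers are stored as bare hostnames and "https://" is prepended only when probing reachability):

#include "ServerManager.hpp"
#include <iostream>
int main() {
    ServerManager mgr;
    mgr.loadConfiguration();                 // seeds a default getpkg.xyz entry on first run
    mgr.migrateFromLegacy();                 // imports a legacy write_token.txt if present
    mgr.addServer("packages.example.com");   // bare hostname, validated against the URL pattern
    std::cout << "default: " << mgr.getDefaultServer() << std::endl;
    return 0;
}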

View File

@ -76,6 +76,17 @@
namespace {
using json = nlohmann::json;
// Clear current line and reset cursor to beginning
void clearLine() {
std::cout << "\r\033[K" << std::flush;
}
// Clear current line and print message
void clearAndPrint(const std::string& message) {
clearLine();
std::cout << message << std::flush;
}
// Compare versions (returns true if v1 < v2)
bool isVersionOlder(const std::string& v1, const std::string& v2) {
// Simple version comparison - assumes versions are in YYYY.MMDD.HHMM format
@ -200,27 +211,43 @@ int install_tool(int argc, char* argv[]) {
// Download tool - try arch-specific version first, then universal fallback
GetbinClient getbin2;
std::string downloadArch = arch;
//std::cout << "Downloading " << toolName << ":" << arch << "..." << std::endl;
if (!getbin2.download(toolName, arch, archivePath.string())) {
// Progress callback for downloads
auto progressCallback = [&toolName](size_t downloaded, size_t total) -> bool {
if (total > 0) {
int percent = (downloaded * 100) / total;
std::cout << "\rDownloading " << toolName << "... " << percent << "%" << std::flush;
} else {
std::cout << "\rDownloading " << toolName << "... " << downloaded << " bytes" << std::flush;
}
return true; // Continue download
};
std::cout << "Downloading " << toolName << "..." << std::flush;
if (!getbin2.download(toolName, arch, archivePath.string(), progressCallback)) {
// Try universal version as fallback
//std::cout << "Arch-specific version not found, trying universal version..." << std::endl;
//std::cout << "Downloading " << toolName << ":universal..." << std::endl;
if (!getbin2.download(toolName, "universal", archivePath.string())) {
std::cerr << "Failed to download tool archive (tried both " << arch << " and universal)." << std::endl;
clearAndPrint("Arch-specific version not found, trying universal...\n");
if (!getbin2.download(toolName, "universal", archivePath.string(), progressCallback)) {
std::cerr << "\rFailed to download tool archive (tried both " << arch << " and universal)." << std::endl;
return 1;
}
downloadArch = "universal";
}
clearAndPrint("Downloading " + toolName + "... done\n");
// Unpack tool
std::cout << "Unpacking..." << std::flush;
if (!common::unpack_tgz(archivePath.string(), binDir.string())) {
std::cerr << "Failed to unpack tool archive." << std::endl;
std::cerr << "\rFailed to unpack tool archive." << std::endl;
return 1;
}
clearAndPrint("Unpacking... done\n");
// Add to PATH and autocomplete
std::cout << "Configuring..." << std::flush;
scriptManager.addToolEntry(toolName, binDir.string());
scriptManager.addAutocomplete(toolName);
clearAndPrint("Configuring... done\n");
// Get tool info
std::string hash;
@ -314,10 +341,24 @@ int publish_tool(int argc, char* argv[]) {
}
GetbinClient getbin;
std::string url, hash;
if (!getbin.upload(archivePath.string(), url, hash, token)) {
std::cerr << "Failed to upload archive." << std::endl;
// Progress callback for upload
auto uploadProgressCallback = [](size_t uploaded, size_t total) -> bool {
if (total > 0) {
int percent = (uploaded * 100) / total;
std::cout << "\rUploading... " << percent << "%" << std::flush;
} else {
std::cout << "\rUploading... " << uploaded << " bytes" << std::flush;
}
return true; // Continue upload
};
std::cout << "Uploading..." << std::flush;
if (!getbin.upload(archivePath.string(), url, hash, token, uploadProgressCallback)) {
std::cerr << "\rFailed to upload archive." << std::endl;
return 1;
}
clearAndPrint("Uploading... done\n");
std::cout << "Published! URL: " << url << "\nHash: " << hash << std::endl;
return 0;
}
@ -326,73 +367,161 @@ int update_tool(int argc, char* argv[]) {
std::string home = get_home();
std::filesystem::path configDir = std::filesystem::path(home) / ".config/getpkg";
// Collect all installed tools
std::vector<std::tuple<std::string, std::string, std::string>> updateResults; // name, status, version
// Capture stdout to process install_tool output
auto processToolUpdate = [&](const std::string& toolName) -> std::tuple<std::string, std::string> {
// Redirect stdout and stderr to capture output
std::stringstream buffer;
std::stringstream errBuffer;
std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
char* toolArgv[] = {argv[0], (char*)"install", (char*)toolName.c_str()};
int result = install_tool(3, toolArgv);
// Restore stdout and stderr
std::cout.rdbuf(oldOut);
std::cerr.rdbuf(oldErr);
std::string output = buffer.str();
std::string status = "Failed";
std::string version = "-";
if (result == 0) {
if (output.find("is already up to date") != std::string::npos) {
status = "Up to date";
} else if (output.find("Installed " + toolName + " successfully") != std::string::npos) {
// Check if it was an update or fresh install
if (output.find("Updating " + toolName) != std::string::npos) {
status = "Updated";
} else {
status = "Installed";
}
}
// Try to get version from config
std::filesystem::path toolInfoPath = configDir / (toolName + ".json");
if (std::filesystem::exists(toolInfoPath)) {
std::ifstream tfile(toolInfoPath);
json toolInfo;
tfile >> toolInfo;
version = toolInfo.value("version", "-");
if (!version.empty() && version.back() == '\n') version.pop_back();
// If version is empty, try to show something useful
if (version.empty() || version == "-") {
version = "installed";
}
}
}
return std::make_tuple(status, version);
// Structure to hold tool information
struct ToolInfo {
std::string name;
std::string localHash;
std::string remoteHash;
std::string arch;
std::string version;
bool needsUpdate = false;
std::string status = "Up to date";
};
// First update getpkg itself
auto [getpkgStatus, getpkgVersion] = processToolUpdate("getpkg");
updateResults.push_back(std::make_tuple("getpkg", getpkgStatus, getpkgVersion));
std::vector<ToolInfo> tools;
// Then update all other installed tools
// Collect all installed tools
if (std::filesystem::exists(configDir)) {
for (const auto& entry : std::filesystem::directory_iterator(configDir)) {
if (entry.path().extension() == ".json") {
std::string tname = entry.path().stem();
if (tname != "getpkg") { // Skip getpkg since we already did it
auto [status, version] = processToolUpdate(tname);
updateResults.push_back(std::make_tuple(tname, status, version));
ToolInfo tool;
tool.name = tname;
// Read local tool info
std::ifstream tfile(entry.path());
if (tfile.good()) {
json toolInfo;
tfile >> toolInfo;
tool.localHash = toolInfo.value("hash", "");
tool.arch = toolInfo.value("arch", get_arch());
tool.version = toolInfo.value("version", "-");
if (!tool.version.empty() && tool.version.back() == '\n') {
tool.version.pop_back();
}
if (tool.version.empty() || tool.version == "-") {
tool.version = "installed";
}
}
tools.push_back(tool);
}
}
}
if (tools.empty()) {
std::cout << "No tools installed." << std::endl;
return 0;
}
// Step 1: Check for updates (with progress)
std::cout << "Checking " << tools.size() << " tools for updates..." << std::endl;
GetbinClient getbin;
for (size_t i = 0; i < tools.size(); ++i) {
auto& tool = tools[i];
// Show progress
std::cout << "\r[" << (i + 1) << "/" << tools.size() << "] Checking " << tool.name << "..." << std::flush;
// Check remote hash
std::string remoteHash;
if (getbin.getHash(tool.name, tool.arch, remoteHash) && !remoteHash.empty()) {
tool.remoteHash = remoteHash;
if (tool.localHash != remoteHash) {
tool.needsUpdate = true;
tool.status = "Needs update";
}
} else {
tool.status = "Check failed";
}
}
clearLine(); // Clear progress line
// Step 2: Update tools that need updating
std::vector<std::tuple<std::string, std::string, std::string>> updateResults;
// First update getpkg if it needs updating
auto getpkgIt = std::find_if(tools.begin(), tools.end(),
[](const ToolInfo& t) { return t.name == "getpkg"; });
if (getpkgIt != tools.end() && getpkgIt->needsUpdate) {
std::cout << "Updating getpkg..." << std::flush;
// Use install_tool for actual update
std::stringstream buffer, errBuffer;
std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
char* toolArgv[] = {argv[0], (char*)"install", (char*)"getpkg"};
int result = install_tool(3, toolArgv);
std::cout.rdbuf(oldOut);
std::cerr.rdbuf(oldErr);
if (result == 0) {
getpkgIt->status = "Updated";
std::cout << " Updated" << std::endl;
} else {
getpkgIt->status = "Failed";
std::cout << " Failed" << std::endl;
}
}
// Update other tools
int toolsToUpdate = std::count_if(tools.begin(), tools.end(),
[](const ToolInfo& t) { return t.needsUpdate && t.name != "getpkg"; });
if (toolsToUpdate > 0) {
std::cout << "Updating " << toolsToUpdate << " tools..." << std::endl;
int updatedCount = 0;
for (auto& tool : tools) {
if (tool.needsUpdate && tool.name != "getpkg") {
updatedCount++;
std::cout << "[" << updatedCount << "/" << toolsToUpdate << "] Updating " << tool.name << "..." << std::flush;
// Use install_tool for actual update
std::stringstream buffer, errBuffer;
std::streambuf* oldOut = std::cout.rdbuf(buffer.rdbuf());
std::streambuf* oldErr = std::cerr.rdbuf(errBuffer.rdbuf());
char* toolArgv[] = {argv[0], (char*)"install", (char*)tool.name.c_str()};
int result = install_tool(3, toolArgv);
std::cout.rdbuf(oldOut);
std::cerr.rdbuf(oldErr);
if (result == 0) {
tool.status = "Updated";
clearAndPrint("Updated\n");
// Re-read version after update
std::filesystem::path toolInfoPath = configDir / (tool.name + ".json");
if (std::filesystem::exists(toolInfoPath)) {
std::ifstream tfile(toolInfoPath);
json toolInfo;
tfile >> toolInfo;
tool.version = toolInfo.value("version", tool.version);
if (!tool.version.empty() && tool.version.back() == '\n') {
tool.version.pop_back();
}
if (tool.version.empty() || tool.version == "-") {
tool.version = "installed";
}
}
} else {
tool.status = "Failed";
clearAndPrint("Failed\n");
}
}
}
}
// Prepare results for display
for (const auto& tool : tools) {
updateResults.push_back(std::make_tuple(tool.name, tool.status, tool.version));
}
// Display results in a table
@ -583,35 +712,34 @@ int unpublish_tool(int argc, char* argv[]) {
return 1;
}
} else {
// No specific architecture - unpublish all architectures
std::vector<std::string> allArchitectures = {"x86_64", "aarch64", "universal"};
std::vector<std::pair<std::string, std::string>> foundPackages;
// No specific architecture - unpublish ALL entries with this tool name
std::vector<std::pair<std::string, std::vector<std::string>>> allEntries;
std::vector<std::pair<std::string, std::string>> foundPackages; // (tag, hash)
std::cout << "Searching for " << toolName << " across all architectures..." << std::endl;
std::cout << "Searching for all entries with label '" << toolName << "'..." << std::endl;
// Find all existing versions
for (const auto& arch : allArchitectures) {
std::string archHash;
if (getbin.getHash(toolName, arch, archHash) && !archHash.empty()) {
// Validate hash
bool validHash = true;
for (char c : archHash) {
if (!std::isdigit(c)) {
validHash = false;
break;
}
if (!getbin.listAllEntries(allEntries)) {
std::cerr << "Failed to get directory listing from server" << std::endl;
return 1;
}
if (validHash) {
foundPackages.push_back({arch, archHash});
std::cout << " Found " << toolName << ":" << arch << " (hash: " << archHash << ")" << std::endl;
// Find all entries with labeltags starting with toolName:
for (const auto& entry : allEntries) {
const std::string& hash = entry.first;
const std::vector<std::string>& labeltags = entry.second;
for (const std::string& tag : labeltags) {
if (tag.find(toolName + ":") == 0) {
// Found a matching labeltag
foundPackages.push_back({tag, hash});
std::cout << " Found " << tag << " (hash: " << hash << ")" << std::endl;
break; // Only count each hash once even if it has multiple matching tags
}
}
}
if (foundPackages.empty()) {
std::cerr << "No packages found for " << toolName << std::endl;
std::cerr << "Searched architectures: x86_64, aarch64, universal" << std::endl;
return 1;
}
@ -623,7 +751,7 @@ int unpublish_tool(int argc, char* argv[]) {
int failCount = 0;
for (const auto& [arch, archHash] : foundPackages) {
std::cout << " Unpublishing " << toolName << ":" << arch << "... ";
std::cout << " Unpublishing " << arch << "... ";
if (getbin.deleteObject(archHash, token)) {
std::cout << "OK" << std::endl;
successCount++;
@ -706,7 +834,7 @@ int list_packages(int argc, char* argv[]) {
for (const auto& packageName : availablePackages) {
std::string status = "Available";
std::string localVersion = "-";
std::string remoteStatus = "";
std::string remoteStatus = "-";
auto it = installedPackages.find(packageName);
if (it != installedPackages.end()) {
@ -1022,6 +1150,85 @@ void show_help() {
std::cout << " ~/.local/bin/getpkg/ Installed tool binaries" << std::endl;
}
int autocomplete_command(int argc, char* argv[]) {
std::vector<std::string> args(argv + 2, argv + argc);
// If no arguments, return all commands
if (args.empty()) {
std::cout << "install\n";
std::cout << "uninstall\n";
std::cout << "publish\n";
std::cout << "unpublish\n";
std::cout << "update\n";
std::cout << "version\n";
std::cout << "create\n";
std::cout << "hash\n";
std::cout << "list\n";
std::cout << "clean\n";
std::cout << "help\n";
return 0;
}
const std::string& subcommand = args[0];
// Handle autocompletion for specific commands
if (subcommand == "install") {
// For install, we could suggest popular packages or recently published ones
// For now, just return empty (no specific completions)
return 0;
} else if (subcommand == "uninstall") {
// For uninstall, list installed tools
std::filesystem::path configDir = std::filesystem::path(std::getenv("HOME")) / ".config" / "getpkg";
if (std::filesystem::exists(configDir)) {
for (const auto& entry : std::filesystem::directory_iterator(configDir)) {
if (entry.path().extension() == ".json") {
std::string toolName = entry.path().stem().string();
std::cout << toolName << "\n";
}
}
}
return 0;
} else if (subcommand == "publish") {
// For publish, suggest architecture suffixes after tool name
if (args.size() >= 2) {
// If we have tool_name already, suggest architectures
std::cout << "x86_64\n";
std::cout << "aarch64\n";
std::cout << "universal\n";
}
return 0;
} else if (subcommand == "unpublish") {
// For unpublish, list installed tools (similar to uninstall)
std::filesystem::path configDir = std::filesystem::path(std::getenv("HOME")) / ".config" / "getpkg";
if (std::filesystem::exists(configDir)) {
for (const auto& entry : std::filesystem::directory_iterator(configDir)) {
if (entry.path().extension() == ".json") {
std::string toolName = entry.path().stem().string();
std::cout << toolName << "\n";
// Also suggest with architecture suffixes
std::cout << toolName << ":x86_64\n";
std::cout << toolName << ":aarch64\n";
std::cout << toolName << ":universal\n";
}
}
}
return 0;
} else if (subcommand == "create") {
// For create, no specific completions (tool name and directory are user-defined)
return 0;
} else if (subcommand == "hash") {
// For hash, suggest file extensions
if (args.size() >= 2) {
std::cout << "*.tgz\n";
std::cout << "*.tar.gz\n";
}
return 0;
}
// No specific completions for other commands
return 0;
}
} // end anonymous namespace
int main(int argc, char* argv[]) {
@ -1041,19 +1248,7 @@ int main(int argc, char* argv[]) {
} else if (command == "update") {
return update_tool(argc, argv);
} else if (command == "autocomplete") {
std::vector<std::string> args(argv + 2, argv + argc);
if (args.empty()) std::cout << R"(install
uninstall
publish
unpublish
update
version
create
hash
list
clean
help
)";
return autocomplete_command(argc, argv);
} else if (command == "version") {
std::cout << dropshell::VERSION << std::endl;
} else if (command == "create") {

View File

@ -1 +0,0 @@
test

View File

@ -1,7 +0,0 @@
#!/bin/bash
if [ "$1" = "version" ]; then
echo "1.0.0"
elif [ "$1" = "autocomplete" ]; then
echo "help"
echo "version"
fi

View File

@ -1,7 +0,0 @@
#!/bin/bash
if [ "$1" = "version" ]; then
echo "1.0.0"
elif [ "$1" = "autocomplete" ]; then
echo "help"
echo "version"
fi

View File

@ -68,6 +68,28 @@ cleanup() {
# Clean up noarch variant
$GETPKG unpublish "${TEST_TOOL_NAME}-noarch:universal" 2>/dev/null || true
# Clean up any remaining test packages that start with "test-"
echo "Cleaning up any remaining test packages..."
DIR_RESPONSE=$(curl -s "https://getpkg.xyz/dir" 2>/dev/null || echo "")
if [ -n "$DIR_RESPONSE" ]; then
# Extract test package labeltags from JSON response
if command -v jq >/dev/null 2>&1; then
TEST_PACKAGES=$(echo "$DIR_RESPONSE" | jq -r '.entries[]?.labeltags[]? // empty' 2>/dev/null | grep "^test-" | sort -u || echo "")
else
# Fallback: extract labeltags using grep and sed
TEST_PACKAGES=$(echo "$DIR_RESPONSE" | grep -o '"test-[^"]*"' | sed 's/"//g' | sort -u || echo "")
fi
if [ -n "$TEST_PACKAGES" ]; then
echo "$TEST_PACKAGES" | while read -r package; do
if [ -n "$package" ]; then
echo " Cleaning up orphaned test package: $package"
$GETPKG unpublish "$package" 2>/dev/null || true
fi
done
fi
fi
echo "Cleaned up test tools from getpkg.xyz"
else
echo "Note: SOS_WRITE_TOKEN not set, cannot clean up remote test objects"
@ -455,12 +477,13 @@ EOF
CONFIG_EXISTS=false
TOOL_DIR_EXISTS=false
SYMLINK_EXISTS=false
HELPER_SYMLINK_EXISTS=false
# HELPER_SYMLINK_EXISTS=false
[ -f ~/.config/getpkg/"${TEST_UNINSTALL_TOOL}.json" ] && CONFIG_EXISTS=true
[ -d ~/.getpkg/"$TEST_UNINSTALL_TOOL" ] && TOOL_DIR_EXISTS=true
[ -L ~/.local/bin/getpkg/"$TEST_UNINSTALL_TOOL" ] && SYMLINK_EXISTS=true
[ -L ~/.local/bin/getpkg/"${TEST_UNINSTALL_TOOL}-helper" ] && HELPER_SYMLINK_EXISTS=true
# Check if helper symlink exists (not currently used in validation)
# [ -L ~/.local/bin/getpkg/"${TEST_UNINSTALL_TOOL}-helper" ] && HELPER_SYMLINK_EXISTS=true
if $CONFIG_EXISTS && $TOOL_DIR_EXISTS && $SYMLINK_EXISTS; then
# Now uninstall
@ -528,6 +551,128 @@ EOF
fi
fi
# Test 13.5: Comprehensive unpublish functionality
echo -e "\nTest 13.5: Comprehensive unpublish functionality"
# Only run unpublish tests if SOS_WRITE_TOKEN is available
if [ -n "${SOS_WRITE_TOKEN:-}" ]; then
# Create unique test names for unpublish tests
UNPUBLISH_TOOL_BASE="test-unpublish-$RANDOM"
UNPUBLISH_TOOL_MULTI="${UNPUBLISH_TOOL_BASE}-multi"
UNPUBLISH_TOOL_CUSTOM="${UNPUBLISH_TOOL_BASE}-custom"
UNPUBLISH_TEST_DIR="${TEST_DIR}/unpublish_tests"
# Create test directory structure
mkdir -p "$UNPUBLISH_TEST_DIR"
# Test 13.5a: Create and publish tool with multiple architectures
echo "Test 13.5a: Unpublish tool with multiple architectures"
echo '#!/bin/bash
echo "Multi-arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_MULTI"
# Publish to multiple architectures
PUBLISH_x86_64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
PUBLISH_aarch64_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:aarch64" "$UNPUBLISH_TEST_DIR" 2>&1)
PUBLISH_universal_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_MULTI}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
if [[ "$PUBLISH_x86_64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_aarch64_OUTPUT" =~ Published! ]] && [[ "$PUBLISH_universal_OUTPUT" =~ Published! ]]; then
# Test robust unpublish - should remove ALL architectures
sleep 1 # Give server time to process all publishes
UNPUBLISH_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_MULTI" 2>&1)
UNPUBLISH_EXIT_CODE=$?
# Check that unpublish found and removed packages
if [ $UNPUBLISH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_OUTPUT" =~ "Found" ]] && [[ "$UNPUBLISH_OUTPUT" =~ "Successfully unpublished" ]]; then
print_test_result "Unpublish removes all architectures" 0
else
print_test_result "Unpublish removes all architectures" 1
echo " Unpublish failed: $UNPUBLISH_OUTPUT"
fi
else
print_test_result "Unpublish removes all architectures" 1
echo " Failed to publish test tool to multiple architectures"
echo " x86_64: $PUBLISH_x86_64_OUTPUT"
echo " aarch64: $PUBLISH_aarch64_OUTPUT"
echo " universal: $PUBLISH_universal_OUTPUT"
fi
# Test 13.5b: Unpublish tool with universal architecture
echo "Test 13.5b: Unpublish tool with universal architecture"
echo '#!/bin/bash
echo "Universal arch unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_CUSTOM"
# Publish with universal architecture
PUBLISH_CUSTOM_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_CUSTOM}:universal" "$UNPUBLISH_TEST_DIR" 2>&1)
if [[ "$PUBLISH_CUSTOM_OUTPUT" =~ Published! ]]; then
# Test that unpublish can find and remove custom tags
UNPUBLISH_CUSTOM_OUTPUT=$("$GETPKG" unpublish "$UNPUBLISH_TOOL_CUSTOM" 2>&1)
UNPUBLISH_CUSTOM_EXIT_CODE=$?
if [ $UNPUBLISH_CUSTOM_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_CUSTOM_OUTPUT" =~ Found\ ${UNPUBLISH_TOOL_CUSTOM}:universal ]]; then
print_test_result "Unpublish finds universal architecture" 0
else
print_test_result "Unpublish finds universal architecture" 1
echo " Failed to find or unpublish custom tag: $UNPUBLISH_CUSTOM_OUTPUT"
fi
else
print_test_result "Unpublish finds universal architecture" 1
echo " Failed to publish tool with custom tag: $PUBLISH_CUSTOM_OUTPUT"
fi
# Test 13.5c: Unpublish non-existent tool
echo "Test 13.5c: Unpublish non-existent tool"
NON_EXISTENT_TOOL="non-existent-tool-$RANDOM"
UNPUBLISH_MISSING_OUTPUT=$("$GETPKG" unpublish "$NON_EXISTENT_TOOL" 2>&1)
UNPUBLISH_MISSING_EXIT_CODE=$?
if [ $UNPUBLISH_MISSING_EXIT_CODE -ne 0 ] && [[ "$UNPUBLISH_MISSING_OUTPUT" =~ "No packages found" ]]; then
print_test_result "Unpublish handles missing tools gracefully" 0
else
print_test_result "Unpublish handles missing tools gracefully" 1
echo " Expected failure for non-existent tool, got: $UNPUBLISH_MISSING_OUTPUT"
fi
# Test 13.5d: Unpublish by hash
echo "Test 13.5d: Unpublish by hash"
UNPUBLISH_TOOL_HASH="${UNPUBLISH_TOOL_BASE}-hash"
echo '#!/bin/bash
echo "Hash unpublish test"' > "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
chmod +x "$UNPUBLISH_TEST_DIR/$UNPUBLISH_TOOL_HASH"
PUBLISH_HASH_OUTPUT=$("$GETPKG" publish "${UNPUBLISH_TOOL_HASH}:x86_64" "$UNPUBLISH_TEST_DIR" 2>&1)
if [[ "$PUBLISH_HASH_OUTPUT" =~ Hash:\ ([0-9]+) ]]; then
EXTRACTED_HASH="${BASH_REMATCH[1]}"
# Test unpublish by hash
UNPUBLISH_HASH_OUTPUT=$("$GETPKG" unpublish "$EXTRACTED_HASH" 2>&1)
UNPUBLISH_HASH_EXIT_CODE=$?
if [ $UNPUBLISH_HASH_EXIT_CODE -eq 0 ] && [[ "$UNPUBLISH_HASH_OUTPUT" =~ "Successfully unpublished hash" ]]; then
print_test_result "Unpublish by hash works" 0
else
print_test_result "Unpublish by hash works" 1
echo " Failed to unpublish by hash: $UNPUBLISH_HASH_OUTPUT"
fi
else
print_test_result "Unpublish by hash works" 1
echo " Could not extract hash from publish output"
fi
# Cleanup unpublish test directory
rm -rf "$UNPUBLISH_TEST_DIR"
else
echo " Skipping unpublish tests (SOS_WRITE_TOKEN not set)"
print_test_result "Unpublish removes all architectures" 0 # Pass as skipped
print_test_result "Unpublish finds universal architecture" 0
print_test_result "Unpublish handles missing tools gracefully" 0
print_test_result "Unpublish by hash works" 0
fi
# Test 14: Invalid tool name validation
echo -e "\nTest 14: Invalid tool name validation"
INVALID_OUTPUT=$(timeout 3 "$GETPKG" install "../evil-tool" 2>&1)

gp/gp
View File

@ -49,27 +49,43 @@ EOF
# Function to generate commit message based on changes
generate_commit_message() {
local files_changed
files_changed=$(git diff --cached --name-only)
local files_count
files_count=$(echo "$files_changed" | wc -l)
if [ -z "$files_changed" ]; then
files_changed=$(git diff --name-only)
files_count=$(echo "$files_changed" | wc -l)
# First check if we have staged changes
local has_staged_changes=false
if ! git diff --cached --quiet; then
has_staged_changes=true
fi
# If add-all is enabled, also include untracked files
if [ "$ADD_ALL" = true ] && [ -z "$files_changed" ]; then
files_changed=$(git ls-files --others --exclude-standard)
files_count=$(echo "$files_changed" | wc -l)
# Determine which changes to analyze based on staging status and ADD_ALL setting
local status_command=""
if [ "$has_staged_changes" = true ]; then
status_command="git diff --cached --name-status"
else
status_command="git diff --name-status"
fi
if [ -z "$files_changed" ]; then
# Get all changes (staged or unstaged depending on context)
local all_changes
all_changes=$($status_command)
# If no changes from diff, check for untracked files when add-all is enabled
if [ -z "$all_changes" ] && [ "$ADD_ALL" = true ]; then
local untracked_files
untracked_files=$(git ls-files --others --exclude-standard)
if [ -n "$untracked_files" ]; then
# Convert untracked files to "A" (added) status format
all_changes=$(echo "$untracked_files" | sed 's/^/A\t/')
fi
fi
if [ -z "$all_changes" ]; then
echo "No changes to commit"
return 1
fi
# Count total files
local files_count
files_count=$(echo "$all_changes" | wc -l)
# Generate smart commit message based on file types and changes
local has_source_files=false
local has_config_files=false
@ -77,7 +93,8 @@ generate_commit_message() {
local has_tests=false
local message=""
while IFS= read -r file; do
# Extract just the filenames for type detection
while IFS=$'\t' read -r status file; do
[ -z "$file" ] && continue
case "$file" in
@ -94,15 +111,18 @@ generate_commit_message() {
has_tests=true
;;
esac
done <<< "$files_changed"
done <<< "$all_changes"
# Create descriptive commit message
if [ "$files_count" -eq 1 ]; then
local change_line
change_line=$(echo "$all_changes" | head -1)
local status
local single_file
single_file=$(echo "$files_changed" | head -1)
local change_type
change_type=$(git diff --cached --name-status -- "$single_file" 2>/dev/null || git diff --name-status -- "$single_file")
case "${change_type:0:1}" in
status=$(echo "$change_line" | cut -f1)
single_file=$(echo "$change_line" | cut -f2)
case "${status:0:1}" in
A) message="Add $single_file" ;;
M) message="Update $single_file" ;;
D) message="Remove $single_file" ;;
@@ -110,6 +130,58 @@ generate_commit_message() {
*) message="Modify $single_file" ;;
esac
else
# For multiple files, analyze the types of changes
local added_count=0
local modified_count=0
local deleted_count=0
local renamed_count=0
# Use the all_changes variable we already have
# Count different types of changes
while IFS=$'\t' read -r status file; do
[ -z "$status" ] && continue
case "${status:0:1}" in
A) ((added_count++)) ;;
M) ((modified_count++)) ;;
D) ((deleted_count++)) ;;
R) ((renamed_count++)) ;;
esac
done <<< "$all_changes"
# Also count untracked files if add-all is enabled
if [ "$ADD_ALL" = true ]; then
local untracked_files
untracked_files=$(git ls-files --others --exclude-standard)
if [ -n "$untracked_files" ]; then
local untracked_count
untracked_count=$(echo "$untracked_files" | wc -l)
((added_count += untracked_count))
fi
fi
# Generate message based on change types
local change_parts=()
[ $added_count -gt 0 ] && change_parts+=("add $added_count")
[ $modified_count -gt 0 ] && change_parts+=("update $modified_count")
[ $deleted_count -gt 0 ] && change_parts+=("remove $deleted_count")
[ $renamed_count -gt 0 ] && change_parts+=("rename $renamed_count")
local change_desc=""
if [ ${#change_parts[@]} -eq 1 ]; then
change_desc="${change_parts[0]}"
elif [ ${#change_parts[@]} -eq 2 ]; then
change_desc="${change_parts[0]} and ${change_parts[1]}"
else
# Join all but last with commas, last with "and"
local last_idx=$((${#change_parts[@]} - 1))
for i in $(seq 0 $((last_idx - 1))); do
[ $i -gt 0 ] && change_desc+=", "
change_desc+="${change_parts[i]}"
done
change_desc+=" and ${change_parts[last_idx]}"
fi
local prefix=""
if $has_tests; then
prefix="test: "
@@ -121,18 +193,32 @@ generate_commit_message() {
prefix="feat: "
fi
message="${prefix}Update $files_count files"
# Capitalize first letter of change description
change_desc="$(echo "${change_desc:0:1}" | tr '[:lower:]' '[:upper:]')${change_desc:1}"
message="${prefix}${change_desc} files"
fi
echo "$message"
}
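
As a worked example of the message logic above (a sketch, assuming three added and one deleted test file are staged, so the hypothetical all_changes holds tab-separated lines like "A	test_foo.sh"):

# added_count=3, deleted_count=1
# change_parts=("add 3" "remove 1") -> change_desc="add 3 and remove 1"
# has_tests=true selects the "test: " prefix shown above
# Final message: "test: Add 3 and remove 1 files"
# With three change types (say add 1, update 2, remove 3), the seq/join
# branch instead yields "Add 1, update 2 and remove 3 files"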
# Function to check if we're in a git repository
# Function to check if we're in a git repository and change to repo root
check_git_repo() {
if ! git rev-parse --git-dir >/dev/null 2>&1; then
print_error "Not in a git repository"
exit 1
fi
# Change to the git repository root to ensure we operate on the entire repo
local git_root
git_root=$(git rev-parse --show-toplevel)
if [ "$PWD" != "$git_root" ]; then
print_info "Changing to git repository root: $git_root"
cd "$git_root" || {
print_error "Failed to change to git repository root"
exit 1
}
fi
}
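
A brief illustration of the new root-changing behavior (hypothetical repository path; the exact decoration depends on print_info):

$ cd /home/user/myrepo/src && gp
Changing to git repository root: /home/user/myrepo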
# Function to check for uncommitted changes and unpushed commits
@@ -225,19 +311,77 @@ show_status_and_confirm() {
# Show staged changes
if ! git diff --cached --quiet; then
print_info "Staged changes:"
git diff --cached --name-only -- | while IFS= read -r line; do echo " $line"; done
local staged_modified=""
local staged_deleted=""
local staged_added=""
# Get staged file status and categorize
while IFS=$'\t' read -r status file; do
[ -z "$status" ] && continue
case "${status:0:1}" in
A) staged_added="${staged_added}${file}\n" ;;
M) staged_modified="${staged_modified}${file}\n" ;;
D) staged_deleted="${staged_deleted}${file}\n" ;;
*) staged_modified="${staged_modified}${file}\n" ;; # Default to modified for other statuses
esac
done < <(git diff --cached --name-status)
# Show staged added files
if [ -n "$staged_added" ]; then
print_info "Staged new files:"
echo -e "$staged_added" | grep -v '^$' | while IFS= read -r line; do echo " $line"; done
fi
# Show staged modified files
if [ -n "$staged_modified" ]; then
print_info "Staged modified files:"
echo -e "$staged_modified" | grep -v '^$' | while IFS= read -r line; do echo " $line"; done
fi
# Show staged deleted files
if [ -n "$staged_deleted" ]; then
print_info "Staged deleted files:"
echo -e "$staged_deleted" | grep -v '^$' | while IFS= read -r line; do echo " $line"; done
fi
has_staged_changes=true
fi
# Show unstaged changes
if ! git diff --quiet; then
local modified_files=""
local deleted_files=""
# Get file status and categorize
while IFS=$'\t' read -r status file; do
[ -z "$status" ] && continue
case "${status:0:1}" in
M) modified_files="${modified_files}${file}\n" ;;
D) deleted_files="${deleted_files}${file}\n" ;;
*) modified_files="${modified_files}${file}\n" ;; # Default to modified for other statuses
esac
done < <(git diff --name-status)
# Show modified files
if [ -n "$modified_files" ]; then
if [ "$ADD_ALL" = true ]; then
print_info "Modified files (will be added):"
else
print_info "Modified files (unstaged, will NOT be included):"
fi
git diff --name-only -- | while IFS= read -r line; do echo " $line"; done
echo -e "$modified_files" | grep -v '^$' | while IFS= read -r line; do echo " $line"; done
fi
# Show deleted files
if [ -n "$deleted_files" ]; then
if [ "$ADD_ALL" = true ]; then
print_info "Deleted files (will be removed):"
else
print_info "Deleted files (unstaged, will NOT be included):"
fi
echo -e "$deleted_files" | grep -v '^$' | while IFS= read -r line; do echo " $line"; done
fi
has_unstaged_changes=true
fi
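
To illustrate the categorized status output (hypothetical file names; print_info decoration omitted), a run with one staged new file and one unstaged modification under add-all would print roughly:

Staged changes:
Staged new files:
  tools/new_helper.sh
Modified files (will be added):
  README.md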
@@ -350,7 +494,7 @@ case "${1:-}" in
exit 0
;;
version)
echo "gp version 2.0.0"
echo "2.0.1"
exit 0
;;
esac
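
A quick sanity check of the bumped version string (assuming the script is invoked as gp on PATH):

$ gp version
2.0.1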

sos/clean.sh Executable file

@@ -0,0 +1,20 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT="sos"
echo "Cleaning ${PROJECT}..."
# Remove output directory (if it exists)
if [ -d "${SCRIPT_DIR}/output" ]; then
echo "Removing output directory..."
rm -rf "${SCRIPT_DIR}/output"
fi
# Remove any temporary files
echo "Removing temporary files..."
find "${SCRIPT_DIR}" -name "*.tmp" -o -name "*.temp" -o -name "*~" | xargs -r rm -f
echo "${PROJECT} cleaned successfully"


@@ -25,6 +25,7 @@ GETPKG="${SCRIPT_DIR}/../getpkg/output/getpkg"
TOOLDIR="${SCRIPT_DIR}/tool"
mkdir -p "${TOOLDIR}"
cp "${SCRIPT_DIR}/whatsdirty" "${TOOLDIR}/whatsdirty"
cp "${SCRIPT_DIR}/setup_script.sh" "${TOOLDIR}/"
# publish universal tool.
"${GETPKG}" publish "whatsdirty" "${TOOLDIR}"