Add llama.cpp monitor dropshell template with GPU dashboard
All checks were successful
Test and Publish Templates / test-and-publish (push) Successful in 9s
All checks were successful
Test and Publish Templates / test-and-publish (push) Successful in 9s
This commit is contained in:
11
llamacpp-monitor/status.sh
Executable file
11
llamacpp-monitor/status.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
# status.sh — health check for the llama.cpp monitor dropshell template.
#
# Required environment variables (asserted below via _check_required_env_vars):
#   AGENT_PATH     - path to the dropshell agent dir providing common.sh
#   CONTAINER_NAME - name of the container expected to be running
#   MONITOR_PORT   - host port where the monitor's /health endpoint listens
#
# Exits 0 when the container is running and the monitor answers /health;
# otherwise _die (from common.sh) prints a diagnostic and exits non-zero.

source "${AGENT_PATH}/common.sh"

_check_required_env_vars "CONTAINER_NAME" "MONITOR_PORT"

# Quote the expansion so container names are passed to the helper as a
# single argument even if they contain whitespace or glob characters.
_is_container_running "$CONTAINER_NAME" || _die "Service is not running - did not find container $CONTAINER_NAME."

# curl -s: no progress output; -f: fail (non-zero exit) on HTTP >= 400,
# so an unhealthy monitor trips the || branch below.
curl -sf "http://localhost:${MONITOR_PORT}/health" > /dev/null \
    || _die "Service is not healthy - no response from monitor on port ${MONITOR_PORT}."

echo "Service ${CONTAINER_NAME} is healthy."
exit 0
Reference in New Issue
Block a user