Switch from ELK to Loki!
All checks were successful
Test and Publish Templates / test-and-publish (push) Successful in 40s

This commit is contained in:
Your Name
2025-09-20 12:01:25 +12:00
parent d32042e42d
commit f114773d78
62 changed files with 1121 additions and 2899 deletions


@@ -1,17 +0,0 @@
# Template identifier - MUST match the directory name
TEMPLATE=logserver
# Requirements
REQUIRES_HOST_ROOT=false # No root access on host needed
REQUIRES_DOCKER=true # Docker is required
REQUIRES_DOCKER_ROOT=false # Docker root privileges not specifically needed
# Docker compose used for ELK stack
USES_DOCKER_COMPOSE=true
# Volume definitions for persistence
DATA_VOLUME="${CONTAINER_NAME}_elasticsearch_data"
LOGSTASH_VOLUME="${CONTAINER_NAME}_logstash_data"
KIBANA_VOLUME="${CONTAINER_NAME}_kibana_data"
CERTS_VOLUME="${CONTAINER_NAME}_certs"
CONFIG_VOLUME="${CONTAINER_NAME}_config"


@@ -1,6 +0,0 @@
# API Keys for LogServer Authentication
# Format: hostname:api_key
# Generated by generate-api-key.sh
api_keys:
  video: a7798c63c2ac439b5ba20f3bf8bf27b5361231cdcbdc4fc9d7af715308fdf707


@@ -0,0 +1,154 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": null,
"links": [],
"panels": [
{
"datasource": "Loki",
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 4,
"options": {
"content": "## Central Log Viewer\n\n**Quick Filters:** Click any label (hostname, container_name, job) to filter | **Search:** Use the search box above each panel | **Time Range:** Top right corner",
"mode": "markdown"
},
"pluginVersion": "7.5.7",
"title": "",
"type": "text"
},
{
"datasource": "Loki",
"fieldConfig": {
"defaults": {},
"overrides": []
},
"gridPos": {
"h": 10,
"w": 24,
"x": 0,
"y": 3
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": true,
"showTime": true,
"sortOrder": "Descending",
"wrapLogMessage": true
},
"targets": [
{
"expr": "{job=\"docker\"}",
"legendFormat": "{{hostname}} - {{container_name}}",
"refId": "A"
}
],
"title": "Docker Container Logs (All Servers)",
"type": "logs"
},
{
"datasource": "Loki",
"fieldConfig": {
"defaults": {},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 13
},
"id": 3,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": true,
"showTime": true,
"sortOrder": "Descending",
"wrapLogMessage": true
},
"targets": [
{
"expr": "{job=~\"syslog|auth\"}",
"legendFormat": "{{hostname}} - {{job}}",
"refId": "A"
}
],
"title": "System Logs (All Servers)",
"type": "logs"
}
],
"refresh": "10s",
"schemaVersion": 27,
"style": "dark",
"tags": ["logs", "central"],
"templating": {
"list": [
{
"allValue": ".*",
"current": {
"selected": true,
"text": ["All"],
"value": ["$__all"]
},
"datasource": "Loki",
"definition": "label_values(hostname)",
"description": "Filter by server",
"error": null,
"hide": 0,
"includeAll": true,
"label": "Server",
"multi": true,
"name": "hostname",
"options": [],
"query": "label_values(hostname)",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 1,
"type": "query"
}
]
},
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {
"refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h"]
},
"timezone": "",
"title": "Central Logs",
"uid": "central-logs",
"version": 0
}


@@ -0,0 +1,69 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": null,
"links": [],
"panels": [
{
"datasource": "Loki",
"fieldConfig": {
"defaults": {},
"overrides": []
},
"gridPos": {
"h": 20,
"w": 24,
"x": 0,
"y": 0
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": true,
"showTime": true,
"sortOrder": "Descending",
"wrapLogMessage": true
},
"targets": [
{
"expr": "{job=~\"docker|syslog|auth\"}",
"refId": "A"
}
],
"title": "All Logs",
"type": "logs"
}
],
"schemaVersion": 27,
"style": "dark",
"tags": ["logs"],
"templating": {
"list": []
},
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Simple Logs",
"uid": "simple-logs",
"version": 0
}


@@ -0,0 +1,12 @@
apiVersion: 1

providers:
  - name: 'default'
    orgId: 1
    folder: ''
    type: file
    disableDeletion: false
    updateIntervalSeconds: 10
    allowUiUpdates: true
    options:
      path: /var/lib/grafana/dashboards
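# Dashboards are loaded from JSON files in the path above and re-scanned
# every updateIntervalSeconds, so the two dashboards in this commit are
# picked up without restarting Grafana.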


@@ -0,0 +1,9 @@
apiVersion: 1

datasources:
  - name: Loki
    type: loki
    access: proxy
    url: http://loki:3100
    isDefault: true
    editable: false
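# Note (assumption): Grafana reaches Loki directly on the internal Docker
# network here, so this datasource bypasses the nginx basic-auth proxy;
# the proxy below only guards the externally exposed endpoint.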


@@ -1,142 +0,0 @@
# Logstash Configuration for LogServer
# Handles Beats input with API key authentication
input {
  # Beats input for Filebeat clients
  beats {
    port => 5044
    ssl => false # Set to true for production with proper certificates
    # API key authentication handled via filter below
  }

  # Optional: Syslog input for direct syslog shipping
  tcp {
    port => 514
    type => "syslog"
  }
  udp {
    port => 514
    type => "syslog"
  }
}

filter {
  # API key validation - check if the client provided a valid key.
  # The API key should be in the [fields][api_key] field from Filebeat.
  if [fields][api_key] {
    # Load and validate API key
    ruby {
      init => "
        require 'yaml'
        @api_keys = {}
        # Load API keys from file
        begin
          if File.exist?('/usr/share/logstash/config/api-keys.yml')
            config = YAML.load_file('/usr/share/logstash/config/api-keys.yml')
            if config && config['api_keys']
              config['api_keys'].each do |hostname, key|
                @api_keys[key.to_s.strip] = hostname.to_s.strip
              end
            end
          end
        rescue => e
          @logger.error('Failed to load API keys', :error => e.message)
        end
      "
      code => "
        api_key = event.get('[fields][api_key]')
        if api_key && @api_keys.has_key?(api_key)
          # Valid API key - add hostname to event
          event.set('[@metadata][client_hostname]', @api_keys[api_key])
          event.set('[@metadata][authenticated]', true)
        else
          # Invalid API key
          event.set('[@metadata][authenticated]', false)
          event.tag('_authfailure')
        end
      "
    }

    # Drop unauthorized events
    if "_authfailure" in [tags] {
      drop { }
    }
  } else {
    # No API key provided - mark as unauthenticated.
    # You can choose to drop these or allow them based on your security requirements.
    mutate {
      add_tag => [ "no_api_key" ]
    }
    # Uncomment to require API keys for all connections:
    # drop { }
  }

  # Parse Docker logs
  if [docker] {
    # Docker metadata is already parsed by Filebeat
    mutate {
      add_field => {
        "container_name"  => "%{[docker][container][name]}"
        "container_id"    => "%{[docker][container][id]}"
        "container_image" => "%{[docker][container][image]}"
      }
    }
  }

  # Parse syslog
  if [type] == "syslog" {
    grok {
      match => {
        "message" => "%{SYSLOGLINE}"
      }
    }
    date {
      match => [ "timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }

  # Parse JSON logs if they exist
  if [message] =~ /^\{.*\}$/ {
    json {
      source => "message"
      target => "json_message"
    }
  }

  # Add timestamp if not present
  if ![timestamp] {
    mutate {
      add_field => { "timestamp" => "%{@timestamp}" }
    }
  }

  # Clean up metadata
  mutate {
    remove_field => [ "@version", "beat", "offset", "prospector" ]
  }
}

output {
  # Send to Elasticsearch with authentication
  elasticsearch {
    hosts    => ["elasticsearch:9200"]
    user     => "elastic"
    password => "${ELASTIC_PASSWORD:changeme}"
    # Use different indices based on input type
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
    # Manage index templates
    manage_template    => true
    template_overwrite => true
  }

  # Optional: Debug output (comment out in production)
  # stdout {
  #   codec => rubydebug
  # }
}


@@ -1,29 +0,0 @@
# Logstash Configuration Settings
# This file contains Logstash settings (not pipeline configuration)
# Node name
node.name: "logstash"
# Pipeline settings
pipeline.workers: 2
pipeline.batch.size: 125
pipeline.batch.delay: 50
pipeline.ecs_compatibility: disabled
# HTTP API settings
http.host: "0.0.0.0"
http.port: 9600
# Monitoring
monitoring.enabled: false
# Queue settings
queue.type: memory
queue.max_bytes: 1gb
# Path settings are handled by Docker volumes
# path.data: /usr/share/logstash/data
# path.logs: /usr/share/logstash/logs
# Log level
log.level: info


@@ -0,0 +1,51 @@
auth_enabled: false

server:
  http_listen_port: 3100
  grpc_listen_port: 9096

ingester:
  wal:
    enabled: true
    dir: /loki/wal
  lifecycler:
    address: 127.0.0.1
    ring:
      kvstore:
        store: inmemory
      replication_factor: 1

schema_config:
  configs:
    - from: 2020-10-24
      store: boltdb-shipper
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 24h

storage_config:
  boltdb_shipper:
    active_index_directory: /loki/boltdb-shipper-active
    cache_location: /loki/boltdb-shipper-cache
    cache_ttl: 24h
  filesystem:
    directory: /loki/chunks

compactor:
  working_directory: /loki/boltdb-shipper-compactor

limits_config:
  enforce_metric_name: false
  reject_old_samples: true
  reject_old_samples_max_age: 168h
  max_entries_limit_per_query: 5000
  retention_period: 168h # 7 days

chunk_store_config:
  max_look_back_period: 168h

table_manager:
  retention_deletes_enabled: true
  retention_period: 168h
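# Retention note: 168h = 7 days throughout this file; with
# retention_deletes_enabled the table_manager removes chunks older than
# that, matching LOG_RETENTION=7 in service.env.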


@@ -0,0 +1,30 @@
events {
    worker_connections 1024;
}

http {
    upstream loki {
        server loki:3100;
    }

    server {
        listen 80;

        # Require authentication for all requests
        auth_basic "Loki Authentication";
        auth_basic_user_file /etc/nginx/.htpasswd;

        # Proxy all requests to Loki
        location / {
            proxy_pass http://loki;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;

            # Important for the Loki push endpoint
            client_max_body_size 100M;
            client_body_buffer_size 10M;
        }
    }
}
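# Assumed setup step (not shown in this diff): the credential file
# referenced above is created once at install time, e.g.
#   htpasswd -bc .htpasswd "$LOKI_USER" "$LOKI_PASSWORD"
# Clients then push through this proxy with HTTP basic auth at the
# standard Loki endpoint /loki/api/v1/push.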


@@ -1,46 +1,20 @@
-# Service identification
+# Log Server Configuration (Loki + Grafana)
 CONTAINER_NAME=logserver
 # Server settings (REQUIRED by dropshell)
 SSH_USER="root"
-# Elasticsearch settings
-ES_VERSION=7.17.23
-ES_HEAP_SIZE=2g
-ES_MAX_MAP_COUNT=262144
-# Logstash settings
-LS_VERSION=7.17.23
-LS_HEAP_SIZE=1g
-LS_PIPELINE_WORKERS=2
-# Kibana settings
-KIBANA_VERSION=7.17.23
-# Authentication (IMPORTANT: Change these!)
-ELASTIC_PASSWORD=changeme # Password for 'elastic' superuser (internal use)
-KIBANA_USERNAME=admin # Your login username for Kibana
-KIBANA_USER_PASSWORD=changeme # Your login password for Kibana
 # Ports
-KIBANA_PORT=5601
-LOGSTASH_BEATS_PORT=5044
-LOGSTASH_SYSLOG_PORT=514
+WEB_PORT=3000 # Grafana web UI
+LOKI_PORT=3100 # Loki API (for clients to send logs)
-# Server configuration
-SERVER_PUBLICBASEURL=http://localhost:5601 # Change to your server's actual URL
+# Authentication for Grafana
+ADMIN_USER=admin
+ADMIN_PASSWORD=changeme
-# Log retention
-LOG_RETENTION_DAYS=30
-LOG_MAX_SIZE_GB=50
+# Authentication for Loki (clients must use these)
+LOKI_USER=logclient
+LOKI_PASSWORD=changeme
-# Authentication
-ENABLE_TLS=true
-API_KEYS_FILE=${CONFIG_PATH}/api-keys.yml
-# Network Security
-ALLOWED_IPS="" # Comma-separated list, empty = all
-# Resource limits
-MAX_CPU_PERCENT=80
-MAX_MEMORY=4GB
+# Log retention (days)
+LOG_RETENTION=7
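The dashboards above filter on job (docker, syslog, auth) and hostname labels, and service.env defines LOKI_USER/LOKI_PASSWORD for clients, so each client is expected to run a log shipper that attaches those labels. A minimal Promtail sketch of such a client, with the log server address, the video hostname, and the file path as illustrative assumptions (Promtail itself is not part of this commit):

# promtail.yml (hypothetical client-side config, not in this diff)
server:
  http_listen_port: 9080

positions:
  filename: /var/lib/promtail/positions.yaml

clients:
  # Push through the nginx basic-auth proxy on the log server
  - url: http://logserver/loki/api/v1/push
    basic_auth:
      username: logclient # LOKI_USER from service.env
      password: changeme  # LOKI_PASSWORD from service.env

scrape_configs:
  - job_name: system
    static_configs:
      - targets: [localhost]
        labels:
          job: syslog
          hostname: video # example client name
          __path__: /var/log/syslog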


@@ -1,43 +0,0 @@
# Ruby script for Logstash to validate API keys
# This is a simplified validation - in production, use proper authentication
require 'yaml'

def register(params)
  @api_keys_file = params["api_keys_file"]
end

def filter(event)
  # Get the API key from the event
  api_key = event.get("[api_key]") || event.get("[@metadata][api_key]")

  # If no API key, pass through (for backwards compatibility).
  # In production, you should reject events without valid keys.
  if api_key.nil? || api_key.empty?
    # For now, allow events without API keys
    # event.cancel # Uncomment to require API keys
    return [event]
  end

  # Load API keys from file
  begin
    if File.exist?(@api_keys_file)
      config = YAML.load_file(@api_keys_file)
      valid_keys = config['api_keys'].values if config && config['api_keys']

      # Check if the provided key is valid
      if valid_keys && valid_keys.include?(api_key)
        # Valid key - let the event through
        event.set("[@metadata][authenticated]", true)
      else
        # Invalid key - drop the event
        event.cancel
      end
    end
  rescue => e
    # Log error but don't crash
    event.set("[@metadata][auth_error]", e.message)
  end

  return [event]
end