docs: Add 6 and update 11 files
All checks were successful
Test and Publish Templates / test-and-publish (push) Successful in 44s
All checks were successful
Test and Publish Templates / test-and-publish (push) Successful in 44s
This commit is contained in:
94
logserver/config/logstash.conf
Normal file
94
logserver/config/logstash.conf
Normal file
@@ -0,0 +1,94 @@
|
||||
# Logstash Configuration for LogServer
# Handles Beats input with API key authentication

input {
    # Filebeat clients ship their logs here.
    beats {
        port => 5044
        ssl => false # Set to true for production with proper certificates
        # API key authentication handled via filter below
    }

    # Optional: accept syslog shipped directly over TCP or UDP.
    # NOTE(review): port 514 is privileged on Linux — the Logstash process
    # (or its container port mapping) must be allowed to bind it; confirm
    # the deployment handles this.
    tcp {
        port => 514
        type => "syslog"
    }

    udp {
        port => 514
        type => "syslog"
    }
}
|
||||
|
||||
filter {
    # NOTE(review): API key validation is not implemented yet — every
    # connection is currently accepted as-is.
    # TODO: Implement proper API key validation

    # Docker metadata is already parsed by Filebeat; promote the most
    # useful container fields to the top level of the event.
    if [docker] {
        mutate {
            add_field => {
                "container_name" => "%{[docker][container][name]}"
                "container_id" => "%{[docker][container][id]}"
                "container_image" => "%{[docker][container][image]}"
            }
        }
    }

    # Structure raw syslog lines and take the event time from the
    # syslog timestamp field that SYSLOGLINE extracts.
    if [type] == "syslog" {
        grok {
            match => { "message" => "%{SYSLOGLINE}" }
        }
        date {
            match => [ "timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
        }
    }

    # Messages that look like a single JSON object are parsed into the
    # [json_message] field, leaving the original message untouched.
    if [message] =~ /^\{.*\}$/ {
        json {
            source => "message"
            target => "json_message"
        }
    }

    # Guarantee a top-level "timestamp" field on every event.
    if ![timestamp] {
        mutate {
            add_field => { "timestamp" => "%{@timestamp}" }
        }
    }

    # Drop shipper bookkeeping fields that are never queried.
    mutate {
        remove_field => [ "@version", "beat", "offset", "prospector" ]
    }
}
|
||||
|
||||
output {
    # Ship events to Elasticsearch with basic authentication.
    # ELASTIC_PASSWORD is read from the environment, defaulting to
    # "changeme" — override it in production.
    #
    # Fix: events from the tcp/udp syslog inputs carry no
    # [@metadata][beat] / [@metadata][version] fields, so the original
    # single index pattern produced a literal
    # "%{[@metadata][beat]}-%{[@metadata][version]}-..." index name for
    # them (Logstash sprintf leaves unresolved references verbatim).
    # Route syslog events to their own daily index instead; Beats
    # events keep the original index pattern, so existing Beats
    # indices are unchanged.
    if [type] == "syslog" {
        elasticsearch {
            hosts => ["elasticsearch:9200"]
            user => "elastic"
            password => "${ELASTIC_PASSWORD:changeme}"
            index => "syslog-%{+YYYY.MM.dd}"
        }
    } else {
        elasticsearch {
            hosts => ["elasticsearch:9200"]
            user => "elastic"
            password => "${ELASTIC_PASSWORD:changeme}"

            # One index per Beats shipper, version, and day.
            index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"

            # Let Logstash install and keep its index template current.
            manage_template => true
            template_overwrite => true
        }
    }

    # Optional: Debug output (comment out in production)
    # stdout {
    #   codec => rubydebug
    # }
}
|
Reference in New Issue
Block a user