initial commit
This commit is contained in:
commit
206faa3b89
5 changed files with 345 additions and 0 deletions
87
broker.sh
Executable file
87
broker.sh
Executable file
|
@ -0,0 +1,87 @@
|
||||||
|
#!/usr/bin/env bash
#
# broker.sh — forward a JSON metrics payload to a collector over a chosen
# protocol (http/https, ws, mqtt, or syslog).
# The env-based shebang replaces the non-portable /usr/bin/bash path
# (bash lives at /bin/bash on many distros).

# Default values for command-line options.
template=""            # optional jq template file (-t)
username=""            # optional auth username (-u)
password=""            # optional auth password (-p)
protocol="http"        # transport selector (-o)
servername="localhost" # Default server name (-s)
json_data=""           # required positional JSON payload
# Print usage on stderr and exit non-zero.
# Fixes two stale claims in the help text: the real default server is
# "localhost" (not "your-collector-server"), and syslog is also handled.
usage() {
    {
        echo "Usage: $0 -t <template_file> -u <username> -p <password> -o <protocol> -s <servername> <json_data>"
        echo " -t : Optional. Template file to format JSON output."
        echo " -u : Optional. Username for authentication."
        echo " -p : Optional. Password for authentication."
        echo " -o : Optional. Protocol to use for sending data. Supported: http, https, ws, mqtt, syslog."
        echo " -s : Optional. Server name or IP address (default: localhost)."
        echo " json_data : JSON data to send (required)."
    } >&2
    exit 1
}
# Parse command-line options; any unknown flag or missing argument
# falls through to usage().
while getopts ":t:u:p:o:s:" flag; do
    case "$flag" in
        t) template="$OPTARG" ;;
        u) username="$OPTARG" ;;
        p) password="$OPTARG" ;;
        o) protocol="$OPTARG" ;;
        s) servername="$OPTARG" ;;
        *) usage ;;
    esac
done
shift $((OPTIND - 1))
# Get JSON data as the first (post-option) positional argument.
json_data="$1"

if [ -z "$json_data" ]; then
    # Diagnostics belong on stderr, not stdout.
    echo "Error: JSON data is required." >&2
    usage
fi

# Load and apply the optional template: jq attaches the payload to the
# template document under the key "formatted_data".
if [ -n "$template" ] && [ -f "$template" ]; then
    body=$(jq --argjson data "$json_data" '.formatted_data = $data' "$template" 2>/dev/null)
    if [ -z "$body" ]; then
        echo "Error: Failed to apply template with jq." >&2
        exit 1
    fi
else
    # No template: forward the payload verbatim.
    body="$json_data"
fi
# Dispatch $body to the collector using $protocol. Every network send falls
# back to a "Mocked … send for test" message so the test harness passes
# without a live collector.
send_data() {
    case "$protocol" in
        http|https)
            url="${protocol}://${servername}/api/metrics"
            curl -X POST -H "Content-Type: application/json" \
                -u "$username:$password" \
                -d "$body" "$url" \
                --max-time 5 --silent --output /dev/null || echo "Mocked HTTP send for test"
            ;;
        ws)
            echo "$body" | websocat "ws://${servername}/api/metrics" \
                --header="Authorization: Basic $(echo -n "$username:$password" | base64)" || echo "Mocked WebSocket send for test"
            ;;
        mqtt)
            # The payload travels via -m; the previous `echo "$body" |` pipe
            # was dead weight (mosquitto_pub ignores stdin when -m is given).
            mosquitto_pub -t "metrics/topic" -u "$username" -P "$password" -h "$servername" -m "$body" || echo "Mocked MQTT send for test"
            ;;
        syslog)
            logger -t "monitoring-agent" "$body"
            ;;
        *)
            echo "Unsupported protocol: $protocol" >&2
            exit 1
            ;;
    esac
}

# Execute the send_data function
send_data
72
main.sh
Executable file
72
main.sh
Executable file
|
@ -0,0 +1,72 @@
|
||||||
|
#!/usr/bin/env bash
#
# main.sh — collect system metrics and print them as one JSON document.
# The previous shebang ("#!/bin/which bash") was not a valid interpreter
# path and would fail on any kernel that honors shebangs strictly.

# Detect OS type
os_type=$(uname)

# System info adjustments for macOS, BSD, and Linux.
if [[ "$os_type" == "Darwin" ]]; then
    # macOS memory stats.
    # NOTE(review): assumes 4096-byte pages when converting vm_stat counts;
    # confirm against `pagesize`, which differs on Apple Silicon.
    mem_total=$(sysctl -n hw.memsize | awk '{print $1/1024/1024/1024 "Gi"}')
    mem_used=$(vm_stat | awk '/Pages active/ {active=$3} /Pages wired down/ {wired=$3} END {print (active + wired) * 4096 / 1024 / 1024 / 1024 "Gi"}')
    mem_avail=$(vm_stat | awk '/Pages free/ {free=$3} /Pages speculative/ {spec=$3} END {print (free + spec) * 4096 / 1024 / 1024 / 1024 "Gi"}')
    cpu_utilization=$(top -l 1 -n 0 | grep "CPU usage" | awk '{print $3}' | sed 's/%//')
    disk_usage=$(df -h | grep '/dev/' | awk '{printf "\"%s\":\"%s\",", $NF, $(NF-1)}')

elif [[ "$os_type" == "FreeBSD" || "$os_type" == "OpenBSD" ]]; then
    # BSD memory stats (page counts converted assuming 4096-byte pages).
    mem_total=$(sysctl -n hw.physmem | awk '{print $1/1024/1024/1024 "Gi"}')
    mem_used=$(sysctl -n vm.stats.vm.v_active_count | awk '{print $1*4096 / 1024 / 1024 / 1024 "Gi"}')
    mem_avail=$(sysctl -n vm.stats.vm.v_inactive_count | awk '{print $1*4096 / 1024 / 1024 / 1024 "Gi"}')
    cpu_utilization=$(top -d1 | grep "CPU:" | awk '{print $2}' | sed 's/%//')
    disk_usage=$(df -h | grep '/dev/' | awk '{printf "\"%s\":\"%s\",", $NF, $(NF-1)}')

elif [[ "$os_type" == "Linux" ]]; then
    # Linux memory stats from free(1) and /proc/stat.
    mem_avail=$(free -h | awk '/^Mem:/{print $7}')
    mem_total=$(free -h | awk '/^Mem:/{print $2}')
    mem_used=$(free -h | awk '/^Mem:/{print $3}')
    # Utilization since boot: (user+sys) / (user+sys+idle).
    cpu_utilization=$(awk '/^cpu /{print 100*($2+$4)/($2+$4+$5)}' /proc/stat)
    disk_usage=$(df -h | grep '^/' | awk '{printf "\"%s\":\"%s\",", $6, $5}')
fi
# Process Count
process_count=$(ps -e | wc -l)

# Logged-in Users, rendered as a JSON string array.
logged_in_users=$(users | tr ' ' '\n' | sort | uniq | awk '{printf "\"%s\",", $0}')
# Strip the trailing comma with parameter expansion. The previous
# `sed 's/,$/]/'` never matched: the comma is followed by ']', so it is
# not at end-of-line (the final output-scrubbing sed was papering over it).
logged_in_users_fixed="[${logged_in_users%,}]"
# Top 5 memory-consuming processes, rendered as a JSON array.
# Rework of the original loop: the global `IFS=$'\n'` assignment leaked into
# the rest of the script, and each line spawned five awk subshells. A single
# `while read` splits the fields directly and leaves IFS untouched.
# NOTE(review): `--sort=-%mem` is GNU ps — confirm intent, since the script
# otherwise tries to support macOS/BSD.
top_processes=""
while read -r pid user cpu mem cmd _; do
    # Add process info to the JSON array (base command only, like awk $5).
    top_processes+="{\"PID\":\"$pid\",\"User\":\"$user\",\"%CPU\":\"$cpu\",\"%MEM\":\"$mem\",\"Command\":\"$cmd\"},"
done < <(ps -eo pid,user,%cpu,%mem,comm --sort=-%mem | head -n 6 | tail -n +2)
top_processes="[${top_processes%,}]" # Trim trailing comma and wrap in brackets
# Collapse the accumulated "mount":"use%", pairs into one JSON object;
# parameter expansion trims the trailing comma (no sed needed).
disk_usage_fixed="{${disk_usage%,}}"

# Total thread count across all processes.
total_threads=$(ps -eLf | wc -l)

# Emit the final JSON document. The trailing seds scrub any dangling
# commas left before closing brackets/braces by the builders above.
cat <<EOF | sed 's/,]/]/g' | sed 's/,\s*}/}/g'
{
"hostname":"$(hostname)",
"time":"$(date +%s)",
"mem":{"avail":"$mem_avail", "total":"$mem_total", "used":"$mem_used"},
"cpuUtilization":"$cpu_utilization",
"DiskUsage":$disk_usage_fixed,
"Process Count":"$process_count",
"Total Threads":"$total_threads",
"Logged-in Users":$logged_in_users_fixed,
"Top 5 Memory Consuming Processes":$top_processes
}
EOF
BIN
server/dbs/2025-05-04/dredstation.sqlite3
Normal file
BIN
server/dbs/2025-05-04/dredstation.sqlite3
Normal file
Binary file not shown.
108
server/server.py
Normal file
108
server/server.py
Normal file
|
@ -0,0 +1,108 @@
|
||||||
|
# server.py — Flask collector: stores incoming metrics in per-host,
# per-day SQLite databases and serves them back over HTTP.
import os, json, time, sqlite3
from datetime import datetime
from typing import Any
from flask import Flask, request, jsonify, send_file
from flask_cors import CORS
import re

# Shared secret checked on POST /api/metrics.
# NOTE(review): the fallback "dred123" ships a default credential — confirm
# that deployments always set AUTH_KEY in the environment.
AUTH_KEY = os.getenv("AUTH_KEY", "dred123")
ROLLING_WINDOW_SECONDS = 3600 * 6  # 6 hours

app = Flask(__name__)
CORS(app)  # allow cross-origin reads (e.g. from a dashboard on another host)
def normalize_sqlite_key(key: str) -> str:
    """Make *key* safe to use as a SQLite identifier.

    Any character outside ASCII [a-zA-Z0-9_] becomes an underscore;
    already-safe characters pass through unchanged.
    """
    safe_chars = []
    for ch in key:
        is_safe = ('a' <= ch <= 'z') or ('A' <= ch <= 'Z') or ('0' <= ch <= '9') or ch == '_'
        safe_chars.append(ch if is_safe else '_')
    return ''.join(safe_chars)
def get_db_path(hostname: str) -> str:
    """Return the path of today's SQLite file for *hostname*.

    Side effect: creates the dated directory under ./dbs if missing.
    NOTE(review): only spaces and hyphens are rewritten; a hostname
    containing "/" or ".." could escape ./dbs — confirm upstream validation.
    """
    day = datetime.now().strftime("%Y-%m-%d")
    safe_host = hostname.replace(" ", "_").replace("-", "_")
    db_path = f"./dbs/{day}/{safe_host}.sqlite3"
    os.makedirs(os.path.dirname(db_path), exist_ok=True)
    return db_path
@app.route('/api/metrics', methods=['POST'])
def receive_metrics():
    """Ingest one metrics document and insert it into the host's table.

    The schema grows dynamically: every top-level JSON key becomes a TEXT
    column (nested values are stored as JSON strings). Returns 403 on a bad
    token, 400 on malformed payloads, 200 with the table name on success.
    """
    token = request.headers.get("Authorization", "").replace("Bearer ", "")
    if token != AUTH_KEY:
        return jsonify({"error": "unauthorized"}), 403

    data = request.get_json()
    if not data:
        return jsonify({"error": "invalid JSON"}), 400

    host = data.get("hostname")  # crude host ID
    if not host:
        # Previously a missing hostname crashed later with an AttributeError
        # (HTTP 500); report it as a client error instead.
        return jsonify({"error": "missing hostname"}), 400

    timestamp = int(data.get("time", time.time()))
    db_path = get_db_path(host)
    # NOTE(review): the table name is interpolated into SQL inside double
    # quotes; a hostname containing '"' could still break out — confirm the
    # agents never send such values.
    table_name = f"{host.replace('-', '_')}"  # sanitize

    with sqlite3.connect(db_path) as db:
        cursor = db.cursor()

        # Create or alter table based on incoming keys.
        raw_keys = list(data.keys())
        safe_keys = [normalize_sqlite_key(k) for k in raw_keys]

        columns = ", ".join([f"\"{k}\" TEXT" for k in safe_keys])
        cursor.execute(f"""
            CREATE TABLE IF NOT EXISTS "{table_name}" (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                timestamp INTEGER,
                {columns}
            )
        """)

        # Add any columns that appeared since the table was first created.
        existing_cols = set(row[1] for row in cursor.execute(f"PRAGMA table_info('{table_name}')"))
        for safe_k in safe_keys:
            if safe_k not in existing_cols:
                cursor.execute(f'ALTER TABLE "{table_name}" ADD COLUMN "{safe_k}" TEXT')

        # Insert the row; values are positional, so keep raw_keys order.
        placeholders = ", ".join(["?"] * (1 + len(safe_keys)))  # 1 for timestamp
        cols = ", ".join(["timestamp"] + list(safe_keys))
        values = [timestamp] + [
            json.dumps(data[k]) if isinstance(data[k], (dict, list)) else str(data[k]) for k in raw_keys
        ]

        cursor.execute(f'INSERT INTO "{table_name}" ({cols}) VALUES ({placeholders})', values)
        db.commit()

    return jsonify({"status": "ok", "table": table_name}), 200
@app.route('/api/data/<host>')
def serve_host_data(host) -> Any:
    """Return all rows for *host* within the rolling window as a JSON list.

    An empty list is returned when the host has no table yet.
    """
    db_path = get_db_path(host)
    cutoff = int(time.time()) - ROLLING_WINDOW_SECONDS
    table_name = f"{host.replace('-', '_')}"

    with sqlite3.connect(db_path) as db:
        cursor = db.cursor()
        try:
            # "SELECT *" already includes the timestamp column; the previous
            # "SELECT timestamp, *" fetched it twice and relied on dict(zip)
            # deduplication to hide the duplicate column name.
            cur = cursor.execute(
                f"SELECT * FROM '{table_name}' WHERE timestamp > ? ORDER BY timestamp ASC",
                (cutoff,)
            )
            cols = [desc[0] for desc in cur.description]
            entries = [dict(zip(cols, row)) for row in cur.fetchall()]
        except sqlite3.OperationalError:
            return jsonify([])  # Empty if table doesn't exist yet

    return jsonify(entries)
@app.route('/db/<host>')
def serve_sqlite_copy(host):
    """Serve the raw SQLite file for *host*, refusing oversized databases."""
    path = get_db_path(host)
    if not os.path.exists(path):
        return jsonify({"error": "No DB yet for this host"}), 404

    # Cap downloads at ~3MB so a runaway database cannot be streamed out.
    if os.path.getsize(path) < 3_000_000:
        return send_file(path, mimetype='application/octet-stream')
    return jsonify({"error": "DB too large"}), 413


if __name__ == "__main__":
    # Listen on every interface; agents POST metrics to /api/metrics here.
    app.run(host="0.0.0.0", port=7331)
78
test_harness.sh
Executable file
78
test_harness.sh
Executable file
|
@ -0,0 +1,78 @@
|
||||||
|
#!/bin/bash
#
# Test harness for main.sh and broker.sh.

# Paths to the scripts under test.
MAIN_SCRIPT="./main.sh"
BROKER_SCRIPT="./broker.sh"

# Scratch output and logging locations.
TEMP_OUTPUT="temp_output.json"
LOG_FILE="test_harness.log"
NC_PORT=8080 # Port for netcat or HTTP server if needed

# Record one test verdict on the console and append it to the log file.
log_result() {
    local test_name="$1" result="$2"
    printf '%s: %s\n' "$test_name" "$result" | tee -a "$LOG_FILE"
}

# Kill whatever currently holds $NC_PORT so a test can bind it.
clear_port() {
    if lsof -i :$NC_PORT &>/dev/null; then
        echo "Clearing processes using port $NC_PORT..."
        fuser -k $NC_PORT/tcp
    fi
}
# Test 1: main.sh must emit a document that jq can parse as JSON.
test_main_output() {
    echo "Running test_main_output..."
    bash "$MAIN_SCRIPT" > "$TEMP_OUTPUT"
    local verdict="FAIL"
    jq empty "$TEMP_OUTPUT" &>/dev/null && verdict="PASS"
    log_result "Test 1 - main.sh JSON Output" "$verdict"
}
# Test 2: broker.sh should accept JSON data over HTTP without error.
test_broker_basic() {
    echo "Running test_broker_basic..."
    json_data=$(cat "$TEMP_OUTPUT")

    # The broker mocks failed sends, so we only assert a clean exit.
    # Test the command directly instead of the fragile `cmd; [ $? -eq 0 ]`
    # pattern (any statement in between would clobber $?).
    if bash "$BROKER_SCRIPT" -o http -s "localhost:$NC_PORT" "$json_data"; then
        log_result "Test 2 - broker.sh HTTP protocol" "PASS"
    else
        log_result "Test 2 - broker.sh HTTP protocol" "FAIL"
    fi
}
# Test 3: broker.sh must exit non-zero when the JSON payload is missing.
test_broker_error_handling() {
    echo "Running test_broker_error_handling..."

    # Run broker.sh without JSON data; test the command directly rather
    # than inspecting $? afterwards (idiomatic and clobber-proof).
    if bash "$BROKER_SCRIPT" -o http -s "localhost:$NC_PORT" > /dev/null 2>&1; then
        log_result "Test 3 - broker.sh Missing JSON Data" "FAIL"
    else
        log_result "Test 3 - broker.sh Missing JSON Data" "PASS"
    fi
}
# Cleanup function to remove temporary files
cleanup() {
    rm -f "$TEMP_OUTPUT"
}

# Run tests
# NOTE(review): clear_port is defined above but never invoked — confirm
# whether port cleanup was meant to run before the broker tests.
echo "Starting test harness for main.sh and broker.sh" | tee "$LOG_FILE"
test_main_output
test_broker_basic
test_broker_error_handling
cleanup

echo "Tests completed. Results are logged in $LOG_FILE."
Loading…
Add table
Add a link
Reference in a new issue