Add the optional status configuration for health checks and other info

See #7
This commit is contained in:
R Tyler Croy 2020-05-25 14:24:17 -07:00
parent 39dca3223c
commit 648b7881ac
7 changed files with 756 additions and 148 deletions

742
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -8,33 +8,40 @@ edition = "2018"
[dependencies]
# Base asynchronous runtime
async-std = "~1.5.0"
async-std = { version = "~1.6.0", features = ["attributes"] }
async-trait = "0.1.31"
futures = { version = "~0.3.4", features = ["thread-pool"] }
# For parsing the syslog formatted messages
syslog_rfc5424 = "~0.6.1"
syslog_loose = "0.2.0"
# Needed for listening for TLS connections
async-tls = "~0.7.0"
rustls = "~0.17.0"
# Handling command line options
clap = { version = "~2.33.0", features = ["yaml"] }
# Needed for time management
chrono = "~0.4.11"
# Handling configuration overlays
config = { version = "~0.10.1", features = ["yaml"] }
# Required for communicating across internal queues
crossbeam = "~0.7"
# Needed to report metrics of hotdog's performance
dipstick = "~0.8.0"
# Used for string replacements and other template based transformations
handlebars = "~3.0.1"
# used for rule matching on JSON
# The "sync" feature is undocumented but required in order to swap Rc for Arc
# in the crate, allowing it to be used with futures and threads properly
jmespath = { git = "https://github.com/jmespath/jmespath.rs", features = ["sync"] }
# Logging
log = "~0.4.8"
pretty_env_logger = "~0.3.1"
# Needed for forwarding messages along to Kafka
# including the SSL and SASL features to ensure that this can authenticate
# against secure Kafka clusters, e.g. AWS MSK
rdkafka = { version = "~0.23.1", features = ["ssl", "sasl"]}
# Handling command line options
clap = { version = "~2.33.0", features = ["yaml"] }
# Handling configuration overlays
config = { version = "~0.10.1", features = ["yaml"] }
# Logging
log = "~0.4.8"
pretty_env_logger = "~0.3.1"
# Used for rule matching
regex = "~1.3.6"
# Needed for deserializing JSON messages _and_ managing our configuration
# effectively
@ -44,29 +51,20 @@ serde_json = "~1.0.0"
# Helpful to deserialize our regular expressions directly from the
# configuration file
serde_regex = "~0.4.0"
# Needed to do clever enum/derive tricks for strings
strum = "0.18.0"
strum_macros = "0.18.0"
# Used for rule matching
regex = "~1.3.6"
# For parsing the syslog formatted messages
syslog_rfc5424 = "~0.6.1"
syslog_loose = "0.2.0"
# used for rule matching on JSON
# The "sync" feature is undocumented but required in order to swap Rc for Arc
# in the crate, allowing it to be used with futures and threads properly
jmespath = { git = "https://github.com/jmespath/jmespath.rs", features = ["sync"] }
# Needed to report metrics of hotdog's performance
dipstick = "~0.8.0"
# Used for string replacements and other template based transformations
handlebars = "~3.0.1"
# Needed for time management
chrono = "~0.4.11"
# Needed for the http-based health checks
tide = "~0.9.0"
# Needed to tag rules and actions with their own unique identifiers
uuid = { version = "~0.8.1", features = ["v4"] }
async-trait = "0.1.31"
# Optimize the heck out of the release build, I have no idea what these flags
# do

View File

@ -221,6 +221,23 @@ global:
----
[[yml-status]]
==== Status
The `global.status` configuration is fully _optional_ but when it is enabled `hotdog`
will spin up an HTTP server on the configured `address` and `port` in order to provide
real-time status information about the daemon's runtime to HTTP clients.
.hotdog.yml
[source,yaml]
----
global:
status:
address: 'localhost'
port: 8585
----
[[rules]]
=== Rules

View File

@ -9,7 +9,9 @@ global:
key: './contrib/cert-key.pem'
# Swap these values out in order to listen for plaintext syslog
#port: 1514
#tls:
status:
address: '127.0.0.1'
port: 8585
kafka:
# Maximum number of messages to buffer internally while waiting for Kafka
# responses

View File

@ -15,6 +15,9 @@ extern crate serde_derive;
extern crate serde_regex;
extern crate syslog_loose;
extern crate syslog_rfc5424;
extern crate strum;
#[macro_use]
extern crate strum_macros;
use async_std::{sync::Arc, task};
use clap::{App, Arg};
@ -31,6 +34,7 @@ mod serve;
mod serve_plain;
mod serve_tls;
mod settings;
mod status;
use serve::*;
use settings::*;
@ -64,6 +68,15 @@ fn main() -> Result<(), errors::HotdogError> {
let settings_file = matches.value_of("config").unwrap_or("hotdog.yml");
let settings = Arc::new(settings::load(settings_file));
if let Some(st) = &settings.global.status {
let listen_to = format!("{}:{}", st.address, st.port).to_string();
/*
 * Need to spin this into its own separate thread since tide will block
 * up the async executor in this thread.
 */
task::spawn(status::status_server(listen_to));
}
if let Some(test_file) = matches.value_of("test") {
return task::block_on(rules::test_rules(&test_file, settings));
}

View File

@ -164,11 +164,18 @@ pub struct Metrics {
pub statsd: String,
}
/// Configuration for the optional HTTP status server
/// (the `global.status` section of hotdog.yml).
#[derive(Debug, Deserialize)]
pub struct Status {
    /// Address to bind the status HTTP listener to, e.g. "127.0.0.1".
    pub address: String,
    /// TCP port for the status listener.
    /// NOTE(review): u64 is wider than a real port (u16); values above
    /// 65535 will only be rejected at bind time — confirm intent.
    pub port: u64,
}
#[derive(Debug, Deserialize)]
pub struct Global {
pub listen: Listen,
pub kafka: Kafka,
pub listen: Listen,
pub metrics: Metrics,
pub status: Option<Status>,
}
#[derive(Debug, Deserialize)]

59
src/status.rs Normal file
View File

@ -0,0 +1,59 @@
/**
* This module contains the necessary code to launch the internal status HTTP
* server when so configured by the administrator
*/
use log::*;
use tide;
/**
 * Launch the status server.
 *
 * Binds a tide HTTP app to the given "address:port" string and serves a
 * simple greeting at `/`.  Runs until the listener errors out.
 */
pub async fn status_server(listen_to: String) -> Result<(), std::io::Error> {
    debug!("Starting the status server on: {}", listen_to);

    let mut server = tide::new();
    // Root endpoint: a trivial liveness response for HTTP health checks.
    server.at("/").get(|_| async { Ok("Welcome to Hotdog") });
    server.listen(listen_to).await?;

    Ok(())
}
/// Handler type for serving runtime statistics over HTTP.
/// NOTE(review): currently empty — presumably a placeholder for wiring
/// metrics into the status server's routes; confirm intended use.
pub struct StatsHandler {
}
/// Identifiers for the internal statistics hotdog tracks.
///
/// `Display` is derived via strum: variants carrying a
/// `#[strum(serialize = "…")]` attribute render as that string (e.g.
/// `Stats::ConnectionCount.to_string() == "connections"`, pinned by the
/// test below).  Variants without the attribute presumably fall back to
/// strum's default rendering of the variant name — TODO confirm and add
/// explicit serialize strings for consistency.
#[derive(Debug, Display)]
pub enum Stats {
    /// Number of client connections accepted.
    #[strum(serialize="connections")]
    ConnectionCount,
    /// Number of syslog lines received.
    #[strum(serialize="lines")]
    LineReceived,
    /// Message handed to the Kafka producer for the given topic.
    #[strum(serialize="kafka.submitted")]
    KafkaMsgSubmitted {
        topic: String,
    },
    /// Message confirmed sent by the Kafka producer.
    #[strum(serialize="kafka.producer.sent")]
    KafkaMsgSent,
    /// Kafka producer error, tagged with the error code string.
    #[strum(serialize="kafka.producer.error")]
    KafkaMsgErrored {
        errcode: String,
    },
    // The variants below have no explicit serialize string.
    LogParseError,
    FullInternalQueueError,
    InternalPushError,
    MergeInvalidJsonError,
    MergeTargetNotJsonError,
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Sanity check that the strum `serialize` attribute drives the
    /// derived `Display` output.
    #[test]
    fn test_sanity_check_strum_serialize() {
        assert_eq!("connections", Stats::ConnectionCount.to_string());
    }
}