Applied format

Francis Lalonde 2019-04-09 07:55:15 -04:00
parent e5c74de9ed
commit 0070f26d6d
60 changed files with 893 additions and 665 deletions


@ -1,10 +1,8 @@
#[cfg(feature="skeptic")] #[cfg(feature = "skeptic")]
extern crate skeptic; extern crate skeptic;
fn main() { fn main() {
// generates documentation tests. // generates documentation tests.
#[cfg(feature="skeptic")] #[cfg(feature = "skeptic")]
skeptic::generate_doc_tests(&["README.md", "HANDBOOK.md"]); skeptic::generate_doc_tests(&["README.md", "HANDBOOK.md"]);
} }


@ -2,10 +2,10 @@
extern crate dipstick; extern crate dipstick;
use dipstick::{Input, InputScope, QueuedOutput, Stream};
use std::thread;
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration; use std::time::Duration;
use dipstick::{Stream, InputScope, QueuedOutput, Input};
use std::thread;
fn main() { fn main() {
let async_metrics = Stream::to_stdout().queued(100).metrics(); let async_metrics = Stream::to_stdout().queued(100).metrics();
@ -21,5 +21,4 @@ fn main() {
}); });
} }
sleep(Duration::from_secs(5000)); sleep(Duration::from_secs(5000));
} }


@ -3,10 +3,10 @@
extern crate dipstick; extern crate dipstick;
use std::thread::sleep;
use std::io;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::io;
use std::thread::sleep;
use std::time::Duration;
fn main() { fn main() {
// for this demo, print metric values to the console // for this demo, print metric values to the console


@ -2,12 +2,12 @@
extern crate dipstick; extern crate dipstick;
use std::thread::sleep;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::thread;
use std::env::args; use std::env::args;
use std::str::FromStr; use std::str::FromStr;
use std::thread;
use std::thread::sleep;
use std::time::Duration;
fn main() { fn main() {
let bucket = AtomicBucket::new(); let bucket = AtomicBucket::new();
@ -27,5 +27,4 @@ fn main() {
sleep(Duration::from_secs(5)); sleep(Duration::from_secs(5));
bucket.stats(stats_all); bucket.stats(stats_all);
bucket.flush_to(&Stream::to_stdout().new_scope()).unwrap(); bucket.flush_to(&Stream::to_stdout().new_scope()).unwrap();
} }


@ -2,12 +2,12 @@
extern crate dipstick; extern crate dipstick;
use std::thread::sleep;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::thread;
use std::env::args; use std::env::args;
use std::str::FromStr; use std::str::FromStr;
use std::thread;
use std::thread::sleep;
use std::time::Duration;
fn main() { fn main() {
let event = Proxy::default().marker("a"); let event = Proxy::default().marker("a");
@ -30,5 +30,4 @@ fn main() {
} }
sleep(Duration::from_secs(5)); sleep(Duration::from_secs(5));
bucket.flush_to(&Stream::to_stdout().new_scope()).unwrap(); bucket.flush_to(&Stream::to_stdout().new_scope()).unwrap();
} }


@ -2,12 +2,12 @@
extern crate dipstick; extern crate dipstick;
use std::thread::sleep;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::thread;
use std::env::args; use std::env::args;
use std::str::FromStr; use std::str::FromStr;
use std::thread;
use std::thread::sleep;
use std::time::Duration;
fn main() { fn main() {
let bucket = AtomicBucket::new(); let bucket = AtomicBucket::new();
@ -27,5 +27,4 @@ fn main() {
} }
sleep(Duration::from_secs(5)); sleep(Duration::from_secs(5));
bucket.flush_to(&Stream::to_stdout().new_scope()).unwrap(); bucket.flush_to(&Stream::to_stdout().new_scope()).unwrap();
} }


@ -3,8 +3,8 @@
extern crate dipstick; extern crate dipstick;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::time::Duration;
fn main() { fn main() {
// adding a name to the bucket // adding a name to the bucket
@ -12,8 +12,12 @@ fn main() {
// adding two names to Graphite output // adding two names to Graphite output
// metrics will be prefixed with "machine1.application.test" // metrics will be prefixed with "machine1.application.test"
bucket.drain(Graphite::send_to("localhost:2003").expect("Socket") bucket.drain(
.named("machine1").add_name("application")); Graphite::send_to("localhost:2003")
.expect("Socket")
.named("machine1")
.add_name("application"),
);
bucket.flush_every(Duration::from_secs(3)); bucket.flush_every(Duration::from_secs(3));


@ -3,8 +3,8 @@
extern crate dipstick; extern crate dipstick;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::time::Duration;
fn main() { fn main() {
let metrics = AtomicBucket::new().named("test"); let metrics = AtomicBucket::new().named("test");


@ -4,9 +4,8 @@ extern crate dipstick;
use dipstick::*; use dipstick::*;
use std::time::Duration;
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration;
fn main() { fn main() {
let bucket = AtomicBucket::new(); let bucket = AtomicBucket::new();


@ -3,11 +3,10 @@
extern crate dipstick; extern crate dipstick;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::time::Duration;
fn main() { fn main() {
let app_metrics = AtomicBucket::new(); let app_metrics = AtomicBucket::new();
app_metrics.drain(Stream::to_stdout()); app_metrics.drain(Stream::to_stdout());


@ -2,8 +2,8 @@
extern crate dipstick; extern crate dipstick;
use std::time::Duration;
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration;
use dipstick::*; use dipstick::*;


@ -2,13 +2,16 @@
extern crate dipstick; extern crate dipstick;
use dipstick::*;
use std::io;
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration; use std::time::Duration;
use std::io;
use dipstick::*;
fn main() { fn main() {
let metrics = Stream::write_to(io::stdout()).cached(5).metrics().named("cache"); let metrics = Stream::write_to(io::stdout())
.cached(5)
.metrics()
.named("cache");
loop { loop {
// report some ad-hoc metric values from our "application" loop // report some ad-hoc metric values from our "application" loop


@ -3,18 +3,17 @@
extern crate dipstick; extern crate dipstick;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration;
metrics!{ metrics! {
APP = "application" => { APP = "application" => {
pub COUNTER: Counter = "counter"; pub COUNTER: Counter = "counter";
} }
} }
fn main() { fn main() {
let one_minute = AtomicBucket::new(); let one_minute = AtomicBucket::new();
one_minute.flush_every(Duration::from_secs(60)); one_minute.flush_every(Duration::from_secs(60));


@ -3,8 +3,8 @@
extern crate dipstick; extern crate dipstick;
use std::time::Duration;
use dipstick::*; use dipstick::*;
use std::time::Duration;
fn main() { fn main() {
fn custom_statistics( fn custom_statistics(
@ -28,7 +28,7 @@ fn main() {
} else { } else {
None None
} }
}, }
// scaling the score value and appending unit to name // scaling the score value and appending unit to name
(kind, ScoreType::Sum(sum)) => Some((kind, name.append("per_thousand"), sum / 1000)), (kind, ScoreType::Sum(sum)) => Some((kind, name.append("per_thousand"), sum / 1000)),


@ -6,11 +6,10 @@ use dipstick::*;
use std::time::Duration; use std::time::Duration;
fn main() { fn main() {
let metrics = let metrics = Graphite::send_to("localhost:2003")
Graphite::send_to("localhost:2003") .expect("Connected")
.expect("Connected") .named("my_app")
.named("my_app") .metrics();
.metrics();
loop { loop {
metrics.counter("counter_a").count(123); metrics.counter("counter_a").count(123);


@ -2,7 +2,7 @@
extern crate dipstick; extern crate dipstick;
use dipstick::{MultiInput, Graphite, Stream, Input, InputScope, Prefixed}; use dipstick::{Graphite, Input, InputScope, MultiInput, Prefixed, Stream};
use std::time::Duration; use std::time::Duration;
fn main() { fn main() {


@ -16,11 +16,16 @@ fn main() {
let same_type_metrics = MultiOutput::new() let same_type_metrics = MultiOutput::new()
.add_target(Stream::to_stderr().named("yeah")) .add_target(Stream::to_stderr().named("yeah"))
.add_target(Stream::to_stderr().named("ouch")) .add_target(Stream::to_stderr().named("ouch"))
.named("both").metrics(); .named("both")
.metrics();
loop { loop {
different_type_metrics.new_metric("counter_a".into(), InputKind::Counter).write(123, labels![]); different_type_metrics
same_type_metrics.new_metric("timer_a".into(), InputKind::Timer).write(6677, labels![]); .new_metric("counter_a".into(), InputKind::Counter)
.write(123, labels![]);
same_type_metrics
.new_metric("timer_a".into(), InputKind::Timer)
.write(6677, labels![]);
std::thread::sleep(Duration::from_millis(400)); std::thread::sleep(Duration::from_millis(400));
} }
} }


@ -15,7 +15,7 @@
extern crate dipstick; extern crate dipstick;
use std::time::{Duration}; use std::time::Duration;
use dipstick::*; use dipstick::*;
@ -28,7 +28,9 @@ fn main() {
metrics.observe(uptime, || 6).on_flush(); metrics.observe(uptime, || 6).on_flush();
let threads = metrics.gauge("threads"); let threads = metrics.gauge("threads");
metrics.observe(threads, thread_count).every(Duration::from_secs(1)); metrics
.observe(threads, thread_count)
.every(Duration::from_secs(1));
loop { loop {
std::thread::sleep(Duration::from_millis(40)); std::thread::sleep(Duration::from_millis(40));


@ -6,12 +6,11 @@ use dipstick::*;
use std::time::Duration; use std::time::Duration;
fn main() { fn main() {
let statsd = let statsd = Statsd::send_to("localhost:8125")
Statsd::send_to("localhost:8125") .expect("Connected")
.expect("Connected") .named("my_app");
.named("my_app"); // Sampling::Full is the default
// Sampling::Full is the default // .sampled(Sampling::Full);
// .sampled(Sampling::Full);
let unsampled_marker = statsd.metrics().marker("marker_a"); let unsampled_marker = statsd.metrics().marker("marker_a");
@ -24,10 +23,10 @@ fn main() {
.sampled(Sampling::Random(0.001)) .sampled(Sampling::Random(0.001))
.metrics() .metrics()
.marker("hi_freq_marker"); .marker("hi_freq_marker");
loop { loop {
unsampled_marker.mark(); unsampled_marker.mark();
for _i in 0..10 { for _i in 0..10 {
low_freq_marker.mark(); low_freq_marker.mark();
} }


@ -6,11 +6,10 @@ use dipstick::*;
use std::time::Duration; use std::time::Duration;
fn main() { fn main() {
let metrics = let metrics = Prometheus::push_to("http://prometheus:9091/metrics/job/prometheus_example")
Prometheus::push_to("http://prometheus:9091/metrics/job/prometheus_example") .expect("Prometheus Socket")
.expect("Prometheus Socket") .named("my_app")
.named("my_app") .metrics();
.metrics();
loop { loop {
metrics.counter("counter_a").count(123); metrics.counter("counter_a").count(123);


@ -2,10 +2,9 @@
extern crate dipstick; extern crate dipstick;
use dipstick::{Input, InputScope, Prefixed, Proxy, Stream};
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration; use std::time::Duration;
use dipstick::{Proxy, Stream, InputScope, Input, Prefixed};
fn main() { fn main() {
let root_proxy = Proxy::default(); let root_proxy = Proxy::default();
@ -50,5 +49,4 @@ fn main() {
println!() println!()
} }
} }


@ -15,9 +15,6 @@ pub fn raw_write() {
let metrics_log = dipstick::Log::to_log().metrics(); let metrics_log = dipstick::Log::to_log().metrics();
// define and send metrics using raw channel API // define and send metrics using raw channel API
let counter = metrics_log.new_metric( let counter = metrics_log.new_metric("count_a".into(), dipstick::InputKind::Counter);
"count_a".into(),
dipstick::InputKind::Counter,
);
counter.write(1, labels![]); counter.write(1, labels![]);
} }


@ -6,12 +6,11 @@ use dipstick::*;
use std::time::Duration; use std::time::Duration;
fn main() { fn main() {
let metrics = let metrics = Statsd::send_to("localhost:8125")
Statsd::send_to("localhost:8125") .expect("Connected")
.expect("Connected") // .with_sampling(Sampling::Random(0.2))
// .with_sampling(Sampling::Random(0.2)) .named("my_app")
.named("my_app") .metrics();
.metrics();
let counter = metrics.counter("counter_a"); let counter = metrics.counter("counter_a");


@ -6,12 +6,11 @@ use dipstick::*;
use std::time::Duration; use std::time::Duration;
fn main() { fn main() {
let metrics = let metrics = Statsd::send_to("localhost:8125")
Statsd::send_to("localhost:8125") .expect("Connected")
.expect("Connected") .sampled(Sampling::Random(0.2))
.sampled(Sampling::Random(0.2)) .named("my_app")
.named("my_app") .metrics();
.metrics();
let counter = metrics.counter("counter_a"); let counter = metrics.counter("counter_a");


@ -2,10 +2,12 @@
extern crate dipstick; extern crate dipstick;
use dipstick::{
AppLabel, Formatting, Input, InputKind, InputScope, LabelOp, LineFormat, LineOp, LineTemplate,
MetricName, Stream,
};
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration; use std::time::Duration;
use dipstick::{Stream, InputScope, Input, Formatting, AppLabel,
MetricName, InputKind, LineTemplate, LineFormat, LineOp, LabelOp};
/// Generates template like "$METRIC $value $label_value["abc"]\n" /// Generates template like "$METRIC $value $label_value["abc"]\n"
struct MyFormat; struct MyFormat;
@ -16,21 +18,29 @@ impl LineFormat for MyFormat {
LineOp::Literal(format!("{} ", name.join(".")).to_uppercase().into()), LineOp::Literal(format!("{} ", name.join(".")).to_uppercase().into()),
LineOp::ValueAsText, LineOp::ValueAsText,
LineOp::Literal(" ".into()), LineOp::Literal(" ".into()),
LineOp::LabelExists("abc".into(), LineOp::LabelExists(
vec![LabelOp::LabelKey, LabelOp::Literal(":".into()), LabelOp::LabelValue], "abc".into(),
vec![
LabelOp::LabelKey,
LabelOp::Literal(":".into()),
LabelOp::LabelValue,
],
), ),
LineOp::NewLine, LineOp::NewLine,
].into() ]
.into()
} }
} }
fn main() { fn main() {
let counter = Stream::to_stderr().formatting(MyFormat).metrics().counter("counter_a"); let counter = Stream::to_stderr()
.formatting(MyFormat)
.metrics()
.counter("counter_a");
AppLabel::set("abc", "xyz"); AppLabel::set("abc", "xyz");
loop { loop {
// report some metric values from our "application" loop // report some metric values from our "application" loop
counter.count(11); counter.count(11);
sleep(Duration::from_millis(500)); sleep(Duration::from_millis(500));
} }
} }


@ -1,33 +1,37 @@
//! Maintain aggregated metrics for deferred reporting, //! Maintain aggregated metrics for deferred reporting,
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::{MetricName};
use core::input::{InputKind, InputScope, InputMetric};
use core::output::{OutputDyn, OutputScope, OutputMetric, Output, output_none};
use core::clock::TimeHandle;
use core::{MetricValue, Flush};
use bucket::{ScoreType, stats_summary};
use bucket::ScoreType::*; use bucket::ScoreType::*;
use bucket::{stats_summary, ScoreType};
use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::clock::TimeHandle;
use core::error; use core::error;
use core::input::{InputKind, InputMetric, InputScope};
use core::name::MetricName;
use core::output::{output_none, Output, OutputDyn, OutputMetric, OutputScope};
use core::{Flush, MetricValue};
use std::mem; use std::collections::BTreeMap;
use std::isize; use std::isize;
use std::collections::{BTreeMap}; use std::mem;
use std::sync::atomic::AtomicIsize; use std::sync::atomic::AtomicIsize;
use std::sync::atomic::Ordering::*; use std::sync::atomic::Ordering::*;
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
use std::fmt;
use std::borrow::Borrow; use std::borrow::Borrow;
use std::fmt;
/// A function type to transform aggregated scores into publishable statistics. /// A function type to transform aggregated scores into publishable statistics.
pub type StatsFn = Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)> + Send + Sync + 'static; pub type StatsFn =
Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)>
+ Send
+ Sync
+ 'static;
fn initial_stats() -> &'static StatsFn { fn initial_stats() -> &'static StatsFn {
&stats_summary &stats_summary
@ -38,9 +42,10 @@ fn initial_drain() -> Arc<OutputDyn + Send + Sync> {
} }
lazy_static! { lazy_static! {
static ref DEFAULT_AGGREGATE_STATS: RwLock<Arc<StatsFn>> = RwLock::new(Arc::new(initial_stats())); static ref DEFAULT_AGGREGATE_STATS: RwLock<Arc<StatsFn>> =
RwLock::new(Arc::new(initial_stats()));
static ref DEFAULT_AGGREGATE_OUTPUT: RwLock<Arc<OutputDyn + Send + Sync>> = RwLock::new(initial_drain()); static ref DEFAULT_AGGREGATE_OUTPUT: RwLock<Arc<OutputDyn + Send + Sync>> =
RwLock::new(initial_drain());
} }
/// Central aggregation structure. /// Central aggregation structure.
@ -54,8 +59,14 @@ pub struct AtomicBucket {
struct InnerAtomicBucket { struct InnerAtomicBucket {
metrics: BTreeMap<MetricName, Arc<AtomicScores>>, metrics: BTreeMap<MetricName, Arc<AtomicScores>>,
period_start: TimeHandle, period_start: TimeHandle,
stats: Option<Arc<Fn(InputKind, MetricName, ScoreType) stats: Option<
-> Option<(InputKind, MetricName, MetricValue)> + Send + Sync + 'static>>, Arc<
Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)>
+ Send
+ Sync
+ 'static,
>,
>,
drain: Option<Arc<OutputDyn + Send + Sync + 'static>>, drain: Option<Arc<OutputDyn + Send + Sync + 'static>>,
publish_metadata: bool, publish_metadata: bool,
} }
@ -72,7 +83,6 @@ lazy_static! {
} }
impl InnerAtomicBucket { impl InnerAtomicBucket {
fn flush(&mut self) -> error::Result<()> { fn flush(&mut self) -> error::Result<()> {
let pub_scope = match self.drain { let pub_scope = match self.drain {
Some(ref out) => out.output_dyn(), Some(ref out) => out.output_dyn(),
@ -85,10 +95,13 @@ impl InnerAtomicBucket {
// purge: if bucket is the last owner of the metric, remove it // purge: if bucket is the last owner of the metric, remove it
// TODO parameterize whether to keep ad-hoc metrics after publish // TODO parameterize whether to keep ad-hoc metrics after publish
let mut purged = self.metrics.clone(); let mut purged = self.metrics.clone();
self.metrics.iter() self.metrics
.iter()
.filter(|&(_k, v)| Arc::strong_count(v) == 1) .filter(|&(_k, v)| Arc::strong_count(v) == 1)
.map(|(k, _v)| k) .map(|(k, _v)| k)
.for_each(|k| { purged.remove(k); }); .for_each(|k| {
purged.remove(k);
});
self.metrics = purged; self.metrics = purged;
Ok(()) Ok(())
@ -98,16 +111,19 @@ impl InnerAtomicBucket {
/// Compute stats on captured values using assigned or default stats function. /// Compute stats on captured values using assigned or default stats function.
/// Write stats to assigned or default output. /// Write stats to assigned or default output.
fn flush_to(&mut self, target: &OutputScope) -> error::Result<()> { fn flush_to(&mut self, target: &OutputScope) -> error::Result<()> {
let now = TimeHandle::now(); let now = TimeHandle::now();
let duration_seconds = self.period_start.elapsed_us() as f64 / 1_000_000.0; let duration_seconds = self.period_start.elapsed_us() as f64 / 1_000_000.0;
self.period_start = now; self.period_start = now;
let mut snapshot: Vec<(&MetricName, InputKind, Vec<ScoreType>)> = self.metrics.iter() let mut snapshot: Vec<(&MetricName, InputKind, Vec<ScoreType>)> = self
.flat_map(|(name, scores)| if let Some(values) = scores.reset(duration_seconds) { .metrics
Some((name, scores.metric_kind(), values)) .iter()
} else { .flat_map(|(name, scores)| {
None if let Some(values) = scores.reset(duration_seconds) {
Some((name, scores.metric_kind(), values))
} else {
None
}
}) })
.collect(); .collect();
@ -119,7 +135,11 @@ impl InnerAtomicBucket {
} else { } else {
// TODO add switch for metadata such as PERIOD_LENGTH // TODO add switch for metadata such as PERIOD_LENGTH
if self.publish_metadata { if self.publish_metadata {
snapshot.push((&PERIOD_LENGTH, InputKind::Timer, vec![Sum((duration_seconds * 1000.0) as isize)])); snapshot.push((
&PERIOD_LENGTH,
InputKind::Timer,
vec![Sum((duration_seconds * 1000.0) as isize)],
));
} }
let stats_fn = match self.stats { let stats_fn = match self.stats {
@ -140,7 +160,6 @@ impl InnerAtomicBucket {
target.flush() target.flush()
} }
} }
} }
impl<S: AsRef<str>> From<S> for AtomicBucket { impl<S: AsRef<str>> From<S> for AtomicBucket {
@ -161,14 +180,17 @@ impl AtomicBucket {
drain: None, drain: None,
// TODO add API toggle for metadata publish // TODO add API toggle for metadata publish
publish_metadata: false, publish_metadata: false,
})) })),
} }
} }
/// Set the default aggregated metrics statistics generator. /// Set the default aggregated metrics statistics generator.
pub fn default_stats<F>(func: F) pub fn default_stats<F>(func: F)
where where
F: Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)> + Send + Sync + 'static F: Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)>
+ Send
+ Sync
+ 'static,
{ {
*write_lock!(DEFAULT_AGGREGATE_STATS) = Arc::new(func) *write_lock!(DEFAULT_AGGREGATE_STATS) = Arc::new(func)
} }
@ -189,18 +211,24 @@ impl AtomicBucket {
} }
/// Set this bucket's statistics generator. /// Set this bucket's statistics generator.
#[deprecated(since="0.7.2", note="Use stats()")] #[deprecated(since = "0.7.2", note = "Use stats()")]
pub fn set_stats<F>(&self, func: F) pub fn set_stats<F>(&self, func: F)
where where
F: Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)> + Send + Sync + 'static F: Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)>
+ Send
+ Sync
+ 'static,
{ {
self.stats(func) self.stats(func)
} }
/// Set this bucket's statistics generator. /// Set this bucket's statistics generator.
pub fn stats<F>(&self, func: F) pub fn stats<F>(&self, func: F)
where where
F: Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)> + Send + Sync + 'static F: Fn(InputKind, MetricName, ScoreType) -> Option<(InputKind, MetricName, MetricValue)>
+ Send
+ Sync
+ 'static,
{ {
write_lock!(self.inner).stats = Some(Arc::new(func)) write_lock!(self.inner).stats = Some(Arc::new(func))
} }
@ -211,7 +239,7 @@ impl AtomicBucket {
} }
/// Set this bucket's aggregated metrics flush output. /// Set this bucket's aggregated metrics flush output.
#[deprecated(since="0.7.2", note="Use sink()")] #[deprecated(since = "0.7.2", note = "Use sink()")]
pub fn set_drain(&self, new_drain: impl Output + Send + Sync + 'static) { pub fn set_drain(&self, new_drain: impl Output + Send + Sync + 'static) {
self.drain(new_drain) self.drain(new_drain)
} }
@ -231,7 +259,6 @@ impl AtomicBucket {
let mut inner = write_lock!(self.inner); let mut inner = write_lock!(self.inner);
inner.flush_to(publish_scope) inner.flush_to(publish_scope)
} }
} }
impl InputScope for AtomicBucket { impl InputScope for AtomicBucket {
@ -257,8 +284,12 @@ impl Flush for AtomicBucket {
} }
impl WithAttributes for AtomicBucket { impl WithAttributes for AtomicBucket {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
const HIT: usize = 0; const HIT: usize = 0;
@ -354,7 +385,6 @@ impl AtomicScores {
pub fn reset(&self, duration_seconds: f64) -> Option<Vec<ScoreType>> { pub fn reset(&self, duration_seconds: f64) -> Option<Vec<ScoreType>> {
let mut scores = AtomicScores::blank(); let mut scores = AtomicScores::blank();
if self.snapshot(&mut scores) { if self.snapshot(&mut scores) {
let mut snapshot = Vec::new(); let mut snapshot = Vec::new();
match self.kind { match self.kind {
InputKind::Marker => { InputKind::Marker => {
@ -421,8 +451,8 @@ fn swap_if(counter: &AtomicIsize, new_value: isize, compare: fn(isize, isize) ->
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
mod bench { mod bench {
use test;
use super::*; use super::*;
use test;
#[bench] #[bench]
fn update_marker(b: &mut test::Bencher) { fn update_marker(b: &mut test::Bencher) {
@ -467,8 +497,8 @@ mod test {
use core::clock::{mock_clock_advance, mock_clock_reset}; use core::clock::{mock_clock_advance, mock_clock_reset};
use output::map::StatsMapScope; use output::map::StatsMapScope;
use std::time::Duration;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::time::Duration;
fn make_stats(stats_fn: &'static StatsFn) -> BTreeMap<String, MetricValue> { fn make_stats(stats_fn: &'static StatsFn) -> BTreeMap<String, MetricValue> {
mock_clock_reset(); mock_clock_reset();


@ -1,8 +1,8 @@
pub mod atomic; pub mod atomic;
use core::input::InputKind; use core::input::InputKind;
use core::name::MetricName;
use core::MetricValue; use core::MetricValue;
use core::name::{MetricName};
/// Possibly aggregated scores. /// Possibly aggregated scores.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
@ -24,16 +24,22 @@ pub enum ScoreType {
/// A predefined export strategy reporting all aggregated stats for all metric types. /// A predefined export strategy reporting all aggregated stats for all metric types.
/// Resulting stats are named by appending a short suffix to each metric's name. /// Resulting stats are named by appending a short suffix to each metric's name.
#[allow(dead_code)] #[allow(dead_code)]
pub fn stats_all(kind: InputKind, name: MetricName, score: ScoreType) pub fn stats_all(
-> Option<(InputKind, MetricName, MetricValue)> kind: InputKind,
{ name: MetricName,
score: ScoreType,
) -> Option<(InputKind, MetricName, MetricValue)> {
match score { match score {
ScoreType::Count(hit) => Some((InputKind::Counter, name.make_name("count"), hit)), ScoreType::Count(hit) => Some((InputKind::Counter, name.make_name("count"), hit)),
ScoreType::Sum(sum) => Some((kind, name.make_name("sum"), sum)), ScoreType::Sum(sum) => Some((kind, name.make_name("sum"), sum)),
ScoreType::Mean(mean) => Some((kind, name.make_name("mean"), mean.round() as MetricValue)), ScoreType::Mean(mean) => Some((kind, name.make_name("mean"), mean.round() as MetricValue)),
ScoreType::Max(max) => Some((InputKind::Gauge, name.make_name("max"), max)), ScoreType::Max(max) => Some((InputKind::Gauge, name.make_name("max"), max)),
ScoreType::Min(min) => Some((InputKind::Gauge, name.make_name("min"), min)), ScoreType::Min(min) => Some((InputKind::Gauge, name.make_name("min"), min)),
ScoreType::Rate(rate) => Some((InputKind::Gauge, name.make_name("rate"), rate.round() as MetricValue)), ScoreType::Rate(rate) => Some((
InputKind::Gauge,
name.make_name("rate"),
rate.round() as MetricValue,
)),
} }
} }
@ -42,9 +48,11 @@ pub fn stats_all(kind: InputKind, name: MetricName, score: ScoreType)
/// Since there is only one stat per metric, there is no risk of collision /// Since there is only one stat per metric, there is no risk of collision
/// and so exported stats copy their metric's name. /// and so exported stats copy their metric's name.
#[allow(dead_code)] #[allow(dead_code)]
pub fn stats_average(kind: InputKind, name: MetricName, score: ScoreType) pub fn stats_average(
-> Option<(InputKind, MetricName, MetricValue)> kind: InputKind,
{ name: MetricName,
score: ScoreType,
) -> Option<(InputKind, MetricName, MetricValue)> {
match kind { match kind {
InputKind::Marker => match score { InputKind::Marker => match score {
ScoreType::Count(count) => Some((InputKind::Counter, name, count)), ScoreType::Count(count) => Some((InputKind::Counter, name, count)),
@ -64,9 +72,11 @@ pub fn stats_average(kind: InputKind, name: MetricName, score: ScoreType)
/// Since there is only one stat per metric, there is no risk of collision /// Since there is only one stat per metric, there is no risk of collision
/// and so exported stats copy their metric's name. /// and so exported stats copy their metric's name.
#[allow(dead_code)] #[allow(dead_code)]
pub fn stats_summary(kind: InputKind, name: MetricName, score: ScoreType) pub fn stats_summary(
-> Option<(InputKind, MetricName, MetricValue)> kind: InputKind,
{ name: MetricName,
score: ScoreType,
) -> Option<(InputKind, MetricName, MetricValue)> {
match kind { match kind {
InputKind::Marker => match score { InputKind::Marker => match score {
ScoreType::Count(count) => Some((InputKind::Counter, name, count)), ScoreType::Count(count) => Some((InputKind::Counter, name, count)),
@ -81,4 +91,4 @@ pub fn stats_summary(kind: InputKind, name: MetricName, score: ScoreType)
_ => None, _ => None,
}, },
} }
} }

src/cache/cache_in.rs

@ -1,19 +1,19 @@
//! Metric input scope caching. //! Metric input scope caching.
use core::Flush;
use core::input::{InputKind, Input, InputScope, InputMetric, InputDyn};
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use cache::lru_cache as lru; use cache::lru_cache as lru;
use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::error; use core::error;
use core::input::{Input, InputDyn, InputKind, InputMetric, InputScope};
use core::name::MetricName;
use core::Flush;
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
/// Wrap an input with a metric definition cache. /// Wrap an input with a metric definition cache.
/// This can provide performance benefits for metrics that are dynamically defined at runtime on each access. /// This can provide performance benefits for metrics that are dynamically defined at runtime on each access.
@ -43,14 +43,18 @@ impl InputCache {
InputCache { InputCache {
attributes: Attributes::default(), attributes: Attributes::default(),
target: Arc::new(target), target: Arc::new(target),
cache: Arc::new(RwLock::new(lru::LRUCache::with_capacity(max_size))) cache: Arc::new(RwLock::new(lru::LRUCache::with_capacity(max_size))),
} }
} }
} }
impl WithAttributes for InputCache { impl WithAttributes for InputCache {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Input for InputCache { impl Input for InputCache {
@ -75,16 +79,18 @@ pub struct InputScopeCache {
} }
impl WithAttributes for InputScopeCache { impl WithAttributes for InputScopeCache {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl InputScope for InputScopeCache { impl InputScope for InputScopeCache {
fn new_metric(&self, name: MetricName, kind: InputKind) -> InputMetric { fn new_metric(&self, name: MetricName, kind: InputKind) -> InputMetric {
let name = self.prefix_append(name); let name = self.prefix_append(name);
let lookup = { let lookup = { write_lock!(self.cache).get(&name).cloned() };
write_lock!(self.cache).get(&name).cloned()
};
lookup.unwrap_or_else(|| { lookup.unwrap_or_else(|| {
let new_metric = self.target.new_metric(name.clone(), kind); let new_metric = self.target.new_metric(name.clone(), kind);
// FIXME (perf) having to take another write lock for a cache miss // FIXME (perf) having to take another write lock for a cache miss
@ -95,7 +101,6 @@ impl InputScope for InputScopeCache {
} }
impl Flush for InputScopeCache { impl Flush for InputScopeCache {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
self.target.flush() self.target.flush()


@ -1,20 +1,20 @@
//! Metric output scope caching. //! Metric output scope caching.
use core::Flush;
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::output::{Output, OutputMetric, OutputScope, OutputDyn};
use core::input::InputKind;
use cache::lru_cache as lru; use cache::lru_cache as lru;
use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::error; use core::error;
use core::input::InputKind;
use core::name::MetricName;
use core::output::{Output, OutputDyn, OutputMetric, OutputScope};
use core::Flush;
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
use std::rc::Rc; use std::rc::Rc;
@ -46,14 +46,18 @@ impl OutputCache {
OutputCache { OutputCache {
attributes: Attributes::default(), attributes: Attributes::default(),
target: Arc::new(target), target: Arc::new(target),
cache: Arc::new(RwLock::new(lru::LRUCache::with_capacity(max_size))) cache: Arc::new(RwLock::new(lru::LRUCache::with_capacity(max_size))),
} }
} }
} }
impl WithAttributes for OutputCache { impl WithAttributes for OutputCache {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Output for OutputCache { impl Output for OutputCache {
@ -78,16 +82,18 @@ pub struct OutputScopeCache {
} }
impl WithAttributes for OutputScopeCache { impl WithAttributes for OutputScopeCache {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl OutputScope for OutputScopeCache { impl OutputScope for OutputScopeCache {
fn new_metric(&self, name: MetricName, kind: InputKind) -> OutputMetric { fn new_metric(&self, name: MetricName, kind: InputKind) -> OutputMetric {
let name = self.prefix_append(name); let name = self.prefix_append(name);
let lookup = { let lookup = { write_lock!(self.cache).get(&name).cloned() };
write_lock!(self.cache).get(&name).cloned()
};
lookup.unwrap_or_else(|| { lookup.unwrap_or_else(|| {
let new_metric = self.target.new_metric(name.clone(), kind); let new_metric = self.target.new_metric(name.clone(), kind);
// FIXME (perf) having to take another write lock for a cache miss // FIXME (perf) having to take another write lock for a cache miss
@ -98,10 +104,8 @@ impl OutputScope for OutputScopeCache {
} }
impl Flush for OutputScopeCache { impl Flush for OutputScopeCache {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
self.target.flush() self.target.flush()
} }
} }


@ -2,8 +2,8 @@
//! Stored values will be held onto as long as there is space. //! Stored values will be held onto as long as there is space.
//! When space runs out, the oldest unused value will get evicted to make room for a new value. //! When space runs out, the oldest unused value will get evicted to make room for a new value.
use std::hash::Hash;
use std::collections::HashMap; use std::collections::HashMap;
use std::hash::Hash;
struct CacheEntry<K, V> { struct CacheEntry<K, V> {
key: K, key: K,

src/cache/mod.rs

@ -1,3 +1,3 @@
pub mod lru_cache;
pub mod cache_out;
pub mod cache_in; pub mod cache_in;
pub mod cache_out;
pub mod lru_cache;


@ -1,21 +1,20 @@
use std::sync::{Arc}; use std::collections::HashMap;
use std::collections::{HashMap};
use std::default::Default; use std::default::Default;
use std::sync::Arc;
use core::name::{MetricName, NameParts};
use core::scheduler::SCHEDULER; use core::scheduler::SCHEDULER;
use core::name::{NameParts, MetricName};
use ::{Flush, CancelHandle};
use std::fmt; use std::fmt;
use std::time::Duration; use std::time::Duration;
use ::{InputScope, Gauge};
use MetricValue; use MetricValue;
use {CancelHandle, Flush};
use {Gauge, InputScope};
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")]
use parking_lot::{RwLock};
#[cfg(feature = "parking_lot")]
use parking_lot::RwLock;
/// The actual distribution (random, fixed-cycled, etc) depends on selected sampling method. /// The actual distribution (random, fixed-cycled, etc) depends on selected sampling method.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
@ -28,7 +27,7 @@ pub enum Sampling {
/// - 1.0+ records everything /// - 1.0+ records everything
/// - 0.5 records one of two values /// - 0.5 records one of two values
/// - 0.0 records nothing /// - 0.0 records nothing
Random(f64) Random(f64),
} }
impl Default for Sampling { impl Default for Sampling {
@ -102,7 +101,10 @@ pub trait OnFlush {
fn notify_flush_listeners(&self); fn notify_flush_listeners(&self);
} }
impl <T> OnFlush for T where T: Flush + WithAttributes { impl<T> OnFlush for T
where
T: Flush + WithAttributes,
{
fn notify_flush_listeners(&self) { fn notify_flush_listeners(&self) {
for listener in read_lock!(self.get_attributes().flush_listeners).iter() { for listener in read_lock!(self.get_attributes().flush_listeners).iter() {
(listener)() (listener)()
@ -117,16 +119,18 @@ pub struct ObserveWhen<'a, T, F> {
} }
impl<'a, T, F> ObserveWhen<'a, T, F> impl<'a, T, F> ObserveWhen<'a, T, F>
where F: Fn() -> MetricValue + Send + Sync + 'static, where
T: InputScope + WithAttributes + Send + Sync, F: Fn() -> MetricValue + Send + Sync + 'static,
T: InputScope + WithAttributes + Send + Sync,
{ {
pub fn on_flush(self) { pub fn on_flush(self) {
let gauge = self.gauge; let gauge = self.gauge;
let op = self.operation; let op = self.operation;
write_lock!(self.target.mut_attributes().flush_listeners).push(Arc::new(move || gauge.value(op()))); write_lock!(self.target.mut_attributes().flush_listeners)
.push(Arc::new(move || gauge.value(op())));
} }
pub fn every(self, period: Duration,) -> CancelHandle { pub fn every(self, period: Duration) -> CancelHandle {
let gauge = self.gauge; let gauge = self.gauge;
let op = self.operation; let op = self.operation;
let handle = SCHEDULER.schedule(period, move || gauge.value(op())); let handle = SCHEDULER.schedule(period, move || gauge.value(op()));
@ -137,17 +141,19 @@ impl<'a, T, F> ObserveWhen<'a, T, F>
/// Schedule a recurring task /// Schedule a recurring task
pub trait Observe { pub trait Observe {
/// Schedule a recurring task. /// Schedule a recurring task.
/// The returned handle can be used to cancel the task. /// The returned handle can be used to cancel the task.
fn observe<F>(&mut self, gauge: Gauge, operation: F) -> ObserveWhen<Self, F> fn observe<F>(&mut self, gauge: Gauge, operation: F) -> ObserveWhen<Self, F>
where F: Fn() -> MetricValue + Send + Sync + 'static, Self: Sized; where
F: Fn() -> MetricValue + Send + Sync + 'static,
Self: Sized;
} }
impl<T: InputScope + WithAttributes> Observe for T { impl<T: InputScope + WithAttributes> Observe for T {
fn observe<F>(&mut self, gauge: Gauge, operation: F) -> ObserveWhen<Self, F> fn observe<F>(&mut self, gauge: Gauge, operation: F) -> ObserveWhen<Self, F>
where F: Fn() -> MetricValue + Send + Sync + 'static, Self: Sized where
F: Fn() -> MetricValue + Send + Sync + 'static,
Self: Sized,
{ {
ObserveWhen { ObserveWhen {
target: self, target: self,
@ -173,7 +179,7 @@ pub trait Prefixed {
/// Append a name to the existing names. /// Append a name to the existing names.
/// Return a clone of the component with the updated names. /// Return a clone of the component with the updated names.
#[deprecated(since="0.7.2", note="Use named() or add_name()")] #[deprecated(since = "0.7.2", note = "Use named() or add_name()")]
fn add_prefix<S: Into<String>>(&self, name: S) -> Self; fn add_prefix<S: Into<String>>(&self, name: S) -> Self;
/// Append a name to the existing names. /// Append a name to the existing names.
@ -203,11 +209,9 @@ pub trait Label {
/// Join namespace and prepend in newly defined metrics. /// Join namespace and prepend in newly defined metrics.
fn label(&self, name: &str) -> Self; fn label(&self, name: &str) -> Self;
} }
impl<T: WithAttributes> Prefixed for T { impl<T: WithAttributes> Prefixed for T {
/// Returns namespace of component. /// Returns namespace of component.
fn get_prefixes(&self) -> &NameParts { fn get_prefixes(&self) -> &NameParts {
&self.get_attributes().naming &self.get_attributes().naming
@ -233,7 +237,6 @@ impl<T: WithAttributes> Prefixed for T {
let parts = NameParts::from(name); let parts = NameParts::from(name);
self.with_attributes(|new_attr| new_attr.naming = parts.clone()) self.with_attributes(|new_attr| new_attr.naming = parts.clone())
} }
} }
/// Apply statistical sampling to collected metrics data. /// Apply statistical sampling to collected metrics data.
@ -272,11 +275,11 @@ pub trait Buffered: WithAttributes {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use output::map::StatsMap;
use core::attributes::*; use core::attributes::*;
use core::input::Input;
use core::input::*; use core::input::*;
use core::Flush; use core::Flush;
use core::input::Input; use output::map::StatsMap;
use StatsMapScope; use StatsMapScope;
#[test] #[test]
@ -287,4 +290,4 @@ mod test {
metrics.flush().unwrap(); metrics.flush().unwrap();
assert_eq!(Some(&4), metrics.into_map().get("my_gauge")) assert_eq!(Some(&4), metrics.into_map().get("my_gauge"))
} }
} }


@ -52,7 +52,6 @@ pub fn mock_clock_reset() {
}) })
} }
/// Advance the mock clock by a certain amount of time. /// Advance the mock clock by a certain amount of time.
/// Enables writing reproducible metrics tests in combination with #mock_clock_reset() /// Enables writing reproducible metrics tests in combination with #mock_clock_reset()
/// Should be after metrics have been produced but before they are published. /// Should be after metrics have been produced but before they are published.
@ -75,7 +74,5 @@ fn now() -> Instant {
/// thread::sleep will have no effect on metrics. /// thread::sleep will have no effect on metrics.
/// Use advance_time() to simulate passing time. /// Use advance_time() to simulate passing time.
fn now() -> Instant { fn now() -> Instant {
MOCK_CLOCK.with(|now| { MOCK_CLOCK.with(|now| *now.borrow())
*now.borrow()
})
} }


@ -1,6 +1,5 @@
use std::result;
use std::error; use std::error;
use std::result;
/// Just put any error in a box. /// Just put any error in a box.
pub type Result<T> = result::Result<T, Box<error::Error + Send + Sync>>; pub type Result<T> = result::Result<T, Box<error::Error + Send + Sync>>;


@ -1,14 +1,14 @@
use core::clock::TimeHandle; use core::clock::TimeHandle;
use core::{MetricValue, Flush};
use core::name::MetricName;
use core::label::Labels; use core::label::Labels;
use core::name::MetricName;
use core::{Flush, MetricValue};
use std::sync::Arc;
use std::fmt; use std::fmt;
use std::sync::Arc;
// TODO maybe define an 'AsValue' trait + impl for supported number types, then drop 'num' crate // TODO maybe define an 'AsValue' trait + impl for supported number types, then drop 'num' crate
pub use num::{ToPrimitive};
pub use num::integer; pub use num::integer;
pub use num::ToPrimitive;
/// A function trait that opens a new metric capture scope. /// A function trait that opens a new metric capture scope.
pub trait Input: Send + Sync + 'static + InputDyn { pub trait Input: Send + Sync + 'static + InputDyn {
@ -19,7 +19,7 @@ pub trait Input: Send + Sync + 'static + InputDyn {
fn metrics(&self) -> Self::SCOPE; fn metrics(&self) -> Self::SCOPE;
/// Open a new scope from this input. /// Open a new scope from this input.
#[deprecated(since="0.7.2", note="Use metrics()")] #[deprecated(since = "0.7.2", note = "Use metrics()")]
fn input(&self) -> Self::SCOPE { fn input(&self) -> Self::SCOPE {
self.metrics() self.metrics()
} }
@ -74,7 +74,7 @@ pub trait InputScope: Flush {
/// A metric is actually a function that knows to write a metric value to a metric output. /// A metric is actually a function that knows to write a metric value to a metric output.
#[derive(Clone)] #[derive(Clone)]
pub struct InputMetric { pub struct InputMetric {
inner: Arc<Fn(MetricValue, Labels) + Send + Sync> inner: Arc<Fn(MetricValue, Labels) + Send + Sync>,
} }
impl fmt::Debug for InputMetric { impl fmt::Debug for InputMetric {
@ -86,7 +86,9 @@ impl fmt::Debug for InputMetric {
impl InputMetric { impl InputMetric {
/// Utility constructor /// Utility constructor
pub fn new<F: Fn(MetricValue, Labels) + Send + Sync + 'static>(metric: F) -> InputMetric { pub fn new<F: Fn(MetricValue, Labels) + Send + Sync + 'static>(metric: F) -> InputMetric {
InputMetric { inner: Arc::new(metric) } InputMetric {
inner: Arc::new(metric),
}
} }
/// Collect a new value for this metric. /// Collect a new value for this metric.
@ -120,7 +122,7 @@ impl<'a> From<&'a str> for InputKind {
"Gauge" => InputKind::Gauge, "Gauge" => InputKind::Gauge,
"Timer" => InputKind::Timer, "Timer" => InputKind::Timer,
"Level" => InputKind::Level, "Level" => InputKind::Level,
_ => panic!("No InputKind '{}' defined", s) _ => panic!("No InputKind '{}' defined", s),
} }
} }
} }
@ -187,7 +189,6 @@ impl Gauge {
pub fn value<V: ToPrimitive>(&self, value: V) { pub fn value<V: ToPrimitive>(&self, value: V) {
self.inner.write(value.to_isize().unwrap(), labels![]) self.inner.write(value.to_isize().unwrap(), labels![])
} }
} }
/// A timer that sends values to the metrics backend /// A timer that sends values to the metrics backend


@ -1,13 +1,13 @@
use std::collections::{HashMap};
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
/// Label values are immutable but can move around a lot. /// Label values are immutable but can move around a lot.
type LabelValue = Arc<String>; type LabelValue = Arc<String>;
@ -18,7 +18,7 @@ type LabelValue = Arc<String>;
/// All write operations return a mutated clone of the original. /// All write operations return a mutated clone of the original.
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
struct LabelScope { struct LabelScope {
pairs: Option<Arc<HashMap<String, LabelValue>>> pairs: Option<Arc<HashMap<String, LabelValue>>>,
} }
impl LabelScope { impl LabelScope {
@ -26,11 +26,13 @@ impl LabelScope {
fn set(&self, key: String, value: LabelValue) -> Self { fn set(&self, key: String, value: LabelValue) -> Self {
let mut new_pairs = match self.pairs { let mut new_pairs = match self.pairs {
None => HashMap::new(), None => HashMap::new(),
Some(ref old_pairs) => old_pairs.as_ref().clone() Some(ref old_pairs) => old_pairs.as_ref().clone(),
}; };
new_pairs.insert(key, value); new_pairs.insert(key, value);
LabelScope { pairs: Some(Arc::new(new_pairs)) } LabelScope {
pairs: Some(Arc::new(new_pairs)),
}
} }
fn unset(&self, key: &str) -> Self { fn unset(&self, key: &str) -> Self {
@ -42,7 +44,9 @@ impl LabelScope {
if new_pairs.is_empty() { if new_pairs.is_empty() {
LabelScope { pairs: None } LabelScope { pairs: None }
} else { } else {
LabelScope { pairs: Some(Arc::new(new_pairs)) } LabelScope {
pairs: Some(Arc::new(new_pairs)),
}
} }
} else { } else {
// key wasn't set, labels unchanged // key wasn't set, labels unchanged
@ -56,7 +60,7 @@ impl LabelScope {
// FIXME should use .and_then(), how? // FIXME should use .and_then(), how?
match &self.pairs { match &self.pairs {
None => None, None => None,
Some(pairs) => pairs.get(key).cloned() Some(pairs) => pairs.get(key).cloned(),
} }
} }
@ -67,9 +71,9 @@ impl LabelScope {
} }
} }
lazy_static!( lazy_static! {
static ref APP_LABELS: RwLock<LabelScope> = RwLock::new(LabelScope::default()); static ref APP_LABELS: RwLock<LabelScope> = RwLock::new(LabelScope::default());
); }
thread_local! { thread_local! {
static THREAD_LABELS: RefCell<LabelScope> = RefCell::new(LabelScope::default()); static THREAD_LABELS: RefCell<LabelScope> = RefCell::new(LabelScope::default());
@ -104,9 +108,7 @@ impl ThreadLabel {
} }
fn collect(map: &mut HashMap<String, LabelValue>) { fn collect(map: &mut HashMap<String, LabelValue>) {
THREAD_LABELS.with(|mop| { THREAD_LABELS.with(|mop| mop.borrow().collect(map));
mop.borrow().collect(map)
});
} }
} }
@ -139,7 +141,6 @@ impl AppLabel {
} }
} }
/// Base structure to carry metric labels from the application to the metric backend(s). /// Base structure to carry metric labels from the application to the metric backend(s).
/// Can carry both one-off labels and exported context labels (if async metrics are enabled). /// Can carry both one-off labels and exported context labels (if async metrics are enabled).
/// Used in applications through the labels!() macro. /// Used in applications through the labels!() macro.
@ -152,8 +153,8 @@ impl From<HashMap<String, LabelValue>> for Labels {
fn from(map: HashMap<String, LabelValue>) -> Self { fn from(map: HashMap<String, LabelValue>) -> Self {
Labels { Labels {
scopes: vec![LabelScope { scopes: vec![LabelScope {
pairs: Some(Arc::new(map)) pairs: Some(Arc::new(map)),
}] }],
} }
} }
} }
@ -168,10 +169,10 @@ impl Default for Labels {
} }
impl Labels { impl Labels {
/// Used to save metric context before enqueuing value for async output. /// Used to save metric context before enqueuing value for async output.
pub fn save_context(&mut self) { pub fn save_context(&mut self) {
self.scopes.push(THREAD_LABELS.with(|map| map.borrow().clone())); self.scopes
.push(THREAD_LABELS.with(|map| map.borrow().clone()));
self.scopes.push(read_lock!(APP_LABELS).clone()); self.scopes.push(read_lock!(APP_LABELS).clone());
} }
@ -179,7 +180,6 @@ impl Labels {
/// Searches provided labels, provided scopes or default scopes. /// Searches provided labels, provided scopes or default scopes.
// TODO needs less magic, add checks? // TODO needs less magic, add checks?
pub fn lookup(&self, key: &str) -> Option<LabelValue> { pub fn lookup(&self, key: &str) -> Option<LabelValue> {
fn lookup_current_context(key: &str) -> Option<LabelValue> { fn lookup_current_context(key: &str) -> Option<LabelValue> {
ThreadLabel::get(key).or_else(|| AppLabel::get(key)) ThreadLabel::get(key).or_else(|| AppLabel::get(key))
} }
@ -191,14 +191,16 @@ impl Labels {
// some value labels, no saved context labels // some value labels, no saved context labels
// lookup value label, then lookup implicit context // lookup value label, then lookup implicit context
1 => self.scopes[0].get(key).or_else(|| lookup_current_context(key)), 1 => self.scopes[0]
.get(key)
.or_else(|| lookup_current_context(key)),
// value + saved context labels // value + saved context labels
// lookup explicit context in turn // lookup explicit context in turn
_ => { _ => {
for src in &self.scopes { for src in &self.scopes {
if let Some(label_value) = src.get(key) { if let Some(label_value) = src.get(key) {
return Some(label_value) return Some(label_value);
} }
} }
None None
@ -225,7 +227,7 @@ impl Labels {
AppLabel::collect(&mut map); AppLabel::collect(&mut map);
ThreadLabel::collect(&mut map); ThreadLabel::collect(&mut map);
self.scopes[0].collect(&mut map); self.scopes[0].collect(&mut map);
}, }
// value + saved context labels // value + saved context labels
// lookup explicit context in turn // lookup explicit context in turn
@ -240,14 +242,13 @@ impl Labels {
} }
} }
#[cfg(test)] #[cfg(test)]
pub mod test { pub mod test {
use super::*; use super::*;
use std::sync::Mutex; use std::sync::Mutex;
lazy_static!{ lazy_static! {
/// Label tests use the globally shared AppLabels which may make them interfere as tests are run concurrently. /// Label tests use the globally shared AppLabels which may make them interfere as tests are run concurrently.
/// We do not want to mandate usage of `RUST_TEST_THREADS=1` which would penalize the whole test suite. /// We do not want to mandate usage of `RUST_TEST_THREADS=1` which would penalize the whole test suite.
/// Instead we use a local mutex to make sure the label tests run in sequence. /// Instead we use a local mutex to make sure the label tests run in sequence.
@ -261,10 +262,16 @@ pub mod test {
AppLabel::set("abc", "456"); AppLabel::set("abc", "456");
ThreadLabel::set("abc", "123"); ThreadLabel::set("abc", "123");
assert_eq!(Arc::new("123".into()), labels!().lookup("abc").expect("ThreadLabel Value")); assert_eq!(
Arc::new("123".into()),
labels!().lookup("abc").expect("ThreadLabel Value")
);
ThreadLabel::unset("abc"); ThreadLabel::unset("abc");
assert_eq!(Arc::new("456".into()), labels!().lookup("abc").expect("AppLabel Value")); assert_eq!(
Arc::new("456".into()),
labels!().lookup("abc").expect("AppLabel Value")
);
AppLabel::unset("abc"); AppLabel::unset("abc");
assert_eq!(true, labels!().lookup("abc").is_none()); assert_eq!(true, labels!().lookup("abc").is_none());
@ -274,27 +281,41 @@ pub mod test {
fn labels_macro() { fn labels_macro() {
let _lock = TEST_SEQUENCE.lock().expect("Test Sequence"); let _lock = TEST_SEQUENCE.lock().expect("Test Sequence");
let labels = labels!{ let labels = labels! {
"abc" => "789", "abc" => "789",
"xyz" => "123" "xyz" => "123"
}; };
assert_eq!(Arc::new("789".into()), labels.lookup("abc").expect("Label Value")); assert_eq!(
assert_eq!(Arc::new("123".into()), labels.lookup("xyz").expect("Label Value")); Arc::new("789".into()),
labels.lookup("abc").expect("Label Value")
);
assert_eq!(
Arc::new("123".into()),
labels.lookup("xyz").expect("Label Value")
);
} }
#[test] #[test]
fn value_labels() { fn value_labels() {
let _lock = TEST_SEQUENCE.lock().expect("Test Sequence"); let _lock = TEST_SEQUENCE.lock().expect("Test Sequence");
let labels = labels!{ "abc" => "789" }; let labels = labels! { "abc" => "789" };
assert_eq!(Arc::new("789".into()), labels.lookup("abc").expect("Label Value")); assert_eq!(
Arc::new("789".into()),
labels.lookup("abc").expect("Label Value")
);
AppLabel::set("abc", "456"); AppLabel::set("abc", "456");
assert_eq!(Arc::new("789".into()), labels.lookup("abc").expect("Label Value")); assert_eq!(
Arc::new("789".into()),
labels.lookup("abc").expect("Label Value")
);
ThreadLabel::set("abc", "123"); ThreadLabel::set("abc", "123");
assert_eq!(Arc::new("789".into()), labels.lookup("abc").expect("Label Value")); assert_eq!(
Arc::new("789".into()),
labels.lookup("abc").expect("Label Value")
);
} }
} }


@ -2,43 +2,49 @@
//! This makes all outputs also immediately usable as inputs. //! This makes all outputs also immediately usable as inputs.
//! The alternatives are queuing or thread local. //! The alternatives are queuing or thread local.
use core::input::{InputScope, InputMetric, Input, InputKind}; use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::output::{Output, OutputScope};
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::Flush;
use core::error; use core::error;
use core::input::{Input, InputKind, InputMetric, InputScope};
use core::name::MetricName;
use core::output::{Output, OutputScope};
use core::Flush;
use std::rc::Rc; use std::rc::Rc;
use std::sync::{Arc, Mutex};
use std::ops; use std::ops;
use std::sync::{Arc, Mutex};
/// Synchronous thread-safety for metric output using basic locking. /// Synchronous thread-safety for metric output using basic locking.
#[derive(Clone)] #[derive(Clone)]
pub struct LockingOutput { pub struct LockingOutput {
attributes: Attributes, attributes: Attributes,
inner: Arc<Mutex<LockedOutputScope>> inner: Arc<Mutex<LockedOutputScope>>,
} }
impl WithAttributes for LockingOutput { impl WithAttributes for LockingOutput {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl InputScope for LockingOutput { impl InputScope for LockingOutput {
fn new_metric(&self, name: MetricName, kind: InputKind) -> InputMetric { fn new_metric(&self, name: MetricName, kind: InputKind) -> InputMetric {
let name = self.prefix_append(name); let name = self.prefix_append(name);
// lock when creating metrics // lock when creating metrics
let raw_metric = self.inner.lock().expect("LockingOutput").new_metric(name, kind); let raw_metric = self
.inner
.lock()
.expect("LockingOutput")
.new_metric(name, kind);
let mutex = self.inner.clone(); let mutex = self.inner.clone();
InputMetric::new(move |value, labels| { InputMetric::new(move |value, labels| {
// lock when collecting values // lock when collecting values
let _guard = mutex.lock().expect("LockingOutput"); let _guard = mutex.lock().expect("LockingOutput");
raw_metric.write(value, labels) raw_metric.write(value, labels)
} ) })
} }
} }
impl Flush for LockingOutput { impl Flush for LockingOutput {
@ -54,7 +60,7 @@ impl<T: Output + Send + Sync + 'static> Input for T {
fn metrics(&self) -> Self::SCOPE { fn metrics(&self) -> Self::SCOPE {
LockingOutput { LockingOutput {
attributes: Attributes::default(), attributes: Attributes::default(),
inner: Arc::new(Mutex::new(LockedOutputScope(self.output_dyn()))) inner: Arc::new(Mutex::new(LockedOutputScope(self.output_dyn()))),
} }
} }
} }
@ -62,7 +68,7 @@ impl<T: Output + Send + Sync + 'static> Input for T {
/// Wrap an OutputScope to make it Send + Sync, allowing it to travel the world of threads. /// Wrap an OutputScope to make it Send + Sync, allowing it to travel the world of threads.
/// Obviously, it should only still be used from a single thread at a time or dragons may occur. /// Obviously, it should only still be used from a single thread at a time or dragons may occur.
#[derive(Clone)] #[derive(Clone)]
struct LockedOutputScope(Rc<OutputScope + 'static> ); struct LockedOutputScope(Rc<OutputScope + 'static>);
impl ops::Deref for LockedOutputScope { impl ops::Deref for LockedOutputScope {
type Target = OutputScope + 'static; type Target = OutputScope + 'static;
@ -73,4 +79,3 @@ impl ops::Deref for LockedOutputScope {
unsafe impl Send for LockedOutputScope {} unsafe impl Send for LockedOutputScope {}
unsafe impl Sync for LockedOutputScope {} unsafe impl Sync for LockedOutputScope {}


@ -2,11 +2,11 @@
//! Because the possibly high volume of data, this is pre-set to use aggregation. //! Because the possibly high volume of data, this is pre-set to use aggregation.
//! This is also kept in a separate module because it is not to be exposed outside of the crate. //! This is also kept in a separate module because it is not to be exposed outside of the crate.
use core::input::{Marker, InputScope, Counter};
use core::attributes::Prefixed; use core::attributes::Prefixed;
use core::input::{Counter, InputScope, Marker};
use core::proxy::Proxy; use core::proxy::Proxy;
metrics!{ metrics! {
/// Dipstick's own internal metrics. /// Dipstick's own internal metrics.
pub DIPSTICK_METRICS = "dipstick" => { pub DIPSTICK_METRICS = "dipstick" => {


@ -1,16 +1,16 @@
pub mod error;
pub mod name;
pub mod attributes; pub mod attributes;
pub mod input;
pub mod output;
pub mod locking;
pub mod clock; pub mod clock;
pub mod void; pub mod error;
pub mod proxy; pub mod input;
pub mod label; pub mod label;
pub mod pcg32; pub mod locking;
pub mod scheduler;
pub mod metrics; pub mod metrics;
pub mod name;
pub mod output;
pub mod pcg32;
pub mod proxy;
pub mod scheduler;
pub mod void;
/// Base type for recorded metric values. /// Base type for recorded metric values.
pub type MetricValue = isize; pub type MetricValue = isize;
@ -19,13 +19,12 @@ pub type MetricValue = isize;
pub trait Flush { pub trait Flush {
/// Flush does nothing by default. /// Flush does nothing by default.
fn flush(&self) -> error::Result<()>; fn flush(&self) -> error::Result<()>;
} }
#[cfg(test)] #[cfg(test)]
pub mod test { pub mod test {
use super::*;
use super::input::*; use super::input::*;
use super::*;
#[test] #[test]
fn test_to_void() { fn test_to_void() {
@ -38,9 +37,9 @@ pub mod test {
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
pub mod bench { pub mod bench {
use super::input::*;
use super::clock::*;
use super::super::bucket::atomic::*; use super::super::bucket::atomic::*;
use super::clock::*;
use super::input::*;
use test; use test;
#[bench] #[bench]


@ -1,5 +1,5 @@
use std::ops::{Deref,DerefMut}; use std::collections::VecDeque;
use std::collections::{VecDeque}; use std::ops::{Deref, DerefMut};
/// A double-ended vec of strings constituting a metric name or a future part thereof. /// A double-ended vec of strings constituting a metric name or a future part thereof.
#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Default)] #[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Default)]
@ -10,18 +10,17 @@ pub struct NameParts {
} }
impl NameParts { impl NameParts {
/// Returns true if this instance is equal to or a subset (more specific) of the target instance. /// Returns true if this instance is equal to or a subset (more specific) of the target instance.
/// e.g. `a.b.c` is within `a.b` /// e.g. `a.b.c` is within `a.b`
/// e.g. `a.d.c` is not within `a.b` /// e.g. `a.d.c` is not within `a.b`
pub fn is_within(&self, other: &NameParts) -> bool { pub fn is_within(&self, other: &NameParts) -> bool {
// quick check: if this name has less parts it cannot be equal or more specific // quick check: if this name has less parts it cannot be equal or more specific
if self.len() < other.nodes.len() { if self.len() < other.nodes.len() {
return false return false;
} }
for (i, part) in other.nodes.iter().enumerate() { for (i, part) in other.nodes.iter().enumerate() {
if part != &self.nodes[i] { if part != &self.nodes[i] {
return false return false;
} }
} }
true true
@ -75,20 +74,20 @@ pub struct MetricName {
} }
impl MetricName { impl MetricName {
/// Prepend to the existing namespace. /// Prepend to the existing namespace.
pub fn prepend<S: Into<NameParts>>(mut self, namespace: S) -> Self { pub fn prepend<S: Into<NameParts>>(mut self, namespace: S) -> Self {
let parts: NameParts = namespace.into(); let parts: NameParts = namespace.into();
parts.iter().rev().for_each(|node| parts
self.nodes.push_front(node.clone()) .iter()
); .rev()
.for_each(|node| self.nodes.push_front(node.clone()));
self self
} }
/// Append to the existing namespace. /// Append to the existing namespace.
pub fn append<S: Into<NameParts>>(mut self, namespace: S) -> Self { pub fn append<S: Into<NameParts>>(mut self, namespace: S) -> Self {
let offset = self.nodes.len() - 1; let offset = self.nodes.len() - 1;
let parts: NameParts = namespace.into(); let parts: NameParts = namespace.into();
for (i, part) in parts.iter().enumerate() { for (i, part) in parts.iter().enumerate() {
self.nodes.insert(i + offset, part.clone()) self.nodes.insert(i + offset, part.clone())
} }
@ -97,13 +96,19 @@ impl MetricName {
/// Combine name parts into a string. /// Combine name parts into a string.
pub fn join(&self, separator: &str) -> String { pub fn join(&self, separator: &str) -> String {
self.nodes.iter().map(|s| &**s).collect::<Vec<&str>>().join(separator) self.nodes
.iter()
.map(|s| &**s)
.collect::<Vec<&str>>()
.join(separator)
} }
} }
impl<S: Into<String>> From<S> for MetricName { impl<S: Into<String>> From<S> for MetricName {
fn from(name: S) -> Self { fn from(name: S) -> Self {
MetricName { nodes: NameParts::from(name) } MetricName {
nodes: NameParts::from(name),
}
} }
} }
@ -120,7 +125,6 @@ impl DerefMut for MetricName {
} }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
@ -147,4 +151,4 @@ mod test {
assert_eq!(false, sd1.is_within(&sd2)); assert_eq!(false, sd1.is_within(&sd2));
} }
} }
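A small sketch of the naming API touched above (prepend, join, is_within); the names are arbitrary and the From conversions are assumed to behave as in the impls shown.

extern crate dipstick;

use dipstick::{MetricName, NameParts};

fn main() {
    // leaf name first, then qualify it with a prefix namespace
    let name = MetricName::from("requests").prepend("server");
    assert_eq!("server.requests", name.join("."));

    // "server.requests" is equal to or more specific than "server"
    let prefix = NameParts::from("server");
    assert!(name.is_within(&prefix));
    assert!(!prefix.is_within(&name));
}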


@ -1,29 +1,29 @@
use core::{Flush, MetricValue};
use core::input::InputKind; use core::input::InputKind;
use core::label::Labels;
use core::name::MetricName; use core::name::MetricName;
use core::void::Void; use core::void::Void;
use core::label::Labels; use core::{Flush, MetricValue};
use std::rc::Rc; use std::rc::Rc;
/// Define metrics, write values and flush them. /// Define metrics, write values and flush them.
pub trait OutputScope: Flush { pub trait OutputScope: Flush {
/// Define a raw metric of the specified type. /// Define a raw metric of the specified type.
fn new_metric(&self, name: MetricName, kind: InputKind) -> OutputMetric; fn new_metric(&self, name: MetricName, kind: InputKind) -> OutputMetric;
} }
/// Output metrics are not thread safe. /// Output metrics are not thread safe.
#[derive(Clone)] #[derive(Clone)]
pub struct OutputMetric { pub struct OutputMetric {
inner: Rc<Fn(MetricValue, Labels)> inner: Rc<Fn(MetricValue, Labels)>,
} }
impl OutputMetric { impl OutputMetric {
/// Utility constructor /// Utility constructor
pub fn new<F: Fn(MetricValue, Labels) + 'static>(metric: F) -> OutputMetric { pub fn new<F: Fn(MetricValue, Labels) + 'static>(metric: F) -> OutputMetric {
OutputMetric { inner: Rc::new(metric) } OutputMetric {
inner: Rc::new(metric),
}
} }
/// Some may prefer the `metric.write(value)` form to the `(metric)(value)` form. /// Some may prefer the `metric.write(value)` form to the `(metric)(value)` form.
@ -34,7 +34,6 @@ impl OutputMetric {
} }
} }
/// A function trait that opens a new metric capture scope. /// A function trait that opens a new metric capture scope.
pub trait Output: Send + Sync + 'static + OutputDyn { pub trait Output: Send + Sync + 'static + OutputDyn {
/// The type of Scope returned by this output. /// The type of Scope returned by this output.
@ -44,7 +43,7 @@ pub trait Output: Send + Sync + 'static + OutputDyn {
fn new_scope(&self) -> Self::SCOPE; fn new_scope(&self) -> Self::SCOPE;
/// Open a new scope for this output. /// Open a new scope for this output.
#[deprecated(since="0.7.2", note="Use new_scope()")] #[deprecated(since = "0.7.2", note = "Use new_scope()")]
fn output(&self) -> Self::SCOPE { fn output(&self) -> Self::SCOPE {
self.new_scope() self.new_scope()
} }
@ -66,4 +65,4 @@ impl<T: Output + Send + Sync + 'static> OutputDyn for T {
/// Discard all metric values sent to it. /// Discard all metric values sent to it.
pub fn output_none() -> Void { pub fn output_none() -> Void {
Void {} Void {}
} }
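A hedged sketch of OutputMetric in isolation; real outputs construct these inside new_metric(), and the closure here is only illustrative.

#[macro_use]
extern crate dipstick;

use dipstick::{MetricValue, OutputMetric};

fn main() {
    // wrap a plain closure; real outputs build these in new_metric()
    let metric = OutputMetric::new(|value: MetricValue, _labels| println!("observed {}", value));
    // write() is the preferred form over invoking the stored closure directly
    metric.write(42, labels![]);
}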


@ -8,7 +8,8 @@ use time;
fn seed() -> u64 { fn seed() -> u64 {
let seed = 5573589319906701683_u64; let seed = 5573589319906701683_u64;
let seed = seed.wrapping_mul(6364136223846793005) let seed = seed
.wrapping_mul(6364136223846793005)
.wrapping_add(1442695040888963407) .wrapping_add(1442695040888963407)
.wrapping_add(time::precise_time_ns()); .wrapping_add(time::precise_time_ns());
seed.wrapping_mul(6364136223846793005) seed.wrapping_mul(6364136223846793005)


@ -1,21 +1,21 @@
//! Decouple metric definition from configuration with trait objects. //! Decouple metric definition from configuration with trait objects.
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush}; use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::name::{MetricName, NameParts};
use core::Flush;
use core::input::{InputKind, InputMetric, InputScope};
use core::void::VOID_INPUT;
use core::error; use core::error;
use core::input::{InputKind, InputMetric, InputScope};
use core::name::{MetricName, NameParts};
use core::void::VOID_INPUT;
use core::Flush;
use std::collections::{HashMap, BTreeMap}; use std::collections::{BTreeMap, HashMap};
use std::sync::{Arc, Weak};
use std::fmt; use std::fmt;
use std::sync::{Arc, Weak};
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
use atomic_refcell::*; use atomic_refcell::*;
@ -67,7 +67,6 @@ impl Default for Proxy {
} }
} }
struct InnerProxy { struct InnerProxy {
// namespaces can target one, many or no metrics // namespaces can target one, many or no metrics
targets: HashMap<NameParts, Arc<InputScope + Send + Sync>>, targets: HashMap<NameParts, Arc<InputScope + Send + Sync>>,
@ -83,7 +82,6 @@ impl fmt::Debug for InnerProxy {
} }
impl InnerProxy { impl InnerProxy {
fn new() -> Self { fn new() -> Self {
Self { Self {
targets: HashMap::new(), targets: HashMap::new(),
@ -97,10 +95,14 @@ impl InnerProxy {
for (metric_name, metric) in self.metrics.range_mut(namespace.clone()..) { for (metric_name, metric) in self.metrics.range_mut(namespace.clone()..) {
if let Some(metric) = metric.upgrade() { if let Some(metric) = metric.upgrade() {
// check for range end // check for range end
if !metric_name.is_within(namespace) { break } if !metric_name.is_within(namespace) {
break;
}
// check if metric targeted by _lower_ namespace // check if metric targeted by _lower_ namespace
if metric.target.borrow().1 > namespace.len() { continue } if metric.target.borrow().1 > namespace.len() {
continue;
}
let target_metric = target_scope.new_metric(metric.name.short(), metric.kind); let target_metric = target_scope.new_metric(metric.name.short(), metric.kind);
*metric.target.borrow_mut() = (target_metric, namespace.len()); *metric.target.borrow_mut() = (target_metric, namespace.len());
@ -108,7 +110,10 @@ impl InnerProxy {
} }
} }
fn get_effective_target(&self, namespace: &NameParts) -> Option<(Arc<InputScope + Send + Sync>, usize)> { fn get_effective_target(
&self,
namespace: &NameParts,
) -> Option<(Arc<InputScope + Send + Sync>, usize)> {
if let Some(target) = self.targets.get(namespace) { if let Some(target) = self.targets.get(namespace) {
return Some((target.clone(), namespace.len())); return Some((target.clone(), namespace.len()));
} }
@ -117,7 +122,7 @@ impl InnerProxy {
let mut name = namespace.clone(); let mut name = namespace.clone();
while let Some(_popped) = name.pop_back() { while let Some(_popped) = name.pop_back() {
if let Some(target) = self.targets.get(&name) { if let Some(target) = self.targets.get(&name) {
return Some((target.clone(), name.len())) return Some((target.clone(), name.len()));
} }
} }
None None
@ -126,20 +131,25 @@ impl InnerProxy {
fn unset_target(&mut self, namespace: &NameParts) { fn unset_target(&mut self, namespace: &NameParts) {
if self.targets.remove(namespace).is_none() { if self.targets.remove(namespace).is_none() {
// nothing to do // nothing to do
return return;
} }
let (up_target, up_nslen) = self.get_effective_target(namespace) let (up_target, up_nslen) = self
.get_effective_target(namespace)
.unwrap_or_else(|| (VOID_INPUT.input_dyn(), 0)); .unwrap_or_else(|| (VOID_INPUT.input_dyn(), 0));
// update all affected metrics to next upper targeted namespace // update all affected metrics to next upper targeted namespace
for (name, metric) in self.metrics.range_mut(namespace.clone()..) { for (name, metric) in self.metrics.range_mut(namespace.clone()..) {
// check for range end // check for range end
if !name.is_within(namespace) { break } if !name.is_within(namespace) {
break;
}
if let Some(mut metric) = metric.upgrade() { if let Some(mut metric) = metric.upgrade() {
// check if metric targeted by _lower_ namespace // check if metric targeted by _lower_ namespace
if metric.target.borrow().1 > namespace.len() { continue } if metric.target.borrow().1 > namespace.len() {
continue;
}
let new_metric = up_target.new_metric(name.short(), metric.kind); let new_metric = up_target.new_metric(name.short(), metric.kind);
*metric.target.borrow_mut() = (new_metric, up_nslen); *metric.target.borrow_mut() = (new_metric, up_nslen);
@ -160,11 +170,9 @@ impl InnerProxy {
Ok(()) Ok(())
} }
} }
} }
impl Proxy { impl Proxy {
/// Create a new "private" metric proxy root. This is usually not what you want. /// Create a new "private" metric proxy root. This is usually not what you want.
/// Since this proxy will not be part of the standard proxy tree, /// Since this proxy will not be part of the standard proxy tree,
/// it will need to be configured independently and since downstream code may not know about /// it will need to be configured independently and since downstream code may not know about
@ -178,7 +186,7 @@ impl Proxy {
} }
/// Replace target for this proxy and its children. /// Replace target for this proxy and its children.
#[deprecated(since="0.7.2", note="Use target()")] #[deprecated(since = "0.7.2", note = "Use target()")]
pub fn set_target<T: InputScope + Send + Sync + 'static>(&self, target: T) { pub fn set_target<T: InputScope + Send + Sync + 'static>(&self, target: T) {
self.target(target) self.target(target)
} }
@ -194,7 +202,7 @@ impl Proxy {
} }
/// Install a new default target for all proxies. /// Install a new default target for all proxies.
#[deprecated(since="0.7.2", note="Use default_target()")] #[deprecated(since = "0.7.2", note = "Use default_target()")]
pub fn set_default_target<T: InputScope + Send + Sync + 'static>(target: T) { pub fn set_default_target<T: InputScope + Send + Sync + 'static>(target: T) {
Self::default_target(target) Self::default_target(target)
} }
@ -208,7 +216,6 @@ impl Proxy {
pub fn unset_default_target(&self) { pub fn unset_default_target(&self) {
ROOT_PROXY.unset_target() ROOT_PROXY.unset_target()
} }
} }
impl<S: AsRef<str>> From<S> for Proxy { impl<S: AsRef<str>> From<S> for Proxy {
@ -231,7 +238,8 @@ impl InputScope for Proxy {
let namespace = &*name; let namespace = &*name;
{ {
// not found, define new // not found, define new
let (target, target_namespace_length) = inner.get_effective_target(namespace) let (target, target_namespace_length) = inner
.get_effective_target(namespace)
.unwrap_or_else(|| (VOID_INPUT.input_dyn(), 0)); .unwrap_or_else(|| (VOID_INPUT.input_dyn(), 0));
let metric_object = target.new_metric(namespace.short(), kind); let metric_object = target.new_metric(namespace.short(), kind);
let proxy = Arc::new(ProxyMetric { let proxy = Arc::new(ProxyMetric {
@ -240,7 +248,9 @@ impl InputScope for Proxy {
target: AtomicRefCell::new((metric_object, target_namespace_length)), target: AtomicRefCell::new((metric_object, target_namespace_length)),
proxy: self.inner.clone(), proxy: self.inner.clone(),
}); });
inner.metrics.insert(namespace.clone(), Arc::downgrade(&proxy)); inner
.metrics
.insert(namespace.clone(), Arc::downgrade(&proxy));
proxy proxy
} }
}); });
@ -249,7 +259,6 @@ impl InputScope for Proxy {
} }
impl Flush for Proxy { impl Flush for Proxy {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
write_lock!(self.inner).flush(self.get_prefixes()) write_lock!(self.inner).flush(self.get_prefixes())
@ -257,16 +266,20 @@ impl Flush for Proxy {
} }
impl WithAttributes for Proxy { impl WithAttributes for Proxy {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
mod bench { mod bench {
use super::*; use super::*;
use test;
use bucket::atomic::AtomicBucket; use bucket::atomic::AtomicBucket;
use test;
#[bench] #[bench]
fn proxy_marker_to_aggregate(b: &mut test::Bencher) { fn proxy_marker_to_aggregate(b: &mut test::Bencher) {
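A usage sketch of the proxy flow implied above, assuming Proxy::default() designates the shared root proxy (as in the crate's examples) and default_target() as referenced by the deprecation shim; metric and target names are made up.

extern crate dipstick;

use dipstick::{Input, InputScope, Proxy, Stream};

fn main() {
    // metrics can be declared before any backend is configured
    let hits = Proxy::default().counter("hits");
    hits.count(1); // routed to the Void target for now

    // once a default target is installed, existing proxy metrics are re-bound to it
    Proxy::default_target(Stream::to_stdout().metrics());
    hits.count(1); // now written to stdout
}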


@ -2,13 +2,13 @@
use core::input::InputScope; use core::input::InputScope;
use std::time::{Duration, Instant}; use std::cmp::{max, Ordering};
use std::sync::{Arc, Condvar, Mutex}; use std::collections::BinaryHeap;
use std::sync::atomic::{AtomicBool}; use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering::SeqCst; use std::sync::atomic::Ordering::SeqCst;
use std::collections::{BinaryHeap}; use std::sync::{Arc, Condvar, Mutex};
use std::cmp::{Ordering, max}; use std::thread;
use std::thread::{self}; use std::time::{Duration, Instant};
/// A handle to cancel a scheduled task if required. /// A handle to cancel a scheduled task if required.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -89,7 +89,8 @@ pub static MIN_DELAY: Duration = Duration::from_millis(50);
impl Scheduler { impl Scheduler {
/// Launch a new scheduler thread. /// Launch a new scheduler thread.
fn new() -> Self { fn new() -> Self {
let sched: Arc<(Mutex<BinaryHeap<ScheduledTask>>, Condvar)> = Arc::new((Mutex::new(BinaryHeap::new()), Condvar::new())); let sched: Arc<(Mutex<BinaryHeap<ScheduledTask>>, Condvar)> =
Arc::new((Mutex::new(BinaryHeap::new()), Condvar::new()));
let sched1 = Arc::downgrade(&sched); let sched1 = Arc::downgrade(&sched);
thread::Builder::new() thread::Builder::new()
@ -106,31 +107,29 @@ impl Scheduler {
Some(task) if task.next_time > now => { Some(task) if task.next_time > now => {
// next task is not ready yet, update schedule // next task is not ready yet, update schedule
wait_for = max(MIN_DELAY, task.next_time - now); wait_for = max(MIN_DELAY, task.next_time - now);
break 'work break 'work;
} }
None => { None => {
// TODO no tasks left. exit thread? // TODO no tasks left. exit thread?
break 'work break 'work;
}, }
_ => {} _ => {}
} }
if let Some(mut task) = tasks.pop() { if let Some(mut task) = tasks.pop() {
if task.handle.is_cancelled() { if task.handle.is_cancelled() {
// do not execute, do not reinsert // do not execute, do not reinsert
continue continue;
} }
(task.operation)(); (task.operation)();
task.next_time = now + task.period; task.next_time = now + task.period;
tasks.push(task); tasks.push(task);
} }
}; }
} }
}) })
.unwrap(); .unwrap();
Scheduler { Scheduler { next_tasks: sched }
next_tasks: sched
}
} }
#[cfg(test)] #[cfg(test)]
@ -140,7 +139,9 @@ impl Scheduler {
/// Schedule a task to run periodically. /// Schedule a task to run periodically.
pub fn schedule<F>(&self, period: Duration, operation: F) -> CancelHandle pub fn schedule<F>(&self, period: Duration, operation: F) -> CancelHandle
where F: Fn() -> () + Send + Sync + 'static { where
F: Fn() -> () + Send + Sync + 'static,
{
let handle = CancelHandle::new(); let handle = CancelHandle::new();
let new_task = ScheduledTask { let new_task = ScheduledTask {
next_time: Instant::now() + period, next_time: Instant::now() + period,
@ -157,7 +158,7 @@ impl Scheduler {
#[cfg(test)] #[cfg(test)]
pub mod test { pub mod test {
use super::*; use super::*;
use std::sync::atomic::{AtomicUsize}; use std::sync::atomic::AtomicUsize;
#[test] #[test]
fn schedule_one_and_cancel() { fn schedule_one_and_cancel() {
@ -166,7 +167,9 @@ pub mod test {
let sched = Scheduler::new(); let sched = Scheduler::new();
let handle1 = sched.schedule(Duration::from_millis(50), move || {trig1b.fetch_add(1, SeqCst);}); let handle1 = sched.schedule(Duration::from_millis(50), move || {
trig1b.fetch_add(1, SeqCst);
});
assert_eq!(sched.task_count(), 1); assert_eq!(sched.task_count(), 1);
thread::sleep(Duration::from_millis(170)); thread::sleep(Duration::from_millis(170));
assert_eq!(3, trig1a.load(SeqCst)); assert_eq!(3, trig1a.load(SeqCst));
@ -241,4 +244,3 @@ pub mod test {
handle2.cancel(); handle2.cancel();
} }
} }
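A hedged sketch of how the scheduler is reached from user code; flush_every() on the ScheduleFlush trait is an assumption here, only CancelHandle::cancel() is confirmed by this diff.

extern crate dipstick;

use dipstick::{Input, InputScope, ScheduleFlush, Stream};
use std::time::Duration;

fn main() {
    let metrics = Stream::to_stdout().metrics();
    let ticks = metrics.counter("ticks");
    // assumed: ScheduleFlush::flush_every registers a periodic flush of this
    // scope as a task on the shared scheduler thread shown above
    let handle = metrics.flush_every(Duration::from_secs(3));
    ticks.count(1);
    // cancelled tasks are skipped (neither executed nor reinserted) when popped
    handle.cancel();
}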


@ -1,10 +1,10 @@
use core::output::{Output, OutputScope, OutputMetric}; use core::input::{InputDyn, InputKind, InputScope};
use core::name::MetricName; use core::name::MetricName;
use core::input::{InputKind, InputDyn, InputScope}; use core::output::{Output, OutputMetric, OutputScope};
use core::Flush; use core::Flush;
use std::sync::Arc;
use std::error::Error; use std::error::Error;
use std::sync::Arc;
lazy_static! { lazy_static! {
/// The reference instance identifying an uninitialized metric config. /// The reference instance identifying an uninitialized metric config.
@ -18,14 +18,13 @@ lazy_static! {
#[derive(Clone)] #[derive(Clone)]
pub struct Void {} pub struct Void {}
/// Discard metrics output. /// Discard metrics output.
#[derive(Clone)] #[derive(Clone)]
pub struct VoidOutput {} pub struct VoidOutput {}
impl Void { impl Void {
/// Void metrics builder. /// Void metrics builder.
#[deprecated(since="0.7.2", note="Use new()")] #[deprecated(since = "0.7.2", note = "Use new()")]
pub fn metrics() -> Self { pub fn metrics() -> Self {
Self::new() Self::new()
} }


@ -1,9 +1,14 @@
//! A quick, modular metrics toolkit for Rust applications. //! A quick, modular metrics toolkit for Rust applications.
#![cfg_attr(feature = "bench", feature(test))] #![cfg_attr(feature = "bench", feature(test))]
#![warn(missing_docs, trivial_casts, trivial_numeric_casts, unused_extern_crates, #![warn(
unused_qualifications)] missing_docs,
#![recursion_limit="32"] trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_qualifications
)]
#![recursion_limit = "32"]
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
extern crate test; extern crate test;
@ -19,48 +24,58 @@ extern crate num;
// FIXME required only for pcg32 seed (for sampling) // FIXME required only for pcg32 seed (for sampling)
extern crate time; extern crate time;
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
extern crate crossbeam_channel; extern crate crossbeam_channel;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
extern crate parking_lot; extern crate parking_lot;
#[macro_use] #[macro_use]
mod macros; mod macros;
pub use macros::*; pub use macros::*;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
macro_rules! write_lock { macro_rules! write_lock {
($WUT:expr) => { $WUT.write().unwrap() }; ($WUT:expr) => {
$WUT.write().unwrap()
};
} }
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
macro_rules! write_lock { macro_rules! write_lock {
($WUT:expr) => { $WUT.write() }; ($WUT:expr) => {
$WUT.write()
};
} }
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
macro_rules! read_lock { macro_rules! read_lock {
($WUT:expr) => { $WUT.read().unwrap() }; ($WUT:expr) => {
$WUT.read().unwrap()
};
} }
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
macro_rules! read_lock { macro_rules! read_lock {
($WUT:expr) => { $WUT.read() }; ($WUT:expr) => {
$WUT.read()
};
} }
mod core; mod core;
pub use core::{Flush, MetricValue}; pub use core::attributes::{Buffered, Buffering, Observe, OnFlush, Prefixed, Sampled, Sampling};
pub use core::attributes::{Prefixed, Sampling, Sampled, Buffered, Buffering, OnFlush, Observe}; pub use core::clock::TimeHandle;
pub use core::name::{MetricName, NameParts}; pub use core::error::Result;
pub use core::input::{Input, InputDyn, InputScope, InputMetric, Counter, Timer, Marker, Gauge, Level, InputKind}; pub use core::input::{
pub use core::output::{Output, OutputDyn, OutputScope, OutputMetric}; Counter, Gauge, Input, InputDyn, InputKind, InputMetric, InputScope, Level, Marker, Timer,
pub use core::scheduler::{ScheduleFlush, CancelHandle}; };
pub use core::label::{AppLabel, Labels, ThreadLabel};
pub use core::locking::LockingOutput; pub use core::locking::LockingOutput;
pub use core::error::{Result}; pub use core::name::{MetricName, NameParts};
pub use core::void::{Void}; pub use core::output::{Output, OutputDyn, OutputMetric, OutputScope};
pub use core::clock::{TimeHandle}; pub use core::scheduler::{CancelHandle, ScheduleFlush};
pub use core::label::{Labels, AppLabel, ThreadLabel}; pub use core::void::Void;
pub use core::{Flush, MetricValue};
#[cfg(test)] #[cfg(test)]
pub use core::clock::{mock_clock_advance, mock_clock_reset}; pub use core::clock::{mock_clock_advance, mock_clock_reset};
@ -68,19 +83,19 @@ pub use core::clock::{mock_clock_advance, mock_clock_reset};
pub use core::proxy::Proxy; pub use core::proxy::Proxy;
mod output; mod output;
pub use output::format::{LineFormat, SimpleFormat, LineOp, LabelOp, LineTemplate, Formatting}; pub use output::format::{Formatting, LabelOp, LineFormat, LineOp, LineTemplate, SimpleFormat};
pub use output::stream::{Stream, TextScope}; pub use output::graphite::{Graphite, GraphiteMetric, GraphiteScope};
pub use output::graphite::{Graphite, GraphiteScope, GraphiteMetric};
pub use output::statsd::{Statsd, StatsdScope, StatsdMetric};
pub use output::map::StatsMapScope;
pub use output::log::{Log, LogScope}; pub use output::log::{Log, LogScope};
pub use output::map::StatsMapScope;
pub use output::statsd::{Statsd, StatsdMetric, StatsdScope};
pub use output::stream::{Stream, TextScope};
//#[cfg(feature="prometheus")] //#[cfg(feature="prometheus")]
pub use output::prometheus::{Prometheus, PrometheusScope}; pub use output::prometheus::{Prometheus, PrometheusScope};
mod bucket; mod bucket;
pub use bucket::{ScoreType, stats_all, stats_average, stats_summary}; pub use bucket::atomic::AtomicBucket;
pub use bucket::atomic::{AtomicBucket}; pub use bucket::{stats_all, stats_average, stats_summary, ScoreType};
mod cache; mod cache;
pub use cache::cache_in::CachedInput; pub use cache::cache_in::CachedInput;
@ -91,5 +106,5 @@ pub use multi::multi_in::{MultiInput, MultiInputScope};
pub use multi::multi_out::{MultiOutput, MultiOutputScope}; pub use multi::multi_out::{MultiOutput, MultiOutputScope};
mod queue; mod queue;
pub use queue::queue_in::{QueuedInput, InputQueue, InputQueueScope}; pub use queue::queue_in::{InputQueue, InputQueueScope, QueuedInput};
pub use queue::queue_out::{QueuedOutput, OutputQueue, OutputQueueScope}; pub use queue::queue_out::{OutputQueue, OutputQueueScope, QueuedOutput};


@ -162,7 +162,7 @@ macro_rules! metrics {
}; };
(@internal $WITH:expr; $TYPE:ty;) => () (@internal $WITH:expr; $TYPE:ty;) => ()
} }
#[cfg(test)] #[cfg(test)]
@ -170,7 +170,7 @@ mod test {
use core::input::*; use core::input::*;
use core::proxy::Proxy; use core::proxy::Proxy;
metrics!{TEST: Proxy = "test_prefix" => { metrics! {TEST: Proxy = "test_prefix" => {
pub M1: Marker = "failed"; pub M1: Marker = "failed";
C1: Counter = "failed"; C1: Counter = "failed";
G1: Gauge = "failed"; G1: Gauge = "failed";
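A hedged sketch of the metrics! macro in use outside the crate, combined with the proxy target binding; the prefix and metric names are made up.

#[macro_use]
extern crate dipstick;

use dipstick::{Counter, Input, Marker, Proxy, Stream};

metrics! { APP: Proxy = "my_app" => {
    pub REQUESTS: Marker = "requests";
    ERRORS: Counter = "errors";
}}

fn main() {
    // bind the "my_app" proxy subtree to a concrete scope, then use the statics
    APP.target(Stream::to_stdout().metrics());
    REQUESTS.mark();
    ERRORS.count(1);
}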


@ -1,10 +1,10 @@
//! Dispatch metrics to multiple sinks. //! Dispatch metrics to multiple sinks.
use core::Flush; use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::input::{InputKind, Input, InputScope, InputMetric, InputDyn};
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::error; use core::error;
use core::input::{Input, InputDyn, InputKind, InputMetric, InputScope};
use core::name::MetricName;
use core::Flush;
use std::sync::Arc; use std::sync::Arc;
@ -29,7 +29,7 @@ impl Input for MultiInput {
impl MultiInput { impl MultiInput {
/// Create a new multi-input dispatcher. /// Create a new multi-input dispatcher.
#[deprecated(since="0.7.2", note="Use new()")] #[deprecated(since = "0.7.2", note = "Use new()")]
pub fn input() -> Self { pub fn input() -> Self {
Self::new() Self::new()
} }
@ -51,8 +51,12 @@ impl MultiInput {
} }
impl WithAttributes for MultiInput { impl WithAttributes for MultiInput {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
/// Dispatch metric values to a list of scopes. /// Dispatch metric values to a list of scopes.
@ -78,17 +82,20 @@ impl MultiInputScope {
cloned.scopes.push(Arc::new(scope)); cloned.scopes.push(Arc::new(scope));
cloned cloned
} }
} }
impl InputScope for MultiInputScope { impl InputScope for MultiInputScope {
fn new_metric(&self, name: MetricName, kind: InputKind) -> InputMetric { fn new_metric(&self, name: MetricName, kind: InputKind) -> InputMetric {
let name = &self.prefix_append(name); let name = &self.prefix_append(name);
let metrics: Vec<InputMetric> = self.scopes.iter() let metrics: Vec<InputMetric> = self
.scopes
.iter()
.map(move |scope| scope.new_metric(name.clone(), kind)) .map(move |scope| scope.new_metric(name.clone(), kind))
.collect(); .collect();
InputMetric::new(move |value, labels| for metric in &metrics { InputMetric::new(move |value, labels| {
metric.write(value, labels.clone()) for metric in &metrics {
metric.write(value, labels.clone())
}
}) })
} }
} }
@ -104,6 +111,10 @@ impl Flush for MultiInputScope {
} }
impl WithAttributes for MultiInputScope { impl WithAttributes for MultiInputScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
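A hedged sketch of fanning one scope out to several inputs; add_target() and Log::to_log() are assumed method and constructor names, not confirmed by this diff. MultiOutput mirrors the same pattern for single-threaded outputs.

extern crate dipstick;

use dipstick::{Input, InputScope, Log, MultiInput, Stream};

fn main() {
    // assumed builder method add_target(), registering each input in turn
    let multi = MultiInput::new()
        .add_target(Stream::to_stdout())
        .add_target(Log::to_log()) // constructor name assumed
        .metrics();
    multi.counter("requests").count(1);
}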


@ -1,11 +1,11 @@
//! Dispatch metrics to multiple sinks. //! Dispatch metrics to multiple sinks.
use core::Flush; use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::input::InputKind;
use core::output::{Output, OutputMetric, OutputScope, OutputDyn};
use core::error; use core::error;
use core::input::InputKind;
use core::name::MetricName;
use core::output::{Output, OutputDyn, OutputMetric, OutputScope};
use core::Flush;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
@ -31,7 +31,7 @@ impl Output for MultiOutput {
impl MultiOutput { impl MultiOutput {
/// Create a new multi-output dispatcher. /// Create a new multi-output dispatcher.
#[deprecated(since="0.7.2", note="Use new()")] #[deprecated(since = "0.7.2", note = "Use new()")]
pub fn output() -> Self { pub fn output() -> Self {
Self::new() Self::new()
} }
@ -51,12 +51,15 @@ impl MultiOutput {
cloned.outputs.push(Arc::new(out)); cloned.outputs.push(Arc::new(out));
cloned cloned
} }
} }
impl WithAttributes for MultiOutput { impl WithAttributes for MultiOutput {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
/// Dispatch metric values to a list of scopes. /// Dispatch metric values to a list of scopes.
@ -81,17 +84,20 @@ impl MultiOutputScope {
cloned.scopes.push(Rc::new(scope)); cloned.scopes.push(Rc::new(scope));
cloned cloned
} }
} }
impl OutputScope for MultiOutputScope { impl OutputScope for MultiOutputScope {
fn new_metric(&self, name: MetricName, kind: InputKind) -> OutputMetric { fn new_metric(&self, name: MetricName, kind: InputKind) -> OutputMetric {
let name = &self.prefix_append(name); let name = &self.prefix_append(name);
let metrics: Vec<OutputMetric> = self.scopes.iter() let metrics: Vec<OutputMetric> = self
.scopes
.iter()
.map(move |scope| scope.new_metric(name.clone(), kind)) .map(move |scope| scope.new_metric(name.clone(), kind))
.collect(); .collect();
OutputMetric::new(move |value, labels| for metric in &metrics { OutputMetric::new(move |value, labels| {
metric.write(value, labels.clone()) for metric in &metrics {
metric.write(value, labels.clone())
}
}) })
} }
} }
@ -107,6 +113,10 @@ impl Flush for MultiOutputScope {
} }
impl WithAttributes for MultiOutputScope { impl WithAttributes for MultiOutputScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }


@ -1,7 +1,7 @@
use core::name::MetricName;
use core::input::InputKind;
use core::MetricValue;
use self::LineOp::*; use self::LineOp::*;
use core::input::InputKind;
use core::name::MetricName;
use core::MetricValue;
use std::io; use std::io;
use std::sync::Arc; use std::sync::Arc;
@ -32,7 +32,7 @@ pub enum LabelOp {
/// A sequence of print commands, embodying an output strategy for a single metric. /// A sequence of print commands, embodying an output strategy for a single metric.
pub struct LineTemplate { pub struct LineTemplate {
ops: Vec<LineOp> ops: Vec<LineOp>,
} }
impl From<Vec<LineOp>> for LineTemplate { impl From<Vec<LineOp>> for LineTemplate {
@ -43,8 +43,14 @@ impl From<Vec<LineOp>> for LineTemplate {
impl LineTemplate { impl LineTemplate {
/// Template execution applies commands in turn, writing to the output. /// Template execution applies commands in turn, writing to the output.
pub fn print<L>(&self, output: &mut io::Write, value: MetricValue, lookup: L) -> Result<(), io::Error> pub fn print<L>(
where L: Fn(&str) -> Option<Arc<String>> &self,
output: &mut io::Write,
value: MetricValue,
lookup: L,
) -> Result<(), io::Error>
where
L: Fn(&str) -> Option<Arc<String>>,
{ {
for cmd in &self.ops { for cmd in &self.ops {
match cmd { match cmd {
@ -53,22 +59,19 @@ impl LineTemplate {
ScaledValueAsText(scale) => { ScaledValueAsText(scale) => {
let scaled = value as f64 / scale; let scaled = value as f64 / scale;
output.write_all(format!("{}", scaled).as_ref())? output.write_all(format!("{}", scaled).as_ref())?
}, }
NewLine => writeln!(output)?, NewLine => writeln!(output)?,
LabelExists(label_key, print_label) => { LabelExists(label_key, print_label) => {
if let Some(label_value) = lookup(label_key.as_ref()) { if let Some(label_value) = lookup(label_key.as_ref()) {
for label_cmd in print_label { for label_cmd in print_label {
match label_cmd { match label_cmd {
LabelOp::LabelValue => LabelOp::LabelValue => output.write_all(label_value.as_bytes())?,
output.write_all(label_value.as_bytes())?, LabelOp::LabelKey => output.write_all(label_key.as_bytes())?,
LabelOp::LabelKey => LabelOp::Literal(src) => output.write_all(src.as_ref())?,
output.write_all(label_key.as_bytes())?,
LabelOp::Literal(src) =>
output.write_all(src.as_ref())?,
} }
} }
} }
}, }
}; };
} }
Ok(()) Ok(())
@ -83,7 +86,6 @@ pub trait Formatting {
/// Forges metric-specific printers /// Forges metric-specific printers
pub trait LineFormat: Send + Sync { pub trait LineFormat: Send + Sync {
/// Prepare a template for output of metric values. /// Prepare a template for output of metric values.
fn template(&self, name: &MetricName, kind: InputKind) -> LineTemplate; fn template(&self, name: &MetricName, kind: InputKind) -> LineTemplate;
} }
@ -100,11 +102,7 @@ impl LineFormat for SimpleFormat {
let mut header = name.join("."); let mut header = name.join(".");
header.push(' '); header.push(' ');
LineTemplate { LineTemplate {
ops: vec![ ops: vec![Literal(header.into_bytes()), ValueAsText, NewLine],
Literal(header.into_bytes()),
ValueAsText,
NewLine,
]
} }
} }
} }
@ -129,12 +127,16 @@ pub mod test {
Literal(" ".into()), Literal(" ".into()),
ScaledValueAsText(1000.0), ScaledValueAsText(1000.0),
Literal(" ".into()), Literal(" ".into()),
LabelExists("test_key".into(), vec![ LabelExists(
LabelOp::LabelKey, "test_key".into(),
LabelOp::Literal("=".into()), vec![
LabelOp::LabelValue]), LabelOp::LabelKey,
LabelOp::Literal("=".into()),
LabelOp::LabelValue,
],
),
NewLine, NewLine,
] ],
} }
} }
} }
@ -147,8 +149,13 @@ pub mod test {
name = name.prepend("xyz"); name = name.prepend("xyz");
let template = format.template(&name, InputKind::Counter); let template = format.template(&name, InputKind::Counter);
let mut out = vec![]; let mut out = vec![];
template.print(&mut out, 123000, |key| labels.lookup(key)).unwrap(); template
assert_eq!("Counter/xyz.abc 123000 123 test_key=456\n", String::from_utf8(out).unwrap()); .print(&mut out, 123000, |key| labels.lookup(key))
.unwrap();
assert_eq!(
"Counter/xyz.abc 123000 123 test_key=456\n",
String::from_utf8(out).unwrap()
);
} }
#[test] #[test]
@ -159,6 +166,9 @@ pub mod test {
let template = format.template(&name, InputKind::Counter); let template = format.template(&name, InputKind::Counter);
let mut out = vec![]; let mut out = vec![];
template.print(&mut out, 123000, |_key| None).unwrap(); template.print(&mut out, 123000, |_key| None).unwrap();
assert_eq!("Counter/xyz.abc 123000 123 \n", String::from_utf8(out).unwrap()); assert_eq!(
"Counter/xyz.abc 123000 123 \n",
String::from_utf8(out).unwrap()
);
} }
} }
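A sketch mirroring the template test above: build a LineTemplate from ops and render a value with no label lookup; the header and label key are arbitrary.

extern crate dipstick;

use dipstick::{LabelOp, LineOp, LineTemplate};

fn main() {
    // literal header, raw value, optional label, then a newline
    let template = LineTemplate::from(vec![
        LineOp::Literal("hits ".into()),
        LineOp::ValueAsText,
        LineOp::Literal(" ".into()),
        LineOp::LabelExists(
            "host".into(),
            vec![LabelOp::LabelKey, LabelOp::Literal("=".into()), LabelOp::LabelValue],
        ),
        LineOp::NewLine,
    ]);
    let mut out = Vec::new();
    // no label lookup provided, so the LabelExists block prints nothing
    template.print(&mut out, 42, |_key| None).unwrap();
    assert_eq!("hits 42 \n", String::from_utf8(out).unwrap());
}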


@ -1,32 +1,32 @@
//! Send metrics to a graphite server. //! Send metrics to a graphite server.
use core::attributes::{Buffered, Attributes, WithAttributes, Prefixed, OnFlush}; use cache::cache_out;
use core::name::MetricName; use core::attributes::{Attributes, Buffered, OnFlush, Prefixed, WithAttributes};
use core::{Flush, MetricValue}; use core::error;
use core::input::InputKind; use core::input::InputKind;
use core::metrics; use core::metrics;
use core::output::{Output, OutputScope, OutputMetric}; use core::name::MetricName;
use core::error; use core::output::{Output, OutputMetric, OutputScope};
use queue::queue_out; use core::{Flush, MetricValue};
use cache::cache_out;
use output::socket::RetrySocket; use output::socket::RetrySocket;
use queue::queue_out;
use std::net::ToSocketAddrs; use std::net::ToSocketAddrs;
use std::time::{SystemTime, UNIX_EPOCH};
use std::io::Write;
use std::fmt::Debug; use std::fmt::Debug;
use std::io::Write;
use std::time::{SystemTime, UNIX_EPOCH};
use std::rc::Rc;
use std::cell::{RefCell, RefMut}; use std::cell::{RefCell, RefMut};
use std::rc::Rc;
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
/// Graphite output holds a socket to a graphite server. /// Graphite output holds a socket to a graphite server.
/// The socket is shared between scopes opened from the output. /// The socket is shared between scopes opened from the output.
@ -62,8 +62,12 @@ impl Graphite {
} }
impl WithAttributes for Graphite { impl WithAttributes for Graphite {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for Graphite {} impl Buffered for Graphite {}
@ -98,7 +102,6 @@ impl OutputScope for GraphiteScope {
} }
impl Flush for GraphiteScope { impl Flush for GraphiteScope {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
let buf = self.buffer.borrow_mut(); let buf = self.buffer.borrow_mut();
@ -107,7 +110,7 @@ impl Flush for GraphiteScope {
} }
impl GraphiteScope { impl GraphiteScope {
fn print(&self, metric: &GraphiteMetric, value: MetricValue) { fn print(&self, metric: &GraphiteMetric, value: MetricValue) {
let scaled_value = value / metric.scale; let scaled_value = value / metric.scale;
let value_str = scaled_value.to_string(); let value_str = scaled_value.to_string();
@ -142,7 +145,9 @@ impl GraphiteScope {
} }
fn flush_inner(&self, mut buf: RefMut<String>) -> error::Result<()> { fn flush_inner(&self, mut buf: RefMut<String>) -> error::Result<()> {
if buf.is_empty() { return Ok(()) } if buf.is_empty() {
return Ok(());
}
let mut sock = write_lock!(self.socket); let mut sock = write_lock!(self.socket);
match sock.write_all(buf.as_bytes()) { match sock.write_all(buf.as_bytes()) {
@ -158,13 +163,16 @@ impl GraphiteScope {
Err(e.into()) Err(e.into())
} }
} }
} }
} }
impl WithAttributes for GraphiteScope { impl WithAttributes for GraphiteScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for GraphiteScope {} impl Buffered for GraphiteScope {}
@ -195,9 +203,9 @@ impl Drop for GraphiteScope {
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
mod bench { mod bench {
use super::*;
use core::attributes::*; use core::attributes::*;
use core::input::*; use core::input::*;
use super::*;
use test; use test;
#[bench] #[bench]
@ -210,8 +218,10 @@ mod bench {
#[bench] #[bench]
pub fn buffering_graphite(b: &mut test::Bencher) { pub fn buffering_graphite(b: &mut test::Bencher) {
let sd = Graphite::send_to("localhost:2003").unwrap() let sd = Graphite::send_to("localhost:2003")
.buffered(Buffering::BufferSize(65465)).metrics(); .unwrap()
.buffered(Buffering::BufferSize(65465))
.metrics();
let timer = sd.new_metric("timer".into(), InputKind::Timer); let timer = sd.new_metric("timer".into(), InputKind::Timer);
b.iter(|| test::black_box(timer.write(2000, labels![]))); b.iter(|| test::black_box(timer.write(2000, labels![])));
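A usage sketch following the bench above: open a buffered Graphite scope and write through it; the address and buffer size are illustrative.

extern crate dipstick;

use dipstick::{Buffered, Buffering, Graphite, Input, InputScope};

fn main() {
    // assumes a graphite relay listening on localhost:2003
    let metrics = Graphite::send_to("localhost:2003")
        .expect("graphite socket")
        .buffered(Buffering::BufferSize(1024))
        .metrics();
    metrics.counter("requests").count(1);
    // buffered values are written out on flush, or when the scope is dropped
}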


@ -1,22 +1,22 @@
use core::{Flush};
use core::input::{InputKind, Input, InputScope, InputMetric};
use core::attributes::{Attributes, WithAttributes, Buffered, Prefixed, OnFlush};
use core::name::MetricName;
use core::error;
use cache::cache_in; use cache::cache_in;
use core::attributes::{Attributes, Buffered, OnFlush, Prefixed, WithAttributes};
use core::error;
use core::input::{Input, InputKind, InputMetric, InputScope};
use core::name::MetricName;
use core::Flush;
use output::format::{Formatting, LineFormat, SimpleFormat};
use queue::queue_in; use queue::queue_in;
use output::format::{LineFormat, SimpleFormat, Formatting};
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
use std::io::Write;
use log; use log;
use std::io::Write;
/// Buffered metrics log output. /// Buffered metrics log output.
#[derive(Clone)] #[derive(Clone)]
@ -40,8 +40,12 @@ impl Input for Log {
} }
impl WithAttributes for Log { impl WithAttributes for Log {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for Log {} impl Buffered for Log {}
@ -70,7 +74,7 @@ impl Log {
attributes: Attributes::default(), attributes: Attributes::default(),
format: Arc::new(SimpleFormat::default()), format: Arc::new(SimpleFormat::default()),
level: log::Level::Info, level: log::Level::Info,
target: None target: None,
} }
} }
@ -89,12 +93,15 @@ impl Log {
cloned.target = Some(target.to_string()); cloned.target = Some(target.to_string());
cloned cloned
} }
} }
impl WithAttributes for LogScope { impl WithAttributes for LogScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for LogScope {} impl Buffered for LogScope {}
@ -116,7 +123,7 @@ impl InputScope for LogScope {
Ok(()) => { Ok(()) => {
let mut entries = write_lock!(entries); let mut entries = write_lock!(entries);
entries.push(buffer) entries.push(buffer)
}, }
Err(err) => debug!("Could not format buffered log metric: {}", err), Err(err) => debug!("Could not format buffered log metric: {}", err),
} }
}) })
@ -127,10 +134,12 @@ impl InputScope for LogScope {
InputMetric::new(move |value, labels| { InputMetric::new(move |value, labels| {
let mut buffer = Vec::with_capacity(32); let mut buffer = Vec::with_capacity(32);
match template.print(&mut buffer, value, |key| labels.lookup(key)) { match template.print(&mut buffer, value, |key| labels.lookup(key)) {
Ok(()) => if let Some(target) = &target { Ok(()) => {
log!(target: target, level, "{:?}", &buffer) if let Some(target) = &target {
} else { log!(target: target, level, "{:?}", &buffer)
log!(level, "{:?}", &buffer) } else {
log!(level, "{:?}", &buffer)
}
} }
Err(err) => debug!("Could not format buffered log metric: {}", err), Err(err) => debug!("Could not format buffered log metric: {}", err),
} }
@ -140,7 +149,6 @@ impl InputScope for LogScope {
} }
impl Flush for LogScope { impl Flush for LogScope {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
let mut entries = write_lock!(self.entries); let mut entries = write_lock!(self.entries);
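A hedged sketch of the log output in use; the Log::to_log() constructor is an assumption, only the builder fields above are confirmed by this diff.

extern crate dipstick;

use dipstick::{Input, InputScope, Log};

fn main() {
    // constructor name assumed (Log::to_log); values are emitted as log records
    // at the configured level (Info by default, per the fields above)
    let metrics = Log::to_log().metrics();
    metrics.marker("startup").mark();
}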


@ -1,14 +1,14 @@
use core::{Flush, MetricValue}; use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::input::InputKind; use core::input::InputKind;
use core::input::{Input, InputMetric, InputScope};
use core::name::MetricName; use core::name::MetricName;
use core::input::{InputMetric, InputScope, Input}; use core::{Flush, MetricValue};
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::error::Error; use std::error::Error;
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use ::{OutputScope, OutputMetric}; use {OutputMetric, OutputScope};
/// A BTreeMap wrapper to receive metrics or stats values. /// A BTreeMap wrapper to receive metrics or stats values.
/// Every received value for a metric replaces the previous one (if any). /// Every received value for a metric replaces the previous one (if any).
@ -18,8 +18,12 @@ pub struct StatsMap {
} }
impl WithAttributes for StatsMap { impl WithAttributes for StatsMap {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Input for StatsMap { impl Input for StatsMap {
@ -42,8 +46,12 @@ pub struct StatsMapScope {
} }
impl WithAttributes for StatsMapScope { impl WithAttributes for StatsMapScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl InputScope for StatsMapScope { impl InputScope for StatsMapScope {
@ -68,7 +76,6 @@ impl OutputScope for StatsMapScope {
} }
} }
impl Flush for StatsMapScope { impl Flush for StatsMapScope {
fn flush(&self) -> Result<(), Box<Error + Send + Sync>> { fn flush(&self) -> Result<(), Box<Error + Send + Sync>> {
self.notify_flush_listeners(); self.notify_flush_listeners();


@ -1,18 +1,18 @@
//! Send metrics to a Prometheus server. //! Send metrics to a Prometheus server.
use core::attributes::{Buffered, Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::{Flush, MetricValue};
use core::input::InputKind;
use core::metrics;
use core::output::{Output, OutputScope, OutputMetric};
use core::error;
use queue::queue_out;
use cache::cache_out; use cache::cache_out;
use core::attributes::{Attributes, Buffered, OnFlush, Prefixed, WithAttributes};
use core::error;
use core::input::InputKind;
use core::label::Labels; use core::label::Labels;
use core::metrics;
use core::name::MetricName;
use core::output::{Output, OutputMetric, OutputScope};
use core::{Flush, MetricValue};
use queue::queue_out;
use std::rc::Rc;
use std::cell::{RefCell, RefMut}; use std::cell::{RefCell, RefMut};
use std::rc::Rc;
/// Prometheus output holds a socket to a Prometheus server. /// Prometheus output holds a socket to a Prometheus server.
/// The socket is shared between scopes opened from the output. /// The socket is shared between scopes opened from the output.
@ -50,8 +50,12 @@ impl Prometheus {
} }
impl WithAttributes for Prometheus { impl WithAttributes for Prometheus {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for Prometheus {} impl Buffered for Prometheus {}
@ -85,7 +89,6 @@ impl OutputScope for PrometheusScope {
} }
impl Flush for PrometheusScope { impl Flush for PrometheusScope {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
let buf = self.buffer.borrow_mut(); let buf = self.buffer.borrow_mut();
@ -94,7 +97,7 @@ impl Flush for PrometheusScope {
} }
impl PrometheusScope { impl PrometheusScope {
fn print(&self, metric: &PrometheusMetric, value: MetricValue, labels: Labels) { fn print(&self, metric: &PrometheusMetric, value: MetricValue, labels: Labels) {
let scaled_value = value / metric.scale; let scaled_value = value / metric.scale;
let value_str = scaled_value.to_string(); let value_str = scaled_value.to_string();
@ -127,7 +130,10 @@ impl PrometheusScope {
let buffer = self.buffer.borrow_mut(); let buffer = self.buffer.borrow_mut();
if strbuf.len() + buffer.len() > BUFFER_FLUSH_THRESHOLD { if strbuf.len() + buffer.len() > BUFFER_FLUSH_THRESHOLD {
metrics::PROMETHEUS_OVERFLOW.mark(); metrics::PROMETHEUS_OVERFLOW.mark();
warn!("Prometheus Buffer Size Exceeded: {}", BUFFER_FLUSH_THRESHOLD); warn!(
"Prometheus Buffer Size Exceeded: {}",
BUFFER_FLUSH_THRESHOLD
);
let _ = self.flush_inner(buffer); let _ = self.flush_inner(buffer);
} else { } else {
if !self.is_buffered() { if !self.is_buffered() {
@ -139,9 +145,14 @@ impl PrometheusScope {
} }
fn flush_inner(&self, mut buf: RefMut<String>) -> error::Result<()> { fn flush_inner(&self, mut buf: RefMut<String>) -> error::Result<()> {
if buf.is_empty() { return Ok(()) } if buf.is_empty() {
return Ok(());
}
match minreq::get(self.push_url.as_ref()).with_body(buf.as_ref()).send() { match minreq::get(self.push_url.as_ref())
.with_body(buf.as_ref())
.send()
{
Ok(_res) => { Ok(_res) => {
metrics::PROMETHEUS_SENT_BYTES.count(buf.len()); metrics::PROMETHEUS_SENT_BYTES.count(buf.len());
trace!("Sent {} bytes to Prometheus", buf.len()); trace!("Sent {} bytes to Prometheus", buf.len());
@ -158,8 +169,12 @@ impl PrometheusScope {
} }
impl WithAttributes for PrometheusScope { impl WithAttributes for PrometheusScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for PrometheusScope {} impl Buffered for PrometheusScope {}
@ -190,9 +205,9 @@ impl Drop for PrometheusScope {
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
mod bench { mod bench {
use super::*;
use core::attributes::*; use core::attributes::*;
use core::input::*; use core::input::*;
use super::*;
use test; use test;
#[bench] #[bench]
@ -205,8 +220,10 @@ mod bench {
#[bench] #[bench]
pub fn buffering_prometheus(b: &mut test::Bencher) { pub fn buffering_prometheus(b: &mut test::Bencher) {
let sd = Prometheus::push_to("localhost:2003").unwrap() let sd = Prometheus::push_to("localhost:2003")
.buffered(Buffering::BufferSize(65465)).metrics(); .unwrap()
.buffered(Buffering::BufferSize(65465))
.metrics();
let timer = sd.new_metric("timer".into(), InputKind::Timer); let timer = sd.new_metric("timer".into(), InputKind::Timer);
b.iter(|| test::black_box(timer.write(2000, labels![]))); b.iter(|| test::black_box(timer.write(2000, labels![])));
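A usage sketch following the bench above; the push gateway URL and buffer size are illustrative only.

extern crate dipstick;

use dipstick::{Buffered, Buffering, Input, InputScope, Prometheus};

fn main() {
    // assumes a push gateway reachable at this (illustrative) URL
    let metrics = Prometheus::push_to("http://localhost:9091/metrics/job/example")
        .expect("push gateway")
        .buffered(Buffering::BufferSize(1024))
        .metrics();
    metrics.counter("requests").count(1);
}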


@ -1,11 +1,11 @@
//! A TCP Socket wrapper that reconnects automatically. //! A TCP Socket wrapper that reconnects automatically.
use std::fmt;
use std::io;
use std::io::Write;
use std::net::TcpStream; use std::net::TcpStream;
use std::net::{SocketAddr, ToSocketAddrs}; use std::net::{SocketAddr, ToSocketAddrs};
use std::io;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use std::fmt;
use std::io::Write;
const MIN_RECONNECT_DELAY_MS: u64 = 50; const MIN_RECONNECT_DELAY_MS: u64 = 50;
const MAX_RECONNECT_DELAY_MS: u64 = 10_000; const MAX_RECONNECT_DELAY_MS: u64 = 10_000;


@ -1,21 +1,23 @@
//! Send metrics to a statsd server. //! Send metrics to a statsd server.
use core::attributes::{Buffered, Attributes, Sampled, Sampling, WithAttributes, Prefixed, OnFlush}; use cache::cache_out;
use core::name::MetricName; use core::attributes::{
use core::pcg32; Attributes, Buffered, OnFlush, Prefixed, Sampled, Sampling, WithAttributes,
use core::{Flush, MetricValue}; };
use core::error;
use core::input::InputKind; use core::input::InputKind;
use core::metrics; use core::metrics;
use core::output::{Output, OutputScope, OutputMetric}; use core::name::MetricName;
use core::error; use core::output::{Output, OutputMetric, OutputScope};
use cache::cache_out; use core::pcg32;
use core::{Flush, MetricValue};
use queue::queue_out; use queue::queue_out;
use std::cell::{RefCell, RefMut};
use std::net::ToSocketAddrs; use std::net::ToSocketAddrs;
use std::sync::Arc;
use std::net::UdpSocket; use std::net::UdpSocket;
use std::rc::Rc; use std::rc::Rc;
use std::cell::{RefCell, RefMut}; use std::sync::Arc;
/// Use a safe maximum size for UDP to prevent fragmentation. /// Use a safe maximum size for UDP to prevent fragmentation.
// TODO make configurable? // TODO make configurable?
@ -62,8 +64,12 @@ impl Output for Statsd {
} }
impl WithAttributes for Statsd { impl WithAttributes for Statsd {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
/// Statsd Input /// Statsd Input
@ -99,9 +105,13 @@ impl OutputScope for StatsdScope {
let cloned = self.clone(); let cloned = self.clone();
if let Sampling::Random(float_rate) = self.get_sampling() { if let Sampling::Random(float_rate) = self.get_sampling() {
suffix.push_str(&format!{"|@{}\n", float_rate}); suffix.push_str(&format! {"|@{}\n", float_rate});
let int_sampling_rate = pcg32::to_int_rate(float_rate); let int_sampling_rate = pcg32::to_int_rate(float_rate);
let metric = StatsdMetric { prefix, suffix, scale }; let metric = StatsdMetric {
prefix,
suffix,
scale,
};
OutputMetric::new(move |value, _labels| { OutputMetric::new(move |value, _labels| {
if pcg32::accept_sample(int_sampling_rate) { if pcg32::accept_sample(int_sampling_rate) {
@ -110,16 +120,17 @@ impl OutputScope for StatsdScope {
}) })
} else { } else {
suffix.push_str("\n"); suffix.push_str("\n");
let metric = StatsdMetric { prefix, suffix, scale }; let metric = StatsdMetric {
OutputMetric::new(move |value, _labels| { prefix,
cloned.print(&metric, value) suffix,
}) scale,
};
OutputMetric::new(move |value, _labels| cloned.print(&metric, value))
} }
} }
} }
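For the sampling branch just reformatted: when `Sampling::Random(rate)` is configured, the entry's suffix carries `|@rate` so the statsd server can scale counts back up, while `pcg32::accept_sample` drops the complementary fraction of writes on the client. The resulting entries follow the usual statsd wire shape; an illustrative helper (names and values are examples only, not taken from the crate):

// e.g. sampled_timer_entry("app.req_time", 42, 0.1) == "app.req_time:42|ms|@0.1\n"
fn sampled_timer_entry(name: &str, value: i64, rate: f64) -> String {
    format!("{}:{}|ms|@{}\n", name, value, rate)
}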
impl Flush for StatsdScope { impl Flush for StatsdScope {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
let buf = self.buffer.borrow_mut(); let buf = self.buffer.borrow_mut();
@ -128,7 +139,7 @@ impl Flush for StatsdScope {
} }
impl StatsdScope { impl StatsdScope {
fn print(&self, metric: &StatsdMetric, value: MetricValue) { fn print(&self, metric: &StatsdMetric, value: MetricValue) {
let scaled_value = value / metric.scale; let scaled_value = value / metric.scale;
let value_str = scaled_value.to_string(); let value_str = scaled_value.to_string();
let entry_len = metric.prefix.len() + value_str.len() + metric.suffix.len(); let entry_len = metric.prefix.len() + value_str.len() + metric.suffix.len();
@ -144,7 +155,6 @@ impl StatsdScope {
// buffer is nearly full, make room // buffer is nearly full, make room
let _ = self.flush_inner(buffer); let _ = self.flush_inner(buffer);
buffer = self.buffer.borrow_mut(); buffer = self.buffer.borrow_mut();
} else { } else {
if !buffer.is_empty() { if !buffer.is_empty() {
// separate from previous entry // separate from previous entry
@ -171,7 +181,7 @@ impl StatsdScope {
} }
Err(e) => { Err(e) => {
metrics::STATSD_SEND_ERR.mark(); metrics::STATSD_SEND_ERR.mark();
return Err(e.into()) return Err(e.into());
} }
}; };
buffer.clear(); buffer.clear();
@ -181,8 +191,12 @@ impl StatsdScope {
} }
impl WithAttributes for StatsdScope { impl WithAttributes for StatsdScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Buffered for StatsdScope {} impl Buffered for StatsdScope {}
@ -251,9 +265,9 @@ impl Drop for StatsdScope {
#[cfg(feature = "bench")] #[cfg(feature = "bench")]
mod bench { mod bench {
use super::*;
use core::attributes::*; use core::attributes::*;
use core::input::*; use core::input::*;
use super::*;
use test; use test;
#[bench] #[bench]
@ -266,8 +280,10 @@ mod bench {
#[bench] #[bench]
pub fn buffering_statsd(b: &mut test::Bencher) { pub fn buffering_statsd(b: &mut test::Bencher) {
let sd = Statsd::send_to("localhost:2003").unwrap() let sd = Statsd::send_to("localhost:2003")
.buffered(Buffering::BufferSize(65465)).metrics(); .unwrap()
.buffered(Buffering::BufferSize(65465))
.metrics();
let timer = sd.new_metric("timer".into(), InputKind::Timer); let timer = sd.new_metric("timer".into(), InputKind::Timer);
b.iter(|| test::black_box(timer.write(2000, labels![]))); b.iter(|| test::black_box(timer.write(2000, labels![])));
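Putting the statsd pieces of this commit together, typical use resolves the target address, optionally sets sampling and buffering, then writes through a metrics scope, mirroring the bench above. A minimal sketch (address, rate and buffer size are placeholders; `sampled` is assumed to be the Sampled trait's builder taking the `Sampling` value imported above):

let statsd = Statsd::send_to("localhost:8125")
    .unwrap()
    .sampled(Sampling::Random(0.1))         // keep ~10% of writes, entries tagged "|@0.1"
    .buffered(Buffering::BufferSize(1_432)) // batch datagrams below a typical MTU
    .metrics();
let timer = statsd.new_metric("req_time".into(), InputKind::Timer);
timer.write(2000, labels![]);
statsd.flush().unwrap();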
View File
@ -2,30 +2,30 @@
// TODO parameterize templates // TODO parameterize templates
use core::{Flush}; use core::attributes::{Attributes, Buffered, OnFlush, Prefixed, WithAttributes};
use core::error;
use core::input::InputKind; use core::input::InputKind;
use core::attributes::{Attributes, WithAttributes, Buffered, Prefixed, OnFlush};
use core::name::MetricName; use core::name::MetricName;
use core::output::{Output, OutputMetric, OutputScope}; use core::output::{Output, OutputMetric, OutputScope};
use core::error; use core::Flush;
use cache::cache_out; use cache::cache_out;
use output::format::{Formatting, LineFormat, SimpleFormat};
use queue::queue_out; use queue::queue_out;
use output::format::{LineFormat, SimpleFormat, Formatting};
use std::io::{Write, self};
use std::path::Path;
use std::fs::File;
use std::rc::Rc;
use std::cell::RefCell; use std::cell::RefCell;
use std::fs::File;
use std::io::{self, Write};
use std::path::Path;
use std::rc::Rc;
use std::sync::{Arc}; use std::sync::Arc;
#[cfg(not(feature="parking_lot"))] #[cfg(not(feature = "parking_lot"))]
use std::sync::{RwLock}; use std::sync::RwLock;
#[cfg(feature="parking_lot")] #[cfg(feature = "parking_lot")]
use parking_lot::{RwLock}; use parking_lot::RwLock;
/// Buffered metrics text output. /// Buffered metrics text output.
pub struct Stream<W: Write + Send + Sync + 'static> { pub struct Stream<W: Write + Send + Sync + 'static> {
@ -45,7 +45,7 @@ impl<W: Write + Send + Sync + 'static> Formatting for Stream<W> {
} }
} }
impl<W: Write + Send + Sync + 'static> Stream<W> { impl<W: Write + Send + Sync + 'static> Stream<W> {
/// Write metric values to provided Write target. /// Write metric values to provided Write target.
pub fn write_to(write: W) -> Stream<W> { pub fn write_to(write: W) -> Stream<W> {
Stream { Stream {
@ -77,7 +77,6 @@ impl Stream<io::Stdout> {
} }
} }
// FIXME manual Clone impl required because auto-derive is borked (https://github.com/rust-lang/rust/issues/26925) // FIXME manual Clone impl required because auto-derive is borked (https://github.com/rust-lang/rust/issues/26925)
impl<W: Write + Send + Sync + 'static> Clone for Stream<W> { impl<W: Write + Send + Sync + 'static> Clone for Stream<W> {
fn clone(&self) -> Self { fn clone(&self) -> Self {
@ -90,8 +89,12 @@ impl<W: Write + Send + Sync + 'static> Clone for Stream<W> {
} }
impl<W: Write + Send + Sync + 'static> WithAttributes for Stream<W> { impl<W: Write + Send + Sync + 'static> WithAttributes for Stream<W> {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl<W: Write + Send + Sync + 'static> Buffered for Stream<W> {} impl<W: Write + Send + Sync + 'static> Buffered for Stream<W> {}
@ -115,7 +118,6 @@ pub struct TextScope<W: Write + Send + Sync + 'static> {
output: Stream<W>, output: Stream<W>,
} }
impl<W: Write + Send + Sync + 'static> Clone for TextScope<W> { impl<W: Write + Send + Sync + 'static> Clone for TextScope<W> {
fn clone(&self) -> Self { fn clone(&self) -> Self {
TextScope { TextScope {
@ -127,8 +129,12 @@ impl<W: Write + Send + Sync + 'static> Clone for TextScope<W> {
} }
impl<W: Write + Send + Sync + 'static> WithAttributes for TextScope<W> { impl<W: Write + Send + Sync + 'static> WithAttributes for TextScope<W> {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl<W: Write + Send + Sync + 'static> Buffered for TextScope<W> {} impl<W: Write + Send + Sync + 'static> Buffered for TextScope<W> {}
@ -147,7 +153,7 @@ impl<W: Write + Send + Sync + 'static> OutputScope for TextScope<W> {
Ok(()) => { Ok(()) => {
let mut entries = entries.borrow_mut(); let mut entries = entries.borrow_mut();
entries.push(buffer) entries.push(buffer)
}, }
Err(err) => debug!("{}", err), Err(err) => debug!("{}", err),
} }
}) })
@ -162,7 +168,7 @@ impl<W: Write + Send + Sync + 'static> OutputScope for TextScope<W> {
if let Err(e) = output.write_all(&buffer).and_then(|_| output.flush()) { if let Err(e) = output.write_all(&buffer).and_then(|_| output.flush()) {
debug!("Could not write text metrics: {}", e) debug!("Could not write text metrics: {}", e)
} }
}, }
Err(err) => debug!("{}", err), Err(err) => debug!("{}", err),
} }
}) })
@ -171,7 +177,6 @@ impl<W: Write + Send + Sync + 'static> OutputScope for TextScope<W> {
} }
impl<W: Write + Send + Sync + 'static> Flush for TextScope<W> { impl<W: Write + Send + Sync + 'static> Flush for TextScope<W> {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
let mut entries = self.entries.borrow_mut(); let mut entries = self.entries.borrow_mut();
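The text output above is generic over any `Write + Send + Sync` target: `Stream::write_to` wraps the writer, and `TextScope` either prints each value immediately or, when buffered, accumulates entries until `flush`. A small sketch writing to a file through the raw, single-threaded scope (the path is a placeholder; `new_scope` is assumed to be the Output trait's scope constructor, as used elsewhere in the crate):

use std::fs::File;

let scope = Stream::write_to(File::create("/tmp/metrics.txt").unwrap()).new_scope();
let hits = scope.new_metric("hits".into(), InputKind::Counter);
hits.write(1, labels![]);
// TextScope::flush drains any buffered entries to the underlying writer.
scope.flush().unwrap();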
View File
@ -2,21 +2,21 @@
//! Metrics definitions are still synchronous. //! Metrics definitions are still synchronous.
//! If queue size is exceeded, calling code reverts to blocking. //! If queue size is exceeded, calling code reverts to blocking.
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::input::{InputKind, Input, InputScope, InputDyn, InputMetric};
use core::{MetricValue, Flush};
use core::metrics;
use cache::cache_in::CachedInput; use cache::cache_in::CachedInput;
use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::error; use core::error;
use core::input::{Input, InputDyn, InputKind, InputMetric, InputScope};
use core::label::Labels; use core::label::Labels;
use core::metrics;
use core::name::MetricName;
use core::{Flush, MetricValue};
use std::sync::Arc; #[cfg(not(feature = "crossbeam-channel"))]
#[cfg(not(feature="crossbeam-channel"))]
use std::sync::mpsc; use std::sync::mpsc;
use std::sync::Arc;
use std::thread; use std::thread;
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
use crossbeam_channel as crossbeam; use crossbeam_channel as crossbeam;
/// Wrap this output behind an asynchronous metrics dispatch queue. /// Wrap this output behind an asynchronous metrics dispatch queue.
@ -32,7 +32,7 @@ pub trait QueuedInput: Input + Send + Sync + 'static + Sized {
/// # Panics /// # Panics
/// ///
/// Panics if the OS fails to create a thread. /// Panics if the OS fails to create a thread.
#[cfg(not(feature="crossbeam-channel"))] #[cfg(not(feature = "crossbeam-channel"))]
fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<InputQueueCmd>> { fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<InputQueueCmd>> {
let (sender, receiver) = mpsc::sync_channel::<InputQueueCmd>(length); let (sender, receiver) = mpsc::sync_channel::<InputQueueCmd>(length);
@ -43,9 +43,11 @@ fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<InputQueueCmd>> {
while !done { while !done {
match receiver.recv() { match receiver.recv() {
Ok(InputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels), Ok(InputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels),
Ok(InputQueueCmd::Flush(scope)) => if let Err(e) = scope.flush() { Ok(InputQueueCmd::Flush(scope)) => {
debug!("Could not asynchronously flush metrics: {}", e); if let Err(e) = scope.flush() {
}, debug!("Could not asynchronously flush metrics: {}", e);
}
}
Err(e) => { Err(e) => {
debug!("Async metrics receive loop terminated: {}", e); debug!("Async metrics receive loop terminated: {}", e);
// cannot break from within match, use safety pin instead // cannot break from within match, use safety pin instead
@ -61,7 +63,7 @@ fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<InputQueueCmd>> {
/// # Panics /// # Panics
/// ///
/// Panics if the OS fails to create a thread. /// Panics if the OS fails to create a thread.
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<InputQueueCmd>> { fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<InputQueueCmd>> {
let (sender, receiver) = crossbeam::bounded::<InputQueueCmd>(length); let (sender, receiver) = crossbeam::bounded::<InputQueueCmd>(length);
@ -72,9 +74,11 @@ fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<InputQueueCmd>> {
while !done { while !done {
match receiver.recv() { match receiver.recv() {
Ok(InputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels), Ok(InputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels),
Ok(InputQueueCmd::Flush(scope)) => if let Err(e) = scope.flush() { Ok(InputQueueCmd::Flush(scope)) => {
debug!("Could not asynchronously flush metrics: {}", e); if let Err(e) = scope.flush() {
}, debug!("Could not asynchronously flush metrics: {}", e);
}
}
Err(e) => { Err(e) => {
debug!("Async metrics receive loop terminated: {}", e); debug!("Async metrics receive loop terminated: {}", e);
// cannot break from within match, use safety pin instead // cannot break from within match, use safety pin instead
@ -92,9 +96,9 @@ fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<InputQueueCmd>> {
pub struct InputQueue { pub struct InputQueue {
attributes: Attributes, attributes: Attributes,
target: Arc<InputDyn + Send + Sync + 'static>, target: Arc<InputDyn + Send + Sync + 'static>,
#[cfg(not(feature="crossbeam-channel"))] #[cfg(not(feature = "crossbeam-channel"))]
sender: Arc<mpsc::SyncSender<InputQueueCmd>>, sender: Arc<mpsc::SyncSender<InputQueueCmd>>,
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
sender: Arc<crossbeam::Sender<InputQueueCmd>>, sender: Arc<crossbeam::Sender<InputQueueCmd>>,
} }
@ -112,8 +116,12 @@ impl InputQueue {
impl CachedInput for InputQueue {} impl CachedInput for InputQueue {}
impl WithAttributes for InputQueue { impl WithAttributes for InputQueue {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl Input for InputQueue { impl Input for InputQueue {
@ -144,16 +152,19 @@ pub enum InputQueueCmd {
#[derive(Clone)] #[derive(Clone)]
pub struct InputQueueScope { pub struct InputQueueScope {
attributes: Attributes, attributes: Attributes,
#[cfg(not(feature="crossbeam-channel"))] #[cfg(not(feature = "crossbeam-channel"))]
sender: Arc<mpsc::SyncSender<InputQueueCmd>>, sender: Arc<mpsc::SyncSender<InputQueueCmd>>,
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
sender: Arc<crossbeam::Sender<InputQueueCmd>>, sender: Arc<crossbeam::Sender<InputQueueCmd>>,
target: Arc<InputScope + Send + Sync + 'static>, target: Arc<InputScope + Send + Sync + 'static>,
} }
impl InputQueueScope { impl InputQueueScope {
/// Wrap new scopes with an asynchronous metric write & flush dispatcher. /// Wrap new scopes with an asynchronous metric write & flush dispatcher.
pub fn wrap<SC: InputScope + Send + Sync + 'static>(target_scope: SC, queue_length: usize) -> Self { pub fn wrap<SC: InputScope + Send + Sync + 'static>(
target_scope: SC,
queue_length: usize,
) -> Self {
InputQueueScope { InputQueueScope {
attributes: Attributes::default(), attributes: Attributes::default(),
sender: new_async_channel(queue_length), sender: new_async_channel(queue_length),
@ -163,8 +174,12 @@ impl InputQueueScope {
} }
impl WithAttributes for InputQueueScope { impl WithAttributes for InputQueueScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl InputScope for InputQueueScope { impl InputScope for InputQueueScope {
@ -174,7 +189,8 @@ impl InputScope for InputQueueScope {
let sender = self.sender.clone(); let sender = self.sender.clone();
InputMetric::new(move |value, mut labels| { InputMetric::new(move |value, mut labels| {
labels.save_context(); labels.save_context();
if let Err(e) = sender.send(InputQueueCmd::Write(target_metric.clone(), value, labels)) { if let Err(e) = sender.send(InputQueueCmd::Write(target_metric.clone(), value, labels))
{
metrics::SEND_FAILED.mark(); metrics::SEND_FAILED.mark();
debug!("Failed to send async metrics: {}", e); debug!("Failed to send async metrics: {}", e);
} }
@ -183,7 +199,6 @@ impl InputScope for InputQueueScope {
} }
impl Flush for InputQueueScope { impl Flush for InputQueueScope {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
if let Err(e) = self.sender.send(InputQueueCmd::Flush(self.target.clone())) { if let Err(e) = self.sender.send(InputQueueCmd::Flush(self.target.clone())) {
@ -195,4 +210,3 @@ impl Flush for InputQueueScope {
} }
} }
} }
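In use, the queue is reached either through the `QueuedInput` trait or by wrapping an existing thread-safe scope directly with `InputQueueScope::wrap`, as reformatted above. A minimal sketch of the direct form (the wrapped bucket and queue length are placeholders; `AtomicBucket` is assumed to satisfy the `InputScope + Send + Sync` bound):

let queued = InputQueueScope::wrap(AtomicBucket::new(), 256); // 256-entry bounded queue
let hits = queued.new_metric("hits".into(), InputKind::Counter);
hits.write(1, labels![]); // enqueued; the dispatcher thread performs the actual write
queued.flush().unwrap();  // flush is forwarded through the queue as well

Once the queue is full, `write` blocks the caller until the dispatcher catches up, as the module docs state.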
View File
@ -2,26 +2,26 @@
//! RawMetrics definitions are still synchronous. //! RawMetrics definitions are still synchronous.
//! If queue size is exceeded, calling code reverts to blocking. //! If queue size is exceeded, calling code reverts to blocking.
use core::attributes::{Attributes, WithAttributes, Prefixed, OnFlush};
use core::name::MetricName;
use core::input::{InputKind, Input, InputScope, InputMetric};
use core::output::{OutputDyn, OutputScope, OutputMetric, Output};
use core::{MetricValue, Flush};
use core::metrics;
use cache::cache_in; use cache::cache_in;
use core::attributes::{Attributes, OnFlush, Prefixed, WithAttributes};
use core::error; use core::error;
use core::input::{Input, InputKind, InputMetric, InputScope};
use core::label::Labels; use core::label::Labels;
use core::metrics;
use core::name::MetricName;
use core::output::{Output, OutputDyn, OutputMetric, OutputScope};
use core::{Flush, MetricValue};
use std::rc::Rc;
use std::ops;
use std::fmt; use std::fmt;
use std::ops;
use std::rc::Rc;
use std::sync::Arc; #[cfg(not(feature = "crossbeam-channel"))]
#[cfg(not(feature="crossbeam-channel"))]
use std::sync::mpsc; use std::sync::mpsc;
use std::sync::Arc;
use std::thread; use std::thread;
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
use crossbeam_channel as crossbeam; use crossbeam_channel as crossbeam;
/// Wrap this raw output behind an asynchronous metrics dispatch queue. /// Wrap this raw output behind an asynchronous metrics dispatch queue.
@ -35,7 +35,7 @@ pub trait QueuedOutput: Output + Sized {
/// # Panics /// # Panics
/// ///
/// Panics if the OS fails to create a thread. /// Panics if the OS fails to create a thread.
#[cfg(not(feature="crossbeam-channel"))] #[cfg(not(feature = "crossbeam-channel"))]
fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<OutputQueueCmd>> { fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<OutputQueueCmd>> {
let (sender, receiver) = mpsc::sync_channel::<OutputQueueCmd>(length); let (sender, receiver) = mpsc::sync_channel::<OutputQueueCmd>(length);
@ -46,9 +46,11 @@ fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<OutputQueueCmd>> {
while !done { while !done {
match receiver.recv() { match receiver.recv() {
Ok(OutputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels), Ok(OutputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels),
Ok(OutputQueueCmd::Flush(scope)) => if let Err(e) = scope.flush() { Ok(OutputQueueCmd::Flush(scope)) => {
debug!("Could not asynchronously flush metrics: {}", e); if let Err(e) = scope.flush() {
}, debug!("Could not asynchronously flush metrics: {}", e);
}
}
Err(e) => { Err(e) => {
debug!("Async metrics receive loop terminated: {}", e); debug!("Async metrics receive loop terminated: {}", e);
// cannot break from within match, use safety pin instead // cannot break from within match, use safety pin instead
@ -64,7 +66,7 @@ fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<OutputQueueCmd>> {
/// # Panics /// # Panics
/// ///
/// Panics if the OS fails to create a thread. /// Panics if the OS fails to create a thread.
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<OutputQueueCmd>> { fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<OutputQueueCmd>> {
let (sender, receiver) = crossbeam::bounded::<OutputQueueCmd>(length); let (sender, receiver) = crossbeam::bounded::<OutputQueueCmd>(length);
@ -75,9 +77,11 @@ fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<OutputQueueCmd>> {
while !done { while !done {
match receiver.recv() { match receiver.recv() {
Ok(OutputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels), Ok(OutputQueueCmd::Write(metric, value, labels)) => metric.write(value, labels),
Ok(OutputQueueCmd::Flush(scope)) => if let Err(e) = scope.flush() { Ok(OutputQueueCmd::Flush(scope)) => {
debug!("Could not asynchronously flush metrics: {}", e); if let Err(e) = scope.flush() {
}, debug!("Could not asynchronously flush metrics: {}", e);
}
}
Err(e) => { Err(e) => {
debug!("Async metrics receive loop terminated: {}", e); debug!("Async metrics receive loop terminated: {}", e);
// cannot break from within match, use safety pin instead // cannot break from within match, use safety pin instead
@ -90,15 +94,14 @@ fn new_async_channel(length: usize) -> Arc<crossbeam::Sender<OutputQueueCmd>> {
Arc::new(sender) Arc::new(sender)
} }
/// Wrap scope with an asynchronous metric write & flush dispatcher. /// Wrap scope with an asynchronous metric write & flush dispatcher.
#[derive(Clone)] #[derive(Clone)]
pub struct OutputQueue { pub struct OutputQueue {
attributes: Attributes, attributes: Attributes,
target: Arc<OutputDyn + Send + Sync + 'static>, target: Arc<OutputDyn + Send + Sync + 'static>,
#[cfg(not(feature="crossbeam-channel"))] #[cfg(not(feature = "crossbeam-channel"))]
q_sender: Arc<mpsc::SyncSender<OutputQueueCmd>>, q_sender: Arc<mpsc::SyncSender<OutputQueueCmd>>,
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
q_sender: Arc<crossbeam::Sender<OutputQueueCmd>>, q_sender: Arc<crossbeam::Sender<OutputQueueCmd>>,
} }
@ -114,8 +117,12 @@ impl OutputQueue {
} }
impl WithAttributes for OutputQueue { impl WithAttributes for OutputQueue {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl cache_in::CachedInput for OutputQueue {} impl cache_in::CachedInput for OutputQueue {}
@ -132,7 +139,6 @@ impl Input for OutputQueue {
target: Arc::new(target_scope), target: Arc::new(target_scope),
} }
} }
} }
/// This is only `pub` because `error` module needs to know about it. /// This is only `pub` because `error` module needs to know about it.
@ -149,16 +155,20 @@ pub enum OutputQueueCmd {
#[derive(Clone)] #[derive(Clone)]
pub struct OutputQueueScope { pub struct OutputQueueScope {
attributes: Attributes, attributes: Attributes,
#[cfg(not(feature="crossbeam-channel"))] #[cfg(not(feature = "crossbeam-channel"))]
sender: Arc<mpsc::SyncSender<OutputQueueCmd>>, sender: Arc<mpsc::SyncSender<OutputQueueCmd>>,
#[cfg(feature="crossbeam-channel")] #[cfg(feature = "crossbeam-channel")]
sender: Arc<crossbeam::Sender<OutputQueueCmd>>, sender: Arc<crossbeam::Sender<OutputQueueCmd>>,
target: Arc<UnsafeScope>, target: Arc<UnsafeScope>,
} }
impl WithAttributes for OutputQueueScope { impl WithAttributes for OutputQueueScope {
fn get_attributes(&self) -> &Attributes { &self.attributes } fn get_attributes(&self) -> &Attributes {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes } &self.attributes
}
fn mut_attributes(&mut self) -> &mut Attributes {
&mut self.attributes
}
} }
impl InputScope for OutputQueueScope { impl InputScope for OutputQueueScope {
@ -168,7 +178,8 @@ impl InputScope for OutputQueueScope {
let sender = self.sender.clone(); let sender = self.sender.clone();
InputMetric::new(move |value, mut labels| { InputMetric::new(move |value, mut labels| {
labels.save_context(); labels.save_context();
if let Err(e) = sender.send(OutputQueueCmd::Write(target_metric.clone(), value, labels)) { if let Err(e) = sender.send(OutputQueueCmd::Write(target_metric.clone(), value, labels))
{
metrics::SEND_FAILED.mark(); metrics::SEND_FAILED.mark();
debug!("Failed to send async metrics: {}", e); debug!("Failed to send async metrics: {}", e);
} }
@ -177,7 +188,6 @@ impl InputScope for OutputQueueScope {
} }
impl Flush for OutputQueueScope { impl Flush for OutputQueueScope {
fn flush(&self) -> error::Result<()> { fn flush(&self) -> error::Result<()> {
self.notify_flush_listeners(); self.notify_flush_listeners();
if let Err(e) = self.sender.send(OutputQueueCmd::Flush(self.target.clone())) { if let Err(e) = self.sender.send(OutputQueueCmd::Flush(self.target.clone())) {
@ -193,7 +203,7 @@ impl Flush for OutputQueueScope {
/// Wrap an OutputScope to make it Send + Sync, allowing it to travel the world of threads. /// Wrap an OutputScope to make it Send + Sync, allowing it to travel the world of threads.
/// Obviously, it should only still be used from a single thread or dragons may occur. /// Obviously, it should only still be used from a single thread or dragons may occur.
#[derive(Clone)] #[derive(Clone)]
pub struct UnsafeScope(Rc<OutputScope + 'static> ); pub struct UnsafeScope(Rc<OutputScope + 'static>);
/// This is ok because scope will only ever be used by the dispatcher thread. /// This is ok because scope will only ever be used by the dispatcher thread.
unsafe impl Send for UnsafeScope {} unsafe impl Send for UnsafeScope {}
@ -215,7 +225,6 @@ impl ops::Deref for UnsafeScope {
} }
} }
impl fmt::Debug for OutputMetric { impl fmt::Debug for OutputMetric {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Box<Fn(Value)>") write!(f, "Box<Fn(Value)>")
@ -225,4 +234,3 @@ impl fmt::Debug for OutputMetric {
unsafe impl Send for OutputMetric {} unsafe impl Send for OutputMetric {}
unsafe impl Sync for OutputMetric {} unsafe impl Sync for OutputMetric {}
View File
@ -1,2 +1,2 @@
#[cfg(feature="skeptic")] #[cfg(feature = "skeptic")]
include!(concat!(env!("OUT_DIR"), "/skeptic-tests.rs")); include!(concat!(env!("OUT_DIR"), "/skeptic-tests.rs"));