Raw Bridge

Francis Lalonde 2018-06-21 11:52:10 -04:00
parent 537fc102ad
commit 252eba5d48
36 changed files with 340 additions and 250 deletions

.directory Normal file

@ -0,0 +1,6 @@
[Dolphin]
Timestamp=2018,6,21,9,51,11
Version=4
[Settings]
HiddenFilesShown=true


@ -20,7 +20,6 @@ travis-ci = { repository = "fralalonde/dipstick", branch = "master" }
[dependencies]
log = "0.4"
lazy_static = "1.0"
#derivative = "1.0"
atomic_refcell = "0.1"
skeptic = { version = "0.13", optional = true }
num = { version = "0.1", default-features = false }


@ -135,7 +135,7 @@ timer.interval_us(123_456);
Related metrics can share a namespace:
```rust,skt-run
let app_metrics = metric_scope(to_stdout());
let db_metrics = app_metrics.add_name("database");
let db_metrics = app_metrics.add_prefix("database");
let _db_timer = db_metrics.timer("db_timer");
let _db_counter = db_metrics.counter("db_counter");
```
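For reference, a minimal self-contained sketch of the same idea using the renamed constructors that appear throughout this change (output_stdout, add_prefix); the emitted name "database.db_counter" is an assumption based on the other examples below.

```rust
extern crate dipstick;
use dipstick::*;

fn main() {
    // print metric values to the console
    let app_metrics = output_stdout().new_input();
    // metrics defined through the prefixed handle should report as "database.db_counter"
    let db_metrics = app_metrics.add_prefix("database");
    db_metrics.counter("db_counter").count(1);
}
```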


@ -13,7 +13,7 @@ use dipstick::*;
use std::thread;
metrics!{
<QueueRawInput> ZUG = to_stdout().async(10).new_input() => {
<QueueRawInput> ZUG = output_stdout().async(10).new_input() => {
Counter COUNTER: "counter_a";
Marker EVENT: "event_c";
}


@ -9,7 +9,7 @@ use dipstick::*;
fn main() {
// for this demo, print metric values to the console
let app_metrics = to_stdout().new_input();
let app_metrics = output_stdout().new_input();
// metrics can be predefined by type and name
let counter = app_metrics.counter("counter_a");
@ -19,7 +19,7 @@ fn main() {
app_metrics.counter("just_once").count(4);
// metric names can be prepended with a common prefix
let prefixed_metrics = app_metrics.add_name("subsystem");
let prefixed_metrics = app_metrics.add_prefix("subsystem");
let event = prefixed_metrics.marker("event_c");
let gauge = prefixed_metrics.gauge("gauge_d");


@ -7,11 +7,11 @@ use std::time::Duration;
use dipstick::*;
fn main() {
let metrics = Bucket::new().add_name("test");
let metrics = Bucket::new().add_prefix("test");
// Bucket::set_default_output(to_stdout());
metrics.set_output(to_graphite("localhost:2003").expect("Graphite host name and port")
.add_name("machine1").add_name("application"));
metrics.set_output(output_graphite("localhost:2003").expect("Graphite host name and port")
.add_prefix("machine1").add_prefix("application"));
metrics.flush_every(Duration::from_secs(3));


@ -7,10 +7,10 @@ use std::time::Duration;
use dipstick::*;
fn main() {
let metrics = to_bucket().add_name("test");
let metrics = input_bucket().add_prefix("test");
// Bucket::set_default_output(to_stdout());
metrics.set_output(to_stdout());
metrics.set_output(output_stdout());
metrics.flush_every(Duration::from_secs(3));

examples/bucket_cleanup.rs Executable file

@ -0,0 +1,32 @@
//! A sample application continuously aggregating metrics,
//! printing the summary stats every three seconds
extern crate dipstick;
use dipstick::*;
fn main() {
let metrics = input_bucket();
let counter = metrics.counter("counter_a");
let timer = metrics.timer("timer_a");
let gauge = metrics.gauge("gauge_a");
let marker = metrics.marker("marker_a");
loop {
// add counts forever, non-stop
counter.count(11);
counter.count(12);
counter.count(13);
timer.interval_us(11_000_000);
timer.interval_us(12_000_000);
timer.interval_us(13_000_000);
gauge.value(11);
gauge.value(12);
gauge.value(13);
marker.mark();
}
}


@ -9,7 +9,7 @@ use std::thread::sleep;
use dipstick::*;
fn main() {
let output = to_stdout().with_buffering(Buffering::Unlimited);
let output = output_stdout().with_buffering(Buffering::Unlimited);
loop {
// add counts forever, non-stop


@ -7,7 +7,7 @@ use std::time::Duration;
use dipstick::*;
fn main() {
let metrics = to_stdout().cache(5).new_input().add_name("cache");
let metrics = output_stdout().cache(5).new_input().add_prefix("cache");
loop {
// report some ad-hoc metric values from our "application" loop


@ -19,7 +19,7 @@ fn main() {
// prepend and append to metric name
(_, ScoreType::Count(count)) => {
if let Some(last) = name.pop() {
name.push("customized_add_name".into());
name.push("customized_add_prefix".into());
name.push(format!("{}_and_a_suffix", last));
Some((
Kind::Counter,
@ -32,7 +32,7 @@ fn main() {
},
// scaling the score value and appending unit to name
(kind, ScoreType::Sum(sum)) => Some((kind, name.add_name("per_thousand"), sum / 1000)),
(kind, ScoreType::Sum(sum)) => Some((kind, name.concat("per_thousand"), sum / 1000)),
// using the unmodified metric name
(kind, ScoreType::Mean(avg)) => Some((kind, name, avg.round() as u64)),
@ -43,7 +43,7 @@ fn main() {
}
// send application metrics to aggregator
Bucket::set_default_output(to_stdout());
Bucket::set_default_output(output_stdout());
Bucket::set_default_stats(custom_statistics);
let app_metrics = Bucket::new();
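A sketch of the simpler route under the same assumptions: install one of the renamed predefined strategies (stats_all, stats_average, stats_summary) from src/bucket.rs instead of a custom function; set_default_stats is assumed to accept a plain fn exactly as it does for custom_statistics above.

```rust
extern crate dipstick;
use dipstick::*;
use std::time::Duration;

fn main() {
    // publish aggregated stats to the console using the predefined "average" strategy
    Bucket::set_default_output(output_stdout());
    Bucket::set_default_stats(stats_average);

    let metrics = input_bucket().add_prefix("app");
    metrics.flush_every(Duration::from_secs(3));

    // each flush reports the average of the values recorded during the period
    metrics.counter("requests").count(10);
    metrics.counter("requests").count(20);
}
```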


@ -8,9 +8,9 @@ use std::time::Duration;
fn main() {
let metrics =
to_graphite("localhost:2003")
.expect("Connecting")
.add_name("my_app")
output_graphite("localhost:2003")
.expect("Connected")
.add_prefix("my_app")
.new_input();
loop {


@ -9,7 +9,7 @@ use dipstick::*;
use std::time::Duration;
// undeclared root (un-prefixed) metrics
metrics!(<Bucket> pub AGGREGATE = to_bucket() => {
metrics!(<Bucket> pub AGGREGATE = input_bucket() => {
// create counter "some_counter"
pub Counter ROOT_COUNTER: "root_counter";
// create gauge "root_gauge"
@ -19,14 +19,14 @@ metrics!(<Bucket> pub AGGREGATE = to_bucket() => {
});
metrics!( <Bucket> AGGREGATE.add_name("module_prefix") => {
metrics!( <Bucket> AGGREGATE.add_prefix("module_prefix") => {
// create counter "module_prefix.module_counter"
Counter MOD_COUNTER: "module_counter";
});
fn main() {
// print aggregated metrics to the console
Bucket::set_default_output(to_stdout());
Bucket::set_default_output(output_stdout());
// enable autoflush...
AGGREGATE.flush_every(Duration::from_millis(4000));


@ -37,7 +37,7 @@ metrics!(LIB_METRICS => {
});
fn main() {
ROOT_PROXY.set_target(to_stdout().new_input());
ROOT_PROXY.set_target(output_stdout().new_input());
loop {
ROOT_COUNTER.count(123);


@ -18,13 +18,13 @@
//#[ignore(deprecated)]
//app_metrics!(
// MultiOutput, SAME_TYPE = to_multi()
// .with_output(to_stdout().add_name("yeah"))
// .with_output(to_stdout().add_name("ouch"))
// .with_output(to_stdout().add_prefix("yeah"))
// .with_output(to_stdout().add_prefix("ouch"))
//);
//
//#[ignore(deprecated)]
//app_metrics!(
// MultiOutput, MUTANT_CHILD = SAME_TYPE.add_name("super").add_name("duper")
// MultiOutput, MUTANT_CHILD = SAME_TYPE.add_prefix("super").add_prefix("duper")
//);
fn main() {


@ -8,14 +8,14 @@ use std::time::Duration;
fn main() {
// will output metrics to graphite and to stdout
let different_type_metrics = MultiOutput::new()
.with_output(to_graphite("localhost:2003").expect("Connecting"))
.with_output(to_stdout()).new_input();
.with_output(output_graphite("localhost:2003").expect("Connecting"))
.with_output(output_stdout()).new_input();
// will output metrics twice, once with "cool.yeah" prefix and once with "cool.ouch" prefix.
let same_type_metrics = MultiOutput::new()
.with_output(to_stdout().add_name("yeah"))
.with_output(to_stdout().add_name("ouch"))
.add_name("cool").new_input();
.with_output(output_stdout().add_prefix("yeah"))
.with_output(output_stdout().add_prefix("ouch"))
.add_prefix("cool").new_input();
loop {
different_type_metrics.counter("counter_a").count(123);


@ -7,25 +7,25 @@ use std::time::Duration;
use dipstick::*;
fn main() {
let root = to_proxy();
let sub = root.add_name("sub");
let root = input_proxy();
let sub = root.add_prefix("sub");
let count1 = root.counter("counter_a");
let count2 = sub.counter("counter_b");
loop {
root.set_target(to_stdout().new_input());
root.set_target(output_stdout().new_input());
count1.count(1);
count2.count(2);
// route every metric from the root to stdout with prefix "root"
root.set_target(to_stdout().add_name("root").new_input());
root.set_target(output_stdout().add_prefix("root").new_input());
count1.count(3);
count2.count(4);
// route metrics from "sub" to stdout with prefix "mutant"
sub.set_target(to_stdout().add_name("mutant").new_input());
sub.set_target(output_stdout().add_prefix("mutant").new_input());
count1.count(5);
count2.count(6);
@ -40,7 +40,7 @@ fn main() {
count2.count(10);
// go back to initial single unprefixed route
root.set_target(to_stdout().new_input());
root.set_target(output_stdout().new_input());
count1.count(11);
count2.count(12);


@ -11,7 +11,7 @@ fn main() {
pub fn raw_write() {
// setup dual metric channels
let metrics_log = dipstick::to_log().new_input();
let metrics_log = dipstick::output_log().new_input();
// define and send metrics using raw channel API
let counter = metrics_log.new_metric(


@ -7,7 +7,7 @@ use dipstick::*;
fn main() {
// print only 1 out of every 10000 metrics recorded
let app_metrics = to_statsd("statsd:8125").expect("Statsd")
let app_metrics = output_statsd("statsd:8125").expect("Statsd")
.with_sampling_rate(Sampling::SampleRate(0.0001)).new_input_dyn();
let marker = app_metrics.marker("marker_a");


@ -9,7 +9,7 @@ use dipstick::*;
fn main() {
let app_metrics = Bucket::new();
app_metrics.set_output(to_stdout());
app_metrics.set_output(output_stdout());
app_metrics.flush_every(Duration::from_secs(3));


@ -1,6 +1,7 @@
//! Maintain aggregated metrics for deferred reporting,
//!
use core::{Kind, Value, Name, WithName, NO_METRIC_OUTPUT, Input, OutputDyn, Metric, WithAttributes, Attributes};
use core::{Kind, Value, Name, WithName, output_none, Input, Metric, WithAttributes, Attributes,
RawInput, RawMetric, RawOutputDyn};
use clock::TimeHandle;
use core::Kind::*;
use error;
@ -10,26 +11,28 @@ use scores::ScoreType::*;
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock};
use std::fmt;
use std::borrow::Borrow;
/// A function type to transform aggregated scores into publishable statistics.
pub type StatsFn = Fn(Kind, Name, ScoreType) -> Option<(Kind, Name, Value)> + Send + Sync + 'static;
fn initial_stats() -> &'static StatsFn {
&summary
&stats_summary
}
fn initial_output() -> Arc<OutputDyn + Send + Sync> {
NO_METRIC_OUTPUT.clone()
fn initial_output() -> Arc<RawOutputDyn + Send + Sync> {
Arc::new(output_none())
}
lazy_static! {
static ref DEFAULT_AGGREGATE_STATS: RwLock<Arc<StatsFn>> = RwLock::new(Arc::new(initial_stats()));
static ref DEFAULT_AGGREGATE_OUTPUT: RwLock<Arc<OutputDyn + Send + Sync>> = RwLock::new(initial_output());
static ref DEFAULT_AGGREGATE_OUTPUT: RwLock<Arc<RawOutputDyn + Send + Sync>> = RwLock::new(initial_output());
}
/// Create a new metric aggregating bucket.
pub fn to_bucket() -> Bucket {
pub fn input_bucket() -> Bucket {
Bucket::new()
}
@ -41,28 +44,59 @@ pub struct Bucket {
inner: Arc<RwLock<InnerBucket>>,
}
#[derive(Derivative)]
#[derivative(Debug)]
struct InnerBucket {
metrics: BTreeMap<Name, Arc<Scoreboard>>,
period_start: TimeHandle,
#[derivative(Debug = "ignore")]
stats: Option<Arc<Fn(Kind, Name, ScoreType)
-> Option<(Kind, Name, Value)> + Send + Sync + 'static>>,
#[derivative(Debug = "ignore")]
output: Option<Arc<OutputDyn + Send + Sync + 'static>>,
output: Option<Arc<RawOutputDyn + Send + Sync + 'static>>,
publish_metadata: bool,
}
impl fmt::Debug for InnerBucket {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "metrics: {:?}", self.metrics)?;
write!(f, "period_start: {:?}", self.period_start)
}
}
lazy_static! {
static ref PERIOD_LENGTH: Name = "_period_length".into();
}
impl InnerBucket {
pub fn flush(&mut self) -> error::Result<()> {
let stats_fn = match self.stats {
Some(ref stats_fn) => stats_fn.clone(),
None => DEFAULT_AGGREGATE_STATS.read().unwrap().clone(),
};
let pub_scope = match self.output {
Some(ref out) => out.new_raw_input_dyn(),
None => output_none().new_raw_input_dyn(),
};
self.flush_to(pub_scope.borrow(), stats_fn.as_ref());
// all metrics published!
// purge: if bucket is the last owner of the metric, remove it
// TODO parameterize whether to keep ad-hoc metrics after publish
let mut purged = self.metrics.clone();
self.metrics.iter()
.filter(|&(_k, v)| Arc::strong_count(v) == 1)
.map(|(k, _v)| k)
.for_each(|k| {purged.remove(k);});
self.metrics = purged;
pub_scope.flush_raw()
}
/// Take a snapshot of aggregated values and reset them.
/// Compute stats on captured values using assigned or default stats function.
/// Write stats to assigned or default output.
pub fn flush_to(&mut self, publish_scope: &Input, stats_fn: &StatsFn) {
pub fn flush_to(&mut self, publish_scope: &RawInput, stats_fn: &StatsFn) {
let now = TimeHandle::now();
let duration_seconds = self.period_start.elapsed_us() as f64 / 1_000_000.0;
@ -89,7 +123,7 @@ impl InnerBucket {
for score in metric.2 {
let filtered = (stats_fn)(metric.1, metric.0.clone(), score);
if let Some((kind, name, value)) = filtered {
let metric: Metric = publish_scope.new_metric(name, kind);
let metric: RawMetric = publish_scope.new_metric_raw(name, kind);
metric.write(value)
}
}
@ -101,7 +135,7 @@ impl InnerBucket {
impl<S: AsRef<str>> From<S> for Bucket {
fn from(name: S) -> Bucket {
Bucket::new().add_name(name.as_ref())
Bucket::new().add_prefix(name.as_ref())
}
}
@ -134,7 +168,7 @@ impl Bucket {
}
/// Install a new receiver for all aggregated metrics, replacing any previous receiver.
pub fn set_default_output(default_config: impl OutputDyn + Send + Sync + 'static) {
pub fn set_default_output(default_config: impl RawOutputDyn + Send + Sync + 'static) {
*DEFAULT_AGGREGATE_OUTPUT.write().unwrap() = Arc::new(default_config);
}
@ -157,7 +191,7 @@ impl Bucket {
}
/// Install a new receiver for all aggregated metrics, replacing any previous receiver.
pub fn set_output(&self, new_config: impl OutputDyn + Send + Sync + 'static) {
pub fn set_output(&self, new_config: impl RawOutputDyn + Send + Sync + 'static) {
self.inner.write().expect("Aggregator").output = Some(Arc::new(new_config))
}
@ -167,27 +201,11 @@ impl Bucket {
}
/// Flush the aggregator scores using the specified scope and stats.
pub fn flush_to(&self, publish_scope: &Input, stats_fn: &StatsFn) {
pub fn flush_to(&self, publish_scope: &RawInput, stats_fn: &StatsFn) {
let mut inner = self.inner.write().expect("Aggregator");
inner.flush_to(publish_scope, stats_fn);
}
// /// Discard scores for ad-hoc metrics.
// pub fn cleanup(&self) {
// let orphans: Vec<Name> = self.inner.read().expect("Aggregator").metrics.iter()
// // is aggregator now the sole owner?
// // TODO use weak ref + impl Drop to mark abandoned metrics (see dispatch)
// .filter(|&(_k, v)| Arc::strong_count(v) == 1)
// .map(|(k, _v)| k.to_string())
// .collect();
// if !orphans.is_empty() {
// let remover = &mut self.inner.write().unwrap().metrics;
// orphans.iter().for_each(|k| {
// remover.remove(k);
// });
// }
// }
}
impl Input for Bucket {
@ -207,22 +225,7 @@ impl Input for Bucket {
/// Publish statistics
fn flush(&self) -> error::Result<()> {
let mut inner = self.inner.write().expect("Aggregator");
let stats_fn = match &inner.stats {
&Some(ref stats_fn) => stats_fn.clone(),
&None => DEFAULT_AGGREGATE_STATS.read().unwrap().clone(),
};
let pub_scope = match &inner.output {
&Some(ref out) => out.new_input_dyn(),
&None => DEFAULT_AGGREGATE_OUTPUT.read().unwrap().new_input_dyn(),
};
inner.flush_to(pub_scope.as_ref(), stats_fn.as_ref());
// TODO parameterize whether to keep ad-hoc metrics after publish
// source.cleanup();
pub_scope.flush()
inner.flush()
}
}
@ -234,14 +237,14 @@ impl WithAttributes for Bucket {
/// A predefined export strategy reporting all aggregated stats for all metric types.
/// Resulting stats are named by appending a short suffix to each metric's name.
#[allow(dead_code)]
pub fn all_stats(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name, Value)> {
pub fn stats_all(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name, Value)> {
match score {
Count(hit) => Some((Counter, name.add_name("count"), hit)),
Sum(sum) => Some((kind, name.add_name("sum"), sum)),
Mean(mean) => Some((kind, name.add_name("mean"), mean.round() as Value)),
Max(max) => Some((Gauge, name.add_name("max"), max)),
Min(min) => Some((Gauge, name.add_name("min"), min)),
Rate(rate) => Some((Gauge, name.add_name("rate"), rate.round() as Value)),
Count(hit) => Some((Counter, name.concat("count"), hit)),
Sum(sum) => Some((kind, name.concat("sum"), sum)),
Mean(mean) => Some((kind, name.concat("mean"), mean.round() as Value)),
Max(max) => Some((Gauge, name.concat("max"), max)),
Min(min) => Some((Gauge, name.concat("min"), min)),
Rate(rate) => Some((Gauge, name.concat("rate"), rate.round() as Value)),
}
}
@ -250,7 +253,7 @@ pub fn all_stats(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name
/// Since there is only one stat per metric, there is no risk of collision
/// and so exported stats copy their metric's name.
#[allow(dead_code)]
pub fn average(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name, Value)> {
pub fn stats_average(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name, Value)> {
match kind {
Marker => match score {
Count(count) => Some((Counter, name, count)),
@ -270,7 +273,7 @@ pub fn average(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name,
/// Since there is only one stat per metric, there is no risk of collision
/// and so exported stats copy their metric's name.
#[allow(dead_code)]
pub fn summary(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name, Value)> {
pub fn stats_summary(kind: Kind, name: Name, score: ScoreType) -> Option<(Kind, Name, Value)> {
match kind {
Marker => match score {
Count(count) => Some((Counter, name, count)),
@ -313,7 +316,7 @@ mod bench {
#[cfg(test)]
mod test {
use core::*;
use bucket::{Bucket, all_stats, summary, average, StatsFn};
use bucket::{Bucket, stats_all, stats_summary, stats_average, StatsFn};
use clock::{mock_clock_advance, mock_clock_reset};
use map::StatsMap;
@ -323,7 +326,7 @@ mod test {
fn make_stats(stats_fn: &StatsFn) -> BTreeMap<String, Value> {
mock_clock_reset();
let metrics = Bucket::new().add_name("test");
let metrics = Bucket::new().add_prefix("test");
let counter = metrics.counter("counter_a");
let timer = metrics.timer("timer_a");
@ -353,7 +356,7 @@ mod test {
#[test]
fn external_aggregate_all_stats() {
let map = make_stats(&all_stats);
let map = make_stats(&stats_all);
assert_eq!(map["test.counter_a.count"], 2);
assert_eq!(map["test.counter_a.sum"], 30);
@ -377,7 +380,7 @@ mod test {
#[test]
fn external_aggregate_summary() {
let map = make_stats(&summary);
let map = make_stats(&stats_summary);
assert_eq!(map["test.counter_a"], 30);
assert_eq!(map["test.timer_a"], 30_000_000);
@ -387,7 +390,7 @@ mod test {
#[test]
fn external_aggregate_average() {
let map = make_stats(&average);
let map = make_stats(&stats_average);
assert_eq!(map["test.counter_a"], 15);
assert_eq!(map["test.timer_a"], 15_000_000);
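A sketch modeled on the tests above: aggregate a couple of values, then flush through the raw bridge into a StatsMap and inspect the result; the dot-joined keys and the stats_all suffixes follow the assertions in the test module.

```rust
extern crate dipstick;
use dipstick::*;
use std::collections::BTreeMap;

fn main() {
    // aggregate a couple of values under the "test" prefix
    let metrics = Bucket::new().add_prefix("test");
    let counter = metrics.counter("counter_a");
    counter.count(10);
    counter.count(20);

    // flush through the raw bridge: StatsMap implements RawInput
    let stats = output_map();
    metrics.flush_to(&stats, &stats_all);

    // stats_all appends one suffix per score type to each metric name
    let map: BTreeMap<String, Value> = stats.into();
    assert_eq!(map["test.counter_a.count"], 2);
    assert_eq!(map["test.counter_a.sum"], 30);
}
```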


@ -76,19 +76,6 @@ impl Input for CacheInput {
impl Async for CacheOutput {}
impl CacheInput {
/// Create and increment an ad-hoc counter.
pub fn count(&self, name: &str, value: Value) {
self.counter(name).count(value)
}
/// Create and increment an ad-hoc marker.
pub fn mark(&self, name: &str) {
self.marker(name).mark()
}
}
mod lru {
//! A fixed-size cache with LRU expiration criteria.
//! Stored values will be held onto as long as there is space.


@ -6,12 +6,12 @@ use clock::TimeHandle;
use queue;
use raw_queue;
use cache;
use error;
use std::sync::{Arc, Mutex};
use std::ops;
use std::rc::Rc;
use text;
use error;
use std::fmt;
// TODO define an 'AsValue' trait + impl for supported number types, then drop 'num' crate
pub use num::ToPrimitive;
@ -115,7 +115,7 @@ pub trait WithName {
fn get_namespace(&self) -> &Name;
/// Join namespace and prepend in newly defined metrics.
fn add_name(&self, name: &str) -> Self;
fn add_prefix(&self, name: &str) -> Self;
/// Append the specified name to the local namespace and return the concatenated result.
fn qualified_name(&self, metric_name: Name) -> Name;
@ -127,14 +127,14 @@ impl<T: WithAttributes> WithName for T {
}
/// Join namespace and prepend in newly defined metrics.
fn add_name(&self, name: &str) -> Self {
self.with_attributes(|new_attr| new_attr.namespace = new_attr.namespace.add_name(name))
fn add_prefix(&self, name: &str) -> Self {
self.with_attributes(|new_attr| new_attr.namespace = new_attr.namespace.concat(name))
}
/// Append the specified name to the local namespace and return the concatenated result.
fn qualified_name(&self, name: Name) -> Name {
// FIXME (perf) store name in reverse to prepend with an actual push() to the vec
self.get_attributes().namespace.add_name(name)
self.get_attributes().namespace.concat(name)
}
}
@ -185,7 +185,7 @@ pub struct Name {
impl Name {
/// Concatenate with another namespace into a new one.
pub fn add_name(&self, name: impl Into<Name>) -> Self {
pub fn concat(&self, name: impl Into<Name>) -> Self {
let mut cloned = self.clone();
cloned.inner.extend_from_slice(&name.into().inner);
cloned
@ -242,7 +242,7 @@ impl<S: Into<String>> From<S> for Name {
lazy_static! {
/// The reference instance identifying an uninitialized metric config.
pub static ref NO_METRIC_OUTPUT: Arc<OutputDyn + Send + Sync> = Arc::new(text::to_void());
pub static ref NO_METRIC_OUTPUT: Arc<OutputDyn + Send + Sync> = Arc::new(output_none());
/// The reference instance identifying an uninitialized metric scope.
pub static ref NO_METRIC_SCOPE: Arc<Input + Send + Sync> = NO_METRIC_OUTPUT.new_input_dyn();
@ -280,7 +280,7 @@ pub trait Cache: OutputDyn + Send + Sync + 'static + Sized {
/// Dynamic variant of the Output trait
pub trait OutputDyn {
/// Open a new metric input with dynamic typing.
/// Open a new metric input with dynamic trait typing.
fn new_input_dyn(&self) -> Arc<Input + Send + Sync + 'static>;
}
@ -292,7 +292,7 @@ impl<T: Output + Send + Sync + 'static> OutputDyn for T {
}
/// Define metrics, write values and flush them.
pub trait Input: Send + Sync {
pub trait Input {
/// Define a metric of the specified type.
fn new_metric(&self, name: Name, kind: Kind) -> Metric;
@ -321,6 +321,16 @@ pub trait Input: Send + Sync {
self.new_metric(name.into(), Kind::Gauge).into()
}
/// Create and increment an ad-hoc counter.
fn count(&self, name: &str, value: Value) {
self.counter(name).count(value)
}
/// Create and increment an ad-hoc marker.
fn mark(&self, name: &str) {
self.marker(name).mark()
}
}
/// A metric is actually a function that knows to write a metric value to a metric output.
@ -329,6 +339,12 @@ pub struct Metric {
inner: Arc<Fn(Value) + Send + Sync>
}
impl fmt::Debug for Metric {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arc<Fn(Value) + Send + Sync>")
}
}
impl Metric {
/// Utility constructor
pub fn new<F: Fn(Value) + Send + Sync + 'static>(wfn: F) -> Metric {
@ -363,13 +379,13 @@ pub trait RawAsync: RawOutput + Sized {
/// Dynamic variant of the RawOutput trait
pub trait RawOutputDyn {
/// Open a new metric input with dynamic typing.
fn new_raw_input_dyn(&self) -> RawInputBox;
fn new_raw_input_dyn(&self) -> Rc<RawInput + 'static>;
}
/// Blanket impl that provides RawOutputs their dynamic flavor.
impl<T: RawOutput + Send + Sync + 'static> RawOutputDyn for T {
fn new_raw_input_dyn(&self) -> RawInputBox {
RawInputBox (Rc::new(self.new_raw_input()))
fn new_raw_input_dyn(&self) -> Rc<RawInput + 'static> {
Rc::new(self.new_raw_input())
}
}
@ -377,7 +393,7 @@ impl<T: RawOutput + Send + Sync + 'static> RawOutputDyn for T {
#[derive(Clone)]
pub struct LockingInputBox {
attributes: Attributes,
inner: Arc<Mutex<RawInputBox>>
inner: Arc<Mutex<UnsafeInput>>
}
impl WithAttributes for LockingInputBox {
@ -389,7 +405,7 @@ impl Input for LockingInputBox {
fn new_metric(&self, name: Name, kind: Kind) -> Metric {
let name = self.qualified_name(name);
let raw_metric = self.inner.lock().expect("RawInput Lock").new_metric(name, kind);
let raw_metric = self.inner.lock().expect("RawInput Lock").new_metric_raw(name, kind);
let mutex = self.inner.clone();
Metric::new(move |value| {
let _guard = mutex.lock().expect("RawMetric Lock");
@ -398,7 +414,7 @@ impl Input for LockingInputBox {
}
fn flush(&self) -> error::Result<()> {
self.inner.lock().expect("RawInput Lock").flush()
self.inner.lock().expect("RawInput Lock").flush_raw()
}
}
@ -409,32 +425,46 @@ impl<T: RawOutput + Send + Sync + 'static> Output for T {
fn new_input(&self) -> Self::INPUT {
LockingInputBox {
attributes: Attributes::default(),
inner: Arc::new(Mutex::new(RawInputBox (Rc::new(self.new_raw_input()))))
inner: Arc::new(Mutex::new(UnsafeInput(self.new_raw_input_dyn())))
}
}
}
/// Define metrics, write values and flush them.
pub trait RawInput {
/// Define a metric of the specified type.
fn new_metric(&self, name: Name, kind: Kind) -> RawMetric;
fn new_metric_raw(&self, name: Name, kind: Kind) -> RawMetric;
/// Flush does nothing by default.
fn flush(&self) -> error::Result<()> {
fn flush_raw(&self) -> error::Result<()> {
Ok(())
}
}
/// Blanket impl that provides RawOutputs their dynamic flavor.
impl<T: Input + Send + Sync + 'static> RawInput for T {
fn new_metric_raw(&self, name: Name, kind: Kind) -> RawMetric {
let raw = self.new_metric(name, kind);
RawMetric::new(move |value| raw.write(value))
}
}
/// Wrap a RawInput to make it Send + Sync, allowing it to travel the world of threads.
/// Obviously, it should only still be used from a single thread, which RawAsync does.
/// Obviously, it should only still be used from a single thread or dragons may occur.
#[derive(Clone)]
pub struct RawInputBox ( Rc<RawInput + 'static> );
pub struct UnsafeInput(Rc<RawInput + 'static> );
unsafe impl Send for RawInputBox {}
unsafe impl Sync for RawInputBox {}
unsafe impl Send for UnsafeInput {}
unsafe impl Sync for UnsafeInput {}
impl ops::Deref for RawInputBox {
impl UnsafeInput {
/// Wrap a dynamic RawInput to make it Send + Sync.
pub fn new(input: Rc<RawInput + 'static>) -> Self {
UnsafeInput(input)
}
}
impl ops::Deref for UnsafeInput {
type Target = RawInput + 'static;
fn deref(&self) -> &Self::Target {
Rc::as_ref(&self.0)
@ -447,6 +477,12 @@ pub struct RawMetric {
inner: Box<Fn(Value)>
}
impl fmt::Debug for RawMetric {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Box<Fn(Value)>")
}
}
impl RawMetric {
/// Utility constructor
pub fn new<F: Fn(Value) + 'static>(wfn: F) -> RawMetric {
@ -467,10 +503,8 @@ unsafe impl Sync for RawMetric {}
/// A monotonic counter metric.
/// Since value is only ever increased by one, no value parameter is provided,
/// preventing programming errors.
#[derive(Derivative)]
#[derivative(Debug)]
#[derive(Debug)]
pub struct Marker {
#[derivative(Debug = "ignore")]
inner: Metric,
}
@ -482,10 +516,8 @@ impl Marker {
}
/// A counter that sends values to the metrics backend
#[derive(Derivative)]
#[derivative(Debug)]
#[derive(Debug)]
pub struct Counter {
#[derivative(Debug = "ignore")]
inner: Metric,
}
@ -497,10 +529,8 @@ impl Counter {
}
/// A gauge that sends values to the metrics backend
#[derive(Derivative)]
#[derivative(Debug)]
#[derive(Debug)]
pub struct Gauge {
#[derivative(Debug = "ignore")]
inner: Metric,
}
@ -517,10 +547,8 @@ impl Gauge {
- with the time(Fn) method which wraps a closure with start() and stop() calls.
- with start() and stop() methods wrapping around the operation to time
/// - with the interval_us() method, providing an externally determined microsecond interval
#[derive(Derivative)]
#[derivative(Debug)]
#[derive(Debug)]
pub struct Timer {
#[derivative(Debug = "ignore")]
inner: Metric,
}
@ -587,6 +615,40 @@ impl From<Metric> for Marker {
}
}
/// Discard metrics output.
#[derive(Clone)]
pub struct VoidOutput {}
impl RawOutput for VoidOutput {
type INPUT = VoidOutput;
fn new_raw_input(&self) -> VoidOutput {
VoidOutput {}
}
}
impl RawInput for VoidOutput {
fn new_metric_raw(&self, _name: Name, _kind: Kind) -> RawMetric {
RawMetric::new(|_value| {})
}
}
/// Discard all metric values sent to it.
pub fn output_none() -> VoidOutput {
VoidOutput {}
}
#[cfg(test)]
mod test {
use core::*;
#[test]
fn test_to_void() {
let c = output_none().new_input();
let m = c.new_metric("test".into(), Kind::Marker);
m.write(33);
}
}
#[cfg(feature = "bench")]
mod bench {
@ -608,4 +670,3 @@ mod bench {
b.iter(|| test::black_box(marker.mark()));
}
}
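A minimal sketch of the two ad-hoc convenience helpers added to the Input trait in this file; the stdout output is only a stand-in.

```rust
extern crate dipstick;
use dipstick::*;

fn main() {
    // any Input now offers one-shot helpers that define the metric and write to it
    let metrics = output_stdout().new_input();
    metrics.count("requests", 3);
    metrics.mark("startup");
}
```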


@ -203,7 +203,7 @@ mod legacy_test {
use self_metrics::*;
use deprecated::*;
metrics!(<Aggregate> TEST_METRICS = DIPSTICK_METRICS.add_name("test_prefix"));
metrics!(<Aggregate> TEST_METRICS = DIPSTICK_METRICS.add_prefix("test_prefix"));
app_marker!(<Aggregate> TEST_METRICS => {
M1: "failed",


@ -18,7 +18,7 @@ use std::rc::Rc;
use std::cell::{RefCell, RefMut};
metrics!{
<Bucket> DIPSTICK_METRICS.add_name("graphite") => {
<Bucket> DIPSTICK_METRICS.add_prefix("graphite") => {
Marker SEND_ERR: "send_failed";
Marker TRESHOLD_EXCEEDED: "bufsize_exceeded";
Counter SENT_BYTES: "sent_bytes";
@ -26,7 +26,7 @@ metrics!{
}
/// Send metrics to a graphite server at the address and port provided.
pub fn to_graphite<A: ToSocketAddrs + Debug + Clone>(address: A) -> error::Result<GraphiteOutput> {
pub fn output_graphite<A: ToSocketAddrs + Debug + Clone>(address: A) -> error::Result<GraphiteOutput> {
debug!("Connecting to graphite {:?}", address);
let socket = Arc::new(RwLock::new(RetrySocket::new(address.clone())?));
@ -79,7 +79,7 @@ pub struct GraphiteInput {
impl RawInput for GraphiteInput {
/// Define a metric of the specified type.
fn new_metric(&self, name: Name, kind: Kind) -> RawMetric {
fn new_metric_raw(&self, name: Name, kind: Kind) -> RawMetric {
let mut prefix = self.qualified_name(name).join(".");
prefix.push(' ');
@ -101,7 +101,7 @@ impl RawInput for GraphiteInput {
})
} else {
RawMetric::new(move |value| {
if let Err(err) = cloned.buf_write(&metric, value).and_then(|_| cloned.flush()) {
if let Err(err) = cloned.buf_write(&metric, value).and_then(|_| cloned.flush_raw()) {
debug!("Graphite buffer write failed: {}", err);
SEND_ERR.mark();
}
@ -110,7 +110,7 @@ impl RawInput for GraphiteInput {
}
}
fn flush(&self) -> error::Result<()> {
fn flush_raw(&self) -> error::Result<()> {
let buf = self.buffer.borrow_mut();
self.flush_inner(buf)
}
@ -186,7 +186,7 @@ pub struct GraphiteMetric {
/// Any remaining buffered data is flushed on Drop.
impl Drop for GraphiteInput {
fn drop(&mut self) {
if let Err(err) = self.flush() {
if let Err(err) = self.flush_raw() {
warn!("Could not flush graphite metrics upon Drop: {}", err)
}
}
@ -201,16 +201,16 @@ mod bench {
#[bench]
pub fn unbuffered_graphite(b: &mut test::Bencher) {
let sd = to_graphite("localhost:2003").unwrap().new_raw_input();
let timer = sd.new_metric("timer".into(), Kind::Timer);
let sd = output_graphite("localhost:2003").unwrap().new_raw_input();
let timer = sd.new_metric_raw("timer".into(), Kind::Timer);
b.iter(|| test::black_box(timer.write(2000)));
}
#[bench]
pub fn buffered_graphite(b: &mut test::Bencher) {
let sd = to_graphite("localhost:2003").unwrap().with_buffering(Buffering::BufferSize(65465)).new_raw_input();
let timer = sd.new_metric("timer".into(), Kind::Timer);
let sd = output_graphite("localhost:2003").unwrap().with_buffering(Buffering::BufferSize(65465)).new_raw_input();
let timer = sd.new_metric_raw("timer".into(), Kind::Timer);
b.iter(|| test::black_box(timer.write(2000)));
}


@ -10,9 +10,6 @@ extern crate test;
#[macro_use]
extern crate log;
//#[macro_use]
//extern crate derivative;
#[macro_use]
extern crate lazy_static;
extern crate atomic_refcell;
@ -25,26 +22,26 @@ pub mod error;
pub use error::{Error, Result};
pub mod core;
pub use core::{Value, Kind, Marker, Timer, Counter, Gauge, Input,
Output, NO_METRIC_OUTPUT, OutputDyn,
Name, WithName, WithSamplingRate, Sampling, Buffering, WithBuffering,
Cache, Async, RawAsync, RawInput, RawOutput, RawMetric, RawInputBox, RawOutputDyn};
pub use core::{Value, Kind, Marker, Timer, Counter, Gauge,
Input, Output, OutputDyn,
Name, WithName, WithSamplingRate, Sampling, Buffering, WithBuffering,
Cache, Async, RawAsync, RawInput, RawOutput, RawMetric, UnsafeInput, RawOutputDyn,
output_none, VoidOutput};
#[macro_use]
pub mod macros;
pub mod proxy;
pub use proxy::{InputProxy, ROOT_PROXY, to_proxy};
pub use proxy::{InputProxy, ROOT_PROXY, input_proxy};
mod bucket;
pub use bucket::{Bucket, to_bucket, summary, all_stats, average};
pub use bucket::{Bucket, input_bucket, stats_summary, stats_all, stats_average};
mod text;
pub use text::{to_stdout, TextOutput, TextInput};
pub use text::{to_void, Void};
pub use text::{output_stdout, TextOutput, TextInput};
mod logging;
pub use logging::{LogOutput, LogInput, to_log};
pub use logging::{LogOutput, LogInput, output_log};
mod pcg32;
@ -52,16 +49,16 @@ mod scores;
pub use scores::ScoreType;
mod statsd;
pub use statsd::{StatsdOutput, StatsdInput, to_statsd};
pub use statsd::{StatsdOutput, StatsdInput, output_statsd};
mod graphite;
pub use graphite::{GraphiteOutput, GraphiteInput, to_graphite};
pub use graphite::{GraphiteOutput, GraphiteInput, output_graphite};
//mod prometheus;
//pub use prometheus::{Prometheus, to_prometheus};
mod map;
pub use map::StatsMap;
pub use map::{StatsMap, output_map};
mod socket;
pub use socket::RetrySocket;
@ -70,7 +67,7 @@ mod cache;
pub use cache::{CacheInput, CacheOutput};
mod multi;
pub use multi::{MultiOutput, MultiInput, to_multi};
pub use multi::{MultiOutput, MultiInput, output_multi, input_multi};
mod queue;
pub use queue::{QueueInput, QueueOutput};


@ -8,7 +8,7 @@ use log;
/// Write metric values to the standard log using `info!`.
// TODO parameterize log level
pub fn to_log() -> LogOutput {
pub fn output_log() -> LogOutput {
LogOutput {
attributes: Attributes::default(),
format_fn: Arc::new(text::format_name),
@ -115,7 +115,7 @@ mod test {
#[test]
fn test_to_log() {
let c = super::to_log().new_input_dyn();
let c = super::output_log().new_input_dyn();
let m = c.new_metric("test".into(), Kind::Marker);
m.write(33);
}


@ -135,7 +135,7 @@ mod test {
use bucket::Bucket;
use self_metrics::*;
metrics!(<Bucket> DIPSTICK_METRICS.add_name("test_prefix") => {
metrics!(<Bucket> DIPSTICK_METRICS.add_prefix("test_prefix") => {
Marker M1: "failed";
Marker M2: "success";
Counter C1: "failed";

src/map.rs Normal file → Executable file

@ -1,34 +1,41 @@
use core::{Value, Metric, Kind, Name, Input};
use std::sync::{Arc, RwLock};
use core::{Value, RawMetric, Kind, Name, RawInput};
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::BTreeMap;
/// Create a new StatsMap input to capture metrics to a map
pub fn output_map() -> StatsMap {
StatsMap::new()
}
/// A HashMap wrapper to receive metrics or stats values.
/// Every received value for a metric replaces the previous one (if any).
#[derive(Clone)]
pub struct StatsMap {
inner: Arc<RwLock<BTreeMap<String, Value>>>,
inner: Rc<RefCell<BTreeMap<String, Value>>>,
}
impl StatsMap {
/// Create a new StatsMap.
pub fn new() -> Self {
StatsMap { inner: Arc::new(RwLock::new(BTreeMap::new())) }
StatsMap { inner: Rc::new(RefCell::new(BTreeMap::new())) }
}
}
impl Input for StatsMap {
fn new_metric(&self, name: Name, _kind: Kind) -> Metric {
impl RawInput for StatsMap {
fn new_metric_raw(&self, name: Name, _kind: Kind) -> RawMetric {
let write_to = self.inner.clone();
let name: String = name.join(".");
Metric::new(move |value| {
let _previous = write_to.write().expect("StatsMap").insert(name.clone(), value);
RawMetric::new(move |value| {
let _previous = write_to.borrow_mut().insert(name.clone(), value);
})
}
}
impl From<StatsMap> for BTreeMap<String, Value> {
fn from(map: StatsMap) -> Self {
let z = Arc::try_unwrap(map.inner).expect("StatsMap");
z.into_inner().expect("StatsMap")
// FIXME this is possibly a full map copy, for nothing.
// into_inner() is what we'd really want here but would require some `unsafe`
map.inner.borrow().clone()
}
}
}
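A sketch of the map output on its own, assuming the behaviour visible above: StatsMap is now a RawInput keyed by the dot-joined metric name and converts into a plain BTreeMap.

```rust
extern crate dipstick;
use dipstick::*;
use std::collections::BTreeMap;

fn main() {
    // write a raw value directly into the map-backed input
    let map_input = output_map();
    let gauge = map_input.new_metric_raw("gauge_a".into(), Kind::Gauge);
    gauge.write(42);

    // the latest value per metric is kept; convert to inspect it
    let values: BTreeMap<String, Value> = map_input.into();
    assert_eq!(values["gauge_a"], 42);
}
```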


@ -12,7 +12,7 @@ pub struct MultiOutput {
}
/// Create a new multi-output.
pub fn to_multi() -> MultiOutput {
pub fn output_multi() -> MultiOutput {
MultiOutput::new()
}
@ -38,7 +38,7 @@ impl MultiOutput {
}
/// Returns a clone of the dispatch with the new output added to the list.
pub fn with_output<O: OutputDyn + Send + Sync + 'static>(&self, out: O) -> Self {
pub fn with_output<OUT: OutputDyn + Send + Sync + 'static>(&self, out: OUT) -> Self {
let mut cloned = self.clone();
cloned.outputs.push(Arc::new(out));
cloned
@ -50,6 +50,11 @@ impl WithAttributes for MultiOutput {
fn mut_attributes(&mut self) -> &mut Attributes { &mut self.attributes }
}
/// Create a new multi-output.
pub fn input_multi() -> MultiInput {
MultiInput::new()
}
/// Dispatch metric values to a list of inputs.
#[derive(Clone)]
pub struct MultiInput {
@ -57,6 +62,23 @@ pub struct MultiInput {
inputs: Vec<Arc<Input + Send + Sync>>,
}
impl MultiInput {
/// Create a new multi input dispatcher with no inputs configured.
pub fn new() -> Self {
MultiInput {
attributes: Attributes::default(),
inputs: vec![],
}
}
/// Returns a clone of the dispatch with the new input added to the list.
pub fn with_input<IN: Input + Send + Sync + 'static>(&self, input: IN) -> Self {
let mut cloned = self.clone();
cloned.inputs.push(Arc::new(input));
cloned
}
}
impl Input for MultiInput {
fn new_metric(&self, name: Name, kind: Kind) -> Metric {
let ref name = self.qualified_name(name);
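A sketch of the new input-side dispatcher (input_multi, with_input); the prefixes are arbitrary and mirror the multi example earlier in this change.

```rust
extern crate dipstick;
use dipstick::*;

fn main() {
    // every value written to the multi input is forwarded to each attached input
    let multi = input_multi()
        .with_input(output_stdout().add_prefix("yeah").new_input())
        .with_input(output_stdout().add_prefix("ouch").new_input());
    multi.counter("counter_a").count(1);
}
```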


@ -16,7 +16,7 @@ use std::fmt::Debug;
use socket::RetrySocket;
metrics!{
<Aggregate> DIPSTICK_METRICS.add_name("prometheus") => {
<Aggregate> DIPSTICK_METRICS.add_prefix("prometheus") => {
Marker SEND_ERR: "send_failed";
Marker TRESHOLD_EXCEEDED: "bufsize_exceeded";
Counter SENT_BYTES: "sent_bytes";


@ -5,6 +5,7 @@ use error;
use std::collections::{HashMap, BTreeMap};
use std::sync::{Arc, RwLock, Weak};
use std::fmt;
use atomic_refcell::*;
@ -17,13 +18,12 @@ lazy_static! {
}
/// Return the root metric proxy.
pub fn to_proxy() -> InputProxy {
pub fn input_proxy() -> InputProxy {
ROOT_PROXY.clone()
}
/// A dynamically proxied metric.
#[derive(Derivative)]
#[derivative(Debug)]
#[derive(Debug)]
struct ProxiedMetric {
// basic info for this metric, needed to recreate new corresponding trait object if target changes
name: Name,
@ -32,11 +32,9 @@ struct ProxiedMetric {
// the metric trait object to proxy metric values to
// the second part can be up to namespace.len() + 1 if this metric was individually targeted
// 0 if no target assigned
#[derivative(Debug = "ignore")]
target: (AtomicRefCell<(Metric, usize)>),
// a reference to the parent proxy to remove the metric from when it is dropped
#[derivative(Debug = "ignore")]
proxy: Arc<RwLock<InnerProxy>>,
}
@ -51,7 +49,7 @@ impl Drop for ProxiedMetric {
/// Decouples metrics definition from backend configuration.
/// Allows defining metrics before a concrete type has been selected.
/// Allows replacing metrics backend on the fly at runtime.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct InputProxy {
attributes: Attributes,
inner: Arc<RwLock<InnerProxy>>,
@ -64,6 +62,13 @@ struct InnerProxy {
metrics: BTreeMap<Name, Weak<ProxiedMetric>>,
}
impl fmt::Debug for InnerProxy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "metrics: {:?}", self.metrics.keys())?;
write!(f, "targets: {:?}", self.targets.keys())
}
}
impl InnerProxy {
fn new() -> Self {
@ -174,7 +179,7 @@ impl InputProxy {
impl<S: AsRef<str>> From<S> for InputProxy {
fn from(name: S) -> InputProxy {
InputProxy::new().add_name(name.as_ref())
InputProxy::new().add_prefix(name.as_ref())
}
}


@ -13,7 +13,7 @@ use std::sync::mpsc;
use std::thread;
metrics!{
<Bucket> DIPSTICK_METRICS.add_name("async_queue") => {
<Bucket> DIPSTICK_METRICS.add_prefix("async_queue") => {
/// Maybe queue was full?
Marker SEND_FAILED: "send_failed";
}


@ -2,7 +2,7 @@
//! RawMetrics definitions are still synchronous.
//! If queue size is exceeded, calling code reverts to blocking.
use core::{Value, RawMetric, Name, Kind, Marker, WithName, RawOutputDyn,
WithAttributes, Attributes, Input, Output, Metric, RawInputBox};
WithAttributes, Attributes, Input, Output, Metric, UnsafeInput};
use bucket::Bucket;
use error;
@ -13,7 +13,7 @@ use std::sync::mpsc;
use std::thread;
metrics!{
<Bucket> DIPSTICK_METRICS.add_name("raw_async_queue") => {
<Bucket> DIPSTICK_METRICS.add_prefix("raw_async_queue") => {
/// Maybe queue was full?
Marker SEND_FAILED: "send_failed";
}
@ -26,7 +26,7 @@ fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<RawQueueCmd>> {
while !done {
match receiver.recv() {
Ok(RawQueueCmd::Write(wfn, value)) => wfn.write(value),
Ok(RawQueueCmd::Flush(input)) => if let Err(e) = input.flush() {
Ok(RawQueueCmd::Flush(input)) => if let Err(e) = input.flush_raw() {
debug!("Could not asynchronously flush metrics: {}", e);
},
Err(e) => {
@ -40,6 +40,7 @@ fn new_async_channel(length: usize) -> Arc<mpsc::SyncSender<RawQueueCmd>> {
Arc::new(sender)
}
/// Wrap new inputs with an asynchronous metric write & flush dispatcher.
#[derive(Clone)]
pub struct QueueRawOutput {
@ -69,7 +70,7 @@ impl Output for QueueRawOutput {
/// Wrap new inputs with an asynchronous metric write & flush dispatcher.
fn new_input(&self) -> Self::INPUT {
let target_input = self.target.new_raw_input_dyn();
let target_input = UnsafeInput::new(self.target.new_raw_input_dyn());
QueueRawInput {
attributes: self.attributes.clone(),
sender: self.sender.clone(),
@ -83,7 +84,7 @@ impl Output for QueueRawOutput {
/// Async commands should be of no concerns to applications.
pub enum RawQueueCmd {
Write(Arc<RawMetric>, Value),
Flush(Arc<RawInputBox>),
Flush(Arc<UnsafeInput>),
}
/// A metric input wrapper that sends writes & flushes over a Rust sync channel.
@ -92,7 +93,7 @@ pub enum RawQueueCmd {
pub struct QueueRawInput {
attributes: Attributes,
sender: Arc<mpsc::SyncSender<RawQueueCmd>>,
target: Arc<RawInputBox>,
target: Arc<UnsafeInput>,
}
impl WithAttributes for QueueRawInput {
@ -103,7 +104,7 @@ impl WithAttributes for QueueRawInput {
impl Input for QueueRawInput {
fn new_metric(&self, name: Name, kind:Kind) -> Metric {
let name = self.qualified_name(name);
let target_metric = Arc::new(self.target.new_metric(name, kind));
let target_metric = Arc::new(self.target.new_metric_raw(name, kind));
let sender = self.sender.clone();
Metric::new(move |value| {
if let Err(e) = sender.send(RawQueueCmd::Write(target_metric.clone(), value)) {
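A rough sketch of the raw queue from the caller's side, assuming the async combinator used by the macro example earlier in this change; writes and flushes are forwarded over the channel to the thread that owns the wrapped raw input.

```rust
extern crate dipstick;
use dipstick::*;

fn main() {
    // queue up to 32 pending commands before the caller blocks
    let metrics = output_stdout().async(32).new_input();
    let counter = metrics.counter("queued_counter");
    counter.count(1);
    // flush is also dispatched as a queue command to the background thread
    metrics.flush().expect("flush");
}
```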


@ -14,14 +14,14 @@ use std::sync::{Arc, RwLock};
pub use std::net::ToSocketAddrs;
metrics! {
<Bucket> DIPSTICK_METRICS.add_name("statsd") => {
<Bucket> DIPSTICK_METRICS.add_prefix("statsd") => {
Marker SEND_ERR: "send_failed";
Counter SENT_BYTES: "sent_bytes";
}
}
/// Send metrics to a statsd server at the address and port provided.
pub fn to_statsd<ADDR: ToSocketAddrs>(address: ADDR) -> error::Result<StatsdOutput> {
pub fn output_statsd<ADDR: ToSocketAddrs>(address: ADDR) -> error::Result<StatsdOutput> {
let socket = Arc::new(UdpSocket::bind("0.0.0.0:0")?);
socket.set_nonblocking(true)?;
socket.connect(address)?;
@ -204,7 +204,7 @@ mod bench {
#[bench]
pub fn timer_statsd(b: &mut test::Bencher) {
let sd = to_statsd("localhost:8125").unwrap().new_input_dyn();
let sd = output_statsd("localhost:8125").unwrap().new_input_dyn();
let timer = sd.new_metric("timer".into(), Kind::Timer);
b.iter(|| test::black_box(timer.write(2000)));


@ -9,7 +9,7 @@ use std::rc::Rc;
use std::cell::RefCell;
/// Write metric values to stdout using `println!`.
pub fn to_stdout() -> TextOutput<io::Stdout> {
pub fn output_stdout() -> TextOutput<io::Stdout> {
TextOutput {
attributes: Attributes::default(),
inner: Arc::new(RwLock::new(io::stdout())),
@ -100,7 +100,7 @@ impl<W: Write + Send + Sync + 'static> WithAttributes for TextInput<W> {
impl<W: Write + Send + Sync + 'static> WithBuffering for TextInput<W> {}
impl<W: Write + Send + Sync + 'static> RawInput for TextInput<W> {
fn new_metric(&self, name: Name, kind: Kind) -> RawMetric {
fn new_metric_raw(&self, name: Name, kind: Kind) -> RawMetric {
let name = self.qualified_name(name);
let template = (self.output.format_fn)(&name, kind);
@ -135,7 +135,7 @@ impl<W: Write + Send + Sync + 'static> RawInput for TextInput<W> {
}
}
fn flush(&self) -> error::Result<()> {
fn flush_raw(&self) -> error::Result<()> {
let mut entries = self.entries.borrow_mut();
if !entries.is_empty() {
let mut output = self.output.inner.write().expect("TextOutput");
@ -150,49 +150,19 @@ impl<W: Write + Send + Sync + 'static> RawInput for TextInput<W> {
impl<W: Write + Send + Sync + 'static> Drop for TextInput<W> {
fn drop(&mut self) {
if let Err(e) = self.flush() {
if let Err(e) = self.flush_raw() {
warn!("Could not flush text metrics on Drop. {}", e)
}
}
}
/// Discard metrics output.
#[derive(Clone)]
pub struct Void {}
impl RawOutput for Void {
type INPUT = Void;
fn new_raw_input(&self) -> Void {
self.clone()
}
}
impl RawInput for Void {
fn new_metric(&self, _name: Name, _kind: Kind) -> RawMetric {
RawMetric::new(|_value| {})
}
}
/// Discard all metric values sent to it.
pub fn to_void() -> Void {
Void {}
}
#[cfg(test)]
mod test {
use core::*;
#[test]
fn sink_print() {
let c = super::to_stdout().new_input_dyn();
let m = c.new_metric("test".into(), Kind::Marker);
m.write(33);
}
#[test]
fn test_to_void() {
let c = super::to_void().new_input_dyn();
let c = super::output_stdout().new_input_dyn();
let m = c.new_metric("test".into(), Kind::Marker);
m.write(33);
}