vector_core/event/metric/value.rs

use core::fmt;
use std::collections::BTreeSet;

use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

use vector_common::byte_size_of::ByteSizeOf;
use vector_config::configurable_component;

use crate::{float_eq, metrics::AgentDDSketch};

use super::{samples_to_buckets, write_list, write_word};

const INFINITY: &str = "inf";
const NEG_INFINITY: &str = "-inf";
const NAN: &str = "NaN";

/// Container for the actual value of a metric.
#[configurable_component]
#[derive(Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum MetricValue {
    /// A cumulative numerical value that can only increase or be reset to zero.
    Counter {
        /// The value of the counter.
        value: f64,
    },

    /// A single numerical value that can arbitrarily go up and down.
    Gauge {
        /// The value of the gauge.
        value: f64,
    },

    /// A set of (unordered) unique values for a key.
    Set {
        /// The values in the set.
        values: BTreeSet<String>,
    },

    /// A set of observations without any aggregation or sampling.
    Distribution {
        /// The observed values within this distribution.
        samples: Vec<Sample>,

        /// The type of statistics to derive for this distribution.
        statistic: StatisticKind,
    },

    /// A set of observations which are counted into buckets.
    ///
    /// It also contains the total count of all observations and their sum to allow calculating the mean.
    AggregatedHistogram {
        /// The buckets within this histogram.
        buckets: Vec<Bucket>,

        /// The total number of observations contained within this histogram.
        count: u64,

        /// The sum of all observations contained within this histogram.
        sum: f64,
    },

    /// A set of observations which are represented by quantiles.
    ///
    /// Each quantile contains the upper value of the quantile (0 <= φ <= 1). It also contains the total count of all
    /// observations and their sum to allow calculating the mean.
    AggregatedSummary {
        /// The quantiles measured from this summary.
        quantiles: Vec<Quantile>,

        /// The total number of observations contained within this summary.
        count: u64,

        /// The sum of all observations contained within this summary.
        sum: f64,
    },

    /// A data structure that can answer questions about the cumulative distribution of the contained samples in a
    /// space-efficient way.
    ///
    /// Sketches represent the data in a way that allows queries over it to have bounded error guarantees without
    /// needing to hold every single sample in memory. They are also typically able to be merged with other sketches of
    /// the same type, such that client-side _and_ server-side aggregation can be accomplished without loss of accuracy
    /// in the queries.
    Sketch {
        #[configurable(derived)]
        sketch: MetricSketch,
    },
}

impl MetricValue {
    /// Returns `true` if the value is empty.
    ///
    /// Emptiness is dictated by whether or not the value has any samples or measurements present. Consequently, scalar
    /// values (counter, gauge) are never considered empty.
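    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::collections::BTreeSet;
    ///
    /// // Scalar values are never empty, even at zero.
    /// assert!(!MetricValue::Counter { value: 0.0 }.is_empty());
    ///
    /// // Collection-like values are empty when they hold no samples or measurements.
    /// assert!(MetricValue::Set { values: BTreeSet::new() }.is_empty());
    /// ```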
    pub fn is_empty(&self) -> bool {
        match self {
            MetricValue::Counter { .. } | MetricValue::Gauge { .. } => false,
            MetricValue::Set { values } => values.is_empty(),
            MetricValue::Distribution { samples, .. } => samples.is_empty(),
            MetricValue::AggregatedSummary { count, .. }
            | MetricValue::AggregatedHistogram { count, .. } => *count == 0,
            MetricValue::Sketch { sketch } => sketch.is_empty(),
        }
    }

    /// Gets the name of this value as a string.
    ///
    /// This maps to the name of the enum variant itself.
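    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// assert_eq!(MetricValue::Counter { value: 1.0 }.as_name(), "counter");
    /// assert_eq!(MetricValue::Gauge { value: 1.0 }.as_name(), "gauge");
    /// ```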
    pub fn as_name(&self) -> &'static str {
        match self {
            Self::Counter { .. } => "counter",
            Self::Gauge { .. } => "gauge",
            Self::Set { .. } => "set",
            Self::Distribution { .. } => "distribution",
            Self::AggregatedHistogram { .. } => "aggregated histogram",
            Self::AggregatedSummary { .. } => "aggregated summary",
            Self::Sketch { sketch } => sketch.as_name(),
        }
    }

    /// Converts a distribution to an aggregated histogram.
    ///
    /// Histogram bucket bounds are based on `buckets`, where each value is the upper bound of a bucket.  Samples will
    /// thus be bucketed in a "less than or equal to" fashion: if a given sample is less than or equal to a bucket's
    /// upper bound, it is counted towards that bucket at the given sample rate.
    ///
    /// If this value is not a distribution, then `None` is returned.  Otherwise,
    /// `Some(MetricValue::AggregatedHistogram)` is returned.
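    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let distribution = MetricValue::Distribution {
    ///     samples: vec![
    ///         Sample { value: 0.5, rate: 1 },
    ///         Sample { value: 3.0, rate: 2 },
    ///     ],
    ///     statistic: StatisticKind::Histogram,
    /// };
    ///
    /// // 0.5 is counted in the bucket with upper bound 1.0; 3.0 (at rate 2) in the 5.0 bucket.
    /// let histogram = distribution.distribution_to_agg_histogram(&[1.0, 5.0]);
    /// assert!(matches!(histogram, Some(MetricValue::AggregatedHistogram { .. })));
    ///
    /// // Non-distribution values yield `None`.
    /// assert!(MetricValue::Gauge { value: 1.0 }
    ///     .distribution_to_agg_histogram(&[1.0])
    ///     .is_none());
    /// ```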
    pub fn distribution_to_agg_histogram(&self, buckets: &[f64]) -> Option<MetricValue> {
        match self {
            MetricValue::Distribution { samples, .. } => {
                let (buckets, count, sum) = samples_to_buckets(samples, buckets);

                Some(MetricValue::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                })
            }
            _ => None,
        }
    }

    /// Converts a distribution to a sketch.
    ///
    /// This conversion specifically uses the `AgentDDSketch` sketch variant, in the default configuration that matches
    /// the Datadog Agent, parameter-wise.
    ///
    /// If this value is not a distribution, then `None` is returned.  Otherwise, `Some(MetricValue::Sketch)` is
    /// returned.
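    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let distribution = MetricValue::Distribution {
    ///     samples: vec![Sample { value: 1.0, rate: 1 }],
    ///     statistic: StatisticKind::Summary,
    /// };
    ///
    /// assert!(matches!(
    ///     distribution.distribution_to_sketch(),
    ///     Some(MetricValue::Sketch { .. })
    /// ));
    ///
    /// // Non-distribution values yield `None`.
    /// assert!(MetricValue::Gauge { value: 1.0 }.distribution_to_sketch().is_none());
    /// ```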
    pub fn distribution_to_sketch(&self) -> Option<MetricValue> {
        match self {
            MetricValue::Distribution { samples, .. } => {
                let mut sketch = AgentDDSketch::with_agent_defaults();
                for sample in samples {
                    sketch.insert_n(sample.value, sample.rate);
                }

                Some(MetricValue::Sketch {
                    sketch: MetricSketch::AgentDDSketch(sketch),
                })
            }
            _ => None,
        }
    }

    /// Zeroes out all the values contained in this value.
    ///
    /// This keeps all the bucket/value vectors for the histogram and summary metric types intact while zeroing the
    /// counts. Distribution metrics are emptied of all their values.
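    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut value = MetricValue::AggregatedHistogram {
    ///     buckets: vec![Bucket { upper_limit: 1.0, count: 10 }],
    ///     count: 10,
    ///     sum: 3.2,
    /// };
    ///
    /// value.zero();
    ///
    /// // The bucket layout is kept; only the counts and sum are reset.
    /// assert!(value.is_empty());
    /// ```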
    pub fn zero(&mut self) {
        match self {
            Self::Counter { value } | Self::Gauge { value } => *value = 0.0,
            Self::Set { values } => values.clear(),
            Self::Distribution { samples, .. } => samples.clear(),
            Self::AggregatedHistogram {
                buckets,
                count,
                sum,
            } => {
                for bucket in buckets {
                    bucket.count = 0;
                }
                *count = 0;
                *sum = 0.0;
            }
            Self::AggregatedSummary {
                quantiles,
                sum,
                count,
            } => {
                for quantile in quantiles {
                    quantile.value = 0.0;
                }
                *count = 0;
                *sum = 0.0;
            }
            Self::Sketch { sketch } => match sketch {
                MetricSketch::AgentDDSketch(ddsketch) => {
                    ddsketch.clear();
                }
            },
        }
    }

    /// Adds another value to this one.
    ///
    /// If the other value is not the same type, or if it is but the defining characteristics of the value differ
    /// (e.g. aggregated histograms with different bucket layouts), then `false` is returned.  Otherwise, `true` is
    /// returned.
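    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut counter = MetricValue::Counter { value: 1.0 };
    /// assert!(counter.add(&MetricValue::Counter { value: 2.0 }));
    /// assert_eq!(counter, MetricValue::Counter { value: 3.0 });
    ///
    /// // Mismatched types are rejected.
    /// assert!(!counter.add(&MetricValue::Gauge { value: 1.0 }));
    /// ```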
    #[must_use]
    pub fn add(&mut self, other: &Self) -> bool {
        match (self, other) {
            (Self::Counter { ref mut value }, Self::Counter { value: value2 })
            | (Self::Gauge { ref mut value }, Self::Gauge { value: value2 }) => {
                *value += value2;
                true
            }
            (Self::Set { ref mut values }, Self::Set { values: values2 }) => {
                values.extend(values2.iter().map(Into::into));
                true
            }
            (
                Self::Distribution {
                    ref mut samples,
                    statistic: statistic_a,
                },
                Self::Distribution {
                    samples: samples2,
                    statistic: statistic_b,
                },
            ) if statistic_a == statistic_b => {
                samples.extend_from_slice(samples2);
                true
            }
            (
                Self::AggregatedHistogram {
                    ref mut buckets,
                    ref mut count,
                    ref mut sum,
                },
                Self::AggregatedHistogram {
                    buckets: buckets2,
                    count: count2,
                    sum: sum2,
                },
            ) if buckets.len() == buckets2.len()
                && buckets
                    .iter()
                    .zip(buckets2.iter())
                    .all(|(b1, b2)| b1.upper_limit == b2.upper_limit) =>
            {
                for (b1, b2) in buckets.iter_mut().zip(buckets2) {
                    b1.count += b2.count;
                }
                *count += count2;
                *sum += sum2;
                true
            }
            (Self::Sketch { sketch }, Self::Sketch { sketch: sketch2 }) => {
                match (sketch, sketch2) {
                    (
                        MetricSketch::AgentDDSketch(ddsketch),
                        MetricSketch::AgentDDSketch(ddsketch2),
                    ) => ddsketch.merge(ddsketch2).is_ok(),
                }
            }
            _ => false,
        }
    }

    /// Subtracts another value from this one.
    ///
    /// If the other value is not the same type, or if it is but the defining characteristics of the value differ
    /// (e.g. aggregated histograms with different bucket layouts), then `false` is returned.  Otherwise, `true` is
    /// returned.
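    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut current = MetricValue::Counter { value: 10.0 };
    /// assert!(current.subtract(&MetricValue::Counter { value: 3.0 }));
    /// assert_eq!(current, MetricValue::Counter { value: 7.0 });
    ///
    /// // A counter that appears to have gone backwards is rejected so the caller can reinitialize it.
    /// let mut reset = MetricValue::Counter { value: 1.0 };
    /// assert!(!reset.subtract(&MetricValue::Counter { value: 5.0 }));
    /// ```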
    #[must_use]
    pub fn subtract(&mut self, other: &Self) -> bool {
        match (self, other) {
            // Counters are monotonic; they should _never_ go backwards unless reset to 0 due to a
            // process restart, etc.  Thus, being able to generate negative deltas would violate
            // that.  Whether a counter is reset to 0, or whether it incorrectly warps to a previous
            // value, it doesn't matter: we're going to reinitialize it.
            (Self::Counter { ref mut value }, Self::Counter { value: value2 })
                if *value >= *value2 =>
            {
                *value -= value2;
                true
            }
            (Self::Gauge { ref mut value }, Self::Gauge { value: value2 }) => {
                *value -= value2;
                true
            }
            (Self::Set { ref mut values }, Self::Set { values: values2 }) => {
                for item in values2 {
                    values.remove(item);
                }
                true
            }
            (
                Self::Distribution {
                    ref mut samples,
                    statistic: statistic_a,
                },
                Self::Distribution {
                    samples: samples2,
                    statistic: statistic_b,
                },
            ) if statistic_a == statistic_b => {
                // This is an ugly algorithm, but the use of a HashSet or equivalent is complicated by neither Hash nor
                // Eq being implemented for the f64 part of Sample.
                //
                // TODO: This logic does not work if a value is repeated within a distribution. For example, if the
                // current distribution is [1, 2, 3, 1, 2, 3] and the previous distribution is [1, 2, 3], this would
                // yield a result of [].
                //
                // The only reasonable way we could provide subtraction, I believe, is if we required the ordering to
                // stay the same, such that we would just take the samples from the non-overlapping region as the delta.
                // In the above example: length of samples from `other` would be 3, so delta would be
                // `self.samples[3..]`.
                *samples = samples
                    .iter()
                    .copied()
                    .filter(|sample| samples2.iter().all(|sample2| sample != sample2))
                    .collect();
                true
            }
            // Aggregated histograms, at least in Prometheus, are also typically monotonic in terms of growth.
            // Subtracting them in reverse -- e.g., subtracting a newer one with more values from an older one with
            // fewer values -- would not make sense, since buckets should never be able to have negative counts... and
            // it's not clear that a saturating subtraction is technically correct either.  Instead, we avoid having to
            // make that decision, and simply force the metric to be reinitialized.
            (
                Self::AggregatedHistogram {
                    ref mut buckets,
                    ref mut count,
                    ref mut sum,
                },
                Self::AggregatedHistogram {
                    buckets: buckets2,
                    count: count2,
                    sum: sum2,
                },
            ) if *count >= *count2
                && buckets.len() == buckets2.len()
                && buckets
                    .iter()
                    .zip(buckets2.iter())
                    .all(|(b1, b2)| b1.upper_limit == b2.upper_limit) =>
            {
                for (b1, b2) in buckets.iter_mut().zip(buckets2) {
                    b1.count -= b2.count;
                }
                *count -= count2;
                *sum -= sum2;
                true
            }
            _ => false,
        }
    }
}

impl ByteSizeOf for MetricValue {
    fn allocated_bytes(&self) -> usize {
        match self {
            Self::Counter { .. } | Self::Gauge { .. } => 0,
            Self::Set { values } => values.allocated_bytes(),
            Self::Distribution { samples, .. } => samples.allocated_bytes(),
            Self::AggregatedHistogram { buckets, .. } => buckets.allocated_bytes(),
            Self::AggregatedSummary { quantiles, .. } => quantiles.allocated_bytes(),
            Self::Sketch { sketch } => sketch.allocated_bytes(),
        }
    }
}

impl PartialEq for MetricValue {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Counter { value: l_value }, Self::Counter { value: r_value })
            | (Self::Gauge { value: l_value }, Self::Gauge { value: r_value }) => {
                float_eq(*l_value, *r_value)
            }
            (Self::Set { values: l_values }, Self::Set { values: r_values }) => {
                l_values == r_values
            }
            (
                Self::Distribution {
                    samples: l_samples,
                    statistic: l_statistic,
                },
                Self::Distribution {
                    samples: r_samples,
                    statistic: r_statistic,
                },
            ) => l_samples == r_samples && l_statistic == r_statistic,
            (
                Self::AggregatedHistogram {
                    buckets: l_buckets,
                    count: l_count,
                    sum: l_sum,
                },
                Self::AggregatedHistogram {
                    buckets: r_buckets,
                    count: r_count,
                    sum: r_sum,
                },
            ) => l_buckets == r_buckets && l_count == r_count && float_eq(*l_sum, *r_sum),
            (
                Self::AggregatedSummary {
                    quantiles: l_quantiles,
                    count: l_count,
                    sum: l_sum,
                },
                Self::AggregatedSummary {
                    quantiles: r_quantiles,
                    count: r_count,
                    sum: r_sum,
                },
            ) => l_quantiles == r_quantiles && l_count == r_count && float_eq(*l_sum, *r_sum),
            (Self::Sketch { sketch: l_sketch }, Self::Sketch { sketch: r_sketch }) => {
                l_sketch == r_sketch
            }
            _ => false,
        }
    }
}

impl fmt::Display for MetricValue {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self {
            MetricValue::Counter { value } | MetricValue::Gauge { value } => {
                write!(fmt, "{value}")
            }
            MetricValue::Set { values } => {
                write_list(fmt, " ", values.iter(), |fmt, value| write_word(fmt, value))
            }
            MetricValue::Distribution { samples, statistic } => {
                write!(
                    fmt,
                    "{} ",
                    match statistic {
                        StatisticKind::Histogram => "histogram",
                        StatisticKind::Summary => "summary",
                    }
                )?;
                write_list(fmt, " ", samples, |fmt, sample| {
                    write!(fmt, "{}@{}", sample.rate, sample.value)
                })
            }
            MetricValue::AggregatedHistogram {
                buckets,
                count,
                sum,
            } => {
                write!(fmt, "count={count} sum={sum} ")?;
                write_list(fmt, " ", buckets, |fmt, bucket| {
                    write!(fmt, "{}@{}", bucket.count, bucket.upper_limit)
                })
            }
            MetricValue::AggregatedSummary {
                quantiles,
                count,
                sum,
            } => {
                write!(fmt, "count={count} sum={sum} ")?;
                write_list(fmt, " ", quantiles, |fmt, quantile| {
                    write!(fmt, "{}@{}", quantile.quantile, quantile.value)
                })
            }
            MetricValue::Sketch { sketch } => {
                let quantiles = [0.5, 0.75, 0.9, 0.99]
                    .iter()
                    .map(|q| Quantile {
                        quantile: *q,
                        value: 0.0,
                    })
                    .collect::<Vec<_>>();

                match sketch {
                    MetricSketch::AgentDDSketch(ddsketch) => {
                        write!(
                            fmt,
                            "count={} sum={:?} min={:?} max={:?} avg={:?} ",
                            ddsketch.count(),
                            ddsketch.sum(),
                            ddsketch.min(),
                            ddsketch.max(),
                            ddsketch.avg()
                        )?;
                        write_list(fmt, " ", quantiles, |fmt, q| {
                            write!(
                                fmt,
                                "{}={:?}",
                                q.to_percentile_string(),
                                ddsketch.quantile(q.quantile)
                            )
                        })
                    }
                }
            }
        }
    }
}

impl From<AgentDDSketch> for MetricValue {
    fn from(ddsketch: AgentDDSketch) -> Self {
        MetricValue::Sketch {
            sketch: MetricSketch::AgentDDSketch(ddsketch),
        }
    }
}

// Currently, VRL can only read the type of the value and doesn't consider any actual metric values.
#[cfg(feature = "vrl")]
impl From<MetricValue> for vrl::value::Value {
    fn from(value: MetricValue) -> Self {
        value.as_name().into()
    }
}

/// Type of statistics to generate for a distribution.
#[configurable_component]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd)]
#[serde(rename_all = "snake_case")]
pub enum StatisticKind {
    /// A histogram representation.
    Histogram,

    /// Corresponds to Datadog's Distribution Metric
    /// <https://docs.datadoghq.com/developers/metrics/types/?tab=distribution#definition>
    Summary,
}

/// A generalized metrics sketch.
#[configurable_component]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum MetricSketch {
    /// [DDSketch][ddsketch] implementation based on the [Datadog Agent][ddagent].
    ///
    /// While DDSketch has open-source implementations based on the white paper, the version used in
    /// the Datadog Agent itself is subtly different. This version is suitable for sending directly
    /// to Datadog's sketch ingest endpoint.
    ///
    /// [ddsketch]: https://www.vldb.org/pvldb/vol12/p2195-masson.pdf
    /// [ddagent]: https://github.com/DataDog/datadog-agent
    AgentDDSketch(AgentDDSketch),
}

impl MetricSketch {
    /// Returns `true` if the sketch is empty.
    pub fn is_empty(&self) -> bool {
        match self {
            MetricSketch::AgentDDSketch(ddsketch) => ddsketch.is_empty(),
        }
    }

    /// Gets the name of the sketch as a string.
    ///
    /// This maps to the name of the enum variant itself.
    pub fn as_name(&self) -> &'static str {
        match self {
            Self::AgentDDSketch(_) => "agent dd sketch",
        }
    }
}

impl ByteSizeOf for MetricSketch {
    fn allocated_bytes(&self) -> usize {
        match self {
            Self::AgentDDSketch(ddsketch) => ddsketch.allocated_bytes(),
        }
    }
}

// Currently, VRL can only read the type of the value and doesn't consider any actual metric values.
#[cfg(feature = "vrl")]
impl From<MetricSketch> for vrl::value::Value {
    fn from(value: MetricSketch) -> Self {
        value.as_name().into()
    }
}

/// A single observation.
#[configurable_component]
#[derive(Clone, Copy, Debug)]
pub struct Sample {
    /// The value of the observation.
    pub value: f64,

    /// The rate at which the value was observed.
    pub rate: u32,
}

impl PartialEq for Sample {
    fn eq(&self, other: &Self) -> bool {
        self.rate == other.rate && float_eq(self.value, other.value)
    }
}

impl ByteSizeOf for Sample {
    fn allocated_bytes(&self) -> usize {
        0
    }
}

/// Custom serialization function which converts special `f64` values to strings.
/// Non-special values are serialized as numbers.
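///
/// # Example
///
/// An illustrative sketch of the resulting representation, assuming a JSON serializer such as
/// `serde_json` (not compiled as a doctest):
///
/// ```ignore
/// // `Bucket::upper_limit` uses this function via `#[serde(serialize_with = "serialize_f64")]`.
/// let bucket = Bucket { upper_limit: f64::INFINITY, count: 0 };
/// assert_eq!(
///     serde_json::to_string(&bucket).unwrap(),
///     r#"{"upper_limit":"inf","count":0}"#
/// );
/// ```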
#[allow(clippy::trivially_copy_pass_by_ref)]
fn serialize_f64<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    if value.is_infinite() {
        serializer.serialize_str(if *value > 0.0 { INFINITY } else { NEG_INFINITY })
    } else if value.is_nan() {
        serializer.serialize_str(NAN)
    } else {
        serializer.serialize_f64(*value)
    }
}

/// Custom deserialization function for handling special f64 values.
fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
where
    D: Deserializer<'de>,
{
    struct UpperLimitVisitor;

    impl de::Visitor<'_> for UpperLimitVisitor {
        type Value = f64;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a number or a special string value")
        }

        fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
            Ok(value)
        }

        fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
            match value {
                NAN => Ok(f64::NAN),
                INFINITY => Ok(f64::INFINITY),
                NEG_INFINITY => Ok(f64::NEG_INFINITY),
                _ => Err(E::custom("unsupported string value")),
            }
        }
    }

    deserializer.deserialize_any(UpperLimitVisitor)
}

/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
    /// The upper limit of values in the bucket.
    #[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
    pub upper_limit: f64,

    /// The number of values tracked in this bucket.
    pub count: u64,
}

impl PartialEq for Bucket {
    fn eq(&self, other: &Self) -> bool {
        self.count == other.count && float_eq(self.upper_limit, other.upper_limit)
    }
}

impl ByteSizeOf for Bucket {
    fn allocated_bytes(&self) -> usize {
        0
    }
}

/// A single quantile observation.
///
/// Quantiles themselves are "cut points dividing the range of a probability distribution into
/// continuous intervals with equal probabilities". [[1][quantiles_wikipedia]].
///
/// We use quantiles to measure the value along these probability distributions for representing
/// client-side aggregations of distributions, which represent a collection of observations over a
/// specific time window.
///
/// In general, we typically use the term "quantile" to represent the concept of _percentiles_,
/// which deal with whole integers -- 0, 1, 2, .., 99, 100 -- even though quantiles are
/// floating-point numbers and can represent higher-precision cut points, such as 0.9999, or the
/// 99.99th percentile.
///
/// [quantiles_wikipedia]: https://en.wikipedia.org/wiki/Quantile
#[configurable_component]
#[derive(Clone, Copy, Debug)]
pub struct Quantile {
    /// The value of the quantile.
    ///
    /// This value must be between 0.0 and 1.0, inclusive.
    pub quantile: f64,

    /// The estimated value of the given quantile within the probability distribution.
    pub value: f64,
}

impl PartialEq for Quantile {
    fn eq(&self, other: &Self) -> bool {
        float_eq(self.quantile, other.quantile) && float_eq(self.value, other.value)
    }
}

impl Quantile {
    /// Renders this quantile as a string, scaled to be a percentile.
    ///
    /// Up to four significant digits are maintained, and the resulting string will not contain a decimal point.
    ///
    /// For example, a quantile of 0.25, which represents a percentile of 25, will be rendered as "25", and a quantile
    /// of 0.9999, which represents a percentile of 99.99, will be rendered as "9999". A quantile of 0.99999, which
    /// represents a percentile of 99.999, would also be rendered as "9999", though.
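    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let quarter = Quantile { quantile: 0.25, value: 0.0 };
    /// assert_eq!(quarter.to_percentile_string(), "25");
    ///
    /// let four_nines = Quantile { quantile: 0.9999, value: 0.0 };
    /// assert_eq!(four_nines.to_percentile_string(), "9999");
    /// ```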
    pub fn to_percentile_string(&self) -> String {
        let clamped = self.quantile.clamp(0.0, 1.0) * 100.0;
        clamped
            .to_string()
            .chars()
            .take(5)
            .filter(|c| *c != '.')
            .collect()
    }

    /// Renders this quantile as a string.
    ///
    /// Up to four significant digits are maintained.
    ///
    /// For example, a quantile of 0.25 will be rendered as "0.25", and a quantile of 0.9999 will be rendered as
    /// "0.9999", but a quantile of 0.99999 will be rendered as "0.9999".
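    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let q = Quantile { quantile: 0.9999, value: 0.0 };
    /// assert_eq!(q.to_quantile_string(), "0.9999");
    /// ```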
    pub fn to_quantile_string(&self) -> String {
        let clamped = self.quantile.clamp(0.0, 1.0);
        clamped.to_string().chars().take(6).collect()
    }
}

impl ByteSizeOf for Quantile {
    fn allocated_bytes(&self) -> usize {
        0
    }
}