vector_core/event/metric/
value.rs

1use core::fmt;
2use std::collections::BTreeSet;
3
4use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
5use vector_common::byte_size_of::ByteSizeOf;
6use vector_config::configurable_component;
7
8use super::{samples_to_buckets, write_list, write_word};
9use crate::{float_eq, metrics::AgentDDSketch};
10
// String representations used when (de)serializing non-finite `f64` values,
// which have no native representation in formats like JSON. Consumed by
// `serialize_f64`/`deserialize_f64` below.
const INFINITY: &str = "inf";
const NEG_INFINITY: &str = "-inf";
const NAN: &str = "NaN";
14
/// Metric value.
#[configurable_component]
#[derive(Clone, Debug)]
#[serde(rename_all = "snake_case")]
/// Container for the actual value of a metric.
pub enum MetricValue {
    /// A cumulative numerical value that can only increase or be reset to zero.
    Counter {
        /// The value of the counter.
        value: f64,
    },

    /// A single numerical value that can arbitrarily go up and down.
    Gauge {
        /// The value of the gauge.
        value: f64,
    },

    /// A set of (unordered) unique values for a key.
    Set {
        /// The values in the set.
        values: BTreeSet<String>,
    },

    /// A set of observations without any aggregation or sampling.
    Distribution {
        /// The observed values within this distribution.
        samples: Vec<Sample>,

        /// The type of statistics to derive for this distribution.
        statistic: StatisticKind,
    },

    /// A set of observations which are counted into buckets.
    ///
    /// It also contains the total count of all observations and their sum to allow calculating the mean.
    AggregatedHistogram {
        /// The buckets within this histogram.
        buckets: Vec<Bucket>,

        /// The total number of observations contained within this histogram.
        count: u64,

        /// The sum of all observations contained within this histogram.
        sum: f64,
    },

    /// A set of observations which are represented by quantiles.
    ///
    /// Each quantile contains the upper value of the quantile (0 <= φ <= 1). It also contains the total count of all
    /// observations and their sum to allow calculating the mean.
    AggregatedSummary {
        /// The quantiles measured from this summary.
        quantiles: Vec<Quantile>,

        /// The total number of observations contained within this summary.
        count: u64,

        /// The sum of all observations contained within this summary.
        sum: f64,
    },

    /// A data structure that can answer questions about the cumulative distribution of the contained samples in
    /// space-efficient way.
    ///
    /// Sketches represent the data in a way that queries over it have bounded error guarantees without needing to hold
    /// every single sample in memory. They are also, typically, able to be merged with other sketches of the same type
    /// such that client-side _and_ server-side aggregation can be accomplished without loss of accuracy in the queries.
    Sketch {
        #[configurable(derived)]
        sketch: MetricSketch,
    },
}
88
impl MetricValue {
    /// Returns `true` if the value is empty.
    ///
    /// Emptiness is dictated by whether or not the value has any samples or measurements present. Consequently, scalar
    /// values (counter, gauge) are never considered empty.
    pub fn is_empty(&self) -> bool {
        match self {
            MetricValue::Counter { .. } | MetricValue::Gauge { .. } => false,
            MetricValue::Set { values } => values.is_empty(),
            MetricValue::Distribution { samples, .. } => samples.is_empty(),
            // A histogram/summary with a zero observation count is considered empty even if it
            // still carries its bucket/quantile layout (e.g. after `zero()`).
            MetricValue::AggregatedSummary { count, .. }
            | MetricValue::AggregatedHistogram { count, .. } => *count == 0,
            MetricValue::Sketch { sketch } => sketch.is_empty(),
        }
    }

    /// Gets the name of this value as a string.
    ///
    /// This maps to the name of the enum variant itself.
    pub fn as_name(&self) -> &'static str {
        match self {
            Self::Counter { .. } => "counter",
            Self::Gauge { .. } => "gauge",
            Self::Set { .. } => "set",
            Self::Distribution { .. } => "distribution",
            Self::AggregatedHistogram { .. } => "aggregated histogram",
            Self::AggregatedSummary { .. } => "aggregated summary",
            // Sketches delegate to the inner sketch type for a more specific name.
            Self::Sketch { sketch } => sketch.as_name(),
        }
    }

    /// Converts a distribution to an aggregated histogram.
    ///
    /// Histogram bucket bounds are based on `buckets`, where the value is the upper bound of the bucket.  Samples will
    /// thus be ordered in a "less than" fashion: if the given sample is less than or equal to a given bucket's upper
    /// bound, it will be counted towards that bucket at the given sample rate.
    ///
    /// If this value is not a distribution, then `None` is returned.  Otherwise,
    /// `Some(MetricValue::AggregatedHistogram)` is returned.
    pub fn distribution_to_agg_histogram(&self, buckets: &[f64]) -> Option<MetricValue> {
        match self {
            MetricValue::Distribution { samples, .. } => {
                // The heavy lifting (bucketing samples, deriving count/sum) lives in the
                // shared `samples_to_buckets` helper.
                let (buckets, count, sum) = samples_to_buckets(samples, buckets);

                Some(MetricValue::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                })
            }
            _ => None,
        }
    }

    /// Converts a distribution to a sketch.
    ///
    /// This conversion specifically uses the `AgentDDSketch` sketch variant, in the default configuration that matches
    /// the Datadog Agent, parameter-wise.
    ///
    /// If this value is not a distribution, then `None` is returned.  Otherwise, `Some(MetricValue::Sketch)` is
    /// returned.
    pub fn distribution_to_sketch(&self) -> Option<MetricValue> {
        match self {
            MetricValue::Distribution { samples, .. } => {
                let mut sketch = AgentDDSketch::with_agent_defaults();
                for sample in samples {
                    // The sample rate acts as an insertion count: a rate of N inserts the
                    // value N times into the sketch.
                    sketch.insert_n(sample.value, sample.rate);
                }

                Some(MetricValue::Sketch {
                    sketch: MetricSketch::AgentDDSketch(sketch),
                })
            }
            _ => None,
        }
    }

    /// Zeroes out all the values contained in this value.
    ///
    /// This keeps all the bucket/value vectors for the histogram and summary metric types intact while zeroing the
    /// counts. Distribution metrics are emptied of all their values.
    pub fn zero(&mut self) {
        match self {
            Self::Counter { value } | Self::Gauge { value } => *value = 0.0,
            Self::Set { values } => values.clear(),
            Self::Distribution { samples, .. } => samples.clear(),
            Self::AggregatedHistogram {
                buckets,
                count,
                sum,
            } => {
                // Bucket upper limits are preserved; only the per-bucket counts are reset.
                for bucket in buckets {
                    bucket.count = 0;
                }
                *count = 0;
                *sum = 0.0;
            }
            Self::AggregatedSummary {
                quantiles,
                sum,
                count,
            } => {
                // Quantile φ values are preserved; only their estimated values are reset.
                for quantile in quantiles {
                    quantile.value = 0.0;
                }
                *count = 0;
                *sum = 0.0;
            }
            Self::Sketch { sketch } => match sketch {
                MetricSketch::AgentDDSketch(ddsketch) => {
                    ddsketch.clear();
                }
            },
        }
    }

    /// Adds another value to this one.
    ///
    /// If the other value is not the same type, or if they are but their defining characteristics of the value are
    /// different (i.e. aggregated histograms with different bucket layouts), then `false` is returned.  Otherwise,
    /// `true` is returned.
    #[must_use]
    pub fn add(&mut self, other: &Self) -> bool {
        match (self, other) {
            // Both sides must be the same variant, so Counter+Gauge cross-addition is
            // impossible here despite the shared arm.
            (Self::Counter { value }, Self::Counter { value: value2 })
            | (Self::Gauge { value }, Self::Gauge { value: value2 }) => {
                *value += value2;
                true
            }
            (Self::Set { values }, Self::Set { values: values2 }) => {
                values.extend(values2.iter().map(Into::into));
                true
            }
            // Distributions only merge when they derive the same statistic kind.
            (
                Self::Distribution {
                    samples,
                    statistic: statistic_a,
                },
                Self::Distribution {
                    samples: samples2,
                    statistic: statistic_b,
                },
            ) if statistic_a == statistic_b => {
                samples.extend_from_slice(samples2);
                true
            }
            // Histograms only merge when their bucket layouts (length and upper limits)
            // are identical; otherwise summing per-bucket counts would be meaningless.
            (
                Self::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                },
                Self::AggregatedHistogram {
                    buckets: buckets2,
                    count: count2,
                    sum: sum2,
                },
            ) if buckets.len() == buckets2.len()
                && buckets
                    .iter()
                    .zip(buckets2.iter())
                    .all(|(b1, b2)| b1.upper_limit == b2.upper_limit) =>
            {
                for (b1, b2) in buckets.iter_mut().zip(buckets2) {
                    b1.count += b2.count;
                }
                *count += count2;
                *sum += sum2;
                true
            }
            // NOTE(review): if `merge` fails we report `false`; whether it leaves `self`
            // untouched on error depends on `AgentDDSketch::merge` — confirm there.
            (Self::Sketch { sketch }, Self::Sketch { sketch: sketch2 }) => {
                match (sketch, sketch2) {
                    (
                        MetricSketch::AgentDDSketch(ddsketch),
                        MetricSketch::AgentDDSketch(ddsketch2),
                    ) => ddsketch.merge(ddsketch2).is_ok(),
                }
            }
            // Mismatched variants (including AggregatedSummary, which has no meaningful
            // addition) cannot be added.
            _ => false,
        }
    }

    /// Subtracts another value from this one.
    ///
    /// If the other value is not the same type, or if they are but their defining characteristics of the value are
    /// different (i.e. aggregated histograms with different bucket layouts), then `false` is returned.  Otherwise,
    /// `true` is returned.
    #[must_use]
    pub fn subtract(&mut self, other: &Self) -> bool {
        match (self, other) {
            // Counters are monotonic, they should _never_ go backwards unless reset to 0 due to
            // process restart, etc.  Thus, being able to generate negative deltas would violate
            // that.  Whether a counter is reset to 0, or if it incorrectly warps to a previous
            // value, it doesn't matter: we're going to reinitialize it.
            (Self::Counter { value }, Self::Counter { value: value2 }) if *value >= *value2 => {
                *value -= value2;
                true
            }
            (Self::Gauge { value }, Self::Gauge { value: value2 }) => {
                *value -= value2;
                true
            }
            (Self::Set { values }, Self::Set { values: values2 }) => {
                for item in values2 {
                    values.remove(item);
                }
                true
            }
            (
                Self::Distribution {
                    samples,
                    statistic: statistic_a,
                },
                Self::Distribution {
                    samples: samples2,
                    statistic: statistic_b,
                },
            ) if statistic_a == statistic_b => {
                // This is an ugly algorithm, but the use of a HashSet or equivalent is complicated by neither Hash nor
                // Eq being implemented for the f64 part of Sample.
                //
                // TODO: This logic does not work if a value is repeated within a distribution. For example, if the
                // current distribution is [1, 2, 3, 1, 2, 3] and the previous distribution is [1, 2, 3], this would
                // yield a result of [].
                //
                // The only reasonable way we could provide subtraction, I believe, is if we required the ordering to
                // stay the same, such that we would just take the samples from the non-overlapping region as the delta.
                // In the above example: length of samples from `other` would be 3, so delta would be
                // `self.samples[3..]`.
                *samples = samples
                    .iter()
                    .copied()
                    .filter(|sample| samples2.iter().all(|sample2| sample != sample2))
                    .collect();
                true
            }
            // Aggregated histograms, at least in Prometheus, are also typically monotonic in terms of growth.
            // Subtracting them in reverse -- e.g.. subtracting a newer one with more values from an older one with
            // fewer values -- would not make sense, since buckets should never be able to have negative counts... and
            // it's not clear that a saturating subtraction is technically correct either.  Instead, we avoid having to
            // make that decision, and simply force the metric to be reinitialized.
            //
            // We also check that each individual bucket count is >= the corresponding count in the
            // other histogram, since bucket value redistribution (e.g., after a source restart or
            // cache eviction) can cause individual buckets to have lower counts even when the total
            // count is higher. Failing here leads to the metric being reinitialized.
            (
                Self::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                },
                Self::AggregatedHistogram {
                    buckets: buckets2,
                    count: count2,
                    sum: sum2,
                },
            ) if *count >= *count2
                && buckets.len() == buckets2.len()
                && buckets
                    .iter()
                    .zip(buckets2.iter())
                    .all(|(b1, b2)| b1.upper_limit == b2.upper_limit && b1.count >= b2.count) =>
            {
                for (b1, b2) in buckets.iter_mut().zip(buckets2) {
                    b1.count -= b2.count;
                }
                *count -= count2;
                *sum -= sum2;
                true
            }
            // Summaries and sketches have no meaningful subtraction; mismatched variants
            // (or failed guards above) force reinitialization by returning `false`.
            _ => false,
        }
    }
}
364
365impl ByteSizeOf for MetricValue {
366    fn allocated_bytes(&self) -> usize {
367        match self {
368            Self::Counter { .. } | Self::Gauge { .. } => 0,
369            Self::Set { values } => values.allocated_bytes(),
370            Self::Distribution { samples, .. } => samples.allocated_bytes(),
371            Self::AggregatedHistogram { buckets, .. } => buckets.allocated_bytes(),
372            Self::AggregatedSummary { quantiles, .. } => quantiles.allocated_bytes(),
373            Self::Sketch { sketch } => sketch.allocated_bytes(),
374        }
375    }
376}
377
378impl PartialEq for MetricValue {
379    fn eq(&self, other: &Self) -> bool {
380        match (self, other) {
381            (Self::Counter { value: l_value }, Self::Counter { value: r_value })
382            | (Self::Gauge { value: l_value }, Self::Gauge { value: r_value }) => {
383                float_eq(*l_value, *r_value)
384            }
385            (Self::Set { values: l_values }, Self::Set { values: r_values }) => {
386                l_values == r_values
387            }
388            (
389                Self::Distribution {
390                    samples: l_samples,
391                    statistic: l_statistic,
392                },
393                Self::Distribution {
394                    samples: r_samples,
395                    statistic: r_statistic,
396                },
397            ) => l_samples == r_samples && l_statistic == r_statistic,
398            (
399                Self::AggregatedHistogram {
400                    buckets: l_buckets,
401                    count: l_count,
402                    sum: l_sum,
403                },
404                Self::AggregatedHistogram {
405                    buckets: r_buckets,
406                    count: r_count,
407                    sum: r_sum,
408                },
409            ) => l_buckets == r_buckets && l_count == r_count && float_eq(*l_sum, *r_sum),
410            (
411                Self::AggregatedSummary {
412                    quantiles: l_quantiles,
413                    count: l_count,
414                    sum: l_sum,
415                },
416                Self::AggregatedSummary {
417                    quantiles: r_quantiles,
418                    count: r_count,
419                    sum: r_sum,
420                },
421            ) => l_quantiles == r_quantiles && l_count == r_count && float_eq(*l_sum, *r_sum),
422            (Self::Sketch { sketch: l_sketch }, Self::Sketch { sketch: r_sketch }) => {
423                l_sketch == r_sketch
424            }
425            _ => false,
426        }
427    }
428}
429
impl fmt::Display for MetricValue {
    // Renders the value in a compact, human-readable, space-separated form.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self {
            MetricValue::Counter { value } | MetricValue::Gauge { value } => {
                write!(fmt, "{value}")
            }
            MetricValue::Set { values } => {
                write_list(fmt, " ", values.iter(), |fmt, value| write_word(fmt, value))
            }
            // Rendered as `<statistic> <rate>@<value> ...`.
            MetricValue::Distribution { samples, statistic } => {
                write!(
                    fmt,
                    "{} ",
                    match statistic {
                        StatisticKind::Histogram => "histogram",
                        StatisticKind::Summary => "summary",
                    }
                )?;
                write_list(fmt, " ", samples, |fmt, sample| {
                    write!(fmt, "{}@{}", sample.rate, sample.value)
                })
            }
            // Rendered as `count=N sum=S <count>@<upper_limit> ...`.
            MetricValue::AggregatedHistogram {
                buckets,
                count,
                sum,
            } => {
                write!(fmt, "count={count} sum={sum} ")?;
                write_list(fmt, " ", buckets, |fmt, bucket| {
                    write!(fmt, "{}@{}", bucket.count, bucket.upper_limit)
                })
            }
            // Rendered as `count=N sum=S <quantile>@<value> ...`.
            MetricValue::AggregatedSummary {
                quantiles,
                count,
                sum,
            } => {
                write!(fmt, "count={count} sum={sum} ")?;
                write_list(fmt, " ", quantiles, |fmt, quantile| {
                    write!(fmt, "{}@{}", quantile.quantile, quantile.value)
                })
            }
            MetricValue::Sketch { sketch } => {
                // Build placeholder `Quantile`s for a fixed set of quantiles; only the
                // `quantile` field is read below (`value: 0.0` is a dummy) — the actual
                // estimates are queried from the sketch itself.
                let quantiles = [0.5, 0.75, 0.9, 0.99]
                    .iter()
                    .map(|q| Quantile {
                        quantile: *q,
                        value: 0.0,
                    })
                    .collect::<Vec<_>>();

                match sketch {
                    MetricSketch::AgentDDSketch(ddsketch) => {
                        write!(
                            fmt,
                            "count={} sum={:?} min={:?} max={:?} avg={:?} ",
                            ddsketch.count(),
                            ddsketch.sum(),
                            ddsketch.min(),
                            ddsketch.max(),
                            ddsketch.avg()
                        )?;
                        write_list(fmt, " ", quantiles, |fmt, q| {
                            write!(
                                fmt,
                                "{}={:?}",
                                q.to_percentile_string(),
                                ddsketch.quantile(q.quantile)
                            )
                        })
                    }
                }
            }
        }
    }
}
506
507impl From<AgentDDSketch> for MetricValue {
508    fn from(ddsketch: AgentDDSketch) -> Self {
509        MetricValue::Sketch {
510            sketch: MetricSketch::AgentDDSketch(ddsketch),
511        }
512    }
513}
514
// Currently, VRL can only read the type of the value and doesn't consider any actual metric values.
#[cfg(feature = "vrl")]
impl From<MetricValue> for vrl::value::Value {
    fn from(value: MetricValue) -> Self {
        // Expose only the variant name (e.g. "counter") to VRL.
        Self::from(value.as_name())
    }
}
522
/// Type of statistics to generate for a distribution.
#[configurable_component]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd)]
#[serde(rename_all = "snake_case")]
pub enum StatisticKind {
    /// A histogram representation.
    Histogram,

    /// Corresponds to Datadog's Distribution Metric
    /// <https://docs.datadoghq.com/developers/metrics/types/?tab=distribution#definition>
    Summary,
}
535
/// A generalized metrics sketch.
///
/// Currently a single-variant enum; kept as an enum so additional sketch
/// implementations can be added without breaking the `MetricValue::Sketch` shape.
#[configurable_component]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum MetricSketch {
    /// [DDSketch][ddsketch] implementation based on the [Datadog Agent][ddagent].
    ///
    /// While `DDSketch` has open-source implementations based on the white paper, the version used in
    /// the Datadog Agent itself is subtly different. This version is suitable for sending directly
    /// to Datadog's sketch ingest endpoint.
    ///
    /// [ddsketch]: https://www.vldb.org/pvldb/vol12/p2195-masson.pdf
    /// [ddagent]: https://github.com/DataDog/datadog-agent
    AgentDDSketch(AgentDDSketch),
}
550
551impl MetricSketch {
552    /// Returns `true` if the sketch is empty.
553    pub fn is_empty(&self) -> bool {
554        match self {
555            MetricSketch::AgentDDSketch(ddsketch) => ddsketch.is_empty(),
556        }
557    }
558
559    /// Gets the name of the sketch as a string.
560    ///
561    /// This maps to the name of the enum variant itself.
562    pub fn as_name(&self) -> &'static str {
563        match self {
564            Self::AgentDDSketch(_) => "agent dd sketch",
565        }
566    }
567}
568
569impl ByteSizeOf for MetricSketch {
570    fn allocated_bytes(&self) -> usize {
571        match self {
572            Self::AgentDDSketch(ddsketch) => ddsketch.allocated_bytes(),
573        }
574    }
575}
576
// Currently, VRL can only read the type of the value and doesn't consider any actual metric values.
#[cfg(feature = "vrl")]
impl From<MetricSketch> for vrl::value::Value {
    fn from(value: MetricSketch) -> Self {
        // Expose only the sketch's variant name to VRL.
        Self::from(value.as_name())
    }
}
584
/// A single observation.
#[configurable_component]
#[derive(Clone, Copy, Debug)]
pub struct Sample {
    /// The value of the observation.
    pub value: f64,

    /// The rate at which the value was observed.
    ///
    /// Acts as a repeat count when aggregating: e.g. `distribution_to_sketch`
    /// inserts `value` into the sketch `rate` times.
    pub rate: u32,
}
595
596impl PartialEq for Sample {
597    fn eq(&self, other: &Self) -> bool {
598        self.rate == other.rate && float_eq(self.value, other.value)
599    }
600}
601
impl ByteSizeOf for Sample {
    // `Sample` is `Copy` and owns no heap data, so it contributes no allocated bytes.
    fn allocated_bytes(&self) -> usize {
        0
    }
}
607
608/// Custom serialization function which converts special `f64` values to strings.
609/// Non-special values are serialized as numbers.
610#[allow(clippy::trivially_copy_pass_by_ref)]
611fn serialize_f64<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
612where
613    S: Serializer,
614{
615    if value.is_infinite() {
616        serializer.serialize_str(if *value > 0.0 { INFINITY } else { NEG_INFINITY })
617    } else if value.is_nan() {
618        serializer.serialize_str(NAN)
619    } else {
620        serializer.serialize_f64(*value)
621    }
622}
623
624/// Custom deserialization function for handling special f64 values.
625fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
626where
627    D: Deserializer<'de>,
628{
629    struct UpperLimitVisitor;
630
631    impl de::Visitor<'_> for UpperLimitVisitor {
632        type Value = f64;
633
634        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
635            formatter.write_str("a number or a special string value")
636        }
637
638        fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
639            Ok(value)
640        }
641
642        fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
643            match value {
644                NAN => Ok(f64::NAN),
645                INFINITY => Ok(f64::INFINITY),
646                NEG_INFINITY => Ok(f64::NEG_INFINITY),
647                _ => Err(E::custom("unsupported string value")),
648            }
649        }
650    }
651
652    deserializer.deserialize_any(UpperLimitVisitor)
653}
654
/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
    /// The upper limit of values in the bucket.
    // Custom (de)serializers handle non-finite values (`inf`, `-inf`, `NaN`),
    // which cannot be represented natively in formats like JSON.
    #[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
    pub upper_limit: f64,

    /// The number of values tracked in this bucket.
    pub count: u64,
}
669
670impl PartialEq for Bucket {
671    fn eq(&self, other: &Self) -> bool {
672        self.count == other.count && float_eq(self.upper_limit, other.upper_limit)
673    }
674}
675
impl ByteSizeOf for Bucket {
    // `Bucket` is `Copy` and owns no heap data, so it contributes no allocated bytes.
    fn allocated_bytes(&self) -> usize {
        0
    }
}
681
/// A single quantile observation.
///
/// Quantiles themselves are "cut points dividing the range of a probability distribution into
/// continuous intervals with equal probabilities". [[1][quantiles_wikipedia]].
///
/// We use quantiles to measure the value along these probability distributions for representing
/// client-side aggregations of distributions, which represent a collection of observations over a
/// specific time window.
///
/// In general, we typically use the term "quantile" to represent the concept of _percentiles_,
/// which deal with whole integers -- 0, 1, 2, .., 99, 100 -- even though quantiles are
/// floating-point numbers and can represent higher-precision cut points, such as 0.9999, or the
/// 99.99th percentile.
///
/// [quantiles_wikipedia]: https://en.wikipedia.org/wiki/Quantile
#[configurable_component]
#[derive(Clone, Copy, Debug)]
pub struct Quantile {
    /// The value of the quantile (φ).
    ///
    /// This value must be between 0.0 and 1.0, inclusive.
    pub quantile: f64,

    /// The estimated value of the given quantile within the probability distribution.
    pub value: f64,
}
708
709impl PartialEq for Quantile {
710    fn eq(&self, other: &Self) -> bool {
711        float_eq(self.quantile, other.quantile) && float_eq(self.value, other.value)
712    }
713}
714
715impl Quantile {
716    /// Renders this quantile as a string, scaled to be a percentile.
717    ///
718    /// Up to four significant digits are maintained, but the resulting string will be without a decimal point.
719    ///
720    /// For example, a quantile of 0.25, which represents a percentile of 25, will be rendered as "25" and a quantile of
721    /// 0.9999, which represents a percentile of 99.99, will be rendered as "9999". A quantile of 0.99999, which
722    /// represents a percentile of 99.999, would also be rendered as "9999", though.
723    pub fn to_percentile_string(&self) -> String {
724        let clamped = self.quantile.clamp(0.0, 1.0) * 100.0;
725        clamped
726            .to_string()
727            .chars()
728            .take(5)
729            .filter(|c| *c != '.')
730            .collect()
731    }
732
733    /// Renders this quantile as a string.
734    ///
735    /// Up to four significant digits are maintained.
736    ///
737    /// For example, a quantile of 0.25 will be rendered as "0.25", and a quantile of 0.9999 will be rendered as
738    /// "0.9999", but a quantile of 0.99999 will be rendered as "0.9999".
739    pub fn to_quantile_string(&self) -> String {
740        let clamped = self.quantile.clamp(0.0, 1.0);
741        clamped.to_string().chars().take(6).collect()
742    }
743}
744
impl ByteSizeOf for Quantile {
    // `Quantile` is `Copy` and owns no heap data, so it contributes no allocated bytes.
    fn allocated_bytes(&self) -> usize {
        0
    }
}