vector_core/event/metric/value.rs

use core::fmt;
use std::collections::BTreeSet;

use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use vector_common::byte_size_of::ByteSizeOf;
use vector_config::configurable_component;

use super::{samples_to_buckets, write_list, write_word};
use crate::{float_eq, metrics::AgentDDSketch};

const INFINITY: &str = "inf";
const NEG_INFINITY: &str = "-inf";
const NAN: &str = "NaN";

/// Container for the actual value of a metric.
#[configurable_component]
#[derive(Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum MetricValue {
    /// A cumulative numerical value that can only increase or be reset to zero.
    Counter {
        /// The value of the counter.
        value: f64,
    },

    /// A single numerical value that can arbitrarily go up and down.
    Gauge {
        /// The value of the gauge.
        value: f64,
    },

    /// A set of (unordered) unique values for a key.
    Set {
        /// The values in the set.
        values: BTreeSet<String>,
    },

    /// A set of observations without any aggregation or sampling.
    Distribution {
        /// The observed values within this distribution.
        samples: Vec<Sample>,

        /// The type of statistics to derive for this distribution.
        statistic: StatisticKind,
    },

    /// A set of observations which are counted into buckets.
    ///
    /// It also contains the total count of all observations and their sum to allow calculating the mean.
    AggregatedHistogram {
        /// The buckets within this histogram.
        buckets: Vec<Bucket>,

        /// The total number of observations contained within this histogram.
        count: u64,

        /// The sum of all observations contained within this histogram.
        sum: f64,
    },

    /// A set of observations which are represented by quantiles.
    ///
    /// Each quantile contains the upper value of the quantile (0 <= φ <= 1). It also contains the total count of all
    /// observations and their sum to allow calculating the mean.
    AggregatedSummary {
        /// The quantiles measured from this summary.
        quantiles: Vec<Quantile>,

        /// The total number of observations contained within this summary.
        count: u64,

        /// The sum of all observations contained within this summary.
        sum: f64,
    },

    /// A data structure that can answer questions about the cumulative distribution of the contained samples in
    /// a space-efficient way.
    ///
    /// Sketches represent the data in a way that queries over it have bounded error guarantees without needing to hold
    /// every single sample in memory. They are also, typically, able to be merged with other sketches of the same type
    /// such that client-side _and_ server-side aggregation can be accomplished without loss of accuracy in the queries.
    Sketch {
        #[configurable(derived)]
        sketch: MetricSketch,
    },
}

impl MetricValue {
    /// Returns `true` if the value is empty.
    ///
    /// Emptiness is dictated by whether or not the value has any samples or measurements present. Consequently, scalar
    /// values (counter, gauge) are never considered empty.
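    ///
    /// # Example
    ///
    /// An illustrative sketch of the emptiness rules (the `vector_core::event::metric` import
    /// path is an assumption, so this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::{MetricValue, StatisticKind};
    ///
    /// // Scalar values are never empty, even at zero.
    /// assert!(!MetricValue::Counter { value: 0.0 }.is_empty());
    ///
    /// // A distribution with no samples is empty.
    /// let dist = MetricValue::Distribution {
    ///     samples: vec![],
    ///     statistic: StatisticKind::Histogram,
    /// };
    /// assert!(dist.is_empty());
    /// ```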
    pub fn is_empty(&self) -> bool {
        match self {
            MetricValue::Counter { .. } | MetricValue::Gauge { .. } => false,
            MetricValue::Set { values } => values.is_empty(),
            MetricValue::Distribution { samples, .. } => samples.is_empty(),
            MetricValue::AggregatedSummary { count, .. }
            | MetricValue::AggregatedHistogram { count, .. } => *count == 0,
            MetricValue::Sketch { sketch } => sketch.is_empty(),
        }
    }

    /// Gets the name of this value as a string.
    ///
    /// This maps to the name of the enum variant itself.
    pub fn as_name(&self) -> &'static str {
        match self {
            Self::Counter { .. } => "counter",
            Self::Gauge { .. } => "gauge",
            Self::Set { .. } => "set",
            Self::Distribution { .. } => "distribution",
            Self::AggregatedHistogram { .. } => "aggregated histogram",
            Self::AggregatedSummary { .. } => "aggregated summary",
            Self::Sketch { sketch } => sketch.as_name(),
        }
    }

    /// Converts a distribution to an aggregated histogram.
    ///
    /// Histogram bucket bounds are based on `buckets`, where the value is the upper bound of the bucket.  Samples will
    /// thus be ordered in a "less than" fashion: if the given sample is less than or equal to a given bucket's upper
    /// bound, it will be counted towards that bucket at the given sample rate.
    ///
    /// If this value is not a distribution, then `None` is returned.  Otherwise,
    /// `Some(MetricValue::AggregatedHistogram)` is returned.
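    ///
    /// # Example
    ///
    /// A minimal sketch of the bucketing behavior (the `vector_core::event::metric` import path
    /// is an assumption, so this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::{MetricValue, Sample, StatisticKind};
    ///
    /// let distribution = MetricValue::Distribution {
    ///     samples: vec![
    ///         Sample { value: 0.5, rate: 1 },
    ///         Sample { value: 4.0, rate: 2 },
    ///     ],
    ///     statistic: StatisticKind::Histogram,
    /// };
    ///
    /// // 0.5 falls into the bucket with upper limit 1.0, and 4.0 into the 5.0 bucket.
    /// let histogram = distribution.distribution_to_agg_histogram(&[1.0, 5.0]);
    /// assert!(histogram.is_some());
    ///
    /// // Non-distribution values yield `None`.
    /// assert!(MetricValue::Gauge { value: 1.0 }.distribution_to_agg_histogram(&[1.0]).is_none());
    /// ```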
    pub fn distribution_to_agg_histogram(&self, buckets: &[f64]) -> Option<MetricValue> {
        match self {
            MetricValue::Distribution { samples, .. } => {
                let (buckets, count, sum) = samples_to_buckets(samples, buckets);

                Some(MetricValue::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                })
            }
            _ => None,
        }
    }

    /// Converts a distribution to a sketch.
    ///
    /// This conversion specifically uses the `AgentDDSketch` sketch variant, in the default configuration that matches
    /// the Datadog Agent, parameter-wise.
    ///
    /// If this value is not a distribution, then `None` is returned.  Otherwise, `Some(MetricValue::Sketch)` is
    /// returned.
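    ///
    /// # Example
    ///
    /// An illustrative sketch of the conversion (the `vector_core::event::metric` import path is
    /// an assumption, so this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::{MetricValue, Sample, StatisticKind};
    ///
    /// let distribution = MetricValue::Distribution {
    ///     samples: vec![Sample { value: 1.0, rate: 1 }, Sample { value: 2.0, rate: 3 }],
    ///     statistic: StatisticKind::Summary,
    /// };
    ///
    /// // Each sample is inserted into an `AgentDDSketch` `rate` times.
    /// let sketch = distribution.distribution_to_sketch();
    /// assert!(matches!(sketch, Some(MetricValue::Sketch { .. })));
    ///
    /// // Non-distribution values yield `None`.
    /// assert!(MetricValue::Counter { value: 1.0 }.distribution_to_sketch().is_none());
    /// ```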
    pub fn distribution_to_sketch(&self) -> Option<MetricValue> {
        match self {
            MetricValue::Distribution { samples, .. } => {
                let mut sketch = AgentDDSketch::with_agent_defaults();
                for sample in samples {
                    sketch.insert_n(sample.value, sample.rate);
                }

                Some(MetricValue::Sketch {
                    sketch: MetricSketch::AgentDDSketch(sketch),
                })
            }
            _ => None,
        }
    }

    /// Zeroes out all the values contained in this value.
    ///
    /// This keeps all the bucket/value vectors for the histogram and summary metric types intact while zeroing the
    /// counts. Distribution metrics are emptied of all their values.
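    ///
    /// # Example
    ///
    /// A minimal sketch of the zeroing behavior for histograms (the `vector_core::event::metric`
    /// import path is an assumption, so this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::{Bucket, MetricValue};
    ///
    /// let mut value = MetricValue::AggregatedHistogram {
    ///     buckets: vec![Bucket { upper_limit: 1.0, count: 10 }],
    ///     count: 10,
    ///     sum: 3.5,
    /// };
    /// value.zero();
    ///
    /// // The bucket layout is preserved; only the counts and sum are reset.
    /// assert!(value.is_empty());
    /// ```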
    pub fn zero(&mut self) {
        match self {
            Self::Counter { value } | Self::Gauge { value } => *value = 0.0,
            Self::Set { values } => values.clear(),
            Self::Distribution { samples, .. } => samples.clear(),
            Self::AggregatedHistogram {
                buckets,
                count,
                sum,
            } => {
                for bucket in buckets {
                    bucket.count = 0;
                }
                *count = 0;
                *sum = 0.0;
            }
            Self::AggregatedSummary {
                quantiles,
                sum,
                count,
            } => {
                for quantile in quantiles {
                    quantile.value = 0.0;
                }
                *count = 0;
                *sum = 0.0;
            }
            Self::Sketch { sketch } => match sketch {
                MetricSketch::AgentDDSketch(ddsketch) => {
                    ddsketch.clear();
                }
            },
        }
    }

    /// Adds another value to this one.
    ///
    /// If the other value is not the same type, or if it is the same type but its defining characteristics differ
    /// (e.g. aggregated histograms with different bucket layouts), then `false` is returned.  Otherwise,
    /// `true` is returned.
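    ///
    /// # Example
    ///
    /// An illustrative sketch (the `vector_core::event::metric` import path is an assumption, so
    /// this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::MetricValue;
    ///
    /// let mut counter = MetricValue::Counter { value: 1.0 };
    ///
    /// // Adding a value of the same type succeeds and accumulates in place.
    /// assert!(counter.add(&MetricValue::Counter { value: 2.0 }));
    ///
    /// // Adding a value of a different type is rejected and leaves `counter` untouched.
    /// assert!(!counter.add(&MetricValue::Gauge { value: 5.0 }));
    /// ```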
    #[must_use]
    pub fn add(&mut self, other: &Self) -> bool {
        match (self, other) {
            (Self::Counter { value }, Self::Counter { value: value2 })
            | (Self::Gauge { value }, Self::Gauge { value: value2 }) => {
                *value += value2;
                true
            }
            (Self::Set { values }, Self::Set { values: values2 }) => {
                values.extend(values2.iter().map(Into::into));
                true
            }
            (
                Self::Distribution {
                    samples,
                    statistic: statistic_a,
                },
                Self::Distribution {
                    samples: samples2,
                    statistic: statistic_b,
                },
            ) if statistic_a == statistic_b => {
                samples.extend_from_slice(samples2);
                true
            }
            (
                Self::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                },
                Self::AggregatedHistogram {
                    buckets: buckets2,
                    count: count2,
                    sum: sum2,
                },
            ) if buckets.len() == buckets2.len()
                && buckets
                    .iter()
                    .zip(buckets2.iter())
                    .all(|(b1, b2)| b1.upper_limit == b2.upper_limit) =>
            {
                for (b1, b2) in buckets.iter_mut().zip(buckets2) {
                    b1.count += b2.count;
                }
                *count += count2;
                *sum += sum2;
                true
            }
            (Self::Sketch { sketch }, Self::Sketch { sketch: sketch2 }) => {
                match (sketch, sketch2) {
                    (
                        MetricSketch::AgentDDSketch(ddsketch),
                        MetricSketch::AgentDDSketch(ddsketch2),
                    ) => ddsketch.merge(ddsketch2).is_ok(),
                }
            }
            _ => false,
        }
    }

    /// Subtracts another value from this one.
    ///
    /// If the other value is not the same type, or if it is the same type but its defining characteristics differ
    /// (e.g. aggregated histograms with different bucket layouts), then `false` is returned.  Otherwise,
    /// `true` is returned.
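    ///
    /// # Example
    ///
    /// A minimal sketch of the monotonicity rule for counters (the `vector_core::event::metric`
    /// import path is an assumption, so this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::MetricValue;
    ///
    /// let mut counter = MetricValue::Counter { value: 10.0 };
    ///
    /// // Subtracting a smaller counter produces the delta.
    /// assert!(counter.subtract(&MetricValue::Counter { value: 4.0 }));
    ///
    /// // Subtracting a larger counter would imply a negative delta, so it is rejected and the
    /// // caller is expected to reinitialize the metric.
    /// assert!(!counter.subtract(&MetricValue::Counter { value: 100.0 }));
    /// ```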
    #[must_use]
    pub fn subtract(&mut self, other: &Self) -> bool {
        match (self, other) {
            // Counters are monotonic; they should _never_ go backwards unless reset to 0 due to
            // process restart, etc.  Thus, being able to generate negative deltas would violate
            // that.  Whether a counter is reset to 0, or if it incorrectly warps to a previous
            // value, it doesn't matter: we're going to reinitialize it.
            (Self::Counter { value }, Self::Counter { value: value2 }) if *value >= *value2 => {
                *value -= value2;
                true
            }
            (Self::Gauge { value }, Self::Gauge { value: value2 }) => {
                *value -= value2;
                true
            }
            (Self::Set { values }, Self::Set { values: values2 }) => {
                for item in values2 {
                    values.remove(item);
                }
                true
            }
            (
                Self::Distribution {
                    samples,
                    statistic: statistic_a,
                },
                Self::Distribution {
                    samples: samples2,
                    statistic: statistic_b,
                },
            ) if statistic_a == statistic_b => {
                // This is an ugly algorithm, but the use of a HashSet or equivalent is complicated by neither Hash nor
                // Eq being implemented for the f64 part of Sample.
                //
                // TODO: This logic does not work if a value is repeated within a distribution. For example, if the
                // current distribution is [1, 2, 3, 1, 2, 3] and the previous distribution is [1, 2, 3], this would
                // yield a result of [].
                //
                // The only reasonable way we could provide subtraction, I believe, is if we required the ordering to
                // stay the same, such that we would just take the samples from the non-overlapping region as the delta.
                // In the above example: length of samples from `other` would be 3, so delta would be
                // `self.samples[3..]`.
                *samples = samples
                    .iter()
                    .copied()
                    .filter(|sample| samples2.iter().all(|sample2| sample != sample2))
                    .collect();
                true
            }
            // Aggregated histograms, at least in Prometheus, are also typically monotonic in terms of growth.
            // Subtracting them in reverse -- e.g. subtracting a newer one with more values from an older one with
            // fewer values -- would not make sense, since buckets should never be able to have negative counts... and
            // it's not clear that a saturating subtraction is technically correct either.  Instead, we avoid having to
            // make that decision, and simply force the metric to be reinitialized.
            (
                Self::AggregatedHistogram {
                    buckets,
                    count,
                    sum,
                },
                Self::AggregatedHistogram {
                    buckets: buckets2,
                    count: count2,
                    sum: sum2,
                },
            ) if *count >= *count2
                && buckets.len() == buckets2.len()
                && buckets
                    .iter()
                    .zip(buckets2.iter())
                    .all(|(b1, b2)| b1.upper_limit == b2.upper_limit) =>
            {
                for (b1, b2) in buckets.iter_mut().zip(buckets2) {
                    b1.count -= b2.count;
                }
                *count -= count2;
                *sum -= sum2;
                true
            }
            _ => false,
        }
    }
}

impl ByteSizeOf for MetricValue {
    fn allocated_bytes(&self) -> usize {
        match self {
            Self::Counter { .. } | Self::Gauge { .. } => 0,
            Self::Set { values } => values.allocated_bytes(),
            Self::Distribution { samples, .. } => samples.allocated_bytes(),
            Self::AggregatedHistogram { buckets, .. } => buckets.allocated_bytes(),
            Self::AggregatedSummary { quantiles, .. } => quantiles.allocated_bytes(),
            Self::Sketch { sketch } => sketch.allocated_bytes(),
        }
    }
}

impl PartialEq for MetricValue {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Counter { value: l_value }, Self::Counter { value: r_value })
            | (Self::Gauge { value: l_value }, Self::Gauge { value: r_value }) => {
                float_eq(*l_value, *r_value)
            }
            (Self::Set { values: l_values }, Self::Set { values: r_values }) => {
                l_values == r_values
            }
            (
                Self::Distribution {
                    samples: l_samples,
                    statistic: l_statistic,
                },
                Self::Distribution {
                    samples: r_samples,
                    statistic: r_statistic,
                },
            ) => l_samples == r_samples && l_statistic == r_statistic,
            (
                Self::AggregatedHistogram {
                    buckets: l_buckets,
                    count: l_count,
                    sum: l_sum,
                },
                Self::AggregatedHistogram {
                    buckets: r_buckets,
                    count: r_count,
                    sum: r_sum,
                },
            ) => l_buckets == r_buckets && l_count == r_count && float_eq(*l_sum, *r_sum),
            (
                Self::AggregatedSummary {
                    quantiles: l_quantiles,
                    count: l_count,
                    sum: l_sum,
                },
                Self::AggregatedSummary {
                    quantiles: r_quantiles,
                    count: r_count,
                    sum: r_sum,
                },
            ) => l_quantiles == r_quantiles && l_count == r_count && float_eq(*l_sum, *r_sum),
            (Self::Sketch { sketch: l_sketch }, Self::Sketch { sketch: r_sketch }) => {
                l_sketch == r_sketch
            }
            _ => false,
        }
    }
}

impl fmt::Display for MetricValue {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self {
            MetricValue::Counter { value } | MetricValue::Gauge { value } => {
                write!(fmt, "{value}")
            }
            MetricValue::Set { values } => {
                write_list(fmt, " ", values.iter(), |fmt, value| write_word(fmt, value))
            }
            MetricValue::Distribution { samples, statistic } => {
                write!(
                    fmt,
                    "{} ",
                    match statistic {
                        StatisticKind::Histogram => "histogram",
                        StatisticKind::Summary => "summary",
                    }
                )?;
                write_list(fmt, " ", samples, |fmt, sample| {
                    write!(fmt, "{}@{}", sample.rate, sample.value)
                })
            }
            MetricValue::AggregatedHistogram {
                buckets,
                count,
                sum,
            } => {
                write!(fmt, "count={count} sum={sum} ")?;
                write_list(fmt, " ", buckets, |fmt, bucket| {
                    write!(fmt, "{}@{}", bucket.count, bucket.upper_limit)
                })
            }
            MetricValue::AggregatedSummary {
                quantiles,
                count,
                sum,
            } => {
                write!(fmt, "count={count} sum={sum} ")?;
                write_list(fmt, " ", quantiles, |fmt, quantile| {
                    write!(fmt, "{}@{}", quantile.quantile, quantile.value)
                })
            }
            MetricValue::Sketch { sketch } => {
                let quantiles = [0.5, 0.75, 0.9, 0.99]
                    .iter()
                    .map(|q| Quantile {
                        quantile: *q,
                        value: 0.0,
                    })
                    .collect::<Vec<_>>();

                match sketch {
                    MetricSketch::AgentDDSketch(ddsketch) => {
                        write!(
                            fmt,
                            "count={} sum={:?} min={:?} max={:?} avg={:?} ",
                            ddsketch.count(),
                            ddsketch.sum(),
                            ddsketch.min(),
                            ddsketch.max(),
                            ddsketch.avg()
                        )?;
                        write_list(fmt, " ", quantiles, |fmt, q| {
                            write!(
                                fmt,
                                "{}={:?}",
                                q.to_percentile_string(),
                                ddsketch.quantile(q.quantile)
                            )
                        })
                    }
                }
            }
        }
    }
}

impl From<AgentDDSketch> for MetricValue {
    fn from(ddsketch: AgentDDSketch) -> Self {
        MetricValue::Sketch {
            sketch: MetricSketch::AgentDDSketch(ddsketch),
        }
    }
}

// Currently, VRL can only read the type of the value and doesn't consider any actual metric values.
#[cfg(feature = "vrl")]
impl From<MetricValue> for vrl::value::Value {
    fn from(value: MetricValue) -> Self {
        value.as_name().into()
    }
}

/// Type of statistics to generate for a distribution.
#[configurable_component]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd)]
#[serde(rename_all = "snake_case")]
pub enum StatisticKind {
    /// A histogram representation.
    Histogram,

    /// Corresponds to Datadog's Distribution Metric
    /// <https://docs.datadoghq.com/developers/metrics/types/?tab=distribution#definition>
    Summary,
}

/// A generalized metrics sketch.
#[configurable_component]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum MetricSketch {
    /// [DDSketch][ddsketch] implementation based on the [Datadog Agent][ddagent].
    ///
    /// While `DDSketch` has open-source implementations based on the white paper, the version used in
    /// the Datadog Agent itself is subtly different. This version is suitable for sending directly
    /// to Datadog's sketch ingest endpoint.
    ///
    /// [ddsketch]: https://www.vldb.org/pvldb/vol12/p2195-masson.pdf
    /// [ddagent]: https://github.com/DataDog/datadog-agent
    AgentDDSketch(AgentDDSketch),
}

impl MetricSketch {
    /// Returns `true` if the sketch is empty.
    pub fn is_empty(&self) -> bool {
        match self {
            MetricSketch::AgentDDSketch(ddsketch) => ddsketch.is_empty(),
        }
    }

    /// Gets the name of the sketch as a string.
    ///
    /// This maps to the name of the enum variant itself.
    pub fn as_name(&self) -> &'static str {
        match self {
            Self::AgentDDSketch(_) => "agent dd sketch",
        }
    }
}

impl ByteSizeOf for MetricSketch {
    fn allocated_bytes(&self) -> usize {
        match self {
            Self::AgentDDSketch(ddsketch) => ddsketch.allocated_bytes(),
        }
    }
}

// Currently, VRL can only read the type of the value and doesn't consider any actual metric values.
#[cfg(feature = "vrl")]
impl From<MetricSketch> for vrl::value::Value {
    fn from(value: MetricSketch) -> Self {
        value.as_name().into()
    }
}

/// A single observation.
#[configurable_component]
#[derive(Clone, Copy, Debug)]
pub struct Sample {
    /// The value of the observation.
    pub value: f64,

    /// The rate at which the value was observed.
    pub rate: u32,
}

impl PartialEq for Sample {
    fn eq(&self, other: &Self) -> bool {
        self.rate == other.rate && float_eq(self.value, other.value)
    }
}

impl ByteSizeOf for Sample {
    fn allocated_bytes(&self) -> usize {
        0
    }
}

/// Custom serialization function which converts special `f64` values to strings.
/// Non-special values are serialized as numbers.
#[allow(clippy::trivially_copy_pass_by_ref)]
fn serialize_f64<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    if value.is_infinite() {
        serializer.serialize_str(if *value > 0.0 { INFINITY } else { NEG_INFINITY })
    } else if value.is_nan() {
        serializer.serialize_str(NAN)
    } else {
        serializer.serialize_f64(*value)
    }
}

/// Custom deserialization function for handling special `f64` values.
fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
where
    D: Deserializer<'de>,
{
    struct UpperLimitVisitor;

    impl de::Visitor<'_> for UpperLimitVisitor {
        type Value = f64;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a number or a special string value")
        }

        fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
            Ok(value)
        }

        fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
            match value {
                NAN => Ok(f64::NAN),
                INFINITY => Ok(f64::INFINITY),
                NEG_INFINITY => Ok(f64::NEG_INFINITY),
                _ => Err(E::custom("unsupported string value")),
            }
        }
    }

    deserializer.deserialize_any(UpperLimitVisitor)
}

/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
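///
/// # Example
///
/// A hedged illustration of how non-finite bounds are serialized, assuming `serde_json` is
/// available (this is not compiled as a doc-test):
///
/// ```ignore
/// use vector_core::event::metric::Bucket;
///
/// let bucket = Bucket { upper_limit: f64::INFINITY, count: 7 };
///
/// // Special `f64` values are written as strings ("inf", "-inf", "NaN") rather than numbers.
/// let json = serde_json::to_string(&bucket).unwrap();
/// assert_eq!(json, r#"{"upper_limit":"inf","count":7}"#);
/// ```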
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
    /// The upper limit of values in the bucket.
    #[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
    pub upper_limit: f64,

    /// The number of values tracked in this bucket.
    pub count: u64,
}

impl PartialEq for Bucket {
    fn eq(&self, other: &Self) -> bool {
        self.count == other.count && float_eq(self.upper_limit, other.upper_limit)
    }
}

impl ByteSizeOf for Bucket {
    fn allocated_bytes(&self) -> usize {
        0
    }
}

/// A single quantile observation.
///
/// Quantiles themselves are "cut points dividing the range of a probability distribution into
/// continuous intervals with equal probabilities". [[1][quantiles_wikipedia]].
///
/// We use quantiles to measure the value along these probability distributions for representing
/// client-side aggregations of distributions, which represent a collection of observations over a
/// specific time window.
///
/// In general, we typically use the term "quantile" to represent the concept of _percentiles_,
/// which deal with whole integers -- 0, 1, 2, .., 99, 100 -- even though quantiles are
/// floating-point numbers and can represent higher-precision cut points, such as 0.9999, or the
/// 99.99th percentile.
///
/// [quantiles_wikipedia]: https://en.wikipedia.org/wiki/Quantile
#[configurable_component]
#[derive(Clone, Copy, Debug)]
pub struct Quantile {
    /// The value of the quantile.
    ///
    /// This value must be between 0.0 and 1.0, inclusive.
    pub quantile: f64,

    /// The estimated value of the given quantile within the probability distribution.
    pub value: f64,
}

impl PartialEq for Quantile {
    fn eq(&self, other: &Self) -> bool {
        float_eq(self.quantile, other.quantile) && float_eq(self.value, other.value)
    }
}

impl Quantile {
    /// Renders this quantile as a string, scaled to be a percentile.
    ///
    /// Up to four significant digits are maintained, but the resulting string will be without a decimal point.
    ///
    /// For example, a quantile of 0.25, which represents a percentile of 25, will be rendered as "25" and a quantile of
    /// 0.9999, which represents a percentile of 99.99, will be rendered as "9999". A quantile of 0.99999, which
    /// represents a percentile of 99.999, would also be rendered as "9999", though.
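    ///
    /// # Example
    ///
    /// An illustrative sketch of the rendering (the `vector_core::event::metric` import path is
    /// an assumption, so this is not compiled as a doc-test):
    ///
    /// ```ignore
    /// use vector_core::event::metric::Quantile;
    ///
    /// let median = Quantile { quantile: 0.5, value: 2.0 };
    /// assert_eq!(median.to_percentile_string(), "50");
    ///
    /// let p9999 = Quantile { quantile: 0.9999, value: 2.0 };
    /// assert_eq!(p9999.to_percentile_string(), "9999");
    /// ```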
    pub fn to_percentile_string(&self) -> String {
        let clamped = self.quantile.clamp(0.0, 1.0) * 100.0;
        clamped
            .to_string()
            .chars()
            .take(5)
            .filter(|c| *c != '.')
            .collect()
    }

    /// Renders this quantile as a string.
    ///
    /// Up to four significant digits are maintained.
    ///
    /// For example, a quantile of 0.25 will be rendered as "0.25", and a quantile of 0.9999 will be rendered as
    /// "0.9999", but a quantile of 0.99999 will be rendered as "0.9999".
    pub fn to_quantile_string(&self) -> String {
        let clamped = self.quantile.clamp(0.0, 1.0);
        clamped.to_string().chars().take(6).collect()
    }
}

impl ByteSizeOf for Quantile {
    fn allocated_bytes(&self) -> usize {
        0
    }
}