diff --git a/src/metrics.test.ts b/src/metrics.test.ts index 1bfea91..a04f21c 100644 --- a/src/metrics.test.ts +++ b/src/metrics.test.ts @@ -64,12 +64,7 @@ describe("metrics", () => { describe("histogram", () => { it("should publish histogram metric with options and tags", () => { - const options = { - aggregates: ["max", "min", "avg"] as HistogramOptions["aggregates"], - percentiles: [0.5, 0.95, 0.99], - }; - - metrics.histogram("test.histogram", 150, options, { endpoint: "/api/users" }); + metrics.histogram("test.histogram", 150, { endpoint: "/api/users" }); expect(receivedMessages).toHaveLength(1); expect(receivedMessages[0]).toEqual({ @@ -77,7 +72,6 @@ describe("metrics", () => { name: "test.histogram", value: 150, tags: { endpoint: "/api/users" }, - options, }); }); }); diff --git a/src/metrics.ts b/src/metrics.ts index e63e784..b3cd05d 100644 --- a/src/metrics.ts +++ b/src/metrics.ts @@ -3,7 +3,6 @@ import { type CountMetricPayload, type GaugeMetricPayload, type HistogramMetricPayload, - type HistogramOptions, METRICS_CHANNEL_NAME, MetricType, type Tags, @@ -55,7 +54,6 @@ export function gauge(name: string, value: number, tags: Tags = {}): void { export function histogram( name: string, value: number, - options: HistogramOptions = {}, tags: Tags = {}, ): void { const payload: HistogramMetricPayload = { @@ -63,7 +61,6 @@ export function histogram( name, value, tags, - options, }; metricsChannel.publish(payload); diff --git a/src/metricsDb.test.ts b/src/metricsDb.test.ts index 525aded..791de44 100644 --- a/src/metricsDb.test.ts +++ b/src/metricsDb.test.ts @@ -1,9 +1,8 @@ import { describe, it, expect, beforeEach } from "vitest"; import { MetricsDb } from "./metricsDb"; import { + type EmittedMetricPayload, MetricType, - type ExportedMetricPayload, - type HistogramAggregates, } from "./types"; describe("MetricsDb", () => { @@ -65,16 +64,12 @@ describe("MetricsDb", () => { }); it("should store a histogram metric", () => { - const metric: ExportedMetricPayload = { + const metric: EmittedMetricPayload = { type: MetricType.HISTOGRAM, name: "test.histogram", value: 150, tags: { endpoint: "/api/users" }, timestamp: Date.now(), - options: { - percentiles: [0.5, 0.95], - aggregates: ["max", "min"] as HistogramAggregates[], - }, }; metricsDb.storeMetric(metric); @@ -84,10 +79,8 @@ describe("MetricsDb", () => { expect(metrics[0]).toEqual({ type: MetricType.HISTOGRAM, name: "test.histogram", - value: [150], + value: [{ value: 150, time: metric.timestamp }], tags: { endpoint: "/api/users" }, - percentiles: [0.5, 0.95], - aggregates: ["max", "min"], lastUpdated: metric.timestamp, }); }); @@ -97,10 +90,6 @@ describe("MetricsDb", () => { type: MetricType.HISTOGRAM, name: "test.histogram", tags: { endpoint: "/api/users" }, - options: { - percentiles: [0.5], - aggregates: ["max"] as HistogramAggregates[], - }, }; metricsDb.storeMetric({ @@ -117,8 +106,7 @@ describe("MetricsDb", () => { expect(metricsDb.getMetricCount()).toBe(1); const metrics = metricsDb.getAllMetrics(); - expect(metrics[0].value).toEqual([100, 200]); - expect(metrics[0].lastUpdated).toBe(2000); + expect(metrics[0].value).toEqual([{ value: 100, time: 1000 }, { value: 200, time: 2000 }]); }); it("should group metric keys based on name, type, and tags", () => { @@ -237,52 +225,7 @@ describe("MetricsDb", () => { name: "test.histogram", value: 100, tags: { endpoint: "/api" }, - timestamp: 1000, - options: { - percentiles: [0.5, 0.95], - }, - }); - - metricsDb.storeMetric({ - type: MetricType.HISTOGRAM, - name: "test.histogram", - value: 
200, - tags: { endpoint: "/api" }, - timestamp: 2000, - options: { - percentiles: [0.5, 0.95], - }, - }); - - const payloads = metricsDb.toMetricPayloads(); - - expect(payloads).toHaveLength(2); - expect(payloads).toContainEqual({ - type: MetricType.GAUGE, - name: "test.histogram.p50", - value: 100, // 50th percentile of [100, 200] - tags: { endpoint: "/api" }, - timestamp: 2000, - }); - expect(payloads).toContainEqual({ - type: MetricType.GAUGE, - name: "test.histogram.p95", - value: 200, // 95th percentile of [100, 200] - tags: { endpoint: "/api" }, - timestamp: 2000, - }); - }); - - it("should export histogram aggregates as appropriate metric types", () => { - metricsDb.storeMetric({ - type: MetricType.HISTOGRAM, - name: "test.histogram", - value: 100, - tags: { endpoint: "/api" }, - timestamp: 1000, - options: { - aggregates: ["count", "max", "min", "avg"] as HistogramAggregates[], - }, + timestamp: 1000 }); metricsDb.storeMetric({ @@ -290,80 +233,23 @@ describe("MetricsDb", () => { name: "test.histogram", value: 200, tags: { endpoint: "/api" }, - timestamp: 2000, - options: { - aggregates: ["count", "max", "min", "avg"] as HistogramAggregates[], - }, + timestamp: 2000 }); const payloads = metricsDb.toMetricPayloads(); - expect(payloads).toHaveLength(4); - - // Count should be COUNT type - expect(payloads).toContainEqual({ - type: MetricType.COUNT, - name: "test.histogram.count", - value: 2, - tags: { endpoint: "/api" }, - timestamp: 2000, - }); - - // Other aggregates should be GAUGE type - expect(payloads).toContainEqual({ - type: MetricType.GAUGE, - name: "test.histogram.max", - value: 200, - tags: { endpoint: "/api" }, - timestamp: 2000, - }); - - expect(payloads).toContainEqual({ - type: MetricType.GAUGE, - name: "test.histogram.min", - value: 100, - tags: { endpoint: "/api" }, - timestamp: 2000, - }); - - expect(payloads).toContainEqual({ - type: MetricType.GAUGE, - name: "test.histogram.avg", - value: 150, - tags: { endpoint: "/api" }, - timestamp: 2000, - }); - }); - - it("should handle histogram with both percentiles and aggregates", () => { - metricsDb.storeMetric({ + expect(payloads).toHaveLength(1); + expect(payloads[0]).toEqual({ type: MetricType.HISTOGRAM, name: "test.histogram", - value: 100, - tags: { endpoint: "/api" }, - timestamp: 1000, - options: { - percentiles: [0.5], - aggregates: ["count"] as HistogramAggregates[], - }, - }); - - const payloads = metricsDb.toMetricPayloads(); - - expect(payloads).toHaveLength(2); - expect(payloads).toContainEqual({ - type: MetricType.GAUGE, - name: "test.histogram.p50", - value: 100, - tags: { endpoint: "/api" }, - timestamp: 1000, - }); - expect(payloads).toContainEqual({ - type: MetricType.COUNT, - name: "test.histogram.count", - value: 1, + value: [{ + value: 100, + time: 1000, + }, { + value: 200, + time: 2000, + }], tags: { endpoint: "/api" }, - timestamp: 1000, }); }); @@ -372,14 +258,13 @@ describe("MetricsDb", () => { type: MetricType.HISTOGRAM, name: "test.histogram", value: 100, - options: {}, tags: { endpoint: "/api" }, timestamp: 1000, }); const payloads = metricsDb.toMetricPayloads(); - expect(payloads).toHaveLength(0); + expect(payloads).toHaveLength(1); }); }); diff --git a/src/metricsDb.ts b/src/metricsDb.ts index f901be0..90a1e84 100644 --- a/src/metricsDb.ts +++ b/src/metricsDb.ts @@ -1,10 +1,9 @@ -import { calculateHistogramValue, calculatePercentile } from "./utils/maths"; import { - type HistogramAggregates, type MetricPayload, type ExportedMetricPayload, MetricType, type Tags, + type EmittedMetricPayload, } 
from "./types"; interface BaseStoredMetric { @@ -25,9 +24,10 @@ interface StoredGaugeMetric extends BaseStoredMetric { interface StoredHistogramMetric extends BaseStoredMetric { type: MetricType.HISTOGRAM; - value: number[]; - percentiles?: number[]; - aggregates?: HistogramAggregates[]; + value: { + time: number; + value: number; + }[]; } type StoredMetric = @@ -49,7 +49,7 @@ export class MetricsDb { return `${metric.name}:${metric.type}:${tagKey}`; } - public storeMetric(metric: ExportedMetricPayload): void { + public storeMetric(metric: EmittedMetricPayload): void { const key = this.getMetricKey(metric); const existingMetric = this.metrics.get(key); @@ -82,15 +82,20 @@ export class MetricsDb { case MetricType.HISTOGRAM: { const existingValue = existingMetric - ? (existingMetric.value as number[]) + ? (existingMetric.value as { value: number; time: number }[]) : []; this.metrics.set(key, { type: metric.type, name: metric.name, tags: metric.tags, - percentiles: metric.options?.percentiles, - aggregates: metric.options?.aggregates, - value: [...existingValue, metric.value], + + value: [ + ...existingValue, + { + value: Number(metric.value), + time: metric.timestamp, + }, + ], lastUpdated: metric.timestamp, }); } @@ -98,7 +103,7 @@ export class MetricsDb { } public storeMetrics( - metrics: (MetricPayload & { timestamp: number })[], + metrics: (MetricPayload & { timestamp: number })[] ): void { for (const metric of metrics) { this.storeMetric(metric); @@ -140,30 +145,13 @@ export class MetricsDb { }); break; case MetricType.HISTOGRAM: { - const sortedArray = [...metric.value].sort(); - - for (const percentile of metric.percentiles || []) { - const value = calculatePercentile(sortedArray, percentile); - payloads.push({ - type: MetricType.GAUGE, - name: `${metric.name}.p${Math.round(percentile * 100)}`, - value: value, - tags: metric.tags, - timestamp: metric.lastUpdated, - }); - } - - for (const aggregate of metric.aggregates || []) { - const value = calculateHistogramValue(aggregate, metric.value); - - payloads.push({ - type: aggregate === "count" ? 
MetricType.COUNT : MetricType.GAUGE, - name: `${metric.name}.${aggregate}`, - value: value, - tags: metric.tags, - timestamp: metric.lastUpdated, - }); - } + payloads.push({ + type: MetricType.HISTOGRAM, + name: metric.name, + value: metric.value, + tags: metric.tags, + }); + break; } } } diff --git a/src/sinks/metrics/datadog.ts b/src/sinks/metrics/datadog.ts index 89dde20..9c01de7 100644 --- a/src/sinks/metrics/datadog.ts +++ b/src/sinks/metrics/datadog.ts @@ -1,6 +1,10 @@ -import type { ExportedMetricPayload } from "../../types"; +import { MetricType, type ExportedMetricPayload, type Tags } from "../../types"; import { env } from "cloudflare:workers"; import type { MetricSink } from "../sink"; + +const DISTRIBUTION_POINTS_ENDPOINT_PATH: string = "api/v1/distribution_points"; +const METRICS_SERIES_ENDPOINT_PATH: string = "api/v1/series"; + export interface DatadogMetricSinkOptions { /** * Datadog API key @@ -13,9 +17,14 @@ export interface DatadogMetricSinkOptions { site?: string; /** - * Custom endpoint URL override (for testing or proxies) + * Custom distribution points endpoint URL override (for testing or proxies) */ - endpoint?: string; + distributionPointsEndpoint?: string; + + /** + * Custom metrics series endpoint URL override (for testing or proxies) + */ + metricsSeriesEndpoint?: string; } /** @@ -25,24 +34,33 @@ export class DatadogMetricSink implements MetricSink { private readonly options: { apiKey: string; site: string; - endpoint: string; + distributionPointsEndpoint: string; + metricsSeriesEndpoint: string; }; constructor(options?: DatadogMetricSinkOptions) { // @ts-ignore const apiKey = options?.apiKey || env.DD_API_KEY || env.DATADOG_API_KEY; if (!apiKey || apiKey.length === 0) { - console.error("Datadog API key was not found. Provide it in the sink options or set the DD_API_KEY environment variable. Metrics will not be sent to Datadog."); + console.error( + "Datadog API key was not found. Provide it in the sink options or set the DD_API_KEY environment variable. Metrics will not be sent to Datadog." + ); } // @ts-ignore const site = options?.site || env.DD_SITE || "datadoghq.com"; - const endpoint = options?.endpoint || `https://api.${site}/api/v1/series`; + const distributionPointsEndpoint = + options?.distributionPointsEndpoint || + `https://api.${site}/${DISTRIBUTION_POINTS_ENDPOINT_PATH}`; + const metricsSeriesEndpoint = + options?.metricsSeriesEndpoint || + `https://api.${site}/${METRICS_SERIES_ENDPOINT_PATH}`; this.options = { apiKey, site, - endpoint, + distributionPointsEndpoint, + metricsSeriesEndpoint, }; } @@ -53,14 +71,25 @@ export class DatadogMetricSink implements MetricSink { if (!payloads || payloads.length === 0) { return; } - + try { - const datadogMetrics = payloads.map((payload) => - this.transformMetric(payload), + // Filter out worker metrics, since Datadog is currently getting this metrics through an integration + // For now, Datadog only accepts custom metrics. + const payloadsWithoutWorkerMetrics = payloads.filter( + (payload) => !payload.name.startsWith("worker.") ); + + const datadogMetrics = payloadsWithoutWorkerMetrics.map((payload) => + this.transformMetric(payload) + ); + await this.sendToDatadog(datadogMetrics); } catch (error) { - throw new Error(`Failed to send metrics to Datadog: ${error instanceof Error ? error.message : String(error)}`); + throw new Error( + `Failed to send metrics to Datadog: ${ + error instanceof Error ? 
error.message : String(error)
+        }`
+      );
     }
   }
 
@@ -68,36 +97,132 @@
   /**
    * Transform a metric payload to Datadog format
    */
   private transformMetric(payload: ExportedMetricPayload): DatadogMetric {
-    const formattedTags = Object.entries(payload.tags).map(
-      ([key, value]) => `${key}:${value}`,
-    );
+    const tags = this.formatTags(payload.tags);
 
-    const metricType = payload.type.toLowerCase();
+    const metricType = payload.type;
+    if (metricType === MetricType.HISTOGRAM) {
+      return {
+        metric: payload.name,
+        type: "distribution",
+        points: payload.value.map((value) => [
+          Math.floor(value.time / 1000),
+          [value.value],
+        ]),
+        tags: tags,
+      };
+    }
 
     return {
       metric: payload.name,
-      type: metricType,
+      type: metricType.toLowerCase(),
       points: [[Math.floor(payload.timestamp / 1000), payload.value]],
-      tags: formattedTags,
+      tags: tags,
     };
   }
 
+  /**
+   * Formats tags into a list of `key:value` strings,
+   * and adds the following tags:
+   * - `worker_script:${scriptName}`
+   * - `execution_model:${executionModel}`
+   * - `version:${versionId}`
+   * - `trigger:${trigger}`
+   * - `region:earth`
+   */
+  private formatTags(tags: Tags): string[] {
+    const { scriptName, executionModel, versionId, trigger, ...customTags } =
+      tags;
+
+    const formattedTags = Object.entries(customTags)
+      .filter(([_, value]) => value !== undefined && value !== null)
+      .map(([key, value]) => `${key}:${value}`);
+
+    if (scriptName != null) {
+      formattedTags.push(`worker_script:${scriptName}`);
+    }
+
+    if (executionModel != null) {
+      formattedTags.push(`execution_model:${executionModel}`);
+    }
+
+    if (versionId != null) {
+      formattedTags.push(`version:${versionId}`);
+    }
+
+    if (trigger != null) {
+      formattedTags.push(`trigger:${trigger}`);
+    }
+
+    formattedTags.push("region:earth");
+
+    return formattedTags;
+  }
+
   /**
    * Send metrics to Datadog API
    */
   private async sendToDatadog(metrics: DatadogMetric[]): Promise<void> {
     if (!this.options.apiKey || this.options.apiKey.length === 0) {
-      console.warn(`Datadog API key was not found. Dropping ${metrics.length} metrics.`);
+      console.warn(
+        `Datadog API key was not found. Dropping ${metrics.length} metrics.`
+      );
       return;
     }
-
-    const response = await fetch(this.options.endpoint, {
+
+    // Histograms are transformed into "distribution" metrics above, so route
+    // them to the distribution-points endpoint.
+    const distributionMetrics: DatadogMetric[] = metrics.filter(
+      (metric) => metric.type === "distribution"
+    );
+    // Other metrics are sent as metrics series
+    const otherMetrics: DatadogMetric[] = metrics.filter(
+      (metric) => metric.type !== "distribution"
+    );
+
+    const promises = [
+      distributionMetrics.length > 0 &&
+        this.sendDistributionMetrics(distributionMetrics),
+      otherMetrics.length > 0 && this.sendMetricsSeries(otherMetrics),
+    ].filter(Boolean);
+
+    if (promises.length === 0) {
+      return;
+    }
+
+    try {
+      await Promise.all(promises);
+    } catch (error) {
+      throw new Error(
+        `Failed to send metrics to Datadog: ${
+          error instanceof Error ? error.message : String(error)
+        }`
+      );
+    }
+  }
+
+  private async sendMetricsSeries(metrics: DatadogMetric[]): Promise<void> {
+    await this.postRequest(
+      this.options.metricsSeriesEndpoint,
+      JSON.stringify({ series: metrics })
+    );
+  }
+
+  private async sendDistributionMetrics(
+    metrics: DatadogMetric[]
+  ): Promise<void> {
+    await this.postRequest(
+      this.options.distributionPointsEndpoint,
+      JSON.stringify({ series: metrics })
+    );
+  }
+
+  private async postRequest(endpoint: string, body: string): Promise<void> {
+    const response = await fetch(endpoint, {
       method: "POST",
       headers: {
         "Content-Type": "application/json",
         "DD-API-KEY": this.options.apiKey,
       },
-      body: JSON.stringify({ series: metrics }),
+      body,
     });
 
     if (!response.ok) {
@@ -107,9 +232,11 @@
   }
 }
 
+type DatadogPoint = [number, number[]] | [number, number]; // [timestamp, [values]] (distribution) or [timestamp, value] (count, gauge)
+
 interface DatadogMetric {
   metric: string;
   type: string;
-  points: [number, number][]; // [timestamp, value]
+  points: DatadogPoint[];
   tags: string[];
 }
diff --git a/src/sinks/metrics/otel-metrics-types.ts b/src/sinks/metrics/otel-metrics-types.ts
index d487044..db00cad 100644
--- a/src/sinks/metrics/otel-metrics-types.ts
+++ b/src/sinks/metrics/otel-metrics-types.ts
@@ -32,12 +32,36 @@ export interface Sum {
   isMonotonic: boolean;
 }
 
+export interface ExponentialHistogramDataPoint {
+  attributes: KeyValue[];
+  timeUnixNano: string;
+  startTimeUnixNano?: string;
+  count: string;
+  sum?: number;
+  scale: number;
+  zeroCount: string;
+  positive?: {
+    offset: number;
+    bucketCounts: string[];
+  };
+  negative?: {
+    offset: number;
+    bucketCounts: string[];
+  };
+}
+
+export interface ExponentialHistogram {
+  dataPoints: ExponentialHistogramDataPoint[];
+  aggregationTemporality: AggregationTemporality;
+}
+
 export interface Metric {
   name: string;
   description?: string;
   unit?: string;
   gauge?: Gauge;
   sum?: Sum;
+  exponentialHistogram?: ExponentialHistogram;
 }
 
 export interface InstrumentationScope {
@@ -73,3 +97,9 @@ export function isGaugeMetric(
 export function isSumMetric(metric: Metric): metric is Metric & { sum: Sum } {
   return metric.sum !== undefined;
 }
+
+export function isExponentialHistogramMetric(
+  metric: Metric,
+): metric is Metric & { exponentialHistogram: ExponentialHistogram } {
+  return metric.exponentialHistogram !== undefined;
+}
diff --git a/src/sinks/metrics/otel.ts b/src/sinks/metrics/otel.ts
index 89b5e9b..7b9cb97 100644
--- a/src/sinks/metrics/otel.ts
+++ b/src/sinks/metrics/otel.ts
@@ -44,13 +44,15 @@ export class OtelMetricSink implements MetricSink {
       await this.exportMetrics(otlpPayload);
     } catch (error) {
       throw new Error(
-        `Failed to send metrics to OTEL collector: ${error instanceof Error ? error.message : String(error)}`,
+        `Failed to send metrics to OTEL collector: ${
+          error instanceof Error ?
error.message : String(error) + }` ); } } private buildOTLPPayload( - metrics: ExportedMetricPayload[], + metrics: ExportedMetricPayload[] ): OTLPMetricsPayload { const otlpMetrics: (ResourceMetrics | undefined)[] = metrics.map( (payload) => { @@ -93,7 +95,7 @@ export class OtelMetricSink implements MetricSink { scopeMetrics: [scopeMetrics], }; return resourceMetrics; - }, + } ); return { @@ -117,13 +119,13 @@ export class OtelMetricSink implements MetricSink { if (!response.ok) { const errorText = await response.text(); throw new Error( - `HTTP ${response.status} ${response.statusText}: ${errorText}`, + `HTTP ${response.status} ${response.statusText}: ${errorText}` ); } } private convertTagsToAttributes( - tags?: Record, + tags?: Record ): KeyValue[] { return Object.entries(tags || {}) .filter(([_, value]) => value !== undefined && value !== null) @@ -137,8 +139,23 @@ export class OtelMetricSink implements MetricSink { private payloadToMetric( payload: ExportedMetricPayload, - attributes: KeyValue[], + attributes: KeyValue[] ) { + if (payload.type === MetricType.HISTOGRAM) { + return { + name: payload.name, + exponentialHistogram: { + dataPoints: [ + this.buildExponentialHistogramDataPoint( + payload.value, + attributes + ), + ], + aggregationTemporality: + AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + }, + }; + } const timeUnixNano = this.timestampToNanos(payload.timestamp); if (payload.type === MetricType.COUNT) { return { @@ -173,6 +190,72 @@ export class OtelMetricSink implements MetricSink { } } + private buildExponentialHistogramDataPoint( + values: { time: number; value: number }[], + attributes: KeyValue[] + ) { + // Sort values to calculate buckets + const sortedValues = values.map((v) => v.value).sort((a, b) => a - b); + const count = String(sortedValues.length); + const sum = sortedValues.reduce((acc, val) => acc + val, 0); + + // Use scale 0 for simplicity (base-2 exponential buckets) + const scale = 0; + const zeroCount = String(sortedValues.filter((v) => v === 0).length); + + // Build positive buckets for exponential histogram + const positive = this.buildExponentialBuckets( + sortedValues.filter((v) => v > 0) + ); + const negative = this.buildExponentialBuckets( + sortedValues.filter((v) => v < 0).map((v) => Math.abs(v)) + ); + + return { + attributes, + count, + startTimeUnixNano: this.timestampToNanos( + values.length > 0 ? values[0].time : Date.now() + ), + timeUnixNano: this.timestampToNanos( + values.length > 0 ? values[values.length - 1].time : Date.now() + ), + sum, + scale, + zeroCount, + ...(positive.bucketCounts.length > 0 && { positive }), + ...(negative.bucketCounts.length > 0 && { negative }), + }; + } + + private buildExponentialBuckets(values: number[]) { + if (values.length === 0) { + return { offset: 0, bucketCounts: [] }; + } + + // For scale 0, bucket boundaries are powers of 2: [1, 2), [2, 4), [4, 8), etc. + const buckets = new Map(); + + for (const value of values) { + // Calculate bucket index for scale 0: floor(log2(value)) + const bucketIndex = value <= 0 ? 
0 : Math.floor(Math.log2(value)); + buckets.set(bucketIndex, (buckets.get(bucketIndex) || 0) + 1); + } + + const minBucket = Math.min(...buckets.keys()); + const maxBucket = Math.max(...buckets.keys()); + const bucketCounts: string[] = []; + + for (let i = minBucket; i <= maxBucket; i++) { + bucketCounts.push(String(buckets.get(i) || 0)); + } + + return { + offset: minBucket, + bucketCounts, + }; + } + private timestampToNanos(timestampMs: number): string { // Convert milliseconds to nanoseconds using BigInt to avoid precision loss return String(BigInt(Math.round(timestampMs)) * BigInt(1000000)); diff --git a/src/types.ts b/src/types.ts index eed7b39..394a600 100644 --- a/src/types.ts +++ b/src/types.ts @@ -4,6 +4,7 @@ export enum MetricType { COUNT = "COUNT", GAUGE = "GAUGE", HISTOGRAM = "HISTOGRAM", + DISTRIBUTION = "DISTRIBUTION", } export type Tags = Record; @@ -11,7 +12,7 @@ export type Tags = Record; interface BaseMetricPayload { type: MetricType; name: string; - value: number; + value: unknown; tags: Tags; } @@ -44,12 +45,24 @@ export interface GaugeMetricPayload extends BaseMetricPayload { export interface HistogramMetricPayload extends BaseMetricPayload { type: MetricType.HISTOGRAM; value: number; - options: HistogramOptions; +} + +export interface DistributionMetricPayload extends BaseMetricPayload { + type: MetricType.DISTRIBUTION; + value: number; } export type MetricPayload = | CountMetricPayload | GaugeMetricPayload - | HistogramMetricPayload; + | HistogramMetricPayload + | DistributionMetricPayload; + +export type EmittedMetricPayload = MetricPayload & { timestamp: number }; + +export interface ExportedHistogramPayload extends BaseMetricPayload { + type: MetricType.HISTOGRAM; + value: { time: number, value: number }[]; +} -export type ExportedMetricPayload = MetricPayload & { timestamp: number }; \ No newline at end of file +export type ExportedMetricPayload = (CountMetricPayload | GaugeMetricPayload) & { timestamp: number } | ExportedHistogramPayload \ No newline at end of file
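
A minimal usage sketch of the simplified histogram API from this patch. The call signatures and payload shapes come from the changed files; the metric name, value, and tag are invented for illustration, and the diagnostics-channel wiring that attaches a timestamp and feeds MetricsDb is assumed rather than shown here.

// Illustrative sketch only — exercises the new signatures from this patch.
import { histogram } from "./metrics";
import { MetricsDb } from "./metricsDb";
import { MetricType } from "./types";

// Histogram samples now take only a name, a value, and optional tags;
// the old per-call options (percentiles/aggregates) are gone.
histogram("request.duration_ms", 150, { endpoint: "/api/users" });

// Whatever subscribes to METRICS_CHANNEL_NAME is expected to attach a
// timestamp and hand the payload to MetricsDb (that wiring is assumed here).
const db = new MetricsDb();
db.storeMetric({
  type: MetricType.HISTOGRAM,
  name: "request.duration_ms",
  value: 150,
  tags: { endpoint: "/api/users" },
  timestamp: Date.now(),
});

// Histograms are exported as raw { value, time } samples; DatadogMetricSink
// turns each sample into a distribution point of the form [unixSeconds, [value]].
const [exported] = db.toMetricPayloads();
console.log(exported.value); // [{ value: 150, time: <the timestamp above> }]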
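
For reference, a self-contained sketch of the scale-0 exponential bucketing that buildExponentialBuckets in OtelMetricSink performs. The function name and the sample latencies are invented for the example; the bucketing logic mirrors the code in the patch.

// At scale 0 a positive value v falls into bucket floor(log2(v)),
// i.e. bucket k roughly covers [2^k, 2^(k+1)).
function bucketAtScaleZero(values: number[]): { offset: number; bucketCounts: string[] } {
  if (values.length === 0) {
    return { offset: 0, bucketCounts: [] };
  }

  const buckets = new Map<number, number>();
  for (const value of values) {
    const index = Math.floor(Math.log2(value));
    buckets.set(index, (buckets.get(index) || 0) + 1);
  }

  // OTLP expects a dense array, so fill the gaps between the lowest and
  // highest occupied bucket and report the lowest index as the offset.
  const min = Math.min(...buckets.keys());
  const max = Math.max(...buckets.keys());
  const bucketCounts: string[] = [];
  for (let i = min; i <= max; i++) {
    bucketCounts.push(String(buckets.get(i) || 0));
  }

  return { offset: min, bucketCounts };
}

// Latencies 3, 5, 9 and 40 land in buckets 1, 2, 3 and 5:
// offset 1, bucketCounts ["1", "1", "1", "0", "1"].
console.log(bucketAtScaleZero([3, 5, 9, 40]));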