feat: Complete zCode CLI X with Telegram bot integration

- Add full Telegram bot functionality with Z.AI API integration
- Implement 4 tools: Bash, FileEdit, WebSearch, Git
- Add 3 agents: Code Reviewer, Architect, DevOps Engineer
- Add 6 skills for common coding tasks
- Add systemd service file for 24/7 operation
- Add nginx configuration for HTTPS webhook
- Add comprehensive documentation
- Implement WebSocket server for real-time updates
- Add logging system with Winston
- Add environment validation

🤖 zCode CLI X - Agentic coder with Z.AI + Telegram integration
This commit is contained in:
admin
2026-05-05 09:01:26 +00:00
Unverified
parent 4a7035dd92
commit 875c7f9b91
24688 changed files with 3224957 additions and 221 deletions

View File

@@ -0,0 +1,27 @@
import type { MetricAdvice, MetricOptions } from '@opentelemetry/api';
import type { View } from './view/View';
import type { InstrumentType, MetricDescriptor } from './export/MetricData';
/**
 * An internal interface describing the instrument.
 *
 * This is intentionally distinguished from the public MetricDescriptor (a.k.a. InstrumentDescriptor)
 * which may not contain internal fields like metric advice.
 */
export interface InstrumentDescriptor extends MetricDescriptor {
    /**
     * For internal use; exporter should avoid depending on the type of the
     * instrument as their resulting aggregator can be re-mapped with views.
     */
    readonly type: InstrumentType;
    /**
     * See {@link MetricAdvice}
     *
     * @experimental
     */
    readonly advice: MetricAdvice;
}
/** Builds a descriptor from user-supplied MetricOptions; warns (but does not throw) on an invalid name. */
export declare function createInstrumentDescriptor(name: string, type: InstrumentType, options?: MetricOptions): InstrumentDescriptor;
/** Builds a descriptor for `instrument` with name/description overridden by `view` when the view defines them. */
export declare function createInstrumentDescriptorWithView(view: View, instrument: InstrumentDescriptor): InstrumentDescriptor;
/** Compatibility check: case-insensitive name plus identical unit, type and value type. */
export declare function isDescriptorCompatibleWith(descriptor: InstrumentDescriptor, otherDescriptor: InstrumentDescriptor): boolean;
/** True when `name` is an ASCII metric name of at most 255 characters (see NAME_REGEXP in the implementation). */
export declare function isValidName(name: string): boolean;
//# sourceMappingURL=InstrumentDescriptor.d.ts.map

View File

@@ -0,0 +1,50 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.isValidName = exports.isDescriptorCompatibleWith = exports.createInstrumentDescriptorWithView = exports.createInstrumentDescriptor = void 0;
const api_1 = require("@opentelemetry/api");
const utils_1 = require("./utils");
/**
 * Builds an InstrumentDescriptor for a newly created instrument.
 *
 * Emits a diagnostic warning (but still returns a descriptor) when `name`
 * fails the `isValidName` check. Unspecified options fall back to an empty
 * description/unit, DOUBLE value type, and an empty advice object.
 */
function createInstrumentDescriptor(name, type, options) {
    if (!isValidName(name)) {
        api_1.diag.warn(`Invalid metric name: "${name}". The metric name should be a ASCII string with a length no greater than 255 characters.`);
    }
    // `??` (not destructuring defaults) so that explicit nulls also fall back.
    const description = options?.description ?? '';
    const unit = options?.unit ?? '';
    const valueType = options?.valueType ?? api_1.ValueType.DOUBLE;
    const advice = options?.advice ?? {};
    return { name, type, description, unit, valueType, advice };
}
exports.createInstrumentDescriptor = createInstrumentDescriptor;
/**
 * Derives a descriptor for `instrument`, letting `view` override the name
 * and description when the view defines them. Every other field is copied
 * through from the instrument unchanged.
 */
function createInstrumentDescriptorWithView(view, instrument) {
    const name = view.name ?? instrument.name;
    const description = view.description ?? instrument.description;
    return {
        name,
        description,
        type: instrument.type,
        unit: instrument.unit,
        valueType: instrument.valueType,
        advice: instrument.advice,
    };
}
exports.createInstrumentDescriptorWithView = createInstrumentDescriptorWithView;
/**
 * Two descriptors are compatible when they agree on name (compared
 * case-insensitively) and on unit, instrument type and value type.
 */
function isDescriptorCompatibleWith(descriptor, otherDescriptor) {
    // Names are case-insensitive strings.
    if (!(0, utils_1.equalsCaseInsensitive)(descriptor.name, otherDescriptor.name)) {
        return false;
    }
    return (descriptor.unit === otherDescriptor.unit &&
        descriptor.type === otherDescriptor.type &&
        descriptor.valueType === otherDescriptor.valueType);
}
exports.isDescriptorCompatibleWith = isDescriptorCompatibleWith;
// ASCII string with a length no greater than 255 characters.
// NB: the first character counted separately from the rest: it must be a
// letter; the remaining 0-254 characters may also be digits, '_', '.',
// '-' or '/'. The trailing 'i' flag makes the whole match case-insensitive.
const NAME_REGEXP = /^[a-z][a-z0-9_.\-/]{0,254}$/i;
// Returns true when `name` matches the metric-name rules above.
function isValidName(name) {
    return NAME_REGEXP.test(name);
}
exports.isValidName = isValidName;
//# sourceMappingURL=InstrumentDescriptor.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"InstrumentDescriptor.js","sourceRoot":"","sources":["../../src/InstrumentDescriptor.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAGH,4CAAqD;AAErD,mCAAgD;AAwBhD,SAAgB,0BAA0B,CACxC,IAAY,EACZ,IAAoB,EACpB,OAAuB;IAEvB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;QACtB,UAAI,CAAC,IAAI,CACP,yBAAyB,IAAI,2FAA2F,CACzH,CAAC;KACH;IACD,OAAO;QACL,IAAI;QACJ,IAAI;QACJ,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,EAAE;QACvC,IAAI,EAAE,OAAO,EAAE,IAAI,IAAI,EAAE;QACzB,SAAS,EAAE,OAAO,EAAE,SAAS,IAAI,eAAS,CAAC,MAAM;QACjD,MAAM,EAAE,OAAO,EAAE,MAAM,IAAI,EAAE;KAC9B,CAAC;AACJ,CAAC;AAlBD,gEAkBC;AAED,SAAgB,kCAAkC,CAChD,IAAU,EACV,UAAgC;IAEhC,OAAO;QACL,IAAI,EAAE,IAAI,CAAC,IAAI,IAAI,UAAU,CAAC,IAAI;QAClC,WAAW,EAAE,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC,WAAW;QACvD,IAAI,EAAE,UAAU,CAAC,IAAI;QACrB,IAAI,EAAE,UAAU,CAAC,IAAI;QACrB,SAAS,EAAE,UAAU,CAAC,SAAS;QAC/B,MAAM,EAAE,UAAU,CAAC,MAAM;KAC1B,CAAC;AACJ,CAAC;AAZD,gFAYC;AAED,SAAgB,0BAA0B,CACxC,UAAgC,EAChC,eAAqC;IAErC,sCAAsC;IACtC,OAAO,CACL,IAAA,6BAAqB,EAAC,UAAU,CAAC,IAAI,EAAE,eAAe,CAAC,IAAI,CAAC;QAC5D,UAAU,CAAC,IAAI,KAAK,eAAe,CAAC,IAAI;QACxC,UAAU,CAAC,IAAI,KAAK,eAAe,CAAC,IAAI;QACxC,UAAU,CAAC,SAAS,KAAK,eAAe,CAAC,SAAS,CACnD,CAAC;AACJ,CAAC;AAXD,gEAWC;AAED,6DAA6D;AAC7D,4DAA4D;AAC5D,MAAM,WAAW,GAAG,8BAA8B,CAAC;AACnD,SAAgB,WAAW,CAAC,IAAY;IACtC,OAAO,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAChC,CAAC;AAFD,kCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { MetricAdvice, MetricOptions } from '@opentelemetry/api';\nimport { ValueType, diag } from '@opentelemetry/api';\nimport type { View } from './view/View';\nimport { equalsCaseInsensitive } from './utils';\nimport type { InstrumentType, MetricDescriptor } from './export/MetricData';\n\n/**\n * An internal interface describing the instrument.\n *\n * This is intentionally distinguished from the public MetricDescriptor (a.k.a. 
InstrumentDescriptor)\n * which may not contains internal fields like metric advice.\n */\nexport interface InstrumentDescriptor extends MetricDescriptor {\n /**\n * For internal use; exporter should avoid depending on the type of the\n * instrument as their resulting aggregator can be re-mapped with views.\n */\n readonly type: InstrumentType;\n\n /**\n * See {@link MetricAdvice}\n *\n * @experimental\n */\n readonly advice: MetricAdvice;\n}\n\nexport function createInstrumentDescriptor(\n name: string,\n type: InstrumentType,\n options?: MetricOptions\n): InstrumentDescriptor {\n if (!isValidName(name)) {\n diag.warn(\n `Invalid metric name: \"${name}\". The metric name should be a ASCII string with a length no greater than 255 characters.`\n );\n }\n return {\n name,\n type,\n description: options?.description ?? '',\n unit: options?.unit ?? '',\n valueType: options?.valueType ?? ValueType.DOUBLE,\n advice: options?.advice ?? {},\n };\n}\n\nexport function createInstrumentDescriptorWithView(\n view: View,\n instrument: InstrumentDescriptor\n): InstrumentDescriptor {\n return {\n name: view.name ?? instrument.name,\n description: view.description ?? instrument.description,\n type: instrument.type,\n unit: instrument.unit,\n valueType: instrument.valueType,\n advice: instrument.advice,\n };\n}\n\nexport function isDescriptorCompatibleWith(\n descriptor: InstrumentDescriptor,\n otherDescriptor: InstrumentDescriptor\n) {\n // Names are case-insensitive strings.\n return (\n equalsCaseInsensitive(descriptor.name, otherDescriptor.name) &&\n descriptor.unit === otherDescriptor.unit &&\n descriptor.type === otherDescriptor.type &&\n descriptor.valueType === otherDescriptor.valueType\n );\n}\n\n// ASCII string with a length no greater than 255 characters.\n// NB: the first character counted separately from the rest.\nconst NAME_REGEXP = /^[a-z][a-z0-9_.\\-/]{0,254}$/i;\nexport function isValidName(name: string): boolean {\n return NAME_REGEXP.test(name);\n}\n"]}

View File

@@ -0,0 +1,70 @@
import type { Context, Attributes, UpDownCounter, Counter, Gauge, Histogram, Observable, ObservableCallback, ObservableCounter, ObservableGauge, ObservableUpDownCounter } from '@opentelemetry/api';
import type { InstrumentDescriptor } from './InstrumentDescriptor';
import type { ObservableRegistry } from './state/ObservableRegistry';
import type { AsyncWritableMetricStorage, WritableMetricStorage } from './state/WritableMetricStorage';
/**
 * Base class for the synchronous instruments. Holds the writable metric
 * storage and the instrument descriptor consumed by `_record`.
 */
export declare class SyncInstrument {
    private _writableMetricStorage;
    protected _descriptor: InstrumentDescriptor;
    constructor(writableMetricStorage: WritableMetricStorage, descriptor: InstrumentDescriptor);
    /** Validate a measurement value and forward it to the metric storage. */
    protected _record(value: number, attributes?: Attributes, context?: Context): void;
}
/**
 * The class implements {@link UpDownCounter} interface.
 */
export declare class UpDownCounterInstrument extends SyncInstrument implements UpDownCounter {
    /**
     * Increment value of counter by the input. Inputs may be negative.
     */
    add(value: number, attributes?: Attributes, ctx?: Context): void;
}
/**
 * The class implements {@link Counter} interface.
 */
export declare class CounterInstrument extends SyncInstrument implements Counter {
    /**
     * Increment value of counter by the input. Inputs may not be negative.
     */
    add(value: number, attributes?: Attributes, ctx?: Context): void;
}
/**
 * The class implements {@link Gauge} interface.
 */
export declare class GaugeInstrument extends SyncInstrument implements Gauge {
    /**
     * Records a measurement.
     */
    record(value: number, attributes?: Attributes, ctx?: Context): void;
}
/**
 * The class implements {@link Histogram} interface.
 */
export declare class HistogramInstrument extends SyncInstrument implements Histogram {
    /**
     * Records a measurement. Value of the measurement must not be negative.
     */
    record(value: number, attributes?: Attributes, ctx?: Context): void;
}
/**
 * SDK implementation of {@link Observable}: carries the instrument's
 * descriptor and async storages, and registers observation callbacks with
 * the shared ObservableRegistry.
 */
export declare class ObservableInstrument implements Observable {
    /** @internal */
    _metricStorages: AsyncWritableMetricStorage[];
    /** @internal */
    _descriptor: InstrumentDescriptor;
    private _observableRegistry;
    constructor(descriptor: InstrumentDescriptor, metricStorages: AsyncWritableMetricStorage[], observableRegistry: ObservableRegistry);
    /**
     * @see {Observable.addCallback}
     */
    addCallback(callback: ObservableCallback): void;
    /**
     * @see {Observable.removeCallback}
     */
    removeCallback(callback: ObservableCallback): void;
}
/** Empty subclass marking an {@link ObservableCounter}; behavior lives in ObservableInstrument. */
export declare class ObservableCounterInstrument extends ObservableInstrument implements ObservableCounter {
}
/** Empty subclass marking an {@link ObservableGauge}; behavior lives in ObservableInstrument. */
export declare class ObservableGaugeInstrument extends ObservableInstrument implements ObservableGauge {
}
/** Empty subclass marking an {@link ObservableUpDownCounter}; behavior lives in ObservableInstrument. */
export declare class ObservableUpDownCounterInstrument extends ObservableInstrument implements ObservableUpDownCounter {
}
/** Type guard narrowing `it` to ObservableInstrument (instanceof check). */
export declare function isObservableInstrument(it: unknown): it is ObservableInstrument;
//# sourceMappingURL=Instruments.d.ts.map

View File

@@ -0,0 +1,129 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.isObservableInstrument = exports.ObservableUpDownCounterInstrument = exports.ObservableGaugeInstrument = exports.ObservableCounterInstrument = exports.ObservableInstrument = exports.HistogramInstrument = exports.GaugeInstrument = exports.CounterInstrument = exports.UpDownCounterInstrument = exports.SyncInstrument = void 0;
const api_1 = require("@opentelemetry/api");
const core_1 = require("@opentelemetry/core");
/**
 * Base class of the synchronous instruments. Validates measurement values
 * and forwards them, timestamped with the current wall-clock time, to the
 * writable metric storage.
 */
class SyncInstrument {
    _writableMetricStorage;
    _descriptor;
    constructor(writableMetricStorage, descriptor) {
        this._writableMetricStorage = writableMetricStorage;
        this._descriptor = descriptor;
    }
    /**
     * Record a measurement.
     * - Non-number values are dropped with a diagnostic warning.
     * - For INT-typed instruments, floating-point values are truncated
     *   toward zero (with a warning).
     */
    _record(value, attributes = {}, context = api_1.context.active()) {
        if (typeof value !== 'number') {
            api_1.diag.warn(`non-number value provided to metric ${this._descriptor.name}: ${value}`);
            return;
        }
        if (this._descriptor.valueType === api_1.ValueType.INT &&
            !Number.isInteger(value)) {
            api_1.diag.warn(`INT value type cannot accept a floating-point value for ${this._descriptor.name}, ignoring the fractional digits.`);
            value = Math.trunc(value);
            // ignore non-finite values.
            // (Math.trunc of NaN/±Infinity is still not an integer, so they bail here.)
            if (!Number.isInteger(value)) {
                return;
            }
        }
        this._writableMetricStorage.record(value, attributes, context, (0, core_1.millisToHrTime)(Date.now()));
    }
}
exports.SyncInstrument = SyncInstrument;
/**
 * The class implements {@link UpDownCounter} interface.
 */
class UpDownCounterInstrument extends SyncInstrument {
    /**
     * Increment value of counter by the input. Inputs may be negative.
     * Validation and recording are delegated to SyncInstrument._record.
     */
    add(value, attributes, ctx) {
        this._record(value, attributes, ctx);
    }
}
exports.UpDownCounterInstrument = UpDownCounterInstrument;
/**
 * The class implements {@link Counter} interface.
 */
class CounterInstrument extends SyncInstrument {
    /**
     * Increment value of counter by the input. Inputs may not be negative.
     * Negative inputs are dropped with a diagnostic warning; valid inputs
     * are delegated to SyncInstrument._record.
     */
    add(value, attributes, ctx) {
        if (value < 0) {
            api_1.diag.warn(`negative value provided to counter ${this._descriptor.name}: ${value}`);
            return;
        }
        this._record(value, attributes, ctx);
    }
}
exports.CounterInstrument = CounterInstrument;
/**
 * The class implements {@link Gauge} interface.
 */
class GaugeInstrument extends SyncInstrument {
    /**
     * Records a measurement. Any finite value (including negative) is
     * accepted; validation is delegated to SyncInstrument._record.
     */
    record(value, attributes, ctx) {
        this._record(value, attributes, ctx);
    }
}
exports.GaugeInstrument = GaugeInstrument;
/**
 * The class implements {@link Histogram} interface.
 */
class HistogramInstrument extends SyncInstrument {
    /**
     * Records a measurement. Value of the measurement must not be negative.
     * Negative inputs are dropped with a diagnostic warning; valid inputs
     * are delegated to SyncInstrument._record.
     */
    record(value, attributes, ctx) {
        if (value < 0) {
            api_1.diag.warn(`negative value provided to histogram ${this._descriptor.name}: ${value}`);
            return;
        }
        this._record(value, attributes, ctx);
    }
}
exports.HistogramInstrument = HistogramInstrument;
/**
 * SDK-side implementation of the Observable interface. Stores the
 * instrument's descriptor and async storages, and (de)registers
 * observation callbacks with the shared ObservableRegistry.
 */
class ObservableInstrument {
    /** @internal */
    _metricStorages;
    /** @internal */
    _descriptor;
    _observableRegistry;
    constructor(descriptor, metricStorages, observableRegistry) {
        this._descriptor = descriptor;
        this._metricStorages = metricStorages;
        this._observableRegistry = observableRegistry;
    }
    /**
     * @see {Observable.addCallback}
     */
    addCallback(callback) {
        this._observableRegistry.addCallback(callback, this);
    }
    /**
     * @see {Observable.removeCallback}
     */
    removeCallback(callback) {
        this._observableRegistry.removeCallback(callback, this);
    }
}
exports.ObservableInstrument = ObservableInstrument;
/** Empty subclass marking an observable counter; all behavior is inherited from ObservableInstrument. */
class ObservableCounterInstrument extends ObservableInstrument {
}
exports.ObservableCounterInstrument = ObservableCounterInstrument;
/** Empty subclass marking an observable gauge; all behavior is inherited from ObservableInstrument. */
class ObservableGaugeInstrument extends ObservableInstrument {
}
exports.ObservableGaugeInstrument = ObservableGaugeInstrument;
/** Empty subclass marking an observable up-down counter; all behavior is inherited from ObservableInstrument. */
class ObservableUpDownCounterInstrument extends ObservableInstrument {
}
exports.ObservableUpDownCounterInstrument = ObservableUpDownCounterInstrument;
/**
 * Type guard: true when `it` is an instance of this SDK's
 * ObservableInstrument (and therefore carries _descriptor/_metricStorages).
 */
function isObservableInstrument(it) {
    return it instanceof ObservableInstrument;
}
exports.isObservableInstrument = isObservableInstrument;
//# sourceMappingURL=Instruments.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,46 @@
import type { Meter as IMeter, MetricOptions, Gauge, Histogram, Counter, UpDownCounter, ObservableGauge, ObservableCounter, ObservableUpDownCounter, BatchObservableCallback, Observable } from '@opentelemetry/api';
import type { MeterSharedState } from './state/MeterSharedState';
/**
 * This class implements the {@link IMeter} interface.
 *
 * Each factory method builds an instrument of the corresponding kind;
 * batch observable callbacks are managed by the two methods at the bottom.
 */
export declare class Meter implements IMeter {
    private _meterSharedState;
    constructor(meterSharedState: MeterSharedState);
    /**
     * Create a {@link Gauge} instrument.
     */
    createGauge(name: string, options?: MetricOptions): Gauge;
    /**
     * Create a {@link Histogram} instrument.
     */
    createHistogram(name: string, options?: MetricOptions): Histogram;
    /**
     * Create a {@link Counter} instrument.
     */
    createCounter(name: string, options?: MetricOptions): Counter;
    /**
     * Create a {@link UpDownCounter} instrument.
     */
    createUpDownCounter(name: string, options?: MetricOptions): UpDownCounter;
    /**
     * Create a {@link ObservableGauge} instrument.
     */
    createObservableGauge(name: string, options?: MetricOptions): ObservableGauge;
    /**
     * Create a {@link ObservableCounter} instrument.
     */
    createObservableCounter(name: string, options?: MetricOptions): ObservableCounter;
    /**
     * Create a {@link ObservableUpDownCounter} instrument.
     */
    createObservableUpDownCounter(name: string, options?: MetricOptions): ObservableUpDownCounter;
    /**
     * @see {@link Meter.addBatchObservableCallback}
     */
    addBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void;
    /**
     * @see {@link Meter.removeBatchObservableCallback}
     */
    removeBatchObservableCallback(callback: BatchObservableCallback, observables: Observable[]): void;
}
//# sourceMappingURL=Meter.d.ts.map

View File

@@ -0,0 +1,89 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.Meter = void 0;
const InstrumentDescriptor_1 = require("./InstrumentDescriptor");
const Instruments_1 = require("./Instruments");
const MetricData_1 = require("./export/MetricData");
/**
 * This class implements the {@link IMeter} interface.
 */
class Meter {
    _meterSharedState;
    constructor(meterSharedState) {
        this._meterSharedState = meterSharedState;
    }
    /**
     * Build the descriptor for a synchronous instrument of `type` and
     * register its writable storage with the meter's shared state.
     */
    _registerSync(name, type, options) {
        const descriptor = (0, InstrumentDescriptor_1.createInstrumentDescriptor)(name, type, options);
        const storage = this._meterSharedState.registerMetricStorage(descriptor);
        return { descriptor, storage };
    }
    /**
     * Build the descriptor for an observable instrument of `type` and
     * register its asynchronous storages with the meter's shared state.
     */
    _registerAsync(name, type, options) {
        const descriptor = (0, InstrumentDescriptor_1.createInstrumentDescriptor)(name, type, options);
        const storages = this._meterSharedState.registerAsyncMetricStorage(descriptor);
        return { descriptor, storages };
    }
    /**
     * Create a {@link Gauge} instrument.
     */
    createGauge(name, options) {
        const { descriptor, storage } = this._registerSync(name, MetricData_1.InstrumentType.GAUGE, options);
        return new Instruments_1.GaugeInstrument(storage, descriptor);
    }
    /**
     * Create a {@link Histogram} instrument.
     */
    createHistogram(name, options) {
        const { descriptor, storage } = this._registerSync(name, MetricData_1.InstrumentType.HISTOGRAM, options);
        return new Instruments_1.HistogramInstrument(storage, descriptor);
    }
    /**
     * Create a {@link Counter} instrument.
     */
    createCounter(name, options) {
        const { descriptor, storage } = this._registerSync(name, MetricData_1.InstrumentType.COUNTER, options);
        return new Instruments_1.CounterInstrument(storage, descriptor);
    }
    /**
     * Create a {@link UpDownCounter} instrument.
     */
    createUpDownCounter(name, options) {
        const { descriptor, storage } = this._registerSync(name, MetricData_1.InstrumentType.UP_DOWN_COUNTER, options);
        return new Instruments_1.UpDownCounterInstrument(storage, descriptor);
    }
    /**
     * Create a {@link ObservableGauge} instrument.
     */
    createObservableGauge(name, options) {
        const { descriptor, storages } = this._registerAsync(name, MetricData_1.InstrumentType.OBSERVABLE_GAUGE, options);
        return new Instruments_1.ObservableGaugeInstrument(descriptor, storages, this._meterSharedState.observableRegistry);
    }
    /**
     * Create a {@link ObservableCounter} instrument.
     */
    createObservableCounter(name, options) {
        const { descriptor, storages } = this._registerAsync(name, MetricData_1.InstrumentType.OBSERVABLE_COUNTER, options);
        return new Instruments_1.ObservableCounterInstrument(descriptor, storages, this._meterSharedState.observableRegistry);
    }
    /**
     * Create a {@link ObservableUpDownCounter} instrument.
     */
    createObservableUpDownCounter(name, options) {
        const { descriptor, storages } = this._registerAsync(name, MetricData_1.InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, options);
        return new Instruments_1.ObservableUpDownCounterInstrument(descriptor, storages, this._meterSharedState.observableRegistry);
    }
    /**
     * @see {@link Meter.addBatchObservableCallback}
     */
    addBatchObservableCallback(callback, observables) {
        this._meterSharedState.observableRegistry.addBatchCallback(callback, observables);
    }
    /**
     * @see {@link Meter.removeBatchObservableCallback}
     */
    removeBatchObservableCallback(callback, observables) {
        this._meterSharedState.observableRegistry.removeBatchCallback(callback, observables);
    }
}
exports.Meter = Meter;
//# sourceMappingURL=Meter.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,40 @@
import type { MeterProvider as IMeterProvider, Meter as IMeter, MeterOptions } from '@opentelemetry/api';
import type { Resource } from '@opentelemetry/resources';
import type { IMetricReader } from './export/MetricReader';
import type { ForceFlushOptions, ShutdownOptions } from './types';
import type { ViewOptions } from './view/View';
/**
 * MeterProviderOptions provides an interface for configuring a MeterProvider.
 */
export interface MeterProviderOptions {
    /** Resource associated with metric telemetry */
    resource?: Resource;
    /** View options applied to matching instruments. */
    views?: ViewOptions[];
    /** Metric readers attached at construction time. */
    readers?: IMetricReader[];
}
/**
 * This class implements the {@link MeterProvider} interface.
 */
export declare class MeterProvider implements IMeterProvider {
    private _sharedState;
    // One-way flag: once shut down, the provider stays shut down.
    private _shutdown;
    constructor(options?: MeterProviderOptions);
    /**
     * Get a meter with the configuration of the MeterProvider.
     */
    getMeter(name: string, version?: string, options?: MeterOptions): IMeter;
    /**
     * Shut down the MeterProvider and all registered
     * MetricReaders.
     *
     * Returns a promise which is resolved when all flushes are complete.
     */
    shutdown(options?: ShutdownOptions): Promise<void>;
    /**
     * Notifies all registered MetricReaders to flush any buffered data.
     *
     * Returns a promise which is resolved when all flushes are complete.
     */
    forceFlush(options?: ForceFlushOptions): Promise<void>;
}
//# sourceMappingURL=MeterProvider.d.ts.map

View File

@@ -0,0 +1,82 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.MeterProvider = void 0;
const api_1 = require("@opentelemetry/api");
const resources_1 = require("@opentelemetry/resources");
const MeterProviderSharedState_1 = require("./state/MeterProviderSharedState");
const MetricCollector_1 = require("./state/MetricCollector");
const View_1 = require("./view/View");
/**
 * This class implements the {@link MeterProvider} interface.
 */
class MeterProvider {
    _sharedState;
    _shutdown = false;
    constructor(options) {
        const resource = options?.resource ?? (0, resources_1.defaultResource)();
        this._sharedState = new MeterProviderSharedState_1.MeterProviderSharedState(resource);
        // Register any configured views.
        for (const viewOption of options?.views ?? []) {
            this._sharedState.viewRegistry.addView(new View_1.View(viewOption));
        }
        // Wire each configured reader to a dedicated collector.
        for (const metricReader of options?.readers ?? []) {
            const collector = new MetricCollector_1.MetricCollector(this._sharedState, metricReader);
            metricReader.setMetricProducer(collector);
            this._sharedState.metricCollectors.push(collector);
        }
    }
    /**
     * Get a meter with the configuration of the MeterProvider.
     */
    getMeter(name, version = '', options = {}) {
        // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#meter-creation
        if (this._shutdown) {
            api_1.diag.warn('A shutdown MeterProvider cannot provide a Meter');
            return (0, api_1.createNoopMeter)();
        }
        const meterSharedState = this._sharedState.getMeterSharedState({
            name,
            version,
            schemaUrl: options.schemaUrl,
        });
        return meterSharedState.meter;
    }
    /**
     * Shut down the MeterProvider and all registered
     * MetricReaders.
     *
     * Returns a promise which is resolved when all flushes are complete.
     */
    async shutdown(options) {
        if (this._shutdown) {
            api_1.diag.warn('shutdown may only be called once per MeterProvider');
            return;
        }
        this._shutdown = true;
        const pending = this._sharedState.metricCollectors.map(collector => collector.shutdown(options));
        await Promise.all(pending);
    }
    /**
     * Notifies all registered MetricReaders to flush any buffered data.
     *
     * Returns a promise which is resolved when all flushes are complete.
     */
    async forceFlush(options) {
        // do not flush after shutdown
        if (this._shutdown) {
            api_1.diag.warn('invalid attempt to force flush after MeterProvider shutdown');
            return;
        }
        const pending = this._sharedState.metricCollectors.map(collector => collector.forceFlush(options));
        await Promise.all(pending);
    }
}
exports.MeterProvider = MeterProvider;
//# sourceMappingURL=MeterProvider.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,34 @@
import type { ObservableResult, Attributes, BatchObservableResult, Observable } from '@opentelemetry/api';
import { ValueType } from '@opentelemetry/api';
import { AttributeHashMap } from './state/HashMap';
import type { ObservableInstrument } from './Instruments';
/**
 * The class implements {@link ObservableResult} interface.
 */
export declare class ObservableResultImpl implements ObservableResult {
    /**
     * @internal
     * Buffered observations keyed by attribute set; one value per set.
     */
    _buffer: AttributeHashMap<number>;
    private _instrumentName;
    private _valueType;
    constructor(instrumentName: string, valueType: ValueType);
    /**
     * Observe a measurement of the value associated with the given attributes.
     */
    observe(value: number, attributes?: Attributes): void;
}
/**
 * The class implements {@link BatchObservableCallback} interface.
 */
export declare class BatchObservableResultImpl implements BatchObservableResult {
    /**
     * @internal
     * Buffered observations per observable instrument.
     */
    _buffer: Map<ObservableInstrument, AttributeHashMap<number>>;
    /**
     * Observe a measurement of the value associated with the given attributes.
     */
    observe(metric: Observable, value: number, attributes?: Attributes): void;
}
//# sourceMappingURL=ObservableResult.d.ts.map

View File

@@ -0,0 +1,82 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.BatchObservableResultImpl = exports.ObservableResultImpl = void 0;
const api_1 = require("@opentelemetry/api");
const HashMap_1 = require("./state/HashMap");
const Instruments_1 = require("./Instruments");
/**
 * The class implements {@link ObservableResult} interface.
 */
class ObservableResultImpl {
    /**
     * @internal
     * Buffered observations keyed by attribute set; a later observe() for
     * the same attributes overwrites the earlier value.
     */
    _buffer = new HashMap_1.AttributeHashMap();
    _instrumentName;
    _valueType;
    constructor(instrumentName, valueType) {
        this._instrumentName = instrumentName;
        this._valueType = valueType;
    }
    /**
     * Observe a measurement of the value associated with the given attributes.
     * Non-number values are dropped with a diagnostic warning; for INT
     * instruments, floats are truncated toward zero (non-finite values are
     * dropped entirely).
     */
    observe(value, attributes = {}) {
        if (typeof value !== 'number') {
            api_1.diag.warn(`non-number value provided to metric ${this._instrumentName}: ${value}`);
            return;
        }
        if (this._valueType === api_1.ValueType.INT && !Number.isInteger(value)) {
            api_1.diag.warn(`INT value type cannot accept a floating-point value for ${this._instrumentName}, ignoring the fractional digits.`);
            value = Math.trunc(value);
            // ignore non-finite values.
            if (!Number.isInteger(value)) {
                return;
            }
        }
        this._buffer.set(attributes, value);
    }
}
exports.ObservableResultImpl = ObservableResultImpl;
/**
 * The class implements {@link BatchObservableCallback} interface.
 */
class BatchObservableResultImpl {
    /**
     * @internal
     * Per-instrument buffers of observations keyed by attribute set.
     */
    _buffer = new Map();
    /**
     * Observe a measurement of the value associated with the given attributes.
     * Metrics that are not ObservableInstrument instances are ignored.
     * NOTE(review): the per-metric map is created and stored *before* the
     * value checks below, so a rejected value still leaves an empty map
     * registered for that metric in _buffer.
     */
    observe(metric, value, attributes = {}) {
        if (!(0, Instruments_1.isObservableInstrument)(metric)) {
            return;
        }
        let map = this._buffer.get(metric);
        if (map == null) {
            map = new HashMap_1.AttributeHashMap();
            this._buffer.set(metric, map);
        }
        if (typeof value !== 'number') {
            api_1.diag.warn(`non-number value provided to metric ${metric._descriptor.name}: ${value}`);
            return;
        }
        if (metric._descriptor.valueType === api_1.ValueType.INT &&
            !Number.isInteger(value)) {
            api_1.diag.warn(`INT value type cannot accept a floating-point value for ${metric._descriptor.name}, ignoring the fractional digits.`);
            value = Math.trunc(value);
            // ignore non-finite values.
            if (!Number.isInteger(value)) {
                return;
            }
        }
        map.set(attributes, value);
    }
}
exports.BatchObservableResultImpl = BatchObservableResultImpl;
//# sourceMappingURL=ObservableResult.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ObservableResult.js","sourceRoot":"","sources":["../../src/ObservableResult.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAQH,4CAAqD;AACrD,6CAAmD;AAEnD,+CAAuD;AAEvD;;GAEG;AACH,MAAa,oBAAoB;IAC/B;;OAEG;IACH,OAAO,GAAG,IAAI,0BAAgB,EAAU,CAAC;IACjC,eAAe,CAAS;IACxB,UAAU,CAAY;IAE9B,YAAY,cAAsB,EAAE,SAAoB;QACtD,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC;QACtC,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAED;;OAEG;IACH,OAAO,CAAC,KAAa,EAAE,aAAyB,EAAE;QAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,UAAI,CAAC,IAAI,CACP,uCAAuC,IAAI,CAAC,eAAe,KAAK,KAAK,EAAE,CACxE,CAAC;YACF,OAAO;SACR;QACD,IAAI,IAAI,CAAC,UAAU,KAAK,eAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YACjE,UAAI,CAAC,IAAI,CACP,2DAA2D,IAAI,CAAC,eAAe,mCAAmC,CACnH,CAAC;YACF,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC1B,4BAA4B;YAC5B,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;gBAC5B,OAAO;aACR;SACF;QACD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IACtC,CAAC;CACF;AAnCD,oDAmCC;AAED;;GAEG;AACH,MAAa,yBAAyB;IACpC;;OAEG;IACH,OAAO,GAAwD,IAAI,GAAG,EAAE,CAAC;IAEzE;;OAEG;IACH,OAAO,CACL,MAAkB,EAClB,KAAa,EACb,aAAyB,EAAE;QAE3B,IAAI,CAAC,IAAA,oCAAsB,EAAC,MAAM,CAAC,EAAE;YACnC,OAAO;SACR;QACD,IAAI,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QACnC,IAAI,GAAG,IAAI,IAAI,EAAE;YACf,GAAG,GAAG,IAAI,0BAAgB,EAAE,CAAC;YAC7B,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;SAC/B;QACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,UAAI,CAAC,IAAI,CACP,uCAAuC,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,KAAK,EAAE,CAC3E,CAAC;YACF,OAAO;SACR;QACD,IACE,MAAM,CAAC,WAAW,CAAC,SAAS,KAAK,eAAS,CAAC,GAAG;YAC9C,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EACxB;YACA,UAAI,CAAC,IAAI,CACP,2DAA2D,MAAM,CAAC,WAAW,CAAC,IAAI,mCAAmC,CACtH,CAAC;YACF,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC1B,4BAA4B;YAC5B,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;gBAC5B,OAAO;aACR;SACF;QACD,GAAG,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAC7B,CAAC;CACF;AA3CD,8DA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport 
type {\n ObservableResult,\n Attributes,\n BatchObservableResult,\n Observable,\n} from '@opentelemetry/api';\nimport { diag, ValueType } from '@opentelemetry/api';\nimport { AttributeHashMap } from './state/HashMap';\nimport type { ObservableInstrument } from './Instruments';\nimport { isObservableInstrument } from './Instruments';\n\n/**\n * The class implements {@link ObservableResult} interface.\n */\nexport class ObservableResultImpl implements ObservableResult {\n /**\n * @internal\n */\n _buffer = new AttributeHashMap<number>();\n private _instrumentName: string;\n private _valueType: ValueType;\n\n constructor(instrumentName: string, valueType: ValueType) {\n this._instrumentName = instrumentName;\n this._valueType = valueType;\n }\n\n /**\n * Observe a measurement of the value associated with the given attributes.\n */\n observe(value: number, attributes: Attributes = {}): void {\n if (typeof value !== 'number') {\n diag.warn(\n `non-number value provided to metric ${this._instrumentName}: ${value}`\n );\n return;\n }\n if (this._valueType === ValueType.INT && !Number.isInteger(value)) {\n diag.warn(\n `INT value type cannot accept a floating-point value for ${this._instrumentName}, ignoring the fractional digits.`\n );\n value = Math.trunc(value);\n // ignore non-finite values.\n if (!Number.isInteger(value)) {\n return;\n }\n }\n this._buffer.set(attributes, value);\n }\n}\n\n/**\n * The class implements {@link BatchObservableCallback} interface.\n */\nexport class BatchObservableResultImpl implements BatchObservableResult {\n /**\n * @internal\n */\n _buffer: Map<ObservableInstrument, AttributeHashMap<number>> = new Map();\n\n /**\n * Observe a measurement of the value associated with the given attributes.\n */\n observe(\n metric: Observable,\n value: number,\n attributes: Attributes = {}\n ): void {\n if (!isObservableInstrument(metric)) {\n return;\n }\n let map = this._buffer.get(metric);\n if (map == null) {\n map = new AttributeHashMap();\n 
this._buffer.set(metric, map);\n }\n if (typeof value !== 'number') {\n diag.warn(\n `non-number value provided to metric ${metric._descriptor.name}: ${value}`\n );\n return;\n }\n if (\n metric._descriptor.valueType === ValueType.INT &&\n !Number.isInteger(value)\n ) {\n diag.warn(\n `INT value type cannot accept a floating-point value for ${metric._descriptor.name}, ignoring the fractional digits.`\n );\n value = Math.trunc(value);\n // ignore non-finite values.\n if (!Number.isInteger(value)) {\n return;\n }\n }\n map.set(attributes, value);\n }\n}\n"]}

View File

@@ -0,0 +1,15 @@
import type { HrTime } from '@opentelemetry/api';
import type { AggregationTemporality } from '../export/AggregationTemporality';
import type { MetricData, MetricDescriptor } from '../export/MetricData';
import type { Maybe } from '../utils';
import type { Aggregator, AccumulationRecord } from './types';
import { AggregatorKind } from './types';
/** Basic aggregator for None which keeps no recorded value. */
export declare class DropAggregator implements Aggregator<undefined> {
    /** Identifies this aggregator as the DROP kind. */
    kind: AggregatorKind.DROP;
    /** Produces no accumulation; dropped instruments record nothing. */
    createAccumulation(): undefined;
    /** Merging two empty accumulations yields nothing. */
    merge(_previous: undefined, _delta: undefined): undefined;
    /** Diffing two empty accumulations yields nothing. */
    diff(_previous: undefined, _current: undefined): undefined;
    /** Always yields no metric data, so nothing is ever exported. */
    toMetricData(_descriptor: MetricDescriptor, _aggregationTemporality: AggregationTemporality, _accumulationByAttributes: AccumulationRecord<undefined>[], _endTime: HrTime): Maybe<MetricData>;
}
//# sourceMappingURL=Drop.d.ts.map

View File

@@ -0,0 +1,26 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.DropAggregator = void 0;
const types_1 = require("./types");
/**
 * Aggregator backing the "Drop" aggregation: every operation is a no-op,
 * so no measurement is ever retained or exported.
 */
class DropAggregator {
    kind = types_1.AggregatorKind.DROP;
    /** No state is kept, so no accumulation is created. */
    createAccumulation() {
        // Implicit `undefined` return.
    }
    /** Merging empty accumulations produces nothing. */
    merge(_previous, _delta) {
        // Implicit `undefined` return.
    }
    /** Diffing empty accumulations produces nothing. */
    diff(_previous, _current) {
        // Implicit `undefined` return.
    }
    /** Dropped instruments never emit metric data. */
    toMetricData(_descriptor, _aggregationTemporality, _accumulationByAttributes, _endTime) {
        // Implicit `undefined` return.
    }
}
exports.DropAggregator = DropAggregator;
//# sourceMappingURL=Drop.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"Drop.js","sourceRoot":"","sources":["../../../src/aggregator/Drop.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAOH,mCAAyC;AAEzC,+DAA+D;AAC/D,MAAa,cAAc;IACzB,IAAI,GAAwB,sBAAc,CAAC,IAAI,CAAC;IAEhD,kBAAkB;QAChB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,KAAK,CAAC,SAAoB,EAAE,MAAiB;QAC3C,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,IAAI,CAAC,SAAoB,EAAE,QAAmB;QAC5C,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,YAAY,CACV,WAA6B,EAC7B,uBAA+C,EAC/C,yBAA0D,EAC1D,QAAgB;QAEhB,OAAO,SAAS,CAAC;IACnB,CAAC;CACF;AAvBD,wCAuBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { HrTime } from '@opentelemetry/api';\nimport type { AggregationTemporality } from '../export/AggregationTemporality';\nimport type { MetricData, MetricDescriptor } from '../export/MetricData';\nimport type { Maybe } from '../utils';\nimport type { Aggregator, AccumulationRecord } from './types';\nimport { AggregatorKind } from './types';\n\n/** Basic aggregator for None which keeps no recorded value. */\nexport class DropAggregator implements Aggregator<undefined> {\n kind: AggregatorKind.DROP = AggregatorKind.DROP;\n\n createAccumulation() {\n return undefined;\n }\n\n merge(_previous: undefined, _delta: undefined) {\n return undefined;\n }\n\n diff(_previous: undefined, _current: undefined) {\n return undefined;\n }\n\n toMetricData(\n _descriptor: MetricDescriptor,\n _aggregationTemporality: AggregationTemporality,\n _accumulationByAttributes: AccumulationRecord<undefined>[],\n _endTime: HrTime\n ): Maybe<MetricData> {\n return undefined;\n }\n}\n"]}

View File

@@ -0,0 +1,178 @@
import type { Accumulation, AccumulationRecord, Aggregator, ExponentialHistogram } from './types';
import { AggregatorKind } from './types';
import type { ExponentialHistogramMetricData } from '../export/MetricData';
import type { HrTime } from '@opentelemetry/api';
import type { Maybe } from '../utils';
import type { AggregationTemporality } from '../export/AggregationTemporality';
import type { InstrumentDescriptor } from '../InstrumentDescriptor';
import { Buckets } from './exponential-histogram/Buckets';
import type { Mapping } from './exponential-histogram/mapping/types';
/**
 * Internal value type for ExponentialHistogramAggregation.
 * Differs from the exported type as undefined sum/min/max complicate arithmetic
 * performed by this aggregation, but are required to be undefined in the exported types.
 */
interface InternalHistogram extends ExponentialHistogram {
    /** True when min/max were recorded for this histogram. */
    hasMinMax: boolean;
    /** Smallest recorded value; only meaningful when hasMinMax is true. */
    min: number;
    /** Largest recorded value; only meaningful when hasMinMax is true. */
    max: number;
    /** Arithmetic sum of all recorded values. */
    sum: number;
}
export declare class ExponentialHistogramAccumulation implements Accumulation {
    /** Start of the time window covered by this accumulation. */
    startTime: HrTime;
    /** Maximum bucket count for each of the positive and negative ranges. */
    private _maxSize;
    /** Whether min/max are tracked alongside sum and count. */
    private _recordMinMax;
    private _sum;
    private _count;
    /** Zero values are counted separately; they fall in no exponential bucket. */
    private _zeroCount;
    private _min;
    private _max;
    /** Buckets for positive values. */
    private _positive;
    /** Buckets for negative values, keyed by absolute value. */
    private _negative;
    /** Value-to-bucket-index mapping for the current scale. */
    private _mapping;
    /** All parameters after `startTime` default to an empty accumulation. */
    constructor(startTime: HrTime, maxSize?: number, recordMinMax?: boolean, sum?: number, count?: number, zeroCount?: number, min?: number, max?: number, positive?: Buckets, negative?: Buckets, mapping?: Mapping);
    /**
     * record updates a histogram with a single count
     * @param {Number} value
     */
    record(value: number): void;
    /**
     * Sets the start time for this accumulation
     * @param {HrTime} startTime
     */
    setStartTime(startTime: HrTime): void;
    /**
     * Returns the datapoint representation of this accumulation
     */
    toPointValue(): InternalHistogram;
    /**
     * @returns {Number} The sum of values recorded by this accumulation
     */
    get sum(): number;
    /**
     * @returns {Number} The minimum value recorded by this accumulation
     */
    get min(): number;
    /**
     * @returns {Number} The maximum value recorded by this accumulation
     */
    get max(): number;
    /**
     * @returns {Number} The count of values recorded by this accumulation
     */
    get count(): number;
    /**
     * @returns {Number} The number of 0 values recorded by this accumulation
     */
    get zeroCount(): number;
    /**
     * @returns {Number} The scale used by this accumulation
     */
    get scale(): number;
    /**
     * positive holds the positive values
     * @returns {Buckets}
     */
    get positive(): Buckets;
    /**
     * negative holds the negative values by their absolute value
     * @returns {Buckets}
     */
    get negative(): Buckets;
    /**
     * updateByIncrement supports updating a histogram with a non-negative
     * increment.
     * @param value
     * @param increment
     */
    updateByIncrement(value: number, increment: number): void;
    /**
     * merge combines data from previous value into self
     * @param {ExponentialHistogramAccumulation} previous
     */
    merge(previous: ExponentialHistogramAccumulation): void;
    /**
     * diff subtracts other from self
     * @param {ExponentialHistogramAccumulation} other
     */
    diff(other: ExponentialHistogramAccumulation): void;
    /**
     * clone returns a deep copy of self
     * @returns {ExponentialHistogramAccumulation}
     */
    clone(): ExponentialHistogramAccumulation;
    /**
     * _updateBuckets maps the incoming value to a bucket index for the current
     * scale. If the bucket index is outside of the range of the backing array,
     * it will rescale the backing array and update the mapping for the new scale.
     */
    private _updateBuckets;
    /**
     * _incrementIndexBy increments the count of the bucket specified by `index`.
     * If the index is outside of the range [buckets.indexStart, buckets.indexEnd]
     * the boundaries of the backing array will be adjusted and more buckets will
     * be added if needed.
     */
    private _incrementIndexBy;
    /**
     * _grow resizes the backing array by doubling in size up to maxSize.
     * This extends the array with a bunch of zeros and copies the
     * existing counts to the same position.
     */
    private _grow;
    /**
     * _changeScale computes how much downscaling is needed by shifting the
     * high and low values until they are separated by no more than size.
     */
    private _changeScale;
    /**
     * _downscale subtracts `change` from the current mapping scale.
     */
    private _downscale;
    /**
     * _minScale is used by diff and merge to compute an ideal combined scale
     */
    private _minScale;
    /**
     * _highLowAtScale is used by diff and merge to compute an ideal combined scale.
     */
    private _highLowAtScale;
    /**
     * _mergeBuckets translates index values from another histogram and
     * adds the values into the corresponding buckets of this histogram.
     */
    private _mergeBuckets;
    /**
     * _diffBuckets translates index values from another histogram and
     * subtracts the values in the corresponding buckets of this histogram.
     */
    private _diffBuckets;
}
/**
 * Aggregator for ExponentialHistogramAccumulations
 */
export declare class ExponentialHistogramAggregator implements Aggregator<ExponentialHistogramAccumulation> {
    /** Identifies this aggregator as the EXPONENTIAL_HISTOGRAM kind. */
    kind: AggregatorKind.EXPONENTIAL_HISTOGRAM;
    /** Maximum bucket count for each of the positive and negative ranges. */
    readonly _maxSize: number;
    /** Whether accumulations created by this aggregator record min/max. */
    private readonly _recordMinMax;
    /**
     * @param _maxSize Maximum number of buckets for each of the positive
     * and negative ranges, exclusive of the zero-bucket.
     * @param _recordMinMax If set to true, min and max will be recorded.
     * Otherwise, min and max will not be recorded.
     */
    constructor(maxSize: number, recordMinMax: boolean);
    /** Creates an empty accumulation configured like this aggregator. */
    createAccumulation(startTime: HrTime): ExponentialHistogramAccumulation;
    /**
     * Return the result of the merge of two exponential histogram accumulations.
     */
    merge(previous: ExponentialHistogramAccumulation, delta: ExponentialHistogramAccumulation): ExponentialHistogramAccumulation;
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     */
    diff(previous: ExponentialHistogramAccumulation, current: ExponentialHistogramAccumulation): ExponentialHistogramAccumulation;
    /** Converts accumulations into exported exponential histogram metric data. */
    toMetricData(descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord<ExponentialHistogramAccumulation>[], endTime: HrTime): Maybe<ExponentialHistogramMetricData>;
}
export {};
//# sourceMappingURL=ExponentialHistogram.d.ts.map

View File

@@ -0,0 +1,469 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExponentialHistogramAggregator = exports.ExponentialHistogramAccumulation = void 0;
const types_1 = require("./types");
const MetricData_1 = require("../export/MetricData");
const api_1 = require("@opentelemetry/api");
const Buckets_1 = require("./exponential-histogram/Buckets");
const getMapping_1 = require("./exponential-histogram/mapping/getMapping");
const util_1 = require("./exponential-histogram/util");
/**
 * HighLow is a small (low, high) value pair used when computing a common
 * scale for two exponential histogram accumulations.
 */
class HighLow {
    /** Returns the envelope of two ranges: the lowest low and highest high. */
    static combine(first, second) {
        const low = Math.min(first.low, second.low);
        const high = Math.max(first.high, second.high);
        return new HighLow(low, high);
    }
    low;
    high;
    constructor(low, high) {
        this.low = low;
        this.high = high;
    }
}
// Upper bound on the mapping scale and sizing defaults for the backing buckets.
const MAX_SCALE = 20;
const DEFAULT_MAX_SIZE = 160;
const MIN_MAX_SIZE = 2;
/**
 * Mutable state accumulating measurements of an exponential histogram for a
 * single attribute set. Positive and negative values are bucketed separately
 * (negatives by absolute value); zeros are counted on their own. When a value
 * cannot be represented at the current scale within `_maxSize` buckets, the
 * accumulation downscales (reduces resolution) until it fits.
 */
class ExponentialHistogramAccumulation {
    // Start of the time window this accumulation covers.
    startTime;
    // Maximum bucket count for each of the positive and negative ranges.
    _maxSize;
    // Whether min/max are tracked alongside sum and count.
    _recordMinMax;
    _sum;
    _count;
    // Zero values are counted separately; they fall in no exponential bucket.
    _zeroCount;
    _min;
    _max;
    // Buckets for positive values, and for negative values by absolute value.
    _positive;
    _negative;
    // Value-to-bucket-index mapping for the current scale.
    _mapping;
    constructor(startTime, maxSize = DEFAULT_MAX_SIZE, recordMinMax = true, sum = 0, count = 0, zeroCount = 0, min = Number.POSITIVE_INFINITY, max = Number.NEGATIVE_INFINITY, positive = new Buckets_1.Buckets(), negative = new Buckets_1.Buckets(), mapping = (0, getMapping_1.getMapping)(MAX_SCALE)) {
        this.startTime = startTime;
        this._maxSize = maxSize;
        this._recordMinMax = recordMinMax;
        this._sum = sum;
        this._count = count;
        this._zeroCount = zeroCount;
        this._min = min;
        this._max = max;
        this._positive = positive;
        this._negative = negative;
        this._mapping = mapping;
        // Sizes below MIN_MAX_SIZE are not usable; clamp and warn.
        if (this._maxSize < MIN_MAX_SIZE) {
            api_1.diag.warn(`Exponential Histogram Max Size set to ${this._maxSize}, \
changing to the minimum size of: ${MIN_MAX_SIZE}`);
            this._maxSize = MIN_MAX_SIZE;
        }
    }
    /**
     * record updates a histogram with a single count
     * @param {Number} value
     */
    record(value) {
        this.updateByIncrement(value, 1);
    }
    /**
     * Sets the start time for this accumulation
     * @param {HrTime} startTime
     */
    setStartTime(startTime) {
        this.startTime = startTime;
    }
    /**
     * Returns the datapoint representation of this accumulation
     */
    toPointValue() {
        return {
            hasMinMax: this._recordMinMax,
            min: this.min,
            max: this.max,
            sum: this.sum,
            positive: {
                offset: this.positive.offset,
                bucketCounts: this.positive.counts(),
            },
            negative: {
                offset: this.negative.offset,
                bucketCounts: this.negative.counts(),
            },
            count: this.count,
            scale: this.scale,
            zeroCount: this.zeroCount,
        };
    }
    /**
     * @returns {Number} The sum of values recorded by this accumulation
     */
    get sum() {
        return this._sum;
    }
    /**
     * @returns {Number} The minimum value recorded by this accumulation
     */
    get min() {
        return this._min;
    }
    /**
     * @returns {Number} The maximum value recorded by this accumulation
     */
    get max() {
        return this._max;
    }
    /**
     * @returns {Number} The count of values recorded by this accumulation
     */
    get count() {
        return this._count;
    }
    /**
     * @returns {Number} The number of 0 values recorded by this accumulation
     */
    get zeroCount() {
        return this._zeroCount;
    }
    /**
     * @returns {Number} The scale used by this accumulation
     */
    get scale() {
        if (this._count === this._zeroCount) {
            // all zeros! scale doesn't matter, use zero
            return 0;
        }
        return this._mapping.scale;
    }
    /**
     * positive holds the positive values
     * @returns {Buckets}
     */
    get positive() {
        return this._positive;
    }
    /**
     * negative holds the negative values by their absolute value
     * @returns {Buckets}
     */
    get negative() {
        return this._negative;
    }
    /**
     * updateByIncrement supports updating a histogram with a non-negative
     * increment.
     * @param value
     * @param increment
     */
    updateByIncrement(value, increment) {
        // NaN does not fall into any bucket, is not zero and should not be counted,
        // NaN is never greater than max nor less than min, therefore return as there's nothing for us to do.
        if (Number.isNaN(value)) {
            return;
        }
        if (value > this._max) {
            this._max = value;
        }
        if (value < this._min) {
            this._min = value;
        }
        this._count += increment;
        if (value === 0) {
            this._zeroCount += increment;
            return;
        }
        this._sum += value * increment;
        if (value > 0) {
            this._updateBuckets(this._positive, value, increment);
        }
        else {
            this._updateBuckets(this._negative, -value, increment);
        }
    }
    /**
     * merge combines data from previous value into self
     * @param {ExponentialHistogramAccumulation} previous
     */
    merge(previous) {
        if (this._count === 0) {
            this._min = previous.min;
            this._max = previous.max;
        }
        else if (previous.count !== 0) {
            if (previous.min < this.min) {
                this._min = previous.min;
            }
            if (previous.max > this.max) {
                this._max = previous.max;
            }
        }
        // The merged accumulation covers the window starting at `previous`.
        this.startTime = previous.startTime;
        this._sum += previous.sum;
        this._count += previous.count;
        this._zeroCount += previous.zeroCount;
        // Re-bucket both histograms at a common scale before combining counts.
        const minScale = this._minScale(previous);
        this._downscale(this.scale - minScale);
        this._mergeBuckets(this.positive, previous, previous.positive, minScale);
        this._mergeBuckets(this.negative, previous, previous.negative, minScale);
    }
    /**
     * diff subtracts other from self
     * @param {ExponentialHistogramAccumulation} other
     */
    diff(other) {
        // min/max cannot be derived from a difference; reset them to unset.
        this._min = Infinity;
        this._max = -Infinity;
        this._sum -= other.sum;
        this._count -= other.count;
        this._zeroCount -= other.zeroCount;
        // Re-bucket both histograms at a common scale before subtracting counts.
        const minScale = this._minScale(other);
        this._downscale(this.scale - minScale);
        this._diffBuckets(this.positive, other, other.positive, minScale);
        this._diffBuckets(this.negative, other, other.negative, minScale);
    }
    /**
     * clone returns a deep copy of self
     * @returns {ExponentialHistogramAccumulation}
     */
    clone() {
        return new ExponentialHistogramAccumulation(this.startTime, this._maxSize, this._recordMinMax, this._sum, this._count, this._zeroCount, this._min, this._max, this.positive.clone(), this.negative.clone(), this._mapping);
    }
    /**
     * _updateBuckets maps the incoming value to a bucket index for the current
     * scale. If the bucket index is outside of the range of the backing array,
     * it will rescale the backing array and update the mapping for the new scale.
     */
    _updateBuckets(buckets, value, increment) {
        let index = this._mapping.mapToIndex(value);
        // rescale the mapping if needed
        let rescalingNeeded = false;
        let high = 0;
        let low = 0;
        if (buckets.length === 0) {
            buckets.indexStart = index;
            buckets.indexEnd = buckets.indexStart;
            buckets.indexBase = buckets.indexStart;
        }
        else if (index < buckets.indexStart &&
            buckets.indexEnd - index >= this._maxSize) {
            rescalingNeeded = true;
            low = index;
            high = buckets.indexEnd;
        }
        else if (index > buckets.indexEnd &&
            index - buckets.indexStart >= this._maxSize) {
            rescalingNeeded = true;
            low = buckets.indexStart;
            high = index;
        }
        // rescale and compute index at new scale
        if (rescalingNeeded) {
            const change = this._changeScale(high, low);
            this._downscale(change);
            index = this._mapping.mapToIndex(value);
        }
        this._incrementIndexBy(buckets, index, increment);
    }
    /**
     * _incrementIndexBy increments the count of the bucket specified by `index`.
     * If the index is outside of the range [buckets.indexStart, buckets.indexEnd]
     * the boundaries of the backing array will be adjusted and more buckets will
     * be added if needed.
     */
    _incrementIndexBy(buckets, index, increment) {
        if (increment === 0) {
            // nothing to do for a zero increment, can happen during a merge operation
            return;
        }
        if (buckets.length === 0) {
            buckets.indexStart = buckets.indexEnd = buckets.indexBase = index;
        }
        if (index < buckets.indexStart) {
            const span = buckets.indexEnd - index;
            if (span >= buckets.backing.length) {
                this._grow(buckets, span + 1);
            }
            buckets.indexStart = index;
        }
        else if (index > buckets.indexEnd) {
            const span = index - buckets.indexStart;
            if (span >= buckets.backing.length) {
                this._grow(buckets, span + 1);
            }
            buckets.indexEnd = index;
        }
        // The backing array is addressed relative to indexBase; negative
        // offsets wrap around to the end of the array.
        let bucketIndex = index - buckets.indexBase;
        if (bucketIndex < 0) {
            bucketIndex += buckets.backing.length;
        }
        buckets.incrementBucket(bucketIndex, increment);
    }
    /**
     * _grow resizes the backing array by doubling in size up to maxSize.
     * This extends the array with a bunch of zeros and copies the
     * existing counts to the same position.
     */
    _grow(buckets, needed) {
        const size = buckets.backing.length;
        const bias = buckets.indexBase - buckets.indexStart;
        const oldPositiveLimit = size - bias;
        let newSize = (0, util_1.nextGreaterSquare)(needed);
        if (newSize > this._maxSize) {
            newSize = this._maxSize;
        }
        const newPositiveLimit = newSize - bias;
        buckets.backing.growTo(newSize, oldPositiveLimit, newPositiveLimit);
    }
    /**
     * _changeScale computes how much downscaling is needed by shifting the
     * high and low values until they are separated by no more than size.
     */
    _changeScale(high, low) {
        let change = 0;
        while (high - low >= this._maxSize) {
            high >>= 1;
            low >>= 1;
            change++;
        }
        return change;
    }
    /**
     * _downscale subtracts `change` from the current mapping scale.
     */
    _downscale(change) {
        if (change === 0) {
            return;
        }
        if (change < 0) {
            // Note: this should be impossible. If we get here it's because
            // there is a bug in the implementation.
            throw new Error(`impossible change of scale: ${this.scale}`);
        }
        const newScale = this._mapping.scale - change;
        this._positive.downscale(change);
        this._negative.downscale(change);
        this._mapping = (0, getMapping_1.getMapping)(newScale);
    }
    /**
     * _minScale is used by diff and merge to compute an ideal combined scale
     */
    _minScale(other) {
        const minScale = Math.min(this.scale, other.scale);
        const highLowPos = HighLow.combine(this._highLowAtScale(this.positive, this.scale, minScale), this._highLowAtScale(other.positive, other.scale, minScale));
        const highLowNeg = HighLow.combine(this._highLowAtScale(this.negative, this.scale, minScale), this._highLowAtScale(other.negative, other.scale, minScale));
        return Math.min(minScale - this._changeScale(highLowPos.high, highLowPos.low), minScale - this._changeScale(highLowNeg.high, highLowNeg.low));
    }
    /**
     * _highLowAtScale is used by diff and merge to compute an ideal combined scale.
     */
    _highLowAtScale(buckets, currentScale, newScale) {
        if (buckets.length === 0) {
            return new HighLow(0, -1);
        }
        const shift = currentScale - newScale;
        return new HighLow(buckets.indexStart >> shift, buckets.indexEnd >> shift);
    }
    /**
     * _mergeBuckets translates index values from another histogram and
     * adds the values into the corresponding buckets of this histogram.
     */
    _mergeBuckets(ours, other, theirs, scale) {
        const theirOffset = theirs.offset;
        const theirChange = other.scale - scale;
        for (let i = 0; i < theirs.length; i++) {
            this._incrementIndexBy(ours, (theirOffset + i) >> theirChange, theirs.at(i));
        }
    }
    /**
     * _diffBuckets translates index values from another histogram and
     * subtracts the values in the corresponding buckets of this histogram.
     */
    _diffBuckets(ours, other, theirs, scale) {
        const theirOffset = theirs.offset;
        const theirChange = other.scale - scale;
        for (let i = 0; i < theirs.length; i++) {
            const ourIndex = (theirOffset + i) >> theirChange;
            let bucketIndex = ourIndex - ours.indexBase;
            if (bucketIndex < 0) {
                bucketIndex += ours.backing.length;
            }
            ours.decrementBucket(bucketIndex, theirs.at(i));
        }
        ours.trim();
    }
}
exports.ExponentialHistogramAccumulation = ExponentialHistogramAccumulation;
/**
 * Aggregator producing ExponentialHistogramAccumulations and converting
 * them into exported exponential histogram metric data.
 */
class ExponentialHistogramAggregator {
    kind = types_1.AggregatorKind.EXPONENTIAL_HISTOGRAM;
    _maxSize;
    _recordMinMax;
    /**
     * @param _maxSize Maximum number of buckets for each of the positive
     * and negative ranges, exclusive of the zero-bucket.
     * @param _recordMinMax If set to true, min and max will be recorded.
     * Otherwise, min and max will not be recorded.
     */
    constructor(maxSize, recordMinMax) {
        this._maxSize = maxSize;
        this._recordMinMax = recordMinMax;
    }
    /** Creates an empty accumulation configured like this aggregator. */
    createAccumulation(startTime) {
        return new ExponentialHistogramAccumulation(startTime, this._maxSize, this._recordMinMax);
    }
    /**
     * Return the result of the merge of two exponential histogram accumulations.
     */
    merge(previous, delta) {
        const merged = delta.clone();
        merged.merge(previous);
        return merged;
    }
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     */
    diff(previous, current) {
        const difference = current.clone();
        difference.diff(previous);
        return difference;
    }
    toMetricData(descriptor, aggregationTemporality, accumulationByAttributes, endTime) {
        // Instrument types that may record negative values; for these the sum
        // is omitted from the exported data point.
        const negativeCapableTypes = [
            MetricData_1.InstrumentType.GAUGE,
            MetricData_1.InstrumentType.UP_DOWN_COUNTER,
            MetricData_1.InstrumentType.OBSERVABLE_GAUGE,
            MetricData_1.InstrumentType.OBSERVABLE_UP_DOWN_COUNTER,
        ];
        const allowsNegativeValues = negativeCapableTypes.includes(descriptor.type);
        return {
            descriptor,
            aggregationTemporality,
            dataPointType: MetricData_1.DataPointType.EXPONENTIAL_HISTOGRAM,
            dataPoints: accumulationByAttributes.map(([attributes, accumulation]) => {
                const point = accumulation.toPointValue();
                return {
                    attributes,
                    startTime: accumulation.startTime,
                    endTime,
                    value: {
                        min: point.hasMinMax ? point.min : undefined,
                        max: point.hasMinMax ? point.max : undefined,
                        sum: !allowsNegativeValues ? point.sum : undefined,
                        positive: {
                            offset: point.positive.offset,
                            bucketCounts: point.positive.bucketCounts,
                        },
                        negative: {
                            offset: point.negative.offset,
                            bucketCounts: point.negative.bucketCounts,
                        },
                        count: point.count,
                        scale: point.scale,
                        zeroCount: point.zeroCount,
                    },
                };
            }),
        };
    }
}
exports.ExponentialHistogramAggregator = ExponentialHistogramAggregator;
//# sourceMappingURL=ExponentialHistogram.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,61 @@
import type { Accumulation, AccumulationRecord, Aggregator } from './types';
import { AggregatorKind } from './types';
import type { HistogramMetricData } from '../export/MetricData';
import type { HrTime } from '@opentelemetry/api';
import type { Maybe } from '../utils';
import type { AggregationTemporality } from '../export/AggregationTemporality';
import type { InstrumentDescriptor } from '../InstrumentDescriptor';
/**
 * Internal value type for HistogramAggregation.
 * Differs from the exported type as undefined sum/min/max complicate arithmetic
 * performed by this aggregation, but are required to be undefined in the exported types.
 */
interface InternalHistogram {
    /** Bucket upper bounds plus one observation counter per bucket. */
    buckets: {
        boundaries: number[];
        counts: number[];
    };
    /** Arithmetic sum of all recorded values. */
    sum: number;
    /** Total number of recorded values. */
    count: number;
    /** True when min/max have been recorded. */
    hasMinMax: boolean;
    /** Smallest recorded value; only meaningful when hasMinMax is true. */
    min: number;
    /** Largest recorded value; only meaningful when hasMinMax is true. */
    max: number;
}
export declare class HistogramAccumulation implements Accumulation {
    /** Start of the time window covered by this accumulation. */
    startTime: HrTime;
    /** Sorted upper bounds of the histogram buckets. */
    private readonly _boundaries;
    /** Whether min/max are tracked alongside sum and count. */
    private _recordMinMax;
    /** Running checkpoint that record() folds measurements into. */
    private _current;
    /** `current` defaults to a zeroed checkpoint for `boundaries`. */
    constructor(startTime: HrTime, boundaries: number[], recordMinMax?: boolean, current?: InternalHistogram);
    /** Folds a single measurement into the running checkpoint; NaN is ignored. */
    record(value: number): void;
    /** Re-bases this accumulation onto a new start time. */
    setStartTime(startTime: HrTime): void;
    /** Returns the raw internal checkpoint. */
    toPointValue(): InternalHistogram;
}
/**
 * Basic aggregator which observes events and counts them in pre-defined buckets
 * and provides the total sum and count of all observations.
 */
export declare class HistogramAggregator implements Aggregator<HistogramAccumulation> {
    /** Identifies this aggregator as the HISTOGRAM kind. */
    kind: AggregatorKind.HISTOGRAM;
    private readonly _boundaries;
    private readonly _recordMinMax;
    /**
     * @param _boundaries sorted upper bounds of recorded values.
     * @param _recordMinMax If set to true, min and max will be recorded. Otherwise, min and max will not be recorded.
     */
    constructor(boundaries: number[], recordMinMax: boolean);
    /** Creates a fresh accumulation sharing this aggregator's boundaries. */
    createAccumulation(startTime: HrTime): HistogramAccumulation;
    /**
     * Return the result of the merge of two histogram accumulations. As long as one Aggregator
     * instance produces all Accumulations with constant boundaries we don't need to worry about
     * merging accumulations with different boundaries.
     */
    merge(previous: HistogramAccumulation, delta: HistogramAccumulation): HistogramAccumulation;
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     */
    diff(previous: HistogramAccumulation, current: HistogramAccumulation): HistogramAccumulation;
    /** Converts accumulations into exported histogram metric data points. */
    toMetricData(descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord<HistogramAccumulation>[], endTime: HrTime): Maybe<HistogramMetricData>;
}
export {};
//# sourceMappingURL=Histogram.d.ts.map

View File

@@ -0,0 +1,176 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.HistogramAggregator = exports.HistogramAccumulation = void 0;
const types_1 = require("./types");
const MetricData_1 = require("../export/MetricData");
const utils_1 = require("../utils");
/**
 * Builds a zeroed histogram checkpoint for the given bucket boundaries.
 * The counts array has one slot per boundary plus one final overflow slot.
 */
function createNewEmptyCheckpoint(boundaries) {
    // One counter per boundary, plus one for values above the last boundary.
    const counts = new Array(boundaries.length + 1).fill(0);
    return {
        buckets: {
            boundaries,
            counts,
        },
        sum: 0,
        count: 0,
        hasMinMax: false,
        min: Infinity,
        max: -Infinity,
    };
}
/**
 * Accumulates measurements into fixed-boundary histogram state for one
 * attribute set, beginning at `startTime`.
 */
class HistogramAccumulation {
    startTime;
    _boundaries;
    _recordMinMax;
    _current;
    constructor(startTime, boundaries, recordMinMax = true, current = createNewEmptyCheckpoint(boundaries)) {
        this.startTime = startTime;
        this._boundaries = boundaries;
        this._recordMinMax = recordMinMax;
        this._current = current;
    }
    /** Folds a single measurement into the running checkpoint. */
    record(value) {
        // NaN belongs to no bucket, is not zero, and never compares greater
        // than max nor less than min — nothing to do for it.
        if (Number.isNaN(value)) {
            return;
        }
        const state = this._current;
        state.count += 1;
        state.sum += value;
        if (this._recordMinMax) {
            state.min = Math.min(value, state.min);
            state.max = Math.max(value, state.max);
            state.hasMinMax = true;
        }
        // Locate the destination bucket for this value via binary search.
        const bucket = (0, utils_1.binarySearchUB)(this._boundaries, value);
        state.buckets.counts[bucket] += 1;
    }
    /** Re-bases this accumulation onto a new start time. */
    setStartTime(startTime) {
        this.startTime = startTime;
    }
    /** Exposes the raw internal checkpoint. */
    toPointValue() {
        return this._current;
    }
}
exports.HistogramAccumulation = HistogramAccumulation;
/**
 * Explicit-boundary histogram aggregator: counts observations in pre-defined
 * buckets and provides the total sum and count of all observations.
 */
class HistogramAggregator {
    kind = types_1.AggregatorKind.HISTOGRAM;
    _boundaries;
    _recordMinMax;
    /**
     * @param _boundaries sorted upper bounds of recorded values.
     * @param _recordMinMax If set to true, min and max will be recorded. Otherwise, min and max will not be recorded.
     */
    constructor(boundaries, recordMinMax) {
        this._boundaries = boundaries;
        this._recordMinMax = recordMinMax;
    }
    /** Creates a fresh accumulation sharing this aggregator's boundaries. */
    createAccumulation(startTime) {
        return new HistogramAccumulation(startTime, this._boundaries, this._recordMinMax);
    }
    /**
     * Return the result of the merge of two histogram accumulations. As long as one Aggregator
     * instance produces all Accumulations with constant boundaries we don't need to worry about
     * merging accumulations with different boundaries.
     */
    merge(previous, delta) {
        const prevPoint = previous.toPointValue();
        const deltaPoint = delta.toPointValue();
        // Bucket-wise sum; both checkpoints share the same boundaries.
        const summedCounts = prevPoint.buckets.counts.map((count, idx) => count + deltaPoint.buckets.counts[idx]);
        let min = Infinity;
        let max = -Infinity;
        if (this._recordMinMax) {
            if (prevPoint.hasMinMax && deltaPoint.hasMinMax) {
                min = Math.min(prevPoint.min, deltaPoint.min);
                max = Math.max(prevPoint.max, deltaPoint.max);
            }
            else if (prevPoint.hasMinMax) {
                min = prevPoint.min;
                max = prevPoint.max;
            }
            else if (deltaPoint.hasMinMax) {
                min = deltaPoint.min;
                max = deltaPoint.max;
            }
        }
        return new HistogramAccumulation(previous.startTime, prevPoint.buckets.boundaries, this._recordMinMax, {
            buckets: {
                boundaries: prevPoint.buckets.boundaries,
                counts: summedCounts,
            },
            count: prevPoint.count + deltaPoint.count,
            sum: prevPoint.sum + deltaPoint.sum,
            hasMinMax: this._recordMinMax &&
                (prevPoint.hasMinMax || deltaPoint.hasMinMax),
            min: min,
            max: max,
        });
    }
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     */
    diff(previous, current) {
        const prevPoint = previous.toPointValue();
        const currPoint = current.toPointValue();
        // Bucket-wise difference between the two cumulative checkpoints.
        const diffedCounts = prevPoint.buckets.counts.map((count, idx) => currPoint.buckets.counts[idx] - count);
        // min/max cannot be derived from a difference, so they stay unset.
        return new HistogramAccumulation(current.startTime, prevPoint.buckets.boundaries, this._recordMinMax, {
            buckets: {
                boundaries: prevPoint.buckets.boundaries,
                counts: diffedCounts,
            },
            count: currPoint.count - prevPoint.count,
            sum: currPoint.sum - prevPoint.sum,
            hasMinMax: false,
            min: Infinity,
            max: -Infinity,
        });
    }
    toMetricData(descriptor, aggregationTemporality, accumulationByAttributes, endTime) {
        // Instrument types that may record negative values; for these the sum
        // is omitted from the exported data point.
        const negativeCapableTypes = [
            MetricData_1.InstrumentType.GAUGE,
            MetricData_1.InstrumentType.UP_DOWN_COUNTER,
            MetricData_1.InstrumentType.OBSERVABLE_GAUGE,
            MetricData_1.InstrumentType.OBSERVABLE_UP_DOWN_COUNTER,
        ];
        const allowsNegativeValues = negativeCapableTypes.includes(descriptor.type);
        return {
            descriptor,
            aggregationTemporality,
            dataPointType: MetricData_1.DataPointType.HISTOGRAM,
            dataPoints: accumulationByAttributes.map(([attributes, accumulation]) => {
                const point = accumulation.toPointValue();
                return {
                    attributes,
                    startTime: accumulation.startTime,
                    endTime,
                    value: {
                        min: point.hasMinMax ? point.min : undefined,
                        max: point.hasMinMax ? point.max : undefined,
                        sum: !allowsNegativeValues ? point.sum : undefined,
                        buckets: point.buckets,
                        count: point.count,
                    },
                };
            }),
        };
    }
}
exports.HistogramAggregator = HistogramAggregator;
//# sourceMappingURL=Histogram.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,36 @@
import type { Accumulation, AccumulationRecord, Aggregator, LastValue } from './types';
import { AggregatorKind } from './types';
import type { HrTime } from '@opentelemetry/api';
import type { GaugeMetricData } from '../export/MetricData';
import type { Maybe } from '../utils';
import type { AggregationTemporality } from '../export/AggregationTemporality';
import type { InstrumentDescriptor } from '../InstrumentDescriptor';
/**
 * Accumulation state for {@link LastValueAggregator}: holds the most
 * recently recorded value together with the time it was sampled.
 */
export declare class LastValueAccumulation implements Accumulation {
    startTime: HrTime;
    private _current;
    sampleTime: HrTime;
    constructor(startTime: HrTime, current?: number, sampleTime?: HrTime);
    record(value: number): void;
    setStartTime(startTime: HrTime): void;
    toPointValue(): LastValue;
}
/** Basic aggregator which calculates a LastValue from individual measurements. */
export declare class LastValueAggregator implements Aggregator<LastValueAccumulation> {
    kind: AggregatorKind.LAST_VALUE;
    /** Creates an accumulation that has recorded no value yet. */
    createAccumulation(startTime: HrTime): LastValueAccumulation;
    /**
     * Returns the result of the merge of the given accumulations.
     *
     * Return the newly captured (delta) accumulation for LastValueAggregator.
     */
    merge(previous: LastValueAccumulation, delta: LastValueAccumulation): LastValueAccumulation;
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     *
     * A delta aggregation is not meaningful to LastValueAggregator, just return
     * the newly captured (delta) accumulation for LastValueAggregator.
     */
    diff(previous: LastValueAccumulation, current: LastValueAccumulation): LastValueAccumulation;
    /** Converts per-attribute accumulations into GAUGE metric data. */
    toMetricData(descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord<LastValueAccumulation>[], endTime: HrTime): Maybe<GaugeMetricData>;
}
//# sourceMappingURL=LastValue.d.ts.map

View File

@@ -0,0 +1,82 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.LastValueAggregator = exports.LastValueAccumulation = void 0;
const types_1 = require("./types");
const core_1 = require("@opentelemetry/core");
const MetricData_1 = require("../export/MetricData");
class LastValueAccumulation {
    startTime;
    _current;
    sampleTime;
    /**
     * @param startTime start of the collection interval
     * @param current last recorded measurement (defaults to 0)
     * @param sampleTime HrTime at which `current` was observed
     */
    constructor(startTime, current = 0, sampleTime = [0, 0]) {
        this.startTime = startTime;
        this._current = current;
        this.sampleTime = sampleTime;
    }
    // Overwrite the held value and stamp it with the current wall-clock time.
    record(value) {
        this.sampleTime = (0, core_1.millisToHrTime)(Date.now());
        this._current = value;
    }
    setStartTime(startTime) {
        this.startTime = startTime;
    }
    // The point value of a last-value accumulation is simply the latest sample.
    toPointValue() {
        return this._current;
    }
}
exports.LastValueAccumulation = LastValueAccumulation;
/** Basic aggregator which calculates a LastValue from individual measurements. */
class LastValueAggregator {
    kind = types_1.AggregatorKind.LAST_VALUE;
    createAccumulation(startTime) {
        return new LastValueAccumulation(startTime);
    }
    /**
     * Picks whichever accumulation was sampled most recently; a tie favors
     * `candidate`. Comparison is done in microseconds, so nanoseconds may
     * lose precision.
     */
    _latest(candidate, other) {
        return (0, core_1.hrTimeToMicroseconds)(candidate.sampleTime) >=
            (0, core_1.hrTimeToMicroseconds)(other.sampleTime)
            ? candidate
            : other;
    }
    /**
     * Returns the result of the merge of the given accumulations.
     *
     * Return the newly captured (delta) accumulation for LastValueAggregator.
     */
    merge(previous, delta) {
        const winner = this._latest(delta, previous);
        return new LastValueAccumulation(previous.startTime, winner.toPointValue(), winner.sampleTime);
    }
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     *
     * A delta aggregation is not meaningful to LastValueAggregator, just return
     * the newly captured (delta) accumulation for LastValueAggregator.
     */
    diff(previous, current) {
        const winner = this._latest(current, previous);
        return new LastValueAccumulation(current.startTime, winner.toPointValue(), winner.sampleTime);
    }
    // Convert per-attribute accumulations into GAUGE metric data.
    toMetricData(descriptor, aggregationTemporality, accumulationByAttributes, endTime) {
        const dataPoints = accumulationByAttributes.map(([attributes, accumulation]) => ({
            attributes,
            startTime: accumulation.startTime,
            endTime,
            value: accumulation.toPointValue(),
        }));
        return {
            descriptor,
            aggregationTemporality,
            dataPointType: MetricData_1.DataPointType.GAUGE,
            dataPoints,
        };
    }
}
exports.LastValueAggregator = LastValueAggregator;
//# sourceMappingURL=LastValue.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,34 @@
import type { Sum, Aggregator, Accumulation, AccumulationRecord } from './types';
import { AggregatorKind } from './types';
import type { HrTime } from '@opentelemetry/api';
import type { SumMetricData } from '../export/MetricData';
import type { Maybe } from '../utils';
import type { AggregationTemporality } from '../export/AggregationTemporality';
import type { InstrumentDescriptor } from '../InstrumentDescriptor';
/**
 * Accumulation state for {@link SumAggregator}: a running total plus a
 * `reset` flag indicating the producer's cumulative sum restarted.
 */
export declare class SumAccumulation implements Accumulation {
    startTime: HrTime;
    monotonic: boolean;
    private _current;
    reset: boolean;
    constructor(startTime: HrTime, monotonic: boolean, current?: number, reset?: boolean);
    /** Adds `value` to the running total; negative values are ignored when monotonic. */
    record(value: number): void;
    setStartTime(startTime: HrTime): void;
    toPointValue(): Sum;
}
/** Basic aggregator which calculates a Sum from individual measurements. */
export declare class SumAggregator implements Aggregator<SumAccumulation> {
    kind: AggregatorKind.SUM;
    monotonic: boolean;
    /**
     * @param monotonic when true, negative recordings are ignored and a
     * decrease between cumulative measurements is treated as a reset.
     */
    constructor(monotonic: boolean);
    createAccumulation(startTime: HrTime): SumAccumulation;
    /**
     * Returns the result of the merge of the given accumulations.
     */
    merge(previous: SumAccumulation, delta: SumAccumulation): SumAccumulation;
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     */
    diff(previous: SumAccumulation, current: SumAccumulation): SumAccumulation;
    /** Converts per-attribute accumulations into SUM metric data. */
    toMetricData(descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord<SumAccumulation>[], endTime: HrTime): Maybe<SumMetricData>;
}
//# sourceMappingURL=Sum.d.ts.map

View File

@@ -0,0 +1,90 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.SumAggregator = exports.SumAccumulation = void 0;
const types_1 = require("./types");
const MetricData_1 = require("../export/MetricData");
class SumAccumulation {
    startTime;
    monotonic;
    _current;
    reset;
    /**
     * @param startTime start of the collection interval
     * @param monotonic when true, negative recordings are dropped
     * @param current initial running total (defaults to 0)
     * @param reset marks that the producer's cumulative sum restarted
     */
    constructor(startTime, monotonic, current = 0, reset = false) {
        this.startTime = startTime;
        this.monotonic = monotonic;
        this._current = current;
        this.reset = reset;
    }
    record(value) {
        // Monotonic sums silently discard negative measurements.
        if (!this.monotonic || value >= 0) {
            this._current += value;
        }
    }
    setStartTime(startTime) {
        this.startTime = startTime;
    }
    toPointValue() {
        return this._current;
    }
}
exports.SumAccumulation = SumAccumulation;
/** Basic aggregator which calculates a Sum from individual measurements. */
class SumAggregator {
    kind = types_1.AggregatorKind.SUM;
    monotonic;
    /**
     * @param monotonic when true, negative recordings are rejected and a
     * decreasing cumulative sum is treated as a reset.
     */
    constructor(monotonic) {
        this.monotonic = monotonic;
    }
    createAccumulation(startTime) {
        return new SumAccumulation(startTime, this.monotonic);
    }
    /**
     * Returns the result of the merge of the given accumulations.
     */
    merge(previous, delta) {
        const deltaValue = delta.toPointValue();
        // A delta that carries a reset replaces the running total rather
        // than adding to it.
        if (delta.reset) {
            return new SumAccumulation(delta.startTime, this.monotonic, deltaValue, delta.reset);
        }
        return new SumAccumulation(previous.startTime, this.monotonic, previous.toPointValue() + deltaValue);
    }
    /**
     * Returns a new DELTA aggregation by comparing two cumulative measurements.
     */
    diff(previous, current) {
        const prevValue = previous.toPointValue();
        const currValue = current.toPointValue();
        /**
         * If the SumAggregator is a monotonic one and the previous point value is
         * greater than the current one, a reset is deemed to have happened.
         * Return the current point value to prevent the value from being reset.
         */
        if (this.monotonic && prevValue > currValue) {
            return new SumAccumulation(current.startTime, this.monotonic, currValue, true);
        }
        return new SumAccumulation(current.startTime, this.monotonic, currValue - prevValue);
    }
    // Convert per-attribute accumulations into SUM metric data.
    toMetricData(descriptor, aggregationTemporality, accumulationByAttributes, endTime) {
        const dataPoints = accumulationByAttributes.map(([attributes, accumulation]) => ({
            attributes,
            startTime: accumulation.startTime,
            endTime,
            value: accumulation.toPointValue(),
        }));
        return {
            descriptor,
            aggregationTemporality,
            dataPoints,
            dataPointType: MetricData_1.DataPointType.SUM,
            isMonotonic: this.monotonic,
        };
    }
}
exports.SumAggregator = SumAggregator;
//# sourceMappingURL=Sum.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,133 @@
/**
 * Buckets is a view over a (logically circular) backing array of bucket
 * counts used by the exponential histogram. See the constructor notes for
 * the meaning of indexBase/indexStart/indexEnd.
 */
export declare class Buckets {
    backing: BucketsBacking;
    indexBase: number;
    indexStart: number;
    indexEnd: number;
    /**
     * The term index refers to the number of the exponential histogram bucket
     * used to determine its boundaries. The lower boundary of a bucket is
     * determined by base ** index and the upper boundary of a bucket is
     * determined by base ** (index + 1). index values are signed to account
     * for values less than or equal to 1.
     *
     * indexBase is the index of the 0th position in the
     * backing array, i.e., backing[0] is the count
     * in the bucket with index `indexBase`.
     *
     * indexStart is the smallest index value represented
     * in the backing array.
     *
     * indexEnd is the largest index value represented in
     * the backing array.
     */
    constructor(backing?: BucketsBacking, indexBase?: number, indexStart?: number, indexEnd?: number);
    /**
     * Offset is the bucket index of the smallest entry in the counts array
     * @returns {number}
     */
    get offset(): number;
    /**
     * Buckets is a view into the backing array.
     * @returns {number}
     */
    get length(): number;
    /**
     * An array of counts, where count[i] carries the count
     * of the bucket at index (offset+i). count[i] is the count of
     * values greater than base^(offset+i) and less than or equal to
     * base^(offset+i+1).
     * @returns {number} The logical counts based on the backing array
     */
    counts(): number[];
    /**
     * At returns the count of the bucket at a position in the logical
     * array of counts.
     * @param position
     * @returns {number}
     */
    at(position: number): number;
    /**
     * incrementBucket increments the backing array index by `increment`
     * @param bucketIndex
     * @param increment
     */
    incrementBucket(bucketIndex: number, increment: number): void;
    /**
     * decrementBucket decrements the backing array index by `decrement`
     * if decrement is greater than the current value, it's set to 0.
     * @param bucketIndex
     * @param decrement
     */
    decrementBucket(bucketIndex: number, decrement: number): void;
    /**
     * trim removes leading and / or trailing zero buckets (which can occur
     * after diffing two histos) and rotates the backing array so that the
     * smallest non-zero index is in the 0th position of the backing array
     */
    trim(): void;
    /**
     * downscale first rotates, then collapses 2**`by`-to-1 buckets.
     * @param by
     */
    downscale(by: number): void;
    /**
     * Clone returns a deep copy of Buckets
     * @returns {Buckets}
     */
    clone(): Buckets;
    /**
     * _rotate shifts the backing array contents so that indexStart ==
     * indexBase to simplify the downscale logic.
     */
    private _rotate;
    /**
     * _relocateBucket adds the count in counts[src] to counts[dest] and
     * resets count[src] to zero.
     */
    private _relocateBucket;
}
/**
 * BucketsBacking holds the raw buckets and some utility methods to
 * manage them.
 */
declare class BucketsBacking {
    private _counts;
    /** @param counts initial raw bucket counts; defaults to a single empty bucket */
    constructor(counts?: number[]);
    /**
     * length returns the physical size of the backing array, which
     * is >= buckets.length()
     */
    get length(): number;
    /**
     * countAt returns the count in a specific bucket
     */
    countAt(pos: number): number;
    /**
     * growTo grows a backing array and copies old entries
     * into their correct new positions.
     */
    growTo(newSize: number, oldPositiveLimit: number, newPositiveLimit: number): void;
    /**
     * reverse the items in the backing array in the range [from, limit).
     */
    reverse(from: number, limit: number): void;
    /**
     * emptyBucket empties the count from a bucket, for
     * moving into another.
     */
    emptyBucket(src: number): number;
    /**
     * increments a bucket by `increment`
     */
    increment(bucketIndex: number, increment: number): void;
    /**
     * decrements a bucket by `decrement`; the count never goes below zero
     */
    decrement(bucketIndex: number, decrement: number): void;
    /**
     * clone returns a deep copy of BucketsBacking
     */
    clone(): BucketsBacking;
}
export {};
//# sourceMappingURL=Buckets.d.ts.map

View File

@@ -0,0 +1,266 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Buckets = void 0;
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
class Buckets {
    backing;
    indexBase;
    indexStart;
    indexEnd;
    /**
     * The term index refers to the number of the exponential histogram bucket
     * used to determine its boundaries. The lower boundary of a bucket is
     * determined by base ** index and the upper boundary of a bucket is
     * determined by base ** (index + 1). index values are signed to account
     * for values less than or equal to 1.
     *
     * indexBase is the index of the 0th position in the
     * backing array, i.e., backing[0] is the count
     * in the bucket with index `indexBase`.
     *
     * indexStart is the smallest index value represented
     * in the backing array.
     *
     * indexEnd is the largest index value represented in
     * the backing array.
     */
    constructor(backing = new BucketsBacking(), indexBase = 0, indexStart = 0, indexEnd = 0) {
        this.backing = backing;
        this.indexBase = indexBase;
        this.indexStart = indexStart;
        this.indexEnd = indexEnd;
    }
    /**
     * Offset is the bucket index of the smallest entry in the counts array
     * @returns {number}
     */
    get offset() {
        return this.indexStart;
    }
    /**
     * Buckets is a view into the backing array.
     * @returns {number}
     */
    get length() {
        if (this.backing.length === 0) {
            return 0;
        }
        // a single zero-valued bucket counts as empty
        if (this.indexEnd === this.indexStart && this.at(0) === 0) {
            return 0;
        }
        return this.indexEnd - this.indexStart + 1;
    }
    /**
     * An array of counts, where count[i] carries the count
     * of the bucket at index (offset+i). count[i] is the count of
     * values greater than base^(offset+i) and less than or equal to
     * base^(offset+i+1).
     * @returns {number} The logical counts based on the backing array
     */
    counts() {
        return Array.from({ length: this.length }, (_, i) => this.at(i));
    }
    /**
     * At returns the count of the bucket at a position in the logical
     * array of counts.
     * @param position
     * @returns {number}
     */
    at(position) {
        // The backing array is treated as circular: `bias` is the rotation
        // between the logical 0th bucket (indexStart) and backing[0]
        // (indexBase); positions before the bias wrap to the array's tail.
        const bias = this.indexBase - this.indexStart;
        if (position < bias) {
            position += this.backing.length;
        }
        position -= bias;
        return this.backing.countAt(position);
    }
    /**
     * incrementBucket increments the backing array index by `increment`
     * @param bucketIndex
     * @param increment
     */
    incrementBucket(bucketIndex, increment) {
        this.backing.increment(bucketIndex, increment);
    }
    /**
     * decrementBucket decrements the backing array index by `decrement`
     * if decrement is greater than the current value, it's set to 0.
     * @param bucketIndex
     * @param decrement
     */
    decrementBucket(bucketIndex, decrement) {
        this.backing.decrement(bucketIndex, decrement);
    }
    /**
     * trim removes leading and / or trailing zero buckets (which can occur
     * after diffing two histos) and rotates the backing array so that the
     * smallest non-zero index is in the 0th position of the backing array
     */
    trim() {
        // advance indexStart past leading zero buckets
        for (let i = 0; i < this.length; i++) {
            if (this.at(i) !== 0) {
                this.indexStart += i;
                break;
            }
            else if (i === this.length - 1) {
                //the entire array is zeroed out
                this.indexStart = this.indexEnd = this.indexBase = 0;
                return;
            }
        }
        // pull indexEnd back past trailing zero buckets
        for (let i = this.length - 1; i >= 0; i--) {
            if (this.at(i) !== 0) {
                this.indexEnd -= this.length - i - 1;
                break;
            }
        }
        this._rotate();
    }
    /**
     * downscale first rotates, then collapses 2**`by`-to-1 buckets.
     * @param by
     */
    downscale(by) {
        this._rotate();
        const size = 1 + this.indexEnd - this.indexStart;
        // `each` is the number of old buckets merged into one new bucket
        const each = 1 << by;
        let inpos = 0;
        let outpos = 0;
        for (let pos = this.indexStart; pos <= this.indexEnd;) {
            // align the first merge group on a multiple of `each`
            // (JS % can be negative for negative indexes, so normalize)
            let mod = pos % each;
            if (mod < 0) {
                mod += each;
            }
            for (let i = mod; i < each && inpos < size; i++) {
                this._relocateBucket(outpos, inpos);
                inpos++;
                pos++;
            }
            outpos++;
        }
        // signed right shift maps old indexes onto the coarser scale
        this.indexStart >>= by;
        this.indexEnd >>= by;
        this.indexBase = this.indexStart;
    }
    /**
     * Clone returns a deep copy of Buckets
     * @returns {Buckets}
     */
    clone() {
        return new Buckets(this.backing.clone(), this.indexBase, this.indexStart, this.indexEnd);
    }
    /**
     * _rotate shifts the backing array contents so that indexStart ==
     * indexBase to simplify the downscale logic.
     */
    _rotate() {
        const bias = this.indexBase - this.indexStart;
        if (bias === 0) {
            return;
        }
        else if (bias > 0) {
            // in-place rotation via the classic triple-reversal trick
            this.backing.reverse(0, this.backing.length);
            this.backing.reverse(0, bias);
            this.backing.reverse(bias, this.backing.length);
        }
        else {
            // negative bias, this can happen when diffing two histograms
            this.backing.reverse(0, this.backing.length);
            this.backing.reverse(0, this.backing.length + bias);
        }
        this.indexBase = this.indexStart;
    }
    /**
     * _relocateBucket adds the count in counts[src] to counts[dest] and
     * resets count[src] to zero.
     */
    _relocateBucket(dest, src) {
        if (dest === src) {
            return;
        }
        this.incrementBucket(dest, this.backing.emptyBucket(src));
    }
}
exports.Buckets = Buckets;
/**
* BucketsBacking holds the raw buckets and some utility methods to
* manage them.
*/
class BucketsBacking {
    _counts;
    /**
     * @param counts initial raw bucket counts; defaults to a single empty bucket
     */
    constructor(counts = [0]) {
        this._counts = counts;
    }
    /**
     * length returns the physical size of the backing array, which
     * is >= buckets.length()
     */
    get length() {
        return this._counts.length;
    }
    /**
     * countAt returns the count in a specific bucket
     */
    countAt(pos) {
        return this._counts[pos];
    }
    /**
     * growTo grows a backing array and copies old entries
     * into their correct new positions.
     */
    growTo(newSize, oldPositiveLimit, newPositiveLimit) {
        const grown = new Array(newSize).fill(0);
        // relocate the upper segment to the new positive limit, then lay the
        // lower segment back down at the front
        grown.splice(newPositiveLimit, this._counts.length - oldPositiveLimit, ...this._counts.slice(oldPositiveLimit));
        grown.splice(0, oldPositiveLimit, ...this._counts.slice(0, oldPositiveLimit));
        this._counts = grown;
    }
    /**
     * reverse the items in the backing array in the range [from, limit).
     */
    reverse(from, limit) {
        // two-pointer in-place swap
        let lo = from;
        let hi = limit - 1;
        while (lo < hi) {
            const swapped = this._counts[lo];
            this._counts[lo] = this._counts[hi];
            this._counts[hi] = swapped;
            lo += 1;
            hi -= 1;
        }
    }
    /**
     * emptyBucket empties the count from a bucket, for
     * moving into another.
     */
    emptyBucket(src) {
        const emptied = this._counts[src];
        this._counts[src] = 0;
        return emptied;
    }
    /**
     * increments a bucket by `increment`
     */
    increment(bucketIndex, increment) {
        this._counts[bucketIndex] += increment;
    }
    /**
     * decrements a bucket by `decrement`, clamping the count at zero as a
     * defense against over-decrementing
     */
    decrement(bucketIndex, decrement) {
        const current = this._counts[bucketIndex];
        this._counts[bucketIndex] = current >= decrement ? current - decrement : 0;
    }
    /**
     * clone returns a deep copy of BucketsBacking
     */
    clone() {
        return new BucketsBacking(this._counts.slice());
    }
}
//# sourceMappingURL=Buckets.js.map

View File

@@ -0,0 +1,31 @@
import type { Mapping } from './types';
/**
* ExponentMapping implements exponential mapping functions for
* scales <=0. For scales > 0 LogarithmMapping should be used.
*/
/**
 * ExponentMapping implements exponential mapping functions for
 * scales <=0. For scales > 0 LogarithmMapping should be used.
 */
export declare class ExponentMapping implements Mapping {
    private readonly _shift;
    /** @param scale expected to be <= 0; see class docs */
    constructor(scale: number);
    /**
     * Maps positive floating point values to indexes corresponding to scale
     * @param value
     * @returns {number} index for provided value at the current scale
     */
    mapToIndex(value: number): number;
    /**
     * Returns the lower bucket boundary for the given index for scale
     *
     * @param index
     * @returns {number}
     */
    lowerBoundary(index: number): number;
    /**
     * The scale used by this mapping
     * @returns {number}
     */
    get scale(): number;
    private _minNormalLowerBoundaryIndex;
    private _maxNormalLowerBoundaryIndex;
    private _rightShift;
}
//# sourceMappingURL=ExponentMapping.d.ts.map

View File

@@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExponentMapping = void 0;
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
const ieee754 = require("./ieee754");
const util = require("../util");
const types_1 = require("./types");
/**
 * ExponentMapping implements exponential mapping functions for
 * scales <=0. For scales > 0 LogarithmMapping should be used.
 */
class ExponentMapping {
    _shift;
    constructor(scale) {
        // At scale <= 0 each bucket spans 2**(-scale) powers of two, which is
        // implemented as a right shift of the base-2 exponent by -scale bits.
        this._shift = -scale;
    }
    /**
     * Maps positive floating point values to indexes corresponding to scale
     * @param value
     * @returns {number} index for provided value at the current scale
     */
    mapToIndex(value) {
        if (value < ieee754.MIN_VALUE) {
            return this._minNormalLowerBoundaryIndex();
        }
        const exponent = ieee754.getNormalBase2(value);
        // An exact power of two belongs to the bucket below, so apply a -1
        // correction in that case. The custom _rightShift accommodates the
        // 52-bit significand, which native bitwise operators cannot handle.
        const correction = this._rightShift(ieee754.getSignificand(value) - 1, ieee754.SIGNIFICAND_WIDTH);
        return (exponent + correction) >> this._shift;
    }
    /**
     * Returns the lower bucket boundary for the given index for scale
     *
     * @param index
     * @returns {number}
     * @throws {MappingError} when index is outside the representable range
     */
    lowerBoundary(index) {
        const minIndex = this._minNormalLowerBoundaryIndex();
        if (index < minIndex) {
            throw new types_1.MappingError(`underflow: ${index} is < minimum lower boundary: ${minIndex}`);
        }
        const maxIndex = this._maxNormalLowerBoundaryIndex();
        if (index > maxIndex) {
            throw new types_1.MappingError(`overflow: ${index} is > maximum lower boundary: ${maxIndex}`);
        }
        return util.ldexp(1, index << this._shift);
    }
    /**
     * The scale used by this mapping
     * @returns {number}
     */
    get scale() {
        return this._shift === 0 ? 0 : -this._shift;
    }
    _minNormalLowerBoundaryIndex() {
        let index = ieee754.MIN_NORMAL_EXPONENT >> this._shift;
        if (this._shift < 2) {
            // adjust the lowest index at shifts 0 and 1 (scales 0 and -1)
            index--;
        }
        return index;
    }
    _maxNormalLowerBoundaryIndex() {
        return ieee754.MAX_NORMAL_EXPONENT >> this._shift;
    }
    // Arithmetic right shift that works beyond 32 bits (for the significand).
    _rightShift(value, shift) {
        return Math.floor(value * Math.pow(2, -shift));
    }
}
exports.ExponentMapping = ExponentMapping;
//# sourceMappingURL=ExponentMapping.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ExponentMapping.js","sourceRoot":"","sources":["../../../../../src/aggregator/exponential-histogram/mapping/ExponentMapping.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACH,qCAAqC;AACrC,gCAAgC;AAEhC,mCAAuC;AAEvC;;;GAGG;AACH,MAAa,eAAe;IACT,MAAM,CAAS;IAEhC,YAAY,KAAa;QACvB,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,UAAU,CAAC,KAAa;QACtB,IAAI,KAAK,GAAG,OAAO,CAAC,SAAS,EAAE;YAC7B,OAAO,IAAI,CAAC,4BAA4B,EAAE,CAAC;SAC5C;QAED,MAAM,GAAG,GAAG,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC;QAE1C,wDAAwD;QACxD,4DAA4D;QAC5D,6DAA6D;QAC7D,2BAA2B;QAC3B,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,CACjC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,EACjC,OAAO,CAAC,iBAAiB,CAC1B,CAAC;QAEF,OAAO,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,IAAI,CAAC,MAAM,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,aAAa,CAAC,KAAa;QACzB,MAAM,QAAQ,GAAG,IAAI,CAAC,4BAA4B,EAAE,CAAC;QACrD,IAAI,KAAK,GAAG,QAAQ,EAAE;YACpB,MAAM,IAAI,oBAAY,CACpB,cAAc,KAAK,iCAAiC,QAAQ,EAAE,CAC/D,CAAC;SACH;QACD,MAAM,QAAQ,GAAG,IAAI,CAAC,4BAA4B,EAAE,CAAC;QACrD,IAAI,KAAK,GAAG,QAAQ,EAAE;YACpB,MAAM,IAAI,oBAAY,CACpB,aAAa,KAAK,iCAAiC,QAAQ,EAAE,CAC9D,CAAC;SACH;QAED,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,IAAI,KAAK;QACP,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;YACrB,OAAO,CAAC,CAAC;SACV;QACD,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;IACtB,CAAC;IAEO,4BAA4B;QAClC,IAAI,KAAK,GAAG,OAAO,CAAC,mBAAmB,IAAI,IAAI,CAAC,MAAM,CAAC;QACvD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;YACnB,KAAK,EAAE,CAAC;SACT;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAEO,4BAA4B;QAClC,OAAO,OAAO,CAAC,mBAAmB,IAAI,IAAI,CAAC,MAAM,CAAC;IACpD,CAAC;IAEO,WAAW,CAAC,KAAa,EAAE,KAAa;QAC9C,OAAO,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;IACjD,CAAC;CACF;AAjFD,0CAiFC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\nimport * as ieee754 from './ieee754';\nimport * as util from '../util';\nimport type { Mapping } from './types';\nimport { MappingError } from './types';\n\n/**\n * ExponentMapping 
implements exponential mapping functions for\n * scales <=0. For scales > 0 LogarithmMapping should be used.\n */\nexport class ExponentMapping implements Mapping {\n private readonly _shift: number;\n\n constructor(scale: number) {\n this._shift = -scale;\n }\n\n /**\n * Maps positive floating point values to indexes corresponding to scale\n * @param value\n * @returns {number} index for provided value at the current scale\n */\n mapToIndex(value: number): number {\n if (value < ieee754.MIN_VALUE) {\n return this._minNormalLowerBoundaryIndex();\n }\n\n const exp = ieee754.getNormalBase2(value);\n\n // In case the value is an exact power of two, compute a\n // correction of -1. Note, we are using a custom _rightShift\n // to accommodate a 52-bit argument, which the native bitwise\n // operators do not support\n const correction = this._rightShift(\n ieee754.getSignificand(value) - 1,\n ieee754.SIGNIFICAND_WIDTH\n );\n\n return (exp + correction) >> this._shift;\n }\n\n /**\n * Returns the lower bucket boundary for the given index for scale\n *\n * @param index\n * @returns {number}\n */\n lowerBoundary(index: number): number {\n const minIndex = this._minNormalLowerBoundaryIndex();\n if (index < minIndex) {\n throw new MappingError(\n `underflow: ${index} is < minimum lower boundary: ${minIndex}`\n );\n }\n const maxIndex = this._maxNormalLowerBoundaryIndex();\n if (index > maxIndex) {\n throw new MappingError(\n `overflow: ${index} is > maximum lower boundary: ${maxIndex}`\n );\n }\n\n return util.ldexp(1, index << this._shift);\n }\n\n /**\n * The scale used by this mapping\n * @returns {number}\n */\n get scale(): number {\n if (this._shift === 0) {\n return 0;\n }\n return -this._shift;\n }\n\n private _minNormalLowerBoundaryIndex(): number {\n let index = ieee754.MIN_NORMAL_EXPONENT >> this._shift;\n if (this._shift < 2) {\n index--;\n }\n\n return index;\n }\n\n private _maxNormalLowerBoundaryIndex(): number {\n return ieee754.MAX_NORMAL_EXPONENT >> 
this._shift;\n }\n\n private _rightShift(value: number, shift: number): number {\n return Math.floor(value * Math.pow(2, -shift));\n }\n}\n"]}

View File

@@ -0,0 +1,32 @@
import type { Mapping } from './types';
/**
* LogarithmMapping implements exponential mapping functions for scale > 0.
* For scales <= 0 the exponent mapping should be used.
*/
/**
 * LogarithmMapping implements exponential mapping functions for scale > 0.
 * For scales <= 0 the exponent mapping should be used.
 */
export declare class LogarithmMapping implements Mapping {
    private readonly _scale;
    private readonly _scaleFactor;
    private readonly _inverseFactor;
    /** @param scale expected to be > 0; see class docs */
    constructor(scale: number);
    /**
     * Maps positive floating point values to indexes corresponding to scale
     * @param value
     * @returns {number} index for provided value at the current scale
     */
    mapToIndex(value: number): number;
    /**
     * Returns the lower bucket boundary for the given index for scale
     *
     * @param index
     * @returns {number}
     */
    lowerBoundary(index: number): number;
    /**
     * The scale used by this mapping
     * @returns {number}
     */
    get scale(): number;
    private _minNormalLowerBoundaryIndex;
    private _maxNormalLowerBoundaryIndex;
}
//# sourceMappingURL=LogarithmMapping.d.ts.map

View File

@@ -0,0 +1,87 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LogarithmMapping = void 0;
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
const ieee754 = require("./ieee754");
const util = require("../util");
const types_1 = require("./types");
/**
 * LogarithmMapping implements exponential mapping functions for scale > 0.
 * For scales <= 0 the exponent mapping should be used.
 */
class LogarithmMapping {
    _scale;
    _scaleFactor;
    _inverseFactor;
    constructor(scale) {
        this._scale = scale;
        // index(value) = floor(log2(value) * 2**scale)
        //              = floor(log(value) * log2(e) * 2**scale)
        this._scaleFactor = util.ldexp(Math.LOG2E, scale);
        // inverse: boundary(index) = exp(index * ln(2) * 2**-scale)
        this._inverseFactor = util.ldexp(Math.LN2, -scale);
    }
    /**
     * Maps positive floating point values to indexes corresponding to scale
     * @param value
     * @returns {number} index for provided value at the current scale
     */
    mapToIndex(value) {
        if (value <= ieee754.MIN_VALUE) {
            return this._minNormalLowerBoundaryIndex() - 1;
        }
        // exact power of two special case
        if (ieee754.getSignificand(value) === 0) {
            const exp = ieee754.getNormalBase2(value);
            return (exp << this._scale) - 1;
        }
        // non-power of two cases. use Math.floor to round the scaled logarithm
        const index = Math.floor(Math.log(value) * this._scaleFactor);
        const maxIndex = this._maxNormalLowerBoundaryIndex();
        if (index >= maxIndex) {
            return maxIndex;
        }
        return index;
    }
    /**
     * Returns the lower bucket boundary for the given index for scale
     *
     * @param index
     * @returns {number}
     * @throws {MappingError} when index is outside the representable range
     */
    lowerBoundary(index) {
        const maxIndex = this._maxNormalLowerBoundaryIndex();
        if (index >= maxIndex) {
            if (index === maxIndex) {
                return 2 * Math.exp((index - (1 << this._scale)) / this._scaleFactor);
            }
            throw new types_1.MappingError(`overflow: ${index} is > maximum lower boundary: ${maxIndex}`);
        }
        const minIndex = this._minNormalLowerBoundaryIndex();
        if (index <= minIndex) {
            if (index === minIndex) {
                return ieee754.MIN_VALUE;
            }
            else if (index === minIndex - 1) {
                return Math.exp((index + (1 << this._scale)) / this._scaleFactor) / 2;
            }
            // FIX: an index below the minimum is an underflow; the message
            // previously said "overflow", which was inconsistent with the
            // analogous branch in ExponentMapping.lowerBoundary.
            throw new types_1.MappingError(`underflow: ${index} is < minimum lower boundary: ${minIndex}`);
        }
        return Math.exp(index * this._inverseFactor);
    }
    /**
     * The scale used by this mapping
     * @returns {number}
     */
    get scale() {
        return this._scale;
    }
    _minNormalLowerBoundaryIndex() {
        return ieee754.MIN_NORMAL_EXPONENT << this._scale;
    }
    _maxNormalLowerBoundaryIndex() {
        return ((ieee754.MAX_NORMAL_EXPONENT + 1) << this._scale) - 1;
    }
}
exports.LogarithmMapping = LogarithmMapping;
//# sourceMappingURL=LogarithmMapping.js.map

View File

@@ -0,0 +1,10 @@
import type { Mapping } from './types';
/**
 * getMapping returns an appropriate mapping for the given scale. For scales -10
 * to 0 the underlying type will be ExponentMapping. For scales 1 to 20 the
 * underlying type will be LogarithmMapping.
 * @param scale a number in the range [-10, 20]
 * @returns {Mapping}
 * @throws {MappingError} when scale is outside [-10, 20]
 */
export declare function getMapping(scale: number): Mapping;
//# sourceMappingURL=getMapping.d.ts.map

View File

@@ -0,0 +1,34 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getMapping = void 0;
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
const ExponentMapping_1 = require("./ExponentMapping");
const LogarithmMapping_1 = require("./LogarithmMapping");
const types_1 = require("./types");
const MIN_SCALE = -10;
const MAX_SCALE = 20;
// One prebuilt mapping per scale in [MIN_SCALE, MAX_SCALE]; slot i holds the
// mapping for scale i - 10 (ExponentMapping for scales <= 0, LogarithmMapping
// for scales >= 1).
const PREBUILT_MAPPINGS = Array.from({ length: 31 }, (_, i) => i > 10
    ? new LogarithmMapping_1.LogarithmMapping(i - 10)
    : new ExponentMapping_1.ExponentMapping(i - 10));
/**
 * getMapping returns an appropriate mapping for the given scale. For scales -10
 * to 0 the underlying type will be ExponentMapping. For scales 1 to 20 the
 * underlying type will be LogarithmMapping.
 * @param scale a number in the range [-10, 20]
 * @returns {Mapping}
 * @throws {MappingError} if scale falls outside [-10, 20]
 */
function getMapping(scale) {
    const outOfRange = scale < MIN_SCALE || scale > MAX_SCALE;
    if (outOfRange) {
        throw new types_1.MappingError(`expected scale >= ${MIN_SCALE} && <= ${MAX_SCALE}, got: ${scale}`);
    }
    // mappings are offset by 10: scale -10 lives at slot 0, scale 20 at slot 30
    return PREBUILT_MAPPINGS[scale + 10];
}
exports.getMapping = getMapping;
//# sourceMappingURL=getMapping.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"getMapping.js","sourceRoot":"","sources":["../../../../../src/aggregator/exponential-histogram/mapping/getMapping.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACH,uDAAoD;AACpD,yDAAsD;AAEtD,mCAAuC;AAEvC,MAAM,SAAS,GAAG,CAAC,EAAE,CAAC;AACtB,MAAM,SAAS,GAAG,EAAE,CAAC;AACrB,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;IAC5D,IAAI,CAAC,GAAG,EAAE,EAAE;QACV,OAAO,IAAI,mCAAgB,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;KACrC;IACD,OAAO,IAAI,iCAAe,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;AACrC,CAAC,CAAC,CAAC;AAEH;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,KAAa;IACtC,IAAI,KAAK,GAAG,SAAS,IAAI,KAAK,GAAG,SAAS,EAAE;QAC1C,MAAM,IAAI,oBAAY,CACpB,qBAAqB,SAAS,UAAU,SAAS,UAAU,KAAK,EAAE,CACnE,CAAC;KACH;IACD,8EAA8E;IAC9E,OAAO,iBAAiB,CAAC,KAAK,GAAG,EAAE,CAAC,CAAC;AACvC,CAAC;AARD,gCAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\nimport { ExponentMapping } from './ExponentMapping';\nimport { LogarithmMapping } from './LogarithmMapping';\nimport type { Mapping } from './types';\nimport { MappingError } from './types';\n\nconst MIN_SCALE = -10;\nconst MAX_SCALE = 20;\nconst PREBUILT_MAPPINGS = Array.from({ length: 31 }, (_, i) => {\n if (i > 10) {\n return new LogarithmMapping(i - 10);\n }\n return new ExponentMapping(i - 10);\n});\n\n/**\n * getMapping returns an appropriate mapping for the given scale. For scales -10\n * to 0 the underlying type will be ExponentMapping. For scales 1 to 20 the\n * underlying type will be LogarithmMapping.\n * @param scale a number in the range [-10, 20]\n * @returns {Mapping}\n */\nexport function getMapping(scale: number): Mapping {\n if (scale > MAX_SCALE || scale < MIN_SCALE) {\n throw new MappingError(\n `expected scale >= ${MIN_SCALE} && <= ${MAX_SCALE}, got: ${scale}`\n );\n }\n // mappings are offset by 10. scale -10 is at position 0 and scale 20 is at 30\n return PREBUILT_MAPPINGS[scale + 10];\n}\n"]}

View File

@@ -0,0 +1,41 @@
/**
 * The functions and constants in this file allow us to interact
 * with the internal representation of an IEEE 64-bit floating point
 * number. We need to work with all 64-bits, thus, care needs to be
 * taken when working with Javascript's bitwise operators (<<, >>, &,
 * |, etc) as they truncate operands to 32-bits. In order to work around
 * this we work with the 64-bits as two 32-bit halves, perform bitwise
 * operations on them independently, and combine the results (if needed).
 */
/**
 * SIGNIFICAND_WIDTH is the number of significand (mantissa) bits in an
 * IEEE 754 double-precision value: 52.
 */
export declare const SIGNIFICAND_WIDTH = 52;
/**
 * MIN_NORMAL_EXPONENT is the minimum exponent of a normalized
 * floating point: -1022.
 */
export declare const MIN_NORMAL_EXPONENT: number;
/**
 * MAX_NORMAL_EXPONENT is the maximum exponent of a normalized
 * floating point: 1023.
 */
export declare const MAX_NORMAL_EXPONENT = 1023;
/**
 * MIN_VALUE is the smallest normal number: 2^-1022.
 */
export declare const MIN_VALUE: number;
/**
 * getNormalBase2 extracts the normalized base-2 fractional exponent.
 * This returns k for the equation f x 2**k where f is
 * in the range [1, 2). Note that this function is not called for
 * subnormal numbers.
 * @param {number} value - the value to determine normalized base-2 fractional
 * exponent for
 * @returns {number} the normalized base-2 exponent
 */
export declare function getNormalBase2(value: number): number;
/**
 * getSignificand returns the 52-bit (unsigned) significand, represented as
 * an ordinary non-negative JavaScript number.
 * @param {number} value - the floating point number to extract the significand from
 * @returns {number} The 52-bit significand
 */
export declare function getSignificand(value: number): number;
//# sourceMappingURL=ieee754.d.ts.map

View File

@@ -0,0 +1,83 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getSignificand = exports.getNormalBase2 = exports.MIN_VALUE = exports.MAX_NORMAL_EXPONENT = exports.MIN_NORMAL_EXPONENT = exports.SIGNIFICAND_WIDTH = void 0;
/**
 * The functions and constants in this file allow us to interact
 * with the internal representation of an IEEE 64-bit floating point
 * number. We need to work with all 64-bits, thus, care needs to be
 * taken when working with Javascript's bitwise operators (<<, >>, &,
 * |, etc) as they truncate operands to 32-bits. In order to work around
 * this we work with the 64-bits as two 32-bit halves, perform bitwise
 * operations on them independently, and combine the results (if needed).
 */
// Number of significand (mantissa) bits in an IEEE 754 double.
exports.SIGNIFICAND_WIDTH = 52;
/**
 * EXPONENT_MASK selects the 11 exponent bits within the hi 32 bits of an
 * IEEE 754 double-precision floating point value: 0x7ff00000.
 */
const EXPONENT_MASK = 0x7ff00000;
/**
 * SIGNIFICAND_MASK is the mask for the significand portion of the hi 32-bits
 * of an IEEE 754 double-precision floating-point value: 0xfffff
 */
const SIGNIFICAND_MASK = 0xfffff;
/**
 * EXPONENT_BIAS is the exponent bias specified for encoding
 * the IEEE 754 double-precision floating point exponent: 1023
 */
const EXPONENT_BIAS = 1023;
/**
 * MIN_NORMAL_EXPONENT is the minimum exponent of a normalized
 * floating point: -1022.
 */
exports.MIN_NORMAL_EXPONENT = -EXPONENT_BIAS + 1;
/**
 * MAX_NORMAL_EXPONENT is the maximum exponent of a normalized
 * floating point: 1023.
 */
exports.MAX_NORMAL_EXPONENT = EXPONENT_BIAS;
/**
 * MIN_VALUE is the smallest normal number: 2^-1022.
 */
exports.MIN_VALUE = Math.pow(2, -1022);
/**
 * getNormalBase2 extracts the normalized base-2 fractional exponent:
 * the k in value = f x 2**k with f in [1, 2). Not called for subnormal
 * numbers.
 * @param {number} value - the value to determine normalized base-2 fractional
 * exponent for
 * @returns {number} the normalized base-2 exponent
 */
function getNormalBase2(value) {
    const view = new DataView(new ArrayBuffer(8));
    view.setFloat64(0, value);
    // Only the hi 32 bits are needed: sign(1) | exponent(11) | significand(20).
    const hiWord = view.getUint32(0);
    const biasedExponent = (hiWord & EXPONENT_MASK) >> 20;
    return biasedExponent - EXPONENT_BIAS;
}
exports.getNormalBase2 = getNormalBase2;
/**
 * getSignificand returns the 52-bit (unsigned) significand, represented as
 * an ordinary non-negative JavaScript number.
 * @param {number} value - the floating point number to extract the significand from
 * @returns {number} The 52-bit significand
 */
function getSignificand(value) {
    const view = new DataView(new ArrayBuffer(8));
    view.setFloat64(0, value);
    const hiWord = view.getUint32(0);
    const loWord = view.getUint32(4);
    // Scale the significand bits of the hi word up by 2^32 using
    // multiplication rather than <<, which would truncate to 32 bits.
    const hiSignificand = (hiWord & SIGNIFICAND_MASK) * Math.pow(2, 32);
    return hiSignificand + loWord;
}
exports.getSignificand = getSignificand;
//# sourceMappingURL=ieee754.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ieee754.js","sourceRoot":"","sources":["../../../../../src/aggregator/exponential-histogram/mapping/ieee754.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH;;;;;;;;GAQG;AAEU,QAAA,iBAAiB,GAAG,EAAE,CAAC;AAEpC;;;GAGG;AACH,MAAM,aAAa,GAAG,UAAU,CAAC;AAEjC;;;GAGG;AACH,MAAM,gBAAgB,GAAG,OAAO,CAAC;AAEjC;;;GAGG;AACH,MAAM,aAAa,GAAG,IAAI,CAAC;AAE3B;;;GAGG;AACU,QAAA,mBAAmB,GAAG,CAAC,aAAa,GAAG,CAAC,CAAC;AAEtD;;;GAGG;AACU,QAAA,mBAAmB,GAAG,aAAa,CAAC;AAEjD;;GAEG;AACU,QAAA,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC;AAE5C;;;;;;;;GAQG;AACH,SAAgB,cAAc,CAAC,KAAa;IAC1C,MAAM,EAAE,GAAG,IAAI,QAAQ,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5C,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;IACxB,8CAA8C;IAC9C,MAAM,MAAM,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;IAC/B,MAAM,OAAO,GAAG,CAAC,MAAM,GAAG,aAAa,CAAC,IAAI,EAAE,CAAC;IAC/C,OAAO,OAAO,GAAG,aAAa,CAAC;AACjC,CAAC;AAPD,wCAOC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,KAAa;IAC1C,MAAM,EAAE,GAAG,IAAI,QAAQ,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5C,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;IACxB,kDAAkD;IAClD,MAAM,MAAM,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;IAC/B,MAAM,MAAM,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;IAC/B,+EAA+E;IAC/E,gFAAgF;IAChF,MAAM,iBAAiB,GAAG,CAAC,MAAM,GAAG,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IACxE,wCAAwC;IACxC,OAAO,iBAAiB,GAAG,MAAM,CAAC;AACpC,CAAC;AAXD,wCAWC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * The functions and constants in this file allow us to interact\n * with the internal representation of an IEEE 64-bit floating point\n * number. We need to work with all 64-bits, thus, care needs to be\n * taken when working with Javascript's bitwise operators (<<, >>, &,\n * |, etc) as they truncate operands to 32-bits. 
In order to work around\n * this we work with the 64-bits as two 32-bit halves, perform bitwise\n * operations on them independently, and combine the results (if needed).\n */\n\nexport const SIGNIFICAND_WIDTH = 52;\n\n/**\n * EXPONENT_MASK is set to 1 for the hi 32-bits of an IEEE 754\n * floating point exponent: 0x7ff00000.\n */\nconst EXPONENT_MASK = 0x7ff00000;\n\n/**\n * SIGNIFICAND_MASK is the mask for the significand portion of the hi 32-bits\n * of an IEEE 754 double-precision floating-point value: 0xfffff\n */\nconst SIGNIFICAND_MASK = 0xfffff;\n\n/**\n * EXPONENT_BIAS is the exponent bias specified for encoding\n * the IEEE 754 double-precision floating point exponent: 1023\n */\nconst EXPONENT_BIAS = 1023;\n\n/**\n * MIN_NORMAL_EXPONENT is the minimum exponent of a normalized\n * floating point: -1022.\n */\nexport const MIN_NORMAL_EXPONENT = -EXPONENT_BIAS + 1;\n\n/**\n * MAX_NORMAL_EXPONENT is the maximum exponent of a normalized\n * floating point: 1023.\n */\nexport const MAX_NORMAL_EXPONENT = EXPONENT_BIAS;\n\n/**\n * MIN_VALUE is the smallest normal number\n */\nexport const MIN_VALUE = Math.pow(2, -1022);\n\n/**\n * getNormalBase2 extracts the normalized base-2 fractional exponent.\n * This returns k for the equation f x 2**k where f is\n * in the range [1, 2). 
Note that this function is not called for\n * subnormal numbers.\n * @param {number} value - the value to determine normalized base-2 fractional\n * exponent for\n * @returns {number} the normalized base-2 exponent\n */\nexport function getNormalBase2(value: number): number {\n const dv = new DataView(new ArrayBuffer(8));\n dv.setFloat64(0, value);\n // access the raw 64-bit float as 32-bit uints\n const hiBits = dv.getUint32(0);\n const expBits = (hiBits & EXPONENT_MASK) >> 20;\n return expBits - EXPONENT_BIAS;\n}\n\n/**\n * GetSignificand returns the 52 bit (unsigned) significand as a signed value.\n * @param {number} value - the floating point number to extract the significand from\n * @returns {number} The 52-bit significand\n */\nexport function getSignificand(value: number): number {\n const dv = new DataView(new ArrayBuffer(8));\n dv.setFloat64(0, value);\n // access the raw 64-bit float as two 32-bit uints\n const hiBits = dv.getUint32(0);\n const loBits = dv.getUint32(4);\n // extract the significand bits from the hi bits and left shift 32 places note:\n // we can't use the native << operator as it will truncate the result to 32-bits\n const significandHiBits = (hiBits & SIGNIFICAND_MASK) * Math.pow(2, 32);\n // combine the hi and lo bits and return\n return significandHiBits + loBits;\n}\n"]}

View File

@@ -0,0 +1,13 @@
/**
 * MappingError is thrown by mappings when an index or scale falls outside
 * the supported range.
 */
export declare class MappingError extends Error {
}
/**
 * The mapping interface is used by the exponential histogram to determine
 * where to bucket values. The interface is implemented by ExponentMapping,
 * used for scales [-10, 0] and LogarithmMapping, used for scales [1, 20].
 */
export interface Mapping {
    /** Maps a measurement value to a bucket index at this scale. */
    mapToIndex(value: number): number;
    /** Returns the inclusive lower boundary of the bucket at the given index. */
    lowerBoundary(index: number): number;
    /** The scale in use by this mapping. */
    get scale(): number;
}
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MappingError = void 0;
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */
// Error type thrown by the bucket mappings (e.g. getMapping on an
// out-of-range scale, LogarithmMapping.lowerBoundary on index overflow).
class MappingError extends Error {
}
exports.MappingError = MappingError;
//# sourceMappingURL=types.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../../../src/aggregator/exponential-histogram/mapping/types.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACH,MAAa,YAAa,SAAQ,KAAK;CAAG;AAA1C,oCAA0C","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\nexport class MappingError extends Error {}\n\n/**\n * The mapping interface is used by the exponential histogram to determine\n * where to bucket values. The interface is implemented by ExponentMapping,\n * used for scales [-10, 0] and LogarithmMapping, used for scales [1, 20].\n */\nexport interface Mapping {\n mapToIndex(value: number): number;\n lowerBoundary(index: number): number;\n get scale(): number;\n}\n"]}

View File

@@ -0,0 +1,23 @@
/**
 * Note: other languages provide this as a built in function. This is
 * a naive, but functionally correct implementation. This is used sparingly,
 * when creating a new mapping in a running application.
 *
 * ldexp returns frac × 2**exp. With the following special cases:
 *   ldexp(±0, exp) = ±0
 *   ldexp(±Inf, exp) = ±Inf
 *   ldexp(NaN, exp) = NaN
 * @param frac
 * @param exp
 * @returns {number}
 */
export declare function ldexp(frac: number, exp: number): number;
/**
 * Computes the next power of two that is greater than or equal to v.
 * This implementation is more efficient than, but functionally equivalent
 * to, Math.pow(2, Math.ceil(Math.log(x)/Math.log(2))).
 * @param v
 * @returns {number}
 */
export declare function nextGreaterSquare(v: number): number;
//# sourceMappingURL=util.d.ts.map

View File

@@ -0,0 +1,52 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.nextGreaterSquare = exports.ldexp = void 0;
/**
 * Note: other languages provide this as a built in function. This is
 * a naive, but functionally correct implementation, used sparingly when
 * creating a new mapping in a running application.
 *
 * ldexp returns frac × 2**exp, with the special cases:
 *   ldexp(±0, exp) = ±0
 *   ldexp(±Inf, exp) = ±Inf
 *   ldexp(NaN, exp) = NaN
 * @param frac
 * @param exp
 * @returns {number}
 */
function ldexp(frac, exp) {
    // ±0, ±Infinity and NaN all scale to themselves; Number.isFinite is
    // false for both infinities and for NaN.
    if (frac === 0 || !Number.isFinite(frac)) {
        return frac;
    }
    return frac * Math.pow(2, exp);
}
exports.ldexp = ldexp;
/**
 * Computes the least power of two that is greater than or equal to v.
 * More efficient than, but functionally equivalent to,
 * Math.pow(2, Math.ceil(Math.log(x)/Math.log(2))).
 * @param v
 * @returns {number}
 */
function nextGreaterSquare(v) {
    // Classic bit trick: smear the highest set bit of (v - 1) into every
    // lower position, then add one to land on the next power of two. See
    // https://stackoverflow.com/questions/466204/rounding-up-to-next-power-of-2
    v--;
    for (const shift of [1, 2, 4, 8, 16]) {
        v |= v >> shift;
    }
    return v + 1;
}
exports.nextGreaterSquare = nextGreaterSquare;
//# sourceMappingURL=util.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"util.js","sourceRoot":"","sources":["../../../../src/aggregator/exponential-histogram/util.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH;;;;;;;;;;;;GAYG;AACH,SAAgB,KAAK,CAAC,IAAY,EAAE,GAAW;IAC7C,IACE,IAAI,KAAK,CAAC;QACV,IAAI,KAAK,MAAM,CAAC,iBAAiB;QACjC,IAAI,KAAK,MAAM,CAAC,iBAAiB;QACjC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,EAClB;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AACjC,CAAC;AAVD,sBAUC;AAED;;;;;;GAMG;AACH,SAAgB,iBAAiB,CAAC,CAAS;IACzC,2DAA2D;IAC3D,sDAAsD;IACtD,yFAAyF;IACzF,CAAC,EAAE,CAAC;IACJ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACZ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACZ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACZ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACZ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;IACb,CAAC,EAAE,CAAC;IACJ,OAAO,CAAC,CAAC;AACX,CAAC;AAZD,8CAYC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * Note: other languages provide this as a built in function. This is\n * a naive, but functionally correct implementation. This is used sparingly,\n * when creating a new mapping in a running application.\n *\n * ldexp returns frac × 2**exp. With the following special cases:\n * ldexp(±0, exp) = ±0\n * ldexp(±Inf, exp) = ±Inf\n * ldexp(NaN, exp) = NaN\n * @param frac\n * @param exp\n * @returns {number}\n */\nexport function ldexp(frac: number, exp: number): number {\n if (\n frac === 0 ||\n frac === Number.POSITIVE_INFINITY ||\n frac === Number.NEGATIVE_INFINITY ||\n Number.isNaN(frac)\n ) {\n return frac;\n }\n return frac * Math.pow(2, exp);\n}\n\n/**\n * Computes the next power of two that is greater than or equal to v.\n * This implementation more efficient than, but functionally equivalent\n * to Math.pow(2, Math.ceil(Math.log(x)/Math.log(2))).\n * @param v\n * @returns {number}\n */\nexport function nextGreaterSquare(v: number): number {\n // The following expression computes the least power-of-two\n // that is >= v. 
There are a number of tricky ways to\n // do this, see https://stackoverflow.com/questions/466204/rounding-up-to-next-power-of-2\n v--;\n v |= v >> 1;\n v |= v >> 2;\n v |= v >> 4;\n v |= v >> 8;\n v |= v >> 16;\n v++;\n return v;\n}\n"]}

View File

@@ -0,0 +1,7 @@
// Public aggregator surface: one accumulation/aggregator pair per
// aggregation kind, plus the shared Aggregator interface.
export { DropAggregator } from './Drop';
export { HistogramAccumulation, HistogramAggregator } from './Histogram';
export { ExponentialHistogramAccumulation, ExponentialHistogramAggregator, } from './ExponentialHistogram';
export { LastValueAccumulation, LastValueAggregator } from './LastValue';
export { SumAccumulation, SumAggregator } from './Sum';
export type { Aggregator } from './types';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1,22 @@
"use strict";
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.SumAggregator = exports.SumAccumulation = exports.LastValueAggregator = exports.LastValueAccumulation = exports.ExponentialHistogramAggregator = exports.ExponentialHistogramAccumulation = exports.HistogramAggregator = exports.HistogramAccumulation = exports.DropAggregator = void 0;
// Compiled re-export barrel: each defineProperty below forwards one named
// export from its implementation module.
var Drop_1 = require("./Drop");
Object.defineProperty(exports, "DropAggregator", { enumerable: true, get: function () { return Drop_1.DropAggregator; } });
var Histogram_1 = require("./Histogram");
Object.defineProperty(exports, "HistogramAccumulation", { enumerable: true, get: function () { return Histogram_1.HistogramAccumulation; } });
Object.defineProperty(exports, "HistogramAggregator", { enumerable: true, get: function () { return Histogram_1.HistogramAggregator; } });
var ExponentialHistogram_1 = require("./ExponentialHistogram");
Object.defineProperty(exports, "ExponentialHistogramAccumulation", { enumerable: true, get: function () { return ExponentialHistogram_1.ExponentialHistogramAccumulation; } });
Object.defineProperty(exports, "ExponentialHistogramAggregator", { enumerable: true, get: function () { return ExponentialHistogram_1.ExponentialHistogramAggregator; } });
var LastValue_1 = require("./LastValue");
Object.defineProperty(exports, "LastValueAccumulation", { enumerable: true, get: function () { return LastValue_1.LastValueAccumulation; } });
Object.defineProperty(exports, "LastValueAggregator", { enumerable: true, get: function () { return LastValue_1.LastValueAggregator; } });
var Sum_1 = require("./Sum");
Object.defineProperty(exports, "SumAccumulation", { enumerable: true, get: function () { return Sum_1.SumAccumulation; } });
Object.defineProperty(exports, "SumAggregator", { enumerable: true, get: function () { return Sum_1.SumAggregator; } });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/aggregator/index.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,+BAAwC;AAA/B,sGAAA,cAAc,OAAA;AACvB,yCAAyE;AAAhE,kHAAA,qBAAqB,OAAA;AAAE,gHAAA,mBAAmB,OAAA;AACnD,+DAGgC;AAF9B,wIAAA,gCAAgC,OAAA;AAChC,sIAAA,8BAA8B,OAAA;AAEhC,yCAAyE;AAAhE,kHAAA,qBAAqB,OAAA;AAAE,gHAAA,mBAAmB,OAAA;AACnD,6BAAuD;AAA9C,sGAAA,eAAe,OAAA;AAAE,oGAAA,aAAa,OAAA","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport { DropAggregator } from './Drop';\nexport { HistogramAccumulation, HistogramAggregator } from './Histogram';\nexport {\n ExponentialHistogramAccumulation,\n ExponentialHistogramAggregator,\n} from './ExponentialHistogram';\nexport { LastValueAccumulation, LastValueAggregator } from './LastValue';\nexport { SumAccumulation, SumAggregator } from './Sum';\nexport type { Aggregator } from './types';\n"]}

View File

@@ -0,0 +1,111 @@
import type { HrTime, Attributes } from '@opentelemetry/api';
import type { AggregationTemporality } from '../export/AggregationTemporality';
import type { MetricData } from '../export/MetricData';
import type { Maybe } from '../utils';
import type { InstrumentDescriptor } from '../InstrumentDescriptor';
/** The kind of aggregator. */
export declare enum AggregatorKind {
DROP = 0,
SUM = 1,
LAST_VALUE = 2,
HISTOGRAM = 3,
EXPONENTIAL_HISTOGRAM = 4
}
/** DataPoint value type for SumAggregation. */
export type Sum = number;
/** DataPoint value type for LastValueAggregation. */
export type LastValue = number;
/** DataPoint value type for HistogramAggregation. */
export interface Histogram {
/**
* Buckets are implemented using two different arrays:
* - boundaries: contains every finite bucket boundary, which are inclusive upper bounds
* - counts: contains event counts for each bucket
*
* Note that we'll always have n+1 buckets, where n is the number of boundaries.
* This is because we need to count events that are higher than the upper boundary.
*
* Example: if we measure the values: [5, 30, 5, 40, 5, 15, 15, 15, 25]
* with the boundaries [ 10, 20, 30 ], we will have the following state:
*
* buckets: {
* boundaries: [10, 20, 30],
* counts: [3, 3, 2, 1],
* }
*/
buckets: {
boundaries: number[];
counts: number[];
};
sum?: number;
count: number;
min?: number;
max?: number;
}
/** DataPoint value type for ExponentialHistogramAggregation. */
export interface ExponentialHistogram {
count: number;
sum?: number;
scale: number;
zeroCount: number;
positive: {
offset: number;
bucketCounts: number[];
};
negative: {
offset: number;
bucketCounts: number[];
};
min?: number;
max?: number;
}
/**
* An Aggregator accumulation state.
*/
export interface Accumulation {
setStartTime(startTime: HrTime): void;
record(value: number): void;
}
export type AccumulationRecord<T> = [Attributes, T];
/**
* Base interface for aggregators. Aggregators are responsible for holding
* aggregated values and taking a snapshot of these values upon export.
*/
export interface Aggregator<T> {
/** The kind of the aggregator. */
kind: AggregatorKind;
/**
* Create a clean state of accumulation.
*/
createAccumulation(startTime: HrTime): T;
/**
* Returns the result of the merge of the given accumulations.
*
* This should always assume that the accumulations do not overlap and merge together for a new
* cumulative report.
*
* @param previous the previously captured accumulation
* @param delta the newly captured (delta) accumulation
* @returns the result of the merge of the given accumulations
*/
merge(previous: T, delta: T): T;
/**
* Returns a new DELTA aggregation by comparing two cumulative measurements.
*
* @param previous the previously captured accumulation
* @param current the newly captured (cumulative) accumulation
* @returns The resulting delta accumulation
*/
diff(previous: T, current: T): T;
/**
* Returns the {@link MetricData} that this {@link Aggregator} will produce.
*
* @param descriptor the metric descriptor.
* @param aggregationTemporality the temporality of the resulting {@link MetricData}
* @param accumulationByAttributes the array of attributes and accumulation pairs.
* @param endTime the end time of the metric data.
* @return the {@link MetricData} that this {@link Aggregator} will produce.
*/
toMetricData(descriptor: InstrumentDescriptor, aggregationTemporality: AggregationTemporality, accumulationByAttributes: AccumulationRecord<T>[], endTime: HrTime): Maybe<MetricData>;
}
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1,17 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.AggregatorKind = void 0;
/** The kind of aggregator. */
var AggregatorKind;
(function (AggregatorKind) {
    // Build both the forward (name -> ordinal) and reverse (ordinal -> name)
    // entries that TypeScript emits for numeric enums.
    const members = ["DROP", "SUM", "LAST_VALUE", "HISTOGRAM", "EXPONENTIAL_HISTOGRAM"];
    members.forEach((name, ordinal) => {
        AggregatorKind[(AggregatorKind[name] = ordinal)] = name;
    });
})(AggregatorKind = exports.AggregatorKind || (exports.AggregatorKind = {}));
//# sourceMappingURL=types.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/aggregator/types.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAQH,8BAA8B;AAC9B,IAAY,cAMX;AAND,WAAY,cAAc;IACxB,mDAAI,CAAA;IACJ,iDAAG,CAAA;IACH,+DAAU,CAAA;IACV,6DAAS,CAAA;IACT,qFAAqB,CAAA;AACvB,CAAC,EANW,cAAc,GAAd,sBAAc,KAAd,sBAAc,QAMzB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { HrTime, Attributes } from '@opentelemetry/api';\nimport type { AggregationTemporality } from '../export/AggregationTemporality';\nimport type { MetricData } from '../export/MetricData';\nimport type { Maybe } from '../utils';\nimport type { InstrumentDescriptor } from '../InstrumentDescriptor';\n\n/** The kind of aggregator. */\nexport enum AggregatorKind {\n DROP,\n SUM,\n LAST_VALUE,\n HISTOGRAM,\n EXPONENTIAL_HISTOGRAM,\n}\n\n/** DataPoint value type for SumAggregation. */\nexport type Sum = number;\n\n/** DataPoint value type for LastValueAggregation. */\nexport type LastValue = number;\n\n/** DataPoint value type for HistogramAggregation. */\nexport interface Histogram {\n /**\n * Buckets are implemented using two different arrays:\n * - boundaries: contains every finite bucket boundary, which are inclusive upper bounds\n * - counts: contains event counts for each bucket\n *\n * Note that we'll always have n+1 buckets, where n is the number of boundaries.\n * This is because we need to count events that are higher than the upper boundary.\n *\n * Example: if we measure the values: [5, 30, 5, 40, 5, 15, 15, 15, 25]\n * with the boundaries [ 10, 20, 30 ], we will have the following state:\n *\n * buckets: {\n *\tboundaries: [10, 20, 30],\n *\tcounts: [3, 3, 2, 1],\n * }\n */\n buckets: {\n boundaries: number[];\n counts: number[];\n };\n sum?: number;\n count: number;\n min?: number;\n max?: number;\n}\n\n/** DataPoint value type for ExponentialHistogramAggregation. 
*/\nexport interface ExponentialHistogram {\n count: number;\n sum?: number;\n scale: number;\n zeroCount: number;\n positive: {\n offset: number;\n bucketCounts: number[];\n };\n negative: {\n offset: number;\n bucketCounts: number[];\n };\n min?: number;\n max?: number;\n}\n\n/**\n * An Aggregator accumulation state.\n */\nexport interface Accumulation {\n setStartTime(startTime: HrTime): void;\n record(value: number): void;\n}\n\nexport type AccumulationRecord<T> = [Attributes, T];\n\n/**\n * Base interface for aggregators. Aggregators are responsible for holding\n * aggregated values and taking a snapshot of these values upon export.\n */\nexport interface Aggregator<T> {\n /** The kind of the aggregator. */\n kind: AggregatorKind;\n\n /**\n * Create a clean state of accumulation.\n */\n createAccumulation(startTime: HrTime): T;\n\n /**\n * Returns the result of the merge of the given accumulations.\n *\n * This should always assume that the accumulations do not overlap and merge together for a new\n * cumulative report.\n *\n * @param previous the previously captured accumulation\n * @param delta the newly captured (delta) accumulation\n * @returns the result of the merge of the given accumulations\n */\n merge(previous: T, delta: T): T;\n\n /**\n * Returns a new DELTA aggregation by comparing two cumulative measurements.\n *\n * @param previous the previously captured accumulation\n * @param current the newly captured (cumulative) accumulation\n * @returns The resulting delta accumulation\n */\n diff(previous: T, current: T): T;\n\n /**\n * Returns the {@link MetricData} that this {@link Aggregator} will produce.\n *\n * @param descriptor the metric descriptor.\n * @param aggregationTemporality the temporality of the resulting {@link MetricData}\n * @param accumulationByAttributes the array of attributes and accumulation pairs.\n * @param endTime the end time of the metric data.\n * @return the {@link MetricData} that this {@link Aggregator} will produce.\n 
*/\n toMetricData(\n descriptor: InstrumentDescriptor,\n aggregationTemporality: AggregationTemporality,\n accumulationByAttributes: AccumulationRecord<T>[],\n endTime: HrTime\n ): Maybe<MetricData>;\n}\n"]}

View File

@@ -0,0 +1,14 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
import { FixedSizeExemplarReservoirBase } from './ExemplarReservoir';
/**
 * AlignedHistogramBucketExemplarReservoir takes the same boundaries
 * configuration of a Histogram. This algorithm keeps the last seen measurement
 * that falls within a histogram bucket.
 */
export declare class AlignedHistogramBucketExemplarReservoir extends FixedSizeExemplarReservoirBase {
    private _boundaries;
    /**
     * @param boundaries inclusive upper bucket boundaries; one reservoir slot
     * is created per bucket, plus one for values above the last boundary
     */
    constructor(boundaries: number[]);
    private _findBucketIndex;
    /** Offers a measurement to the reservoir slot of the bucket it falls in. */
    offer(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): void;
}
//# sourceMappingURL=AlignedHistogramBucketExemplarReservoir.d.ts.map

View File

@@ -0,0 +1,34 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.AlignedHistogramBucketExemplarReservoir = void 0;
const ExemplarReservoir_1 = require("./ExemplarReservoir");
/**
 * AlignedHistogramBucketExemplarReservoir takes the same boundaries
 * configuration of a Histogram. This algorithm keeps the last seen measurement
 * that falls within a histogram bucket.
 */
class AlignedHistogramBucketExemplarReservoir extends ExemplarReservoir_1.FixedSizeExemplarReservoirBase {
    _boundaries;
    constructor(boundaries) {
        // One reservoir slot per bucket, including the overflow bucket above
        // the last boundary.
        super(boundaries.length + 1);
        this._boundaries = boundaries;
    }
    // Index of the first boundary >= value, or boundaries.length for the
    // overflow bucket. Timestamp/attributes/context are unused here but kept
    // for signature parity with offer().
    _findBucketIndex(value, _timestamp, _attributes, _ctx) {
        const position = this._boundaries.findIndex(boundary => value <= boundary);
        return position < 0 ? this._boundaries.length : position;
    }
    offer(value, timestamp, attributes, ctx) {
        const bucket = this._findBucketIndex(value, timestamp, attributes, ctx);
        this._reservoirStorage[bucket].offer(value, timestamp, attributes, ctx);
    }
}
exports.AlignedHistogramBucketExemplarReservoir = AlignedHistogramBucketExemplarReservoir;
//# sourceMappingURL=AlignedHistogramBucketExemplarReservoir.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"AlignedHistogramBucketExemplarReservoir.js","sourceRoot":"","sources":["../../../src/exemplar/AlignedHistogramBucketExemplarReservoir.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAGH,2DAAqE;AAErE;;;;GAIG;AACH,MAAa,uCAAwC,SAAQ,kDAA8B;IACjF,WAAW,CAAW;IAC9B,YAAY,UAAoB;QAC9B,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC7B,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;IAChC,CAAC;IAEO,gBAAgB,CACtB,KAAa,EACb,UAAkB,EAClB,WAAuB,EACvB,IAAa;QAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChD,IAAI,KAAK,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;gBAChC,OAAO,CAAC,CAAC;aACV;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;IACjC,CAAC;IAED,KAAK,CACH,KAAa,EACb,SAAiB,EACjB,UAAsB,EACtB,GAAY;QAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;QACvE,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IACzE,CAAC;CACF;AA9BD,0FA8BC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Context, HrTime, Attributes } from '@opentelemetry/api';\nimport { FixedSizeExemplarReservoirBase } from './ExemplarReservoir';\n\n/**\n * AlignedHistogramBucketExemplarReservoir takes the same boundaries\n * configuration of a Histogram. 
This algorithm keeps the last seen measurement\n * that falls within a histogram bucket.\n */\nexport class AlignedHistogramBucketExemplarReservoir extends FixedSizeExemplarReservoirBase {\n private _boundaries: number[];\n constructor(boundaries: number[]) {\n super(boundaries.length + 1);\n this._boundaries = boundaries;\n }\n\n private _findBucketIndex(\n value: number,\n _timestamp: HrTime,\n _attributes: Attributes,\n _ctx: Context\n ) {\n for (let i = 0; i < this._boundaries.length; i++) {\n if (value <= this._boundaries[i]) {\n return i;\n }\n }\n return this._boundaries.length;\n }\n\n offer(\n value: number,\n timestamp: HrTime,\n attributes: Attributes,\n ctx: Context\n ): void {\n const index = this._findBucketIndex(value, timestamp, attributes, ctx);\n this._reservoirStorage[index].offer(value, timestamp, attributes, ctx);\n }\n}\n"]}

View File

@@ -0,0 +1,6 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
import type { ExemplarFilter } from './ExemplarFilter';
export declare class AlwaysSampleExemplarFilter implements ExemplarFilter {
    /** Always returns true: every measurement is eligible for exemplar sampling. */
    shouldSample(_value: number, _timestamp: HrTime, _attributes: Attributes, _ctx: Context): boolean;
}
//# sourceMappingURL=AlwaysSampleExemplarFilter.d.ts.map

View File

@@ -0,0 +1,14 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.AlwaysSampleExemplarFilter = void 0;
class AlwaysSampleExemplarFilter {
    /** Accepts every measurement for exemplar sampling; no filtering is applied. */
    shouldSample(_value, _timestamp, _attributes, _ctx) {
        // All arguments are intentionally ignored.
        return true;
    }
}
exports.AlwaysSampleExemplarFilter = AlwaysSampleExemplarFilter;
//# sourceMappingURL=AlwaysSampleExemplarFilter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"AlwaysSampleExemplarFilter.js","sourceRoot":"","sources":["../../../src/exemplar/AlwaysSampleExemplarFilter.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAKH,MAAa,0BAA0B;IACrC,YAAY,CACV,MAAc,EACd,UAAkB,EAClB,WAAuB,EACvB,IAAa;QAEb,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AATD,gEASC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Context, HrTime, Attributes } from '@opentelemetry/api';\nimport type { ExemplarFilter } from './ExemplarFilter';\n\nexport class AlwaysSampleExemplarFilter implements ExemplarFilter {\n shouldSample(\n _value: number,\n _timestamp: HrTime,\n _attributes: Attributes,\n _ctx: Context\n ): boolean {\n return true;\n }\n}\n"]}

View File

@@ -0,0 +1,15 @@
import type { HrTime, Attributes } from '@opentelemetry/api';
/**
* A representation of an exemplar, which is a sample input measurement.
* Exemplars also hold information about the environment when the measurement
* was recorded, for example the span and trace ID of the active span when the
* exemplar was recorded.
*/
export type Exemplar = {
    /** Key/value pairs filtered out by the aggregator but recorded alongside the original measurement. */
    filteredAttributes: Attributes;
    /** The value of the measurement that was recorded. */
    value: number;
    /** The exact time when this exemplar was recorded. */
    timestamp: HrTime;
    /** Span ID of the exemplar trace; absent when the measurement was not recorded inside a sampled trace. */
    spanId?: string;
    /** Trace ID of the exemplar trace; absent when the measurement was not recorded inside a sampled trace. */
    traceId?: string;
};
//# sourceMappingURL=Exemplar.d.ts.map

View File

@@ -0,0 +1,7 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=Exemplar.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"Exemplar.js","sourceRoot":"","sources":["../../../src/exemplar/Exemplar.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { HrTime, Attributes } from '@opentelemetry/api';\n\n/**\n * A representation of an exemplar, which is a sample input measurement.\n * Exemplars also hold information about the environment when the measurement\n * was recorded, for example the span and trace ID of the active span when the\n * exemplar was recorded.\n */\nexport type Exemplar = {\n // The set of key/value pairs that were filtered out by the aggregator, but\n // recorded alongside the original measurement. Only key/value pairs that were\n // filtered out by the aggregator should be included\n filteredAttributes: Attributes;\n\n // The value of the measurement that was recorded.\n value: number;\n\n // timestamp is the exact time when this exemplar was recorded\n timestamp: HrTime;\n\n // (Optional) Span ID of the exemplar trace.\n // span_id may be missing if the measurement is not recorded inside a trace\n // or if the trace is not sampled.\n spanId?: string;\n\n // (Optional) Trace ID of the exemplar trace.\n // trace_id may be missing if the measurement is not recorded inside a trace\n // or if the trace is not sampled.\n traceId?: string;\n};\n"]}

View File

@@ -0,0 +1,18 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
/**
* This interface represents a ExemplarFilter. Exemplar filters are
* used to filter measurements before attempting to store them in a
* reservoir.
*/
export interface ExemplarFilter {
/**
* Returns whether or not a reservoir should attempt to filter a measurement.
*
* @param value The value of the measurement
* @param timestamp A timestamp that best represents when the measurement was taken
* @param attributes The complete set of Attributes of the measurement
* @param ctx The Context of the measurement
*/
shouldSample(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): boolean;
}
//# sourceMappingURL=ExemplarFilter.d.ts.map

View File

@@ -0,0 +1,7 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=ExemplarFilter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ExemplarFilter.js","sourceRoot":"","sources":["../../../src/exemplar/ExemplarFilter.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Context, HrTime, Attributes } from '@opentelemetry/api';\n\n/**\n * This interface represents a ExemplarFilter. Exemplar filters are\n * used to filter measurements before attempting to store them in a\n * reservoir.\n */\nexport interface ExemplarFilter {\n /**\n * Returns whether or not a reservoir should attempt to filter a measurement.\n *\n * @param value The value of the measurement\n * @param timestamp A timestamp that best represents when the measurement was taken\n * @param attributes The complete set of Attributes of the measurement\n * @param ctx The Context of the measurement\n */\n shouldSample(\n value: number,\n timestamp: HrTime,\n attributes: Attributes,\n ctx: Context\n ): boolean;\n}\n"]}

View File

@@ -0,0 +1,43 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
import type { Exemplar } from './Exemplar';
/**
* An interface for an exemplar reservoir of samples.
*/
export interface ExemplarReservoir {
/** Offers a measurement to be sampled. */
offer(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): void;
/**
* Returns accumulated Exemplars and also resets the reservoir
* for the next sampling period
*
* @param pointAttributes The attributes associated with metric point.
*
* @returns a list of {@link Exemplar}s. Returned exemplars contain the attributes that were filtered out by the
* aggregator, but recorded alongside the original measurement.
*/
collect(pointAttributes: Attributes): Exemplar[];
}
declare class ExemplarBucket {
private value;
private attributes;
private timestamp;
private spanId?;
private traceId?;
private _offered;
offer(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): void;
collect(pointAttributes: Attributes): Exemplar | null;
}
export declare abstract class FixedSizeExemplarReservoirBase implements ExemplarReservoir {
protected _reservoirStorage: ExemplarBucket[];
protected _size: number;
constructor(size: number);
abstract offer(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): void;
maxSize(): number;
/**
* Resets the reservoir
*/
protected reset(): void;
collect(pointAttributes: Attributes): Exemplar[];
}
export {};
//# sourceMappingURL=ExemplarReservoir.d.ts.map

View File

@@ -0,0 +1,83 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.FixedSizeExemplarReservoirBase = void 0;
const api_1 = require("@opentelemetry/api");
// Holds at most one exemplar measurement; overwritten on each offer() and
// drained (and reset) by collect().
class ExemplarBucket {
    value = 0;
    attributes = {};
    timestamp = [0, 0];
    spanId;
    traceId;
    // True once offer() has been called since the last collect().
    _offered = false;
    // Record a measurement, overwriting any previous one. Span/trace IDs are
    // captured only when ctx carries a valid span context.
    offer(value, timestamp, attributes, ctx) {
        this.value = value;
        this.timestamp = timestamp;
        this.attributes = attributes;
        const spanContext = api_1.trace.getSpanContext(ctx);
        if (spanContext && (0, api_1.isSpanContextValid)(spanContext)) {
            this.spanId = spanContext.spanId;
            this.traceId = spanContext.traceId;
        }
        this._offered = true;
    }
    // Return the stored exemplar (or null if nothing was offered) with the
    // metric point's own attributes removed, then reset the bucket.
    collect(pointAttributes) {
        if (!this._offered)
            return null;
        // NOTE(review): currentAttributes aliases the attributes object passed
        // to offer(); the deletes below mutate that original object — confirm
        // callers pass a copy they own.
        const currentAttributes = this.attributes;
        // Drop attributes already present (with equal value) on the point.
        Object.keys(pointAttributes).forEach(key => {
            if (pointAttributes[key] === currentAttributes[key]) {
                delete currentAttributes[key];
            }
        });
        const retVal = {
            filteredAttributes: currentAttributes,
            value: this.value,
            timestamp: this.timestamp,
            spanId: this.spanId,
            traceId: this.traceId,
        };
        // Reset to the initial empty state for the next sampling period.
        this.attributes = {};
        this.value = 0;
        this.timestamp = [0, 0];
        this.spanId = undefined;
        this.traceId = undefined;
        this._offered = false;
        return retVal;
    }
}
// Base class for reservoirs with a fixed pool of exemplar buckets; subclasses
// decide, via offer(), which bucket (if any) a measurement lands in.
class FixedSizeExemplarReservoirBase {
    _reservoirStorage;
    _size;
    /** Allocates a fixed pool of `size` exemplar buckets. */
    constructor(size) {
        this._size = size;
        this._reservoirStorage = Array.from({ length: size }, () => new ExemplarBucket());
    }
    /** Number of buckets in the reservoir. */
    maxSize() {
        return this._size;
    }
    /**
     * Resets the reservoir
     */
    reset() { }
    // Drain every bucket that received a measurement since the last cycle,
    // then reset the reservoir for the next sampling period.
    collect(pointAttributes) {
        const exemplars = [];
        for (const bucket of this._reservoirStorage) {
            const exemplar = bucket.collect(pointAttributes);
            if (exemplar !== null) {
                exemplars.push(exemplar);
            }
        }
        this.reset();
        return exemplars;
    }
}
exports.FixedSizeExemplarReservoirBase = FixedSizeExemplarReservoirBase;
//# sourceMappingURL=ExemplarReservoir.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,6 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
import type { ExemplarFilter } from './ExemplarFilter';
export declare class NeverSampleExemplarFilter implements ExemplarFilter {
    /** Always returns false: exemplar sampling is effectively disabled. */
    shouldSample(_value: number, _timestamp: HrTime, _attributes: Attributes, _ctx: Context): boolean;
}
//# sourceMappingURL=NeverSampleExemplarFilter.d.ts.map

View File

@@ -0,0 +1,14 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.NeverSampleExemplarFilter = void 0;
class NeverSampleExemplarFilter {
    /** Rejects every measurement, disabling exemplar collection. */
    shouldSample(_value, _timestamp, _attributes, _ctx) {
        // All arguments are intentionally ignored.
        return false;
    }
}
exports.NeverSampleExemplarFilter = NeverSampleExemplarFilter;
//# sourceMappingURL=NeverSampleExemplarFilter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"NeverSampleExemplarFilter.js","sourceRoot":"","sources":["../../../src/exemplar/NeverSampleExemplarFilter.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAKH,MAAa,yBAAyB;IACpC,YAAY,CACV,MAAc,EACd,UAAkB,EAClB,WAAuB,EACvB,IAAa;QAEb,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AATD,8DASC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Context, HrTime, Attributes } from '@opentelemetry/api';\nimport type { ExemplarFilter } from './ExemplarFilter';\n\nexport class NeverSampleExemplarFilter implements ExemplarFilter {\n shouldSample(\n _value: number,\n _timestamp: HrTime,\n _attributes: Attributes,\n _ctx: Context\n ): boolean {\n return false;\n }\n}\n"]}

View File

@@ -0,0 +1,16 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
import { FixedSizeExemplarReservoirBase } from './ExemplarReservoir';
/**
* Fixed size reservoir that uses equivalent of naive reservoir sampling
* algorithm to accept measurements.
*
*/
export declare class SimpleFixedSizeExemplarReservoir extends FixedSizeExemplarReservoirBase {
private _numMeasurementsSeen;
constructor(size: number);
private getRandomInt;
private _findBucketIndex;
offer(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): void;
reset(): void;
}
//# sourceMappingURL=SimpleFixedSizeExemplarReservoir.d.ts.map

View File

@@ -0,0 +1,41 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.SimpleFixedSizeExemplarReservoir = void 0;
const ExemplarReservoir_1 = require("./ExemplarReservoir");
/**
* Fixed size reservoir that uses equivalent of naive reservoir sampling
* algorithm to accept measurements.
*
*/
class SimpleFixedSizeExemplarReservoir extends ExemplarReservoir_1.FixedSizeExemplarReservoirBase {
    // Count of measurements offered since the last reset().
    _numMeasurementsSeen;
    constructor(size) {
        super(size);
        this._numMeasurementsSeen = 0;
    }
    // Uniform random integer in the half-open range [min, max).
    getRandomInt(min, max) {
        return min + Math.floor(Math.random() * (max - min));
    }
    // Naive reservoir sampling: the first `_size` measurements each claim a
    // bucket; afterwards the i-th measurement replaces a random bucket with
    // probability _size / i. Returns -1 when the measurement is discarded.
    _findBucketIndex(_value, _timestamp, _attributes, _ctx) {
        if (this._numMeasurementsSeen < this._size) {
            return this._numMeasurementsSeen++;
        }
        const candidate = this.getRandomInt(0, ++this._numMeasurementsSeen);
        return candidate < this._size ? candidate : -1;
    }
    offer(value, timestamp, attributes, ctx) {
        const bucketIndex = this._findBucketIndex(value, timestamp, attributes, ctx);
        if (bucketIndex !== -1) {
            this._reservoirStorage[bucketIndex].offer(value, timestamp, attributes, ctx);
        }
    }
    // Also clear the measurement counter when the reservoir is drained.
    reset() {
        this._numMeasurementsSeen = 0;
    }
}
exports.SimpleFixedSizeExemplarReservoir = SimpleFixedSizeExemplarReservoir;
//# sourceMappingURL=SimpleFixedSizeExemplarReservoir.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"SimpleFixedSizeExemplarReservoir.js","sourceRoot":"","sources":["../../../src/exemplar/SimpleFixedSizeExemplarReservoir.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAGH,2DAAqE;AAErE;;;;GAIG;AACH,MAAa,gCAAiC,SAAQ,kDAA8B;IAC1E,oBAAoB,CAAS;IACrC,YAAY,IAAY;QACtB,KAAK,CAAC,IAAI,CAAC,CAAC;QACZ,IAAI,CAAC,oBAAoB,GAAG,CAAC,CAAC;IAChC,CAAC;IAEO,YAAY,CAAC,GAAW,EAAE,GAAW;QAC3C,YAAY;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;IACvD,CAAC;IAEO,gBAAgB,CACtB,MAAc,EACd,UAAkB,EAClB,WAAuB,EACvB,IAAa;QAEb,IAAI,IAAI,CAAC,oBAAoB,GAAG,IAAI,CAAC,KAAK;YACxC,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC;QACrC,MAAM,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,EAAE,EAAE,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChE,OAAO,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACzC,CAAC;IAED,KAAK,CACH,KAAa,EACb,SAAiB,EACjB,UAAsB,EACtB,GAAY;QAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;QACvE,IAAI,KAAK,KAAK,CAAC,CAAC,EAAE;YAChB,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;SACxE;IACH,CAAC;IAEQ,KAAK;QACZ,IAAI,CAAC,oBAAoB,GAAG,CAAC,CAAC;IAChC,CAAC;CACF;AAvCD,4EAuCC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Context, HrTime, Attributes } from '@opentelemetry/api';\nimport { FixedSizeExemplarReservoirBase } from './ExemplarReservoir';\n\n/**\n * Fixed size reservoir that uses equivalent of naive reservoir sampling\n * algorithm to accept measurements.\n *\n */\nexport class SimpleFixedSizeExemplarReservoir extends FixedSizeExemplarReservoirBase {\n private _numMeasurementsSeen: number;\n constructor(size: number) {\n super(size);\n this._numMeasurementsSeen = 0;\n }\n\n private getRandomInt(min: number, max: number) {\n //[min, max)\n return Math.floor(Math.random() * (max - min) + min);\n }\n\n private _findBucketIndex(\n _value: number,\n _timestamp: HrTime,\n _attributes: 
Attributes,\n _ctx: Context\n ) {\n if (this._numMeasurementsSeen < this._size)\n return this._numMeasurementsSeen++;\n const index = this.getRandomInt(0, ++this._numMeasurementsSeen);\n return index < this._size ? index : -1;\n }\n\n offer(\n value: number,\n timestamp: HrTime,\n attributes: Attributes,\n ctx: Context\n ): void {\n const index = this._findBucketIndex(value, timestamp, attributes, ctx);\n if (index !== -1) {\n this._reservoirStorage[index].offer(value, timestamp, attributes, ctx);\n }\n }\n\n override reset() {\n this._numMeasurementsSeen = 0;\n }\n}\n"]}

View File

@@ -0,0 +1,6 @@
import type { Context, HrTime, Attributes } from '@opentelemetry/api';
import type { ExemplarFilter } from './ExemplarFilter';
export declare class WithTraceExemplarFilter implements ExemplarFilter {
    /** Returns true only when ctx carries a valid span context whose trace is sampled. */
    shouldSample(value: number, timestamp: HrTime, attributes: Attributes, ctx: Context): boolean;
}
//# sourceMappingURL=WithTraceExemplarFilter.d.ts.map

View File

@@ -0,0 +1,18 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.WithTraceExemplarFilter = void 0;
const api_1 = require("@opentelemetry/api");
class WithTraceExemplarFilter {
    /**
     * Samples a measurement only when it was recorded inside a valid,
     * sampled span; all other measurements are rejected.
     */
    shouldSample(value, timestamp, attributes, ctx) {
        const spanContext = api_1.trace.getSpanContext(ctx);
        if (!spanContext || !(0, api_1.isSpanContextValid)(spanContext)) {
            return false;
        }
        return (spanContext.traceFlags & api_1.TraceFlags.SAMPLED) !== 0;
    }
}
exports.WithTraceExemplarFilter = WithTraceExemplarFilter;
//# sourceMappingURL=WithTraceExemplarFilter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"WithTraceExemplarFilter.js","sourceRoot":"","sources":["../../../src/exemplar/WithTraceExemplarFilter.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAGH,4CAA2E;AAG3E,MAAa,uBAAuB;IAClC,YAAY,CACV,KAAa,EACb,SAAiB,EACjB,UAAsB,EACtB,GAAY;QAEZ,MAAM,WAAW,GAAG,WAAK,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC;QAC9C,IAAI,CAAC,WAAW,IAAI,CAAC,IAAA,wBAAkB,EAAC,WAAW,CAAC;YAAE,OAAO,KAAK,CAAC;QACnE,OAAO,WAAW,CAAC,UAAU,GAAG,gBAAU,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC;IACpE,CAAC;CACF;AAXD,0DAWC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Context, HrTime, Attributes } from '@opentelemetry/api';\nimport { isSpanContextValid, trace, TraceFlags } from '@opentelemetry/api';\nimport type { ExemplarFilter } from './ExemplarFilter';\n\nexport class WithTraceExemplarFilter implements ExemplarFilter {\n shouldSample(\n value: number,\n timestamp: HrTime,\n attributes: Attributes,\n ctx: Context\n ): boolean {\n const spanContext = trace.getSpanContext(ctx);\n if (!spanContext || !isSpanContextValid(spanContext)) return false;\n return spanContext.traceFlags & TraceFlags.SAMPLED ? true : false;\n }\n}\n"]}

View File

@@ -0,0 +1,10 @@
export type { Exemplar } from './Exemplar';
export type { ExemplarFilter } from './ExemplarFilter';
export { AlwaysSampleExemplarFilter } from './AlwaysSampleExemplarFilter';
export { NeverSampleExemplarFilter } from './NeverSampleExemplarFilter';
export { WithTraceExemplarFilter } from './WithTraceExemplarFilter';
export type { ExemplarReservoir } from './ExemplarReservoir';
export { FixedSizeExemplarReservoirBase } from './ExemplarReservoir';
export { AlignedHistogramBucketExemplarReservoir } from './AlignedHistogramBucketExemplarReservoir';
export { SimpleFixedSizeExemplarReservoir } from './SimpleFixedSizeExemplarReservoir';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1,20 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
// CommonJS re-exports of the exemplar module's public surface
// (compiled output of src/exemplar/index.ts — see the adjacent source map).
exports.SimpleFixedSizeExemplarReservoir = exports.AlignedHistogramBucketExemplarReservoir = exports.FixedSizeExemplarReservoirBase = exports.WithTraceExemplarFilter = exports.NeverSampleExemplarFilter = exports.AlwaysSampleExemplarFilter = void 0;
// Exemplar filters.
var AlwaysSampleExemplarFilter_1 = require("./AlwaysSampleExemplarFilter");
Object.defineProperty(exports, "AlwaysSampleExemplarFilter", { enumerable: true, get: function () { return AlwaysSampleExemplarFilter_1.AlwaysSampleExemplarFilter; } });
var NeverSampleExemplarFilter_1 = require("./NeverSampleExemplarFilter");
Object.defineProperty(exports, "NeverSampleExemplarFilter", { enumerable: true, get: function () { return NeverSampleExemplarFilter_1.NeverSampleExemplarFilter; } });
var WithTraceExemplarFilter_1 = require("./WithTraceExemplarFilter");
Object.defineProperty(exports, "WithTraceExemplarFilter", { enumerable: true, get: function () { return WithTraceExemplarFilter_1.WithTraceExemplarFilter; } });
// Exemplar reservoirs.
var ExemplarReservoir_1 = require("./ExemplarReservoir");
Object.defineProperty(exports, "FixedSizeExemplarReservoirBase", { enumerable: true, get: function () { return ExemplarReservoir_1.FixedSizeExemplarReservoirBase; } });
var AlignedHistogramBucketExemplarReservoir_1 = require("./AlignedHistogramBucketExemplarReservoir");
Object.defineProperty(exports, "AlignedHistogramBucketExemplarReservoir", { enumerable: true, get: function () { return AlignedHistogramBucketExemplarReservoir_1.AlignedHistogramBucketExemplarReservoir; } });
var SimpleFixedSizeExemplarReservoir_1 = require("./SimpleFixedSizeExemplarReservoir");
Object.defineProperty(exports, "SimpleFixedSizeExemplarReservoir", { enumerable: true, get: function () { return SimpleFixedSizeExemplarReservoir_1.SimpleFixedSizeExemplarReservoir; } });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/exemplar/index.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAIH,2EAA0E;AAAjE,wIAAA,0BAA0B,OAAA;AACnC,yEAAwE;AAA/D,sIAAA,yBAAyB,OAAA;AAClC,qEAAoE;AAA3D,kIAAA,uBAAuB,OAAA;AAEhC,yDAAqE;AAA5D,mIAAA,8BAA8B,OAAA;AACvC,qGAAoG;AAA3F,kKAAA,uCAAuC,OAAA;AAChD,uFAAsF;AAA7E,oJAAA,gCAAgC,OAAA","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport type { Exemplar } from './Exemplar';\nexport type { ExemplarFilter } from './ExemplarFilter';\nexport { AlwaysSampleExemplarFilter } from './AlwaysSampleExemplarFilter';\nexport { NeverSampleExemplarFilter } from './NeverSampleExemplarFilter';\nexport { WithTraceExemplarFilter } from './WithTraceExemplarFilter';\nexport type { ExemplarReservoir } from './ExemplarReservoir';\nexport { FixedSizeExemplarReservoirBase } from './ExemplarReservoir';\nexport { AlignedHistogramBucketExemplarReservoir } from './AlignedHistogramBucketExemplarReservoir';\nexport { SimpleFixedSizeExemplarReservoir } from './SimpleFixedSizeExemplarReservoir';\n"]}

View File

@@ -0,0 +1,14 @@
import { AggregationTemporality } from './AggregationTemporality';
import type { InstrumentType } from './MetricData';
import type { AggregationOption } from '../view/AggregationOption';
/**
* Aggregation selector based on metric instrument types.
*/
export type AggregationSelector = (instrumentType: InstrumentType) => AggregationOption;
/**
* Aggregation temporality selector based on metric instrument types.
*/
export type AggregationTemporalitySelector = (instrumentType: InstrumentType) => AggregationTemporality;
export declare const DEFAULT_AGGREGATION_SELECTOR: AggregationSelector;
export declare const DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR: AggregationTemporalitySelector;
//# sourceMappingURL=AggregationSelector.d.ts.map

View File

@@ -0,0 +1,18 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR = exports.DEFAULT_AGGREGATION_SELECTOR = void 0;
const AggregationTemporality_1 = require("./AggregationTemporality");
const AggregationOption_1 = require("../view/AggregationOption");
// Default aggregation selector: defer to the SDK's per-instrument default.
const DEFAULT_AGGREGATION_SELECTOR = _instrumentType => ({
    type: AggregationOption_1.AggregationType.DEFAULT,
});
exports.DEFAULT_AGGREGATION_SELECTOR = DEFAULT_AGGREGATION_SELECTOR;
// Default temporality selector: cumulative for every instrument type.
const DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR = _instrumentType => {
    return AggregationTemporality_1.AggregationTemporality.CUMULATIVE;
};
exports.DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR = DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR;
//# sourceMappingURL=AggregationSelector.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"AggregationSelector.js","sourceRoot":"","sources":["../../../src/export/AggregationSelector.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,qEAAkE;AAGlE,iEAA4D;AAgBrD,MAAM,4BAA4B,GACvC,eAAe,CAAC,EAAE;IAChB,OAAO;QACL,IAAI,EAAE,mCAAe,CAAC,OAAO;KAC9B,CAAC;AACJ,CAAC,CAAC;AALS,QAAA,4BAA4B,gCAKrC;AAEG,MAAM,wCAAwC,GACnD,eAAe,CAAC,EAAE,CAAC,+CAAsB,CAAC,UAAU,CAAC;AAD1C,QAAA,wCAAwC,4CACE","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AggregationTemporality } from './AggregationTemporality';\nimport type { InstrumentType } from './MetricData';\nimport type { AggregationOption } from '../view/AggregationOption';\nimport { AggregationType } from '../view/AggregationOption';\n\n/**\n * Aggregation selector based on metric instrument types.\n */\nexport type AggregationSelector = (\n instrumentType: InstrumentType\n) => AggregationOption;\n\n/**\n * Aggregation temporality selector based on metric instrument types.\n */\nexport type AggregationTemporalitySelector = (\n instrumentType: InstrumentType\n) => AggregationTemporality;\n\nexport const DEFAULT_AGGREGATION_SELECTOR: AggregationSelector =\n _instrumentType => {\n return {\n type: AggregationType.DEFAULT,\n };\n };\n\nexport const DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR: AggregationTemporalitySelector =\n _instrumentType => AggregationTemporality.CUMULATIVE;\n"]}

View File

@@ -0,0 +1,8 @@
/**
* AggregationTemporality indicates the way additive quantities are expressed.
*/
export declare enum AggregationTemporality {
DELTA = 0,
CUMULATIVE = 1
}
//# sourceMappingURL=AggregationTemporality.d.ts.map

View File

@@ -0,0 +1,16 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.AggregationTemporality = void 0;
/**
* AggregationTemporality indicates the way additive quantities are expressed.
*/
var AggregationTemporality;
(function (AggregationTemporality) {
    // TypeScript numeric-enum emit: each entry maps name -> value AND value -> name.
    AggregationTemporality[AggregationTemporality["DELTA"] = 0] = "DELTA";
    AggregationTemporality[AggregationTemporality["CUMULATIVE"] = 1] = "CUMULATIVE";
})(AggregationTemporality = exports.AggregationTemporality || (exports.AggregationTemporality = {}));
//# sourceMappingURL=AggregationTemporality.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"AggregationTemporality.js","sourceRoot":"","sources":["../../../src/export/AggregationTemporality.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH;;GAEG;AACH,IAAY,sBAGX;AAHD,WAAY,sBAAsB;IAChC,qEAAK,CAAA;IACL,+EAAU,CAAA;AACZ,CAAC,EAHW,sBAAsB,GAAtB,8BAAsB,KAAtB,8BAAsB,QAGjC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * AggregationTemporality indicates the way additive quantities are expressed.\n */\nexport enum AggregationTemporality {\n DELTA,\n CUMULATIVE,\n}\n"]}

View File

@@ -0,0 +1,6 @@
import type { InstrumentType } from './MetricData';
/**
* Cardinality Limit selector based on metric instrument types.
*/
export type CardinalitySelector = (instrumentType: InstrumentType) => number;
//# sourceMappingURL=CardinalitySelector.d.ts.map

View File

@@ -0,0 +1,7 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=CardinalitySelector.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"CardinalitySelector.js","sourceRoot":"","sources":["../../../src/export/CardinalitySelector.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { InstrumentType } from './MetricData';\n\n/**\n * Cardinality Limit selector based on metric instrument types.\n */\nexport type CardinalitySelector = (instrumentType: InstrumentType) => number;\n"]}

View File

@@ -0,0 +1,26 @@
import type { ExportResult } from '@opentelemetry/core';
import type { AggregationTemporality } from './AggregationTemporality';
import type { ResourceMetrics, InstrumentType } from './MetricData';
import type { PushMetricExporter } from './MetricExporter';
import type { AggregationTemporalitySelector } from './AggregationSelector';
interface ConsoleMetricExporterOptions {
temporalitySelector?: AggregationTemporalitySelector;
}
/**
* This is an implementation of {@link PushMetricExporter} that prints metrics to the
* console. This class can be used for diagnostic purposes.
*
* NOTE: This {@link PushMetricExporter} is intended for diagnostics use only, output rendered to the console may change at any time.
*/
export declare class ConsoleMetricExporter implements PushMetricExporter {
protected _shutdown: boolean;
protected _temporalitySelector: AggregationTemporalitySelector;
constructor(options?: ConsoleMetricExporterOptions);
export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void;
forceFlush(): Promise<void>;
selectAggregationTemporality(_instrumentType: InstrumentType): AggregationTemporality;
shutdown(): Promise<void>;
private static _sendMetrics;
}
export {};
//# sourceMappingURL=ConsoleMetricExporter.d.ts.map

View File

@@ -0,0 +1,52 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ConsoleMetricExporter = void 0;
const core_1 = require("@opentelemetry/core");
const AggregationSelector_1 = require("./AggregationSelector");
/**
* This is an implementation of {@link PushMetricExporter} that prints metrics to the
* console. This class can be used for diagnostic purposes.
*
* NOTE: This {@link PushMetricExporter} is intended for diagnostics use only, output rendered to the console may change at any time.
*/
/* eslint-disable no-console */
class ConsoleMetricExporter {
    // Once true, export() must report FAILED per the SDK specification.
    _shutdown = false;
    _temporalitySelector;
    /**
     * @param options optional configuration; a temporality selector may be
     * supplied, otherwise the SDK default selector is used.
     */
    constructor(options) {
        this._temporalitySelector =
            options?.temporalitySelector ?? AggregationSelector_1.DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR;
    }
    /** Prints the given metrics to the console and reports the result. */
    export(metrics, resultCallback) {
        if (this._shutdown) {
            // If the exporter is shutting down, by spec, we need to return FAILED as export result
            resultCallback({ code: core_1.ExportResultCode.FAILED });
            return;
        }
        ConsoleMetricExporter._sendMetrics(metrics, resultCallback);
    }
    forceFlush() {
        // Nothing is buffered, so flushing is a no-op.
        return Promise.resolve();
    }
    selectAggregationTemporality(_instrumentType) {
        return this._temporalitySelector(_instrumentType);
    }
    shutdown() {
        this._shutdown = true;
        return Promise.resolve();
    }
    // Dump every metric of every scope with unlimited depth, then signal success.
    static _sendMetrics(metrics, done) {
        for (const scopeMetrics of metrics.scopeMetrics) {
            for (const metric of scopeMetrics.metrics) {
                const { descriptor, dataPointType, dataPoints } = metric;
                console.dir({ descriptor, dataPointType, dataPoints }, { depth: null });
            }
        }
        done({ code: core_1.ExportResultCode.SUCCESS });
    }
}
exports.ConsoleMetricExporter = ConsoleMetricExporter;
//# sourceMappingURL=ConsoleMetricExporter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"ConsoleMetricExporter.js","sourceRoot":"","sources":["../../../src/export/ConsoleMetricExporter.ts"],"names":[],"mappings":";;;AAKA,8CAAuD;AAKvD,+DAAiF;AAMjF;;;;;GAKG;AAEH,+BAA+B;AAC/B,MAAa,qBAAqB;IACtB,SAAS,GAAG,KAAK,CAAC;IAClB,oBAAoB,CAAiC;IAE/D,YAAY,OAAsC;QAChD,IAAI,CAAC,oBAAoB;YACvB,OAAO,EAAE,mBAAmB,IAAI,8DAAwC,CAAC;IAC7E,CAAC;IAED,MAAM,CACJ,OAAwB,EACxB,cAA8C;QAE9C,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,uFAAuF;YACvF,cAAc,CAAC,EAAE,IAAI,EAAE,uBAAgB,CAAC,MAAM,EAAE,CAAC,CAAC;YAClD,OAAO;SACR;QAED,OAAO,qBAAqB,CAAC,YAAY,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IACrE,CAAC;IAED,UAAU;QACR,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;IAED,4BAA4B,CAC1B,eAA+B;QAE/B,OAAO,IAAI,CAAC,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACpD,CAAC;IAED,QAAQ;QACN,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC;QACtB,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;IAEO,MAAM,CAAC,YAAY,CACzB,OAAwB,EACxB,IAAoC;QAEpC,KAAK,MAAM,YAAY,IAAI,OAAO,CAAC,YAAY,EAAE;YAC/C,KAAK,MAAM,MAAM,IAAI,YAAY,CAAC,OAAO,EAAE;gBACzC,OAAO,CAAC,GAAG,CACT;oBACE,UAAU,EAAE,MAAM,CAAC,UAAU;oBAC7B,aAAa,EAAE,MAAM,CAAC,aAAa;oBACnC,UAAU,EAAE,MAAM,CAAC,UAAU;iBAC9B,EACD,EAAE,KAAK,EAAE,IAAI,EAAE,CAChB,CAAC;aACH;SACF;QAED,IAAI,CAAC,EAAE,IAAI,EAAE,uBAAgB,CAAC,OAAO,EAAE,CAAC,CAAC;IAC3C,CAAC;CACF;AAxDD,sDAwDC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\nimport type { ExportResult } from '@opentelemetry/core';\nimport { ExportResultCode } from '@opentelemetry/core';\nimport type { AggregationTemporality } from './AggregationTemporality';\nimport type { ResourceMetrics, InstrumentType } from './MetricData';\nimport type { PushMetricExporter } from './MetricExporter';\nimport type { AggregationTemporalitySelector } from './AggregationSelector';\nimport { DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR } from './AggregationSelector';\n\ninterface ConsoleMetricExporterOptions {\n temporalitySelector?: AggregationTemporalitySelector;\n}\n\n/**\n * This is an implementation of {@link PushMetricExporter} that prints metrics to 
the\n * console. This class can be used for diagnostic purposes.\n *\n * NOTE: This {@link PushMetricExporter} is intended for diagnostics use only, output rendered to the console may change at any time.\n */\n\n/* eslint-disable no-console */\nexport class ConsoleMetricExporter implements PushMetricExporter {\n protected _shutdown = false;\n protected _temporalitySelector: AggregationTemporalitySelector;\n\n constructor(options?: ConsoleMetricExporterOptions) {\n this._temporalitySelector =\n options?.temporalitySelector ?? DEFAULT_AGGREGATION_TEMPORALITY_SELECTOR;\n }\n\n export(\n metrics: ResourceMetrics,\n resultCallback: (result: ExportResult) => void\n ): void {\n if (this._shutdown) {\n // If the exporter is shutting down, by spec, we need to return FAILED as export result\n resultCallback({ code: ExportResultCode.FAILED });\n return;\n }\n\n return ConsoleMetricExporter._sendMetrics(metrics, resultCallback);\n }\n\n forceFlush(): Promise<void> {\n return Promise.resolve();\n }\n\n selectAggregationTemporality(\n _instrumentType: InstrumentType\n ): AggregationTemporality {\n return this._temporalitySelector(_instrumentType);\n }\n\n shutdown(): Promise<void> {\n this._shutdown = true;\n return Promise.resolve();\n }\n\n private static _sendMetrics(\n metrics: ResourceMetrics,\n done: (result: ExportResult) => void\n ): void {\n for (const scopeMetrics of metrics.scopeMetrics) {\n for (const metric of scopeMetrics.metrics) {\n console.dir(\n {\n descriptor: metric.descriptor,\n dataPointType: metric.dataPointType,\n dataPoints: metric.dataPoints,\n },\n { depth: null }\n );\n }\n }\n\n done({ code: ExportResultCode.SUCCESS });\n }\n}\n"]}

View File

@@ -0,0 +1,29 @@
import type { ExportResult } from '@opentelemetry/core';
import type { AggregationTemporality } from './AggregationTemporality';
import type { InstrumentType, ResourceMetrics } from './MetricData';
import type { PushMetricExporter } from './MetricExporter';
/**
* In-memory Metrics Exporter is a Push Metric Exporter
* which accumulates metrics data in the local memory and
* allows to inspect it (useful for e.g. unit tests).
*/
export declare class InMemoryMetricExporter implements PushMetricExporter {
    /** Set to true by {@link shutdown}; later exports report FAILED. */
    protected _shutdown: boolean;
    /** Temporality reported for every instrument type by this exporter. */
    protected _aggregationTemporality: AggregationTemporality;
    /** Accumulated batches, in the order they were exported. */
    private _metrics;
    constructor(aggregationTemporality: AggregationTemporality);
    /**
     * @inheritedDoc
     */
    export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void;
    /**
     * Returns all the collected resource metrics
     * @returns ResourceMetrics[]
     */
    getMetrics(): ResourceMetrics[];
    /** Resolves immediately; metrics are already held in memory. */
    forceFlush(): Promise<void>;
    /** Discards every batch collected so far. */
    reset(): void;
    /** Returns the fixed temporality chosen at construction. */
    selectAggregationTemporality(_instrumentType: InstrumentType): AggregationTemporality;
    /** Marks the exporter shut down; subsequent exports report FAILED. */
    shutdown(): Promise<void>;
}
//# sourceMappingURL=InMemoryMetricExporter.d.ts.map

View File

@@ -0,0 +1,55 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.InMemoryMetricExporter = void 0;
const core_1 = require("@opentelemetry/core");
/**
* In-memory Metrics Exporter is a Push Metric Exporter
* which accumulates metrics data in the local memory and
* allows to inspect it (useful for e.g. unit tests).
*/
class InMemoryMetricExporter {
    _shutdown = false;
    _aggregationTemporality;
    _metrics = [];
    /**
     * @param aggregationTemporality the temporality reported for every
     *   instrument type by this exporter.
     */
    constructor(aggregationTemporality) {
        this._aggregationTemporality = aggregationTemporality;
    }
    /**
     * Stores the batch in memory (unless shut down) and reports the result
     * asynchronously, mirroring a real push exporter.
     */
    export(metrics, resultCallback) {
        const accepted = !this._shutdown;
        if (accepted) {
            this._metrics.push(metrics);
        }
        setTimeout(() => {
            const code = accepted
                ? core_1.ExportResultCode.SUCCESS
                : core_1.ExportResultCode.FAILED;
            resultCallback({ code });
        }, 0);
    }
    /** Returns the live array of every batch collected so far. */
    getMetrics() {
        return this._metrics;
    }
    /** All data is already in memory; nothing to flush. */
    forceFlush() {
        return Promise.resolve();
    }
    /** Drops every collected batch. */
    reset() {
        this._metrics = [];
    }
    /** The fixed temporality chosen at construction, for any instrument. */
    selectAggregationTemporality(_instrumentType) {
        return this._aggregationTemporality;
    }
    /** Marks the exporter shut down; later exports report FAILED. */
    shutdown() {
        this._shutdown = true;
        return Promise.resolve();
    }
}
exports.InMemoryMetricExporter = InMemoryMetricExporter;
//# sourceMappingURL=InMemoryMetricExporter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"InMemoryMetricExporter.js","sourceRoot":"","sources":["../../../src/export/InMemoryMetricExporter.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,8CAAuD;AAMvD;;;;GAIG;AACH,MAAa,sBAAsB;IACvB,SAAS,GAAG,KAAK,CAAC;IAClB,uBAAuB,CAAyB;IAClD,QAAQ,GAAsB,EAAE,CAAC;IAEzC,YAAY,sBAA8C;QACxD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,OAAwB,EACxB,cAA8C;QAE9C,kDAAkD;QAClD,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,UAAU,CAAC,GAAG,EAAE,CAAC,cAAc,CAAC,EAAE,IAAI,EAAE,uBAAgB,CAAC,MAAM,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;YACvE,OAAO;SACR;QAED,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5B,UAAU,CAAC,GAAG,EAAE,CAAC,cAAc,CAAC,EAAE,IAAI,EAAE,uBAAgB,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;IAC1E,CAAC;IAED;;;OAGG;IACI,UAAU;QACf,OAAO,IAAI,CAAC,QAAQ,CAAC;IACvB,CAAC;IAED,UAAU;QACR,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;IAED,KAAK;QACH,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;IACrB,CAAC;IAED,4BAA4B,CAC1B,eAA+B;QAE/B,OAAO,IAAI,CAAC,uBAAuB,CAAC;IACtC,CAAC;IAED,QAAQ;QACN,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC;QACtB,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;CACF;AApDD,wDAoDC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ExportResultCode } from '@opentelemetry/core';\nimport type { ExportResult } from '@opentelemetry/core';\nimport type { AggregationTemporality } from './AggregationTemporality';\nimport type { InstrumentType, ResourceMetrics } from './MetricData';\nimport type { PushMetricExporter } from './MetricExporter';\n\n/**\n * In-memory Metrics Exporter is a Push Metric Exporter\n * which accumulates metrics data in the local memory and\n * allows to inspect it (useful for e.g. 
unit tests).\n */\nexport class InMemoryMetricExporter implements PushMetricExporter {\n protected _shutdown = false;\n protected _aggregationTemporality: AggregationTemporality;\n private _metrics: ResourceMetrics[] = [];\n\n constructor(aggregationTemporality: AggregationTemporality) {\n this._aggregationTemporality = aggregationTemporality;\n }\n\n /**\n * @inheritedDoc\n */\n export(\n metrics: ResourceMetrics,\n resultCallback: (result: ExportResult) => void\n ): void {\n // Avoid storing metrics when exporter is shutdown\n if (this._shutdown) {\n setTimeout(() => resultCallback({ code: ExportResultCode.FAILED }), 0);\n return;\n }\n\n this._metrics.push(metrics);\n setTimeout(() => resultCallback({ code: ExportResultCode.SUCCESS }), 0);\n }\n\n /**\n * Returns all the collected resource metrics\n * @returns ResourceMetrics[]\n */\n public getMetrics(): ResourceMetrics[] {\n return this._metrics;\n }\n\n forceFlush(): Promise<void> {\n return Promise.resolve();\n }\n\n reset() {\n this._metrics = [];\n }\n\n selectAggregationTemporality(\n _instrumentType: InstrumentType\n ): AggregationTemporality {\n return this._aggregationTemporality;\n }\n\n shutdown(): Promise<void> {\n this._shutdown = true;\n return Promise.resolve();\n }\n}\n"]}

View File

@@ -0,0 +1,144 @@
import type { HrTime, Attributes, ValueType } from '@opentelemetry/api';
import type { InstrumentationScope } from '@opentelemetry/core';
import type { Resource } from '@opentelemetry/resources';
import type { AggregationTemporality } from './AggregationTemporality';
import type { Histogram, ExponentialHistogram } from '../aggregator/types';
/**
* Supported types of metric instruments.
*/
export declare enum InstrumentType {
    /** Synchronous, monotonically increasing sum. */
    COUNTER = "COUNTER",
    /** Synchronous instrument recording the current value. */
    GAUGE = "GAUGE",
    /** Synchronous instrument recording a distribution of values. */
    HISTOGRAM = "HISTOGRAM",
    /** Synchronous sum that may increase or decrease. */
    UP_DOWN_COUNTER = "UP_DOWN_COUNTER",
    /** Asynchronous (callback-reported) monotonic counter. */
    OBSERVABLE_COUNTER = "OBSERVABLE_COUNTER",
    /** Asynchronous (callback-reported) gauge. */
    OBSERVABLE_GAUGE = "OBSERVABLE_GAUGE",
    /** Asynchronous (callback-reported) non-monotonic counter. */
    OBSERVABLE_UP_DOWN_COUNTER = "OBSERVABLE_UP_DOWN_COUNTER"
}
/**
 * Identity and metadata of a metric stream as seen by exporters.
 */
export interface MetricDescriptor {
    /** Name of the instrument this metric was recorded under. */
    readonly name: string;
    /** Human-readable description supplied when the instrument was created. */
    readonly description: string;
    /** Unit string supplied when the instrument was created. */
    readonly unit: string;
    /** Declared {@link ValueType} of recorded values. */
    readonly valueType: ValueType;
}
/**
* Basic metric data fields.
*/
interface BaseMetricData {
    /** Descriptor identifying the instrument that produced this data. */
    readonly descriptor: MetricDescriptor;
    /** Whether points are deltas or cumulative over the collection window. */
    readonly aggregationTemporality: AggregationTemporality;
    /**
     * DataPointType of the metric instrument.
     */
    readonly dataPointType: DataPointType;
}
/**
* Represents a metric data aggregated by either a LastValueAggregation or
* SumAggregation.
*/
export interface SumMetricData extends BaseMetricData {
    readonly dataPointType: DataPointType.SUM;
    readonly dataPoints: DataPoint<number>[];
    /** True when the sum can only increase (e.g. produced by a counter). */
    readonly isMonotonic: boolean;
}
/**
 * Represents a metric data produced by a gauge: a single numeric value per
 * data point.
 */
export interface GaugeMetricData extends BaseMetricData {
    readonly dataPointType: DataPointType.GAUGE;
    readonly dataPoints: DataPoint<number>[];
}
/**
* Represents a metric data aggregated by a HistogramAggregation.
*/
export interface HistogramMetricData extends BaseMetricData {
    readonly dataPointType: DataPointType.HISTOGRAM;
    /** Explicit-bucket histogram summaries. */
    readonly dataPoints: DataPoint<Histogram>[];
}
/**
* Represents a metric data aggregated by a ExponentialHistogramAggregation.
*/
export interface ExponentialHistogramMetricData extends BaseMetricData {
    readonly dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM;
    /** Exponential-bucket histogram summaries. */
    readonly dataPoints: DataPoint<ExponentialHistogram>[];
}
/**
 * Represents an aggregated metric data.
 *
 * Discriminated union: narrow on `dataPointType` to access variant-specific
 * fields such as `isMonotonic`.
 */
export type MetricData = SumMetricData | GaugeMetricData | HistogramMetricData | ExponentialHistogramMetricData;
/** Metrics collected for a single instrumentation scope. */
export interface ScopeMetrics {
    /** The instrumentation scope that produced these metrics. */
    scope: InstrumentationScope;
    metrics: MetricData[];
}
/** All metrics from one collection, grouped by instrumentation scope. */
export interface ResourceMetrics {
    /** The resource these metrics were collected from. */
    resource: Resource;
    scopeMetrics: ScopeMetrics[];
}
/**
 * Represents the collection result of the metrics. If there are any
 * non-critical errors in the collection, like throwing in a single observable
 * callback, these errors are aggregated in the {@link CollectionResult.errors}
 * array and other successfully collected metrics are returned.
 */
export interface CollectionResult {
    /**
     * Collected metrics.
     */
    resourceMetrics: ResourceMetrics;
    /**
     * Arbitrary JavaScript exception values, e.g. values thrown from
     * observable callbacks during collection.
     */
    errors: unknown[];
}
/**
 * The aggregated point data type.
 *
 * Also serves as the discriminant of the {@link MetricData} union.
 */
export declare enum DataPointType {
    /**
     * A histogram data point contains a histogram statistics of collected
     * values with a list of explicit bucket boundaries and statistics such
     * as min, max, count, and sum of all collected values.
     */
    HISTOGRAM = 0,
    /**
     * An exponential histogram data point contains a histogram statistics of
     * collected values where bucket boundaries are automatically calculated
     * using an exponential function, and statistics such as min, max, count,
     * and sum of all collected values.
     */
    EXPONENTIAL_HISTOGRAM = 1,
    /**
     * A gauge metric data point has only a single numeric value.
     */
    GAUGE = 2,
    /**
     * A sum metric data point has a single numeric value and a
     * monotonicity-indicator.
     */
    SUM = 3
}
/**
 * Represents an aggregated point data with start time, end time and their
 * associated attributes and points.
 *
 * @typeParam T - point value type: `number` for sum and gauge points,
 * `Histogram`/`ExponentialHistogram` for histogram points.
 */
export interface DataPoint<T> {
    /**
     * The start epoch timestamp of the DataPoint, usually the time when
     * the metric was created when the preferred AggregationTemporality is
     * CUMULATIVE, or last collection time otherwise.
     */
    readonly startTime: HrTime;
    /**
     * The end epoch timestamp when data were collected, usually it represents
     * the moment when `MetricReader.collect` was called.
     */
    readonly endTime: HrTime;
    /**
     * The attributes associated with this DataPoint.
     */
    readonly attributes: Attributes;
    /**
     * The value for this DataPoint. The type of the value is indicated by the
     * {@link DataPointType}.
     */
    readonly value: T;
}
export {};
//# sourceMappingURL=MetricData.d.ts.map

Some files were not shown because too many files have changed in this diff Show More