feat: Complete zCode CLI X with Telegram bot integration

- Add full Telegram bot functionality with Z.AI API integration
- Implement 4 tools: Bash, FileEdit, WebSearch, Git
- Add 3 agents: Code Reviewer, Architect, DevOps Engineer
- Add 6 skills for common coding tasks
- Add systemd service file for 24/7 operation
- Add nginx configuration for HTTPS webhook
- Add comprehensive documentation
- Implement WebSocket server for real-time updates
- Add logging system with Winston
- Add environment validation

🤖 zCode CLI X - Agentic coder with Z.AI + Telegram integration
This commit is contained in:
admin
2026-05-05 09:01:26 +00:00
Unverified
parent 4a7035dd92
commit 875c7f9b91
24688 changed files with 3224957 additions and 221 deletions

View File

@@ -0,0 +1,11 @@
/**
 * Response types for the OTLP metrics export service — hand-written mirrors
 * of the collector's ExportMetricsServiceResponse proto messages.
 */
export interface IExportMetricsServiceResponse {
  /** ExportMetricsServiceResponse partialSuccess */
  partialSuccess?: IExportMetricsPartialSuccess;
}
/** Details reported by the collector when a request was only partially accepted. */
export interface IExportMetricsPartialSuccess {
  /** ExportMetricsPartialSuccess rejectedDataPoints */
  rejectedDataPoints?: number;
  /** ExportMetricsPartialSuccess errorMessage */
  errorMessage?: string;
}
//# sourceMappingURL=export-response.d.ts.map

View File

@@ -0,0 +1,7 @@
"use strict";
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */
// Compiled output of a type-only source module: the interfaces are erased at
// compile time, so the runtime body only sets the ES-module interop marker.
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=export-response.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"export-response.js","sourceRoot":"","sources":["../../../src/metrics/export-response.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport interface IExportMetricsServiceResponse {\n /** ExportMetricsServiceResponse partialSuccess */\n partialSuccess?: IExportMetricsPartialSuccess;\n}\n\nexport interface IExportMetricsPartialSuccess {\n /** ExportMetricsPartialSuccess rejectedDataPoints */\n rejectedDataPoints?: number;\n\n /** ExportMetricsPartialSuccess errorMessage */\n errorMessage?: string;\n}\n"]}

View File

@@ -0,0 +1,2 @@
// Public type-only re-export surface for the metrics response types.
export type { IExportMetricsPartialSuccess, IExportMetricsServiceResponse, } from './export-response';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1,7 @@
"use strict";
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */
// Compiled output of a type-only source module (the `export type` re-exports
// are erased), so only the ES-module interop marker remains at runtime.
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/metrics/index.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\n// IMPORTANT: exports added here are public\nexport type {\n IExportMetricsPartialSuccess,\n IExportMetricsServiceResponse,\n} from './export-response';\n"]}

View File

@@ -0,0 +1,263 @@
import type { Fixed64, IInstrumentationScope, IKeyValue, Resource } from '../common/internal-types';
/** Properties of an ExportMetricsServiceRequest. */
export interface IExportMetricsServiceRequest {
  /** ExportMetricsServiceRequest resourceMetrics */
  resourceMetrics: IResourceMetrics[];
}
/** Properties of a ResourceMetrics. */
export interface IResourceMetrics {
  /** ResourceMetrics resource */
  resource?: Resource;
  /** ResourceMetrics scopeMetrics */
  scopeMetrics: IScopeMetrics[];
  /** ResourceMetrics schemaUrl */
  schemaUrl?: string;
}
/** Properties of an IScopeMetrics. */
export interface IScopeMetrics {
  /** ScopeMetrics scope */
  scope?: IInstrumentationScope;
  /** ScopeMetrics metrics */
  metrics: IMetric[];
  /** ScopeMetrics schemaUrl */
  schemaUrl?: string;
}
/**
 * Properties of a Metric.
 * NOTE(review): gauge/sum/histogram/exponentialHistogram/summary mirror a
 * proto `oneof` — presumably at most one is set per metric; confirm against
 * the opentelemetry-proto definition before relying on it.
 */
export interface IMetric {
  /** Metric name */
  name: string;
  /** Metric description */
  description?: string;
  /** Metric unit */
  unit?: string;
  /** Metric gauge */
  gauge?: IGauge;
  /** Metric sum */
  sum?: ISum;
  /** Metric histogram */
  histogram?: IHistogram;
  /** Metric exponentialHistogram */
  exponentialHistogram?: IExponentialHistogram;
  /** Metric summary */
  summary?: ISummary;
}
/** Properties of a Gauge. */
export interface IGauge {
  /** Gauge dataPoints */
  dataPoints: INumberDataPoint[];
}
/** Properties of a Sum. */
export interface ISum {
  /** Sum dataPoints */
  dataPoints: INumberDataPoint[];
  /** Sum aggregationTemporality */
  aggregationTemporality: EAggregationTemporality;
  /** Sum isMonotonic */
  isMonotonic?: boolean | null;
}
/** Properties of a Histogram. */
export interface IHistogram {
  /** Histogram dataPoints */
  dataPoints: IHistogramDataPoint[];
  /** Histogram aggregationTemporality */
  aggregationTemporality?: EAggregationTemporality;
}
/** Properties of an ExponentialHistogram. */
export interface IExponentialHistogram {
  /** ExponentialHistogram dataPoints */
  dataPoints: IExponentialHistogramDataPoint[];
  /** ExponentialHistogram aggregationTemporality */
  aggregationTemporality?: EAggregationTemporality;
}
/** Properties of a Summary. */
export interface ISummary {
  /** Summary dataPoints */
  dataPoints: ISummaryDataPoint[];
}
/** Properties of a NumberDataPoint. */
export interface INumberDataPoint {
  /** NumberDataPoint attributes */
  attributes: IKeyValue[];
  /** NumberDataPoint startTimeUnixNano */
  startTimeUnixNano?: Fixed64;
  /** NumberDataPoint timeUnixNano */
  timeUnixNano?: Fixed64;
  /** NumberDataPoint asDouble */
  asDouble?: number | null;
  /** NumberDataPoint asInt */
  asInt?: number;
  /** NumberDataPoint exemplars */
  exemplars?: IExemplar[];
  /** NumberDataPoint flags */
  flags?: number;
}
/** Properties of a HistogramDataPoint. */
export interface IHistogramDataPoint {
  /** HistogramDataPoint attributes */
  attributes?: IKeyValue[];
  /** HistogramDataPoint startTimeUnixNano */
  startTimeUnixNano?: Fixed64;
  /** HistogramDataPoint timeUnixNano */
  timeUnixNano?: Fixed64;
  /** HistogramDataPoint count */
  count?: number;
  /** HistogramDataPoint sum */
  sum?: number;
  /** HistogramDataPoint bucketCounts */
  bucketCounts?: number[];
  /** HistogramDataPoint explicitBounds */
  explicitBounds?: number[];
  /** HistogramDataPoint exemplars */
  exemplars?: IExemplar[];
  /** HistogramDataPoint flags */
  flags?: number;
  /** HistogramDataPoint min */
  min?: number;
  /** HistogramDataPoint max */
  max?: number;
}
/** Properties of an ExponentialHistogramDataPoint. */
export interface IExponentialHistogramDataPoint {
  /** ExponentialHistogramDataPoint attributes */
  attributes?: IKeyValue[];
  /** ExponentialHistogramDataPoint startTimeUnixNano */
  startTimeUnixNano?: Fixed64;
  /** ExponentialHistogramDataPoint timeUnixNano */
  timeUnixNano?: Fixed64;
  /** ExponentialHistogramDataPoint count */
  count?: number;
  /** ExponentialHistogramDataPoint sum */
  sum?: number;
  /** ExponentialHistogramDataPoint scale */
  scale?: number;
  /** ExponentialHistogramDataPoint zeroCount */
  zeroCount?: number;
  /** ExponentialHistogramDataPoint positive */
  positive?: IBuckets;
  /** ExponentialHistogramDataPoint negative */
  negative?: IBuckets;
  /** ExponentialHistogramDataPoint flags */
  flags?: number;
  /** ExponentialHistogramDataPoint exemplars */
  exemplars?: IExemplar[];
  /** ExponentialHistogramDataPoint min */
  min?: number;
  /** ExponentialHistogramDataPoint max */
  max?: number;
}
/**
 * Properties of a SummaryDataPoint.
 * NOTE(review): unlike the other data points, the time fields here are typed
 * `number`/`string` rather than Fixed64 — looks inconsistent; verify against
 * the upstream otlp-transformer before changing.
 */
export interface ISummaryDataPoint {
  /** SummaryDataPoint attributes */
  attributes?: IKeyValue[];
  /** SummaryDataPoint startTimeUnixNano */
  startTimeUnixNano?: number;
  /** SummaryDataPoint timeUnixNano */
  timeUnixNano?: string;
  /** SummaryDataPoint count */
  count?: number;
  /** SummaryDataPoint sum */
  sum?: number;
  /** SummaryDataPoint quantileValues */
  quantileValues?: IValueAtQuantile[];
  /** SummaryDataPoint flags */
  flags?: number;
}
/** Properties of a ValueAtQuantile. */
export interface IValueAtQuantile {
  /** ValueAtQuantile quantile */
  quantile?: number;
  /** ValueAtQuantile value */
  value?: number;
}
/** Properties of a Buckets. */
export interface IBuckets {
  /** Buckets offset */
  offset?: number;
  /** Buckets bucketCounts */
  bucketCounts?: number[];
}
/**
 * Properties of an Exemplar.
 * NOTE(review): timeUnixNano is typed `string` here while the sibling data
 * points use Fixed64 — confirm this matches upstream before changing.
 */
export interface IExemplar {
  /** Exemplar filteredAttributes */
  filteredAttributes?: IKeyValue[];
  /** Exemplar timeUnixNano */
  timeUnixNano?: string;
  /** Exemplar asDouble */
  asDouble?: number;
  /** Exemplar asInt */
  asInt?: number;
  /** Exemplar spanId */
  spanId?: string | Uint8Array;
  /** Exemplar traceId */
  traceId?: string | Uint8Array;
}
/**
 * AggregationTemporality defines how a metric aggregator reports aggregated
 * values. It describes how those values relate to the time interval over
 * which they are aggregated.
 * NOTE(review): `declare const enum` members are inlined at use sites and
 * const enums do not work under `isolatedModules`; confirm downstream
 * tooling supports this (the compiled .js emits a runtime enum as well).
 */
export declare const enum EAggregationTemporality {
  /** UNSPECIFIED is the default AggregationTemporality, it MUST not be used. */
  AGGREGATION_TEMPORALITY_UNSPECIFIED = 0,
  /** DELTA is an AggregationTemporality for a metric aggregator which reports
  changes since last report time. Successive metrics contain aggregation of
  values from continuous and non-overlapping intervals.
  The values for a DELTA metric are based only on the time interval
  associated with one measurement cycle. There is no dependency on
  previous measurements like is the case for CUMULATIVE metrics.
  For example, consider a system measuring the number of requests that
  it receives and reports the sum of these requests every second as a
  DELTA metric:
  1. The system starts receiving at time=t_0.
  2. A request is received, the system measures 1 request.
  3. A request is received, the system measures 1 request.
  4. A request is received, the system measures 1 request.
  5. The 1 second collection cycle ends. A metric is exported for the
  number of requests received over the interval of time t_0 to
  t_0+1 with a value of 3.
  6. A request is received, the system measures 1 request.
  7. A request is received, the system measures 1 request.
  8. The 1 second collection cycle ends. A metric is exported for the
  number of requests received over the interval of time t_0+1 to
  t_0+2 with a value of 2. */
  AGGREGATION_TEMPORALITY_DELTA = 1,
  /** CUMULATIVE is an AggregationTemporality for a metric aggregator which
  reports changes since a fixed start time. This means that current values
  of a CUMULATIVE metric depend on all previous measurements since the
  start time. Because of this, the sender is required to retain this state
  in some form. If this state is lost or invalidated, the CUMULATIVE metric
  values MUST be reset and a new fixed start time following the last
  reported measurement time sent MUST be used.
  For example, consider a system measuring the number of requests that
  it receives and reports the sum of these requests every second as a
  CUMULATIVE metric:
  1. The system starts receiving at time=t_0.
  2. A request is received, the system measures 1 request.
  3. A request is received, the system measures 1 request.
  4. A request is received, the system measures 1 request.
  5. The 1 second collection cycle ends. A metric is exported for the
  number of requests received over the interval of time t_0 to
  t_0+1 with a value of 3.
  6. A request is received, the system measures 1 request.
  7. A request is received, the system measures 1 request.
  8. The 1 second collection cycle ends. A metric is exported for the
  number of requests received over the interval of time t_0 to
  t_0+2 with a value of 5.
  9. The system experiences a fault and loses state.
  10. The system recovers and resumes receiving at time=t_1.
  11. A request is received, the system measures 1 request.
  12. The 1 second collection cycle ends. A metric is exported for the
  number of requests received over the interval of time t_1 to
  t_0+1 with a value of 1.
  Note: Even though, when reporting changes since last report time, using
  CUMULATIVE is valid, it is not recommended. This may cause problems for
  systems that do not use start_time to determine when the aggregation
  value was reset (e.g. Prometheus). */
  AGGREGATION_TEMPORALITY_CUMULATIVE = 2
}
//# sourceMappingURL=internal-types.d.ts.map

View File

@@ -0,0 +1,75 @@
"use strict";
// Compiled (CommonJS) emit of EAggregationTemporality from internal-types.ts.
// Runtime values must stay in sync with the `declare const enum` in
// internal-types.d.ts (UNSPECIFIED=0, DELTA=1, CUMULATIVE=2).
Object.defineProperty(exports, "__esModule", { value: true });
exports.EAggregationTemporality = void 0;
/**
 * AggregationTemporality defines how a metric aggregator reports aggregated
 * values. It describes how those values relate to the time interval over
 * which they are aggregated.
 */
var EAggregationTemporality;
(function (EAggregationTemporality) {
    /* UNSPECIFIED is the default AggregationTemporality, it MUST not be used. */
    EAggregationTemporality[EAggregationTemporality["AGGREGATION_TEMPORALITY_UNSPECIFIED"] = 0] = "AGGREGATION_TEMPORALITY_UNSPECIFIED";
    /** DELTA is an AggregationTemporality for a metric aggregator which reports
    changes since last report time. Successive metrics contain aggregation of
    values from continuous and non-overlapping intervals.
    The values for a DELTA metric are based only on the time interval
    associated with one measurement cycle. There is no dependency on
    previous measurements like is the case for CUMULATIVE metrics.
    For example, consider a system measuring the number of requests that
    it receives and reports the sum of these requests every second as a
    DELTA metric:
    1. The system starts receiving at time=t_0.
    2. A request is received, the system measures 1 request.
    3. A request is received, the system measures 1 request.
    4. A request is received, the system measures 1 request.
    5. The 1 second collection cycle ends. A metric is exported for the
    number of requests received over the interval of time t_0 to
    t_0+1 with a value of 3.
    6. A request is received, the system measures 1 request.
    7. A request is received, the system measures 1 request.
    8. The 1 second collection cycle ends. A metric is exported for the
    number of requests received over the interval of time t_0+1 to
    t_0+2 with a value of 2. */
    EAggregationTemporality[EAggregationTemporality["AGGREGATION_TEMPORALITY_DELTA"] = 1] = "AGGREGATION_TEMPORALITY_DELTA";
    /** CUMULATIVE is an AggregationTemporality for a metric aggregator which
    reports changes since a fixed start time. This means that current values
    of a CUMULATIVE metric depend on all previous measurements since the
    start time. Because of this, the sender is required to retain this state
    in some form. If this state is lost or invalidated, the CUMULATIVE metric
    values MUST be reset and a new fixed start time following the last
    reported measurement time sent MUST be used.
    For example, consider a system measuring the number of requests that
    it receives and reports the sum of these requests every second as a
    CUMULATIVE metric:
    1. The system starts receiving at time=t_0.
    2. A request is received, the system measures 1 request.
    3. A request is received, the system measures 1 request.
    4. A request is received, the system measures 1 request.
    5. The 1 second collection cycle ends. A metric is exported for the
    number of requests received over the interval of time t_0 to
    t_0+1 with a value of 3.
    6. A request is received, the system measures 1 request.
    7. A request is received, the system measures 1 request.
    8. The 1 second collection cycle ends. A metric is exported for the
    number of requests received over the interval of time t_0 to
    t_0+2 with a value of 5.
    9. The system experiences a fault and loses state.
    10. The system recovers and resumes receiving at time=t_1.
    11. A request is received, the system measures 1 request.
    12. The 1 second collection cycle ends. A metric is exported for the
    number of requests received over the interval of time t_1 to
    t_0+1 with a value of 1.
    Note: Even though, when reporting changes since last report time, using
    CUMULATIVE is valid, it is not recommended. This may cause problems for
    systems that do not use start_time to determine when the aggregation
    value was reset (e.g. Prometheus). */
    EAggregationTemporality[EAggregationTemporality["AGGREGATION_TEMPORALITY_CUMULATIVE"] = 2] = "AGGREGATION_TEMPORALITY_CUMULATIVE";
})(EAggregationTemporality = exports.EAggregationTemporality || (exports.EAggregationTemporality = {}));
//# sourceMappingURL=internal-types.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
import type { MetricData, ResourceMetrics, ScopeMetrics } from '@opentelemetry/sdk-metrics';
import type { IExportMetricsServiceRequest, IMetric, IResourceMetrics, IScopeMetrics } from './internal-types';
import type { Encoder } from '../common/utils';
/** Converts one SDK ResourceMetrics record into its OTLP wire shape. */
export declare function toResourceMetrics(resourceMetrics: ResourceMetrics, encoder: Encoder): IResourceMetrics;
/** Converts SDK scope metrics into their OTLP wire shape, one entry per input. */
export declare function toScopeMetrics(scopeMetrics: ScopeMetrics[], encoder: Encoder): IScopeMetrics[];
/** Converts a single SDK metric stream into an OTLP IMetric. */
export declare function toMetric(metricData: MetricData, encoder: Encoder): IMetric;
/** Builds the top-level OTLP export request from a batch of resource metrics. */
export declare function createExportMetricsServiceRequest(resourceMetrics: ResourceMetrics[], encoder: Encoder): IExportMetricsServiceRequest;
//# sourceMappingURL=internal.d.ts.map

View File

@@ -0,0 +1,140 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createExportMetricsServiceRequest = exports.toMetric = exports.toScopeMetrics = exports.toResourceMetrics = void 0;
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
const api_1 = require("@opentelemetry/api");
const sdk_metrics_1 = require("@opentelemetry/sdk-metrics");
const internal_types_1 = require("./internal-types");
const internal_1 = require("../common/internal");
/**
 * Converts an SDK ResourceMetrics record into its OTLP wire shape.
 * @param resourceMetrics SDK-side resource metrics to convert.
 * @param encoder         Encoder used for attribute/time encoding.
 * @returns the IResourceMetrics payload used in the export request.
 */
function toResourceMetrics(resourceMetrics, encoder) {
    // Encode the resource first; its schemaUrl is lifted onto the envelope.
    const encodedResource = (0, internal_1.createResource)(resourceMetrics.resource, encoder);
    const scopeMetrics = toScopeMetrics(resourceMetrics.scopeMetrics, encoder);
    return {
        resource: encodedResource,
        schemaUrl: encodedResource.schemaUrl,
        scopeMetrics,
    };
}
exports.toResourceMetrics = toResourceMetrics;
/**
 * Converts each SDK ScopeMetrics entry into its OTLP wire shape.
 * @param scopeMetrics scope metrics array (typed ScopeMetrics[] in internal.d.ts).
 * @param encoder      Encoder used for nested metric conversion.
 * @returns one IScopeMetrics per input entry.
 */
function toScopeMetrics(scopeMetrics, encoder) {
    // .map() already returns a fresh array; the previous Array.from(...)
    // wrapper made a second, redundant copy and has been removed.
    return scopeMetrics.map(metrics => ({
        scope: (0, internal_1.createInstrumentationScope)(metrics.scope),
        metrics: metrics.metrics.map(metricData => toMetric(metricData, encoder)),
        schemaUrl: metrics.scope.schemaUrl,
    }));
}
exports.toScopeMetrics = toScopeMetrics;
/**
 * Converts one SDK metric stream into an OTLP IMetric.
 * Which data field is populated (sum/gauge/histogram/exponentialHistogram)
 * follows the stream's DataPointType; an unrecognized type leaves the
 * metric with descriptor fields only.
 */
function toMetric(metricData, encoder) {
    const { name, description, unit } = metricData.descriptor;
    const metric = { name, description, unit };
    const temporality = toAggregationTemporality(metricData.aggregationTemporality);
    switch (metricData.dataPointType) {
        case sdk_metrics_1.DataPointType.SUM:
            metric.sum = {
                aggregationTemporality: temporality,
                isMonotonic: metricData.isMonotonic,
                dataPoints: toSingularDataPoints(metricData, encoder),
            };
            break;
        case sdk_metrics_1.DataPointType.GAUGE:
            metric.gauge = {
                dataPoints: toSingularDataPoints(metricData, encoder),
            };
            break;
        case sdk_metrics_1.DataPointType.HISTOGRAM:
            metric.histogram = {
                aggregationTemporality: temporality,
                dataPoints: toHistogramDataPoints(metricData, encoder),
            };
            break;
        case sdk_metrics_1.DataPointType.EXPONENTIAL_HISTOGRAM:
            metric.exponentialHistogram = {
                aggregationTemporality: temporality,
                dataPoints: toExponentialHistogramDataPoints(metricData, encoder),
            };
            break;
        // NOTE(review): no SUMMARY case — presumably the SDK never produces it;
        // confirm before adding one.
    }
    return metric;
}
exports.toMetric = toMetric;
/**
 * Converts one number data point; the value lands in asInt or asDouble
 * according to the instrument's ValueType (neither is set for any other
 * value type, matching the original switch with no default).
 */
function toSingularDataPoint(dataPoint, valueType, encoder) {
    const point = {
        attributes: (0, internal_1.toAttributes)(dataPoint.attributes, encoder),
        startTimeUnixNano: encoder.encodeHrTime(dataPoint.startTime),
        timeUnixNano: encoder.encodeHrTime(dataPoint.endTime),
    };
    if (valueType === api_1.ValueType.INT) {
        point.asInt = dataPoint.value;
    }
    else if (valueType === api_1.ValueType.DOUBLE) {
        point.asDouble = dataPoint.value;
    }
    return point;
}
/** Maps every data point of a sum/gauge stream through toSingularDataPoint. */
function toSingularDataPoints(metricData, encoder) {
    const { valueType } = metricData.descriptor;
    return metricData.dataPoints.map(dataPoint => toSingularDataPoint(dataPoint, valueType, encoder));
}
/**
 * Converts explicit-bucket histogram points into their OTLP shape.
 * Property order is kept identical to the original so JSON serialization
 * of the request is byte-stable.
 */
function toHistogramDataPoints(metricData, encoder) {
    return metricData.dataPoints.map(dataPoint => {
        const { value, attributes, startTime, endTime } = dataPoint;
        return {
            attributes: (0, internal_1.toAttributes)(attributes, encoder),
            bucketCounts: value.buckets.counts,
            explicitBounds: value.buckets.boundaries,
            count: value.count,
            sum: value.sum,
            min: value.min,
            max: value.max,
            startTimeUnixNano: encoder.encodeHrTime(startTime),
            timeUnixNano: encoder.encodeHrTime(endTime),
        };
    });
}
/**
 * Converts exponential-bucket histogram points into their OTLP shape.
 * Property order is kept identical to the original so JSON serialization
 * of the request is byte-stable.
 */
function toExponentialHistogramDataPoints(metricData, encoder) {
    return metricData.dataPoints.map(dataPoint => {
        const expHistogram = dataPoint.value;
        const { positive, negative } = expHistogram;
        return {
            attributes: (0, internal_1.toAttributes)(dataPoint.attributes, encoder),
            count: expHistogram.count,
            min: expHistogram.min,
            max: expHistogram.max,
            sum: expHistogram.sum,
            positive: { offset: positive.offset, bucketCounts: positive.bucketCounts },
            negative: { offset: negative.offset, bucketCounts: negative.bucketCounts },
            scale: expHistogram.scale,
            zeroCount: expHistogram.zeroCount,
            startTimeUnixNano: encoder.encodeHrTime(dataPoint.startTime),
            timeUnixNano: encoder.encodeHrTime(dataPoint.endTime),
        };
    });
}
/**
 * Maps the SDK AggregationTemporality onto the OTLP enum value.
 * Returns undefined for any other input, exactly like the original switch
 * with no default (assumes the SDK enum only has DELTA/CUMULATIVE — confirm).
 */
function toAggregationTemporality(temporality) {
    if (temporality === sdk_metrics_1.AggregationTemporality.DELTA) {
        return internal_types_1.EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA;
    }
    if (temporality === sdk_metrics_1.AggregationTemporality.CUMULATIVE) {
        return internal_types_1.EAggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE;
    }
    return undefined;
}
/**
 * Builds the top-level ExportMetricsServiceRequest payload from a batch of
 * SDK resource metrics records.
 */
function createExportMetricsServiceRequest(resourceMetrics, encoder) {
    const converted = resourceMetrics.map(metrics => toResourceMetrics(metrics, encoder));
    return { resourceMetrics: converted };
}
exports.createExportMetricsServiceRequest = createExportMetricsServiceRequest;
//# sourceMappingURL=internal.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
// Public entry point for the OTLP/JSON metrics serializer.
export { JsonMetricsSerializer } from './metrics';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1,11 @@
"use strict";
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonMetricsSerializer = void 0;
// IMPORTANT: exports added here are public
// Re-exported through a getter so the binding stays live with ./metrics.
var metrics_1 = require("./metrics");
Object.defineProperty(exports, "JsonMetricsSerializer", { enumerable: true, get: function () { return metrics_1.JsonMetricsSerializer; } });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/metrics/json/index.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,2CAA2C;AAC3C,qCAAkD;AAAzC,gHAAA,qBAAqB,OAAA","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\n// IMPORTANT: exports added here are public\nexport { JsonMetricsSerializer } from './metrics';\n"]}

View File

@@ -0,0 +1,5 @@
import type { ISerializer } from '../../i-serializer';
import type { ResourceMetrics } from '@opentelemetry/sdk-metrics';
import type { IExportMetricsServiceResponse } from '../export-response';
/** Serializes ResourceMetrics to OTLP/JSON bytes and parses the JSON response. */
export declare const JsonMetricsSerializer: ISerializer<ResourceMetrics, IExportMetricsServiceResponse>;
//# sourceMappingURL=metrics.d.ts.map

View File

@@ -0,0 +1,20 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonMetricsSerializer = void 0;
const internal_1 = require("../internal");
const utils_1 = require("../../common/utils");
exports.JsonMetricsSerializer = {
serializeRequest: (arg) => {
const request = (0, internal_1.createExportMetricsServiceRequest)([arg], utils_1.JSON_ENCODER);
const encoder = new TextEncoder();
return encoder.encode(JSON.stringify(request));
},
deserializeResponse: (arg) => {
if (arg.length === 0) {
return {};
}
const decoder = new TextDecoder();
return JSON.parse(decoder.decode(arg));
},
};
//# sourceMappingURL=metrics.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"metrics.js","sourceRoot":"","sources":["../../../../src/metrics/json/metrics.ts"],"names":[],"mappings":";;;AAMA,0CAAgE;AAEhE,8CAAkD;AAErC,QAAA,qBAAqB,GAG9B;IACF,gBAAgB,EAAE,CAAC,GAAoB,EAAE,EAAE;QACzC,MAAM,OAAO,GAAG,IAAA,4CAAiC,EAAC,CAAC,GAAG,CAAC,EAAE,oBAAY,CAAC,CAAC;QACvE,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClC,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,mBAAmB,EAAE,CAAC,GAAe,EAAE,EAAE;QACvC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;YACpB,OAAO,EAAE,CAAC;SACX;QACD,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClC,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAkC,CAAC;IAC1E,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\nimport type { ISerializer } from '../../i-serializer';\nimport type { ResourceMetrics } from '@opentelemetry/sdk-metrics';\nimport { createExportMetricsServiceRequest } from '../internal';\nimport type { IExportMetricsServiceResponse } from '../export-response';\nimport { JSON_ENCODER } from '../../common/utils';\n\nexport const JsonMetricsSerializer: ISerializer<\n ResourceMetrics,\n IExportMetricsServiceResponse\n> = {\n serializeRequest: (arg: ResourceMetrics) => {\n const request = createExportMetricsServiceRequest([arg], JSON_ENCODER);\n const encoder = new TextEncoder();\n return encoder.encode(JSON.stringify(request));\n },\n deserializeResponse: (arg: Uint8Array) => {\n if (arg.length === 0) {\n return {};\n }\n const decoder = new TextDecoder();\n return JSON.parse(decoder.decode(arg)) as IExportMetricsServiceResponse;\n },\n};\n"]}

View File

@@ -0,0 +1,2 @@
// Public entry point for the OTLP/protobuf metrics serializer.
export { ProtobufMetricsSerializer } from './metrics';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1,11 @@
"use strict";
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ProtobufMetricsSerializer = void 0;
// IMPORTANT: exports added here are public
// Re-exported through a getter so the binding stays live with ./metrics.
var metrics_1 = require("./metrics");
Object.defineProperty(exports, "ProtobufMetricsSerializer", { enumerable: true, get: function () { return metrics_1.ProtobufMetricsSerializer; } });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/metrics/protobuf/index.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,2CAA2C;AAC3C,qCAAsD;AAA7C,oHAAA,yBAAyB,OAAA","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\n// IMPORTANT: exports added here are public\nexport { ProtobufMetricsSerializer } from './metrics';\n"]}

View File

@@ -0,0 +1,5 @@
import type { ISerializer } from '../../i-serializer';
import type { ResourceMetrics } from '@opentelemetry/sdk-metrics';
import type { IExportMetricsServiceResponse } from '../export-response';
/** Serializes ResourceMetrics to OTLP/protobuf bytes and decodes the protobuf response. */
export declare const ProtobufMetricsSerializer: ISerializer<ResourceMetrics, IExportMetricsServiceResponse>;
//# sourceMappingURL=metrics.d.ts.map

View File

@@ -0,0 +1,24 @@
"use strict";
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.ProtobufMetricsSerializer = void 0;
const root = require("../../generated/root");
const internal_1 = require("../internal");
const utils_1 = require("../../common/utils");
const metricsResponseType = root.opentelemetry.proto.collector.metrics.v1
.ExportMetricsServiceResponse;
const metricsRequestType = root.opentelemetry.proto.collector.metrics.v1
.ExportMetricsServiceRequest;
exports.ProtobufMetricsSerializer = {
serializeRequest: (arg) => {
const request = (0, internal_1.createExportMetricsServiceRequest)([arg], utils_1.PROTOBUF_ENCODER);
return metricsRequestType.encode(request).finish();
},
deserializeResponse: (arg) => {
return metricsResponseType.decode(arg);
},
};
//# sourceMappingURL=metrics.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"metrics.js","sourceRoot":"","sources":["../../../../src/metrics/protobuf/metrics.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,6CAA6C;AAI7C,0CAAgE;AAGhE,8CAAsD;AAEtD,MAAM,mBAAmB,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE;KACtE,4BAAyE,CAAC;AAE7E,MAAM,kBAAkB,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE;KACrE,2BAAuE,CAAC;AAE9D,QAAA,yBAAyB,GAGlC;IACF,gBAAgB,EAAE,CAAC,GAAoB,EAAE,EAAE;QACzC,MAAM,OAAO,GAAG,IAAA,4CAAiC,EAAC,CAAC,GAAG,CAAC,EAAE,wBAAgB,CAAC,CAAC;QAC3E,OAAO,kBAAkB,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAC;IACrD,CAAC;IACD,mBAAmB,EAAE,CAAC,GAAe,EAAE,EAAE;QACvC,OAAO,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;IACzC,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport * as root from '../../generated/root';\nimport type { ISerializer } from '../../i-serializer';\nimport type { IExportMetricsServiceRequest } from '../internal-types';\nimport type { ExportType } from '../../common/protobuf/protobuf-export-type';\nimport { createExportMetricsServiceRequest } from '../internal';\nimport type { ResourceMetrics } from '@opentelemetry/sdk-metrics';\nimport type { IExportMetricsServiceResponse } from '../export-response';\nimport { PROTOBUF_ENCODER } from '../../common/utils';\n\nconst metricsResponseType = root.opentelemetry.proto.collector.metrics.v1\n .ExportMetricsServiceResponse as ExportType<IExportMetricsServiceResponse>;\n\nconst metricsRequestType = root.opentelemetry.proto.collector.metrics.v1\n .ExportMetricsServiceRequest as ExportType<IExportMetricsServiceRequest>;\n\nexport const ProtobufMetricsSerializer: ISerializer<\n ResourceMetrics,\n IExportMetricsServiceResponse\n> = {\n serializeRequest: (arg: ResourceMetrics) => {\n const request = createExportMetricsServiceRequest([arg], PROTOBUF_ENCODER);\n return metricsRequestType.encode(request).finish();\n },\n deserializeResponse: (arg: Uint8Array) => {\n return metricsResponseType.decode(arg);\n },\n};\n"]}