diff --git a/.gitmodules b/.gitmodules index 33a9a7d9b21..1cf297f7875 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,6 +1,6 @@ -[submodule "experimental/packages/otlp-grpc-exporter-base/protos"] - path = experimental/packages/otlp-grpc-exporter-base/protos - url = https://github.com/open-telemetry/opentelemetry-proto.git [submodule "experimental/packages/otlp-proto-exporter-base/protos"] path = experimental/packages/otlp-proto-exporter-base/protos url = https://github.com/open-telemetry/opentelemetry-proto.git +[submodule "experimental/packages/otlp-transformer/protos"] + path = experimental/packages/otlp-transformer/protos + url = https://github.com/open-telemetry/opentelemetry-proto.git diff --git a/experimental/CHANGELOG.md b/experimental/CHANGELOG.md index 717c64e493f..6dfa4301322 100644 --- a/experimental/CHANGELOG.md +++ b/experimental/CHANGELOG.md @@ -6,6 +6,12 @@ All notable changes to experimental packages in this project will be documented ### :boom: Breaking Change +* feat(exporter-*-otlp-*)!: move serialization for Node.js exporters to `@opentelemetry/otlp-transformer` [#4542](https://github.com/open-telemetry/opentelemetry-js/pull/4542) @pichlermarc + * Breaking changes: + * (user-facing) `convert()` now returns an empty object and will be removed in a follow-up + * (internal) OTLPExporterNodeBase now has additional constructor parameters that are required + * (internal) OTLPExporterNodeBase now has an additional `ResponseType` type parameter + ### :rocket: (Enhancement) ### :bug: (Bug Fix) diff --git a/experimental/packages/exporter-logs-otlp-grpc/src/OTLPLogExporter.ts b/experimental/packages/exporter-logs-otlp-grpc/src/OTLPLogExporter.ts index b59a177b22e..600f6626b06 100644 --- a/experimental/packages/exporter-logs-otlp-grpc/src/OTLPLogExporter.ts +++ b/experimental/packages/exporter-logs-otlp-grpc/src/OTLPLogExporter.ts @@ -21,12 +21,11 @@ import { OTLPGRPCExporterNodeBase, validateAndNormalizeUrl, DEFAULT_COLLECTOR_URL, - LogsSerializer, } from '@opentelemetry/otlp-grpc-exporter-base'; import { - createExportLogsServiceRequest, IExportLogsServiceRequest, IExportLogsServiceResponse, + ProtobufLogsSerializer, } from '@opentelemetry/otlp-transformer'; import { VERSION } from './version'; @@ -57,14 +56,10 @@ export class OTLPLogExporter signalSpecificMetadata, 'LogsExportService', '/opentelemetry.proto.collector.logs.v1.LogsService/Export', - LogsSerializer + ProtobufLogsSerializer ); } - convert(logRecords: ReadableLogRecord[]): IExportLogsServiceRequest { - return createExportLogsServiceRequest(logRecords); - } - getDefaultUrl(config: OTLPGRPCExporterConfigNode) { return validateAndNormalizeUrl(this.getUrlFromConfig(config)); } diff --git a/experimental/packages/exporter-logs-otlp-grpc/test/OTLPLogExporter.test.ts b/experimental/packages/exporter-logs-otlp-grpc/test/OTLPLogExporter.test.ts index e179bd5f85b..db4d0b7a925 100644 --- a/experimental/packages/exporter-logs-otlp-grpc/test/OTLPLogExporter.test.ts +++ b/experimental/packages/exporter-logs-otlp-grpc/test/OTLPLogExporter.test.ts @@ -40,9 +40,7 @@ import { VERSION } from '../src/version'; const logsServiceProtoPath = 'opentelemetry/proto/collector/logs/v1/logs_service.proto'; -const includeDirs = [ - path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos'), -]; +const includeDirs = [path.resolve(__dirname, '../../otlp-transformer/protos')]; const httpAddr = 'https://localhost:1503'; const udsAddr = 'unix:///tmp/otlp-logs.sock'; diff --git 
a/experimental/packages/exporter-logs-otlp-http/src/platform/node/OTLPLogExporter.ts b/experimental/packages/exporter-logs-otlp-http/src/platform/node/OTLPLogExporter.ts index 0a020cb0593..e422f1c5516 100644 --- a/experimental/packages/exporter-logs-otlp-http/src/platform/node/OTLPLogExporter.ts +++ b/experimental/packages/exporter-logs-otlp-http/src/platform/node/OTLPLogExporter.ts @@ -19,13 +19,16 @@ import type { LogRecordExporter, } from '@opentelemetry/sdk-logs'; import type { OTLPExporterNodeConfigBase } from '@opentelemetry/otlp-exporter-base'; -import type { IExportLogsServiceRequest } from '@opentelemetry/otlp-transformer'; +import type { + IExportLogsServiceRequest, + IExportLogsServiceResponse, +} from '@opentelemetry/otlp-transformer'; import { getEnv, baggageUtils } from '@opentelemetry/core'; import { OTLPExporterNodeBase, parseHeaders, } from '@opentelemetry/otlp-exporter-base'; -import { createExportLogsServiceRequest } from '@opentelemetry/otlp-transformer'; +import { JsonLogsSerializer } from '@opentelemetry/otlp-transformer'; import { getDefaultUrl } from '../config'; import { VERSION } from '../../version'; @@ -38,15 +41,23 @@ const USER_AGENT = { * Collector Logs Exporter for Node */ export class OTLPLogExporter - extends OTLPExporterNodeBase + extends OTLPExporterNodeBase< + ReadableLogRecord, + IExportLogsServiceRequest, + IExportLogsServiceResponse + > implements LogRecordExporter { constructor(config: OTLPExporterNodeConfigBase = {}) { // load OTEL_EXPORTER_OTLP_LOGS_TIMEOUT env - super({ - timeoutMillis: getEnv().OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, - ...config, - }); + super( + { + timeoutMillis: getEnv().OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, + ...config, + }, + JsonLogsSerializer, + 'application/json' + ); this.headers = { ...this.headers, ...USER_AGENT, @@ -57,13 +68,6 @@ export class OTLPLogExporter }; } - convert(logRecords: ReadableLogRecord[]): IExportLogsServiceRequest { - return createExportLogsServiceRequest(logRecords, { - useHex: true, - useLongBits: false, - }); - } - getDefaultUrl(config: OTLPExporterNodeConfigBase): string { return getDefaultUrl(config); } diff --git a/experimental/packages/exporter-logs-otlp-proto/src/platform/node/OTLPLogExporter.ts b/experimental/packages/exporter-logs-otlp-proto/src/platform/node/OTLPLogExporter.ts index 16e86fc21e0..9fc7ad479c1 100644 --- a/experimental/packages/exporter-logs-otlp-proto/src/platform/node/OTLPLogExporter.ts +++ b/experimental/packages/exporter-logs-otlp-proto/src/platform/node/OTLPLogExporter.ts @@ -19,15 +19,14 @@ import { OTLPExporterConfigBase, appendResourcePathToUrl, appendRootPathToUrlIfNeeded, + OTLPExporterNodeBase, parseHeaders, } from '@opentelemetry/otlp-exporter-base'; +import { ServiceClientType } from '@opentelemetry/otlp-proto-exporter-base'; import { - OTLPProtoExporterNodeBase, - ServiceClientType, -} from '@opentelemetry/otlp-proto-exporter-base'; -import { - createExportLogsServiceRequest, IExportLogsServiceRequest, + IExportLogsServiceResponse, + ProtobufLogsSerializer, } from '@opentelemetry/otlp-transformer'; import { ReadableLogRecord, LogRecordExporter } from '@opentelemetry/sdk-logs'; @@ -44,14 +43,15 @@ const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURC * Collector Trace Exporter for Node */ export class OTLPLogExporter - extends OTLPProtoExporterNodeBase< + extends OTLPExporterNodeBase< ReadableLogRecord, - IExportLogsServiceRequest + IExportLogsServiceRequest, + IExportLogsServiceResponse > implements LogRecordExporter { constructor(config: 
OTLPExporterConfigBase = {}) { - super(config); + super(config, ProtobufLogsSerializer, 'application/x-protobuf'); this.headers = { ...this.headers, ...USER_AGENT, @@ -61,9 +61,6 @@ export class OTLPLogExporter ...parseHeaders(config?.headers), }; } - convert(logs: ReadableLogRecord[]): IExportLogsServiceRequest { - return createExportLogsServiceRequest(logs); - } getDefaultUrl(config: OTLPExporterConfigBase): string { return typeof config.url === 'string' diff --git a/experimental/packages/exporter-logs-otlp-proto/test/node/OTLPLogExporter.test.ts b/experimental/packages/exporter-logs-otlp-proto/test/node/OTLPLogExporter.test.ts index 9778c950464..8bf14362062 100644 --- a/experimental/packages/exporter-logs-otlp-proto/test/node/OTLPLogExporter.test.ts +++ b/experimental/packages/exporter-logs-otlp-proto/test/node/OTLPLogExporter.test.ts @@ -193,8 +193,6 @@ describe('OTLPLogExporter - node with proto over http', () => { }); it('should open the connection', done => { - collectorExporter.export(logs, () => {}); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.hostname, 'foo.bar.com'); assert.strictEqual(options.method, 'POST'); @@ -206,11 +204,10 @@ describe('OTLPLogExporter - node with proto over http', () => { done(); return fakeRequest as any; }); + collectorExporter.export(logs, () => {}); }); it('should set custom headers', done => { - collectorExporter.export(logs, () => {}); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.headers['foo'], 'bar'); @@ -220,11 +217,10 @@ describe('OTLPLogExporter - node with proto over http', () => { done(); return fakeRequest as any; }); + collectorExporter.export(logs, () => {}); }); it('should have keep alive and keepAliveMsecs option set', done => { - collectorExporter.export(logs, () => {}); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.agent.keepAlive, true); assert.strictEqual(options.agent.options.keepAliveMsecs, 2000); @@ -235,6 +231,7 @@ describe('OTLPLogExporter - node with proto over http', () => { done(); return fakeRequest as any; }); + collectorExporter.export(logs, () => {}); }); it('should successfully send the logs', done => { @@ -271,28 +268,21 @@ describe('OTLPLogExporter - node with proto over http', () => { // Need to stub/spy on the underlying logger as the "diag" instance is global const spyLoggerError = sinon.stub(diag, 'error'); - collectorExporter.export(logs, result => { - assert.strictEqual(result.code, ExportResultCode.SUCCESS); - assert.strictEqual(spyLoggerError.args.length, 0); - done(); - }); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { const mockRes = new MockedResponse(200); cb(mockRes); mockRes.send('success'); return fakeRequest as any; }); - }); - it('should log the error message', done => { collectorExporter.export(logs, result => { - assert.strictEqual(result.code, ExportResultCode.FAILED); - // @ts-expect-error verify error code - assert.strictEqual(result.error.code, 400); + assert.strictEqual(result.code, ExportResultCode.SUCCESS); + assert.strictEqual(spyLoggerError.args.length, 0); done(); }); + }); + it('should log the error message', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { const mockResError = new MockedResponse(400); cb(mockResError); @@ -300,6 +290,13 @@ describe('OTLPLogExporter - node with proto over http', () => { return fakeRequest as any; }); + + collectorExporter.export(logs, result => { + 
assert.strictEqual(result.code, ExportResultCode.FAILED); + // @ts-expect-error verify error code + assert.strictEqual(result.error.code, 400); + done(); + }); }); }); describe('export - with compression', () => { diff --git a/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts index 88e55734e6e..82bdaa3da27 100644 --- a/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-grpc/src/OTLPTraceExporter.ts @@ -21,12 +21,11 @@ import { OTLPGRPCExporterNodeBase, validateAndNormalizeUrl, DEFAULT_COLLECTOR_URL, - TraceSerializer, } from '@opentelemetry/otlp-grpc-exporter-base'; import { - createExportTraceServiceRequest, IExportTraceServiceRequest, IExportTraceServiceResponse, + ProtobufTraceSerializer, } from '@opentelemetry/otlp-transformer'; import { VERSION } from './version'; @@ -57,14 +56,10 @@ export class OTLPTraceExporter signalSpecificMetadata, 'TraceExportService', '/opentelemetry.proto.collector.trace.v1.TraceService/Export', - TraceSerializer + ProtobufTraceSerializer ); } - convert(spans: ReadableSpan[]): IExportTraceServiceRequest { - return createExportTraceServiceRequest(spans); - } - getDefaultUrl(config: OTLPGRPCExporterConfigNode) { return validateAndNormalizeUrl(this.getUrlFromConfig(config)); } diff --git a/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts b/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts index 3d8bfdf3fb4..da30913e080 100644 --- a/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts +++ b/experimental/packages/exporter-trace-otlp-grpc/test/OTLPTraceExporter.test.ts @@ -44,9 +44,7 @@ import { const traceServiceProtoPath = 'opentelemetry/proto/collector/trace/v1/trace_service.proto'; -const includeDirs = [ - path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos'), -]; +const includeDirs = [path.resolve(__dirname, '../../otlp-transformer/protos')]; const httpAddr = 'https://localhost:1501'; const udsAddr = 'unix:///tmp/otlp-traces.sock'; diff --git a/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts index 892e5c5b692..54180a9a2b1 100644 --- a/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-http/src/platform/node/OTLPTraceExporter.ts @@ -26,10 +26,11 @@ import { appendRootPathToUrlIfNeeded, } from '@opentelemetry/otlp-exporter-base'; import { - createExportTraceServiceRequest, IExportTraceServiceRequest, + IExportTraceServiceResponse, } from '@opentelemetry/otlp-transformer'; import { VERSION } from '../../version'; +import { JsonTraceSerializer } from '@opentelemetry/otlp-transformer'; const DEFAULT_COLLECTOR_RESOURCE_PATH = 'v1/traces'; const DEFAULT_COLLECTOR_URL = `http://localhost:4318/${DEFAULT_COLLECTOR_RESOURCE_PATH}`; @@ -41,11 +42,15 @@ const USER_AGENT = { * Collector Trace Exporter for Node */ export class OTLPTraceExporter - extends OTLPExporterNodeBase + extends OTLPExporterNodeBase< + ReadableSpan, + IExportTraceServiceRequest, + IExportTraceServiceResponse + > implements SpanExporter { constructor(config: OTLPExporterNodeConfigBase = {}) { - super(config); + super(config, JsonTraceSerializer, 'application/json'); this.headers = { ...this.headers, ...USER_AGENT, @@ -56,13 +61,6 @@ export class 
OTLPTraceExporter }; } - convert(spans: ReadableSpan[]): IExportTraceServiceRequest { - return createExportTraceServiceRequest(spans, { - useHex: true, - useLongBits: false, - }); - } - getDefaultUrl(config: OTLPExporterNodeConfigBase): string { return typeof config.url === 'string' ? config.url diff --git a/experimental/packages/exporter-trace-otlp-proto/src/platform/node/OTLPTraceExporter.ts b/experimental/packages/exporter-trace-otlp-proto/src/platform/node/OTLPTraceExporter.ts index 48191660ecc..8bcb35fd36f 100644 --- a/experimental/packages/exporter-trace-otlp-proto/src/platform/node/OTLPTraceExporter.ts +++ b/experimental/packages/exporter-trace-otlp-proto/src/platform/node/OTLPTraceExporter.ts @@ -20,15 +20,14 @@ import { OTLPExporterNodeConfigBase, appendResourcePathToUrl, appendRootPathToUrlIfNeeded, + OTLPExporterNodeBase, parseHeaders, } from '@opentelemetry/otlp-exporter-base'; +import { ServiceClientType } from '@opentelemetry/otlp-proto-exporter-base'; import { - OTLPProtoExporterNodeBase, - ServiceClientType, -} from '@opentelemetry/otlp-proto-exporter-base'; -import { - createExportTraceServiceRequest, IExportTraceServiceRequest, + IExportTraceServiceResponse, + ProtobufTraceSerializer, } from '@opentelemetry/otlp-transformer'; import { VERSION } from '../../version'; @@ -42,11 +41,15 @@ const USER_AGENT = { * Collector Trace Exporter for Node with protobuf */ export class OTLPTraceExporter - extends OTLPProtoExporterNodeBase + extends OTLPExporterNodeBase< + ReadableSpan, + IExportTraceServiceRequest, + IExportTraceServiceResponse + > implements SpanExporter { constructor(config: OTLPExporterNodeConfigBase = {}) { - super(config); + super(config, ProtobufTraceSerializer, 'application/x-protobuf'); this.headers = { ...this.headers, ...USER_AGENT, @@ -57,10 +60,6 @@ export class OTLPTraceExporter }; } - convert(spans: ReadableSpan[]): IExportTraceServiceRequest { - return createExportTraceServiceRequest(spans); - } - getDefaultUrl(config: OTLPExporterNodeConfigBase) { return typeof config.url === 'string' ? 
config.url diff --git a/experimental/packages/exporter-trace-otlp-proto/test/node/OTLPTraceExporter.test.ts b/experimental/packages/exporter-trace-otlp-proto/test/node/OTLPTraceExporter.test.ts index b18c5a39deb..7dfbb101fe5 100644 --- a/experimental/packages/exporter-trace-otlp-proto/test/node/OTLPTraceExporter.test.ts +++ b/experimental/packages/exporter-trace-otlp-proto/test/node/OTLPTraceExporter.test.ts @@ -197,8 +197,6 @@ describe('OTLPTraceExporter - node with proto over http', () => { }); it('should open the connection', done => { - collectorExporter.export(spans, () => {}); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.hostname, 'foo.bar.com'); assert.strictEqual(options.method, 'POST'); @@ -210,11 +208,11 @@ describe('OTLPTraceExporter - node with proto over http', () => { done(); return fakeRequest as any; }); - }); - it('should set custom headers', done => { collectorExporter.export(spans, () => {}); + }); + it('should set custom headers', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.headers['foo'], 'bar'); @@ -224,11 +222,11 @@ describe('OTLPTraceExporter - node with proto over http', () => { done(); return fakeRequest as any; }); - }); - it('should have keep alive and keepAliveMsecs option set', done => { collectorExporter.export(spans, () => {}); + }); + it('should have keep alive and keepAliveMsecs option set', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.agent.keepAlive, true); assert.strictEqual(options.agent.options.keepAliveMsecs, 2000); @@ -239,6 +237,8 @@ describe('OTLPTraceExporter - node with proto over http', () => { done(); return fakeRequest as any; }); + + collectorExporter.export(spans, () => {}); }); it('should successfully send the spans', done => { @@ -275,28 +275,21 @@ describe('OTLPTraceExporter - node with proto over http', () => { // Need to stub/spy on the underlying logger as the "diag" instance is global const spyLoggerError = sinon.stub(diag, 'error'); - collectorExporter.export(spans, result => { - assert.strictEqual(result.code, ExportResultCode.SUCCESS); - assert.strictEqual(spyLoggerError.args.length, 0); - done(); - }); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { const mockRes = new MockedResponse(200); cb(mockRes); mockRes.send('success'); return fakeRequest as any; }); - }); - it('should log the error message', done => { collectorExporter.export(spans, result => { - assert.strictEqual(result.code, ExportResultCode.FAILED); - // @ts-expect-error verify error code - assert.strictEqual(result.error.code, 400); + assert.strictEqual(result.code, ExportResultCode.SUCCESS); + assert.strictEqual(spyLoggerError.args.length, 0); done(); }); + }); + it('should log the error message', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { const mockResError = new MockedResponse(400); cb(mockResError); @@ -304,6 +297,13 @@ describe('OTLPTraceExporter - node with proto over http', () => { return fakeRequest as any; }); + + collectorExporter.export(spans, result => { + assert.strictEqual(result.code, ExportResultCode.FAILED); + // @ts-expect-error verify error code + assert.strictEqual(result.error.code, 400); + done(); + }); }); }); describe('export - with compression', () => { diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts 
b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts index 4151dbd5396..864f0f7c322 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/src/OTLPMetricExporter.ts @@ -24,13 +24,12 @@ import { OTLPGRPCExporterNodeBase, validateAndNormalizeUrl, DEFAULT_COLLECTOR_URL, - MetricsSerializer, } from '@opentelemetry/otlp-grpc-exporter-base'; import { baggageUtils, getEnv } from '@opentelemetry/core'; import { - createExportMetricsServiceRequest, IExportMetricsServiceRequest, IExportMetricsServiceResponse, + ProtobufMetricsSerializer, } from '@opentelemetry/otlp-transformer'; import { VERSION } from './version'; import { parseHeaders } from '@opentelemetry/otlp-exporter-base'; @@ -57,7 +56,7 @@ class OTLPMetricExporterProxy extends OTLPGRPCExporterNodeBase< signalSpecificMetadata, 'MetricsExportService', '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export', - MetricsSerializer + ProtobufMetricsSerializer ); } @@ -65,10 +64,6 @@ class OTLPMetricExporterProxy extends OTLPGRPCExporterNodeBase< return validateAndNormalizeUrl(this.getUrlFromConfig(config)); } - convert(metrics: ResourceMetrics[]): IExportMetricsServiceRequest { - return createExportMetricsServiceRequest(metrics); - } - getUrlFromConfig(config: OTLPGRPCExporterConfigNode): string { if (typeof config.url === 'string') { return config.url; diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts index 3ed034bde89..a90b13923ef 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-grpc/test/OTLPMetricExporter.test.ts @@ -45,9 +45,7 @@ import { AggregationTemporalityPreference } from '@opentelemetry/exporter-metric const metricsServiceProtoPath = 'opentelemetry/proto/collector/metrics/v1/metrics_service.proto'; -const includeDirs = [ - path.resolve(__dirname, '../../otlp-grpc-exporter-base/protos'), -]; +const includeDirs = [path.resolve(__dirname, '../../otlp-transformer/protos')]; const httpAddr = 'https://localhost:1502'; const udsAddr = 'unix:///tmp/otlp-metrics.sock'; diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts index 5831f74cf9a..d76249b316f 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-http/src/platform/node/OTLPMetricExporter.ts @@ -26,8 +26,9 @@ import { parseHeaders, } from '@opentelemetry/otlp-exporter-base'; import { - createExportMetricsServiceRequest, IExportMetricsServiceRequest, + IExportMetricsServiceResponse, + JsonMetricsSerializer, } from '@opentelemetry/otlp-transformer'; import { VERSION } from '../../version'; @@ -39,10 +40,11 @@ const USER_AGENT = { class OTLPExporterNodeProxy extends OTLPExporterNodeBase< ResourceMetrics, - IExportMetricsServiceRequest + IExportMetricsServiceRequest, + IExportMetricsServiceResponse > { constructor(config?: OTLPExporterNodeConfigBase & OTLPMetricExporterOptions) { - super(config); + super(config, JsonMetricsSerializer, 'application/json'); this.headers = 
{ ...this.headers, ...USER_AGENT, @@ -53,10 +55,6 @@ class OTLPExporterNodeProxy extends OTLPExporterNodeBase< }; } - convert(metrics: ResourceMetrics[]): IExportMetricsServiceRequest { - return createExportMetricsServiceRequest(metrics, { useLongBits: false }); - } - getDefaultUrl(config: OTLPExporterNodeConfigBase): string { return typeof config.url === 'string' ? config.url diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts index 9d424687cf6..598b49806c9 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/src/OTLPMetricExporter.ts @@ -15,10 +15,7 @@ */ import { OTLPMetricExporterOptions } from '@opentelemetry/exporter-metrics-otlp-http'; -import { - ServiceClientType, - OTLPProtoExporterNodeBase, -} from '@opentelemetry/otlp-proto-exporter-base'; +import { ServiceClientType } from '@opentelemetry/otlp-proto-exporter-base'; import { getEnv, baggageUtils } from '@opentelemetry/core'; import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; import { OTLPMetricExporterBase } from '@opentelemetry/exporter-metrics-otlp-http'; @@ -27,10 +24,12 @@ import { appendResourcePathToUrl, appendRootPathToUrlIfNeeded, parseHeaders, + OTLPExporterNodeBase, } from '@opentelemetry/otlp-exporter-base'; import { - createExportMetricsServiceRequest, IExportMetricsServiceRequest, + IExportMetricsServiceResponse, + ProtobufMetricsSerializer, } from '@opentelemetry/otlp-transformer'; import { VERSION } from './version'; @@ -40,12 +39,13 @@ const USER_AGENT = { 'User-Agent': `OTel-OTLP-Exporter-JavaScript/${VERSION}`, }; -class OTLPMetricExporterNodeProxy extends OTLPProtoExporterNodeBase< +class OTLPMetricExporterNodeProxy extends OTLPExporterNodeBase< ResourceMetrics, - IExportMetricsServiceRequest + IExportMetricsServiceRequest, + IExportMetricsServiceResponse > { constructor(config?: OTLPExporterNodeConfigBase & OTLPMetricExporterOptions) { - super(config); + super(config, ProtobufMetricsSerializer, 'application/x-protobuf'); this.headers = { ...this.headers, ...USER_AGENT, @@ -56,10 +56,6 @@ class OTLPMetricExporterNodeProxy extends OTLPProtoExporterNodeBase< }; } - convert(metrics: ResourceMetrics[]): IExportMetricsServiceRequest { - return createExportMetricsServiceRequest(metrics); - } - getDefaultUrl(config: OTLPExporterNodeConfigBase) { return typeof config.url === 'string' ? 
config.url diff --git a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts index 51c175f8e6c..8398b2a3f77 100644 --- a/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts +++ b/experimental/packages/opentelemetry-exporter-metrics-otlp-proto/test/OTLPMetricExporter.test.ts @@ -228,8 +228,6 @@ describe('OTLPMetricExporter - node with proto over http', () => { }); it('should open the connection', done => { - collectorExporter.export(metrics, () => {}); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.hostname, 'foo.bar.com'); assert.strictEqual(options.method, 'POST'); @@ -241,11 +239,11 @@ describe('OTLPMetricExporter - node with proto over http', () => { done(); return fakeRequest as any; }); - }); - it('should set custom headers', done => { collectorExporter.export(metrics, () => {}); + }); + it('should set custom headers', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.headers['foo'], 'bar'); @@ -256,11 +254,11 @@ describe('OTLPMetricExporter - node with proto over http', () => { done(); return fakeRequest as any; }); - }); - it('should have keep alive and keepAliveMsecs option set', done => { collectorExporter.export(metrics, () => {}); + }); + it('should have keep alive and keepAliveMsecs option set', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { assert.strictEqual(options.agent.keepAlive, true); assert.strictEqual(options.agent.options.keepAliveMsecs, 2000); @@ -272,6 +270,8 @@ describe('OTLPMetricExporter - node with proto over http', () => { done(); return fakeRequest as any; }); + + collectorExporter.export(metrics, () => {}); }); it('should successfully send metrics', done => { @@ -353,28 +353,21 @@ describe('OTLPMetricExporter - node with proto over http', () => { // Need to stub/spy on the underlying logger as the "diag" instance is global const spyLoggerError = sinon.stub(diag, 'error'); - collectorExporter.export(metrics, result => { - assert.strictEqual(result.code, ExportResultCode.SUCCESS); - assert.strictEqual(spyLoggerError.args.length, 0); - done(); - }); - sinon.stub(http, 'request').callsFake((options: any, cb: any) => { const mockRes = new MockedResponse(200); cb(mockRes); mockRes.send('success'); return fakeRequest as any; }); - }); - it('should log the error message', done => { collectorExporter.export(metrics, result => { - assert.strictEqual(result.code, ExportResultCode.FAILED); - // @ts-expect-error verify error code - assert.strictEqual(result.error.code, 400); + assert.strictEqual(result.code, ExportResultCode.SUCCESS); + assert.strictEqual(spyLoggerError.args.length, 0); done(); }); + }); + it('should log the error message', done => { sinon.stub(http, 'request').callsFake((options: any, cb: any) => { const mockResError = new MockedResponse(400); cb(mockResError); @@ -382,6 +375,13 @@ describe('OTLPMetricExporter - node with proto over http', () => { return fakeRequest as any; }); + + collectorExporter.export(metrics, result => { + assert.strictEqual(result.code, ExportResultCode.FAILED); + // @ts-expect-error verify error code + assert.strictEqual(result.error.code, 400); + done(); + }); }); }); }); diff --git a/experimental/packages/otlp-exporter-base/package.json b/experimental/packages/otlp-exporter-base/package.json index 
d5bac712108..c8485d09c13 100644 --- a/experimental/packages/otlp-exporter-base/package.json +++ b/experimental/packages/otlp-exporter-base/package.json @@ -61,7 +61,8 @@ "access": "public" }, "dependencies": { - "@opentelemetry/core": "1.24.0" + "@opentelemetry/core": "1.24.0", + "@opentelemetry/otlp-transformer": "0.51.0" }, "devDependencies": { "@babel/core": "7.23.6", diff --git a/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts b/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts index 5a8b1dfdfa5..c4b13e8218d 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/node/OTLPExporterNodeBase.ts @@ -24,6 +24,7 @@ import { parseHeaders } from '../../util'; import { createHttpAgent, sendWithHttp, configureCompression } from './util'; import { diag } from '@opentelemetry/api'; import { getEnv, baggageUtils } from '@opentelemetry/core'; +import { ISerializer } from '@opentelemetry/otlp-transformer'; /** * Collector Metric Exporter abstract base class */ @@ -31,6 +32,7 @@ import { getEnv, baggageUtils } from '@opentelemetry/core'; export abstract class OTLPExporterNodeBase< ExportItem, ServiceRequest, + ServiceResponse, > extends OTLPExporterBase< OTLPExporterNodeConfigBase, ExportItem, @@ -40,9 +42,16 @@ export abstract class OTLPExporterNodeBase< headers: Record<string, string>; agent: http.Agent | https.Agent | undefined; compression: CompressionAlgorithm; + private _serializer: ISerializer<ExportItem[], ServiceResponse>; + private _contentType: string; - constructor(config: OTLPExporterNodeConfigBase = {}) { + constructor( + config: OTLPExporterNodeConfigBase = {}, + serializer: ISerializer<ExportItem[], ServiceResponse>, + contentType: string + ) { super(config); + this._contentType = contentType; // eslint-disable-next-line @typescript-eslint/no-explicit-any if ((config as any).metadata) { diag.warn('Metadata cannot be set when using http'); @@ -54,10 +63,16 @@ export abstract class OTLPExporterNodeBase< ); this.agent = createHttpAgent(config); this.compression = configureCompression(config.compression); + this._serializer = serializer; } onInit(_config: OTLPExporterNodeConfigBase): void {} + override convert(_objects: ExportItem[]): ServiceRequest { + // TODO(pichlermarc): needs to be removed from base in a follow-up + return {} as ServiceRequest; + } + send( objects: ExportItem[], onSuccess: () => void, @@ -67,13 +82,12 @@ export abstract class OTLPExporterNodeBase< diag.debug('Shutdown already started. Cannot send objects'); return; } - const serviceRequest = this.convert(objects); const promise = new Promise<void>((resolve, reject) => { sendWithHttp( this, - JSON.stringify(serviceRequest), - 'application/json', + this._serializer.serializeRequest(objects) ??
new Uint8Array(), + this._contentType, resolve, reject ); diff --git a/experimental/packages/otlp-exporter-base/src/platform/node/util.ts b/experimental/packages/otlp-exporter-base/src/platform/node/util.ts index fd40981e857..a42c95b1101 100644 --- a/experimental/packages/otlp-exporter-base/src/platform/node/util.ts +++ b/experimental/packages/otlp-exporter-base/src/platform/node/util.ts @@ -41,9 +41,9 @@ import { * @param onSuccess * @param onError */ -export function sendWithHttp( - collector: OTLPExporterNodeBase, - data: string | Buffer, +export function sendWithHttp( + collector: OTLPExporterNodeBase, + data: string | Uint8Array, contentType: string, onSuccess: () => void, onError: (error: OTLPExporterError) => void @@ -165,7 +165,7 @@ export function sendWithHttp( switch (collector.compression) { case CompressionAlgorithm.GZIP: { req.setHeader('Content-Encoding', 'gzip'); - const dataStream = readableFromBuffer(data); + const dataStream = readableFromUnit8Array(data); dataStream .on('error', onError) .pipe(zlib.createGzip()) @@ -175,14 +175,14 @@ export function sendWithHttp( break; } default: - req.end(data); + req.end(Buffer.from(data)); break; } }; sendWithRetry(); } -function readableFromBuffer(buff: string | Buffer): Readable { +function readableFromUnit8Array(buff: string | Uint8Array): Readable { const readable = new Readable(); readable.push(buff); readable.push(null); diff --git a/experimental/packages/otlp-exporter-base/test/node/util.test.ts b/experimental/packages/otlp-exporter-base/test/node/util.test.ts index b279e57b9a1..39c2321349a 100644 --- a/experimental/packages/otlp-exporter-base/test/node/util.test.ts +++ b/experimental/packages/otlp-exporter-base/test/node/util.test.ts @@ -28,6 +28,7 @@ import { OTLPExporterError } from '../../src/types'; import { PassThrough } from 'stream'; import * as http from 'http'; import * as zlib from 'zlib'; +import { ISerializer } from '@opentelemetry/otlp-transformer'; // Meant to simulate http.IncomingMessage, at least the parts that sendWithHttp cares about // but make it a PassThrough so we can inspect it for the test @@ -50,19 +51,24 @@ class HttpRequest extends PassThrough { // Barebones exporter for use by sendWithHttp type ExporterConfig = OTLPExporterNodeConfigBase; -class Exporter extends OTLPExporterNodeBase { +class Exporter extends OTLPExporterNodeBase { getDefaultUrl(config: ExporterConfig): string { return config.url || ''; } +} - convert(spans: object[]): object { +const noopSerializer: ISerializer = { + serializeRequest(request: object): Uint8Array | undefined { + return new Uint8Array(); + }, + deserializeResponse(data: Uint8Array): object { return {}; - } -} + }, +}; describe('force flush', () => { it('forceFlush should flush spans and return', async () => { - const exporter = new Exporter({}); + const exporter = new Exporter({}, noopSerializer, ''); await exporter.forceFlush(); }); }); @@ -219,10 +225,14 @@ describe('sendWithHttp', () => { }); it('should send with no compression if configured to do so', () => { - exporter = new Exporter({ - url: 'http://foobar.com', - compression: CompressionAlgorithm.NONE, - }); + exporter = new Exporter( + { + url: 'http://foobar.com', + compression: CompressionAlgorithm.NONE, + }, + noopSerializer, + '' + ); const data = JSON.stringify(spanData); // Show that data is written to the request stream @@ -252,10 +262,14 @@ describe('sendWithHttp', () => { }); it('should send with gzip compression if configured to do so', () => { - exporter = new Exporter({ - url: 
'http://foobar.com', - compression: CompressionAlgorithm.GZIP, - }); + exporter = new Exporter( + { + url: 'http://foobar.com', + compression: CompressionAlgorithm.GZIP, + }, + noopSerializer, + '' + ); const data = JSON.stringify(spanData); const compressedData = zlib.gzipSync(Buffer.from(data)); @@ -287,10 +301,14 @@ describe('sendWithHttp', () => { }); it('should work with gzip compression enabled even after multiple requests', () => { - exporter = new Exporter({ - url: 'http://foobar.com', - compression: CompressionAlgorithm.GZIP, - }); + exporter = new Exporter( + { + url: 'http://foobar.com', + compression: CompressionAlgorithm.GZIP, + }, + noopSerializer, + '' + ); const data = JSON.stringify(spanData); const compressedData = zlib.gzipSync(Buffer.from(data)); diff --git a/experimental/packages/otlp-exporter-base/tsconfig.esm.json b/experimental/packages/otlp-exporter-base/tsconfig.esm.json index ae474033346..29f55b1213e 100644 --- a/experimental/packages/otlp-exporter-base/tsconfig.esm.json +++ b/experimental/packages/otlp-exporter-base/tsconfig.esm.json @@ -14,6 +14,9 @@ }, { "path": "../../../packages/opentelemetry-core" + }, + { + "path": "../otlp-transformer" } ] } diff --git a/experimental/packages/otlp-exporter-base/tsconfig.esnext.json b/experimental/packages/otlp-exporter-base/tsconfig.esnext.json index 5f6cf572d5a..0fb9636081e 100644 --- a/experimental/packages/otlp-exporter-base/tsconfig.esnext.json +++ b/experimental/packages/otlp-exporter-base/tsconfig.esnext.json @@ -14,6 +14,9 @@ }, { "path": "../../../packages/opentelemetry-core" + }, + { + "path": "../otlp-transformer" } ] } diff --git a/experimental/packages/otlp-exporter-base/tsconfig.json b/experimental/packages/otlp-exporter-base/tsconfig.json index 6feabd64655..2bff90df9b8 100644 --- a/experimental/packages/otlp-exporter-base/tsconfig.json +++ b/experimental/packages/otlp-exporter-base/tsconfig.json @@ -15,6 +15,9 @@ }, { "path": "../../../packages/opentelemetry-core" + }, + { + "path": "../otlp-transformer" } ] } diff --git a/experimental/packages/otlp-grpc-exporter-base/.gitignore b/experimental/packages/otlp-grpc-exporter-base/.gitignore deleted file mode 100644 index c82683cbd73..00000000000 --- a/experimental/packages/otlp-grpc-exporter-base/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -src/generated/* -!src/generated/.gitkeep diff --git a/experimental/packages/otlp-grpc-exporter-base/package.json b/experimental/packages/otlp-grpc-exporter-base/package.json index d26b584fdd8..a75c59c8500 100644 --- a/experimental/packages/otlp-grpc-exporter-base/package.json +++ b/experimental/packages/otlp-grpc-exporter-base/package.json @@ -8,17 +8,14 @@ "scripts": { "prepublishOnly": "npm run compile", "codecov": "nyc report --reporter=json && codecov -f coverage/*.json -p ../../../", - "compile": "npm run protos && tsc --build", + "compile": "tsc --build", "clean": "tsc --build --clean", "lint": "eslint . --ext .ts", "lint:fix": "eslint . 
--ext .ts --fix", - "protos": "npm run submodule && npm run protos:generate", - "protos:generate": "node ../../../scripts/generate-protos.js", - "submodule": "git submodule sync --recursive && git submodule update --init --recursive", "tdd": "npm run test -- --watch-extensions ts --watch", "test": "nyc ts-mocha -p tsconfig.json 'test/**/*.test.ts'", "version": "node ../../../scripts/version-update.js", - "watch": "npm run protos && tsc -w", + "watch": "tsc -w", "precompile": "cross-var lerna run version --scope $npm_package_name --include-dependencies", "prewatch": "npm run precompile" }, @@ -61,7 +58,6 @@ "lerna": "6.6.2", "mocha": "10.2.0", "nyc": "15.1.0", - "protobufjs-cli": "1.1.2", "sinon": "15.1.2", "ts-loader": "8.4.0", "ts-mocha": "10.0.0", diff --git a/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts b/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts index cfa7fba3d73..fb75bcf19c5 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts +++ b/experimental/packages/otlp-grpc-exporter-base/src/OTLPGRPCExporterNodeBase.ts @@ -27,7 +27,7 @@ import { GrpcExporterTransport, } from './grpc-exporter-transport'; import { configureCompression, configureCredentials } from './util'; -import { ISerializer } from './serializers'; +import { ISerializer } from '@opentelemetry/otlp-transformer'; import { IExporterTransport } from './exporter-transport'; /** @@ -45,14 +45,14 @@ export abstract class OTLPGRPCExporterNodeBase< grpcQueue: GRPCQueueItem[] = []; compression: CompressionAlgorithm; private _transport: IExporterTransport; - private _serializer: ISerializer; + private _serializer: ISerializer; constructor( config: OTLPGRPCExporterConfigNode = {}, signalSpecificMetadata: Record, grpcName: string, grpcPath: string, - serializer: ISerializer + serializer: ISerializer ) { super(config); this._serializer = serializer; @@ -114,6 +114,11 @@ export abstract class OTLPGRPCExporterNodeBase< this._transport.shutdown(); } + override convert(_objects: ExportItem[]): ServiceRequest { + // Not used, nothing to do. 
+ return {} as ServiceRequest; + } + send( objects: ExportItem[], onSuccess: () => void, @@ -124,8 +129,7 @@ export abstract class OTLPGRPCExporterNodeBase< return; } - const converted = this.convert(objects); - const data = this._serializer.serializeRequest(converted); + const data = this._serializer.serializeRequest(objects); if (data == null) { onError(new Error('Could not serialize message')); diff --git a/experimental/packages/otlp-grpc-exporter-base/src/index.ts b/experimental/packages/otlp-grpc-exporter-base/src/index.ts index 0e7f487e2be..566b12f42f9 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/index.ts +++ b/experimental/packages/otlp-grpc-exporter-base/src/index.ts @@ -17,9 +17,3 @@ export { OTLPGRPCExporterNodeBase } from './OTLPGRPCExporterNodeBase'; export { OTLPGRPCExporterConfigNode } from './types'; export { DEFAULT_COLLECTOR_URL, validateAndNormalizeUrl } from './util'; -export { - MetricsSerializer, - TraceSerializer, - LogsSerializer, - ISerializer, -} from './serializers'; diff --git a/experimental/packages/otlp-grpc-exporter-base/test/OTLPGRPCExporterNodeBase.test.ts b/experimental/packages/otlp-grpc-exporter-base/test/OTLPGRPCExporterNodeBase.test.ts index f70ffd08cd3..cca5434c9c7 100644 --- a/experimental/packages/otlp-grpc-exporter-base/test/OTLPGRPCExporterNodeBase.test.ts +++ b/experimental/packages/otlp-grpc-exporter-base/test/OTLPGRPCExporterNodeBase.test.ts @@ -21,7 +21,7 @@ import { OTLPGRPCExporterConfigNode } from '../src/types'; import { mockedReadableSpan } from './traceHelper'; import { ExportResponse, ExportResponseSuccess } from '../src/export-response'; import { IExporterTransport } from '../src/exporter-transport'; -import { ISerializer } from '../src'; +import { ISerializer } from '@opentelemetry/otlp-transformer'; import sinon = require('sinon'); class MockCollectorExporter extends OTLPGRPCExporterNodeBase< @@ -33,10 +33,6 @@ class MockCollectorExporter extends OTLPGRPCExporterNodeBase< return ''; } - convert(spans: ReadableSpan[]): ReadableSpan[] { - return spans; - } - getUrlFromConfig(config: OTLPGRPCExporterConfigNode): string { return ''; } diff --git a/experimental/packages/otlp-grpc-exporter-base/tsconfig.json b/experimental/packages/otlp-grpc-exporter-base/tsconfig.json index ddc035cf3a4..417e1ad4684 100644 --- a/experimental/packages/otlp-grpc-exporter-base/tsconfig.json +++ b/experimental/packages/otlp-grpc-exporter-base/tsconfig.json @@ -1,14 +1,11 @@ { "extends": "../../../tsconfig.base.json", "compilerOptions": { - "allowJs": true, "outDir": "build", "rootDir": "." }, "include": [ "src/**/*.ts", - "src/generated/**/*.js", - "src/generated/**/*.ts", "test/**/*.ts" ], "references": [ diff --git a/experimental/packages/otlp-proto-exporter-base/src/platform/index.ts b/experimental/packages/otlp-proto-exporter-base/src/platform/index.ts index fc344756ea5..f6ead82b842 100644 --- a/experimental/packages/otlp-proto-exporter-base/src/platform/index.ts +++ b/experimental/packages/otlp-proto-exporter-base/src/platform/index.ts @@ -14,7 +14,6 @@ * limitations under the License. 
*/ -export { OTLPProtoExporterNodeBase, send } from './node'; export { OTLPProtoExporterBrowserBase } from './browser'; export { ServiceClientType } from './types'; export { ExportRequestType, getExportRequestProto } from './util'; diff --git a/experimental/packages/otlp-proto-exporter-base/src/platform/node/OTLPProtoExporterNodeBase.ts b/experimental/packages/otlp-proto-exporter-base/src/platform/node/OTLPProtoExporterNodeBase.ts deleted file mode 100644 index 17804c47b49..00000000000 --- a/experimental/packages/otlp-proto-exporter-base/src/platform/node/OTLPProtoExporterNodeBase.ts +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { diag } from '@opentelemetry/api'; -import { ServiceClientType } from '../types'; -import { - OTLPExporterNodeBase as OTLPExporterBaseMain, - CompressionAlgorithm, - OTLPExporterError, - OTLPExporterNodeConfigBase, -} from '@opentelemetry/otlp-exporter-base'; - -type SendFn = ( - collector: OTLPProtoExporterNodeBase, - objects: ExportItem[], - compression: CompressionAlgorithm, - onSuccess: () => void, - onError: (error: OTLPExporterError) => void -) => void; - -/** - * Collector Exporter abstract base class - */ -export abstract class OTLPProtoExporterNodeBase< - ExportItem, - ServiceRequest, -> extends OTLPExporterBaseMain { - private _send!: SendFn; - - constructor(config: OTLPExporterNodeConfigBase = {}) { - super(config); - } - - private _sendPromise( - objects: ExportItem[], - onSuccess: () => void, - onError: (error: OTLPExporterError) => void - ): void { - const promise = new Promise((resolve, reject) => { - this._send(this, objects, this.compression, resolve, reject); - }).then(onSuccess, onError); - - this._sendingPromises.push(promise); - const popPromise = () => { - const index = this._sendingPromises.indexOf(promise); - this._sendingPromises.splice(index, 1); - }; - promise.then(popPromise, popPromise); - } - - override send( - objects: ExportItem[], - onSuccess: () => void, - onError: (error: OTLPExporterError) => void - ): void { - if (this._shutdownOnce.isCalled) { - diag.debug('Shutdown already started. 
Cannot send objects'); - return; - } - if (!this._send) { - // defer to next tick and lazy load to avoid loading protobufjs too early - // and making this impossible to be instrumented - setImmediate(() => { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const { send } = require('./util'); - this._send = send; - this._sendPromise(objects, onSuccess, onError); - }); - } else { - this._sendPromise(objects, onSuccess, onError); - } - } - - abstract getServiceClientType(): ServiceClientType; -} diff --git a/experimental/packages/otlp-proto-exporter-base/src/platform/node/util.ts b/experimental/packages/otlp-proto-exporter-base/src/platform/node/util.ts deleted file mode 100644 index 36fe866d678..00000000000 --- a/experimental/packages/otlp-proto-exporter-base/src/platform/node/util.ts +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { OTLPProtoExporterNodeBase } from './OTLPProtoExporterNodeBase'; -import { - CompressionAlgorithm, - OTLPExporterError, - sendWithHttp, -} from '@opentelemetry/otlp-exporter-base'; - -import { getExportRequestProto } from '../util'; - -export function send( - collector: OTLPProtoExporterNodeBase, - objects: ExportItem[], - compression: CompressionAlgorithm, - onSuccess: () => void, - onError: (error: OTLPExporterError) => void -): void { - const serviceRequest = collector.convert(objects); - - const exportRequestType = getExportRequestProto( - collector.getServiceClientType() - ); - const message = exportRequestType.create(serviceRequest); - if (message) { - const body = exportRequestType.encode(message).finish(); - if (body) { - sendWithHttp( - collector, - Buffer.from(body), - 'application/x-protobuf', - onSuccess, - onError - ); - } - } else { - onError(new OTLPExporterError('No proto')); - } -} diff --git a/experimental/packages/otlp-transformer/.gitignore b/experimental/packages/otlp-transformer/.gitignore index ae2d1881105..2cded78f303 100644 --- a/experimental/packages/otlp-transformer/.gitignore +++ b/experimental/packages/otlp-transformer/.gitignore @@ -1,2 +1,3 @@ -src/generated -!src/logs \ No newline at end of file +src/generated/* +!src/generated/.gitkeep +!src/logs diff --git a/experimental/packages/otlp-transformer/package.json b/experimental/packages/otlp-transformer/package.json index d9be7799c45..fbd8895f2a3 100644 --- a/experimental/packages/otlp-transformer/package.json +++ b/experimental/packages/otlp-transformer/package.json @@ -14,16 +14,19 @@ "scripts": { "prepublishOnly": "npm run compile", "precompile": "cross-var lerna run version --scope $npm_package_name --include-dependencies", - "compile": "tsc --build tsconfig.json tsconfig.esm.json tsconfig.esnext.json", + "compile": "npm run protos && tsc --build tsconfig.json tsconfig.esm.json tsconfig.esnext.json", "clean": "tsc --build --clean tsconfig.json tsconfig.esm.json tsconfig.esnext.json", + "protos": "npm run submodule && npm run protos:generate", + "protos:generate": 
"node ../../../scripts/generate-protos.js", "lint": "eslint . --ext .ts", "lint:fix": "eslint . --ext .ts --fix", "tdd": "npm run test -- --watch-extensions ts --watch", + "submodule": "git submodule sync --recursive && git submodule update --init --recursive", "test": "nyc ts-mocha -p tsconfig.json 'test/**/*.test.ts'", "test:browser": "karma start --single-run", "test:bench": "node test/performance/benchmark/index.js | tee .benchmark-results.txt", "prewatch": "node ../../../scripts/version-update.js", - "watch": "tsc --build -w tsconfig.json tsconfig.esm.json tsconfig.esnext.json", + "watch": "npm run protos && tsc -w tsconfig.json tsconfig.esm.json tsconfig.esnext.json", "peer-api-check": "node ../../../scripts/peer-api-check.js", "codecov": "nyc report --reporter=json && codecov -f coverage/*.json -p ../../../" }, @@ -76,7 +79,8 @@ "ts-loader": "8.4.0", "ts-mocha": "10.0.0", "typescript": "4.4.4", - "webpack": "5.89.0" + "webpack": "5.89.0", + "protobufjs-cli": "1.1.2" }, "dependencies": { "@opentelemetry/api-logs": "0.51.0", @@ -84,7 +88,8 @@ "@opentelemetry/resources": "1.24.0", "@opentelemetry/sdk-logs": "0.51.0", "@opentelemetry/sdk-metrics": "1.24.0", - "@opentelemetry/sdk-trace-base": "1.24.0" + "@opentelemetry/sdk-trace-base": "1.24.0", + "protobufjs": "^7.2.3" }, "homepage": "https://github.com/open-telemetry/opentelemetry-js/tree/main/experimental/packages/otlp-transformer", "sideEffects": false diff --git a/experimental/packages/otlp-grpc-exporter-base/protos b/experimental/packages/otlp-transformer/protos similarity index 100% rename from experimental/packages/otlp-grpc-exporter-base/protos rename to experimental/packages/otlp-transformer/protos diff --git a/experimental/packages/otlp-proto-exporter-base/src/platform/node/index.ts b/experimental/packages/otlp-transformer/src/common/i-serializer.ts similarity index 69% rename from experimental/packages/otlp-proto-exporter-base/src/platform/node/index.ts rename to experimental/packages/otlp-transformer/src/common/i-serializer.ts index 08016fec935..c7cf7c9cba3 100644 --- a/experimental/packages/otlp-proto-exporter-base/src/platform/node/index.ts +++ b/experimental/packages/otlp-transformer/src/common/i-serializer.ts @@ -14,5 +14,10 @@ * limitations under the License. 
*/ -export { OTLPProtoExporterNodeBase } from './OTLPProtoExporterNodeBase'; -export { send } from './util'; +/** + * Serializes and deserializes the OTLP request/response to and from {@link Uint8Array} + */ +export interface ISerializer<Request, Response> { + serializeRequest(request: Request): Uint8Array | undefined; + deserializeResponse(data: Uint8Array): Response; +} diff --git a/experimental/packages/otlp-grpc-exporter-base/src/generated/.gitkeep b/experimental/packages/otlp-transformer/src/generated/.gitkeep similarity index 100% rename from experimental/packages/otlp-grpc-exporter-base/src/generated/.gitkeep rename to experimental/packages/otlp-transformer/src/generated/.gitkeep diff --git a/experimental/packages/otlp-transformer/src/index.ts b/experimental/packages/otlp-transformer/src/index.ts index 9c325644dc4..66040a07c0b 100644 --- a/experimental/packages/otlp-transformer/src/index.ts +++ b/experimental/packages/otlp-transformer/src/index.ts @@ -24,3 +24,17 @@ export * from './logs/types'; export { createExportTraceServiceRequest } from './trace'; export { createExportMetricsServiceRequest } from './metrics'; export { createExportLogsServiceRequest } from './logs'; + +export { + ProtobufLogsSerializer, + ProtobufMetricsSerializer, + ProtobufTraceSerializer, +} from './protobuf/serializers'; + +export { + JsonTraceSerializer, + JsonLogsSerializer, + JsonMetricsSerializer, +} from './json/serializers'; + +export { ISerializer } from './common/i-serializer'; diff --git a/experimental/packages/otlp-transformer/src/json/serializers.ts b/experimental/packages/otlp-transformer/src/json/serializers.ts new file mode 100644 index 00000000000..5d360faf47f --- /dev/null +++ b/experimental/packages/otlp-transformer/src/json/serializers.ts @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +import { ISerializer } from '../common/i-serializer'; +import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; +import { IExportTraceServiceResponse } from '../trace/types'; +import { createExportTraceServiceRequest } from '../trace'; +import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; +import { createExportMetricsServiceRequest } from '../metrics'; +import { ReadableLogRecord } from '@opentelemetry/sdk-logs'; +import { IExportMetricsServiceResponse } from '../metrics/types'; +import { IExportLogsServiceResponse } from '../logs/types'; +import { createExportLogsServiceRequest } from '../logs'; + +export const JsonTraceSerializer: ISerializer< + ReadableSpan[], + IExportTraceServiceResponse +> = { + serializeRequest: (arg: ReadableSpan[]) => { + const request = createExportTraceServiceRequest(arg, { + useHex: true, + useLongBits: false, + }); + const encoder = new TextEncoder(); + return encoder.encode(JSON.stringify(request)); + }, + deserializeResponse: (arg: Uint8Array) => { + const decoder = new TextDecoder(); + return JSON.parse(decoder.decode(arg)) as IExportTraceServiceResponse; + }, +}; + +export const JsonMetricsSerializer: ISerializer< + ResourceMetrics[], + IExportMetricsServiceResponse +> = { + serializeRequest: (arg: ResourceMetrics[]) => { + const request = createExportMetricsServiceRequest(arg, { + useLongBits: false, + }); + const encoder = new TextEncoder(); + return encoder.encode(JSON.stringify(request)); + }, + deserializeResponse: (arg: Uint8Array) => { + const decoder = new TextDecoder(); + return JSON.parse(decoder.decode(arg)) as IExportMetricsServiceResponse; + }, +}; + +export const JsonLogsSerializer: ISerializer< + ReadableLogRecord[], + IExportLogsServiceResponse +> = { + serializeRequest: (arg: ReadableLogRecord[]) => { + const request = createExportLogsServiceRequest(arg, { + useHex: true, + useLongBits: false, + }); + const encoder = new TextEncoder(); + return encoder.encode(JSON.stringify(request)); + }, + deserializeResponse: (arg: Uint8Array) => { + const decoder = new TextDecoder(); + return JSON.parse(decoder.decode(arg)) as IExportLogsServiceResponse; + }, +}; diff --git a/experimental/packages/otlp-grpc-exporter-base/src/internal-types.ts b/experimental/packages/otlp-transformer/src/protobuf/protobuf-export-type.ts similarity index 94% rename from experimental/packages/otlp-grpc-exporter-base/src/internal-types.ts rename to experimental/packages/otlp-transformer/src/protobuf/protobuf-export-type.ts index 8d108a0a229..c84d5311c13 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/internal-types.ts +++ b/experimental/packages/otlp-transformer/src/protobuf/protobuf-export-type.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import type * as protobuf from 'protobufjs'; +import * as protobuf from 'protobufjs'; export interface ExportType unknown }> { encode(message: T, writer?: protobuf.Writer): protobuf.Writer; diff --git a/experimental/packages/otlp-grpc-exporter-base/src/serializers.ts b/experimental/packages/otlp-transformer/src/protobuf/serializers.ts similarity index 57% rename from experimental/packages/otlp-grpc-exporter-base/src/serializers.ts rename to experimental/packages/otlp-transformer/src/protobuf/serializers.ts index 3e8b22f6053..ce2d0d94098 100644 --- a/experimental/packages/otlp-grpc-exporter-base/src/serializers.ts +++ b/experimental/packages/otlp-transformer/src/protobuf/serializers.ts @@ -14,16 +14,27 @@ * limitations under the License. 
*/ -import * as root from './generated/root'; +import * as root from '../generated/root'; +import { ISerializer } from '../common/i-serializer'; import { - IExportLogsServiceRequest, - IExportLogsServiceResponse, IExportMetricsServiceRequest, IExportMetricsServiceResponse, +} from '../metrics/types'; +import { ExportType } from './protobuf-export-type'; +import { IExportTraceServiceRequest, IExportTraceServiceResponse, -} from '@opentelemetry/otlp-transformer'; -import { ExportType } from './internal-types'; +} from '../trace/types'; +import { + IExportLogsServiceRequest, + IExportLogsServiceResponse, +} from '../logs/types'; +import { ReadableSpan } from '@opentelemetry/sdk-trace-base'; +import { createExportTraceServiceRequest } from '../trace'; +import { createExportMetricsServiceRequest } from '../metrics'; +import { ResourceMetrics } from '@opentelemetry/sdk-metrics'; +import { createExportLogsServiceRequest } from '../logs'; +import { ReadableLogRecord } from '@opentelemetry/sdk-logs'; const logsResponseType = root.opentelemetry.proto.collector.logs.v1 .ExportLogsServiceResponse as ExportType<IExportLogsServiceResponse>; @@ -43,46 +54,41 @@ const traceResponseType = root.opentelemetry.proto.collector.trace.v1 .ExportTraceServiceResponse as ExportType<IExportTraceServiceResponse>; const traceRequestType = root.opentelemetry.proto.collector.trace.v1 .ExportTraceServiceRequest as ExportType<IExportTraceServiceRequest>; -/** - * Serializes and deserializes the OTLP request/response to and from {@link Uint8Array} - */ -export interface ISerializer<Request, Response> { - serializeRequest(request: Request): Uint8Array | undefined; - deserializeResponse(data: Uint8Array): Response; -} - -export const LogsSerializer: ISerializer< - IExportLogsServiceRequest, +export const ProtobufLogsSerializer: ISerializer< + ReadableLogRecord[], IExportLogsServiceResponse > = { - serializeRequest: (arg: IExportLogsServiceRequest) => { - return Buffer.from(logsRequestType.encode(arg).finish()); + serializeRequest: (arg: ReadableLogRecord[]) => { + const request = createExportLogsServiceRequest(arg); + return logsRequestType.encode(request).finish(); }, - deserializeResponse: (arg: Buffer) => { + deserializeResponse: (arg: Uint8Array) => { return logsResponseType.decode(arg); }, }; -export const TraceSerializer: ISerializer< - IExportTraceServiceRequest, - IExportTraceServiceResponse +export const ProtobufMetricsSerializer: ISerializer< + ResourceMetrics[], + IExportMetricsServiceResponse > = { - serializeRequest: (arg: IExportTraceServiceRequest) => { - return Buffer.from(traceRequestType.encode(arg).finish()); + serializeRequest: (arg: ResourceMetrics[]) => { + const request = createExportMetricsServiceRequest(arg); + return metricsRequestType.encode(request).finish(); }, - deserializeResponse: (arg: Buffer) => { - return traceResponseType.decode(arg); + deserializeResponse: (arg: Uint8Array) => { + return metricsResponseType.decode(arg); }, }; -export const MetricsSerializer: ISerializer< - IExportMetricsServiceRequest, - IExportMetricsServiceResponse +export const ProtobufTraceSerializer: ISerializer< + ReadableSpan[], + IExportTraceServiceResponse > = { - serializeRequest: (arg: IExportMetricsServiceRequest) => { - return Buffer.from(metricsRequestType.encode(arg).finish()); + serializeRequest: (arg: ReadableSpan[]) => { + const request = createExportTraceServiceRequest(arg); + return traceRequestType.encode(request).finish(); }, - deserializeResponse: (arg: Buffer) => { - return metricsResponseType.decode(arg); + deserializeResponse: (arg: Uint8Array) => { + return traceResponseType.decode(arg); }, }; diff --git
a/experimental/packages/otlp-proto-exporter-base/submodule.md b/experimental/packages/otlp-transformer/submodule.md similarity index 94% rename from experimental/packages/otlp-proto-exporter-base/submodule.md rename to experimental/packages/otlp-transformer/submodule.md index 2b3be0c7b76..d5ebe6661ac 100644 --- a/experimental/packages/otlp-proto-exporter-base/submodule.md +++ b/experimental/packages/otlp-transformer/submodule.md @@ -40,7 +40,7 @@ Knowing this if you want to change the submodule to point to a different version 8. Now thing which is very important. You have to commit this to apply these changes ```shell script - git commit -am "chore: updating submodule for otlp-proto-exporter-base" + git commit -am "chore: updating submodule for otlp-transformer" ``` 9. If you look now at git log you will notice that the folder `protos` has been changed and it will show what was the previous sha and what is current one. diff --git a/experimental/packages/otlp-transformer/test/logs.test.ts b/experimental/packages/otlp-transformer/test/logs.test.ts index 278607ae232..b279ae75dae 100644 --- a/experimental/packages/otlp-transformer/test/logs.test.ts +++ b/experimental/packages/otlp-transformer/test/logs.test.ts @@ -21,11 +21,28 @@ import { createExportLogsServiceRequest, ESeverityNumber, IExportLogsServiceRequest, + ProtobufLogsSerializer, + JsonLogsSerializer, + OtlpEncodingOptions, } from '../src'; import { ReadableLogRecord } from '@opentelemetry/sdk-logs'; import { SeverityNumber } from '@opentelemetry/api-logs'; +import { toBase64 } from './utils'; +import * as root from '../src/generated/root'; + +function createExpectedLogJson( + options: OtlpEncodingOptions +): IExportLogsServiceRequest { + const useHex = options.useHex ?? false; + const useLongBits = options.useLongBits ?? true; + + const timeUnixNano = useLongBits + ? { low: 4132445859, high: 391214506 } + : '1680253513123241635'; + const observedTimeUnixNano = useLongBits + ? { low: 584929536, high: 391976663 } + : '1683526948965142784'; -function createExpectedLogJson(useHex: boolean): IExportLogsServiceRequest { const traceId = useHex ? 
'00000000000000000000000000000001' : hexToBinary('00000000000000000000000000000001'); @@ -52,8 +69,8 @@ function createExpectedLogJson(useHex: boolean): IExportLogsServiceRequest { }, logRecords: [ { - timeUnixNano: { low: 4132445859, high: 391214506 }, - observedTimeUnixNano: { low: 584929536, high: 391976663 }, + timeUnixNano, + observedTimeUnixNano, severityNumber: ESeverityNumber.SEVERITY_NUMBER_ERROR, severityText: 'error', body: { stringValue: 'some_log_body' }, @@ -78,78 +95,137 @@ function createExpectedLogJson(useHex: boolean): IExportLogsServiceRequest { }; } -describe('Logs', () => { - describe('createExportLogsServiceRequest', () => { - let resource_1: Resource; - let resource_2: Resource; - let scope_1: InstrumentationScope; - let scope_2: InstrumentationScope; - let log_1_1_1: ReadableLogRecord; - let log_1_1_2: ReadableLogRecord; - let log_1_2_1: ReadableLogRecord; - let log_2_1_1: ReadableLogRecord; - - beforeEach(() => { - resource_1 = new Resource({ - 'resource-attribute': 'some attribute value', - }); - resource_2 = new Resource({ - 'resource-attribute': 'another attribute value', - }); - scope_1 = { - name: 'scope_name_1', - version: '0.1.0', - schemaUrl: 'http://url.to.schema', - }; - scope_2 = { - name: 'scope_name_2', - }; - const log_fragment_1 = { - hrTime: [1680253513, 123241635] as HrTime, - hrTimeObserved: [1683526948, 965142784] as HrTime, - attributes: { - 'some-attribute': 'some attribute value', - }, - droppedAttributesCount: 0, - severityNumber: SeverityNumber.ERROR, - severityText: 'error', - body: 'some_log_body', - spanContext: { - spanId: '0000000000000002', - traceFlags: TraceFlags.SAMPLED, - traceId: '00000000000000000000000000000001', - }, - }; - const log_fragment_2 = { - hrTime: [1680253797, 687038506] as HrTime, - hrTimeObserved: [1680253797, 687038506] as HrTime, - attributes: { - 'another-attribute': 'another attribute value', +function createExpectedLogProtobuf(): IExportLogsServiceRequest { + const traceId = toBase64('00000000000000000000000000000001'); + const spanId = toBase64('0000000000000002'); + + return { + resourceLogs: [ + { + resource: { + attributes: [ + { + key: 'resource-attribute', + value: { stringValue: 'some attribute value' }, + }, + ], + droppedAttributesCount: 0, }, - droppedAttributesCount: 0, - }; - log_1_1_1 = { - ...log_fragment_1, - resource: resource_1, - instrumentationScope: scope_1, - }; - log_1_1_2 = { - ...log_fragment_2, - resource: resource_1, - instrumentationScope: scope_1, - }; - log_1_2_1 = { - ...log_fragment_1, - resource: resource_1, - instrumentationScope: scope_2, - }; - log_2_1_1 = { - ...log_fragment_1, - resource: resource_2, - instrumentationScope: scope_1, - }; + scopeLogs: [ + { + scope: { name: 'scope_name_1', version: '0.1.0' }, + logRecords: [ + { + timeUnixNano: 1680253513123241700, + observedTimeUnixNano: 1683526948965142800, + severityNumber: ESeverityNumber.SEVERITY_NUMBER_ERROR, + severityText: 'error', + body: { stringValue: 'some_log_body' }, + + attributes: [ + { + key: 'some-attribute', + value: { stringValue: 'some attribute value' }, + }, + ], + droppedAttributesCount: 0, + flags: 1, + traceId: traceId, + spanId: spanId, + }, + ], + schemaUrl: 'http://url.to.schema', + }, + ], + }, + ], + }; +} + +describe('Logs', () => { + let resource_1: Resource; + let resource_2: Resource; + let scope_1: InstrumentationScope; + let scope_2: InstrumentationScope; + + /* + The following log_X_Y_Z should follow the pattern + - X is the resource + - Y is the scope + - Z is the log fragment + */ + + 
// using `resource_1`, `scope_1`, `log_fragment_1` + let log_1_1_1: ReadableLogRecord; + // using `resource_1`, `scope_1`, `log_fragment_2` + let log_1_1_2: ReadableLogRecord; + // using `resource_1`, `scope_2`, `log_fragment_1` + let log_1_2_1: ReadableLogRecord; + // using `resource_2`, `scope_1`, `log_fragment_1` + let log_2_1_1: ReadableLogRecord; + + beforeEach(() => { + resource_1 = new Resource({ + 'resource-attribute': 'some attribute value', + }); + resource_2 = new Resource({ + 'resource-attribute': 'another attribute value', }); + scope_1 = { + name: 'scope_name_1', + version: '0.1.0', + schemaUrl: 'http://url.to.schema', + }; + scope_2 = { + name: 'scope_name_2', + }; + const log_fragment_1 = { + hrTime: [1680253513, 123241635] as HrTime, + hrTimeObserved: [1683526948, 965142784] as HrTime, + attributes: { + 'some-attribute': 'some attribute value', + }, + droppedAttributesCount: 0, + severityNumber: SeverityNumber.ERROR, + severityText: 'error', + body: 'some_log_body', + spanContext: { + spanId: '0000000000000002', + traceFlags: TraceFlags.SAMPLED, + traceId: '00000000000000000000000000000001', + }, + }; + const log_fragment_2 = { + hrTime: [1680253797, 687038506] as HrTime, + hrTimeObserved: [1680253797, 687038506] as HrTime, + attributes: { + 'another-attribute': 'another attribute value', + }, + droppedAttributesCount: 0, + }; + log_1_1_1 = { + ...log_fragment_1, + resource: resource_1, + instrumentationScope: scope_1, + }; + log_1_1_2 = { + ...log_fragment_2, + resource: resource_1, + instrumentationScope: scope_1, + }; + log_1_2_1 = { + ...log_fragment_1, + resource: resource_1, + instrumentationScope: scope_2, + }; + log_2_1_1 = { + ...log_fragment_1, + resource: resource_2, + instrumentationScope: scope_1, + }; + }); + describe('createExportLogsServiceRequest', () => { it('returns null on an empty list', () => { assert.deepStrictEqual( createExportLogsServiceRequest([], { useHex: true }), @@ -164,7 +240,10 @@ describe('Logs', () => { useHex: true, }); assert.ok(exportRequest); - assert.deepStrictEqual(exportRequest, createExpectedLogJson(true)); + assert.deepStrictEqual( + exportRequest, + createExpectedLogJson({ useHex: true }) + ); }); it('serializes a log record with useHex = false', () => { @@ -172,7 +251,10 @@ describe('Logs', () => { useHex: false, }); assert.ok(exportRequest); - assert.deepStrictEqual(exportRequest, createExpectedLogJson(false)); + assert.deepStrictEqual( + exportRequest, + createExpectedLogJson({ useHex: false }) + ); }); it('aggregates multiple logs with same resource and same scope', () => { @@ -208,4 +290,97 @@ describe('Logs', () => { assert.strictEqual(exportRequest.resourceLogs?.length, 2); }); }); + + describe('ProtobufLogsSerializer', function () { + it('serializes an export request', () => { + const serialized = ProtobufLogsSerializer.serializeRequest([log_1_1_1]); + assert.ok(serialized, 'serialized response is undefined'); + const decoded = + root.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest.decode( + serialized + ); + + const expected = createExpectedLogProtobuf(); + const decodedObj = + root.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest.toObject( + decoded, + { + // This incurs some precision loss that's taken into account in createExpectedLogsProtobuf() + // Using String here will incur the same precision loss on browser only, using Number to prevent having to + // have different assertions for browser and Node.js + longs: Number, + // Convert to String (Base64) as otherwise the type will be 
different for Node.js (Buffer) and Browser (Uint8Array) + // and this fails assertions. + bytes: String, + } + ); + + assert.deepStrictEqual(decodedObj, expected); + }); + + it('deserializes a response', () => { + const protobufSerializedResponse = + root.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse.encode( + { + partialSuccess: { + errorMessage: 'foo', + rejectedLogRecords: 1, + }, + } + ).finish(); + + const deserializedResponse = ProtobufLogsSerializer.deserializeResponse( + protobufSerializedResponse + ); + + assert.ok( + deserializedResponse.partialSuccess, + 'partialSuccess not present in the deserialized message' + ); + assert.equal(deserializedResponse.partialSuccess.errorMessage, 'foo'); + assert.equal( + Number(deserializedResponse.partialSuccess.rejectedLogRecords), + 1 + ); + }); + }); + + describe('JsonLogsSerializer', function () { + it('serializes an export request', () => { + // stringify, then parse to remove undefined keys in the expected JSON + const expected = JSON.parse( + JSON.stringify( + createExpectedLogJson({ useHex: true, useLongBits: false }) + ) + ); + const serialized = JsonLogsSerializer.serializeRequest([log_1_1_1]); + + const decoder = new TextDecoder(); + assert.deepStrictEqual(JSON.parse(decoder.decode(serialized)), expected); + }); + + it('deserializes a response', () => { + const expectedResponse = { + partialSuccess: { + errorMessage: 'foo', + rejectedLogRecords: 1, + }, + }; + const encoder = new TextEncoder(); + const encodedResponse = encoder.encode(JSON.stringify(expectedResponse)); + + const deserializedResponse = + JsonLogsSerializer.deserializeResponse(encodedResponse); + + assert.ok( + deserializedResponse.partialSuccess, + 'partialSuccess not present in the deserialized message' + ); + assert.equal(deserializedResponse.partialSuccess.errorMessage, 'foo'); + assert.equal( + Number(deserializedResponse.partialSuccess.rejectedLogRecords), + 1 + ); + }); + }); }); diff --git a/experimental/packages/otlp-transformer/test/metrics.test.ts b/experimental/packages/otlp-transformer/test/metrics.test.ts index 526458e6585..54d67cde86f 100644 --- a/experimental/packages/otlp-transformer/test/metrics.test.ts +++ b/experimental/packages/otlp-transformer/test/metrics.test.ts @@ -25,8 +25,14 @@ import { import * as assert from 'assert'; import { createExportMetricsServiceRequest } from '../src/metrics'; import { EAggregationTemporality } from '../src/metrics/types'; -import { hrTime } from '@opentelemetry/core'; -import { encodeAsLongBits } from '../src'; +import { hrTime, hrTimeToNanoseconds } from '@opentelemetry/core'; +import { + encodeAsString, + encodeAsLongBits, + ProtobufMetricsSerializer, + JsonMetricsSerializer, +} from '../src'; +import * as root from '../src/generated/root'; const START_TIME = hrTime(); const END_TIME = hrTime(); @@ -39,293 +45,293 @@ const ATTRIBUTES = { }; describe('Metrics', () => { - describe('createExportMetricsServiceRequest', () => { - const expectedResource = { - attributes: [ - { - key: 'resource-attribute', - value: { - stringValue: 'resource attribute value', - }, - }, - ], - droppedAttributesCount: 0, - }; - - const expectedScope = { - name: 'mylib', - version: '0.1.0', - }; - - const expectedSchemaUrl = 'http://url.to.schema'; - - const expectedAttributes = [ + const expectedResource = { + attributes: [ { - key: 'string-attribute', + key: 'resource-attribute', value: { - stringValue: 'some attribute value', + stringValue: 'resource attribute value', }, }, - { - key: 'int-attribute', - value: { - 
intValue: 1, - }, + ], + droppedAttributesCount: 0, + }; + + const expectedScope = { + name: 'mylib', + version: '0.1.0', + }; + + const expectedSchemaUrl = 'http://url.to.schema'; + + const expectedAttributes = [ + { + key: 'string-attribute', + value: { + stringValue: 'some attribute value', }, - { - key: 'double-attribute', - value: { - doubleValue: 1.1, - }, + }, + { + key: 'int-attribute', + value: { + intValue: 1, }, - { - key: 'boolean-attribute', - value: { - boolValue: true, - }, + }, + { + key: 'double-attribute', + value: { + doubleValue: 1.1, }, - { - key: 'array-attribute', - value: { - arrayValue: { - values: [ - { - stringValue: 'attribute value 1', - }, - { - stringValue: 'attribute value 2', - }, - ], - }, - }, + }, + { + key: 'boolean-attribute', + value: { + boolValue: true, }, - ]; - - function createCounterData( - value: number, - aggregationTemporality: AggregationTemporality - ): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.COUNTER, - name: 'counter', - unit: '1', - valueType: ValueType.INT, + }, + { + key: 'array-attribute', + value: { + arrayValue: { + values: [ + { + stringValue: 'attribute value 1', + }, + { + stringValue: 'attribute value 2', + }, + ], }, - aggregationTemporality, - dataPointType: DataPointType.SUM, - isMonotonic: true, - dataPoints: [ - { - value: value, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, - }, - ], - }; - } - - function createUpDownCounterData( - value: number, - aggregationTemporality: AggregationTemporality - ): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.UP_DOWN_COUNTER, - name: 'up-down-counter', - unit: '1', - valueType: ValueType.INT, + }, + }, + ]; + + function createCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.COUNTER, + name: 'counter', + unit: '1', + valueType: ValueType.INT, + }, + aggregationTemporality, + dataPointType: DataPointType.SUM, + isMonotonic: true, + dataPoints: [ + { + value: value, + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, }, - aggregationTemporality, - dataPointType: DataPointType.SUM, - isMonotonic: false, - dataPoints: [ - { - value: value, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, - }, - ], - }; - } - - function createObservableCounterData( - value: number, - aggregationTemporality: AggregationTemporality - ): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.OBSERVABLE_COUNTER, - name: 'observable-counter', - unit: '1', - valueType: ValueType.INT, + ], + }; + } + + function createUpDownCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.UP_DOWN_COUNTER, + name: 'up-down-counter', + unit: '1', + valueType: ValueType.INT, + }, + aggregationTemporality, + dataPointType: DataPointType.SUM, + isMonotonic: false, + dataPoints: [ + { + value: value, + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, }, - aggregationTemporality, - dataPointType: DataPointType.SUM, - isMonotonic: true, - dataPoints: [ - { - value: value, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, - }, - ], - }; - } - - function createObservableUpDownCounterData( - value: 
number, - aggregationTemporality: AggregationTemporality - ): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, - name: 'observable-up-down-counter', - unit: '1', - valueType: ValueType.INT, + ], + }; + } + + function createObservableCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.OBSERVABLE_COUNTER, + name: 'observable-counter', + unit: '1', + valueType: ValueType.INT, + }, + aggregationTemporality, + dataPointType: DataPointType.SUM, + isMonotonic: true, + dataPoints: [ + { + value: value, + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, }, - aggregationTemporality, - dataPointType: DataPointType.SUM, - isMonotonic: false, - dataPoints: [ - { - value: value, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, - }, - ], - }; - } - - function createObservableGaugeData(value: number): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.OBSERVABLE_GAUGE, - name: 'gauge', - unit: '1', - valueType: ValueType.DOUBLE, + ], + }; + } + + function createObservableUpDownCounterData( + value: number, + aggregationTemporality: AggregationTemporality + ): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, + name: 'observable-up-down-counter', + unit: '1', + valueType: ValueType.INT, + }, + aggregationTemporality, + dataPointType: DataPointType.SUM, + isMonotonic: false, + dataPoints: [ + { + value: value, + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, }, - aggregationTemporality: AggregationTemporality.CUMULATIVE, - dataPointType: DataPointType.GAUGE, - dataPoints: [ - { - value: value, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, - }, - ], - }; - } - - function createHistogramMetrics( - count: number, - sum: number, - boundaries: number[], - counts: number[], - aggregationTemporality: AggregationTemporality, - min?: number, - max?: number - ): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.HISTOGRAM, - name: 'hist', - unit: '1', - valueType: ValueType.INT, + ], + }; + } + + function createObservableGaugeData(value: number): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.OBSERVABLE_GAUGE, + name: 'gauge', + unit: '1', + valueType: ValueType.DOUBLE, + }, + aggregationTemporality: AggregationTemporality.CUMULATIVE, + dataPointType: DataPointType.GAUGE, + dataPoints: [ + { + value: value, + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, }, - aggregationTemporality, - dataPointType: DataPointType.HISTOGRAM, - dataPoints: [ - { - value: { - sum: sum, - count: count, - min: min, - max: max, - buckets: { - boundaries: boundaries, - counts: counts, - }, + ], + }; + } + + function createHistogramMetrics( + count: number, + sum: number, + boundaries: number[], + counts: number[], + aggregationTemporality: AggregationTemporality, + min?: number, + max?: number + ): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.HISTOGRAM, + name: 'hist', + unit: '1', + valueType: ValueType.INT, + }, + aggregationTemporality, + dataPointType: DataPointType.HISTOGRAM, + dataPoints: [ + { + value: { + sum: 
sum, + count: count, + min: min, + max: max, + buckets: { + boundaries: boundaries, + counts: counts, }, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, }, - ], - }; - } - - function createExponentialHistogramMetrics( - count: number, - sum: number, - scale: number, - zeroCount: number, - positive: { offset: number; bucketCounts: number[] }, - negative: { offset: number; bucketCounts: number[] }, - aggregationTemporality: AggregationTemporality, - min?: number, - max?: number - ): MetricData { - return { - descriptor: { - description: 'this is a description', - type: InstrumentType.HISTOGRAM, - name: 'xhist', - unit: '1', - valueType: ValueType.INT, + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, }, - aggregationTemporality, - dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM, - dataPoints: [ - { - value: { - sum: sum, - count: count, - min: min, - max: max, - zeroCount: zeroCount, - scale: scale, - positive: positive, - negative: negative, - }, - startTime: START_TIME, - endTime: END_TIME, - attributes: ATTRIBUTES, + ], + }; + } + + function createExponentialHistogramMetrics( + count: number, + sum: number, + scale: number, + zeroCount: number, + positive: { offset: number; bucketCounts: number[] }, + negative: { offset: number; bucketCounts: number[] }, + aggregationTemporality: AggregationTemporality, + min?: number, + max?: number + ): MetricData { + return { + descriptor: { + description: 'this is a description', + type: InstrumentType.HISTOGRAM, + name: 'xhist', + unit: '1', + valueType: ValueType.INT, + }, + aggregationTemporality, + dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM, + dataPoints: [ + { + value: { + sum: sum, + count: count, + min: min, + max: max, + zeroCount: zeroCount, + scale: scale, + positive: positive, + negative: negative, }, - ], - }; - } + startTime: START_TIME, + endTime: END_TIME, + attributes: ATTRIBUTES, + }, + ], + }; + } - function createResourceMetrics(metricData: MetricData[]): ResourceMetrics { - const resource = new Resource({ - 'resource-attribute': 'resource attribute value', - }); - return { - resource: resource, - scopeMetrics: [ - { - scope: { - name: 'mylib', - version: '0.1.0', - schemaUrl: expectedSchemaUrl, - }, - metrics: metricData, + function createResourceMetrics(metricData: MetricData[]): ResourceMetrics { + const resource = new Resource({ + 'resource-attribute': 'resource attribute value', + }); + return { + resource: resource, + scopeMetrics: [ + { + scope: { + name: 'mylib', + version: '0.1.0', + schemaUrl: expectedSchemaUrl, }, - ], - }; - } + metrics: metricData, + }, + ], + }; + } + describe('createExportMetricsServiceRequest', () => { it('serializes a monotonic sum metric record', () => { const metrics = createResourceMetrics([ createCounterData(10, AggregationTemporality.DELTA), @@ -779,4 +785,160 @@ describe('Metrics', () => { }); }); }); + + describe('ProtobufMetricsSerializer', function () { + it('serializes an export request', () => { + const serialized = ProtobufMetricsSerializer.serializeRequest([ + createResourceMetrics([ + createCounterData(10, AggregationTemporality.DELTA), + ]), + ]); + assert.ok(serialized, 'serialized response is undefined'); + const decoded = + root.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest.decode( + serialized + ); + + const decodedObj = + root.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest.toObject( + decoded, + { + longs: Number, + } + ); + + const expected = { + resourceMetrics: [ + { + 
resource: expectedResource, + scopeMetrics: [ + { + scope: expectedScope, + schemaUrl: expectedSchemaUrl, + metrics: [ + { + name: 'counter', + description: 'this is a description', + unit: '1', + sum: { + dataPoints: [ + { + attributes: expectedAttributes, + startTimeUnixNano: hrTimeToNanoseconds(START_TIME), + timeUnixNano: hrTimeToNanoseconds(END_TIME), + asInt: 10, + }, + ], + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + isMonotonic: true, + }, + }, + ], + }, + ], + }, + ], + }; + assert.deepStrictEqual(decodedObj, expected); + }); + + it('deserializes a response', () => { + const protobufSerializedResponse = + root.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse.encode( + { + partialSuccess: { + errorMessage: 'foo', + rejectedDataPoints: 1, + }, + } + ).finish(); + + const deserializedResponse = + ProtobufMetricsSerializer.deserializeResponse( + protobufSerializedResponse + ); + + assert.ok( + deserializedResponse.partialSuccess, + 'partialSuccess not present in the deserialized message' + ); + assert.equal(deserializedResponse.partialSuccess.errorMessage, 'foo'); + assert.equal( + Number(deserializedResponse.partialSuccess.rejectedDataPoints), + 1 + ); + }); + }); + + describe('JsonMetricsSerializer', function () { + it('serializes an export request', () => { + const serialized = JsonMetricsSerializer.serializeRequest([ + createResourceMetrics([ + createCounterData(10, AggregationTemporality.DELTA), + ]), + ]); + + const decoder = new TextDecoder(); + const expected = { + resourceMetrics: [ + { + resource: expectedResource, + scopeMetrics: [ + { + scope: expectedScope, + schemaUrl: expectedSchemaUrl, + metrics: [ + { + name: 'counter', + description: 'this is a description', + unit: '1', + sum: { + dataPoints: [ + { + attributes: expectedAttributes, + startTimeUnixNano: encodeAsString(START_TIME), + timeUnixNano: encodeAsString(END_TIME), + asInt: 10, + }, + ], + aggregationTemporality: + EAggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, + isMonotonic: true, + }, + }, + ], + }, + ], + }, + ], + }; + + assert.deepStrictEqual(JSON.parse(decoder.decode(serialized)), expected); + }); + + it('deserializes a response', () => { + const expectedResponse = { + partialSuccess: { + errorMessage: 'foo', + rejectedDataPoints: 1, + }, + }; + const encoder = new TextEncoder(); + const encodedResponse = encoder.encode(JSON.stringify(expectedResponse)); + + const deserializedResponse = + JsonMetricsSerializer.deserializeResponse(encodedResponse); + + assert.ok( + deserializedResponse.partialSuccess, + 'partialSuccess not present in the deserialized message' + ); + assert.equal(deserializedResponse.partialSuccess.errorMessage, 'foo'); + assert.equal( + Number(deserializedResponse.partialSuccess.rejectedDataPoints), + 1 + ); + }); + }); }); diff --git a/experimental/packages/otlp-transformer/test/trace.test.ts b/experimental/packages/otlp-transformer/test/trace.test.ts index 30aeeec1586..b740430dc30 100644 --- a/experimental/packages/otlp-transformer/test/trace.test.ts +++ b/experimental/packages/otlp-transformer/test/trace.test.ts @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +import * as root from '../src/generated/root'; import { SpanKind, SpanStatusCode, TraceFlags } from '@opentelemetry/api'; import { TraceState, hexToBinary } from '@opentelemetry/core'; import { Resource } from '@opentelemetry/resources'; @@ -23,7 +24,10 @@ import { createExportTraceServiceRequest, ESpanKind, EStatusCode, + ProtobufTraceSerializer, + JsonTraceSerializer, } from '../src'; +import { toBase64 } from './utils'; function createExpectedSpanJson(options: OtlpEncodingOptions) { const useHex = options.useHex ?? false; @@ -136,69 +140,158 @@ function createExpectedSpanJson(options: OtlpEncodingOptions) { }; } -describe('Trace', () => { - describe('createExportTraceServiceRequest', () => { - let resource: Resource; - let span: ReadableSpan; +function createExpectedSpanProtobuf() { + const startTime = 1640715557342725400; + const endTime = 1640715558642725400; + const eventTime = 1640715558542725400; - beforeEach(() => { - resource = new Resource({ - 'resource-attribute': 'resource attribute value', - }); - span = { - spanContext: () => ({ - spanId: '0000000000000002', - traceFlags: TraceFlags.SAMPLED, - traceId: '00000000000000000000000000000001', - isRemote: false, - traceState: new TraceState('span=bar'), - }), - parentSpanId: '0000000000000001', - attributes: { 'string-attribute': 'some attribute value' }, - duration: [1, 300000000], - endTime: [1640715558, 642725388], - ended: true, - events: [ - { - name: 'some event', - time: [1640715558, 542725388], - attributes: { - 'event-attribute': 'some string value', + const traceId = toBase64('00000000000000000000000000000001'); + const spanId = toBase64('0000000000000002'); + const parentSpanId = toBase64('0000000000000001'); + const linkSpanId = toBase64('0000000000000003'); + const linkTraceId = toBase64('00000000000000000000000000000002'); + + return { + resourceSpans: [ + { + resource: { + attributes: [ + { + key: 'resource-attribute', + value: { stringValue: 'resource attribute value' }, }, - }, - ], - instrumentationLibrary: { - name: 'myLib', - version: '0.1.0', - schemaUrl: 'http://url.to.schema', + ], + droppedAttributesCount: 0, }, - kind: SpanKind.CLIENT, - links: [ + scopeSpans: [ { - context: { - spanId: '0000000000000003', - traceId: '00000000000000000000000000000002', - traceFlags: TraceFlags.SAMPLED, - isRemote: false, - traceState: new TraceState('link=foo'), - }, - attributes: { - 'link-attribute': 'string value', - }, + scope: { name: 'myLib', version: '0.1.0' }, + spans: [ + { + traceId: traceId, + spanId: spanId, + traceState: 'span=bar', + parentSpanId: parentSpanId, + name: 'span-name', + kind: ESpanKind.SPAN_KIND_CLIENT, + links: [ + { + droppedAttributesCount: 0, + spanId: linkSpanId, + traceId: linkTraceId, + traceState: 'link=foo', + attributes: [ + { + key: 'link-attribute', + value: { + stringValue: 'string value', + }, + }, + ], + }, + ], + startTimeUnixNano: startTime, + endTimeUnixNano: endTime, + events: [ + { + droppedAttributesCount: 0, + attributes: [ + { + key: 'event-attribute', + value: { + stringValue: 'some string value', + }, + }, + ], + name: 'some event', + timeUnixNano: eventTime, + }, + ], + attributes: [ + { + key: 'string-attribute', + value: { stringValue: 'some attribute value' }, + }, + ], + droppedAttributesCount: 0, + droppedEventsCount: 0, + droppedLinksCount: 0, + status: { + code: EStatusCode.STATUS_CODE_OK, + }, + }, + ], + schemaUrl: 'http://url.to.schema', }, ], - name: 'span-name', - resource, - startTime: [1640715557, 342725388], - status: { - code: SpanStatusCode.OK, - }, 
- droppedAttributesCount: 0, - droppedEventsCount: 0, - droppedLinksCount: 0, - }; + }, + ], + }; +} + +describe('Trace', () => { + let resource: Resource; + let span: ReadableSpan; + + beforeEach(() => { + resource = new Resource({ + 'resource-attribute': 'resource attribute value', }); + span = { + spanContext: () => ({ + spanId: '0000000000000002', + traceFlags: TraceFlags.SAMPLED, + traceId: '00000000000000000000000000000001', + isRemote: false, + traceState: new TraceState('span=bar'), + }), + parentSpanId: '0000000000000001', + attributes: { 'string-attribute': 'some attribute value' }, + duration: [1, 300000000], + endTime: [1640715558, 642725388], + ended: true, + events: [ + { + name: 'some event', + time: [1640715558, 542725388], + attributes: { + 'event-attribute': 'some string value', + }, + }, + ], + instrumentationLibrary: { + name: 'myLib', + version: '0.1.0', + schemaUrl: 'http://url.to.schema', + }, + kind: SpanKind.CLIENT, + links: [ + { + context: { + spanId: '0000000000000003', + traceId: '00000000000000000000000000000002', + traceFlags: TraceFlags.SAMPLED, + isRemote: false, + traceState: new TraceState('link=foo'), + }, + attributes: { + 'link-attribute': 'string value', + }, + }, + ], + name: 'span-name', + resource, + startTime: [1640715557, 342725388], + status: { + code: SpanStatusCode.OK, + }, + droppedAttributesCount: 0, + droppedEventsCount: 0, + droppedLinksCount: 0, + }; + }); + describe('createExportTraceServiceRequest', () => { it('returns null on an empty list', () => { assert.deepStrictEqual( createExportTraceServiceRequest([], { useHex: true }), @@ -346,4 +439,100 @@ describe('Trace', () => { }); }); }); + + describe('ProtobufTracesSerializer', function () { + it('serializes an export request', () => { + const serialized = ProtobufTraceSerializer.serializeRequest([span]); + assert.ok(serialized, 'serialized response is undefined'); + const decoded = + root.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest.decode( + serialized + ); + + const expected = createExpectedSpanProtobuf(); + const decodedObj = + root.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest.toObject( + decoded, + { + // This incurs some precision loss that's taken into account in createExpectedSpanProtobuf() + // Using String here will incur the same precision loss on browser only, using Number to prevent having to + // have different assertions for browser and Node.js + longs: Number, + // Convert to String (Base64) as otherwise the type will be different for Node.js (Buffer) and Browser (Uint8Array) + // and this fails assertions. 
+ bytes: String, + } + ); + + assert.deepStrictEqual(decodedObj, expected); + }); + + it('deserializes a response', () => { + const protobufSerializedResponse = + root.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse.encode( + { + partialSuccess: { + errorMessage: 'foo', + rejectedSpans: 1, + }, + } + ).finish(); + + const deserializedResponse = ProtobufTraceSerializer.deserializeResponse( + protobufSerializedResponse + ); + + assert.ok( + deserializedResponse.partialSuccess, + 'partialSuccess not present in the deserialized message' + ); + assert.equal(deserializedResponse.partialSuccess.errorMessage, 'foo'); + assert.equal( + Number(deserializedResponse.partialSuccess.rejectedSpans), + 1 + ); + }); + }); + + describe('JsonTracesSerializer', function () { + it('serializes an export request', () => { + // stringify, then parse to remove undefined keys in the expected JSON + const expected = JSON.parse( + JSON.stringify( + createExpectedSpanJson({ + useHex: true, + useLongBits: false, + }) + ) + ); + const serialized = JsonTraceSerializer.serializeRequest([span]); + + const decoder = new TextDecoder(); + assert.deepStrictEqual(JSON.parse(decoder.decode(serialized)), expected); + }); + + it('deserializes a response', () => { + const expectedResponse = { + partialSuccess: { + errorMessage: 'foo', + rejectedSpans: 1, + }, + }; + const encoder = new TextEncoder(); + const encodedResponse = encoder.encode(JSON.stringify(expectedResponse)); + + const deserializedResponse = + JsonTraceSerializer.deserializeResponse(encodedResponse); + + assert.ok( + deserializedResponse.partialSuccess, + 'partialSuccess not present in the deserialized message' + ); + assert.equal(deserializedResponse.partialSuccess.errorMessage, 'foo'); + assert.equal( + Number(deserializedResponse.partialSuccess.rejectedSpans), + 1 + ); + }); + }); }); diff --git a/experimental/packages/otlp-transformer/test/utils.ts b/experimental/packages/otlp-transformer/test/utils.ts new file mode 100644 index 00000000000..d13c9cf314c --- /dev/null +++ b/experimental/packages/otlp-transformer/test/utils.ts @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { hexToBinary } from '@opentelemetry/core'; + +/** + * utility function to convert a string representing a hex value to a base64 string + * that represents the bytes of that hex value. This is needed as we need to support Node.js 14 + * where btoa() does not exist, and the Browser, where Buffer does not exist. 
+ * @param hexStr + */ +export function toBase64(hexStr: string) { + if (typeof btoa !== 'undefined') { + const decoder = new TextDecoder('utf8'); + return btoa(decoder.decode(hexToBinary(hexStr))); + } + + return Buffer.from(hexToBinary(hexStr)).toString('base64'); +} diff --git a/experimental/packages/otlp-transformer/tsconfig.esm.json b/experimental/packages/otlp-transformer/tsconfig.esm.json index 0dc676825ce..b2f0a9b99c9 100644 --- a/experimental/packages/otlp-transformer/tsconfig.esm.json +++ b/experimental/packages/otlp-transformer/tsconfig.esm.json @@ -1,12 +1,15 @@ { "extends": "../../../tsconfig.base.esm.json", "compilerOptions": { + "allowJs": true, "outDir": "build/esm", "rootDir": "src", "tsBuildInfoFile": "build/esm/tsconfig.esm.tsbuildinfo" }, "include": [ - "src/**/*.ts" + "src/**/*.ts", + "src/generated/*.js", + "src/generated/*.ts" ], "references": [ { diff --git a/experimental/packages/otlp-transformer/tsconfig.esnext.json b/experimental/packages/otlp-transformer/tsconfig.esnext.json index 56f7465cebf..ff98b47cd16 100644 --- a/experimental/packages/otlp-transformer/tsconfig.esnext.json +++ b/experimental/packages/otlp-transformer/tsconfig.esnext.json @@ -1,12 +1,15 @@ { "extends": "../../../tsconfig.base.esnext.json", "compilerOptions": { + "allowJs": true, "outDir": "build/esnext", "rootDir": "src", "tsBuildInfoFile": "build/esnext/tsconfig.esnext.tsbuildinfo" }, "include": [ - "src/**/*.ts" + "src/**/*.ts", + "src/generated/*.js", + "src/generated/*.ts" ], "references": [ { diff --git a/experimental/packages/otlp-transformer/tsconfig.json b/experimental/packages/otlp-transformer/tsconfig.json index cb92fea5754..d68d60fa7e7 100644 --- a/experimental/packages/otlp-transformer/tsconfig.json +++ b/experimental/packages/otlp-transformer/tsconfig.json @@ -1,12 +1,15 @@ { "extends": "../../../tsconfig.base.json", "compilerOptions": { + "allowJs": true, "outDir": "build", "rootDir": "." 
}, "files": [], "include": [ "src/**/*.ts", + "src/generated/*.js", + "src/generated/*.ts", "test/**/*.ts" ], "references": [ diff --git a/package-lock.json b/package-lock.json index 74e42e92fff..4cd1dfeb25d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5227,7 +5227,8 @@ "version": "0.51.0", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/core": "1.24.0" + "@opentelemetry/core": "1.24.0", + "@opentelemetry/otlp-transformer": "0.51.0" }, "devDependencies": { "@babel/core": "7.23.6", @@ -5639,7 +5640,6 @@ "lerna": "6.6.2", "mocha": "10.2.0", "nyc": "15.1.0", - "protobufjs-cli": "1.1.2", "sinon": "15.1.2", "ts-loader": "8.4.0", "ts-mocha": "10.0.0", @@ -5909,7 +5909,8 @@ "@opentelemetry/resources": "1.24.0", "@opentelemetry/sdk-logs": "0.51.0", "@opentelemetry/sdk-metrics": "1.24.0", - "@opentelemetry/sdk-trace-base": "1.24.0" + "@opentelemetry/sdk-trace-base": "1.24.0", + "protobufjs": "^7.2.3" }, "devDependencies": { "@opentelemetry/api": "1.8.0", @@ -5927,6 +5928,7 @@ "lerna": "6.6.2", "mocha": "10.2.0", "nyc": "15.1.0", + "protobufjs-cli": "1.1.2", "ts-loader": "8.4.0", "ts-mocha": "10.0.0", "typescript": "4.4.4", @@ -47424,6 +47426,7 @@ "@babel/preset-env": "7.22.20", "@opentelemetry/api": "1.8.0", "@opentelemetry/core": "1.24.0", + "@opentelemetry/otlp-transformer": "0.51.0", "@types/mocha": "10.0.6", "@types/node": "18.6.5", "@types/sinon": "10.0.20", @@ -47714,7 +47717,6 @@ "mocha": "10.2.0", "nyc": "15.1.0", "protobufjs": "^7.2.3", - "protobufjs-cli": "1.1.2", "sinon": "15.1.2", "ts-loader": "8.4.0", "ts-mocha": "10.0.0", @@ -47948,6 +47950,8 @@ "lerna": "6.6.2", "mocha": "10.2.0", "nyc": "15.1.0", + "protobufjs": "^7.2.3", + "protobufjs-cli": "1.1.2", "ts-loader": "8.4.0", "ts-mocha": "10.0.0", "typescript": "4.4.4",
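Reviewer note (not part of the diff): a minimal sketch of how the serializers introduced above might be consumed by an exporter, assuming the exports shown in `src/index.ts` (including the trace/logs response types) are available from `@opentelemetry/otlp-transformer`. The `spans`, `logs`, and `responseBody` values are hypothetical placeholders standing in for data an exporter would already have.

```typescript
import {
  JsonTraceSerializer,
  ProtobufLogsSerializer,
  IExportTraceServiceResponse,
} from '@opentelemetry/otlp-transformer';
import type { ReadableSpan } from '@opentelemetry/sdk-trace-base';
import type { ReadableLogRecord } from '@opentelemetry/sdk-logs';

// Hypothetical inputs an exporter would already hold (placeholders, not real data).
declare const spans: ReadableSpan[];
declare const logs: ReadableLogRecord[];
declare const responseBody: Uint8Array;

// Serialize a trace export request as OTLP/JSON (UTF-8 bytes of the JSON request).
const jsonPayload: Uint8Array | undefined =
  JsonTraceSerializer.serializeRequest(spans);

// Serialize a logs export request as OTLP/protobuf.
const protoPayload: Uint8Array | undefined =
  ProtobufLogsSerializer.serializeRequest(logs);

// Deserialize a collector response body and inspect partial-success information.
const response: IExportTraceServiceResponse =
  JsonTraceSerializer.deserializeResponse(responseBody);
if (response.partialSuccess) {
  console.warn(
    `Collector rejected ${Number(response.partialSuccess.rejectedSpans)} span(s): ` +
      response.partialSuccess.errorMessage
  );
}
```

The payload bytes and the `'application/json'` content type (or protobuf equivalent) would then be handed to the transport in `OTLPExporterNodeBase`, which is what the constructor changes in the exporter packages above wire up.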