
Commit

Merge 2c6d78f into 6710cdd
blumamir authored Apr 26, 2022
2 parents 6710cdd + 2c6d78f commit fba60d2
Showing 34 changed files with 257 additions and 255 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -37,6 +37,7 @@ All notable changes to this project will be documented in this file.
* chore: require changelog entry to merge PR [#2847](https://github.com/open-telemetry/opentelemetry-js/pull/2847) @dyladan
* chore: remove peer API check [#2892](https://github.com/open-telemetry/opentelemetry-js/pull/2892) @dyladan
* chore: merge lerna subdirectories into a single monorepo [#2892](https://github.com/open-telemetry/opentelemetry-js/pull/2892) @dyladan
* chore: indent the code with eslint [#2923](https://github.com/open-telemetry/opentelemetry-js/pull/2923) @blumamir
* `opentelemetry-propagator-jaeger`
* [#2906](https://github.com/open-telemetry/opentelemetry-js/pull/2906) fix: support extract one digit '0' in jaeger traceFlag ([@shmilyoo](https://github.com/shmilyoo))

1 change: 1 addition & 0 deletions eslint.config.js
@@ -10,6 +10,7 @@ module.exports = {
"project": "./tsconfig.json"
},
rules: {
"indent": ["error", 2, { "SwitchCase": 1 }],
"no-trailing-spaces": "error",
"eol-last": "error",
"quotes": [2, "single", { "avoidEscape": true }],
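The new rule, "indent": ["error", 2, { "SwitchCase": 1 }], is what drives the re-indentation in the rest of this diff: it enforces two-space indentation and indents case clauses one level inside a switch statement. A small illustrative snippet (not taken from the repository) that satisfies the rule:

// Two-space indentation throughout; "SwitchCase": 1 indents case labels one level.
function describeStatus(status: number): string {
  switch (status) {
    case 200:
      return 'ok';
    case 404:
      return 'not found';
    default:
      return 'unknown';
  }
}
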
@@ -97,14 +97,14 @@ const testCollectorExporter = (params: TestParams) =>
);
const credentials = params.useTLS
? grpc.ServerCredentials.createSsl(
fs.readFileSync('./test/certs/ca.crt'),
[
{
cert_chain: fs.readFileSync('./test/certs/server.crt'),
private_key: fs.readFileSync('./test/certs/server.key'),
},
]
)
fs.readFileSync('./test/certs/ca.crt'),
[
{
cert_chain: fs.readFileSync('./test/certs/server.crt'),
private_key: fs.readFileSync('./test/certs/server.key'),
},
]
)
: grpc.ServerCredentials.createInsecure();
server.bindAsync(address, credentials, () => {
server.start();
@@ -120,10 +120,10 @@
beforeEach(done => {
const credentials = params.useTLS
? grpc.credentials.createSsl(
fs.readFileSync('./test/certs/ca.crt'),
fs.readFileSync('./test/certs/client.key'),
fs.readFileSync('./test/certs/client.crt')
)
fs.readFileSync('./test/certs/ca.crt'),
fs.readFileSync('./test/certs/client.key'),
fs.readFileSync('./test/certs/client.crt')
)
: undefined;
collectorExporter = new OTLPTraceExporter({
url: 'grpcs://' + address,
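
The beforeEach above wires the exporter for mutual TLS. Pulled out of the test harness, the same setup looks roughly like the sketch below; the exporter package name and the collector address are assumptions, while the grpc.credentials.createSsl argument order (CA certificate, client key, client certificate) and the grpcs:// URL scheme are taken from the diff.

import * as fs from 'fs';
import * as grpc from '@grpc/grpc-js';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc'; // package name assumed

// Mutual TLS needs the CA plus the client's key/certificate pair.
const credentials = grpc.credentials.createSsl(
  fs.readFileSync('./test/certs/ca.crt'),
  fs.readFileSync('./test/certs/client.key'),
  fs.readFileSync('./test/certs/client.crt')
);

const collectorExporter = new OTLPTraceExporter({
  url: 'grpcs://localhost:1501', // hypothetical collector address
  credentials,
});
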
@@ -203,12 +203,12 @@
describe('export - with gzip compression', () => {
beforeEach(() => {
const credentials = params.useTLS
? grpc.credentials.createSsl(
? grpc.credentials.createSsl(
fs.readFileSync('./test/certs/ca.crt'),
fs.readFileSync('./test/certs/client.key'),
fs.readFileSync('./test/certs/client.crt')
)
: undefined;
: undefined;
collectorExporter = new OTLPTraceExporter({
url: 'grpcs://' + address,
credentials,
@@ -254,12 +254,12 @@ const testCollectorExporter = (params: TestParams) =>
const envSource = process.env;
it('should return gzip compression algorithm on exporter', () => {
const credentials = params.useTLS
? grpc.credentials.createSsl(
? grpc.credentials.createSsl(
fs.readFileSync('./test/certs/ca.crt'),
fs.readFileSync('./test/certs/client.key'),
fs.readFileSync('./test/certs/client.crt')
)
: undefined;
: undefined;

envSource.OTEL_EXPORTER_OTLP_COMPRESSION='gzip';
collectorExporter = new OTLPTraceExporter({
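The compression tests above enable gzip through the standard environment variable: OTEL_EXPORTER_OTLP_COMPRESSION is set to 'gzip' before the exporter is created, and the test then asserts the exporter picked it up. A minimal sketch of the same idea outside the test harness (exporter package name and address are assumptions):

import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc'; // package name assumed

// Must be set before the exporter is constructed, as in the test above.
process.env.OTEL_EXPORTER_OTLP_COMPRESSION = 'gzip';

const collectorExporter = new OTLPTraceExporter({
  url: 'grpcs://localhost:1501', // hypothetical collector address
});
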
@@ -52,9 +52,9 @@ export class OTLPTraceExporter
return typeof config.url === 'string'
? config.url
: getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT.length > 0
? getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
: getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0
? appendResourcePathToUrlIfNotPresent(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH)
: DEFAULT_COLLECTOR_URL;
? getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
: getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0
? appendResourcePathToUrlIfNotPresent(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH)
: DEFAULT_COLLECTOR_URL;
}
}
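
The nested ternary above encodes a four-step fallback for the export URL. An equivalent sketch spelled out as an if/else chain (same names as in the diff; this is a restatement for readability, not the actual source):

function resolveTracesUrl(config: { url?: string }): string {
  // 1. An explicit url passed in code wins.
  if (typeof config.url === 'string') {
    return config.url;
  }
  // 2. Otherwise the signal-specific environment variable.
  if (getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT.length > 0) {
    return getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT;
  }
  // 3. Otherwise the generic endpoint, with the traces resource path appended if missing.
  if (getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0) {
    return appendResourcePathToUrlIfNotPresent(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH);
  }
  // 4. Finally the hard-coded default.
  return DEFAULT_COLLECTOR_URL;
}
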
@@ -55,10 +55,10 @@ export class OTLPTraceExporter
return typeof config.url === 'string'
? config.url
: getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT.length > 0
? getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
: getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0
? appendResourcePathToUrlIfNotPresent(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH)
: DEFAULT_COLLECTOR_URL;
? getEnv().OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
: getEnv().OTEL_EXPORTER_OTLP_ENDPOINT.length > 0
? appendResourcePathToUrlIfNotPresent(getEnv().OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_COLLECTOR_RESOURCE_PATH)
: DEFAULT_COLLECTOR_URL;
}

getServiceClientType() {
@@ -29,7 +29,7 @@
// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef
export const _globalThis: typeof globalThis =
typeof globalThis === 'object' ? globalThis :
typeof self === 'object' ? self :
typeof window === 'object' ? window :
typeof global === 'object' ? global :
typeof self === 'object' ? self :
typeof window === 'object' ? window :
typeof global === 'object' ? global :
{} as typeof globalThis;
@@ -23,12 +23,12 @@ import { OTLPExporterBase } from '@opentelemetry/otlp-exporter-base';
export class OTLPMetricExporterBase<T extends OTLPExporterBase<OTLPMetricExporterOptions,
ResourceMetrics,
otlpTypes.opentelemetryProto.collector.metrics.v1.ExportMetricsServiceRequest>>
implements PushMetricExporter {
implements PushMetricExporter {
public _otlpExporter: T;
protected _preferredAggregationTemporality: AggregationTemporality;

constructor(exporter: T,
config: OTLPMetricExporterOptions = defaultOptions) {
config: OTLPMetricExporterOptions = defaultOptions) {
this._otlpExporter = exporter;
this._preferredAggregationTemporality = config.aggregationTemporality ?? AggregationTemporality.CUMULATIVE;
}
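
The constructor above defaults aggregationTemporality to CUMULATIVE when the option is omitted (the ?? fallback). A minimal sketch of overriding it on a concrete exporter; the package and class name are assumptions, but any subclass of OTLPMetricExporterBase takes the same option:

import { AggregationTemporality } from '@opentelemetry/sdk-metrics-base';
import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http'; // package/class name assumed

// Leaving aggregationTemporality out would fall back to CUMULATIVE.
const exporter = new OTLPMetricExporter({
  aggregationTemporality: AggregationTemporality.DELTA,
});
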
@@ -50,114 +50,114 @@ describe('transformMetrics', () => {
});

it('should convert counter', async () => {
const counter = mockCounter();
counter.add(1);
const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime)
);
}
const counter = mockCounter();
counter.add(1);
const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime)
);
}
);

it('should convert double counter', async () => {
const doubleCounter = mockDoubleCounter();
doubleCounter.add(8);
const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureDoubleCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
);
}
const doubleCounter = mockDoubleCounter();
doubleCounter.add(8);
const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureDoubleCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
);
}
);

it('should convert observable gauge', async () => {
let count = 0;
mockObservableGauge(observableResult => {
count++;
observableResult.observe(getValue(count), {});
});

// collect three times.
await collect();
await collect();
const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureObservableGaugeIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
-1,
);
}
let count = 0;
mockObservableGauge(observableResult => {
count++;
observableResult.observe(getValue(count), {});
});

// collect three times.
await collect();
await collect();
const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureObservableGaugeIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
-1,
);
}
);


it('should convert observable counter', async () => {
mockObservableCounter(observableResult => {
observableResult.observe(1, {});
});

// collect three times.
await collect();
await collect();
const metrics = (await collect());
// TODO: Collect seems to not deliver the last observation -> why?

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureObservableCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
2,
);
}
mockObservableCounter(observableResult => {
observableResult.observe(1, {});
});

// collect three times.
await collect();
await collect();
const metrics = (await collect());
// TODO: Collect seems to not deliver the last observation -> why?

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureObservableCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
2,
);
}
);

it('should convert observable up-down counter', async () => {
mockObservableUpDownCounter(observableResult => {
observableResult.observe(1, {});
});

// collect three times.
await collect();
await collect();
const metrics = (await collect());
// TODO: Collect seems to not deliver the last observation -> why?

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureObservableUpDownCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
2,
);
}
mockObservableUpDownCounter(observableResult => {
observableResult.observe(1, {});
});

// collect three times.
await collect();
await collect();
const metrics = (await collect());
// TODO: Collect seems to not deliver the last observation -> why?

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureObservableUpDownCounterIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
2,
);
}
);

it('should convert observable histogram', async () => {
const histogram = mockHistogram();
histogram.record(7);
histogram.record(14);

const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureHistogramIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
[0, 100],
[0, 2, 0]
);
}
const histogram = mockHistogram();
histogram.record(7);
histogram.record(14);

const metrics = (await collect());

const metric = metrics.instrumentationLibraryMetrics[0].metrics[0];
ensureHistogramIsCorrect(
transform.toCollectorMetric(metric, AggregationTemporality.CUMULATIVE),
hrTimeToNanoseconds(metric.dataPoints[0].endTime),
hrTimeToNanoseconds(metric.dataPoints[0].startTime),
[0, 100],
[0, 2, 0]
);
}
);
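
The histogram assertion above ties the recorded values to the view's explicit bucket boundaries: with boundaries [0, 100], the values 7 and 14 both land in the (0, 100] bucket, giving the expected counts [0, 2, 0]. A hypothetical helper (not part of the SDK) that reproduces that bucketing rule:

function bucketCounts(boundaries: number[], values: number[]): number[] {
  // One bucket per boundary, plus an overflow bucket for values above the last boundary.
  const counts = new Array(boundaries.length + 1).fill(0);
  for (const value of values) {
    const index = boundaries.findIndex(boundary => value <= boundary);
    counts[index === -1 ? boundaries.length : index] += 1;
  }
  return counts;
}

bucketCounts([0, 100], [7, 14]); // [0, 2, 0]
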

it('should convert metric attributes value to string', () => {
@@ -133,13 +133,13 @@ export function mockHistogram(): Histogram {
const name = 'int-histogram';

meterProvider.addView({
aggregation: new ExplicitBucketHistogramAggregation([0, 100])
},
{
instrument: {
name: name
}
});
aggregation: new ExplicitBucketHistogramAggregation([0, 100])
},
{
instrument: {
name: name
}
});

return meter.createHistogram(name, {
description: 'sample histogram description',
@@ -209,8 +209,8 @@ describe('PrometheusExporter', () => {
);
let resolve: () => void;
const deferred = new Promise<void>(res => {
resolve = res;
});
resolve = res;
});
mockResponse.end.callsFake(() => resolve());
exporter.getMetricsRequestHandler(
(mockRequest as unknown) as http.IncomingMessage,
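The test above uses a small "deferred" pattern: create a Promise, capture its resolve function, and call it from the mocked response's end() so the test can await the handler finishing. The same pattern in isolation (the event source here is a stand-in, not part of the test):

import { EventEmitter } from 'events';

const source = new EventEmitter(); // stand-in for whatever signals completion

let resolveDone!: () => void;
const done = new Promise<void>(resolve => {
  resolveDone = resolve;
});

source.once('finished', () => resolveDone());
setTimeout(() => source.emit('finished'), 10); // simulate the async side effect

await done; // resolves once 'finished' has fired
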
@@ -37,8 +37,8 @@ const attributes = {

class TestMetricReader extends MetricReader {
constructor() {
super(AggregationTemporality.CUMULATIVE);
}
super(AggregationTemporality.CUMULATIVE);
}
async onForceFlush() {}
async onShutdown() {}
}
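TestMetricReader above is the smallest possible reader: cumulative temporality with no-op flush and shutdown. A hypothetical sketch of how such a reader is typically driven in these tests, assuming the experimental sdk-metrics-base API of this era (addMetricReader and collect are the assumed entry points):

import { MeterProvider } from '@opentelemetry/sdk-metrics-base';

const meterProvider = new MeterProvider();
const reader = new TestMetricReader();
meterProvider.addMetricReader(reader); // API name assumed

const meter = meterProvider.getMeter('example');
meter.createCounter('requests').add(1, { route: '/' });

// Pull metrics on demand instead of on a timer.
reader.collect().then(metrics => console.log(metrics));
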

0 comments on commit fba60d2
