feat: add support for pointing lambda metrics to generic downstream o…
bruuuuuuuce authored May 28, 2024
1 parent 732ec4d commit 78a7056
Showing 1 changed file with 75 additions and 1 deletion.
76 changes: 75 additions & 1 deletion src/goodmetrics/metricsSetups.ts
@@ -49,6 +49,41 @@ interface LightstepNativeLambdaOtlpProps {
onSendUnary?: (metrics: Metrics[]) => void;
}

interface RawNativeLambdaOtlpForLambdaProps {
/**
* programmatic access token for the otlp metric backend
*/
accessToken: string;
/**
* Name of the header to use for authentication. Ex. `api-token`
*/
authHeaderName: string;
/**
* Resource dimensions included on the OTLP Resource. Ex. AWS_REGION, ACCOUNT_ID, etc.
*/
resourceDimensions: Map<string, Dimension>;
/**
* Include resource dimensions on each metric instead of on the Resource. You'd use this for
* downstreams that either do not support or do something undesirable with Resource dimensions.
*/
sharedDimensions: Map<string, Dimension>;
/**
* example `ingest.lightstep.com`
*/
ingestUrl: string;
/**
* defaults to 443
*/
ingestPort?: number;
/**
* Called to log an error when sending a metrics batch fails
*/
logError: (message: string, error: unknown) => void;
/**
* Mostly for debugging purposes: logs after successfully sending metrics to the backend.
* Useful for telling whether the promise fully resolved.
*/
doLogSuccess?: boolean;
onSendUnary?: (metrics: Metrics[]) => void;
}
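
For illustration, a minimal sketch of props satisfying this interface (not part of the diff). The token source, header name, and ingest host are hypothetical placeholders, and the dimension maps are left empty because the Dimension shape is not shown here:

const exampleProps: RawNativeLambdaOtlpForLambdaProps = {
  accessToken: process.env.OTLP_ACCESS_TOKEN ?? '', // hypothetical env var
  authHeaderName: 'api-token', // example header name from the doc comment above
  ingestUrl: 'otlp-ingest.example.com', // hypothetical ingest host
  ingestPort: 443,
  resourceDimensions: new Map(), // populate with resource-level dimensions
  sharedDimensions: new Map(), // populate with dimensions shared across metrics
  logError: (message, error) => console.error(message, error),
  doLogSuccess: true,
  onSendUnary: metrics => console.log(`sending ${metrics.length} metrics`),
};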

interface ConfigureBatchedUnaryLightstepSinkProps {
batchSize: number;
batchMaxAgeSeconds: number;
@@ -165,7 +200,7 @@ export class MetricsSetups {
}

/**
* Configures a unary metric factory which will send and record metrics upon lambda
* Configures a unary metric factory pointing to a Lightstep downstream, which will send and record metrics upon lambda
* completion
* @param props
*/
@@ -205,6 +240,45 @@ export class MetricsSetups {
});
}

/**
* Configures a unary metric factory pointing to an arbitrary OTLP metrics backend, which will send and record metrics upon lambda
* completion
* @param props
*/
static rawNativeOtlpButItSendsMetricsUponRecordingForLambda(
props: RawNativeLambdaOtlpForLambdaProps
): MetricsFactory {
const headers = [new Header(props.authHeaderName, props.accessToken)];
const client = OpenTelemetryClient.connect({
sillyOtlpHostname: props.ingestUrl,
port: props.ingestPort ?? 443,
metricDimensions: props.sharedDimensions,
resourceDimensions: props.resourceDimensions,
interceptors: [
new HeaderInterceptorProvider(headers).createHeadersInterceptor(),
],
});
const unarySink: MetricsSink = {
close(): void {
client.close();
},
async emit(metrics: _Metrics): Promise<void> {
props.onSendUnary?.([metrics]);
try {
await client.sendMetricsBatch([metrics]);
props.doLogSuccess && console.log('metrics sent to backend');
} catch (e) {
props.logError('error while sending blocking metrics', e);
}
},
};

return new MetricsFactory({
metricsSink: unarySink,
totalTimeType: TotaltimeType.DistributionMilliseconds,
});
}
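
A hedged usage sketch (also not part of the diff): wiring the factory into a Lambda handler, assuming the props object sketched above and that the library is imported as goodmetrics-nodejs (assumed package name). The recording API itself is not shown in this diff, so the handler body only marks where it would be used:

import {MetricsSetups} from 'goodmetrics-nodejs'; // assumed package name

const metricsFactory =
  MetricsSetups.rawNativeOtlpButItSendsMetricsUponRecordingForLambda(exampleProps);

export const handler = async (): Promise<void> => {
  // record unary metrics with metricsFactory here; per the doc comment above,
  // each recorded batch is sent to the OTLP backend before the lambda completes
};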

private static configureBatchedUnaryLightstepSink(
props: ConfigureBatchedUnaryLightstepSinkProps
): SynchronizingBuffer {
