refactor: Update CommonServer/package.json and Ingestor/Service/OTelIngest.ts

This change removes the deprecated "@opentelemetry/metrics" dependency from CommonServer/package.json and adds "@opentelemetry/sdk-metrics" at "^1.21.0". It also updates Ingestor/Service/OTelIngest.ts to import the "JSONFunctions" module from "Common/Types/JSONFunctions" and flatten the final attributes object with its "flattenObject" function before returning it.
Simon Larsen 2024-06-07 16:57:24 +01:00
parent 63461343ba
commit 26c900d8e2
No known key found for this signature in database
GPG Key ID: 96C5DCA24769DBCA
7 changed files with 115 additions and 190 deletions
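
For context on the dependency swap described above, here is a minimal sketch of what moving from the deprecated "@opentelemetry/metrics" package to "@opentelemetry/sdk-metrics" looks like in application code. The reader/exporter wiring below is illustrative only and is not the exact code in this commit; it uses the OTLP proto exporter already listed in the dependencies.

// Deprecated package removed by this commit (long since renamed upstream):
// import { MeterProvider } from '@opentelemetry/metrics';

// Maintained SDK package added at ^1.21.0:
import {
    MeterProvider,
    PeriodicExportingMetricReader,
} from '@opentelemetry/sdk-metrics';
import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-proto';

// Illustrative wiring: periodically export collected metrics over OTLP/proto.
const meterProvider: MeterProvider = new MeterProvider();
meterProvider.addMetricReader(
    new PeriodicExportingMetricReader({ exporter: new OTLPMetricExporter() })
);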

View File

@@ -1,3 +1,4 @@
import OpenTelemetryAPI, { Meter } from '@opentelemetry/api';
import { Logger, logs } from '@opentelemetry/api-logs';
import { Counter } from '@opentelemetry/api/build/src/metrics/Metric';
import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
@@ -11,15 +12,15 @@ import {
BatchLogRecordProcessor,
LoggerProvider,
} from '@opentelemetry/sdk-logs';
import { PeriodicExportingMetricReader, MeterProvider } from '@opentelemetry/sdk-metrics';
import {
MeterProvider,
PeriodicExportingMetricReader,
} from '@opentelemetry/sdk-metrics';
import * as opentelemetry from '@opentelemetry/sdk-node';
import { SpanExporter } from '@opentelemetry/sdk-trace-node';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import URL from 'Common/Types/API/URL';
import Dictionary from 'Common/Types/Dictionary';
import OpenTelemetryAPI, { Meter } from '@opentelemetry/api';
// Enable this line to see debug logs
// diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
@@ -214,9 +215,10 @@ export default class Telemetry {
}): Counter {
const { name, description } = data;
const counter = this.getMeter().createCounter(name, {
description: description,
});
const counter: Counter<opentelemetry.api.Attributes> =
this.getMeter().createCounter(name, {
description: description,
});
return counter;
}
}
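
A hedged usage sketch for the typed counter created above. Only createCounter and Counter.add are standard '@opentelemetry/api' calls; the meter name, counter name, and attribute keys are illustrative, not taken from this repository.

import { metrics, Attributes, Counter } from '@opentelemetry/api';

// Illustrative only: obtain a meter from the global metrics API and create a
// counter typed over Attributes, mirroring the createCounter call in the hunk above.
const requestCounter: Counter<Attributes> = metrics
    .getMeter('oneuptime-example')
    .createCounter('ingest.requests', {
        description: 'Number of ingest requests handled',
    });

// Attributes are recorded per increment.
requestCounter.add(1, { route: '/otlp/v1/metrics' });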

View File

@@ -18,7 +18,6 @@
"@opentelemetry/exporter-metrics-otlp-proto": "^0.49.1",
"@opentelemetry/exporter-trace-otlp-proto": "^0.49.1",
"@opentelemetry/id-generator-aws-xray": "^1.2.1",
"@opentelemetry/metrics": "^0.24.0",
"@opentelemetry/sdk-logs": "^0.49.1",
"@opentelemetry/sdk-metrics": "^1.21.0",
"@opentelemetry/sdk-node": "^0.48.0",
@@ -10078,18 +10077,6 @@
"node": ">=14"
}
},
"node_modules/@opentelemetry/api-metrics": {
"version": "0.24.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api-metrics/-/api-metrics-0.24.0.tgz",
"integrity": "sha512-hdpkMeVlRGTuMshD2ZFaDjA/U0cZTkxUkJFvS/4yOiWfw+kEASmGE+U0/i9lbdQKuCR7X1rXSjbcYumlHcMG+A==",
"deprecated": "Please use @opentelemetry/api >= 1.3.0",
"engines": {
"node": ">=8.0.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.0.1"
}
},
"node_modules/@opentelemetry/auto-instrumentations-node": {
"version": "0.43.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/auto-instrumentations-node/-/auto-instrumentations-node-0.43.0.tgz",
@@ -12267,62 +12254,6 @@
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/metrics": {
"version": "0.24.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/metrics/-/metrics-0.24.0.tgz",
"integrity": "sha512-QqmQCzrSuJE+sCOJ2xXNhctWPp/Am9ILs0Y01MDS08PRJoK20akKHM7eC4oU8ZdXphMg8rYgW2w7tY8rqvYnJg==",
"deprecated": "Package renamed to @opentelemetry/sdk-metrics-base",
"dependencies": {
"@opentelemetry/api-metrics": "0.24.0",
"@opentelemetry/core": "0.24.0",
"@opentelemetry/resources": "0.24.0",
"lodash.merge": "^4.6.2"
},
"engines": {
"node": ">=8.0.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.0.1"
}
},
"node_modules/@opentelemetry/metrics/node_modules/@opentelemetry/core": {
"version": "0.24.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-0.24.0.tgz",
"integrity": "sha512-KpsfxBbFTZT9zaB4Es/fFLbvSzVl9Io/8UUu/TYl4/HgqkmyVInNlWTgRiKyz9nsHzFpGP1kdZJj+YIut0IFsw==",
"dependencies": {
"@opentelemetry/semantic-conventions": "0.24.0",
"semver": "^7.1.3"
},
"engines": {
"node": ">=8.5.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.0.1"
}
},
"node_modules/@opentelemetry/metrics/node_modules/@opentelemetry/resources": {
"version": "0.24.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-0.24.0.tgz",
"integrity": "sha512-uEr2m13IRkjQAjX6fsYqJ21aONCspRvuQunaCl8LbH1NS1Gj82TuRUHF6TM82ulBPK8pU+nrrqXKuky2cMcIzw==",
"dependencies": {
"@opentelemetry/core": "0.24.0",
"@opentelemetry/semantic-conventions": "0.24.0"
},
"engines": {
"node": ">=8.0.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.0.1"
}
},
"node_modules/@opentelemetry/metrics/node_modules/@opentelemetry/semantic-conventions": {
"version": "0.24.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-0.24.0.tgz",
"integrity": "sha512-a/szuMQV0Quy0/M7kKdglcbRSoorleyyOwbTNNJ32O+RBN766wbQlMTvdimImTmwYWGr+NJOni1EcC242WlRcA==",
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@opentelemetry/otlp-exporter-base": {
"version": "0.48.0",
"license": "Apache-2.0",

View File

@@ -23,7 +23,6 @@
"@opentelemetry/exporter-metrics-otlp-proto": "^0.49.1",
"@opentelemetry/exporter-trace-otlp-proto": "^0.49.1",
"@opentelemetry/id-generator-aws-xray": "^1.2.1",
"@opentelemetry/metrics": "^0.24.0",
"@opentelemetry/sdk-logs": "^0.49.1",
"@opentelemetry/sdk-metrics": "^1.21.0",
"@opentelemetry/sdk-node": "^0.48.0",

View File

@@ -29,9 +29,9 @@ const MetricView: FunctionComponent<ComponentProps> = (
serviceId: props.serviceId,
});
const [isLoading, setIsLoading] = React.useState<boolean>(true);
// const [isLoading, setIsLoading] = React.useState<boolean>(true);
const [values, setValues] = React.useState<Metric[]>([]);
// const [values, setValues] = React.useState<Metric[]>([]);
useEffect(() => {}, []);

View File

@@ -153,7 +153,7 @@ router.post(
span['status'] &&
(span['status'] as JSONObject)?.['code'] &&
typeof (span['status'] as JSONObject)?.['code'] ===
'number'
'number'
) {
spanStatusCode = (span['status'] as JSONObject)?.[
'code'
@@ -164,7 +164,7 @@ router.post(
span['status'] &&
(span['status'] as JSONObject)?.['code'] &&
typeof (span['status'] as JSONObject)?.['code'] ===
'string'
'string'
) {
if (
(span['status'] as JSONObject)?.['code'] ===
@@ -317,191 +317,152 @@ router.post(
'description'
] as string;
const metricUnit: string = metric['unit'] as string;
const dbMetric: Metric = new Metric();
dbMetric.projectId = (
req as TelemetryRequest
).projectId;
dbMetric.serviceId = (
req as TelemetryRequest
).serviceId;
dbMetric.name = metricName;
dbMetric.description = metricDescription;
dbMetric.unit = metricUnit;
dbMetric.attributes = {
...OTelIngestService.getAttributes(
metric['attributes'] as JSONArray
),
resource: OTelIngestService.getAttributes(
resourceMetric['attributes'] as JSONArray
),
};
if (
metric['sum'] &&
(metric['sum'] as JSONObject)['dataPoints'] &&
(
(metric['sum'] as JSONObject)[
'dataPoints'
'dataPoints'
] as JSONArray
).length > 0
) {
for (const datapoint of (
metric['sum'] as JSONObject
)['dataPoints'] as JSONArray) {
const dbMetricSum: Metric = new Metric();
dbMetricSum.projectId = (
req as TelemetryRequest
).projectId;
dbMetricSum.serviceId = (
req as TelemetryRequest
).serviceId;
dbMetricSum.name = metricName;
dbMetricSum.description = metricDescription;
dbMetricSum.startTimeUnixNano = datapoint[
dbMetric.startTimeUnixNano = datapoint[
'startTimeUnixNano'
] as number;
dbMetricSum.startTime =
OneUptimeDate.fromUnixNano(
datapoint['startTimeUnixNano'] as number
);
dbMetric.startTime = OneUptimeDate.fromUnixNano(
datapoint['startTimeUnixNano'] as number
);
dbMetricSum.timeUnixNano = datapoint[
dbMetric.timeUnixNano = datapoint[
'timeUnixNano'
] as number;
dbMetricSum.time = OneUptimeDate.fromUnixNano(
dbMetric.time = OneUptimeDate.fromUnixNano(
datapoint['timeUnixNano'] as number
);
if (Object.keys(datapoint).includes('asInt')) {
dbMetricSum.value = datapoint[
dbMetric.value = datapoint[
'asInt'
] as number;
} else if (Object.keys(datapoint).includes('asDouble')) {
dbMetricSum.value = datapoint[
} else if (
Object.keys(datapoint).includes('asDouble')
) {
dbMetric.value = datapoint[
'asDouble'
] as number;
}
dbMetricSum.attributes =
OTelIngestService.getAttributes(
metric['attributes'] as JSONArray
);
dbMetrics.push(dbMetricSum);
dbMetrics.push(dbMetric);
}
} else if (
metric['gauge'] &&
(metric['gauge'] as JSONObject)['dataPoints'] &&
(
(metric['gauge'] as JSONObject)[
'dataPoints'
'dataPoints'
] as JSONArray
).length > 0
) {
for (const datapoint of (
metric['gauge'] as JSONObject
)['dataPoints'] as JSONArray) {
const dbMetricGauge: Metric = new Metric();
dbMetricGauge.projectId = (
req as TelemetryRequest
).projectId;
dbMetricGauge.serviceId = (
req as TelemetryRequest
).serviceId;
dbMetricGauge.name = metricName;
dbMetricGauge.description = metricDescription;
dbMetricGauge.startTimeUnixNano = datapoint[
dbMetric.startTimeUnixNano = datapoint[
'startTimeUnixNano'
] as number;
dbMetricGauge.startTime =
OneUptimeDate.fromUnixNano(
datapoint['startTimeUnixNano'] as number
);
dbMetric.startTime = OneUptimeDate.fromUnixNano(
datapoint['startTimeUnixNano'] as number
);
dbMetricGauge.timeUnixNano = datapoint[
dbMetric.timeUnixNano = datapoint[
'timeUnixNano'
] as number;
dbMetricGauge.time = OneUptimeDate.fromUnixNano(
dbMetric.time = OneUptimeDate.fromUnixNano(
datapoint['timeUnixNano'] as number
);
if (Object.keys(datapoint).includes('asDouble')) {
dbMetricGauge.value = datapoint[
if (
Object.keys(datapoint).includes('asDouble')
) {
dbMetric.value = datapoint[
'asDouble'
] as number;
} else if (Object.keys(datapoint).includes('asInt')) {
dbMetricGauge.value = datapoint[
} else if (
Object.keys(datapoint).includes('asInt')
) {
dbMetric.value = datapoint[
'asInt'
] as number;
}
dbMetricGauge.attributes =
OTelIngestService.getAttributes(
metric['attributes'] as JSONArray
);
dbMetrics.push(dbMetricGauge);
dbMetrics.push(dbMetric);
}
} else if (
metric['histogram'] &&
(metric['histogram'] as JSONObject)['dataPoints'] &&
(
(metric['histogram'] as JSONObject)[
'dataPoints'
'dataPoints'
] as JSONArray
).length > 0
) {
for (const datapoint of (
metric['histogram'] as JSONObject
)['dataPoints'] as JSONArray) {
const dbMetricHistogram: Metric = new Metric();
dbMetricHistogram.projectId = (
req as TelemetryRequest
).projectId;
dbMetricHistogram.serviceId = (
req as TelemetryRequest
).serviceId;
dbMetricHistogram.name = metricName;
dbMetricHistogram.description =
metricDescription;
dbMetricHistogram.startTimeUnixNano = datapoint[
dbMetric.startTimeUnixNano = datapoint[
'startTimeUnixNano'
] as number;
dbMetricHistogram.startTime =
OneUptimeDate.fromUnixNano(
datapoint['startTimeUnixNano'] as number
);
dbMetric.startTime = OneUptimeDate.fromUnixNano(
datapoint['startTimeUnixNano'] as number
);
dbMetricHistogram.timeUnixNano = datapoint[
dbMetric.timeUnixNano = datapoint[
'timeUnixNano'
] as number;
dbMetricHistogram.time =
OneUptimeDate.fromUnixNano(
datapoint['timeUnixNano'] as number
);
dbMetric.time = OneUptimeDate.fromUnixNano(
datapoint['timeUnixNano'] as number
);
dbMetricHistogram.count = datapoint[
'count'
] as number;
dbMetricHistogram.sum = datapoint[
'sum'
] as number;
dbMetric.count = datapoint['count'] as number;
dbMetric.sum = datapoint['sum'] as number;
dbMetricHistogram.min = datapoint[
'min'
] as number;
dbMetricHistogram.max = datapoint[
'max'
] as number;
dbMetric.min = datapoint['min'] as number;
dbMetric.max = datapoint['max'] as number;
dbMetricHistogram.bucketCounts = datapoint[
dbMetric.bucketCounts = datapoint[
'bucketCounts'
] as Array<number>;
dbMetricHistogram.explicitBounds = datapoint[
dbMetric.explicitBounds = datapoint[
'explicitBounds'
] as Array<number>;
// dbMetricHistogram.attributes =
// OTelIngestService.getKeyValues(
// metric['attributes'] as JSONArray
// );
dbMetrics.push(dbMetricHistogram);
dbMetrics.push(dbMetric);
}
} else {
logger.warn('Unknown metric type');
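
For reference, a sketch of the OTLP-style metric object this route walks when reading sum data points. Field names follow the OTLP JSON encoding referenced in the hunk above; the values are illustrative, and real payloads nest this object under resourceMetrics -> scopeMetrics -> metrics and may encode the nanosecond timestamps as strings.

// Illustrative OTLP-style metric only, not real data.
const exampleMetric = {
    name: 'http.server.requests',
    description: 'Completed HTTP requests',
    unit: '1',
    sum: {
        dataPoints: [
            {
                startTimeUnixNano: 1717770000000000000,
                timeUnixNano: 1717770060000000000,
                asInt: 42, // integer points use asInt; floating-point points use asDouble
                attributes: [
                    { key: 'http.method', value: { stringValue: 'GET' } },
                ],
            },
        ],
    },
};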

View File

@@ -1,4 +1,5 @@
import { JSONArray, JSONObject, JSONValue } from 'Common/Types/JSON';
import JSONFunctions from 'Common/Types/JSONFunctions';
export default class OTelIngestService {
public static getAttributes(items: JSONArray): JSONObject {
@@ -22,6 +23,6 @@ export default class OTelIngestService {
}
}
return finalObj;
return JSONFunctions.flattenObject(finalObj);
}
}
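
A hedged sketch of the flattening step added here. The dot-separated key format shown in the comment is an assumption about JSONFunctions.flattenObject, not something this diff confirms; the input shape mirrors what getAttributes() builds above, with illustrative values.

import { JSONObject } from 'Common/Types/JSON';
import JSONFunctions from 'Common/Types/JSONFunctions';

// Nested attributes as assembled by getAttributes(), including a nested resource object.
const finalObj: JSONObject = {
    'http.method': 'GET',
    resource: {
        'service.name': 'api',
        'service.version': '1.0.0',
    },
};

// Assumption: flattenObject() collapses nested objects into single-level keys,
// e.g. { 'http.method': 'GET', 'resource.service.name': 'api', ... }.
const flattened: JSONObject = JSONFunctions.flattenObject(finalObj);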

View File

@@ -137,6 +137,29 @@ export default class Metric extends AnalyticsBaseModel {
},
}),
new AnalyticsTableColumn({
key: 'unit',
title: 'Unit',
description: 'Unit of the Metric',
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// this is end time.
new AnalyticsTableColumn({
key: 'time',
@@ -450,6 +473,14 @@ export default class Metric extends AnalyticsBaseModel {
this.setColumnValue('description', v);
}
public get unit(): string | undefined {
return this.getColumnValue('unit') as string | undefined;
}
public set unit(v: string | undefined) {
this.setColumnValue('unit', v);
}
public set serviceId(v: ObjectID | undefined) {
this.setColumnValue('serviceId', v);
}
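
A small usage sketch for the new unit accessor, mirroring how the ingest route above assigns it from the incoming OTLP metric; the values here are illustrative.

// Illustrative only: populate the new unit column alongside existing ones.
const dbMetric: Metric = new Metric();
dbMetric.name = 'http.server.duration';
dbMetric.unit = 'ms'; // unit column added by this commit
dbMetric.description = 'Duration of inbound HTTP requests';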