From 267f8761333135f2085079cc94792ade8906b7fe Mon Sep 17 00:00:00 2001 From: rjenkins Date: Thu, 24 Jul 2025 15:57:20 -0700 Subject: [PATCH 01/11] wip multiple exporters --- npm/app/__tests__/config.ts | 18 +++- npm/app/config.ts | 183 +++++++++++++++++++++++++++--------- npm/app/package.json | 5 +- npm/app/yarn.lock | 22 ++--- 4 files changed, 171 insertions(+), 57 deletions(-) diff --git a/npm/app/__tests__/config.ts b/npm/app/__tests__/config.ts index bdd4dbf..96307c3 100644 --- a/npm/app/__tests__/config.ts +++ b/npm/app/__tests__/config.ts @@ -51,7 +51,7 @@ describe('configuration and validation', () => { process.env.ROTEL_OTLP_EXPORTER_TLS_KEY_FILE = "key.file"; process.env.ROTEL_OTLP_EXPORTER_TLS_CA_FILE = "ca.file"; process.env.ROTEL_OTLP_EXPORTER_TLS_SKIP_VERIFY = "true"; - let c = Config._load_otlp_exporter_options_from_env(null); + let c = Config._load_otlp_exporter_options_from_env("OTLP_EXPORTER_", null); expect(c?.endpoint).toBe("https://api.foo.com"); expect(c?.protocol).toBe("http"); expect(c?.headers).toStrictEqual({"[x-api-key": "123]"}) @@ -68,8 +68,18 @@ describe('configuration and validation', () => { expect(c?.tls_skip_verify).toBe(true); }); + it('Load DatadogExporter config from ENV', () => { + process.env.ROTEL_DATADOG_EXPORTER_REGION = "us1"; + process.env.ROTEL_DATADOG_EXPORTER_CUSTOM_ENDPOINT = "http://localhost:5555"; + process.env.ROTEL_DATADOG_EXPORTER_API_KEY = "123abc"; + let c = Config._load_datadog_exporter_options_from_env("DATADOG_EXPORTER_"); + expect(c.region).toBe("us1"); + expect(c.custom_endpoint).toBe("http://localhost:5555"); + expect(c.api_key).toBe("123abc"); + }); + it('fails validation', () => { - + process.env.ROTEL_ENABLED = "true"; const c1 = new Config(); c1.options.exporter = {protocol: "X.500"}; expect(c1.validate()).toBe(false) @@ -78,4 +88,6 @@ describe('configuration and validation', () => { c2.options.log_format = "ascii"; expect(c2.validate()).toBe(false) }); -}); \ No newline at end of file +}); + + diff 
--git a/npm/app/config.ts b/npm/app/config.ts index a19ee64..2ad5ecc 100644 --- a/npm/app/config.ts +++ b/npm/app/config.ts @@ -23,11 +23,19 @@ export interface OTLPExporterEndpoint { // TODO: when we have more, include a key that defines this exporter type export interface OTLPExporter extends OTLPExporterEndpoint { - traces?: OTLPExporterEndpoint; + _type?: string + traces?: OTLPExporterEndpoint | DatadogExporter; metrics?: OTLPExporterEndpoint; logs?: OTLPExporterEndpoint; } +export interface DatadogExporter { + _type?: string + region?: string + custom_endpoint?: string + api_key?: string +} + export interface Options { enabled?: boolean; pid_file?: string; @@ -39,7 +47,11 @@ export interface Options { otlp_receiver_traces_disabled?: boolean; otlp_receiver_metrics_disabled?: boolean; otlp_receiver_logs_disabled?: boolean; - exporter?: OTLPExporter; + exporter?: OTLPExporter | DatadogExporter; + exporters?: Record + exporters_metrics?: string[] | undefined + exporters_traces?: string[] | undefined + exporters_logs?: string[] | undefined } export class Config { @@ -84,26 +96,74 @@ export class Config { otlp_receiver_logs_disabled: as_bool(rotel_env("OTLP_RECEIVER_LOGS_DISABLED")), }; - const exporter_type = as_lower(rotel_env("EXPORTER")); - if (exporter_type === null || exporter_type === "otlp") { - let exporter: OTLPExporter = Config._load_otlp_exporter_options_from_env(null) as OTLPExporter; - if (exporter === null) { - // make sure we always construct the top-level exporter config - exporter = {}; - } - env.exporter = exporter; + const exporters = as_lower(rotel_env("EXPORTERS")); + if (exporters !== null && exporters !== undefined) { + env["exporters"] = {}; + for (const exporterStr of exporters.split(",")) { + let name = exporterStr; + let value = exporterStr; + if (exporterStr.includes(":")) { + [name, value] = exporterStr.split(":", 2); + } - const traces_endpoint = Config._load_otlp_exporter_options_from_env("TRACES"); - if (traces_endpoint !== null) { - 
exporter.traces = traces_endpoint; - } - const metrics_endpoint = Config._load_otlp_exporter_options_from_env("METRICS"); - if (metrics_endpoint !== null) { - exporter.metrics = metrics_endpoint; + let exporter: OTLPExporter | DatadogExporter | undefined = undefined; + let pfx = "EXPORTER_" + name.toUpperCase + "_" + switch(value) { + case "otlp": + exporter = Config._load_otlp_exporter_options_from_env(pfx, "OTLPExporter") as OTLPExporter; + exporter._type = "otlp" + if (exporter === null || exporter === undefined) { + exporter = {}; + exporter._type = "otlp"; + } + case "datadog": + const datadogExporter: DatadogExporter = { + _type: "datadog", + region: rotel_env(pfx + "REGION"), + custom_endpoint: rotel_env(pfx + "CUSTOM_ENDPOINT"), + api_key: rotel_env(pfx + "API_KEY"), + }; + exporter = datadogExporter; + } + if (exporter !== undefined) { + env.exporters[name] = exporter; + } } - const logs_endpoint = Config._load_otlp_exporter_options_from_env("LOGS"); - if (logs_endpoint != null) { - exporter.logs = logs_endpoint; + env.exporters_traces = as_list(rotel_env("EXPORTERS_TRACES")) + env.exporters_metrics = as_list(rotel_env("EXPORTERS_METRICS")) + env.exporters_logs = as_list(rotel_env("EXPORTERS_LOGS")) + } else { + const exporter_type = as_lower(rotel_env("EXPORTER")); + if (exporter_type === null || exporter_type === "otlp") { + let exporter: OTLPExporter = Config._load_otlp_exporter_options_from_env("OTLP_EXPORTER_", null) as OTLPExporter; + if (exporter === null) { + // make sure we always construct the top-level exporter config + exporter = {}; + } + exporter._type = "otlp"; + env.exporter = exporter; + + const traces_endpoint = Config._load_otlp_exporter_options_from_env("OTLP_EXPORTER_TRACES_", "TRACES"); + if (traces_endpoint !== null) { + exporter.traces = traces_endpoint; + } + const metrics_endpoint = Config._load_otlp_exporter_options_from_env("OTLP_EXPORTER_METRICS_", "METRICS"); + if (metrics_endpoint !== null) { + exporter.metrics = 
metrics_endpoint; + } + const logs_endpoint = Config._load_otlp_exporter_options_from_env("OTLP_EXPORTER_LOGS_", "LOGS"); + if (logs_endpoint != null) { + exporter.logs = logs_endpoint; + } + } else if (exporter_type === "datadog") { + const pfx = "DATADOG_EXPORTER_" + var c: DatadogExporter = { + _type: "datadog", + region: rotel_env(pfx + "REGION"), + custom_endpoint: rotel_env(pfx + "CUSTOM_ENDPOINT"), + api_key: rotel_env(pfx + "API_KEY"), + } + env.exporter = c; } } @@ -117,11 +177,20 @@ export class Config { return final_env; } - static _load_otlp_exporter_options_from_env(endpoint_type: string | null): OTLPExporter | OTLPExporterEndpoint | undefined { - let pfx = "OTLP_EXPORTER_"; - if (endpoint_type !== null) { - pfx += `${endpoint_type}_`; - } + static _load_datadog_exporter_options_from_env(pfx: string): DatadogExporter { + const datadogExporter: DatadogExporter = { + region: rotel_env(pfx + "REGION"), + custom_endpoint: rotel_env(pfx + "CUSTOM_ENDPOINT"), + api_key: rotel_env(pfx + "API_KEY"), + }; + return datadogExporter; + } + + static _load_otlp_exporter_options_from_env(pfx: string, endpoint_type: string | null): OTLPExporter | OTLPExporterEndpoint | undefined { + // let pfx = "OTLP_EXPORTER_"; + // if (endpoint_type !== null) { + // pfx += `${endpoint_type}_`; + // } const endpoint: OTLPExporterEndpoint = { endpoint: rotel_env(pfx + "ENDPOINT"), protocol: as_lower(rotel_env(pfx + "PROTOCOL")), @@ -170,20 +239,30 @@ export class Config { const exporter = opts.exporter; if (exporter !== undefined) { - _set_otlp_exporter_agent_env(updates, null, exporter); - - const traces = exporter.traces; - if (traces !== undefined) { - _set_otlp_exporter_agent_env(updates, "TRACES", traces); - } + console.log("exporter._type in build_agent_environment is " + exporter._type); + switch (exporter._type) { + case "otlp" || undefined: + const otlpExporter: OTLPExporter = exporter; + _set_otlp_exporter_agent_env(updates, null, exporter); + + const traces = 
otlpExporter.traces; + if (traces !== undefined) { + _set_otlp_exporter_agent_env(updates, "TRACES", traces); + } - const metrics = exporter.metrics; - if (metrics !== undefined) { - _set_otlp_exporter_agent_env(updates, "METRICS", metrics); - } - const logs = exporter.logs; - if (logs !== undefined) { - _set_otlp_exporter_agent_env(updates, "LOGS", logs); + const metrics = otlpExporter.metrics; + if (metrics !== undefined) { + _set_otlp_exporter_agent_env(updates, "METRICS", metrics); + } + const logs = otlpExporter.logs; + if (logs !== undefined) { + _set_otlp_exporter_agent_env(updates, "LOGS", logs); + } + break; + case "datadog": + const datadogExporter: DatadogExporter = exporter; + _set_datadog_exporter_agent_env(updates, "DATADOG_EXPORTER_" , exporter); + break; } } @@ -217,10 +296,19 @@ export class Config { const exporter = this.options.exporter; if (exporter !== undefined) { - const protocol = exporter.protocol; - if (protocol !== undefined && protocol !== 'grpc' && protocol !== 'http') { - console.error("exporter protocol must be 'grpc' or 'http'"); - return false; + if (exporter._type === undefined) { + exporter._type = "otlp" + } + switch (exporter._type) { + case "otlp": + const otlpExporter: OTLPExporter = exporter; + const protocol = otlpExporter.protocol; + console.log("protocol is " + protocol); + if (protocol !== undefined && protocol !== 'grpc' && protocol !== 'http') { + console.error("exporter protocol must be 'grpc' or 'http'"); + return false; + } + } } @@ -234,6 +322,15 @@ export class Config { } } +function _set_datadog_exporter_agent_env(updates: Record, pfx: string, exporter: DatadogExporter) { + Object.assign(updates, { + [pfx + "EXPORTER"]: "datadog", + [pfx + "REGION"]: exporter.region, + [pfx + "CUSTOM_ENDPOINT"]: exporter.custom_endpoint, + [pfx + "API_KEY"]: exporter.api_key, + }) +} + function _set_otlp_exporter_agent_env(updates: Record, endpoint_type: string | null, exporter: OTLPExporter | OTLPExporterEndpoint | null): void { 
let pfx = "OTLP_EXPORTER_"; if (endpoint_type !== null) { @@ -317,6 +414,8 @@ function as_bool(value: string | null | undefined): boolean | undefined { } function rotel_env(base_key: string): string | undefined { + // let key = rotel_expand_env_key(base_key); + // console.log("key is " + key); const envVar = process.env[rotel_expand_env_key(base_key)]; return envVar !== undefined ? envVar : undefined; } diff --git a/npm/app/package.json b/npm/app/package.json index af51840..1dc363e 100644 --- a/npm/app/package.json +++ b/npm/app/package.json @@ -21,8 +21,8 @@ }, "homepage": "https://github.com/streamfold/rotel-nodejs#readme", "devDependencies": { - "@babel/preset-typescript": "^7.26.0", "@babel/preset-env": "^7.26.0", + "@babel/preset-typescript": "^7.26.0", "@opentelemetry/api": "^1.9.0", "@opentelemetry/exporter-trace-otlp-grpc": "^0.200.0", "@opentelemetry/resources": "^2.0.0", @@ -57,5 +57,8 @@ "lib/*" ], "root": true + }, + "dependencies": { + "yarn": "^1.22.22" } } diff --git a/npm/app/yarn.lock b/npm/app/yarn.lock index 4e6c825..37b8318 100644 --- a/npm/app/yarn.lock +++ b/npm/app/yarn.lock @@ -1595,6 +1595,11 @@ dependencies: "@sinonjs/commons" "^3.0.0" +"@streamfold/rotel-darwin-arm64@0.0.7-alpha": + version "0.0.7-alpha" + resolved "https://registry.npmjs.org/@streamfold/rotel-darwin-arm64/-/rotel-darwin-arm64-0.0.7-alpha.tgz" + integrity sha512-OQpkxS6GgAoeM/queJkNilqFMtXmjmjJNUhTgKb5Vuyg8Y5lNAW8zyXrQ9kvc8TcUGgN0olGElcimAd8gYigrQ== + "@types/babel__core@^7.1.14": version "7.20.5" resolved "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz" @@ -3625,11 +3630,6 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" -rotel-agent-darwin-arm64@0.0.12-alpha: - version "0.0.12-alpha" - resolved "https://registry.npmjs.org/rotel-agent-darwin-arm64/-/rotel-agent-darwin-arm64-0.0.12-alpha.tgz" - integrity sha512-b5kXW731JQopiM3EEO2mQmR6wfUoN1F7y2Fpg/8HCv6/S0PvTQPPrfh516wBRwxTPErCoXnq1fBAYICwK6C5Og== - run-parallel@^1.1.9: version "1.2.0" resolved 
"https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" @@ -3637,12 +3637,7 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -semver@^6.3.0: - version "6.3.1" - resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^6.3.1: +semver@^6.3.0, semver@^6.3.1: version "6.3.1" resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== @@ -3985,6 +3980,11 @@ yargs@^17.3.1, yargs@^17.7.2: y18n "^5.0.5" yargs-parser "^21.1.1" +yarn@^1.22.22: + version "1.22.22" + resolved "https://registry.npmjs.org/yarn/-/yarn-1.22.22.tgz" + integrity sha512-prL3kGtyG7o9Z9Sv8IPfBNrWTDmXB4Qbes8A9rEzt6wkJV8mUvoirjU0Mp3GGAU06Y0XQyA3/2/RQFVuK7MTfg== + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" From 6d569feefcb2c0db6f60fac1333624042ab7319c Mon Sep 17 00:00:00 2001 From: rjenkins Date: Fri, 25 Jul 2025 12:22:31 -0700 Subject: [PATCH 02/11] support for clickhouse, datadog, and blackhole --- .github/workflows/build.yml | 2 +- npm/app/config.ts | 276 +++++++++++++++++++++++++++++------- npm/app/index.ts | 1 + 3 files changed, 225 insertions(+), 54 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 83d5ffb..4d29897 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,7 +4,7 @@ on: push: env: - ROTEL_VERSION: "tags/v0.0.1-alpha1" + ROTEL_VERSION: "tags/v0.0.1-alpha22" jobs: publish-npm-binaries: diff --git a/npm/app/config.ts b/npm/app/config.ts index 2ad5ecc..59b334b 100644 --- a/npm/app/config.ts +++ b/npm/app/config.ts @@ -24,9 +24,9 @@ export interface OTLPExporterEndpoint { // TODO: when we have more, include a key that defines this exporter type export interface OTLPExporter extends OTLPExporterEndpoint { 
_type?: string - traces?: OTLPExporterEndpoint | DatadogExporter; - metrics?: OTLPExporterEndpoint; - logs?: OTLPExporterEndpoint; + traces?: OTLPExporterEndpoint | DatadogExporter | BlackholeExporter; + metrics?: OTLPExporterEndpoint | BlackholeExporter; + logs?: OTLPExporterEndpoint | BlackholeExporter; } export interface DatadogExporter { @@ -36,6 +36,24 @@ export interface DatadogExporter { api_key?: string } +export interface BlackholeExporter { + _type?: string +} + + +export interface ClickhouseExporter { + _type?: string + endpoint?: string + database?: string + table_prefix?: string + compression?: string + async_insert?: boolean + user?: string + password?: string + enable_json?: boolean +} + + export interface Options { enabled?: boolean; pid_file?: string; @@ -47,8 +65,8 @@ export interface Options { otlp_receiver_traces_disabled?: boolean; otlp_receiver_metrics_disabled?: boolean; otlp_receiver_logs_disabled?: boolean; - exporter?: OTLPExporter | DatadogExporter; - exporters?: Record + exporter?: OTLPExporter | DatadogExporter | ClickhouseExporter | BlackholeExporter; + exporters?: Record exporters_metrics?: string[] | undefined exporters_traces?: string[] | undefined exporters_logs?: string[] | undefined @@ -97,7 +115,7 @@ export class Config { }; const exporters = as_lower(rotel_env("EXPORTERS")); - if (exporters !== null && exporters !== undefined) { + if (exporters !== undefined) { env["exporters"] = {}; for (const exporterStr of exporters.split(",")) { let name = exporterStr; @@ -106,7 +124,7 @@ export class Config { [name, value] = exporterStr.split(":", 2); } - let exporter: OTLPExporter | DatadogExporter | undefined = undefined; + let exporter: OTLPExporter | DatadogExporter | ClickhouseExporter | BlackholeExporter | undefined = undefined; let pfx = "EXPORTER_" + name.toUpperCase + "_" switch(value) { case "otlp": @@ -124,6 +142,24 @@ export class Config { api_key: rotel_env(pfx + "API_KEY"), }; exporter = datadogExporter; + case "blackhole": + 
const blackholeExporter: BlackholeExporter = { + _type: "blackhole", + } + exporter = blackholeExporter; + case "clickhouse": + const clickhouseExporter: ClickhouseExporter = { + _type: "clickhouse", + endpoint: rotel_env(pfx + "ENDPOINT"), + database: rotel_env(pfx + "DATABASE"), + table_prefix: rotel_env(pfx + "TABLE_PREFIX"), + compression: rotel_env(pfx + "COMPRESSION"), + async_insert: as_bool(rotel_env(pfx + "ASYNC_INSERT")), + user: rotel_env(pfx + "USER"), + password: rotel_env(pfx + "PASSWORD"), + enable_json: as_bool(rotel_env(pfx + "ENABLE_JSON")), + } + exporter = clickhouseExporter; } if (exporter !== undefined) { env.exporters[name] = exporter; @@ -156,14 +192,34 @@ export class Config { exporter.logs = logs_endpoint; } } else if (exporter_type === "datadog") { - const pfx = "DATADOG_EXPORTER_" - var c: DatadogExporter = { + const pfx = "DATADOG_EXPORTER_"; + var d: DatadogExporter = { _type: "datadog", region: rotel_env(pfx + "REGION"), custom_endpoint: rotel_env(pfx + "CUSTOM_ENDPOINT"), api_key: rotel_env(pfx + "API_KEY"), } - env.exporter = c; + env.exporter = d; + } else if (exporter_type === "blackhole") { + const pfx = "BLACKHOLE_EXPORTER_"; + var b: BlackholeExporter = { + _type: "blackhole", + } + env.exporter = b + } else if (exporter_type === "clickhouse") { + const pfx = "CLICKHOUSE_EXPORTER_" + var c: ClickhouseExporter = { + _type: "clickhouse", + endpoint: rotel_env(pfx + "ENDPOINT"), + database: rotel_env(pfx + "DATABASE"), + table_prefix: rotel_env(pfx + "TABLE_PREFIX"), + compression: rotel_env(pfx + "COMPRESSION"), + async_insert: as_bool(rotel_env(pfx + "ASYNC_INSERT")), + user: rotel_env(pfx + "USER"), + password: rotel_env(pfx + "PASSWORD"), + enable_json: as_bool(rotel_env(pfx + "ENABLE_JSON")), + } + env.exporter = c; } } @@ -177,20 +233,35 @@ export class Config { return final_env; } - static _load_datadog_exporter_options_from_env(pfx: string): DatadogExporter { - const datadogExporter: DatadogExporter = { - region: 
rotel_env(pfx + "REGION"), - custom_endpoint: rotel_env(pfx + "CUSTOM_ENDPOINT"), - api_key: rotel_env(pfx + "API_KEY"), + static otlp_exporter(config?: Partial): OTLPExporter { + return { + _type: "otlp", + ...config + }; + } + + static datadog_exporter(config?: Partial): DatadogExporter { + return { + _type: "datadog", + ...config + }; + } + + static blackhole_exporter(config?: Partial): BlackholeExporter { + return { + _type: "blackhole", + ...config }; - return datadogExporter; } + static clickhouse_exporter(config?: Partial): ClickhouseExporter { + return { + _type: "clickhouse", + ...config + } + } + static _load_otlp_exporter_options_from_env(pfx: string, endpoint_type: string | null): OTLPExporter | OTLPExporterEndpoint | undefined { - // let pfx = "OTLP_EXPORTER_"; - // if (endpoint_type !== null) { - // pfx += `${endpoint_type}_`; - // } const endpoint: OTLPExporterEndpoint = { endpoint: rotel_env(pfx + "ENDPOINT"), protocol: as_lower(rotel_env(pfx + "PROTOCOL")), @@ -236,33 +307,45 @@ export class Config { "OTLP_RECEIVER_METRICS_DISABLED": opts.otlp_receiver_metrics_disabled, "OTLP_RECEIVER_LOGS_DISABLED": opts.otlp_receiver_logs_disabled, }; - - const exporter = opts.exporter; - if (exporter !== undefined) { - console.log("exporter._type in build_agent_environment is " + exporter._type); - switch (exporter._type) { - case "otlp" || undefined: - const otlpExporter: OTLPExporter = exporter; - _set_otlp_exporter_agent_env(updates, null, exporter); - - const traces = otlpExporter.traces; - if (traces !== undefined) { - _set_otlp_exporter_agent_env(updates, "TRACES", traces); - } - - const metrics = otlpExporter.metrics; - if (metrics !== undefined) { - _set_otlp_exporter_agent_env(updates, "METRICS", metrics); - } - const logs = otlpExporter.logs; - if (logs !== undefined) { - _set_otlp_exporter_agent_env(updates, "LOGS", logs); - } - break; - case "datadog": - const datadogExporter: DatadogExporter = exporter; - _set_datadog_exporter_agent_env(updates, 
"DATADOG_EXPORTER_" , exporter); - break; + + const exporters = opts.exporters; + if (exporters) { + const exportersList: string[] = []; + for (const [name, exporter] of Object.entries(exporters)) { + const exporterType = (exporter as Record).get?.("_type") || (exporter as any)["_type"]; + if (name === exporterType) { + exportersList.push(`${name}`); + } else { + exportersList.push(`${name}:${exporterType}`); + } + const pfx = `EXPORTER_${name.toUpperCase()}_`; + this._set_exporter_agent_env(updates, pfx, exporter); + } + Object.assign(updates, { + "EXPORTERS": exportersList.join(","), + }); + + if (opts.exporters_metrics !== null) { + Object.assign(updates, { + "EXPORTERS_METRICS": opts.exporters_metrics?.join(","), + }); + } + + if (opts.exporters_traces !== null) { + Object.assign(updates, { + "EXPORTERS_TRACES": opts.exporters_traces?.join(","), + }); + } + + if (opts.exporters_logs !== null) { + Object.assign(updates, { + "EXPORTERS_LOGS": opts.exporters_logs?.join(","), + }); + } + } else { + const exporter = opts.exporter; + if (exporter !== undefined) { + this._set_exporter_agent_env(updates, null, exporter) } } @@ -285,8 +368,64 @@ export class Config { } } + //this.log_spawn_env(spawn_env); return spawn_env; } + + // for local dev debugging purposes. 
+ log_spawn_env(spawn_env: { [x: string]: string | undefined; TZ?: string | undefined; }): void { + console.log("spawn_env contents:"); + for (const [key, value] of Object.entries(spawn_env)) { + console.log(` ${key}: ${value}`); + } + } + + _set_exporter_agent_env( + updates: Record, + pfx: string | null, + exporter: OTLPExporter | DatadogExporter | undefined + ): void { + const expType = (exporter as Record).get?.("_type") || (exporter as any)["_type"]; + + if (expType === "datadog") { + const d: DatadogExporter = exporter as DatadogExporter; + _set_datadog_exporter_agent_env(updates, pfx, d); + return; + } + + if (expType === "blackhole") { + const b: BlackholeExporter = exporter as BlackholeExporter; + _set_blackhole_exporter_agent_env(updates, pfx, b); + return; + } + + if (expType == "clickhouse") { + const c: ClickhouseExporter = exporter as ClickhouseExporter; + _set_clickhouse_exporter_agent_env(updates, pfx, c) + return; + } + + // + // Fall through to OTLP exporter + // + const e: OTLPExporter = exporter as OTLPExporter; + _set_otlp_exporter_agent_env(updates, pfx, null, e); + + const traces = (exporter as any).get?.("traces") || (exporter as any)["traces"]; + if (traces !== null && traces !== undefined) { + _set_otlp_exporter_agent_env(updates, null, "TRACES", traces); + } + + const metrics = (exporter as any).get?.("metrics") || (exporter as any)["metrics"]; + if (metrics !== null && metrics !== undefined) { + _set_otlp_exporter_agent_env(updates, null, "METRICS", metrics); + } + + const logs = (exporter as any).get?.("logs") || (exporter as any)["logs"]; + if (logs !== null && logs !== undefined) { + _set_otlp_exporter_agent_env(updates, null, "LOGS", logs); // Note: was "metrics" in original, assuming this is correct + } + } // Perform some minimal validation for now, we can expand this as needed validate(): boolean | null { @@ -303,7 +442,6 @@ export class Config { case "otlp": const otlpExporter: OTLPExporter = exporter; const protocol = 
otlpExporter.protocol; - console.log("protocol is " + protocol); if (protocol !== undefined && protocol !== 'grpc' && protocol !== 'http') { console.error("exporter protocol must be 'grpc' or 'http'"); return false; @@ -322,7 +460,19 @@ export class Config { } } -function _set_datadog_exporter_agent_env(updates: Record, pfx: string, exporter: DatadogExporter) { +function _set_blackhole_exporter_agent_env(updates: Record, pfx: string | null, exporter: BlackholeExporter) { + if (pfx === null) { + pfx = "BLACKHOLE_EXPORTER_"; + } + Object.assign(updates, { + [pfx + "EXPORTER"]: "blackhole", + }) +} + +function _set_datadog_exporter_agent_env(updates: Record, pfx: string | null, exporter: DatadogExporter) { + if (pfx === null) { + pfx = "DATADOG_EXPORTER_"; + } Object.assign(updates, { [pfx + "EXPORTER"]: "datadog", [pfx + "REGION"]: exporter.region, @@ -331,8 +481,30 @@ function _set_datadog_exporter_agent_env(updates: Record, pfx: stri }) } -function _set_otlp_exporter_agent_env(updates: Record, endpoint_type: string | null, exporter: OTLPExporter | OTLPExporterEndpoint | null): void { - let pfx = "OTLP_EXPORTER_"; +function _set_clickhouse_exporter_agent_env(updates: Record, pfx: string | null, exporter: ClickhouseExporter) { + if (pfx === null) { + pfx = "CLICKHOUSE_EXPORTER_" + updates.update({ + "EXPORTER": "clickhouse", + }) + } + + Object.assign(updates, { + [pfx + "ENDPOINT"]: exporter.endpoint, + [pfx + "DATABASE"]: exporter.database, + [pfx + "TABLE_PREFIX"]: exporter.table_prefix, + [pfx + "COMPRESSION"]: exporter.compression, + [pfx + "ASYNC_INSERT"]: exporter.async_insert, + [pfx + "USER"]: exporter.user, + [pfx + "PASSWORD"]: exporter.password, + [pfx + "ENABLE_JSON"]: exporter.enable_json, + }) +} + +function _set_otlp_exporter_agent_env(updates: Record, pfx: string | null, endpoint_type: string | null, exporter: OTLPExporter | OTLPExporterEndpoint | null): void { + if (pfx === null) { + pfx = "OTLP_EXPORTER_"; + } if (endpoint_type !== null) { pfx += 
`${endpoint_type}_`; } @@ -414,8 +586,6 @@ function as_bool(value: string | null | undefined): boolean | undefined { } function rotel_env(base_key: string): string | undefined { - // let key = rotel_expand_env_key(base_key); - // console.log("key is " + key); const envVar = process.env[rotel_expand_env_key(base_key)]; return envVar !== undefined ? envVar : undefined; } diff --git a/npm/app/index.ts b/npm/app/index.ts index 83fc660..cb7e92e 100644 --- a/npm/app/index.ts +++ b/npm/app/index.ts @@ -1,2 +1,3 @@ // SPDX-License-Identifier: Apache-2.0 export { Client as Rotel } from "./client" +export { Config as Config} from "./config" From 1f90c0eef8c094ed52a2585ca60f0cc73e43ab24 Mon Sep 17 00:00:00 2001 From: rjenkins Date: Fri, 25 Jul 2025 14:13:45 -0700 Subject: [PATCH 03/11] kafka support --- npm/app/config.ts | 152 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 148 insertions(+), 4 deletions(-) diff --git a/npm/app/config.ts b/npm/app/config.ts index 59b334b..74762ed 100644 --- a/npm/app/config.ts +++ b/npm/app/config.ts @@ -40,7 +40,6 @@ export interface BlackholeExporter { _type?: string } - export interface ClickhouseExporter { _type?: string endpoint?: string @@ -53,6 +52,35 @@ export interface ClickhouseExporter { enable_json?: boolean } +export interface KafkaExporter { + _type?: string + brokers?: string + traces_topic?: string + metrics_topic?: string + logs_topic?: string + format?: string + compression?: string + request_timeout?: string + acks?: string + client_id?: string + max_message_bytes?: number + linger_ms?: number + retries?: number + retry_backoff_ms?: number + retry_backoff_max_ms?: number + message_timeout_ms?:number + request_timeout_ms?: number + batch_size?: number + partitioner?: string + partition_metrics_by_resource_attributes?: boolean + partition_logs_by_resource_attributes?: boolean + custom_config?: string + sasl_username?: string + sasl_password?: string + sasl_mechanism?: string + security_protocol?: string +} + 
 export interface Options {
     enabled?: boolean;
@@ -124,7 +152,7 @@ export class Config {
                 [name, value] = exporterStr.split(":", 2);
             }
 
-            let exporter: OTLPExporter | DatadogExporter | ClickhouseExporter | BlackholeExporter | undefined = undefined;
-            let pfx = "EXPORTER_" + name.toUpperCase + "_"
+            let exporter: OTLPExporter | DatadogExporter | ClickhouseExporter | BlackholeExporter | KafkaExporter | undefined = undefined;
+            let pfx = "EXPORTER_" + name.toUpperCase() + "_" // NOTE(review): 'toUpperCase' was referenced, not invoked — prefix was garbage
             switch(value) {
                 case "otlp":
@@ -160,6 +188,35 @@ export class Config {
                         enable_json: as_bool(rotel_env(pfx + "ENABLE_JSON")),
                     }
                     exporter = clickhouseExporter;
+                case "kafka":
+                    const kafkaExporter: KafkaExporter = {
+                        _type: "kafka",
+                        brokers: rotel_env(pfx + "BROKERS"),
+                        traces_topic: rotel_env(pfx + "TRACES_TOPIC"),
+                        metrics_topic: rotel_env(pfx + "METRICS_TOPIC"),
+                        logs_topic: rotel_env(pfx + "LOGS_TOPIC"),
+                        format: rotel_env(pfx + "FORMAT"),
+                        compression: rotel_env(pfx + "COMPRESSION"),
+                        request_timeout: rotel_env(pfx + "REQUEST_TIMEOUT"),
+                        acks: rotel_env(pfx + "ACKS"),
+                        client_id: rotel_env(pfx + "CLIENT_ID"),
+                        max_message_bytes: as_int(rotel_env(pfx + "MAX_MESSAGE_BYTES")),
+                        linger_ms: as_int(rotel_env(pfx + "LINGER_MS")),
+                        retries: as_int(rotel_env(pfx + "RETRIES")),
+                        retry_backoff_ms: as_int(rotel_env(pfx + "RETRY_BACKOFF_MS")),
+                        retry_backoff_max_ms: as_int(rotel_env(pfx + "RETRY_BACKOFF_MAX_MS")),
+                        message_timeout_ms: as_int(rotel_env(pfx + "MESSAGE_TIMEOUT_MS")),
+                        request_timeout_ms: as_int(rotel_env(pfx + "REQUEST_TIMEOUT_MS")),
+                        batch_size: as_int(rotel_env(pfx + "BATCH_SIZE")),
+                        partitioner: rotel_env(pfx + "PARTITIONER"), // NOTE(review): was pfx + "REQUEST_TIMEOUT_MS" — copy/paste slip
+                        partition_metrics_by_resource_attributes: as_bool(rotel_env(pfx + "PARTITION_METRICS_BY_RESOURCE_ATTRIBUTES")),
+                        partition_logs_by_resource_attributes: as_bool(rotel_env(pfx + "PARTITION_LOGS_BY_RESOURCE_ATTRIBUTES")),
+                        custom_config: rotel_env(pfx + "CUSTOM_CONFIG"),
+                        sasl_username: rotel_env(pfx + "SASL_USERNAME"),
+                        sasl_password: rotel_env(pfx + "SASL_PASSWORD"),
+                        sasl_mechanism: rotel_env(pfx + "SASL_MECHANISM"),
+                        security_protocol: rotel_env(pfx + "SECURITY_PROTOCOL"),
+                    }; exporter = exporter ?? kafkaExporter; // NOTE(review): result was never assigned (kafka entries silently dropped); '??' keeps an earlier case's value when control falls through — there is no 'break' above
             }
             if (exporter !== undefined) {
                 env.exporters[name] = exporter;
@@ -220,6 +277,36 @@ export class Config {
                 enable_json: as_bool(rotel_env(pfx + "ENABLE_JSON")),
             }
             env.exporter = c;
+        } else if(exporter_type === "kafka") {
+            const pfx = "KAFKA_EXPORTER_";
+            var k: KafkaExporter = {
+                _type: "kafka",
+                brokers: rotel_env(pfx + "BROKERS"),
+                traces_topic: rotel_env(pfx + "TRACES_TOPIC"),
+                metrics_topic: rotel_env(pfx + "METRICS_TOPIC"),
+                logs_topic: rotel_env(pfx + "LOGS_TOPIC"),
+                format: rotel_env(pfx + "FORMAT"),
+                compression: rotel_env(pfx + "COMPRESSION"),
+                request_timeout: rotel_env(pfx + "REQUEST_TIMEOUT"),
+                acks: rotel_env(pfx + "ACKS"),
+                client_id: rotel_env(pfx + "CLIENT_ID"),
+                max_message_bytes: as_int(rotel_env(pfx + "MAX_MESSAGE_BYTES")),
+                linger_ms: as_int(rotel_env(pfx + "LINGER_MS")),
+                retries: as_int(rotel_env(pfx + "RETRIES")),
+                retry_backoff_ms: as_int(rotel_env(pfx + "RETRY_BACKOFF_MS")),
+                retry_backoff_max_ms: as_int(rotel_env(pfx + "RETRY_BACKOFF_MAX_MS")),
+                message_timeout_ms: as_int(rotel_env(pfx + "MESSAGE_TIMEOUT_MS")),
+                request_timeout_ms: as_int(rotel_env(pfx + "REQUEST_TIMEOUT_MS")),
+                batch_size: as_int(rotel_env(pfx + "BATCH_SIZE")),
+                partitioner: rotel_env(pfx + "PARTITIONER"), // NOTE(review): was pfx + "REQUEST_TIMEOUT_MS" — copy/paste slip
+                partition_metrics_by_resource_attributes: as_bool(rotel_env(pfx + "PARTITION_METRICS_BY_RESOURCE_ATTRIBUTES")),
+                partition_logs_by_resource_attributes: as_bool(rotel_env(pfx + "PARTITION_LOGS_BY_RESOURCE_ATTRIBUTES")),
+                custom_config: rotel_env(pfx + "CUSTOM_CONFIG"),
+                sasl_username: rotel_env(pfx + "SASL_USERNAME"),
+                sasl_password: rotel_env(pfx + "SASL_PASSWORD"),
+                sasl_mechanism: rotel_env(pfx + "SASL_MECHANISM"),
+                security_protocol: rotel_env(pfx + "SECURITY_PROTOCOL"),
+            }
+            env.exporter = k; // NOTE(review): restored — without this the "kafka" branch discards its config (hunk count implies this line was lost in extraction)
         }
     }
 
@@ -259,6 +346,14 @@ export class Config {
             _type: "clickhouse",
             ...config
         }
+    }
+
+    static kafka_exporter(config?: Partial<KafkaExporter>): KafkaExporter {
+        return {
+            _type: "kafka",
+            security_protocol: "plaintext",
+            ...config
+        }
     }
 
     static _load_otlp_exporter_options_from_env(pfx: string, endpoint_type: string | null): OTLPExporter | OTLPExporterEndpoint | undefined {
@@ -404,6 +499,12 @@ export class Config {
             _set_clickhouse_exporter_agent_env(updates, pfx, c)
             return;
         }
+
+        if (expType == "kafka") {
+            const k: KafkaExporter = exporter as KafkaExporter;
+            _set_kafka_exporter_agent_env(updates, pfx, k)
+            return;
+        }
 
         //
         // Fall through to OTLP exporter
@@ -463,6 +564,9 @@
 function _set_blackhole_exporter_agent_env(updates: Record<string, string>, pfx: string | null, exporter: BlackholeExporter) {
     if (pfx === null) {
         pfx = "BLACKHOLE_EXPORTER_";
+        Object.assign(updates, {
+            "EXPORTER": "blackhole",
+        });
     }
     Object.assign(updates, {
         [pfx + "EXPORTER"]: "blackhole",
@@ -472,6 +576,9 @@
 function _set_datadog_exporter_agent_env(updates: Record<string, string>, pfx: string | null, exporter: DatadogExporter) {
     if (pfx === null) {
         pfx = "DATADOG_EXPORTER_";
+        Object.assign(updates, {
+            "EXPORTER": "datadog", // NOTE(review): was "clickhouse" — copy/paste slip; this function configures the Datadog exporter
+        });
     }
     Object.assign(updates, {
         [pfx + "EXPORTER"]: "datadog",
@@ -484,9 +591,9 @@
 function _set_clickhouse_exporter_agent_env(updates: Record<string, string>, pfx: string | null, exporter: ClickhouseExporter) {
     if (pfx === null) {
         pfx = "CLICKHOUSE_EXPORTER_"
-        updates.update({
+        Object.assign(updates, {
             "EXPORTER": "clickhouse",
-        })
+        });
     }
 
     Object.assign(updates, {
@@ -501,6 +608,43 @@ function _set_clickhouse_exporter_agent_env(updates: Record<string, string>, pfx: string | null, exporter: ClickhouseExporter) {
         [pfx + "USER"]: exporter.user,
         [pfx + "PASSWORD"]: exporter.password,
         [pfx + "ENABLE_JSON"]: exporter.enable_json,
     })
 }
 
+function _set_kafka_exporter_agent_env(updates: Record<string, string>, pfx: string | null, exporter: KafkaExporter) {
+    if (pfx === null) {
+        pfx = "KAFKA_EXPORTER_"
+        Object.assign(updates, {
+            "EXPORTER": "kafka",
+        });
+    }
+
+    Object.assign(updates, {
+        [pfx + "BROKERS"]: exporter.brokers,
+        [pfx + "TRACES_TOPIC"]: exporter.traces_topic,
+        [pfx + "METRICS_TOPIC"]:
exporter.metrics_topic, + [pfx + "LOGS_TOPIC"]: exporter.logs_topic, + [pfx + "FORMAT"]: exporter.format, + [pfx + "COMPRESSION"]: exporter.compression, + [pfx + "REQUEST_TIMEOUT"]: exporter.request_timeout, + [pfx + "ACKS"]: exporter.acks, + [pfx + "CLIENT_ID"]: exporter.client_id, + [pfx + "MAX_MESSAGE_BYTES"]: exporter.max_message_bytes, + [pfx + "LINGER_MS"]: exporter.linger_ms, + [pfx + "RETRIES"]: exporter.retries, + [pfx + "RETRY_BACKOFF_MS"]: exporter.retry_backoff_ms, + [pfx + "RETRY_BACKOFF_MAX_MS"]: exporter.retry_backoff_max_ms, + [pfx + "MESSAGE_TIMEOUT_MS"]: exporter.message_timeout_ms, + [pfx + "REQUEST_TIMEOUT_MS"]: exporter.request_timeout_ms, + [pfx + "BATCH_SIZE"]: exporter.batch_size, + [pfx + "PARTITIONER"]: exporter.partitioner, + [pfx + "PARTITION_METRICS_BY_RESOURCE_ATTRIBUTES"]: exporter.partition_metrics_by_resource_attributes, + [pfx + "PARTITION_LOGS_BY_RESOURCE_ATTRIBUTES"]: exporter.partition_logs_by_resource_attributes, + [pfx + "CUSTOM_CONFIG"]: exporter.custom_config, + [pfx + "SASL_USERNAME"]: exporter.sasl_username, + [pfx + "SASL_PASSWORD"]: exporter.sasl_password, + [pfx + "SASL_MECHANISM"]: exporter.sasl_mechanism, + [pfx + "SECURITY_PROTOCOL"]: exporter.security_protocol, + }) +} + function _set_otlp_exporter_agent_env(updates: Record, pfx: string | null, endpoint_type: string | null, exporter: OTLPExporter | OTLPExporterEndpoint | null): void { if (pfx === null) { pfx = "OTLP_EXPORTER_"; From cc5b7e181b5c53c009cf1bca58cab236d0853715 Mon Sep 17 00:00:00 2001 From: rjenkins Date: Fri, 25 Jul 2025 14:16:45 -0700 Subject: [PATCH 04/11] update rotel version --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4d29897..e387884 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,7 +20,7 @@ jobs: runner: ubuntu-latest, TOOLCHAIN: stable, TARGET: x86_64-unknown-linux-gnu, - ARTIFACT: 
rotel_v0.0.1-alpha1_x86_64-unknown-linux-gnu.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha22_x86_64-unknown-linux-gnu.tar.gz, } - { NAME: linux-arm64-glibc, @@ -28,7 +28,7 @@ jobs: runner: ubuntu-latest, TOOLCHAIN: stable, TARGET: aarch64-unknown-linux-gnu, - ARTIFACT: rotel_v0.0.1-alpha1_aarch64-unknown-linux-gnu.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha22_aarch64-unknown-linux-gnu.tar.gz, } - { NAME: darwin-arm64, @@ -36,7 +36,7 @@ jobs: runner: macos-latest, TOOLCHAIN: stable, TARGET: aarch64-apple-darwin, - ARTIFACT: rotel_v0.0.1-alpha1_aarch64-apple-darwin.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha22_aarch64-apple-darwin.tar.gz, } steps: - name: Checkout From e7e8c2c4cbdb8dafe115a392ce6aa0f268706cb3 Mon Sep 17 00:00:00 2001 From: rjenkins Date: Fri, 25 Jul 2025 14:20:45 -0700 Subject: [PATCH 05/11] remove debugging test --- npm/app/__tests__/config.ts | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/npm/app/__tests__/config.ts b/npm/app/__tests__/config.ts index 96307c3..164fce2 100644 --- a/npm/app/__tests__/config.ts +++ b/npm/app/__tests__/config.ts @@ -68,16 +68,6 @@ describe('configuration and validation', () => { expect(c?.tls_skip_verify).toBe(true); }); - it('Load DatadogExporter config from ENV', () => { - process.env.ROTEL_DATADOG_EXPORTER_REGION = "us1"; - process.env.ROTEL_DATADOG_EXPORTER_CUSTOM_ENDPOINT = "http://localhost:5555"; - process.env.ROTEL_DATADOG_EXPORTER_API_KEY = "123abc"; - let c = Config._load_datadog_exporter_options_from_env("DATADOG_EXPORTER_"); - expect(c.region).toBe("us1"); - expect(c.custom_endpoint).toBe("http://localhost:5555"); - expect(c.api_key).toBe("123abc"); - }); - it('fails validation', () => { process.env.ROTEL_ENABLED = "true"; const c1 = new Config(); From 63a625f409caa344d84503cceb21fc374eacdb9f Mon Sep 17 00:00:00 2001 From: rjenkins Date: Fri, 25 Jul 2025 21:29:52 -0700 Subject: [PATCH 06/11] README.md wip --- README.md | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff 
--git a/README.md b/README.md
index 4fcff8f..6efe906 100644
--- a/README.md
+++ b/README.md
@@ -34,16 +34,22 @@ In the startup section of your `index.js` or `index.ts` add the following code b
 ---
 
 ```javascript
-const { Rotel } = require("@streamfold/rotel");
+const { Rotel,Config } = require("@streamfold/rotel");
+const { Client } = require("@streamfold/rotel/client");
 
 const rotel = new Rotel({
   enabled: true,
-  exporter: {
-    endpoint: "https://foo.example.com",
-    headers: {
-      "x-api-key" : "xxxxx",
-    }
-  },
+  exporters: {
+    "otlp" : Config.otlp_exporter({
+      endpoint: "https://foo.example.com",
+      headers: {
+        "x-api-key": "xxxxx",
+      },
+    }),
+  },
+  exporters_traces: ["otlp"],
+│ exporters_metrics: ["otlp"],
+│ exporters_logs: ["otlp"],
 })
 rotel.start()
 ```
@@ -58,8 +64,12 @@ new Rotel().start();
 In your application deployment configuration, set the following environment variables. These match the typed configuration above:
 
 * `ROTEL_ENABLED=true`
-* `ROTEL_OTLP_EXPORTER_ENDPOINT=https://foo.example.com`
-* `ROTEL_OTLP_EXPORTER_CUSTOM_HEADERS=x-api-key={API_KEY}`
+* `ROTEL_EXPORTERS=otlp`
+* `ROTEL_EXPORTER_OTLP_ENDPOINT=https://foo.example.com`
+* `ROTEL_EXPORTER_OTLP_CUSTOM_HEADERS=x-api-key={API_KEY}`
+* `ROTEL_EXPORTERS_TRACES=otlp`
+* `ROTEL_EXPORTERS_METRICS=otlp`
+* `ROTEL_EXPORTERS_LOGS=otlp`
 
 Any typed configuration options will override environment variables of the same name.
@@ -77,7 +87,7 @@ To set the endpoint the OpenTelemetry SDK will use, set the following environmen
 
 ## Configuration
 
-This is the full list of options and their environment variable alternatives. Any defaults left blank in the table are either False or None.
+This is the full list of options and their environment variable alternatives. Any defaults left blank in the table are either False or None. 
| Option Name | Type | Environ | Default | Options | |--------------------------------|--------------|--------------------------------------|----------------------|-----------------| @@ -93,7 +103,7 @@ This is the full list of options and their environment variable alternatives. An | otlp_receiver_logs_disabled | boolean | ROTEL_OTLP_RECEIVER_LOGS_DISABLED | | | | exporter | OTLPExporter | | | | -The OTLPExporter can be enabled with the following options. +The OTLPExporter can be enabled with the following options. To construct an OTLP exporter, use the method `Config.otlp_exporter()` with the following options. | Option Name | Type | Environ | Default | Options | |------------------------|------------------------|--------------------------------------------|---------|--------------| From ab6104943649772f20434af5cb7d483f812fd145 Mon Sep 17 00:00:00 2001 From: rjenkins Date: Mon, 28 Jul 2025 09:19:42 -0700 Subject: [PATCH 07/11] update README.md --- README.md | 135 ++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 121 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 6efe906..f7b87e3 100644 --- a/README.md +++ b/README.md @@ -101,9 +101,16 @@ This is the full list of options and their environment variable alternatives. An | otlp_receiver_traces_disabled | boolean | ROTEL_OTLP_RECEIVER_TRACES_DISABLED | | | | otlp_receiver_metrics_disabled | boolean | ROTEL_OTLP_RECEIVER_METRICS_DISABLED | | | | otlp_receiver_logs_disabled | boolean | ROTEL_OTLP_RECEIVER_LOGS_DISABLED | | | -| exporter | OTLPExporter | | | | +| exporters | Map | | | | +| exporters_traces | string[] | ROTEL_EXPORTERS_TRACES | | | +| exporters_metrics | string[] | ROTEL_EXPORTERS_METRICS | | | +| exporters_logs | string[] | ROTEL_EXPORTERS_LOGS | | | -The OTLPExporter can be enabled with the following options. To construct an OTLP exporter, use the method `Config.otlp_exporter()` with the following options. 
+For each exporter you would like to use, see the configuration options below. Exporters should be assigned to the `exporters` object with a custom name. + +### OTLP Exporter + +To construct an OTLP exporter, use the method `Config.otlp_exporter()` with the following options. | Option Name | Type | Environ | Default | Options | |------------------------|------------------------|--------------------------------------------|---------|--------------| @@ -122,27 +129,122 @@ The OTLPExporter can be enabled with the following options. To construct an OTLP | tls_ca_file | string | ROTEL_OTLP_EXPORTER_TLS_CA_FILE | | | | tls_skip_verify | boolean | ROTEL_OTLP_EXPORTER_TLS_SKIP_VERIFY | | | +### Datadog Exporter + +Rotel provides an experimental [Datadog exporter](https://github.com/streamfold/rotel/blob/main/src/exporters/datadog/README.md) that supports traces at the moment. Construct a Datadog exporter with the method `Config.datadog_exporter()` using the following options. + +| Option Name | Type | Environ | Default | Options | +|------------------------|------------------------|--------------------------------------------|---------|------------------------| +| region | string | ROTEL_DATADOG_EXPORTER_REGION | us1 | us1, us3, us5, eu, ap1 | +| custom_endpoint | string | ROTEL_DATADOG_EXPORTER_CUSTOM_ENDPOINT | | | +| api_key | string | ROTEL_DATADOG_EXPORTER_API_KEY | | | + +### ClickHouse Exporter + +Rotel provides a ClickHouse exporter with support for metrics, logs, and traces. Construct a ClickHouse exporter with the method `Config.clickhouse_exporter()` using the following options. 
+ +| Option Name | Type | Environ | Default | Options | +|------------------------|------------------------|--------------------------------------------|---------|---------| +| endpoint | string | ROTEL_CLICKHOUSE_EXPORTER_ENDPOINT | | | +| database | string | ROTEL_CLICKHOUSE_EXPORTER_DATABASE | otel | | +| table_prefix | string | ROTEL_CLICKHOUSE_EXPORTER_TABLE_PREFIX | otel | | +| compression | string | ROTEL_CLICKHOUSE_EXPORTER_COMPRESSION | lz4 | | +| async_insert | boolean | ROTEL_CLICKHOUSE_EXPORTER_ASYNC_INSERT | true | | +| user | string | ROTEL_CLICKHOUSE_EXPORTER_USER | | | +| password | string | ROTEL_CLICKHOUSE_EXPORTER_PASSWORD | | | +| enable_json | boolean | ROTEL_CLICKHOUSE_EXPORTER_ENABLE_JSON | | | +| json_underscore | boolean | ROTEL_CLICKHOUSE_EXPORTER_JSON_UNDERSCORE | | | + +### Kafka Exporter + +Rotel provides a Kafka exporter with support for metrics, logs, and traces. Construct a Kafka exporter with the method `Config.kafka_exporter()` using the following options. 
+ +| Option Name | Type | Environ | Default | Options | +|--------------------------------------------|----------|--------------------------------------------------------------|-------------------|------------------------------------------------------------------------------| +| brokers | string[] | ROTEL_KAFKA_EXPORTER_BROKERS | localhost:9092 | | +| traces_topic | string | ROTEL_KAFKA_EXPORTER_TRACES_TOPIC | otlp_traces | | +| logs_topic | string | ROTEL_KAFKA_EXPORTER_LOGS_TOPIC | otlp_logs | | +| metrics_topic | string | ROTEL_KAFKA_EXPORTER_METRICS_TOPIC | otlp_metrics | | +| format | string | ROTEL_KAFKA_EXPORTER_FORMAT | protobuf | json, protobuf | +| compression | string | ROTEL_KAFKA_EXPORTER_COMPRESSION | none | gzip, snappy, lz4, zstd, none | +| acks | string | ROTEL_KAFKA_EXPORTER_ACKS | one | all, one, none | +| client_id | string | ROTEL_KAFKA_EXPORTER_CLIENT_ID | rotel | | +| max_message_bytes | number | ROTEL_KAFKA_EXPORTER_MAX_MESSAGE_BYTES | 1000000 | | +| linger_ms | number | ROTEL_KAFKA_EXPORTER_LINGER_MS | 5 | | +| retries | number | ROTEL_KAFKA_EXPORTER_RETRIES | 2147483647 | | +| retry_backoff_ms | number | ROTEL_KAFKA_EXPORTER_RETRY_BACKOFF_MS | 100 | | +| retry_backoff_max_ms | number | ROTEL_KAFKA_EXPORTER_RETRY_BACKOFF_MAX_MS | 1000 | | +| message_timeout_ms | number | ROTEL_KAFKA_EXPORTER_MESSAGE_TIMEOUT_MS | 300000 | | +| request_timeout_ms | number | ROTEL_KAFKA_EXPORTER_REQUEST_TIMEOUT_MS | 30000 | | +| batch_size | number | ROTEL_KAFKA_EXPORTER_BATCH_SIZE | 1000000 | | +| partitioner | string | ROTEL_KAFKA_EXPORTER_PARTITIONER | consistent-random | consistent, consistent-random, murmur2-random, murmur2, fnv1a, fnv1a-random | +| partition_metrics_by_resource_attributes | boolean | ROTEL_KAFKA_EXPORTER_PARTITION_METRICS_BY_RESOURCE_ATTRIBUTES | | | +| partition_logs_by_resource_attributes | boolean | ROTEL_KAFKA_EXPORTER_PARTITION_LOGS_BY_RESOURCE_ATTRIBUTES | | | +| custom_config | string | ROTEL_KAFKA_EXPORTER_CUSTOM_CONFIG | | | +| 
sasl_username | string | ROTEL_KAFKA_EXPORTER_SASL_USERNAME | | | +| sasl_password | string | ROTEL_KAFKA_EXPORTER_SASL_PASSWORD | | | +| sasl_mechanism | string | ROTEL_KAFKA_EXPORTER_SASL_MECHANISM | | | +| security_protocol | string | ROTEL_KAFKA_EXPORTER_SECURITY_PROTOCOL | PLAINTEXT | PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL | + +### Blackhole Exporter + +The Blackhole exporter is useful for testing purposes. It accepts telemetry data but does not forward it anywhere. Construct a Blackhole exporter with the method `Config.blackhole_exporter()`. This exporter has no configuration options. + +### Multiple Exporters + +Rotel supports [multiple exporters](https://rotel.dev/docs/configuration/multiple-exporters), allowing you to send data to different destinations per telemetry type. Just set the `exporters` entry to an object of exporter definitions and then configure the exporters per telemetry type. For example, this will send metrics and logs to an OTLP endpoint while sending traces to Datadog: + +```javascript +const { Rotel, Config } = require("@streamfold/rotel"); + +const rotel = new Rotel({ + enabled: true, + exporters: { + "logs_and_metrics": Config.otlp_exporter({ + endpoint: "https://foo.example.com", + headers: { + "x-api-key": process.env.API_KEY, + "x-data-set": "testing" + } + }), + "tracing": Config.datadog_exporter({ + api_key: "1234abcd", + }), + }, + // Define exporters per telemetry type + exporters_traces: ["tracing"], + exporters_metrics: ["logs_and_metrics"], + exporters_logs: ["logs_and_metrics"] +}); +rotel.start(); +``` + ### Endpoint overrides When using the OTLP exporter over HTTP, the exporter will append `/v1/traces`, `/v1/metrics`, or `/v1/logs` to the endpoint URL for traces, metrics, and logs respectively. If the service you are exporting telemetry data to does not support these standard URL paths, you can individually override them for traces, metrics, and logs. 
For example, to override the endpoint for traces and metrics you can do the following: ```javascript -const { Rotel } = require("@streamfold/rotel"); +const { Rotel, Config } = require("@streamfold/rotel"); const rotel = new Rotel({ enabled: true, - exporter: { + exporters: { + "otlp": Config.otlp_exporter({ headers: { - "x-api-key" : "xxxxx", + "x-api-key": "xxxxx", }, traces: { endpoint: "http://foo.example.com:4318/api/otlp/traces", }, metrics: { - endpoint = "http://foo.example.com:4318/api/otlp/metrics", + endpoint: "http://foo.example.com:4318/api/otlp/metrics", } + }) }, + exporters_traces: ["otlp"], + exporters_metrics: ["otlp"], + exporters_logs: ["otlp"] }); rotel.start(); ``` @@ -179,7 +281,7 @@ The code sample depends on the following environment variables: * `AXIOM_API_TOKEN`: Set to an API token that has access to the Axiom dataset ```javascript -const { Rotel } = require("@streamfold/rotel"); +const { Rotel, Config } = require("@streamfold/rotel"); const { NodeTracerProvider } = require('@opentelemetry/sdk-trace-node'); const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-grpc'); @@ -191,14 +293,19 @@ const { resourceFromAttributes } = require('@opentelemetry/resources'); function initRotel() { const rotel = new Rotel({ enabled: true, - exporter: { - endpoint: "https://api.axiom.co", - protocol: "http", - headers: { - "Authorization": "Bearer " + process.env.AXIOM_API_TOKEN, - "X-Axiom-Dataset": process.env.AXIOM_DATASET - } + exporters: { + "axiom": Config.otlp_exporter({ + endpoint: "https://api.axiom.co", + protocol: "http", + headers: { + "Authorization": "Bearer " + process.env.AXIOM_API_TOKEN, + "X-Axiom-Dataset": process.env.AXIOM_DATASET + } + }) }, + exporters_traces: ["axiom"], + exporters_metrics: ["axiom"], + exporters_logs: ["axiom"] }) return rotel; } From 36af754cedde4b515300ac2fc47d47fbcacf6b40 Mon Sep 17 00:00:00 2001 From: Ray Jenkins Date: Mon, 28 Jul 2025 10:32:06 -0700 Subject: [PATCH 08/11] Update README.md 
Co-authored-by: Mike Heffner --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f7b87e3..46c3f4d 100644 --- a/README.md +++ b/README.md @@ -48,8 +48,8 @@ const rotel = new Rotel({ }), }, exporters_traces: ["otlp"], -│ exporters_metrics: ["otlp"], -│ exporters_logs: ["otlp"], + exporters_metrics: ["otlp"], + exporters_logs: ["otlp"], }) rotel.start() ``` From 1033c05b8891ec287b28e795536e53826fc6019f Mon Sep 17 00:00:00 2001 From: rjenkins Date: Mon, 28 Jul 2025 11:12:04 -0700 Subject: [PATCH 09/11] remove environ columns --- README.md | 116 +++++++++++++++++++++++++++--------------------------- 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/README.md b/README.md index f7b87e3..9a96695 100644 --- a/README.md +++ b/README.md @@ -112,79 +112,79 @@ For each exporter you would like to use, see the configuration options below. Ex To construct an OTLP exporter, use the method `Config.otlp_exporter()` with the following options. 
-| Option Name | Type | Environ | Default | Options | -|------------------------|------------------------|--------------------------------------------|---------|--------------| -| endpoint | string | ROTEL_OTLP_EXPORTER_ENDPOINT | | | -| protocol | string | ROTEL_OTLP_EXPORTER_PROTOCOL | grpc | grpc or http | -| headers | Map | ROTEL_OTLP_EXPORTER_CUSTOM_HEADERS | | | -| compression | string | ROTEL_OTLP_EXPORTER_COMPRESSION | gzip | gzip or none | -| request_timeout | string | ROTEL_OTLP_EXPORTER_REQUEST_TIMEOUT | 5s | | -| retry_initial_backoff | string | ROTEL_OTLP_EXPORTER_RETRY_INITIAL_BACKOFF | 5s | | -| retry_max_backoff | string | ROTEL_OTLP_EXPORTER_RETRY_MAX_BACKOFF | 30s | | -| retry_max_elapsed_time | string | ROTEL_OTLP_EXPORTER_RETRY_MAX_ELAPSED_TIME | 300s | | -| batch_max_size | number | ROTEL_OTLP_EXPORTER_BATCH_MAX_SIZE | 8192 | | -| batch_timeout | string | ROTEL_OTLP_EXPORTER_BATCH_TIMEOUT | 200ms | | -| tls_cert_file | string | ROTEL_OTLP_EXPORTER_TLS_CERT_FILE | | | -| tls_key_file | string | ROTEL_OTLP_EXPORTER_TLS_KEY_FILE | | | -| tls_ca_file | string | ROTEL_OTLP_EXPORTER_TLS_CA_FILE | | | -| tls_skip_verify | boolean | ROTEL_OTLP_EXPORTER_TLS_SKIP_VERIFY | | | +| Option Name | Type | Default | Options | +|------------------------|------------------------|---------|--------------| +| endpoint | string | | | +| protocol | string | grpc | grpc or http | +| headers | Map | | | +| compression | string | gzip | gzip or none | +| request_timeout | string | 5s | | +| retry_initial_backoff | string | 5s | | +| retry_max_backoff | string | 30s | | +| retry_max_elapsed_time | string | 300s | | +| batch_max_size | number | 8192 | | +| batch_timeout | string | 200ms | | +| tls_cert_file | string | | | +| tls_key_file | string | | | +| tls_ca_file | string | | | +| tls_skip_verify | boolean | | | ### Datadog Exporter Rotel provides an experimental [Datadog exporter](https://github.com/streamfold/rotel/blob/main/src/exporters/datadog/README.md) that 
supports traces at the moment. Construct a Datadog exporter with the method `Config.datadog_exporter()` using the following options. -| Option Name | Type | Environ | Default | Options | -|------------------------|------------------------|--------------------------------------------|---------|------------------------| -| region | string | ROTEL_DATADOG_EXPORTER_REGION | us1 | us1, us3, us5, eu, ap1 | -| custom_endpoint | string | ROTEL_DATADOG_EXPORTER_CUSTOM_ENDPOINT | | | -| api_key | string | ROTEL_DATADOG_EXPORTER_API_KEY | | | +| Option Name | Type | Default | Options | +|------------------------|------------------------|---------|------------------------| +| region | string | us1 | us1, us3, us5, eu, ap1 | +| custom_endpoint | string | | | +| api_key | string | | | ### ClickHouse Exporter Rotel provides a ClickHouse exporter with support for metrics, logs, and traces. Construct a ClickHouse exporter with the method `Config.clickhouse_exporter()` using the following options. -| Option Name | Type | Environ | Default | Options | -|------------------------|------------------------|--------------------------------------------|---------|---------| -| endpoint | string | ROTEL_CLICKHOUSE_EXPORTER_ENDPOINT | | | -| database | string | ROTEL_CLICKHOUSE_EXPORTER_DATABASE | otel | | -| table_prefix | string | ROTEL_CLICKHOUSE_EXPORTER_TABLE_PREFIX | otel | | -| compression | string | ROTEL_CLICKHOUSE_EXPORTER_COMPRESSION | lz4 | | -| async_insert | boolean | ROTEL_CLICKHOUSE_EXPORTER_ASYNC_INSERT | true | | -| user | string | ROTEL_CLICKHOUSE_EXPORTER_USER | | | -| password | string | ROTEL_CLICKHOUSE_EXPORTER_PASSWORD | | | -| enable_json | boolean | ROTEL_CLICKHOUSE_EXPORTER_ENABLE_JSON | | | -| json_underscore | boolean | ROTEL_CLICKHOUSE_EXPORTER_JSON_UNDERSCORE | | | +| Option Name | Type | Default | Options | +|------------------------|------------------------|---------|---------| +| endpoint | string | | | +| database | string | otel | | +| table_prefix | string 
| otel | | +| compression | string | lz4 | | +| async_insert | boolean | true | | +| user | string | | | +| password | string | | | +| enable_json | boolean | | | +| json_underscore | boolean | | | ### Kafka Exporter Rotel provides a Kafka exporter with support for metrics, logs, and traces. Construct a Kafka exporter with the method `Config.kafka_exporter()` using the following options. -| Option Name | Type | Environ | Default | Options | -|--------------------------------------------|----------|--------------------------------------------------------------|-------------------|------------------------------------------------------------------------------| -| brokers | string[] | ROTEL_KAFKA_EXPORTER_BROKERS | localhost:9092 | | -| traces_topic | string | ROTEL_KAFKA_EXPORTER_TRACES_TOPIC | otlp_traces | | -| logs_topic | string | ROTEL_KAFKA_EXPORTER_LOGS_TOPIC | otlp_logs | | -| metrics_topic | string | ROTEL_KAFKA_EXPORTER_METRICS_TOPIC | otlp_metrics | | -| format | string | ROTEL_KAFKA_EXPORTER_FORMAT | protobuf | json, protobuf | -| compression | string | ROTEL_KAFKA_EXPORTER_COMPRESSION | none | gzip, snappy, lz4, zstd, none | -| acks | string | ROTEL_KAFKA_EXPORTER_ACKS | one | all, one, none | -| client_id | string | ROTEL_KAFKA_EXPORTER_CLIENT_ID | rotel | | -| max_message_bytes | number | ROTEL_KAFKA_EXPORTER_MAX_MESSAGE_BYTES | 1000000 | | -| linger_ms | number | ROTEL_KAFKA_EXPORTER_LINGER_MS | 5 | | -| retries | number | ROTEL_KAFKA_EXPORTER_RETRIES | 2147483647 | | -| retry_backoff_ms | number | ROTEL_KAFKA_EXPORTER_RETRY_BACKOFF_MS | 100 | | -| retry_backoff_max_ms | number | ROTEL_KAFKA_EXPORTER_RETRY_BACKOFF_MAX_MS | 1000 | | -| message_timeout_ms | number | ROTEL_KAFKA_EXPORTER_MESSAGE_TIMEOUT_MS | 300000 | | -| request_timeout_ms | number | ROTEL_KAFKA_EXPORTER_REQUEST_TIMEOUT_MS | 30000 | | -| batch_size | number | ROTEL_KAFKA_EXPORTER_BATCH_SIZE | 1000000 | | -| partitioner | string | ROTEL_KAFKA_EXPORTER_PARTITIONER | consistent-random | 
consistent, consistent-random, murmur2-random, murmur2, fnv1a, fnv1a-random | -| partition_metrics_by_resource_attributes | boolean | ROTEL_KAFKA_EXPORTER_PARTITION_METRICS_BY_RESOURCE_ATTRIBUTES | | | -| partition_logs_by_resource_attributes | boolean | ROTEL_KAFKA_EXPORTER_PARTITION_LOGS_BY_RESOURCE_ATTRIBUTES | | | -| custom_config | string | ROTEL_KAFKA_EXPORTER_CUSTOM_CONFIG | | | -| sasl_username | string | ROTEL_KAFKA_EXPORTER_SASL_USERNAME | | | -| sasl_password | string | ROTEL_KAFKA_EXPORTER_SASL_PASSWORD | | | -| sasl_mechanism | string | ROTEL_KAFKA_EXPORTER_SASL_MECHANISM | | | -| security_protocol | string | ROTEL_KAFKA_EXPORTER_SECURITY_PROTOCOL | PLAINTEXT | PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL | +| Option Name | Type | Default | Options | +|--------------------------------------------|----------|-------------------|------------------------------------------------------------------------------| +| brokers | string[] | localhost:9092 | | +| traces_topic | string | otlp_traces | | +| logs_topic | string | otlp_logs | | +| metrics_topic | string | otlp_metrics | | +| format | string | protobuf | json, protobuf | +| compression | string | none | gzip, snappy, lz4, zstd, none | +| acks | string | one | all, one, none | +| client_id | string | rotel | | +| max_message_bytes | number | 1000000 | | +| linger_ms | number | 5 | | +| retries | number | 2147483647 | | +| retry_backoff_ms | number | 100 | | +| retry_backoff_max_ms | number | 1000 | | +| message_timeout_ms | number | 300000 | | +| request_timeout_ms | number | 30000 | | +| batch_size | number | 1000000 | | +| partitioner | string | consistent-random | consistent, consistent-random, murmur2-random, murmur2, fnv1a, fnv1a-random | +| partition_metrics_by_resource_attributes | boolean | | | +| partition_logs_by_resource_attributes | boolean | | | +| custom_config | string | | | +| sasl_username | string | | | +| sasl_password | string | | | +| sasl_mechanism | string | | | +| security_protocol | 
string | PLAINTEXT | PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL | ### Blackhole Exporter From 7d571e1581b629dfbbbdc85992ec3566302574dd Mon Sep 17 00:00:00 2001 From: rjenkins Date: Mon, 28 Jul 2025 11:16:13 -0700 Subject: [PATCH 10/11] add json_underscore to clickhouse options --- npm/app/config.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/npm/app/config.ts b/npm/app/config.ts index 74762ed..7b72f31 100644 --- a/npm/app/config.ts +++ b/npm/app/config.ts @@ -50,6 +50,7 @@ export interface ClickhouseExporter { user?: string password?: string enable_json?: boolean + json_underscore?: boolean } export interface KafkaExporter { @@ -186,6 +187,7 @@ export class Config { user: rotel_env(pfx + "USER"), password: rotel_env(pfx + "PASSWORD"), enable_json: as_bool(rotel_env(pfx + "ENABLE_JSON")), + json_underscore: as_bool(rotel_env(pfx + "JSON_UNDERSCORE")), } exporter = clickhouseExporter; case "kafka": @@ -275,6 +277,7 @@ export class Config { user: rotel_env(pfx + "USER"), password: rotel_env(pfx + "PASSWORD"), enable_json: as_bool(rotel_env(pfx + "ENABLE_JSON")), + json_underscore: as_bool(rotel_env(pfx + "JSON_UNDERSCORE")), } env.exporter = c; } else if(exporter_type === "kafka") { @@ -605,6 +608,7 @@ function _set_clickhouse_exporter_agent_env(updates: Record, pfx: s [pfx + "USER"]: exporter.user, [pfx + "PASSWORD"]: exporter.password, [pfx + "ENABLE_JSON"]: exporter.enable_json, + [pfx + "JSON_UNDERSCORE"]: exporter.json_underscore, }) } From 0ab3247b17a9ae5bc6edaeaa3d622e9114f24bf3 Mon Sep 17 00:00:00 2001 From: rjenkins Date: Mon, 28 Jul 2025 11:20:18 -0700 Subject: [PATCH 11/11] bump to rotel alpha23 --- .github/workflows/build.yml | 8 ++++---- .github/workflows/release.yml | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e387884..1418223 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,7 +4,7 @@ on: push: env: - ROTEL_VERSION: 
"tags/v0.0.1-alpha22" + ROTEL_VERSION: "tags/v0.0.1-alpha23" jobs: publish-npm-binaries: @@ -20,7 +20,7 @@ jobs: runner: ubuntu-latest, TOOLCHAIN: stable, TARGET: x86_64-unknown-linux-gnu, - ARTIFACT: rotel_v0.0.1-alpha22_x86_64-unknown-linux-gnu.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha23_x86_64-unknown-linux-gnu.tar.gz, } - { NAME: linux-arm64-glibc, @@ -28,7 +28,7 @@ jobs: runner: ubuntu-latest, TOOLCHAIN: stable, TARGET: aarch64-unknown-linux-gnu, - ARTIFACT: rotel_v0.0.1-alpha22_aarch64-unknown-linux-gnu.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha23_aarch64-unknown-linux-gnu.tar.gz, } - { NAME: darwin-arm64, @@ -36,7 +36,7 @@ jobs: runner: macos-latest, TOOLCHAIN: stable, TARGET: aarch64-apple-darwin, - ARTIFACT: rotel_v0.0.1-alpha22_aarch64-apple-darwin.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha23_aarch64-apple-darwin.tar.gz, } steps: - name: Checkout diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 74f8525..3f1278e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,7 +6,7 @@ on: - "v*.*.*" env: - ROTEL_VERSION: "tags/v0.0.1-alpha1" + ROTEL_VERSION: "tags/v0.0.1-alpha23" jobs: publish-npm-binaries: @@ -22,7 +22,7 @@ jobs: runner: ubuntu-latest, TOOLCHAIN: stable, TARGET: x86_64-unknown-linux-gnu, - ARTIFACT: rotel_v0.0.1-alpha1_x86_64-unknown-linux-gnu.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha23_x86_64-unknown-linux-gnu.tar.gz, } - { NAME: linux-arm64-glibc, @@ -30,7 +30,7 @@ jobs: runner: ubuntu-latest, TOOLCHAIN: stable, TARGET: aarch64-unknown-linux-gnu, - ARTIFACT: rotel_v0.0.1-alpha1_aarch64-unknown-linux-gnu.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha23_aarch64-unknown-linux-gnu.tar.gz, } - { NAME: darwin-arm64, @@ -38,7 +38,7 @@ jobs: runner: macos-latest, TOOLCHAIN: stable, TARGET: aarch64-apple-darwin, - ARTIFACT: rotel_v0.0.1-alpha1_aarch64-apple-darwin.tar.gz, + ARTIFACT: rotel_v0.0.1-alpha23_aarch64-apple-darwin.tar.gz, } steps: - name: Checkout