24 changes: 23 additions & 1 deletion pom.xml
@@ -148,7 +148,29 @@
<groupId>io.projectreactor.addons</groupId>
<artifactId>reactor-extra</artifactId>
</dependency>


<!-- Start - Dependencies for OpenTelemetry Lightstep -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>31.0.1-jre</version>
</dependency>
<dependency>
<groupId>com.lightstep.opentelemetry</groupId>
<artifactId>opentelemetry-launcher</artifactId>
<version>1.5.0</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-api</artifactId>
<version>1.7.1</version>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-extension-annotations</artifactId>
<version>1.7.1</version>
</dependency>
<!-- End - Dependencies for OpenTelemetry Lightstep -->
</dependencies>

<!-- <build> -->
31 changes: 31 additions & 0 deletions src/main/java/com/uci/utils/UtilAppConfiguration.java
@@ -12,6 +12,10 @@

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.lightstep.opentelemetry.launcher.OpenTelemetryConfiguration;

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Tracer;

@Configuration
@EnableAutoConfiguration
@@ -28,6 +32,21 @@ public class UtilAppConfiguration {

@Value("${caffeine.cache.exprie.duration.seconds}")
public Integer cacheExpireDuration;

@Value("${opentelemetry.lightstep.service}")
private String lightstepService;

@Value("${opentelemetry.lightstep.access.token}")
private String lightstepAccessToken;

@Value("${opentelemetry.lightstep.end.point}")
private String lightstepEndPoint;

@Value("${opentelemetry.lightstep.tracer}")
private String lightstepTracer;

@Value("${opentelemetry.lightstep.tracer.version}")
private String lightstepTracerVersion;

public Caffeine<Object, Object> caffeineCacheBuilder() {
return Caffeine.newBuilder()
@@ -45,5 +64,17 @@ public Cache<Object, Object> cache() {
public WebClient getWebClient() {
return WebClient.builder().baseUrl(CAMPAIGN_URL).defaultHeader("admin-token", CAMPAIGN_ADMIN_TOKEN).build();
}

@Bean
public Tracer openTelemetryTracer() {
/* Configure and install the Lightstep OpenTelemetry launcher, then expose a tracer for this service */
OpenTelemetryConfiguration.newBuilder()
.setServiceName(lightstepService)
.setAccessToken(lightstepAccessToken)
.setTracesEndpoint(lightstepEndPoint)
.install();

return GlobalOpenTelemetry.getTracer(lightstepTracer, lightstepTracerVersion);
}
}
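The five @Value bindings above need matching entries in the Spring configuration. A minimal sketch of what those application.properties entries might look like; only the property keys come from this PR, while the service name, token reference, endpoint, and tracer values shown here are placeholders:

# Placeholder values; only the keys are taken from the @Value bindings above
opentelemetry.lightstep.service=uci-utils
opentelemetry.lightstep.access.token=${LIGHTSTEP_ACCESS_TOKEN}
opentelemetry.lightstep.end.point=https://ingest.lightstep.com:443
opentelemetry.lightstep.tracer=uci-utils-tracer
opentelemetry.lightstep.tracer.version=1.0.0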
63 changes: 63 additions & 0 deletions src/main/java/com/uci/utils/kafka/RecordProducer.java
@@ -0,0 +1,63 @@
package com.uci.utils.kafka;

import com.uci.utils.kafka.adapter.TextMapSetterAdapter;

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.context.Context;
import lombok.extern.slf4j.Slf4j;

import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.concurrent.ListenableFutureCallback;

import java.util.Arrays;
import java.util.List;

import javax.validation.constraints.NotNull;

@Service
@Slf4j
public class RecordProducer {

private final KafkaTemplate<String, String> producer;

public RecordProducer(KafkaTemplate<String, String> producer) {
this.producer = producer;
}

public void send(String topic, String message, Context currentContext) {
List<Header> headers = Arrays.asList();
ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, "", message, headers);
/* Propagate the current OpenTelemetry context by injecting it into the Kafka record headers */
GlobalOpenTelemetry.getPropagators().getTextMapPropagator().inject(currentContext, record.headers(), TextMapSetterAdapter.setter);

producer
.send(record)
.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
@Override
public void onFailure(@NotNull Throwable throwable) {
log.error("Unable to push {} to {} topic due to {}", message, topic, throwable.getMessage());
}

@Override
public void onSuccess(SendResult<String, String> stringStringSendResult) {
log.info("Pushed to topic {}", topic);
}
});
}
}
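For context, a rough sketch of how a caller might wire this producer together with the Tracer bean from UtilAppConfiguration; the class, topic, and span names below are hypothetical and not part of this PR:

import com.uci.utils.kafka.RecordProducer;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import org.springframework.stereotype.Service;

@Service
public class OutboundMessageForwarder { // hypothetical caller, not part of this PR
    private final Tracer tracer;
    private final RecordProducer recordProducer;

    public OutboundMessageForwarder(Tracer tracer, RecordProducer recordProducer) {
        this.tracer = tracer;
        this.recordProducer = recordProducer;
    }

    public void forward(String payload) {
        // Start a span so there is an active trace to hand to the producer
        Span span = tracer.spanBuilder("forward-to-kafka").startSpan();
        try (Scope scope = span.makeCurrent()) {
            // Context.current() now carries this span; RecordProducer injects it into the Kafka headers
            recordProducer.send("com.example.outbound", payload, Context.current());
        } finally {
            span.end();
        }
    }
}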
src/main/java/com/uci/utils/kafka/adapter/TextMapGetterAdapter.java
@@ -0,0 +1,36 @@
package com.uci.utils.kafka.adapter;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.Header;

import io.opentelemetry.context.propagation.TextMapGetter;

public class TextMapGetterAdapter {
/* Reads propagation headers (e.g. traceparent) back out of Kafka record headers on the consumer side */
public static TextMapGetter<Headers> getter = new TextMapGetter<Headers>() {
@Override
public String get(Headers headers, String key) {
Header header = headers.lastHeader(key);
if (header == null) {
return null;
}
byte[] value = header.value();
if (value == null) {
return null;
}
return new String(value, StandardCharsets.UTF_8);
}

@Override
public Iterable<String> keys(Headers headers) {
List<String> keyset = new ArrayList<>();
headers.forEach(h -> {
String key = h.key();
keyset.add(key);
});
return keyset;
}
};
}
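This getter is the consumer-side counterpart of TextMapSetterAdapter: it lets the propagator read the headers that RecordProducer injected, so the consumer span can join the same trace. A rough sketch of a listener using it; the listener class, topic, and span name are hypothetical and not part of this PR:

import com.uci.utils.kafka.adapter.TextMapGetterAdapter;
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;

@Service
public class InboundMessageListener { // hypothetical consumer, not part of this PR
    private final Tracer tracer;

    public InboundMessageListener(Tracer tracer) {
        this.tracer = tracer;
    }

    @KafkaListener(topics = "com.example.outbound")
    public void onMessage(ConsumerRecord<String, String> record) {
        // Rebuild the producer's context from the Kafka record headers
        Context extracted = GlobalOpenTelemetry.getPropagators().getTextMapPropagator()
                .extract(Context.current(), record.headers(), TextMapGetterAdapter.getter);
        // Start a consumer span as a child of the propagated context
        Span span = tracer.spanBuilder("process-message")
                .setParent(extracted)
                .setSpanKind(SpanKind.CONSUMER)
                .startSpan();
        try (Scope scope = span.makeCurrent()) {
            // handle record.value() here
        } finally {
            span.end();
        }
    }
}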
src/main/java/com/uci/utils/kafka/adapter/TextMapSetterAdapter.java
@@ -0,0 +1,16 @@
package com.uci.utils.kafka.adapter;

import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.header.Headers;

import io.opentelemetry.context.propagation.TextMapSetter;

public class TextMapSetterAdapter {
public static TextMapSetter<Headers> setter = new TextMapSetter<Headers>() {
@Override
public void set(Headers headers, String key, String value) {
/* Remove any existing header with the same key first so repeated injection does not duplicate it */
headers.remove(key).add(key, value.getBytes(StandardCharsets.UTF_8));
}
};
}
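Taken together, the two adapters can be sanity-checked without a broker by injecting into and extracting from an in-memory RecordHeaders instance. A rough sketch, assuming the W3C trace context propagator (the default once the OpenTelemetry SDK is installed); nothing here is part of the PR itself:

import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.header.internals.RecordHeaders;

import com.uci.utils.kafka.adapter.TextMapGetterAdapter;
import com.uci.utils.kafka.adapter.TextMapSetterAdapter;

import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator;
import io.opentelemetry.context.Context;

public class PropagationRoundTrip {
    public static void main(String[] args) {
        W3CTraceContextPropagator propagator = W3CTraceContextPropagator.getInstance();

        // Simulate headers a producer would have written: the W3C example traceparent value
        RecordHeaders incoming = new RecordHeaders();
        incoming.add("traceparent",
                "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01".getBytes(StandardCharsets.UTF_8));

        // Consumer side: TextMapGetterAdapter reads the header back into a Context
        Context extracted = propagator.extract(Context.current(), incoming, TextMapGetterAdapter.getter);
        System.out.println("trace id: " + Span.fromContext(extracted).getSpanContext().getTraceId());

        // Producer side: TextMapSetterAdapter writes that context into fresh headers
        RecordHeaders outgoing = new RecordHeaders();
        propagator.inject(extracted, outgoing, TextMapSetterAdapter.setter);
        System.out.println("re-injected: " + new String(outgoing.lastHeader("traceparent").value(), StandardCharsets.UTF_8));
    }
}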