diff --git a/apps/opik-backend/entrypoint.sh b/apps/opik-backend/entrypoint.sh
index 140cc93987..3dbb72594b 100644
--- a/apps/opik-backend/entrypoint.sh
+++ b/apps/opik-backend/entrypoint.sh
@@ -5,12 +5,13 @@ echo $(pwd)
jwebserver -d /opt/opik/redoc -b 0.0.0.0 -p 3003 &
echo "OPIK_VERSION=$OPIK_VERSION"
-echo "NEW_RELIC_ENABLED=$NEW_RELIC_ENABLED"
-echo "NEW_RELIC_VERSION=$NEW_RELIC_VERSION"
+echo "OPIK_OTEL_SDK_ENABLED=$OPIK_OTEL_SDK_ENABLED"
+echo "OTEL_VERSION=$OTEL_VERSION"
-if [[ "${NEW_RELIC_ENABLED}" == "true" && "${NEW_RELIC_LICENSE_KEY}" != "" ]];then
- curl -o /tmp/newrelic-agent.jar https://download.newrelic.com/newrelic/java-agent/newrelic-agent/${NEW_RELIC_VERSION}/newrelic-agent-${NEW_RELIC_VERSION}.jar
- JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/newrelic-agent.jar"
+if [[ "${OPIK_OTEL_SDK_ENABLED}" == "true" && "${OTEL_VERSION}" != "" && "${OTEL_EXPORTER_OTLP_ENDPOINT}" != "" ]];then
+ export OTEL_RESOURCE_ATTRIBUTES="service.name=opik-backend,service.version=${OPIK_VERSION}"
+ curl -f -L -o /tmp/opentelemetry-javaagent.jar https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v${OTEL_VERSION}/opentelemetry-javaagent.jar
+ JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/opentelemetry-javaagent.jar"
fi
# Check if ENABLE_VIRTUAL_THREADS is set to true
diff --git a/apps/opik-backend/pom.xml b/apps/opik-backend/pom.xml
index 4ed764fdff..c79b4ce5d5 100644
--- a/apps/opik-backend/pom.xml
+++ b/apps/opik-backend/pom.xml
@@ -30,6 +30,7 @@
5.1.0
3.9.1
3.34.1
+ 2.8.0
2.25.70
com.comet.opik.OpikApplication
@@ -62,9 +63,14 @@
- com.newrelic.agent.java
- newrelic-api
- 8.14.0
+ io.opentelemetry.instrumentation
+ opentelemetry-instrumentation-annotations
+ ${opentelmetry.version}
+
+
+ io.opentelemetry.instrumentation
+ opentelemetry-r2dbc-1.0
+ ${opentelmetry.version}-alpha
software.amazon.awssdk
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java
index 026ee5f77e..20317ad3c2 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java
@@ -12,8 +12,7 @@
import com.comet.opik.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.inject.ImplementedBy;
-import com.newrelic.api.agent.Segment;
-import com.newrelic.api.agent.Trace;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.Result;
import io.r2dbc.spi.Statement;
@@ -40,6 +39,8 @@
import static com.comet.opik.api.DatasetItem.DatasetItemPage;
import static com.comet.opik.domain.AsyncContextUtils.bindWorkspaceIdToFlux;
+import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.Segment;
+import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.endSegment;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.startSegment;
import static com.comet.opik.utils.AsyncUtils.makeFluxContextAware;
import static com.comet.opik.utils.AsyncUtils.makeMonoContextAware;
@@ -408,7 +409,7 @@ LEFT JOIN (
private final @NonNull FilterQueryBuilder filterQueryBuilder;
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono save(@NonNull UUID datasetId, @NonNull List items) {
if (items.isEmpty()) {
@@ -455,7 +456,7 @@ private Mono mapAndInsert(
return Flux.from(statement.execute())
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
- .doFinally(signalType -> segment.end());
+ .doFinally(signalType -> endSegment(segment));
});
}
@@ -550,7 +551,7 @@ private List getFeedbackScores(Object feedbackScoresRaw) {
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono get(@NonNull UUID id) {
return asyncTemplate.nonTransaction(connection -> {
@@ -560,14 +561,14 @@ public Mono get(@NonNull UUID id) {
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_item");
return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
- .doFinally(signalType -> segment.end())
+ .doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.singleOrEmpty();
});
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Flux getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
log.info("Getting dataset items by datasetId '{}', limit '{}', lastRetrievedId '{}'",
datasetId, limit, lastRetrievedId);
@@ -591,13 +592,13 @@ public Flux getItems(@NonNull UUID datasetId, int limit, UUID lastR
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_items_stream");
return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
- .doFinally(signalType -> segment.end())
+ .doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem);
});
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono> getDatasetItemWorkspace(@NonNull Set datasetItemIds) {
if (datasetItemIds.isEmpty()) {
@@ -618,7 +619,7 @@ public Mono> getDatasetItemWorkspace(@NonNull Set delete(@NonNull List ids) {
if (ids.isEmpty()) {
return Mono.empty();
@@ -633,7 +634,7 @@ public Mono delete(@NonNull List ids) {
return bindAndDelete(ids, statement)
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
- .doFinally(signalType -> segment.end());
+ .doFinally(signalType -> endSegment(segment));
});
}
@@ -645,7 +646,7 @@ private Flux extends Result> bindAndDelete(List ids, Statement statement
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono getItems(@NonNull UUID datasetId, int page, int size) {
Segment segmentCount = startSegment("dataset_items", "Clickhouse", "select_dataset_items_page_count");
@@ -656,7 +657,7 @@ public Mono getItems(@NonNull UUID datasetId, int page, int siz
.bind("datasetId", datasetId)
.bind("workspace_id", workspaceId)
.execute())
- .doFinally(signalType -> segmentCount.end())
+ .doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
@@ -672,7 +673,7 @@ public Mono getItems(@NonNull UUID datasetId, int page, int siz
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)))
- .doFinally(signalType -> segment.end());
+ .doFinally(signalType -> endSegment(segment));
})));
}
@@ -705,7 +706,7 @@ private void bindSearchCriteria(DatasetItemSearchCriteria datasetItemSearchCrite
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono getItems(
@NonNull DatasetItemSearchCriteria datasetItemSearchCriteria, int page, int size) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
@@ -725,7 +726,7 @@ public Mono getItems(
bindSearchCriteria(datasetItemSearchCriteria, statement);
return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
- .doFinally(signalType -> segmentCount.end())
+ .doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
@@ -744,7 +745,7 @@ public Mono getItems(
bindSearchCriteria(datasetItemSearchCriteria, selectStatement);
return makeFluxContextAware(bindWorkspaceIdToFlux(selectStatement))
- .doFinally(signalType -> segment.end())
+ .doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)));
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemService.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemService.java
index e1a2ec04d2..d69a09c566 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemService.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemService.java
@@ -9,7 +9,7 @@
import com.comet.opik.api.error.IdentifierMismatchException;
import com.comet.opik.infrastructure.auth.RequestContext;
import com.google.inject.ImplementedBy;
-import com.newrelic.api.agent.Trace;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.ws.rs.ClientErrorException;
@@ -57,7 +57,7 @@ class DatasetItemServiceImpl implements DatasetItemService {
private final @NonNull SpanService spanService;
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono save(@NonNull DatasetItemBatch batch) {
if (batch.datasetId() == null && batch.datasetName() == null) {
return Mono.error(failWithError("dataset_id or dataset_name must be provided"));
@@ -102,14 +102,14 @@ private ClientErrorException newConflict(String error) {
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono get(@NonNull UUID id) {
return dao.get(id)
.switchIfEmpty(Mono.defer(() -> Mono.error(failWithNotFound("Dataset item not found"))));
}
-
- @Override
- @Trace(dispatcher = true)
+
+ @Override
+ @WithSpan
public Flux getItems(@NonNull String workspaceId, @NonNull DatasetItemStreamRequest request) {
log.info("Getting dataset items by '{}' on workspaceId '{}'", request, workspaceId);
return Mono.fromCallable(() -> datasetService.findByName(workspaceId, request.datasetName()))
@@ -188,7 +187,7 @@ private NotFoundException failWithNotFound(String message) {
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono delete(@NonNull List ids) {
if (ids.isEmpty()) {
return Mono.empty();
@@ -198,13 +197,13 @@ public Mono delete(@NonNull List ids) {
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono getItems(@NonNull UUID datasetId, int page, int size) {
return dao.getItems(datasetId, page, size);
}
@Override
- @Trace(dispatcher = true)
+ @WithSpan
public Mono getItems(
int page, int size, @NonNull DatasetItemSearchCriteria datasetItemSearchCriteria) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentDAO.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentDAO.java
index 2e24de9ffe..4be64b5fdd 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentDAO.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentDAO.java
@@ -6,6 +6,7 @@
import com.comet.opik.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.Result;
@@ -372,6 +373,7 @@ AND ilike(name, CONCAT('%', :name, '%'))
private final @NonNull ConnectionFactory connectionFactory;
+ @WithSpan
Mono insert(@NonNull Experiment experiment) {
return Mono.from(connectionFactory.create())
.flatMapMany(connection -> insert(experiment, connection))
@@ -398,6 +400,7 @@ private String getOrDefault(JsonNode jsonNode) {
return Optional.ofNullable(jsonNode).map(JsonNode::toString).orElse("");
}
+ @WithSpan
Mono getById(@NonNull UUID id) {
return Mono.from(connectionFactory.create())
.flatMapMany(connection -> getById(id, connection))
@@ -447,6 +450,7 @@ private static List getFeedbackScores(Row row) {
return feedbackScoresAvg.isEmpty() ? null : feedbackScoresAvg;
}
+ @WithSpan
Mono find(
int page, int size, @NonNull ExperimentSearchCriteria experimentSearchCriteria) {
return countTotal(experimentSearchCriteria).flatMap(total -> find(page, size, experimentSearchCriteria, total));
@@ -507,6 +511,7 @@ private void bindSearchCriteria(Statement statement, ExperimentSearchCriteria cr
}
}
+ @WithSpan
Flux findByName(String name) {
Preconditions.checkArgument(StringUtils.isNotBlank(name), "Argument 'name' must not be blank");
return Mono.from(connectionFactory.create())
@@ -520,6 +525,7 @@ private Publisher extends Result> findByName(String name, Connection connectio
return makeFluxContextAware(bindWorkspaceIdToFlux(statement));
}
+ @WithSpan
public Flux getExperimentWorkspaces(@NonNull Set experimentIds) {
if (experimentIds.isEmpty()) {
return Flux.empty();
@@ -535,6 +541,7 @@ public Flux getExperimentWorkspaces(@NonNull Set e
row.get("id", UUID.class))));
}
+ @WithSpan
public Mono delete(Set ids) {
Preconditions.checkArgument(CollectionUtils.isNotEmpty(ids), "Argument 'ids' must not be empty");
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java
index 2bf9d394d8..b456640e2a 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java
@@ -2,6 +2,7 @@
import com.comet.opik.api.ExperimentItem;
import com.google.common.base.Preconditions;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.Result;
@@ -128,6 +129,7 @@ INSERT INTO experiment_items (
private final @NonNull ConnectionFactory connectionFactory;
+ @WithSpan
public Flux findExperimentSummaryByDatasetIds(Collection datasetIds) {
if (datasetIds.isEmpty()) {
@@ -148,6 +150,7 @@ public Flux findExperimentSummaryByDatasetIds(Collection insert(@NonNull Set experimentItems) {
Preconditions.checkArgument(CollectionUtils.isNotEmpty(experimentItems),
"Argument 'experimentItems' must not be empty");
@@ -207,6 +210,7 @@ private Publisher mapToExperimentItem(Result result) {
.build());
}
+ @WithSpan
public Mono get(@NonNull UUID id) {
return Mono.from(connectionFactory.create())
.flatMapMany(connection -> get(id, connection))
@@ -251,6 +255,7 @@ private Publisher extends Result> getItems(
return makeFluxContextAware(bindWorkspaceIdToFlux(statement));
}
+ @WithSpan
public Mono delete(Set ids) {
Preconditions.checkArgument(CollectionUtils.isNotEmpty(ids),
"Argument 'ids' must not be empty");
@@ -270,6 +275,7 @@ private Publisher extends Result> delete(Set ids, Connection connection)
return makeFluxContextAware(bindWorkspaceIdToFlux(statement));
}
+ @WithSpan
public Mono deleteByExperimentIds(Set experimentIds) {
Preconditions.checkArgument(CollectionUtils.isNotEmpty(experimentIds),
diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/FeedbackScoreDAO.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/FeedbackScoreDAO.java
index e63db2530f..44b605fbf1 100644
--- a/apps/opik-backend/src/main/java/com/comet/opik/domain/FeedbackScoreDAO.java
+++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/FeedbackScoreDAO.java
@@ -5,6 +5,7 @@
import com.comet.opik.api.ScoreSource;
import com.google.common.base.Preconditions;
import com.google.inject.ImplementedBy;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.Result;
import io.r2dbc.spi.Row;
@@ -256,6 +257,7 @@ AND entity_id IN (
""";
@Override
+ @WithSpan
public Mono