[NA] Remove New Relic dependency #254

Merged Oct 14, 2024 · 17 commits (changes shown from 8 commits)
11 changes: 6 additions & 5 deletions apps/opik-backend/entrypoint.sh
@@ -5,12 +5,13 @@ echo $(pwd)
jwebserver -d /opt/opik/redoc -b 0.0.0.0 -p 3003 &

echo "OPIK_VERSION=$OPIK_VERSION"
echo "NEW_RELIC_ENABLED=$NEW_RELIC_ENABLED"
echo "NEW_RELIC_VERSION=$NEW_RELIC_VERSION"
echo "OPIK_OTEL_SDK_ENABLED=$OPIK_OTEL_SDK_ENABLED"
echo "OTEL_VERSION=$OTEL_VERSION"

if [[ "${NEW_RELIC_ENABLED}" == "true" && "${NEW_RELIC_LICENSE_KEY}" != "" ]];then
curl -o /tmp/newrelic-agent.jar https://download.newrelic.com/newrelic/java-agent/newrelic-agent/${NEW_RELIC_VERSION}/newrelic-agent-${NEW_RELIC_VERSION}.jar
JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/newrelic-agent.jar"
if [[ "${OPIK_OTEL_SDK_ENABLED}" == "true" && "${OTEL_VERSION}" != "" && "${OTEL_EXPORTER_OTLP_ENDPOINT}" != "" ]];then
OTEL_RESOURCE_ATTRIBUTES="service.name=opik-backend,service.version=${OPIK_VERSION}"
curl -L -o /tmp/opentelemetry-javaagent.jar https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v${OTEL_VERSION}/opentelemetry-javaagent.jar
JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/opentelemetry-javaagent.jar"
fi

# Check if ENABLE_VIRTUAL_THREADS is set to true
12 changes: 9 additions & 3 deletions apps/opik-backend/pom.xml
@@ -30,6 +30,7 @@
<uuid.java.generator.version>5.1.0</uuid.java.generator.version>
<wiremock.version>3.9.1</wiremock.version>
<redisson.version>3.34.1</redisson.version>
+<opentelmetry.version>2.8.0</opentelmetry.version>
<mainClass>com.comet.opik.OpikApplication</mainClass>
</properties>

@@ -54,9 +55,14 @@

<dependencies>
<dependency>
-<groupId>com.newrelic.agent.java</groupId>
-<artifactId>newrelic-api</artifactId>
-<version>8.14.0</version>
+<groupId>io.opentelemetry.instrumentation</groupId>
+<artifactId>opentelemetry-instrumentation-annotations</artifactId>
+<version>${opentelmetry.version}</version>
</dependency>
+<dependency>
+<groupId>io.opentelemetry.instrumentation</groupId>
+<artifactId>opentelemetry-r2dbc-1.0</artifactId>
+<version>${opentelmetry.version}-alpha</version>
Comment on lines +70 to +73
Collaborator:

I found multiple OpenTelemetry libraries for R2DBC. This one seems to be an early release, still in alpha. Why this choice over the others?

Contributor Author:

This is the agent instrumentation, and all of its modules are published as alpha: https://mvnrepository.com/artifact/io.opentelemetry.instrumentation/opentelemetry-r2dbc-1.0. Here is the definition of alpha: https://github.com/open-telemetry/opentelemetry-java/blob/main/VERSIONING.md#stable-vs-alpha. In short, alpha modules come with no backward-compatibility guarantees and may require changes from version to version, but that doesn't mean they are not mature. A wiring sketch follows this file's diff.

+</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
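For reference on the module discussed above: its library flavor instruments R2DBC by wrapping the ConnectionFactory at configuration time. A minimal sketch, assuming the R2dbcTelemetry entry point from io.opentelemetry.instrumentation.r2dbc.v1_0 (the helper class below is illustrative, not code from this PR):

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.instrumentation.r2dbc.v1_0.R2dbcTelemetry;
import io.r2dbc.spi.ConnectionFactories;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.ConnectionFactoryOptions;

class TracedConnectionFactoryExample {

    // Wrap the plain ConnectionFactory so every R2DBC statement is recorded
    // as a client span under the currently active trace.
    static ConnectionFactory traced(ConnectionFactoryOptions options) {
        ConnectionFactory original = ConnectionFactories.get(options);
        return R2dbcTelemetry.create(GlobalOpenTelemetry.get())
                .wrapConnectionFactory(original, options);
    }
}

With the javaagent flavor (which entrypoint.sh attaches above), no such wiring code is needed; the agent applies equivalent instrumentation at class-load time.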
@@ -10,8 +10,7 @@
import com.comet.opik.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.inject.ImplementedBy;
-import com.newrelic.api.agent.Segment;
-import com.newrelic.api.agent.Trace;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.Result;
import io.r2dbc.spi.Statement;
@@ -38,6 +37,8 @@

import static com.comet.opik.api.DatasetItem.DatasetItemPage;
import static com.comet.opik.domain.AsyncContextUtils.bindWorkspaceIdToFlux;
+import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.Segment;
+import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.endSegment;
+import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.startSegment;
import static com.comet.opik.utils.AsyncUtils.makeFluxContextAware;
import static com.comet.opik.utils.AsyncUtils.makeMonoContextAware;
@@ -340,7 +341,7 @@ LEFT JOIN (
private final @NonNull TransactionTemplate asyncTemplate;

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<Long> save(@NonNull UUID datasetId, @NonNull List<DatasetItem> items) {

if (items.isEmpty()) {
@@ -387,7 +388,7 @@ private Mono<Long> mapAndInsert(
return Flux.from(statement.execute())
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
-.doFinally(signalType -> segment.end());
+.doFinally(signalType -> endSegment(segment));
});
}

@@ -480,7 +481,7 @@ private List<FeedbackScore> getFeedbackScores(Object feedbackScoresRaw) {
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<DatasetItem> get(@NonNull UUID id) {
return asyncTemplate.nonTransaction(connection -> {

@@ -490,14 +491,14 @@ public Mono<DatasetItem> get(@NonNull UUID id) {
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_item");

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
-.doFinally(signalType -> segment.end())
+.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.singleOrEmpty();
});
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
ST template = new ST(SELECT_DATASET_ITEMS_STREAM);

@@ -518,13 +519,13 @@ public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_items_stream");

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
-.doFinally(signalType -> segment.end())
+.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem);
});
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<List<WorkspaceAndResourceId>> getDatasetItemWorkspace(@NonNull Set<UUID> datasetItemIds) {

if (datasetItemIds.isEmpty()) {
@@ -545,7 +546,7 @@
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<Long> delete(@NonNull List<UUID> ids) {
if (ids.isEmpty()) {
return Mono.empty();
@@ -560,7 +561,7 @@ public Mono<Long> delete(@NonNull List<UUID> ids) {
return bindAndDelete(ids, statement)
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
-.doFinally(signalType -> segment.end());
+.doFinally(signalType -> endSegment(segment));
});
}

@@ -572,7 +573,7 @@ private Flux<? extends Result> bindAndDelete(List<UUID> ids, Statement statement) {
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {

Segment segmentCount = startSegment("dataset_items", "Clickhouse", "select_dataset_items_page_count");
@@ -583,7 +584,7 @@ public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {
.bind("datasetId", datasetId)
.bind("workspace_id", workspaceId)
.execute())
-.doFinally(signalType -> segmentCount.end())
+.doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
@@ -599,12 +600,12 @@ public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)))
-.doFinally(signalType -> segment.end());
+.doFinally(signalType -> endSegment(segment));
})));
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<DatasetItemPage> getItems(
@NonNull DatasetItemSearchCriteria datasetItemSearchCriteria, int page, int size) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
@@ -621,7 +622,7 @@ public Mono<DatasetItemPage> getItems(
.bind("experimentIds", datasetItemSearchCriteria.experimentIds())
.bind("workspace_id", workspaceId)
.execute())
-.doFinally(signalType -> segmentCount.end())
+.doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
@@ -639,7 +640,7 @@
.bind("limit", size)
.bind("offset", (page - 1) * size)
.execute())
-.doFinally(signalType -> segment.end())
+.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)));
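The Segment, startSegment, and endSegment helpers used throughout this file come from the new com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils, whose source is not part of this hunk. A minimal sketch of the pattern, assuming Segment wraps a manually started OpenTelemetry span (the tracer name and attribute keys below are assumptions, not the PR's actual implementation):

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;

public class InstrumentAsyncUtils {

    public record Segment(Span span) {
    }

    private static final Tracer TRACER = GlobalOpenTelemetry.getTracer("opik-backend");

    // Open a manual span around an async database call; callers close it
    // from a doFinally hook once the reactive pipeline terminates.
    public static Segment startSegment(String name, String product, String operation) {
        Span span = TRACER.spanBuilder(name)
                .setAttribute("db.system", product)
                .setAttribute("db.operation", operation)
                .startSpan();
        return new Segment(span);
    }

    // End the span; null-safe so doFinally callbacks can call it unconditionally.
    public static void endSegment(Segment segment) {
        if (segment != null) {
            segment.span().end();
        }
    }
}

This mirrors the New Relic Segment API it replaces: each ClickHouse call gets its own named child span, finer-grained than the single span @WithSpan creates around the whole method.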
@@ -8,7 +8,7 @@
import com.comet.opik.api.error.IdentifierMismatchException;
import com.comet.opik.infrastructure.auth.RequestContext;
import com.google.inject.ImplementedBy;
-import com.newrelic.api.agent.Trace;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.ws.rs.ClientErrorException;
@@ -57,7 +57,7 @@ class DatasetItemServiceImpl implements DatasetItemService {
private final @NonNull SpanService spanService;

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<Void> save(@NonNull DatasetItemBatch batch) {
if (batch.datasetId() == null && batch.datasetName() == null) {
return Mono.error(failWithError("dataset_id or dataset_name must be provided"));
@@ -102,14 +102,14 @@ private ClientErrorException newConflict(String error) {
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<DatasetItem> get(@NonNull UUID id) {
return dao.get(id)
.switchIfEmpty(Mono.defer(() -> Mono.error(failWithNotFound("Dataset item not found"))));
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
return dao.getItems(datasetId, limit, lastRetrievedId);
}
@@ -185,7 +185,7 @@ private NotFoundException failWithNotFound(String message) {
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<Void> delete(@NonNull List<UUID> ids) {
if (ids.isEmpty()) {
return Mono.empty();
@@ -195,13 +195,13 @@ public Mono<Void> delete(@NonNull List<UUID> ids) {
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {
return dao.getItems(datasetId, page, size);
}

@Override
-@Trace(dispatcher = true)
+@WithSpan
public Mono<DatasetItemPage> getItems(
int page, int size, @NonNull DatasetItemSearchCriteria datasetItemSearchCriteria) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
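Context for the annotation swap: @Trace(dispatcher = true) came from the New Relic agent API, while @WithSpan is acted on by the OpenTelemetry javaagent that entrypoint.sh now attaches; without the agent, the annotation is inert. A minimal sketch against a hypothetical service (not code from this PR):

import io.opentelemetry.instrumentation.annotations.WithSpan;
import reactor.core.publisher.Mono;

class ExampleService {

    // With the agent attached this produces a span named "ExampleService.find";
    // the agent's Reactor support should keep the span open until the returned
    // Mono completes rather than ending it when the method returns.
    @WithSpan
    Mono<String> find(String id) {
        return Mono.just(id);
    }
}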