[NA] Remove New Relic dependency (#254)
* [NA] Remove New Relic dependency

* Update Helm documentation

* Fix versions

* Fix setup

* Update Helm documentation

* Fix config

* Update Helm documentation

* Fix default

* Update Helm documentation

* Fix

---------

Co-authored-by: CometActions <[email protected]>
thiagohora and CometActions authored Oct 14, 2024
1 parent 019a007 commit fd0fc81
Showing 16 changed files with 153 additions and 90 deletions.
11 changes: 6 additions & 5 deletions apps/opik-backend/entrypoint.sh
@@ -5,12 +5,13 @@ echo $(pwd)
jwebserver -d /opt/opik/redoc -b 0.0.0.0 -p 3003 &

echo "OPIK_VERSION=$OPIK_VERSION"
echo "NEW_RELIC_ENABLED=$NEW_RELIC_ENABLED"
echo "NEW_RELIC_VERSION=$NEW_RELIC_VERSION"
echo "OPIK_OTEL_SDK_ENABLED=$OPIK_OTEL_SDK_ENABLED"
echo "OTEL_VERSION=$OTEL_VERSION"

if [[ "${NEW_RELIC_ENABLED}" == "true" && "${NEW_RELIC_LICENSE_KEY}" != "" ]];then
curl -o /tmp/newrelic-agent.jar https://download.newrelic.com/newrelic/java-agent/newrelic-agent/${NEW_RELIC_VERSION}/newrelic-agent-${NEW_RELIC_VERSION}.jar
JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/newrelic-agent.jar"
if [[ "${OPIK_OTEL_SDK_ENABLED}" == "true" && "${OTEL_VERSION}" != "" && "${OTEL_EXPORTER_OTLP_ENDPOINT}" != "" ]];then
OTEL_RESOURCE_ATTRIBUTES="service.name=opik-backend,service.version=${OPIK_VERSION}"
curl -L -o /tmp/opentelemetry-javaagent.jar https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v${OTEL_VERSION}/opentelemetry-javaagent.jar
JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/opentelemetry-javaagent.jar"
fi

# Check if ENABLE_VIRTUAL_THREADS is set to true
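The agent swap above is configuration-only: the OpenTelemetry javaagent attached through JAVA_OPTS configures itself from the standard OTEL_* environment variables (OTEL_EXPORTER_OTLP_ENDPOINT, plus the OTEL_RESOURCE_ATTRIBUTES assembled in the script). For reference, a rough programmatic equivalent is sketched below; it assumes the optional opentelemetry-sdk-extension-autoconfigure artifact, which this diff does not add, so treat it as an illustration rather than the project's actual setup.

import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;

public final class OtelBootstrap {

    // Reads the same OTEL_* environment variables the javaagent honors
    // (exporter endpoint, resource attributes, service name) and builds
    // an SDK instance from them.
    public static OpenTelemetry init() {
        return AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk();
    }
}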
12 changes: 9 additions & 3 deletions apps/opik-backend/pom.xml
@@ -30,6 +30,7 @@
<uuid.java.generator.version>5.1.0</uuid.java.generator.version>
<wiremock.version>3.9.1</wiremock.version>
<redisson.version>3.34.1</redisson.version>
<opentelmetry.version>2.8.0</opentelmetry.version>
<aws.java.sdk.version>2.25.70</aws.java.sdk.version>
<mainClass>com.comet.opik.OpikApplication</mainClass>
</properties>
@@ -62,9 +63,14 @@

<dependencies>
<dependency>
<groupId>com.newrelic.agent.java</groupId>
<artifactId>newrelic-api</artifactId>
<version>8.14.0</version>
<groupId>io.opentelemetry.instrumentation</groupId>
<artifactId>opentelemetry-instrumentation-annotations</artifactId>
<version>${opentelmetry.version}</version>
</dependency>
<dependency>
<groupId>io.opentelemetry.instrumentation</groupId>
<artifactId>opentelemetry-r2dbc-1.0</artifactId>
<version>${opentelmetry.version}-alpha</version>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
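The opentelemetry-instrumentation-annotations artifact provides the @WithSpan annotation used throughout the Java changes below. On its own the annotation is inert; the javaagent attached in entrypoint.sh detects it at class-load time and wraps each call in a span. A minimal illustration follows; the class and method are invented for the example.

import io.opentelemetry.instrumentation.annotations.SpanAttribute;
import io.opentelemetry.instrumentation.annotations.WithSpan;

class ReportService { // hypothetical class, for illustration only

    // With the javaagent attached, each call becomes a span named
    // "ReportService.generate" carrying a "report.id" attribute;
    // without the agent, both annotations are no-ops.
    @WithSpan
    String generate(@SpanAttribute("report.id") String reportId) {
        return "report-" + reportId;
    }
}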
DatasetItemDAO.java
@@ -12,8 +12,7 @@
import com.comet.opik.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.inject.ImplementedBy;
import com.newrelic.api.agent.Segment;
import com.newrelic.api.agent.Trace;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.Result;
import io.r2dbc.spi.Statement;
@@ -40,6 +39,8 @@

import static com.comet.opik.api.DatasetItem.DatasetItemPage;
import static com.comet.opik.domain.AsyncContextUtils.bindWorkspaceIdToFlux;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.Segment;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.endSegment;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.startSegment;
import static com.comet.opik.utils.AsyncUtils.makeFluxContextAware;
import static com.comet.opik.utils.AsyncUtils.makeMonoContextAware;
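These static imports point to a new InstrumentAsyncUtils helper whose source is not part of the excerpt shown here. Judging from the call sites below, where startSegment(name, product, operation) is paired with endSegment(segment) in doFinally, it plausibly wraps a manually managed OpenTelemetry span; everything in this sketch, including the attribute keys, is an assumption.

package com.comet.opik.infrastructure.instrumentation;

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;

// Hypothetical reconstruction: wraps a manually managed span so reactive
// pipelines can close it from doFinally(...).
public class InstrumentAsyncUtils {

    public record Segment(Span span) {
    }

    public static Segment startSegment(String name, String product, String operation) {
        Tracer tracer = GlobalOpenTelemetry.getTracer("opik-backend");
        Span span = tracer.spanBuilder(name)
                .setAttribute("product", product)     // e.g. "Clickhouse"
                .setAttribute("operation", operation) // e.g. "select_dataset_item"
                .startSpan();
        return new Segment(span);
    }

    public static void endSegment(Segment segment) {
        if (segment != null) {
            segment.span().end(); // closes the span opened in startSegment
        }
    }
}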
@@ -408,7 +409,7 @@ LEFT JOIN (
private final @NonNull FilterQueryBuilder filterQueryBuilder;

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Long> save(@NonNull UUID datasetId, @NonNull List<DatasetItem> items) {

if (items.isEmpty()) {
@@ -455,7 +456,7 @@ private Mono<Long> mapAndInsert(
return Flux.from(statement.execute())
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
.doFinally(signalType -> segment.end());
.doFinally(signalType -> endSegment(segment));
});
}

@@ -550,7 +551,7 @@ private List<FeedbackScore> getFeedbackScores(Object feedbackScoresRaw) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItem> get(@NonNull UUID id) {
return asyncTemplate.nonTransaction(connection -> {

@@ -560,14 +561,14 @@ public Mono<DatasetItem> get(@NonNull UUID id) {
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_item");

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
.doFinally(signalType -> segment.end())
.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.singleOrEmpty();
});
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
log.info("Getting dataset items by datasetId '{}', limit '{}', lastRetrievedId '{}'",
datasetId, limit, lastRetrievedId);
@@ -591,13 +592,13 @@ public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastR
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_items_stream");

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
.doFinally(signalType -> segment.end())
.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem);
});
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<List<WorkspaceAndResourceId>> getDatasetItemWorkspace(@NonNull Set<UUID> datasetItemIds) {

if (datasetItemIds.isEmpty()) {
Expand All @@ -618,7 +619,7 @@ public Mono<List<WorkspaceAndResourceId>> getDatasetItemWorkspace(@NonNull Set<U
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Long> delete(@NonNull List<UUID> ids) {
if (ids.isEmpty()) {
return Mono.empty();
@@ -633,7 +634,7 @@ public Mono<Long> delete(@NonNull List<UUID> ids) {
return bindAndDelete(ids, statement)
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
.doFinally(signalType -> segment.end());
.doFinally(signalType -> endSegment(segment));
});
}

@@ -645,7 +646,7 @@ private Flux<? extends Result> bindAndDelete(List<UUID> ids, Statement statement
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {

Segment segmentCount = startSegment("dataset_items", "Clickhouse", "select_dataset_items_page_count");
@@ -656,7 +657,7 @@ public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int siz
.bind("datasetId", datasetId)
.bind("workspace_id", workspaceId)
.execute())
.doFinally(signalType -> segmentCount.end())
.doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
@@ -672,7 +673,7 @@ public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int siz
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)))
.doFinally(signalType -> segment.end());
.doFinally(signalType -> endSegment(segment));
})));
}

@@ -705,7 +706,7 @@ private void bindSearchCriteria(DatasetItemSearchCriteria datasetItemSearchCrite
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(
@NonNull DatasetItemSearchCriteria datasetItemSearchCriteria, int page, int size) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
@@ -725,7 +726,7 @@ public Mono<DatasetItemPage> getItems(
bindSearchCriteria(datasetItemSearchCriteria, statement);

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
.doFinally(signalType -> segmentCount.end())
.doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
@@ -744,7 +745,7 @@ public Mono<DatasetItemPage> getItems(
bindSearchCriteria(datasetItemSearchCriteria, selectStatement);

return makeFluxContextAware(bindWorkspaceIdToFlux(selectStatement))
.doFinally(signalType -> segment.end())
.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)));
DatasetItemService.java
@@ -9,7 +9,7 @@
import com.comet.opik.api.error.IdentifierMismatchException;
import com.comet.opik.infrastructure.auth.RequestContext;
import com.google.inject.ImplementedBy;
import com.newrelic.api.agent.Trace;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.ws.rs.ClientErrorException;
@@ -57,7 +57,7 @@ class DatasetItemServiceImpl implements DatasetItemService {
private final @NonNull SpanService spanService;

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Void> save(@NonNull DatasetItemBatch batch) {
if (batch.datasetId() == null && batch.datasetName() == null) {
return Mono.error(failWithError("dataset_id or dataset_name must be provided"));
@@ -102,14 +102,13 @@ private ClientErrorException newConflict(String error) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItem> get(@NonNull UUID id) {
return dao.get(id)
.switchIfEmpty(Mono.defer(() -> Mono.error(failWithNotFound("Dataset item not found"))));
}

@Override
@Trace(dispatcher = true)

@WithSpan
public Flux<DatasetItem> getItems(@NonNull String workspaceId, @NonNull DatasetItemStreamRequest request) {
log.info("Getting dataset items by '{}' on workspaceId '{}'", request, workspaceId);
return Mono.fromCallable(() -> datasetService.findByName(workspaceId, request.datasetName()))
@@ -188,7 +187,7 @@ private NotFoundException failWithNotFound(String message) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Void> delete(@NonNull List<UUID> ids) {
if (ids.isEmpty()) {
return Mono.empty();
@@ -198,13 +197,13 @@ public Mono<Void> delete(@NonNull List<UUID> ids) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {
return dao.getItems(datasetId, page, size);
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(
int page, int size, @NonNull DatasetItemSearchCriteria datasetItemSearchCriteria) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
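One caveat worth noting for this service: these methods return Mono/Flux, so a naive method-level span would close as soon as the publisher is assembled rather than when it completes. Recent versions of the OpenTelemetry javaagent end @WithSpan spans when the returned reactive type terminates, but that behavior is worth verifying against the agent version pinned by OTEL_VERSION. The current span can also be enriched before the pipeline is built, as in this invented example.

import io.opentelemetry.api.trace.Span;
import io.opentelemetry.instrumentation.annotations.WithSpan;

import java.util.UUID;

import reactor.core.publisher.Mono;

class DatasetItemLookup { // hypothetical class, for illustration only

    @WithSpan
    Mono<String> describe(UUID id) {
        // Span.current() is the span the agent opened for this method call;
        // attributes added here appear on it before the pipeline runs.
        Span.current().setAttribute("dataset_item.id", id.toString());
        return Mono.just("item-" + id);
    }
}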
ExperimentDAO.java
@@ -6,6 +6,7 @@
import com.comet.opik.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.Result;
@@ -372,6 +373,7 @@ AND ilike(name, CONCAT('%', :name, '%'))

private final @NonNull ConnectionFactory connectionFactory;

@WithSpan
Mono<Void> insert(@NonNull Experiment experiment) {
return Mono.from(connectionFactory.create())
.flatMapMany(connection -> insert(experiment, connection))
@@ -398,6 +400,7 @@ private String getOrDefault(JsonNode jsonNode) {
return Optional.ofNullable(jsonNode).map(JsonNode::toString).orElse("");
}

@WithSpan
Mono<Experiment> getById(@NonNull UUID id) {
return Mono.from(connectionFactory.create())
.flatMapMany(connection -> getById(id, connection))
@@ -447,6 +450,7 @@ private static List<FeedbackScoreAverage> getFeedbackScores(Row row) {
return feedbackScoresAvg.isEmpty() ? null : feedbackScoresAvg;
}

@WithSpan
Mono<Experiment.ExperimentPage> find(
int page, int size, @NonNull ExperimentSearchCriteria experimentSearchCriteria) {
return countTotal(experimentSearchCriteria).flatMap(total -> find(page, size, experimentSearchCriteria, total));
@@ -507,6 +511,7 @@ private void bindSearchCriteria(Statement statement, ExperimentSearchCriteria cr
}
}

@WithSpan
Flux<Experiment> findByName(String name) {
Preconditions.checkArgument(StringUtils.isNotBlank(name), "Argument 'name' must not be blank");
return Mono.from(connectionFactory.create())
@@ -520,6 +525,7 @@ private Publisher<? extends Result> findByName(String name, Connection connectio
return makeFluxContextAware(bindWorkspaceIdToFlux(statement));
}

@WithSpan
public Flux<WorkspaceAndResourceId> getExperimentWorkspaces(@NonNull Set<UUID> experimentIds) {
if (experimentIds.isEmpty()) {
return Flux.empty();
@@ -535,6 +541,7 @@ public Flux<WorkspaceAndResourceId> getExperimentWorkspaces(@NonNull Set<UUID> e
row.get("id", UUID.class))));
}

@WithSpan
public Mono<Long> delete(Set<UUID> ids) {

Preconditions.checkArgument(CollectionUtils.isNotEmpty(ids), "Argument 'ids' must not be empty");
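The opentelemetry-r2dbc-1.0 dependency added to pom.xml targets exactly the R2DBC ConnectionFactory these DAOs run their ClickHouse statements through, producing a client span per statement. Where Opik actually wraps the factory is not visible in this diff; the following is only a sketch of that library's typical wiring, and the class is invented.

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.instrumentation.r2dbc.v1_0.R2dbcTelemetry;
import io.r2dbc.spi.ConnectionFactories;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.ConnectionFactoryOptions;

class R2dbcWiring { // hypothetical class, for illustration only

    static ConnectionFactory instrumented(ConnectionFactoryOptions options) {
        ConnectionFactory raw = ConnectionFactories.get(options);
        // Statements executed through the returned factory are recorded
        // as client spans by the r2dbc instrumentation.
        return R2dbcTelemetry.create(GlobalOpenTelemetry.get())
                .wrapConnectionFactory(raw, options);
    }
}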
ExperimentItemDAO.java
@@ -2,6 +2,7 @@

import com.comet.opik.api.ExperimentItem;
import com.google.common.base.Preconditions;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.Result;
@@ -128,6 +129,7 @@ INSERT INTO experiment_items (

private final @NonNull ConnectionFactory connectionFactory;

@WithSpan
public Flux<ExperimentSummary> findExperimentSummaryByDatasetIds(Collection<UUID> datasetIds) {

if (datasetIds.isEmpty()) {
@@ -148,6 +150,7 @@ public Flux<ExperimentSummary> findExperimentSummaryByDatasetIds(Collection<UUID
row.get("most_recent_experiment_at", Instant.class))));
}

@WithSpan
public Mono<Long> insert(@NonNull Set<ExperimentItem> experimentItems) {
Preconditions.checkArgument(CollectionUtils.isNotEmpty(experimentItems),
"Argument 'experimentItems' must not be empty");
@@ -207,6 +210,7 @@ private Publisher<ExperimentItem> mapToExperimentItem(Result result) {
.build());
}

@WithSpan
public Mono<ExperimentItem> get(@NonNull UUID id) {
return Mono.from(connectionFactory.create())
.flatMapMany(connection -> get(id, connection))
@@ -251,6 +255,7 @@ private Publisher<? extends Result> getItems(
return makeFluxContextAware(bindWorkspaceIdToFlux(statement));
}

@WithSpan
public Mono<Long> delete(Set<UUID> ids) {
Preconditions.checkArgument(CollectionUtils.isNotEmpty(ids),
"Argument 'ids' must not be empty");
@@ -270,6 +275,7 @@ private Publisher<? extends Result> delete(Set<UUID> ids, Connection connection)
return makeFluxContextAware(bindWorkspaceIdToFlux(statement));
}

@WithSpan
public Mono<Long> deleteByExperimentIds(Set<UUID> experimentIds) {

Preconditions.checkArgument(CollectionUtils.isNotEmpty(experimentIds),
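A note on semantics for the annotation swap repeated across these DAOs: New Relic's @Trace(dispatcher = true) started a new transaction when none existed, whereas @WithSpan simply opens a span that joins whatever trace is current (or becomes a root span otherwise). If the default ClassName.method span name is too generic, for instance the several delete methods in this commit, an explicit name can be supplied; the class below is invented for illustration.

import io.opentelemetry.instrumentation.annotations.WithSpan;

import java.util.Set;
import java.util.UUID;

import reactor.core.publisher.Mono;

class ExperimentItemCleanup { // hypothetical class, for illustration only

    // Replaces the default "ExperimentItemCleanup.purge" span name.
    @WithSpan("experiment_items.purge")
    Mono<Long> purge(Set<UUID> ids) {
        return Mono.just((long) ids.size());
    }
}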
(10 more changed files in this commit are not shown.)
