Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[NA] Remove New Relic dependency #254

Merged
Merged 17 commits into the base branch on Oct 14, 2024 (source and target branch names not preserved in this capture)
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions apps/opik-backend/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,3 +65,6 @@ server:
enableVirtualThreads: ${ENABLE_VIRTUAL_THREADS:-false}
gzip:
enabled: true

openTelemetry:
disabled: ${OTEL_SDK_DISABLED:-true}
andrescrz marked this conversation as resolved.
Show resolved Hide resolved
10 changes: 5 additions & 5 deletions apps/opik-backend/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,12 @@ echo $(pwd)
jwebserver -d /opt/opik/redoc -b 0.0.0.0 -p 3003 &

echo "OPIK_VERSION=$OPIK_VERSION"
echo "NEW_RELIC_ENABLED=$NEW_RELIC_ENABLED"
echo "NEW_RELIC_VERSION=$NEW_RELIC_VERSION"
echo "OTEL_SDK_DISABLED=$OTEL_SDK_DISABLED"
echo "OTEL_VERSION=$OTEL_VERSION"

if [[ "${NEW_RELIC_ENABLED}" == "true" && "${NEW_RELIC_LICENSE_KEY}" != "" ]];then
curl -o /tmp/newrelic-agent.jar https://download.newrelic.com/newrelic/java-agent/newrelic-agent/${NEW_RELIC_VERSION}/newrelic-agent-${NEW_RELIC_VERSION}.jar
JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/newrelic-agent.jar"
if [[ "${OTEL_SDK_DISABLED}" == "false" ]];then
andrescrz marked this conversation as resolved.
Show resolved Hide resolved
curl -L -o /tmp/opentelemetry-javaagent.jar https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v${OTEL_VERSION}/opentelemetry-javaagent.jar
JAVA_OPTS="$JAVA_OPTS -javaagent:/tmp/opentelemetry-javaagent.jar"
fi

# Check if ENABLE_VIRTUAL_THREADS is set to true
Expand Down
7 changes: 4 additions & 3 deletions apps/opik-backend/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
<uuid.java.generator.version>5.1.0</uuid.java.generator.version>
<wiremock.version>3.9.1</wiremock.version>
<redisson.version>3.34.1</redisson.version>
<opentelemetry.version>2.8.0</opentelemetry.version>
<mainClass>com.comet.opik.OpikApplication</mainClass>
</properties>

Expand All @@ -54,9 +55,9 @@

<dependencies>
<dependency>
<groupId>com.newrelic.agent.java</groupId>
<artifactId>newrelic-api</artifactId>
<version>8.14.0</version>
<groupId>io.opentelemetry.instrumentation</groupId>
<artifactId>opentelemetry-instrumentation-annotations</artifactId>
<version>${opentelemetry.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import com.comet.opik.infrastructure.bundle.LiquibaseBundle;
import com.comet.opik.infrastructure.db.DatabaseAnalyticsModule;
import com.comet.opik.infrastructure.db.IdGeneratorModule;
import com.comet.opik.infrastructure.instrumentation.OpenTelemetryModule;
import com.comet.opik.infrastructure.redis.RedisModule;
import com.comet.opik.utils.JsonBigDecimalDeserializer;
import com.fasterxml.jackson.annotation.JsonInclude;
Expand Down Expand Up @@ -58,7 +59,7 @@ public void initialize(Bootstrap<OpikConfiguration> bootstrap) {
bootstrap.addBundle(GuiceBundle.builder()
.bundles(JdbiBundle.<OpikConfiguration>forDatabase((conf, env) -> conf.getDatabase())
.withPlugins(new SqlObjectPlugin(), new Jackson2Plugin()))
.modules(new DatabaseAnalyticsModule(), new IdGeneratorModule(), new AuthModule(), new RedisModule())
.modules(new DatabaseAnalyticsModule(), new IdGeneratorModule(), new AuthModule(), new RedisModule(), new OpenTelemetryModule())
.enableAutoConfig()
.build());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@
import com.comet.opik.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.inject.ImplementedBy;
import com.newrelic.api.agent.Segment;
import com.newrelic.api.agent.Trace;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.Result;
import io.r2dbc.spi.Statement;
Expand All @@ -38,6 +37,8 @@

import static com.comet.opik.api.DatasetItem.DatasetItemPage;
import static com.comet.opik.domain.AsyncContextUtils.bindWorkspaceIdToFlux;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.Segment;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.endSegment;
import static com.comet.opik.infrastructure.instrumentation.InstrumentAsyncUtils.startSegment;
import static com.comet.opik.utils.AsyncUtils.makeFluxContextAware;
import static com.comet.opik.utils.AsyncUtils.makeMonoContextAware;
Expand Down Expand Up @@ -340,7 +341,7 @@ LEFT JOIN (
private final @NonNull TransactionTemplate asyncTemplate;

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Long> save(@NonNull UUID datasetId, @NonNull List<DatasetItem> items) {

if (items.isEmpty()) {
Expand Down Expand Up @@ -387,7 +388,7 @@ private Mono<Long> mapAndInsert(
return Flux.from(statement.execute())
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
.doFinally(signalType -> segment.end());
.doFinally(signalType -> endSegment(segment));
});
}

Expand Down Expand Up @@ -480,7 +481,7 @@ private List<FeedbackScore> getFeedbackScores(Object feedbackScoresRaw) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItem> get(@NonNull UUID id) {
return asyncTemplate.nonTransaction(connection -> {

Expand All @@ -490,14 +491,14 @@ public Mono<DatasetItem> get(@NonNull UUID id) {
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_item");

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
.doFinally(signalType -> segment.end())
.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.singleOrEmpty();
});
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
ST template = new ST(SELECT_DATASET_ITEMS_STREAM);

Expand All @@ -518,13 +519,13 @@ public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastR
Segment segment = startSegment("dataset_items", "Clickhouse", "select_dataset_items_stream");

return makeFluxContextAware(bindWorkspaceIdToFlux(statement))
.doFinally(signalType -> segment.end())
.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem);
});
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<List<WorkspaceAndResourceId>> getDatasetItemWorkspace(@NonNull Set<UUID> datasetItemIds) {

if (datasetItemIds.isEmpty()) {
Expand All @@ -545,7 +546,7 @@ public Mono<List<WorkspaceAndResourceId>> getDatasetItemWorkspace(@NonNull Set<U
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Long> delete(@NonNull List<UUID> ids) {
if (ids.isEmpty()) {
return Mono.empty();
Expand All @@ -560,7 +561,7 @@ public Mono<Long> delete(@NonNull List<UUID> ids) {
return bindAndDelete(ids, statement)
.flatMap(Result::getRowsUpdated)
.reduce(0L, Long::sum)
.doFinally(signalType -> segment.end());
.doFinally(signalType -> endSegment(segment));
});
}

Expand All @@ -572,7 +573,7 @@ private Flux<? extends Result> bindAndDelete(List<UUID> ids, Statement statement
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {

Segment segmentCount = startSegment("dataset_items", "Clickhouse", "select_dataset_items_page_count");
Expand All @@ -583,7 +584,7 @@ public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int siz
.bind("datasetId", datasetId)
.bind("workspace_id", workspaceId)
.execute())
.doFinally(signalType -> segmentCount.end())
.doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
Expand All @@ -599,12 +600,12 @@ public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int siz
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)))
.doFinally(signalType -> segment.end());
.doFinally(signalType -> endSegment(segment));
})));
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(
@NonNull DatasetItemSearchCriteria datasetItemSearchCriteria, int page, int size) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
Expand All @@ -621,7 +622,7 @@ public Mono<DatasetItemPage> getItems(
.bind("experimentIds", datasetItemSearchCriteria.experimentIds())
.bind("workspace_id", workspaceId)
.execute())
.doFinally(signalType -> segmentCount.end())
.doFinally(signalType -> endSegment(segmentCount))
.flatMap(result -> result.map((row, rowMetadata) -> row.get(0, Long.class)))
.reduce(0L, Long::sum)
.flatMap(count -> {
Expand All @@ -639,7 +640,7 @@ public Mono<DatasetItemPage> getItems(
.bind("limit", size)
.bind("offset", (page - 1) * size)
.execute())
.doFinally(signalType -> segment.end())
.doFinally(signalType -> endSegment(segment))
.flatMap(this::mapItem)
.collectList()
.flatMap(items -> Mono.just(new DatasetItemPage(items, page, items.size(), count)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import com.comet.opik.api.error.IdentifierMismatchException;
import com.comet.opik.infrastructure.auth.RequestContext;
import com.google.inject.ImplementedBy;
import com.newrelic.api.agent.Trace;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.ws.rs.ClientErrorException;
Expand Down Expand Up @@ -57,7 +57,7 @@ class DatasetItemServiceImpl implements DatasetItemService {
private final @NonNull SpanService spanService;

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Void> save(@NonNull DatasetItemBatch batch) {
if (batch.datasetId() == null && batch.datasetName() == null) {
return Mono.error(failWithError("dataset_id or dataset_name must be provided"));
Expand Down Expand Up @@ -102,14 +102,14 @@ private ClientErrorException newConflict(String error) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItem> get(@NonNull UUID id) {
return dao.get(id)
.switchIfEmpty(Mono.defer(() -> Mono.error(failWithNotFound("Dataset item not found"))));
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Flux<DatasetItem> getItems(@NonNull UUID datasetId, int limit, UUID lastRetrievedId) {
return dao.getItems(datasetId, limit, lastRetrievedId);
}
Expand Down Expand Up @@ -185,7 +185,7 @@ private NotFoundException failWithNotFound(String message) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<Void> delete(@NonNull List<UUID> ids) {
if (ids.isEmpty()) {
return Mono.empty();
Expand All @@ -195,13 +195,13 @@ public Mono<Void> delete(@NonNull List<UUID> ids) {
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(@NonNull UUID datasetId, int page, int size) {
return dao.getItems(datasetId, page, size);
}

@Override
@Trace(dispatcher = true)
@WithSpan
public Mono<DatasetItemPage> getItems(
int page, int size, @NonNull DatasetItemSearchCriteria datasetItemSearchCriteria) {
log.info("Finding dataset items with experiment items by '{}', page '{}', size '{}'",
Expand Down
Loading
Loading