diff --git a/apps/opik-backend/config.yml b/apps/opik-backend/config.yml index aae3c97b98..946ac62123 100644 --- a/apps/opik-backend/config.yml +++ b/apps/opik-backend/config.yml @@ -58,6 +58,7 @@ redis: authentication: enabled: ${AUTH_ENABLED:-false} + apiKeyResolutionCacheTTLInSec: ${AUTH_API_KEY_RESOLUTION_CACHE_TTL_IN_SEC:-5} # 0 means no cache sdk: url: ${AUTH_SDK_URL:-''} ui: diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java index 0ec4eefff5..9236e67a2b 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/DatasetItemDAO.java @@ -403,7 +403,8 @@ private Mono insert(UUID datasetId, List items) { List> batches = Lists.partition(items, bulkConfig.getSize()); return Flux.fromIterable(batches) - .flatMapSequential(batch -> asyncTemplate.nonTransaction(connection -> mapAndInsert(datasetId, batch, connection))) + .flatMapSequential( + batch -> asyncTemplate.nonTransaction(connection -> mapAndInsert(datasetId, batch, connection))) .reduce(0L, Long::sum); } @@ -432,7 +433,7 @@ private Mono mapAndInsert(UUID datasetId, List items, Connect statement.bind("input" + i, getOrDefault(item.input())); statement.bind("expectedOutput" + i, getOrDefault(item.expectedOutput())); statement.bind("metadata" + i, getOrDefault(item.metadata())); - statement.bind("createdBy" + i,userName); + statement.bind("createdBy" + i, userName); statement.bind("lastUpdatedBy" + i, userName); i++; } diff --git a/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java b/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java index 80441c0a26..afe89de6c8 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/domain/ExperimentItemDAO.java @@ -74,7 +74,7 @@ INSERT INTO 
experiment_items ( new.last_updated_by FROM ( AS id, :experiment_id AS experiment_id, :dataset_item_id AS dataset_item_id, @@ -207,7 +207,6 @@ private Mono insert(Collection experimentItems, Connection }); } - private Publisher mapToExperimentItem(Result result) { return result.map((row, rowMetadata) -> ExperimentItem.builder() .id(row.get("id", UUID.class)) diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/AuthenticationConfig.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/AuthenticationConfig.java index 9291a07f5a..a1e7e7534e 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/AuthenticationConfig.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/AuthenticationConfig.java @@ -15,6 +15,10 @@ public record UrlConfig(@Valid @JsonProperty @NotNull String url) { @JsonProperty private boolean enabled; + @Valid + @JsonProperty + private int apiKeyResolutionCacheTTLInSec; + @Valid @JsonProperty private UrlConfig ui; diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/BulkOperationsConfig.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/BulkOperationsConfig.java index 8856f6df45..aa0ab30dc3 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/BulkOperationsConfig.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/BulkOperationsConfig.java @@ -1,6 +1,5 @@ package com.comet.opik.infrastructure; - import com.fasterxml.jackson.annotation.JsonProperty; import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; @@ -11,6 +10,5 @@ public class BulkOperationsConfig { @Valid @JsonProperty - @NotNull - private int size; + @NotNull private int size; } diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthCredentialsCacheService.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthCredentialsCacheService.java new file mode 100644 
index 0000000000..1dc37b4bd6 --- /dev/null +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthCredentialsCacheService.java @@ -0,0 +1,41 @@ +package com.comet.opik.infrastructure.auth; + +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.redisson.api.RListReactive; +import org.redisson.api.RedissonReactiveClient; + +import java.time.Duration; +import java.util.List; +import java.util.Optional; + +@Slf4j +@RequiredArgsConstructor +class AuthCredentialsCacheService implements CacheService { + + public static final String KEY_FORMAT = "auth-%s-%s"; + private final RedissonReactiveClient redissonClient; + private final int ttlInSeconds; + + public Optional resolveApiKeyUserAndWorkspaceIdFromCache(@NonNull String apiKey, + @NonNull String workspaceName) { + String key = KEY_FORMAT.formatted(apiKey, workspaceName); + + RListReactive bucket = redissonClient.getList(key); + + return bucket + .readAll() + .blockOptional() + .filter(pair -> pair.size() == 2) + .map(pair -> new AuthCredentials(pair.getFirst(), pair.getLast())); + } + + public void cache(@NonNull String apiKey, @NonNull String workspaceName, @NonNull String userName, + @NonNull String workspaceId) { + String key = KEY_FORMAT.formatted(apiKey, workspaceName); + redissonClient.getList(key).addAll(List.of(userName, workspaceId)).block(); + redissonClient.getList(key).expire(Duration.ofSeconds(ttlInSeconds)).block(); + } + +} diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthModule.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthModule.java index 6b986ac23f..757a994ec3 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthModule.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/AuthModule.java @@ -2,6 +2,7 @@ import com.comet.opik.infrastructure.AuthenticationConfig; import 
com.comet.opik.infrastructure.OpikConfiguration; +import com.comet.opik.infrastructure.redis.LockService; import com.google.common.base.Preconditions; import com.google.inject.Provides; import jakarta.inject.Provider; @@ -10,6 +11,7 @@ import jakarta.ws.rs.client.ClientBuilder; import lombok.NonNull; import org.apache.commons.lang3.StringUtils; +import org.redisson.api.RedissonReactiveClient; import ru.vyarus.dropwizard.guice.module.support.DropwizardAwareModule; import ru.vyarus.dropwizard.guice.module.yaml.bind.Config; @@ -21,7 +23,9 @@ public class AuthModule extends DropwizardAwareModule { @Singleton public AuthService authService( @Config("authentication") AuthenticationConfig config, - @NonNull Provider requestContext) { + @NonNull Provider requestContext, + @NonNull RedissonReactiveClient redissonClient, + @NonNull LockService lockService) { if (!config.isEnabled()) { return new AuthServiceImpl(requestContext); @@ -37,7 +41,12 @@ public AuthService authService( Preconditions.checkArgument(StringUtils.isNotBlank(config.getSdk().url()), "The property authentication.sdk.url must not be blank when authentication is enabled"); - return new RemoteAuthService(client(), config.getSdk(), config.getUi(), requestContext); + var cacheService = config.getApiKeyResolutionCacheTTLInSec() > 0 + ? 
new AuthCredentialsCacheService(redissonClient, config.getApiKeyResolutionCacheTTLInSec()) + : new NoopCacheService(); + + return new RemoteAuthService(client(), config.getSdk(), config.getUi(), requestContext, cacheService, + lockService); } public Client client() { diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/CacheService.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/CacheService.java new file mode 100644 index 0000000000..1d760b7db7 --- /dev/null +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/CacheService.java @@ -0,0 +1,26 @@ +package com.comet.opik.infrastructure.auth; + +import java.util.Optional; + +interface CacheService { + + record AuthCredentials(String userName, String workspaceId) { + } + + void cache(String apiKey, String workspaceName, String userName, String workspaceId); + Optional resolveApiKeyUserAndWorkspaceIdFromCache(String apiKey, String workspaceName); +} + +class NoopCacheService implements CacheService { + + @Override + public void cache(String apiKey, String workspaceName, String userName, String workspaceId) { + // no-op + } + + @Override + public Optional resolveApiKeyUserAndWorkspaceIdFromCache( + String apiKey, String workspaceName) { + return Optional.empty(); + } +} \ No newline at end of file diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/RemoteAuthService.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/RemoteAuthService.java index 5b7c104ccf..f59a328fd8 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/RemoteAuthService.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/auth/RemoteAuthService.java @@ -1,6 +1,7 @@ package com.comet.opik.infrastructure.auth; import com.comet.opik.domain.ProjectService; +import com.comet.opik.infrastructure.redis.LockService; import jakarta.inject.Provider; import jakarta.ws.rs.ClientErrorException; 
import jakarta.ws.rs.client.Client; @@ -13,26 +14,37 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; +import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; import java.net.URI; import java.util.Optional; import static com.comet.opik.infrastructure.AuthenticationConfig.UrlConfig; +import static com.comet.opik.infrastructure.auth.AuthCredentialsCacheService.AuthCredentials; +import static com.comet.opik.infrastructure.redis.LockService.Lock; @RequiredArgsConstructor @Slf4j class RemoteAuthService implements AuthService { + public static final String NOT_ALLOWED_TO_ACCESS_WORKSPACE = "User not allowed to access workspace"; private final @NonNull Client client; private final @NonNull UrlConfig apiKeyAuthUrl; private final @NonNull UrlConfig uiAuthUrl; private final @NonNull Provider requestContext; + private final @NonNull CacheService cacheService; + private final @NonNull LockService lockService; record AuthRequest(String workspaceName) { } + record AuthResponse(String user, String workspaceId) { } + record ValidatedAuthCredentials(boolean shouldCache, String userName, String workspaceId) { + } + @Override public void authenticate(HttpHeaders headers, Cookie sessionToken) { @@ -66,24 +78,61 @@ private void authenticateUsingSessionToken(Cookie sessionToken, String workspace .cookie(sessionToken) .post(Entity.json(new AuthRequest(workspaceName)))) { - verifyResponse(response); + AuthResponse credentials = verifyResponse(response); + + setCredentialIntoContext(credentials.user(), credentials.workspaceId()); } } private void authenticateUsingApiKey(HttpHeaders headers, String workspaceName) { - try (var response = client.target(URI.create(apiKeyAuthUrl.url())) - .request() - .accept(MediaType.APPLICATION_JSON) - .header(jakarta.ws.rs.core.HttpHeaders.AUTHORIZATION, - Optional.ofNullable(headers.getHeaderString(jakarta.ws.rs.core.HttpHeaders.AUTHORIZATION)) - .orElse("")) - 
.post(Entity.json(new AuthRequest(workspaceName)))) { - verifyResponse(response); + String apiKey = Optional.ofNullable(headers.getHeaderString(HttpHeaders.AUTHORIZATION)) + .orElse(""); + + if (apiKey.isBlank()) { + log.info("API key not found in headers"); + throw new ClientErrorException(NOT_ALLOWED_TO_ACCESS_WORKSPACE, Response.Status.UNAUTHORIZED); + } + + var lock = new Lock(apiKey, workspaceName); + + ValidatedAuthCredentials credentials = lockService.executeWithLock( + lock, + Mono.fromCallable(() -> validateApiKeyAndGetCredentials(workspaceName, apiKey)) + .subscribeOn(Schedulers.boundedElastic())) + .block(); + + if (credentials.shouldCache()) { + log.debug("Caching user and workspace id for API key"); + cacheService.cache(apiKey, workspaceName, credentials.userName(), credentials.workspaceId()); + } + + setCredentialIntoContext(credentials.userName(), credentials.workspaceId()); + } + + private ValidatedAuthCredentials validateApiKeyAndGetCredentials(String workspaceName, String apiKey) { + Optional credentials = cacheService.resolveApiKeyUserAndWorkspaceIdFromCache(apiKey, + workspaceName); + + if (credentials.isEmpty()) { + log.debug("User and workspace id not found in cache for API key"); + + try (var response = client.target(URI.create(apiKeyAuthUrl.url())) + .request() + .accept(MediaType.APPLICATION_JSON) + .header(HttpHeaders.AUTHORIZATION, + apiKey) + .post(Entity.json(new AuthRequest(workspaceName)))) { + + AuthResponse authResponse = verifyResponse(response); + return new ValidatedAuthCredentials(true, authResponse.user(), authResponse.workspaceId()); + } + } else { + return new ValidatedAuthCredentials(false, credentials.get().userName(), credentials.get().workspaceId()); } } - private void verifyResponse(Response response) { + private AuthResponse verifyResponse(Response response) { if (response.getStatusInfo().getFamily() == Response.Status.Family.SUCCESSFUL) { var authResponse = response.readEntity(AuthResponse.class); @@ -92,12 +141,9 @@ 
private void verifyResponse(Response response) { throw new ClientErrorException(Response.Status.UNAUTHORIZED); } - requestContext.get().setUserName(authResponse.user()); - requestContext.get().setWorkspaceId(authResponse.workspaceId()); - return; - + return authResponse; } else if (response.getStatus() == Response.Status.UNAUTHORIZED.getStatusCode()) { - throw new ClientErrorException("User not allowed to access workspace", + throw new ClientErrorException(NOT_ALLOWED_TO_ACCESS_WORKSPACE, Response.Status.UNAUTHORIZED); } else if (response.getStatus() == Response.Status.FORBIDDEN.getStatusCode()) { throw new ClientErrorException("User has bot permission to the workspace", Response.Status.FORBIDDEN); @@ -110,4 +156,9 @@ private void verifyResponse(Response response) { throw new ClientErrorException(Response.Status.INTERNAL_SERVER_ERROR); } + private void setCredentialIntoContext(String userName, String workspaceId) { + requestContext.get().setUserName(userName); + requestContext.get().setWorkspaceId(workspaceId); + } + } diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/LockService.java b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/LockService.java index 90353bd06c..a02bba19d5 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/LockService.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/LockService.java @@ -7,7 +7,18 @@ public interface LockService { - record Lock(UUID id, String name) { + record Lock(String key) { + + private static final String KEY_FORMAT = "%s-%s"; + + public Lock(UUID id, String name) { + this(KEY_FORMAT.formatted(id, name)); + } + + public Lock(String id, String name) { + this(KEY_FORMAT.formatted(id, name)); + } + } Mono executeWithLock(Lock lock, Mono action); diff --git a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/RedissonLockService.java 
b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/RedissonLockService.java index b79adf3d71..ded698ca27 100644 --- a/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/RedissonLockService.java +++ b/apps/opik-backend/src/main/java/com/comet/opik/infrastructure/redis/RedissonLockService.java @@ -25,7 +25,7 @@ public Mono executeWithLock(Lock lock, Mono action) { RPermitExpirableSemaphoreReactive semaphore = redisClient.getPermitExpirableSemaphore( CommonOptions - .name("%s-%s".formatted(lock.id(), lock.name())) + .name(lock.key()) .timeout(Duration.ofMillis(distributedLockConfig.getLockTimeoutMS())) .retryInterval(Duration.ofMillis(10)) .retryAttempts(distributedLockConfig.getLockTimeoutMS() / 10)); @@ -56,7 +56,7 @@ private Mono runAction(Lock lock, Mono action, String locked) { public Flux executeWithLock(Lock lock, Flux stream) { RPermitExpirableSemaphoreReactive semaphore = redisClient.getPermitExpirableSemaphore( CommonOptions - .name("%s-%s".formatted(lock.id(), lock.name())) + .name(lock.key()) .timeout(Duration.ofMillis(distributedLockConfig.getLockTimeoutMS())) .retryInterval(Duration.ofMillis(10)) .retryAttempts(distributedLockConfig.getLockTimeoutMS() / 10)); diff --git a/apps/opik-backend/src/test/java/com/comet/opik/api/resources/utils/TestDropwizardAppExtensionUtils.java b/apps/opik-backend/src/test/java/com/comet/opik/api/resources/utils/TestDropwizardAppExtensionUtils.java index 27b2123f2c..1c339b9be4 100644 --- a/apps/opik-backend/src/test/java/com/comet/opik/api/resources/utils/TestDropwizardAppExtensionUtils.java +++ b/apps/opik-backend/src/test/java/com/comet/opik/api/resources/utils/TestDropwizardAppExtensionUtils.java @@ -27,8 +27,19 @@ public static TestDropwizardAppExtension newTestDropwizardAppExtension( } public static TestDropwizardAppExtension newTestDropwizardAppExtension( - String jdbcUrl, DatabaseAnalyticsFactory databaseAnalyticsFactory, WireMockRuntimeInfo runtimeInfo, + String jdbcUrl, + 
DatabaseAnalyticsFactory databaseAnalyticsFactory, + WireMockRuntimeInfo runtimeInfo, String redisUrl) { + return newTestDropwizardAppExtension(jdbcUrl, databaseAnalyticsFactory, runtimeInfo, redisUrl, null); + } + + public static TestDropwizardAppExtension newTestDropwizardAppExtension( + String jdbcUrl, + DatabaseAnalyticsFactory databaseAnalyticsFactory, + WireMockRuntimeInfo runtimeInfo, + String redisUrl, + Integer cacheTtlInSeconds) { var list = new ArrayList(); list.add("database.url: " + jdbcUrl); @@ -43,6 +54,10 @@ public static TestDropwizardAppExtension newTestDropwizardAppExtension( list.add("authentication.enabled: true"); list.add("authentication.sdk.url: " + "%s/opik/auth".formatted(runtimeInfo.getHttpsBaseUrl())); list.add("authentication.ui.url: " + "%s/opik/auth-session".formatted(runtimeInfo.getHttpsBaseUrl())); + + if (cacheTtlInSeconds != null) { + list.add("authentication.apiKeyResolutionCacheTTLInSec: " + cacheTtlInSeconds); + } } GuiceyConfigurationHook hook = injector -> { diff --git a/apps/opik-backend/src/test/java/com/comet/opik/infrastructure/auth/AuthModuleCache2E2Test.java b/apps/opik-backend/src/test/java/com/comet/opik/infrastructure/auth/AuthModuleCache2E2Test.java new file mode 100644 index 0000000000..d071713cf9 --- /dev/null +++ b/apps/opik-backend/src/test/java/com/comet/opik/infrastructure/auth/AuthModuleCache2E2Test.java @@ -0,0 +1,139 @@ +package com.comet.opik.infrastructure.auth; + +import com.comet.opik.api.Project; +import com.comet.opik.api.resources.utils.AuthTestUtils; +import com.comet.opik.api.resources.utils.ClickHouseContainerUtils; +import com.comet.opik.api.resources.utils.ClientSupportUtils; +import com.comet.opik.api.resources.utils.MigrationUtils; +import com.comet.opik.api.resources.utils.MySQLContainerUtils; +import com.comet.opik.api.resources.utils.RedisContainerUtils; +import com.comet.opik.api.resources.utils.TestDropwizardAppExtensionUtils; +import com.comet.opik.api.resources.utils.WireMockUtils; 
+import com.github.tomakehurst.wiremock.client.WireMock; +import com.redis.testcontainers.RedisContainer; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.Response; +import org.jdbi.v3.core.Jdbi; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.testcontainers.containers.ClickHouseContainer; +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Testcontainers; +import reactor.core.publisher.Mono; +import ru.vyarus.dropwizard.guice.test.ClientSupport; +import ru.vyarus.dropwizard.guice.test.jupiter.ext.TestDropwizardAppExtension; + +import java.sql.SQLException; +import java.time.Duration; +import java.util.UUID; + +import static com.comet.opik.infrastructure.auth.RequestContext.WORKSPACE_HEADER; +import static org.assertj.core.api.Assertions.assertThat; + +@Testcontainers(parallel = true) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class AuthModuleCache2E2Test { + + public static final String URL_TEMPLATE = "%s/v1/private/projects"; + + private static final RedisContainer REDIS = RedisContainerUtils.newRedisContainer(); + + private static final MySQLContainer MYSQL = MySQLContainerUtils.newMySQLContainer(); + + private static final ClickHouseContainer CLICKHOUSE = ClickHouseContainerUtils.newClickHouseContainer(); + + private static final WireMockUtils.WireMockRuntime wireMock; + + @RegisterExtension + private static final TestDropwizardAppExtension app; + + private static final int CACHE_TTL_IN_SECONDS = 1; + + public static final String AUTH_PATH = "/opik/auth"; + + public static final String API_KEY = UUID.randomUUID().toString(); + private static final String USER = UUID.randomUUID().toString(); + private static final String WORKSPACE_ID = UUID.randomUUID().toString(); + private static final String TEST_WORKSPACE = 
UUID.randomUUID().toString(); + + static { + MYSQL.start(); + CLICKHOUSE.start(); + REDIS.start(); + + wireMock = WireMockUtils.startWireMock(); + + var databaseAnalyticsFactory = ClickHouseContainerUtils.newDatabaseAnalyticsFactory(CLICKHOUSE, + ClickHouseContainerUtils.DATABASE_NAME); + + app = TestDropwizardAppExtensionUtils.newTestDropwizardAppExtension(MYSQL.getJdbcUrl(), + databaseAnalyticsFactory, wireMock.runtimeInfo(), REDIS.getRedisURI(), CACHE_TTL_IN_SECONDS); + } + + private String baseURI; + private ClientSupport client; + + @BeforeAll + void beforeAll(ClientSupport client, Jdbi jdbi) throws SQLException { + MigrationUtils.runDbMigration(jdbi, MySQLContainerUtils.migrationParameters()); + + try (var connection = CLICKHOUSE.createConnection("")) { + MigrationUtils.runDbMigration(connection, MigrationUtils.CLICKHOUSE_CHANGELOG_FILE, + ClickHouseContainerUtils.migrationParameters()); + } + + baseURI = "http://localhost:%d".formatted(client.getPort()); + this.client = client; + + ClientSupportUtils.config(client); + + AuthTestUtils.mockTargetWorkspace(wireMock.server(), API_KEY, TEST_WORKSPACE, WORKSPACE_ID, USER); + } + + @Test + void testAuthCache__whenApiKeyAndWorkspaceAreCached__thenUseTheCacheUntilTTLExpire() { + + try (Response response = client.target(URL_TEMPLATE.formatted(baseURI)) + .request() + .header(WORKSPACE_HEADER, TEST_WORKSPACE) + .header(HttpHeaders.AUTHORIZATION, API_KEY) + .post(Entity.json(Project.builder().name(UUID.randomUUID().toString()).build()))) { + + assertThat(response.getStatus()).isEqualTo(201); + } + + try (var response = callEndpoint()) { + + assertThat(response.getStatus()).isEqualTo(200); + + wireMock.server().verify(1, WireMock.postRequestedFor(WireMock.urlEqualTo(AUTH_PATH))); + } + + try (var response = callEndpoint()) { + + assertThat(response.getStatus()).isEqualTo(200); + + wireMock.server().verify(1, WireMock.postRequestedFor(WireMock.urlEqualTo(AUTH_PATH))); + } + + 
Mono.delay(Duration.ofMillis((CACHE_TTL_IN_SECONDS * 1000) + 100)).block(); + + try (var response = callEndpoint()) { + + assertThat(response.getStatus()).isEqualTo(200); + + wireMock.server().verify(2, WireMock.postRequestedFor(WireMock.urlEqualTo(AUTH_PATH))); + } + } + + private Response callEndpoint() { + return client.target(URL_TEMPLATE.formatted(baseURI)) + .request() + .header(WORKSPACE_HEADER, TEST_WORKSPACE) + .header(HttpHeaders.AUTHORIZATION, API_KEY) + .get(); + } +} diff --git a/apps/opik-backend/src/test/resources/config-test.yml b/apps/opik-backend/src/test/resources/config-test.yml index 3ff30b40d5..85f1ff66c9 100644 --- a/apps/opik-backend/src/test/resources/config-test.yml +++ b/apps/opik-backend/src/test/resources/config-test.yml @@ -58,6 +58,7 @@ bulkOperations: authentication: enabled: ${AUTH_ENABLED:-false} + apiKeyResolutionCacheTTLInSec: ${AUTH_API_KEY_RESOLUTION_CACHE_TTL_IN_SEC:-0} # 0 means no cache sdk: url: ${AUTH_SDK_URL:-''} ui: