[OPIK-135] Add support to RDS auth for MySQL (#306)
* [OPIK-135] Add support to RDS auth for MySQL

* Change variables

* Update Helm documentation

* Add AWS tests

---------

Co-authored-by: CometActions <[email protected]>
2 people authored and Douglas Blank committed Oct 4, 2024
1 parent 5b49ce6 commit e1f4f45
Showing 7 changed files with 157 additions and 6 deletions.
6 changes: 4 additions & 2 deletions apps/opik-backend/config.yml
@@ -5,10 +5,12 @@ logging:
com.comet: ${OPIK_LOG_LEVEL:-INFO}

database:
- url: ${STATE_DB_URL:-jdbc:mysql://localhost:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true}
+ url: ${STATE_DB_PROTOCOL:-jdbc:mysql://}${STATE_DB_URL:-localhost:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true}
user: ${STATE_DB_USER:-opik}
password: ${STATE_DB_PASS:-opik}
- driverClass: com.mysql.cj.jdbc.Driver
+ driverClass: ${STATE_DB_DRIVER_CLASS:-com.mysql.cj.jdbc.Driver}
+ properties:
+   wrapperPlugins: ${STATE_DB_PLUGINS:-''}

# For migrations
databaseAnalyticsMigrations:
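With the state-database URL now assembled from STATE_DB_PROTOCOL plus STATE_DB_URL, the JDBC protocol can be swapped without touching the endpoint, and the driver class and wrapper plugins become overridable as well. A quick sketch of how the substitution resolves, using the defaults on one side and placeholder RDS values on the other (the endpoint is not a real one):

    # No overrides (previous behaviour):
    #   url:         jdbc:mysql://localhost:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true
    #   driverClass: com.mysql.cj.jdbc.Driver
    #
    # STATE_DB_PROTOCOL=jdbc:aws-wrapper:mysql://, STATE_DB_URL=<rds-endpoint>:3306/opik?...,
    # STATE_DB_DRIVER_CLASS=software.amazon.jdbc.Driver, STATE_DB_PLUGINS=iam:
    #   url:         jdbc:aws-wrapper:mysql://<rds-endpoint>:3306/opik?...
    #   driverClass: software.amazon.jdbc.Driver
    #   properties:
    #     wrapperPlugins: iam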
17 changes: 17 additions & 0 deletions apps/opik-backend/pom.xml
@@ -30,6 +30,7 @@
<uuid.java.generator.version>5.1.0</uuid.java.generator.version>
<wiremock.version>3.9.1</wiremock.version>
<redisson.version>3.34.1</redisson.version>
<aws.java.sdk.version>2.25.70</aws.java.sdk.version>
<mainClass>com.comet.opik.OpikApplication</mainClass>
</properties>

@@ -49,6 +50,13 @@
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>bom</artifactId>
<version>${aws.java.sdk.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>

@@ -58,6 +66,15 @@
<artifactId>newrelic-api</artifactId>
<version>8.14.0</version>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>rds</artifactId>
</dependency>
<dependency>
<groupId>software.amazon.jdbc</groupId>
<artifactId>aws-advanced-jdbc-wrapper</artifactId>
<version>2.3.8</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-core</artifactId>
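The two new runtime dependencies split the work: aws-advanced-jdbc-wrapper provides the software.amazon.jdbc.Driver and its wrapperPlugins (including iam), while the AWS SDK rds module is what the IAM plugin relies on to mint the short-lived authentication token that stands in for a password. The following is a rough, self-contained sketch of that token generation, not code from this PR; the region, endpoint, and user are placeholders, and in the backend the wrapper's iam plugin is expected to handle this step automatically:

    import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
    import software.amazon.awssdk.regions.Region;
    import software.amazon.awssdk.services.rds.RdsUtilities;
    import software.amazon.awssdk.services.rds.model.GenerateAuthenticationTokenRequest;

    public class RdsIamTokenSketch {

        public static void main(String[] args) {
            // Placeholders mirroring the E2E test constants; not real values.
            String hostname = "<rds-instance>.<aws-region>.rds.amazonaws.com";
            int port = 3306;
            String username = "db_user";

            RdsUtilities rdsUtilities = RdsUtilities.builder()
                    .region(Region.US_EAST_1) // assumption: the region hosting the RDS instance
                    .credentialsProvider(DefaultCredentialsProvider.create())
                    .build();

            // The token is used in place of a password and expires after roughly 15 minutes.
            String authToken = rdsUtilities.generateAuthenticationToken(
                    GenerateAuthenticationTokenRequest.builder()
                            .hostname(hostname)
                            .port(port)
                            .username(username)
                            .build());

            System.out.println("Generated IAM auth token of length " + authToken.length());
        }
    }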
17 changes: 16 additions & 1 deletion TestDropwizardAppExtensionUtils.java
@@ -32,7 +32,10 @@ public record AppContextConfig(
Long limit,
Long limitDurationInSeconds,
Map<String, LimitConfig> customLimits,
- List<Object> customBeans) {
+ List<Object> customBeans,
+ String jdbcUserName,
+ String jdbcDriverClass,
+ String awsJdbcDriverPlugins) {
}

public static TestDropwizardAppExtension newTestDropwizardAppExtension(String jdbcUrl,
@@ -79,6 +82,18 @@ public static TestDropwizardAppExtension newTestDropwizardAppExtension(AppContextConfig
var list = new ArrayList<String>();
list.add("database.url: " + appContextConfig.jdbcUrl());

if (appContextConfig.jdbcUserName() != null) {
list.add("database.user: " + appContextConfig.jdbcUserName());
}

if (appContextConfig.jdbcDriverClass() != null) {
list.add("database.driverClass: " + appContextConfig.jdbcDriverClass());
}

if (appContextConfig.awsJdbcDriverPlugins() != null) {
list.add("database.properties.wrapperPlugins: " + appContextConfig.awsJdbcDriverPlugins());
}

if (appContextConfig.databaseAnalyticsFactory() != null) {
list.add("databaseAnalytics.port: " + appContextConfig.databaseAnalyticsFactory().getPort());
list.add("databaseAnalytics.username: " + appContextConfig.databaseAnalyticsFactory().getUsername());
114 changes: 114 additions & 0 deletions MysqlRdsIamE2eTest.java (new file)
@@ -0,0 +1,114 @@
package com.comet.opik.infrastructure.aws.rds;

import com.comet.opik.api.Project;
import com.comet.opik.api.resources.utils.AuthTestUtils;
import com.comet.opik.api.resources.utils.ClickHouseContainerUtils;
import com.comet.opik.api.resources.utils.ClientSupportUtils;
import com.comet.opik.api.resources.utils.MigrationUtils;
import com.comet.opik.api.resources.utils.MySQLContainerUtils;
import com.comet.opik.api.resources.utils.RedisContainerUtils;
import com.comet.opik.api.resources.utils.TestDropwizardAppExtensionUtils;
import com.comet.opik.api.resources.utils.TestDropwizardAppExtensionUtils.AppContextConfig;
import com.github.tomakehurst.wiremock.client.WireMock;
import com.redis.testcontainers.RedisContainer;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.HttpHeaders;
import jakarta.ws.rs.core.Response;
import org.jdbi.v3.core.Jdbi;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.testcontainers.containers.ClickHouseContainer;
import org.testcontainers.junit.jupiter.Testcontainers;
import reactor.core.publisher.Mono;
import ru.vyarus.dropwizard.guice.test.ClientSupport;
import ru.vyarus.dropwizard.guice.test.jupiter.ext.TestDropwizardAppExtension;

import java.sql.SQLException;
import java.time.Duration;
import java.util.UUID;

import static com.comet.opik.infrastructure.auth.RequestContext.WORKSPACE_HEADER;
import static org.assertj.core.api.Assertions.assertThat;

@Disabled
@Testcontainers(parallel = true)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class MysqlRdsIamE2eTest {


private static final String URL_TEMPLATE = "%s/v1/private/projects";

/// See PR: https://github.com/comet-ml/opik/pull/306
// RDS DB endpoint, port, and database name
// JDBC URL format: jdbc:aws-wrapper:mysql://<rds-endpoint>:<port>/<db-name>?createDatabaseIfNotExist=true&rewriteBatchedStatements=true
// DB endpoint: <rds-instance>.<region>.rds.amazonaws.com
// AWS Driver only supports rds.amazonaws.com endpoints, not custom endpoints
private static final String MYSQL_TEMPLATE_URL = "jdbc:aws-wrapper:mysql://%s";
private static final String DB_PLUGINS = "iam";
private static final String DB_USER = "db_user";
private static final String AWS_JDBC_DRIVER = "software.amazon.jdbc.Driver";

private static final ClickHouseContainer CLICKHOUSE = ClickHouseContainerUtils.newClickHouseContainer();

private static final RedisContainer REDIS = RedisContainerUtils.newRedisContainer();

@RegisterExtension
private static final TestDropwizardAppExtension app;

private static final String TEST_WORKSPACE = "default";

static {
CLICKHOUSE.start();
REDIS.start();

var databaseAnalyticsFactory = ClickHouseContainerUtils.newDatabaseAnalyticsFactory(CLICKHOUSE,
ClickHouseContainerUtils.DATABASE_NAME);

String rdsEndpoint = "<rds-instance>.<aws-region>.rds.amazonaws.com:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true";

app = TestDropwizardAppExtensionUtils.newTestDropwizardAppExtension(
AppContextConfig.builder()
.jdbcUrl(String.format(MYSQL_TEMPLATE_URL, rdsEndpoint))
.awsJdbcDriverPlugins(DB_PLUGINS)
.jdbcUserName(DB_USER)
.jdbcDriverClass(AWS_JDBC_DRIVER)
.databaseAnalyticsFactory(databaseAnalyticsFactory)
.redisUrl(REDIS.getRedisURI())
.build());
}

private String baseURI;
private ClientSupport client;

@BeforeAll
void beforeAll(ClientSupport client, Jdbi jdbi) throws SQLException {
MigrationUtils.runDbMigration(jdbi, MySQLContainerUtils.migrationParameters());

try (var connection = CLICKHOUSE.createConnection("")) {
MigrationUtils.runDbMigration(connection, MigrationUtils.CLICKHOUSE_CHANGELOG_FILE,
ClickHouseContainerUtils.migrationParameters());
}

baseURI = "http://localhost:%d".formatted(client.getPort());
this.client = client;

ClientSupportUtils.config(client);
}

@Test
void testAwsRds__whenRdsIamDbAuthenticationIsEnabled__shouldAcceptRequest() {

try (Response response = client.target(URL_TEMPLATE.formatted(baseURI))
.request()
.header(WORKSPACE_HEADER, TEST_WORKSPACE)
.post(Entity.json(Project.builder().name(UUID.randomUUID().toString()).build()))) {

assertThat(response.getStatus()).isEqualTo(201);
}
}


}
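Note that the test is @Disabled: it needs a real RDS MySQL instance with IAM database authentication enabled, the placeholder endpoint filled in, and AWS credentials that are allowed the rds-db:connect action for the database user. On the MySQL side that user has to be created with the RDS IAM plugin (per AWS documentation, roughly CREATE USER 'db_user' IDENTIFIED WITH AWSAuthenticationPlugin AS 'RDS'); none of that setup is part of this PR.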
3 changes: 2 additions & 1 deletion deployment/docker-compose/docker-compose.yaml
@@ -67,7 +67,8 @@ services:
command: [ "bash", "-c", "./run_db_migrations.sh && ./entrypoint.sh" ]
environment:
DOCKER_BUILDKIT: 1
STATE_DB_URL: "jdbc:mysql://mysql:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true"
STATE_DB_PROTOCOL: "jdbc:mysql://"
STATE_DB_URL: "mysql:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true"
STATE_DB_DATABASE_NAME: opik
STATE_DB_USER: opik
STATE_DB_PASS: opik
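For Docker Compose deployments, pointing the backend at RDS with IAM authentication then comes down to overriding the same variables on the backend service. A hedged sketch with placeholder endpoint and user, assuming the container can obtain AWS credentials (for example through an instance or task role); the Helm chart's component.backend.env shown below takes the same keys:

    environment:
      STATE_DB_PROTOCOL: "jdbc:aws-wrapper:mysql://"
      STATE_DB_URL: "<rds-instance>.<aws-region>.rds.amazonaws.com:3306/opik?createDatabaseIfNotExist=true&rewriteBatchedStatements=true"
      STATE_DB_DRIVER_CLASS: "software.amazon.jdbc.Driver"
      STATE_DB_PLUGINS: "iam"
      STATE_DB_USER: "db_user"
      # No STATE_DB_PASS needed: the iam wrapper plugin supplies a token at connect time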
3 changes: 2 additions & 1 deletion deployment/helm_chart/opik/README.md
@@ -112,7 +112,8 @@ Call opik api on http://localhost:5173/api
| component.backend.env.REDIS_URL | string | `"redis://:wFSuJX9nDBdCa25sKZG7bh@opik-redis-master:6379/"` | |
| component.backend.env.STATE_DB_DATABASE_NAME | string | `"opik"` | |
| component.backend.env.STATE_DB_PASS | string | `"opik"` | |
- | component.backend.env.STATE_DB_URL | string | `"jdbc:mysql://opik-mysql:3306/opik?rewriteBatchedStatements=true"` | |
+ | component.backend.env.STATE_DB_PROTOCOL | string | `"jdbc:mysql://"` | |
+ | component.backend.env.STATE_DB_URL | string | `"opik-mysql:3306/opik?rewriteBatchedStatements=true"` | |
| component.backend.env.STATE_DB_USER | string | `"opik"` | |
| component.backend.envFrom[0].configMapRef.name | string | `"opik-backend"` | |
| component.backend.image.pullPolicy | string | `"IfNotPresent"` | |
3 changes: 2 additions & 1 deletion deployment/helm_chart/opik/values.yaml
@@ -40,7 +40,8 @@ component:
# initialDelaySeconds: 20

env:
STATE_DB_URL: "jdbc:mysql://opik-mysql:3306/opik?rewriteBatchedStatements=true"
STATE_DB_PROTOCOL: "jdbc:mysql://"
STATE_DB_URL: "opik-mysql:3306/opik?rewriteBatchedStatements=true"
STATE_DB_DATABASE_NAME: "opik"
STATE_DB_USER: opik
ANALYTICS_DB_MIGRATIONS_URL: "jdbc:clickhouse://clickhouse-opik-clickhouse:8123"
