Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[OPIK-473] Add Daily report #769

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions apps/opik-backend/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,13 @@ rateLimit:
usageReport:
enabled: ${OPIK_USAGE_REPORT_ENABLED:-true}
url: ${OPIK_USAGE_REPORT_URL:-https://stats.comet.com/notify/event/}
serverStats:
enabled: ${OPIK_USAGE_REPORT_SERVER_STATS_ENABLED:-true}

metadata:
version: ${OPIK_VERSION:-latest}

cors:
enabled: ${CORS:-false}

stateDatabaseName: ${STATE_DB_DATABASE_NAME:-opik}
5 changes: 5 additions & 0 deletions apps/opik-backend/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,11 @@
<artifactId>guicey-jdbi3</artifactId>
<version>${dropwizard-guicey.version}</version>
</dependency>
<dependency>
<groupId>io.github.dropwizard-jobs</groupId>
<artifactId>dropwizard-jobs-guice</artifactId>
<version>6.0.0</version>
</dependency>

<dependency>
<groupId>org.jdbi</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,15 @@
import com.comet.opik.infrastructure.ConfigurationModule;
import com.comet.opik.infrastructure.OpikConfiguration;
import com.comet.opik.infrastructure.auth.AuthModule;
import com.comet.opik.infrastructure.bi.BiModule;
import com.comet.opik.infrastructure.bi.OpikGuiceyLifecycleEventListener;
import com.comet.opik.infrastructure.bundle.LiquibaseBundle;
import com.comet.opik.infrastructure.db.DatabaseAnalyticsModule;
import com.comet.opik.infrastructure.db.IdGeneratorModule;
import com.comet.opik.infrastructure.db.NameGeneratorModule;
import com.comet.opik.infrastructure.events.EventModule;
import com.comet.opik.infrastructure.http.HttpModule;
import com.comet.opik.infrastructure.job.JobGuiceyInstaller;
import com.comet.opik.infrastructure.ratelimit.RateLimitModule;
import com.comet.opik.infrastructure.redis.RedisModule;
import com.comet.opik.utils.JsonBigDecimalDeserializer;
Expand Down Expand Up @@ -37,6 +39,8 @@

public class OpikApplication extends Application<OpikConfiguration> {

private GuiceBundle guiceBundle;

public static void main(String[] args) throws Exception {
new OpikApplication().run(args);
}
Expand All @@ -61,19 +65,24 @@ public void initialize(Bootstrap<OpikConfiguration> bootstrap) {
.migrationsFileName(DB_APP_ANALYTICS_MIGRATIONS_FILE_NAME)
.dataSourceFactoryFunction(OpikConfiguration::getDatabaseAnalyticsMigrations)
.build());
bootstrap.addBundle(GuiceBundle.builder()

guiceBundle = GuiceBundle.builder()
.bundles(JdbiBundle.<OpikConfiguration>forDatabase((conf, env) -> conf.getDatabase())
.withPlugins(new SqlObjectPlugin(), new Jackson2Plugin()))
.modules(new DatabaseAnalyticsModule(), new IdGeneratorModule(), new AuthModule(), new RedisModule(),
new RateLimitModule(), new NameGeneratorModule(), new HttpModule(), new EventModule(),
new ConfigurationModule())
new ConfigurationModule(), new BiModule())
.installers(JobGuiceyInstaller.class)
.listen(new OpikGuiceyLifecycleEventListener())
.enableAutoConfig()
.build());
.build();

bootstrap.addBundle(guiceBundle);
}

@Override
public void run(OpikConfiguration configuration, Environment environment) {

// Resources
var jersey = environment.jersey();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,12 +133,12 @@ int[] recordExperiments(@Bind("workspace_id") String workspaceId,
"FROM datasets " +
"WHERE created_at BETWEEN DATE_SUB(CURDATE(), INTERVAL 1 DAY) AND CURDATE() " +
"GROUP BY workspace_id,created_by")
List<BiInformationResponse.BiInformation> getExperimentBIInformation();
List<BiInformationResponse.BiInformation> getDatasetsBIInformation();

@SqlUpdate("CREATE TEMPORARY TABLE experiment_dataset_ids_<table_name> (id CHAR(36) PRIMARY KEY)")
void createTempTable(@Define("table_name") String tableName);

@SqlBatch("INSERT INTO experiment_dataset_ids_<table_name> (id) VALUES (:id)")
@SqlBatch("INSERT INTO experiment_dataset_ids_<table_name>(id) VALUES (:id)")
int[] insertTempTable(@Define("table_name") String tableName, @Bind("id") List<UUID> id);

@SqlUpdate("DROP TEMPORARY TABLE IF EXISTS experiment_dataset_ids_<table_name>")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ public interface DatasetService {
BiInformationResponse getDatasetBIInformation();

// Presumably returns the subset of the given ids that exist in the workspace — confirm against impl.
Set<UUID> exists(Set<UUID> datasetIds, String workspaceId);

// Total datasets reported by the daily BI query, summed across all workspaces.
long getDailyCreatedCount();
}

@Singleton
Expand Down Expand Up @@ -380,7 +382,7 @@ public BiInformationResponse getDatasetBIInformation() {
log.info("Getting dataset BI events daily data");
return template.inTransaction(READ_ONLY, handle -> {
var dao = handle.attach(DatasetDAO.class);
var biInformation = dao.getExperimentBIInformation();
var biInformation = dao.getDatasetsBIInformation();
return BiInformationResponse.builder()
.biInformation(biInformation)
.build();
Expand Down Expand Up @@ -488,4 +490,16 @@ public Mono<Void> recordExperiments(Set<DatasetLastExperimentCreated> datasetsLa
.then();
}

/**
 * Total number of datasets reported by the daily BI query, summed over every
 * workspace/creator bucket returned by the DAO.
 */
@Override
@WithSpan
public long getDailyCreatedCount() {
    return template.inTransaction(READ_ONLY, handle -> {
        var datasetDao = handle.attach(DatasetDAO.class);
        long total = 0L;
        for (var info : datasetDao.getDatasetsBIInformation()) {
            total += info.count();
        }
        return total;
    });
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -730,4 +730,10 @@ private Publisher<ExperimentDatasetId> mapDatasetId(Result result) {
return result.map((row, rowMetadata) -> new ExperimentDatasetId(row.get("dataset_id", UUID.class)));
}

/**
 * Runs the daily experiment BI query and totals the {@code experiment_count}
 * column across all returned rows. Emits {@code 0} when no rows come back.
 */
public Mono<Long> getDailyCreatedCount() {
    return Mono.from(connectionFactory.create())
            .flatMapMany(conn -> conn.createStatement(EXPERIMENT_DAILY_BI_INFORMATION).execute())
            .flatMap(rows -> rows.map((row, meta) -> row.get("experiment_count", Long.class)))
            .reduce(0L, Long::sum);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -319,6 +319,7 @@ public Flux<DatasetLastExperimentCreated> getMostRecentCreatedExperimentFromData
return experimentDAO.getMostRecentCreatedExperimentFromDatasets(datasetIds);
}

@WithSpan
public Mono<BiInformationResponse> getExperimentBIInformation() {
log.info("Getting experiment BI events daily data");
return experimentDAO.getExperimentBIInformation()
Expand All @@ -330,4 +331,9 @@ public Mono<BiInformationResponse> getExperimentBIInformation() {
.switchIfEmpty(Mono.just(BiInformationResponse.empty()));

}

/**
 * Number of experiments counted by the DAO's daily BI query; pure delegation,
 * traced via {@code @WithSpan}.
 */
@WithSpan
public Mono<Long> getDailyCreatedCount() {
    return experimentDAO.getDailyCreatedCount();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,8 @@ interface TraceDAO {
Flux<BiInformation> getTraceBIInformation(Connection connection);

Mono<ProjectStats> getStats(TraceSearchCriteria criteria);

Mono<Long> getDailyTraces();
}

@Slf4j
Expand Down Expand Up @@ -1125,6 +1127,15 @@ public Mono<ProjectStats> getStats(@NonNull TraceSearchCriteria criteria) {
});
}

/**
 * Executes the per-workspace trace count query outside a transaction and sums
 * the {@code trace_count} column over all rows. Emits {@code 0} when empty.
 */
@Override
public Mono<Long> getDailyTraces() {
    return asyncTemplate
            .nonTransaction(conn -> Mono.from(conn.createStatement(TRACE_COUNT_BY_WORKSPACE_ID).execute()))
            .flatMapMany(rows -> rows.map((row, meta) -> row.get("trace_count", Long.class)))
            .reduce(0L, Long::sum);
}

@Override
@WithSpan
public Mono<Map<UUID, Instant>> getLastUpdatedTraceAt(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,8 @@ public interface TraceService {
Mono<BiInformationResponse> getTraceBIInformation();

Mono<ProjectStats> getStats(TraceSearchCriteria searchCriteria);

Mono<Long> getDailyCreatedCount();
}

@Slf4j
Expand Down Expand Up @@ -207,11 +209,10 @@ private Mono<UUID> create(Trace trace, Project project, UUID id) {

/**
 * Maps a project-creation failure: when the project already exists (e.g. it was
 * created concurrently), fall back to looking it up by name on a blocking-safe
 * scheduler; any other error is propagated unchanged.
 *
 * NOTE(review): the source text contained both the pre- and post-reformat
 * version of the {@code EntityAlreadyExistsException} case (diff residue);
 * resolved here to the single, new formatting.
 */
private Mono<Project> handleProjectCreationError(Throwable exception, String projectName, String workspaceId) {
    return switch (exception) {
        case EntityAlreadyExistsException __ -> Mono.fromCallable(
                () -> projectService.findByNames(workspaceId, List.of(projectName)).stream().findFirst()
                        .orElseThrow())
                // findByNames hits the database — keep it off the reactive event loop.
                .subscribeOn(Schedulers.boundedElastic());
        default -> Mono.error(exception);
    };
}
Expand Down Expand Up @@ -352,6 +353,7 @@ public Mono<BiInformationResponse> getTraceBIInformation() {
.switchIfEmpty(Mono.just(BiInformationResponse.empty()));
}

@Override
@WithSpan
public Mono<ProjectStats> getStats(@NonNull TraceSearchCriteria criteria) {

Expand All @@ -361,8 +363,14 @@ public Mono<ProjectStats> getStats(@NonNull TraceSearchCriteria criteria) {
}

return getProjectByName(criteria.projectName())
.flatMap(project -> template.nonTransaction(
connection -> dao.getStats(criteria.toBuilder().projectId(project.id()).build())))
.flatMap(project -> dao.getStats(criteria.toBuilder().projectId(project.id()).build()))
.switchIfEmpty(Mono.just(ProjectStats.empty()));
}

/**
 * Number of traces counted by the DAO's daily query; pure delegation, traced
 * via {@code @WithSpan}.
 */
@Override
@WithSpan
public Mono<Long> getDailyCreatedCount() {
    return dao.getDailyTraces();
}

}
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
package com.comet.opik.infrastructure;

import com.fasterxml.jackson.annotation.JsonProperty;
import io.dropwizard.core.Configuration;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.jobs.JobConfiguration;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import lombok.Getter;

@Getter
public class OpikConfiguration extends Configuration {
public class OpikConfiguration extends JobConfiguration {

@Valid
@NotNull @JsonProperty
Expand Down Expand Up @@ -53,4 +53,9 @@ public class OpikConfiguration extends Configuration {
@Valid
@NotNull @JsonProperty
private BatchOperationsConfig batchOperations = new BatchOperationsConfig();

@Valid
@NotNull @JsonProperty
private String stateDatabaseName;

}
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,15 @@

import com.fasterxml.jackson.annotation.JsonProperty;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import lombok.Data;

@Data
public class UsageReportConfig {

public record ServerStatsConfig(boolean enabled) {
}

@Valid
@JsonProperty
private boolean enabled;
Expand All @@ -15,4 +19,8 @@ public class UsageReportConfig {
@JsonProperty
private String url;

@Valid
@NotNull @JsonProperty
private UsageReportConfig.ServerStatsConfig serverStats;

}
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,5 @@
@Builder(toBuilder = true)
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy.class)
/**
 * Immutable BI/usage-report event payload: an anonymous installation id, the
 * event type, and a free-form property map. Field names are serialized in
 * snake_case and unknown properties are ignored (see class-level annotations).
 *
 * NOTE(review): the source text retained the stale {@code StartupEvent}
 * declaration line next to the renamed record (diff residue); resolved here to
 * the single renamed {@code BiEvent} declaration.
 */
record BiEvent(String anonymousId, String eventType, Map<String, String> eventProperties) {
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
package com.comet.opik.infrastructure.bi;

import com.comet.opik.infrastructure.OpikConfiguration;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import ru.vyarus.dropwizard.guice.module.support.DropwizardAwareModule;

import static com.comet.opik.infrastructure.UsageReportConfig.ServerStatsConfig;

/**
 * Guice module for business-intelligence / usage-reporting infrastructure.
 * Currently only exposes the server-stats section of the usage-report config.
 */
public class BiModule extends DropwizardAwareModule<OpikConfiguration> {

    /**
     * Provides the {@code usageReport.serverStats} config section as an
     * injectable singleton, so consumers need not depend on the whole
     * {@link OpikConfiguration}.
     */
    @Provides
    @Singleton
    public ServerStatsConfig provideServerStatsConfig() {
        return configuration().getUsageReport().getServerStats();
    }
}
Loading
Loading