From df706716828747a13c2afd1f7588cad838d4871f Mon Sep 17 00:00:00 2001
From: Stan Brubaker <120737309+stanbrub@users.noreply.github.com>
Date: Tue, 11 Jun 2024 16:45:52 -0600
Subject: [PATCH] Fix csv read benchmark data types (#309)

---
 .../tests/standard/file/CsvColTypeTest.java     | 12 ++++++------
 .../tests/standard/file/FileTestRunner.java     |  4 +++-
 .../tests/standard/file/ParquetColTypeTest.java | 13 +++++++------
 3 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/src/it/java/io/deephaven/benchmark/tests/standard/file/CsvColTypeTest.java b/src/it/java/io/deephaven/benchmark/tests/standard/file/CsvColTypeTest.java
index 4efd5019..a7658fd4 100644
--- a/src/it/java/io/deephaven/benchmark/tests/standard/file/CsvColTypeTest.java
+++ b/src/it/java/io/deephaven/benchmark/tests/standard/file/CsvColTypeTest.java
@@ -12,16 +12,16 @@ class CsvColTypeTest {
 
     @Test
     @Order(1)
-    void writeThreeIntegralCols() {
-        runner.setScaleFactors(5, 3);
-        runner.runCsvWriteTest("CsvWrite- 3 Integral Cols -Static", "short10K", "int10K", "long10K");
+    void writeFourIntegralCols() {
+        runner.setScaleFactors(5, 2);
+        runner.runCsvWriteTest("CsvWrite- 4 Integral Cols -Static", "byte100", "short10K", "int10K", "long10K");
     }
 
     @Test
     @Order(2)
-    void readThreeIntegralCols() {
-        runner.setScaleFactors(5, 3);
-        runner.runCsvReadTest("CsvRead- 3 Integral Cols -Static", "short10K", "int10K", "long10K");
+    void readFourIntegralCols() {
+        runner.setScaleFactors(5, 2);
+        runner.runCsvReadTest("CsvRead- 4 Integral Cols -Static", "byte100", "short10K", "int10K", "long10K");
     }
 
     @Test
diff --git a/src/it/java/io/deephaven/benchmark/tests/standard/file/FileTestRunner.java b/src/it/java/io/deephaven/benchmark/tests/standard/file/FileTestRunner.java
index 9a749727..b488036e 100644
--- a/src/it/java/io/deephaven/benchmark/tests/standard/file/FileTestRunner.java
+++ b/src/it/java/io/deephaven/benchmark/tests/standard/file/FileTestRunner.java
@@ -194,6 +194,7 @@ private String getGenerator(final String columnName) {
             case "long10K" -> "(ii % 10000)";
             case "int10K" -> "((int)(ii % 10000))";
             case "short10K" -> "((short)(ii % 10000))";
+            case "byte100" -> "((byte)(ii % 100))";
             case "bigDec10K" -> "java.math.BigDecimal.valueOf(ii % 10000)";
             case "intArr5" -> array5;
             case "intVec5" -> "vec(" + array5 + ")";
@@ -214,8 +215,9 @@ private String getType(String columnName) {
         return switch (columnName) {
             case "str10K" -> "dht.string";
             case "long10K" -> "dht.long";
-            case "int10K" -> "dht.int_";
+            case "int10K" -> "dht.int32";
             case "short10K" -> "dht.short";
+            case "byte100" -> "dht.byte";
             case "bigDec10K" -> "dht.BigDecimal";
             case "intArr5" -> "dht.int_array";
             case "intVec5" -> "dht.int_array";
diff --git a/src/it/java/io/deephaven/benchmark/tests/standard/file/ParquetColTypeTest.java b/src/it/java/io/deephaven/benchmark/tests/standard/file/ParquetColTypeTest.java
index 5a47a074..81f70023 100644
--- a/src/it/java/io/deephaven/benchmark/tests/standard/file/ParquetColTypeTest.java
+++ b/src/it/java/io/deephaven/benchmark/tests/standard/file/ParquetColTypeTest.java
@@ -12,16 +12,17 @@ class ParquetColTypeTest {
 
     @Test
     @Order(1)
-    void writeThreeIntegralCols() {
-        runner.setScaleFactors(5, 15);
-        runner.runParquetWriteTest("ParquetWrite- 3 Integral Cols -Static", "NONE", "short10K", "int10K", "long10K");
+    void writeFourIntegralCols() {
+        runner.setScaleFactors(5, 12);
+        runner.runParquetWriteTest("ParquetWrite- 4 Integral Cols -Static", "NONE", "byte100", "short10K", "int10K",
+                "long10K");
     }
 
     @Test
     @Order(2)
-    void readThreeIntegralCols() {
-        runner.setScaleFactors(5, 15);
-        runner.runParquetReadTest("ParquetRead- 3 Integral Cols -Static");
+    void readFourIntegralCols() {
+        runner.setScaleFactors(5, 12);
+        runner.runParquetReadTest("ParquetRead- 4 Integral Cols -Static");
     }
 
     @Test