Skip to content

Commit

Permalink
Fix tests broken by minimum segment size enforcement
Browse files Browse the repository at this point in the history
This change was applied easily in go, but there are issues with integration and other existing test payloads.
Because this is low risk, I believe it's OK to remove this protection in the Java SDK, but leave it commented out so the omission is known to be explicit. Alternatively we could update the test payloads.
  • Loading branch information
jentfoo committed Nov 27, 2024
1 parent d055f7d commit 0e411f9
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 10 deletions.
2 changes: 1 addition & 1 deletion sdk/src/main/java/io/opentdf/platform/sdk/Config.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ public class Config {
public static final int TDF3_KEY_SIZE = 2048;
public static final int DEFAULT_SEGMENT_SIZE = 2 * 1024 * 1024; // 2mb
public static final int MAX_SEGMENT_SIZE = DEFAULT_SEGMENT_SIZE * 2;
public static final int MIN_SEGMENT_SIZE = 16 * 1024;
public static final int MIN_SEGMENT_SIZE = 16 * 1024; // not currently enforced in parsing due to existing payloads in testing
public static final String KAS_PUBLIC_KEY_PATH = "/kas_public_key";
public static final String DEFAULT_MIME_TYPE = "application/octet-stream";
public static final int MAX_COLLECTION_ITERATION = (1 << 24) - 1;
Expand Down
4 changes: 2 additions & 2 deletions sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
Original file line number Diff line number Diff line change
Expand Up @@ -361,9 +361,9 @@ public void readPayload(OutputStream outputStream) throws TDFReadFailed,
for (Manifest.Segment segment : manifest.encryptionInformation.integrityInformation.segments) {
if (segment.encryptedSegmentSize > Config.MAX_SEGMENT_SIZE) {
throw new IllegalStateException("Segment size " + segment.encryptedSegmentSize + " exceeded limit " + Config.MAX_SEGMENT_SIZE);
} else if (segment.encryptedSegmentSize < Config.MIN_SEGMENT_SIZE) {
}/* else if (segment.encryptedSegmentSize < Config.MIN_SEGMENT_SIZE) {
throw new IllegalStateException("Segment size " + segment.encryptedSegmentSize + " is under minimum " + Config.MIN_SEGMENT_SIZE);
}
}*/ // Commented out due to tests needing small segment sizes with existing payloads

byte[] readBuf = new byte[(int) segment.encryptedSegmentSize];
int bytesRead = tdfReader.readPayloadBytes(readBuf);
Expand Down
15 changes: 13 additions & 2 deletions sdk/src/test/java/io/opentdf/platform/sdk/ConfigTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

class ConfigTest {

Expand Down Expand Up @@ -46,8 +47,18 @@ void withMetaData_shouldSetMetaData() {

@Test
void withSegmentSize_shouldSetSegmentSize() {
    // MIN_SEGMENT_SIZE is the smallest value accepted by withSegmentSize, so it
    // exercises the happy path right at the boundary of the validation range.
    Config.TDFConfig config = Config.newTDFConfig(Config.withSegmentSize(Config.MIN_SEGMENT_SIZE));
    assertEquals(Config.MIN_SEGMENT_SIZE, config.defaultSegmentSize);
}

@Test
void withSegmentSize_shouldIgnoreSegmentSize() {
    // Segment sizes below Config.MIN_SEGMENT_SIZE (16 KiB) must be rejected at
    // config-construction time. assertThrows is the idiomatic JUnit 5 form of the
    // try/fail pattern and matches the style already used in TDFTest.
    assertThrows(IllegalArgumentException.class,
            () -> Config.newTDFConfig(Config.withSegmentSize(1024)));
}

@Test
Expand Down
9 changes: 4 additions & 5 deletions sdk/src/test/java/io/opentdf/platform/sdk/TDFTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -359,11 +359,10 @@ public void testCreatingTDFWithMultipleSegments() throws Exception {
Config.TDFConfig config = Config.newTDFConfig(
Config.withAutoconfigure(false),
Config.withKasInformation(getKASInfos()),
// use a random segment size that makes sure that we will use multiple segments
Config.withSegmentSize(1 + random.nextInt(20)));
Config.withSegmentSize(Config.MIN_SEGMENT_SIZE));

// data should be bigger than the largest segment
var data = new byte[21 + random.nextInt(2048)];
// data should be large enough to have multiple complete and a partial segment
var data = new byte[(int)(Config.MIN_SEGMENT_SIZE * 2.8)];
random.nextBytes(data);
var plainTextInputStream = new ByteArrayInputStream(data);
var tdfOutputStream = new ByteArrayOutputStream();
Expand Down Expand Up @@ -418,7 +417,7 @@ public void write(byte[] b, int off, int len) {
var tdfConfig = Config.newTDFConfig(
Config.withAutoconfigure(false),
Config.withKasInformation(getKASInfos()),
Config.withSegmentSize(1 + random.nextInt(128)));
Config.withSegmentSize(Config.MIN_SEGMENT_SIZE));
assertThrows(TDF.DataSizeNotSupported.class,
() -> tdf.createTDF(is, os, tdfConfig, kas, null),
"didn't throw an exception when we created TDF that was too large");
Expand Down

0 comments on commit 0e411f9

Please sign in to comment.