Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,7 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur
this.nullMarker = loadConfigurationPb.getNullMarker();
}
if (loadConfigurationPb.getAllowJaggedRows() != null
|| loadConfigurationPb.getPreserveAsciiControlCharacters() != null
|| loadConfigurationPb.getAllowQuotedNewlines() != null
|| loadConfigurationPb.getEncoding() != null
|| loadConfigurationPb.getFieldDelimiter() != null
Expand All @@ -229,6 +230,10 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur
.setEncoding(loadConfigurationPb.getEncoding())
.setFieldDelimiter(loadConfigurationPb.getFieldDelimiter())
.setQuote(loadConfigurationPb.getQuote());
if (loadConfigurationPb.getPreserveAsciiControlCharacters() != null) {
builder.setPreserveAsciiControlCharacters(
loadConfigurationPb.getPreserveAsciiControlCharacters());
}
if (loadConfigurationPb.getAllowJaggedRows() != null) {
builder.setAllowJaggedRows(loadConfigurationPb.getAllowJaggedRows());
}
Expand Down Expand Up @@ -907,6 +912,7 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() {
.setAllowJaggedRows(csvOptions.allowJaggedRows())
.setAllowQuotedNewlines(csvOptions.allowQuotedNewLines())
.setEncoding(csvOptions.getEncoding())
.setPreserveAsciiControlCharacters(csvOptions.getPreserveAsciiControlCharacters())
.setQuote(csvOptions.getQuote());
if (csvOptions.getSkipLeadingRows() != null) {
// todo(mziccard) remove checked cast or comment when #1044 is closed
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ public class LoadJobConfigurationTest {
.setAllowJaggedRows(true)
.setAllowQuotedNewLines(false)
.setEncoding(StandardCharsets.UTF_8)
.setPreserveAsciiControlCharacters(true)
.build();
private static final TableId TABLE_ID = TableId.of("dataset", "table");
private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -613,6 +613,7 @@ public class ITBigQueryTest {
private static final String LOAD_FILE_LARGE = "load_large.csv";

private static final String LOAD_FILE_FLEXIBLE_COLUMN_NAME = "load_flexible_column_name.csv";
private static final String LOAD_FILE_NULL = "load_null.csv";
private static final String JSON_LOAD_FILE = "load.json";
private static final String JSON_LOAD_FILE_BQ_RESULTSET = "load_bq_resultset.json";
private static final String JSON_LOAD_FILE_SIMPLE = "load_simple.json";
Expand All @@ -628,6 +629,7 @@ public class ITBigQueryTest {
private static final TableId TABLE_ID_FASTQUERY_BQ_RESULTSET =
TableId.of(DATASET, "fastquery_testing_bq_resultset");
private static final String CSV_CONTENT = "StringValue1\nStringValue2\n";
private static final String CSV_CONTENT_NULL = "String\0Value1\n";
private static final String CSV_CONTENT_FLEXIBLE_COLUMN = "name,&ampersand\nrow_name,1";

private static final String JSON_CONTENT =
Expand Down Expand Up @@ -1080,6 +1082,9 @@ public static void beforeClass() throws InterruptedException, IOException {
storage.create(
BlobInfo.newBuilder(BUCKET, LOAD_FILE).setContentType("text/plain").build(),
CSV_CONTENT.getBytes(StandardCharsets.UTF_8));
storage.create(
BlobInfo.newBuilder(BUCKET, LOAD_FILE_NULL).setContentType("text/plain").build(),
CSV_CONTENT_NULL.getBytes(StandardCharsets.UTF_8));
storage.create(
BlobInfo.newBuilder(BUCKET, LOAD_FILE_FLEXIBLE_COLUMN_NAME)
.setContentType("text/plain")
Expand Down Expand Up @@ -6600,9 +6605,9 @@ public void testLocation() throws Exception {
}

@Test
public void testPreserveAsciiControlCharacters()
public void testWriteChannelPreserveAsciiControlCharacters()
throws InterruptedException, IOException, TimeoutException {
String destinationTableName = "test_preserve_ascii_control_characters";
String destinationTableName = "test_write_channel_preserve_ascii_control_characters";
TableId tableId = TableId.of(DATASET, destinationTableName);
WriteChannelConfiguration configuration =
WriteChannelConfiguration.newBuilder(tableId)
Expand All @@ -6625,6 +6630,26 @@ public void testPreserveAsciiControlCharacters()
assertTrue(bigquery.delete(tableId));
}

@Test
public void testLoadJobPreserveAsciiControlCharacters() throws InterruptedException {
  // Verifies that a load job over a CSV object containing an ASCII NUL byte
  // (LOAD_FILE_NULL) succeeds when preserveAsciiControlCharacters is enabled
  // on the CsvOptions of the LoadJobConfiguration.
  String destinationTableName = "test_load_job_preserve_ascii_control_characters";
  TableId destinationTable = TableId.of(DATASET, destinationTableName);
  String sourceUri = "gs://" + BUCKET + "/" + LOAD_FILE_NULL;

  try {
    CsvOptions csvOptions =
        CsvOptions.newBuilder().setPreserveAsciiControlCharacters(true).build();
    LoadJobConfiguration configuration =
        LoadJobConfiguration.newBuilder(destinationTable, sourceUri)
            .setFormatOptions(csvOptions)
            .setSchema(SIMPLE_SCHEMA)
            .build();
    // Submit the job and block until it finishes; a null error means success.
    Job remoteLoadJob = bigquery.create(JobInfo.of(configuration)).waitFor();
    assertNull(remoteLoadJob.getStatus().getError());
  } finally {
    // Clean up the destination table regardless of the job outcome.
    assertTrue(bigquery.delete(destinationTable));
  }
}

@Test
public void testReferenceFileSchemaUriForAvro() {
try {
Expand Down