Skip to content

Commit

Permalink
Added VOTable document as key-value metadata in Parquet output.
Browse files Browse the repository at this point in the history
  • Loading branch information
aratikakadiya committed Dec 17, 2024
1 parent a9d5dc2 commit febce06
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@ public TableShape read(InputStream inputStream) throws IOException {
MessageType schema = metadata.getFileMetaData().getSchema();
columnCount = schema.getFieldCount();

String votable = metadata.getFileMetaData().getKeyValueMetaData().get("votable");
log.debug("VOTable: " + votable);

try (org.apache.parquet.hadoop.ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(inputFile).build()) {
GenericRecord record;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,16 +1,21 @@
package ca.nrc.cadc.dali.tables.parquet;

import ca.nrc.cadc.dali.tables.TableData;
import ca.nrc.cadc.dali.tables.TableWriter;
import ca.nrc.cadc.dali.tables.votable.VOTableDocument;
import ca.nrc.cadc.dali.tables.votable.VOTableResource;
import ca.nrc.cadc.dali.tables.votable.VOTableWriter;
import ca.nrc.cadc.dali.util.FormatFactory;

import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
import java.io.Writer;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
Expand Down Expand Up @@ -65,6 +70,16 @@ public void write(VOTableDocument voTableDocument, OutputStream out, Long maxRec
Schema schema = DynamicSchemaGenerator.generateSchema(resource.getTable().getFields());
OutputFile outputFile = outputFileFromStream(out);

TableData tableData = resource.getTable().getTableData();
resource.getTable().setTableData(null);

StringWriter stringWriter = new StringWriter();
VOTableWriter votableWriter = new VOTableWriter();
votableWriter.write(voTableDocument, stringWriter, maxRec);

Map<String, String> customMetaData = new HashMap<>();
customMetaData.put("votable", stringWriter.toString());

try (org.apache.parquet.hadoop.ParquetWriter<GenericRecord> writer = AvroParquetWriter.<GenericRecord>builder(outputFile)
.withSchema(schema)
.withCompressionCodec(CompressionCodecName.SNAPPY)
Expand All @@ -74,9 +89,10 @@ public void write(VOTableDocument voTableDocument, OutputStream out, Long maxRec
.withWriteMode(ParquetFileWriter.Mode.OVERWRITE)
.withValidation(false)
.withDictionaryEncoding(false)
.withExtraMetaData(customMetaData)
.build()) {

Iterator<List<Object>> iterator = resource.getTable().getTableData().iterator();
Iterator<List<Object>> iterator = tableData.iterator();
int recordCount = 1;

while (iterator.hasNext() && recordCount <= maxRec) {
Expand Down

0 comments on commit febce06

Please sign in to comment.