testComplexArrayQuery1
This parses all the data, but doesn't construct the
drill data correctly (DaffodilDrillInfosetOutputter).

Fails with "Found one or more vector errors from OperatorRecordBatch"
mbeckerle committed Nov 6, 2023
1 parent a495d1d commit c36cc07
Showing 3 changed files with 58 additions and 4 deletions.
@@ -35,6 +35,7 @@
import org.apache.drill.exec.vector.accessor.ObjectWriter;
import org.apache.drill.exec.vector.accessor.ScalarWriter;
import org.apache.drill.exec.vector.accessor.TupleWriter;
import org.apache.drill.exec.vector.accessor.writer.AbstractArrayWriter;
import org.apache.drill.exec.vector.accessor.writer.AbstractTupleWriter;
import org.apache.drill.exec.vector.accessor.writer.MapWriter;
import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -95,10 +96,12 @@ public void startSimple(InfosetSimpleElement diSimple) {
switch (cw.type()) {
case ARRAY: {
assert(erd.isArray());
// Do nothing; startArray has this ready to write.
break;
}
case TUPLE: {
cw = ((TupleWriter)cw).column(colName);
break;
}
}
ColumnMetadata cm = cw.schema();
@@ -123,7 +126,27 @@ public void endSimple(InfosetSimpleElement diSimple) {
public void startComplex(InfosetComplexElement diComplex) {
ComplexElementMetadata erd = diComplex.metadata();
String colName = colName(erd);
ColumnWriter cw = ((TupleWriter)columnWriter()).column(colName).tuple();
ColumnWriter cw = columnWriter();
switch (cw.type()) {
case ARRAY: {
//
// FIXME: Unsupported operation.
// The data contains an array named "record", each element of which holds two fields, a1 and a2.
// In JSON the structure looks like this:
// { "ex_r1":{ "record":[{"a1":"257", "a2":"258"}, {"a1":"259", "a2":"260"}, ...] } }
// Starting from ex_r1, requesting the "record" column yields some sort of array writer
// (though it cannot be cast to ArrayWriter).
// How do we get from the array writer obtained via tupleWriter.column("record")
// to the tuple holding the array element's value, so that its fields can be filled in?
// Must an empty map object be allocated explicitly, or does that happen behind the scenes?
// (See the sketch following this hunk for one possible approach.)
cw = ((ArrayWriter)columnWriter()).tuple();
break;
}
case TUPLE: {
cw = ((TupleWriter)columnWriter()).column(colName).tuple();
break;
}
}
columnWriterStack.push(cw);
}
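
For context, here is a hedged sketch of the repeated-map write pattern that Drill's column-writer framework normally expects, which is what the FIXME above is asking about. It is not part of this commit: the row writer is assumed to come from a RowSetLoader, the class and method names are invented, and the column names ("record", "a1", "a2") are illustrative, taken from the JSON shape in the comment.

import org.apache.drill.exec.vector.accessor.ArrayWriter;
import org.apache.drill.exec.vector.accessor.TupleWriter;

public class RepeatedMapWriteSketch {
  // Sketch only: writes one row's "record" array, assuming "record" was
  // declared as a repeated MAP column when the schema was built.
  static void writeRecordArray(TupleWriter rowWriter, int[][] pairs) {
    ArrayWriter recordArray = rowWriter.array("record");
    // tuple() exposes the writer for the fields of the current array element.
    TupleWriter element = recordArray.tuple();
    for (int[] pair : pairs) {
      element.scalar("a1").setInt(pair[0]);
      element.scalar("a2").setInt(pair[1]);
      recordArray.save(); // finish this element and move on to the next
    }
  }
}

If this pattern holds, the answer to the allocation question would be that the framework manages per-element storage and the caller only calls save() after filling each element; that is an assumption about the writer API, not something this commit demonstrates.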

@@ -138,14 +161,14 @@ public void startArray(InfosetArray diArray) {
ElementMetadata erd = diArray.metadata();
assert (erd.isArray());
String colName = colName(erd);
ColumnWriter cw = ((TupleWriter) columnWriter()).column(colName);
ArrayWriter cw = ((TupleWriter) columnWriter()).array(colName);
assert (cw instanceof ArrayWriter);
columnWriterStack.push(cw);
}

@Override
public void endArray(InfosetArray diArray) {
assert(columnWriter() instanceof ArrayWriter);
// assert(columnWriter() instanceof ArrayWriter);
columnWriterStack.pop();
}

@@ -134,4 +134,35 @@ public void testComplexQuery1() throws Exception {
System.err.println(results.reader().column("ex_r").getAsString());
//new RowSetComparison(expected).verifyAndClearAll(results);
}

@Test
public void testComplexArrayQuery1() throws Exception {

String sql = "SELECT * FROM table(dfs.`data/complexArray1.dat` " +
" (type => 'daffodil'," +
" validationMode => 'true', " +
" schemaURI => '" + schemaURIRoot + "schema/complexArray1.dfdl.xsd'," +
" rootName => ''," +
" rootNamespace => '' " +
"))";

// String sql2 = "show files in dfs.`data`";
QueryBuilder qb = client.queryBuilder();
QueryBuilder query = qb.sql(sql);
RowSet results = query.rowSet();
results.print();
assertEquals(3, results.rowCount());

// create the expected metadata and data for this test
// metadata first
TupleMetadata expectedSchema = new SchemaBuilder()
.addMap("ex_r")
.add("a1", MinorType.INT)
.add("a2", MinorType.INT)
.resumeSchema()
.buildSchema();

System.err.println(results.reader().column("ex_r").getAsString());
//new RowSetComparison(expected).verifyAndClearAll(results);
}
}
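
An aside, not part of the commit: once the outputter constructs the record array correctly, the commented-out RowSetComparison would need an expected schema with a repeated-map column. A hedged sketch follows, assuming Drill's SchemaBuilder/MapBuilder support addMapArray nested inside addMap and using the names from the JSON shape in the FIXME comment ("ex_r1", "record", "a1", "a2"), all of which are illustrative here.

// Sketch only, not the committed test code: "record" modeled as a repeated map
// (an array of tuples) nested inside the "ex_r1" map.
TupleMetadata expectedArraySchema = new SchemaBuilder()
    .addMap("ex_r1")
      .addMapArray("record")
        .add("a1", MinorType.INT)
        .add("a2", MinorType.INT)
        .resumeMap()
      .resumeSchema()
    .buildSchema();

The corresponding expected rows could then be built with a RowSetBuilder over this schema and checked with RowSetComparison, replacing the System.err.println debugging output.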
Binary file not shown.
