Drill, mail # dev - [31/52] [abbrv] [partial] DRILL-165: Reorganize directories (moves only)


jacques@... 2013-09-04, 11:10
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a593d908/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/SVRemoverCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/SVRemoverCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/SVRemoverCreator.java
new file mode 100644
index 0000000..88708e2
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/SVRemoverCreator.java
@@ -0,0 +1,23 @@
+package org.apache.drill.exec.physical.impl.svremover;
+
+import java.util.List;
+
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.config.SelectionVectorRemover;
+import org.apache.drill.exec.physical.impl.BatchCreator;
+import org.apache.drill.exec.record.RecordBatch;
+
+import com.google.common.base.Preconditions;
+
+public class SVRemoverCreator implements BatchCreator<SelectionVectorRemover>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SVRemoverCreator.class);
+
+  @Override
+  public RecordBatch getBatch(FragmentContext context, SelectionVectorRemover config, List<RecordBatch> children) throws ExecutionSetupException {
+    Preconditions.checkArgument(children.size() == 1);
+    return new RemovingRecordBatch(config, context, children.iterator().next());
+  }
+  
+  
+}
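
(For context: both creators added in this patch implement org.apache.drill.exec.physical.impl.BatchCreator, which is imported above but not itself part of this hunk. A minimal sketch of that contract, inferred only from the two getBatch signatures in this patch, follows; the generic bound on T and the PhysicalOperator import are assumptions, not quoted from the commit.)

    // Sketch of the contract implied by SVRemoverCreator and UnionBatchCreator in this patch.
    // The bound "T extends PhysicalOperator" is an assumption; the real interface lives
    // elsewhere in the java-exec module and may differ in detail.
    package org.apache.drill.exec.physical.impl;

    import java.util.List;

    import org.apache.drill.common.exceptions.ExecutionSetupException;
    import org.apache.drill.exec.ops.FragmentContext;
    import org.apache.drill.exec.physical.base.PhysicalOperator;
    import org.apache.drill.exec.record.RecordBatch;

    public interface BatchCreator<T extends PhysicalOperator> {
      // Builds the runtime RecordBatch for a physical operator config,
      // wiring in the already-constructed child batches.
      RecordBatch getBatch(FragmentContext context, T config, List<RecordBatch> children)
          throws ExecutionSetupException;
    }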

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a593d908/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionBatchCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionBatchCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionBatchCreator.java
new file mode 100644
index 0000000..1945139
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionBatchCreator.java
@@ -0,0 +1,23 @@
+package org.apache.drill.exec.physical.impl.union;
+
+import java.util.List;
+
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.config.Union;
+import org.apache.drill.exec.physical.impl.BatchCreator;
+import org.apache.drill.exec.record.RecordBatch;
+
+import com.google.common.base.Preconditions;
+
+public class UnionBatchCreator implements BatchCreator<Union>{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UnionBatchCreator.class);
+
+  @Override
+  public RecordBatch getBatch(FragmentContext context, Union config, List<RecordBatch> children) throws ExecutionSetupException {
+    Preconditions.checkArgument(children.size() >= 1);
+    return new UnionRecordBatch(config, children, context);
+  }
+  
+  
+}
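
(The only substantive difference between the two creators is the child-arity check: SVRemoverCreator requires exactly one incoming batch, while UnionBatchCreator accepts any non-empty list of children. A standalone illustration of the Guava Preconditions checks both rely on, not part of the patch:)

    import com.google.common.base.Preconditions;

    import java.util.Collections;
    import java.util.List;

    public class ArityCheckDemo {
      public static void main(String[] args) {
        List<Object> children = Collections.singletonList(new Object());
        // Mirrors SVRemoverCreator: exactly one child is required.
        Preconditions.checkArgument(children.size() == 1);
        // Mirrors UnionBatchCreator: at least one child is required.
        Preconditions.checkArgument(children.size() >= 1);
        System.out.println("both checks pass for a single-child list");
      }
    }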

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/a593d908/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionRecordBatch.java
new file mode 100644
index 0000000..b77d4f4
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionRecordBatch.java
@@ -0,0 +1,130 @@
+package org.apache.drill.exec.physical.impl.union;
+
+import com.google.common.collect.Lists;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.config.Union;
+import org.apache.drill.exec.record.*;
+import org.apache.drill.exec.record.selection.SelectionVector2;
+import org.apache.drill.exec.vector.ValueVector;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+public class UnionRecordBatch extends AbstractRecordBatch<Union> {
+
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UnionRecordBatch.class);
+
+  private final List<RecordBatch> incoming;
+  private SelectionVector2 sv;
+  private Iterator<RecordBatch> incomingIterator = null;
+  private RecordBatch current = null;
+  private ArrayList<TransferPair> transfers;
+  private int outRecordCount;
+
+  public UnionRecordBatch(Union config, List<RecordBatch> children, FragmentContext context) {
+    super(config, context);
+    this.incoming = children;
+    this.incomingIterator = incoming.iterator();
+    current = incomingIterator.next();
+    sv = null;
+  }
+
+
+  @Override
+  public int getRecordCount() {
+    return outRecordCount;
+  }
+
+  @Override
+  public void kill() {
+    if(current != null){
+      current.kill();
+      current = null;
+    }
+    for(;incomingIterator.hasNext();){
+      incomingIterator.next().kill();
+    }
+  }
+
+  @Override
+  protected void killIncoming() {
+    for (int i = 0; i < incoming.size(); i++) {
+      RecordBatch in = incoming.get(i);
+      in.kill();
+    }
+  }
+
+
+  @Override
+  public SelectionVector2 getSelectionVector2() {
+    return sv;
+  }
+
+  @Override
+  public IterOutcome next() {
+    if (current == null) { // end of iteration
+      return IterOutcome.NONE;
+    }
+    IterOutcome upstream = current.next();
+    logger.debug("Upstream... {}", upstream);
+    while (upstream == IterOutcome.NONE) {
+      if (!incomingIterator.hasNext()) {
+        current = null;
+        return IterOutcome.NONE;
+      }
+      current = incomingIterator.next();
+      upstream = current.next();
+    }
+    switch (upstream) {
+      case NONE:
+        throw new IllegalArgumentException("not possible!");
+      case NOT_YET:
+      case STOP:
+        return upstream;
+      case OK_NEW_SCHEMA:
+        setupSchema();
+