
Commit

Merge branch 'Spark4.0preview1-mergemaster' of github.com:RumbleDB/rumble into Spark4.0preview1-mergemaster
Ghislain Fourny committed Oct 28, 2024
2 parents 810623a + c10c139 commit 5949e9a
Showing 4 changed files with 2 additions and 18 deletions.
7 changes: 0 additions & 7 deletions src/main/java/org/rumbledb/api/Rumble.java
@@ -5,8 +5,6 @@
import org.rumbledb.context.DynamicContext;
import org.rumbledb.expressions.module.MainModule;
import org.rumbledb.runtime.RuntimeIterator;
import org.rumbledb.runtime.update.PendingUpdateList;

import sparksoniq.spark.SparkSessionManager;

import java.io.IOException;
@@ -74,11 +72,6 @@ public SequenceOfItems runQuery(URI location) throws IOException {
this.configuration
);

if (iterator.isUpdating()) {
PendingUpdateList pul = iterator.getPendingUpdateList(dynamicContext);
pul.applyUpdates(iterator.getMetadata());
}

return new SequenceOfItems(iterator, dynamicContext, this.configuration);
}

@@ -283,7 +283,6 @@ public FlworDataFrame getDataFrameAsJoin(
);
}
}

if (rightDependencies.size() == 1 && rightDependencies.contains(Name.CONTEXT_ITEM)) {
if (!leftDependencies.contains(Name.CONTEXT_ITEM)) {
contextItemValueExpression = rightHandSideOfJoinEqualityCriterion;
@@ -295,12 +294,6 @@ public FlworDataFrame getDataFrameAsJoin(
);
}
}
if (inputTupleValueExpression == null) {
throw new JobWithinAJobException(
"A let clause expression cannot produce a big sequence of items for a big number of tuples, as this would lead to a data flow explosion. We did detect a predicate expression, but the criterion inside the predicate is not comparing the left-hand-side of this predicate to the input tuple.",
getMetadata()
);
}

// Now we know we can execute the query as an equi-join.
// First, we evaluate all input tuples.
@@ -335,6 +328,8 @@ public FlworDataFrame getDataFrameAsJoin(
sequenceDependencies
).getDataFrame();

LogManager.getLogger("LetClauseSparkIterator").info("Rumble detected an equi-join in the left clause.");

// We compute the hashes for both sides of the equality predicate.
expressionDF = LetClauseSparkIterator.bindLetVariableInDataFrame(
expressionDF,
@@ -30,7 +30,6 @@


import java.util.ArrayList;
// import java.util.Iterator;
import java.util.List;

public class GroupClauseArrayMergeAggregateResultsUDF implements UDF1<ArraySeq<Object>, Object[]> {
3 changes: 0 additions & 3 deletions src/test/java/iq/RuntimeTests.java
@@ -21,9 +21,6 @@
package iq;

import iq.base.AnnotationsTestsBase;
import scala.Function0;
import scala.util.Properties;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.junit.Assert;
