[DO NOT MERGE][skip ci] JAVA 17 BWARE COMMIT
[skip ci] revert fed

[skip ci] revert a few files

vvector
Baunsgaard committed Feb 3, 2025
1 parent 2e1147a commit 0deb183
Showing 46 changed files with 1,790 additions and 443 deletions.
bin/systemds (4 additions & 0 deletions)

@@ -413,6 +413,7 @@ if [ $WORKER == 1 ]; then
 	print_out "# starting Federated worker on port $PORT"
 	CMD=" \
 	java $SYSTEMDS_STANDALONE_OPTS \
+	--add-modules=jdk.incubator.vector \
 	$LOG4JPROPFULL \
 	-jar $SYSTEMDS_JAR_FILE \
 	-w $PORT \
@@ -422,6 +423,7 @@ elif [ "$FEDMONITORING" == 1 ]; then
 	print_out "# starting Federated backend monitoring on port $PORT"
 	CMD=" \
 	java $SYSTEMDS_STANDALONE_OPTS \
+	--add-modules=jdk.incubator.vector \
 	$LOG4JPROPFULL \
 	-jar $SYSTEMDS_JAR_FILE \
 	-fedMonitoring $PORT \
@@ -433,6 +435,7 @@ elif [ $SYSDS_DISTRIBUTED == 0 ]; then
 	CMD=" \
 	java $SYSTEMDS_STANDALONE_OPTS \
 	$LOG4JPROPFULL \
+	--add-modules=jdk.incubator.vector \
 	-jar $SYSTEMDS_JAR_FILE \
 	-f $SCRIPT_FILE \
 	-exec $SYSDS_EXEC_MODE \
@@ -442,6 +445,7 @@ else
 	print_out "# Running script $SCRIPT_FILE distributed with opts: $*"
 	CMD=" \
 	spark-submit $SYSTEMDS_DISTRIBUTED_OPTS \
+	--add-modules=jdk.incubator.vector \
 	$SYSTEMDS_JAR_FILE \
 	-f $SCRIPT_FILE \
 	-exec $SYSDS_EXEC_MODE \
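
For context on the flag added to all four launch paths above: jdk.incubator.vector is the incubating Java Vector API, which Java 17 does not resolve by default, so it must be requested explicitly at JVM startup. A minimal sketch of the kind of explicit-SIMD kernel the module provides; the example is illustrative only and not taken from the SystemDS sources:

import jdk.incubator.vector.DoubleVector;
import jdk.incubator.vector.VectorSpecies;

// Illustrative only: element-wise addition with the incubator Vector API.
// Compile and run with --add-modules=jdk.incubator.vector on JDK 17,
// exactly the flag the launch scripts above now pass.
public class VectorAddExample {
	private static final VectorSpecies<Double> SPECIES = DoubleVector.SPECIES_PREFERRED;

	public static void add(double[] a, double[] b, double[] c) {
		int i = 0;
		for(; i < SPECIES.loopBound(a.length); i += SPECIES.length()) { // SIMD main loop
			DoubleVector va = DoubleVector.fromArray(SPECIES, a, i);
			DoubleVector vb = DoubleVector.fromArray(SPECIES, b, i);
			va.add(vb).intoArray(c, i);
		}
		for(; i < a.length; i++) // scalar tail for the remainder
			c[i] = a[i] + b[i];
	}

	public static void main(String[] args) {
		double[] a = {1, 2, 3, 4, 5}, b = {5, 4, 3, 2, 1}, c = new double[5];
		add(a, b, c);
		System.out.println(java.util.Arrays.toString(c)); // [6.0, 6.0, 6.0, 6.0, 6.0]
	}
}
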
pom.xml (9 additions & 3 deletions)

@@ -67,7 +67,7 @@
 	<!-- aws-java-sdk-bundle version should align with hadoop-aws version -->
 	<!-- aws-java-sdk-bundle.version>1.12.367</aws-java-sdk-bundle.version -->
 	<!-- Set java compile level via argument, ex: 1.8 1.9 10 11-->
-	<java.level>11</java.level>
+	<java.level>17</java.level>
 	<java.version>{java.level}</java.version>
 	<!-->Testing settings<!-->
 	<maven.test.skip>false</maven.test.skip>
@@ -77,6 +77,7 @@
 	<test-forkCount>1C</test-forkCount>
 	<rerun.failing.tests.count>2</rerun.failing.tests.count>
 	<jacoco.skip>false</jacoco.skip>
+	<doc.skip>false</doc.skip>
 	<jacoco.include>**</jacoco.include>
 	<automatedtestbase.outputbuffering>false</automatedtestbase.outputbuffering>
 	<argLine>-Xms3000m -Xmx3000m -Xmn300m</argLine>
@@ -345,6 +346,9 @@
 	<source>${java.level}</source>
 	<target>${java.level}</target>
 	<release>${java.level}</release>
+	<compilerArgs>
+		<arg>--add-modules=jdk.incubator.vector</arg>
+	</compilerArgs>
 	</configuration>
 	</plugin>

@@ -367,6 +371,7 @@
 	<systemPropertyVariables>
 	<log4j.configurationFile>file:src/test/resources/log4j.properties</log4j.configurationFile>
 	</systemPropertyVariables>
+	<argLine>--add-modules=jdk.incubator.vector</argLine>
 	</configuration>
 	</plugin>

@@ -875,9 +880,10 @@
 	<configuration>
 	<excludePackageNames>*.protobuf</excludePackageNames>
 	<notimestamp>true</notimestamp>
-	<failOnWarnings>true</failOnWarnings>
+	<failOnWarnings>false</failOnWarnings>
 	<quiet>true</quiet>
-	<skip>false</skip>
+	<additionalJOption>--add-modules=jdk.incubator.vector</additionalJOption>
+	<skip>${doc.skip}</skip>
 	<show>public</show>
 	<source>${java.level}</source>
 	</configuration>
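
Two notes on the pom changes above. The javadoc plugin now also receives --add-modules=jdk.incubator.vector, presumably because javadoc otherwise cannot resolve the incubator types, and its failOnWarnings flag is relaxed to false, likely because the JDK emits "using incubating module(s)" warnings that would otherwise fail the build. The new doc.skip property is wired into the plugin's <skip> setting, so javadoc generation can now be bypassed per build with mvn package -Ddoc.skip=true.
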
src/main/java/org/apache/sysds/hops/AggBinaryOp.java (1 addition & 2 deletions)

@@ -439,8 +439,7 @@ private boolean isApplicableForTransitiveSparkExecType(boolean left)
 			|| (left && !isLeftTransposeRewriteApplicable(true)))
 		&& getInput(index).getParent().size()==1 //bagg is only parent
 		&& !getInput(index).areDimsBelowThreshold()
-		&& (getInput(index).optFindExecType() == ExecType.SPARK
-			|| (getInput(index) instanceof DataOp && ((DataOp)getInput(index)).hasOnlyRDD()))
+		&& getInput(index).hasSparkOutput()
 		&& getInput(index).getOutputMemEstimate()>getOutputMemEstimate();
 	}
src/main/java/org/apache/sysds/hops/BinaryOp.java (24 additions & 14 deletions)

@@ -747,8 +747,8 @@ protected ExecType optFindExecType(boolean transitive) {

 	checkAndSetForcedPlatform();

-	DataType dt1 = getInput().get(0).getDataType();
-	DataType dt2 = getInput().get(1).getDataType();
+	final DataType dt1 = getInput(0).getDataType();
+	final DataType dt2 = getInput(1).getDataType();

 	if( _etypeForced != null ) {
 		setExecType(_etypeForced);
@@ -796,18 +796,28 @@ else if ( dt1 == DataType.SCALAR && dt2 == DataType.MATRIX ) {
 		checkAndSetInvalidCPDimsAndSize();
 	}

-	//spark-specific decision refinement (execute unary scalar w/ spark input and
+	// spark-specific decision refinement (execute unary scalar w/ spark input and
 	// single parent also in spark because it's likely cheap and reduces intermediates)
-	if(transitive && _etype == ExecType.CP && _etypeForced != ExecType.CP && _etypeForced != ExecType.FED &&
-		getDataType().isMatrix() // output should be a matrix
-		&& (dt1.isScalar() || dt2.isScalar()) // one side should be scalar
-		&& supportsMatrixScalarOperations() // scalar operations
-		&& !(getInput().get(dt1.isScalar() ? 1 : 0) instanceof DataOp) // input is not checkpoint
-		&& getInput().get(dt1.isScalar() ? 1 : 0).getParent().size() == 1 // unary scalar is only parent
-		&& !HopRewriteUtils.isSingleBlock(getInput().get(dt1.isScalar() ? 1 : 0)) // single block triggered exec
-		&& getInput().get(dt1.isScalar() ? 1 : 0).optFindExecType() == ExecType.SPARK) {
-		// pull unary scalar operation into spark
-		_etype = ExecType.SPARK;
+	if(transitive // we allow transitive Spark operations. continue sequences of spark operations
+		&& _etype == ExecType.CP // The instruction is currently in CP
+		&& _etypeForced != ExecType.CP // not forced CP
+		&& _etypeForced != ExecType.FED // not federated
+		&& (getDataType().isMatrix() || getDataType().isFrame()) // output should be a matrix or frame
+	) {
+		final boolean v1 = getInput(0).isScalarOrVectorBellowBlockSize();
+		final boolean v2 = getInput(1).isScalarOrVectorBellowBlockSize();
+		final boolean left = v1 == true; // left side is the vector or scalar
+		final Hop sparkIn = getInput(left ? 1 : 0);
+		if((v1 ^ v2) // XOR only one side is allowed to be a vector or a scalar.
+			&& (supportsMatrixScalarOperations() || op == OpOp2.APPLY_SCHEMA) // supported operation
+			&& sparkIn.getParent().size() == 1 // only one parent
+			&& !HopRewriteUtils.isSingleBlock(sparkIn) // single block triggered exec
+			&& sparkIn.optFindExecType() == ExecType.SPARK // input was spark op.
+			&& !(sparkIn instanceof DataOp) // input is not checkpoint
+		) {
+			// pull operation into spark
+			_etype = ExecType.SPARK;
+		}
 	}

 	if( OptimizerUtils.ALLOW_BINARY_UPDATE_IN_PLACE &&
@@ -837,7 +847,7 @@ else if( (op == OpOp2.CBIND && getDataType().isList())
 		|| (op == OpOp2.RBIND && getDataType().isList())) {
 		_etype = ExecType.CP;
 	}
-
+	//mark for recompile (forever)
 	setRequiresRecompileIfNecessary();
src/main/java/org/apache/sysds/hops/Hop.java (11 additions & 0 deletions)

@@ -1040,6 +1040,12 @@ public final String toString() {
 	// ========================================================================================


+	protected boolean isScalarOrVectorBellowBlockSize(){
+		return getDataType().isScalar() || (dimsKnown() &&
+			(( _dc.getRows() == 1 && _dc.getCols() < ConfigurationManager.getBlocksize())
+			|| _dc.getCols() == 1 && _dc.getRows() < ConfigurationManager.getBlocksize()));
+	}
+
 	protected boolean isVector() {
 		return (dimsKnown() && (_dc.getRows() == 1 || _dc.getCols() == 1) );
 	}
@@ -1624,6 +1630,11 @@ protected void setMemoryAndComputeEstimates(Lop lop) {
 		lop.setComputeEstimate(ComputeCost.getHOPComputeCost(this));
 	}

+	protected boolean hasSparkOutput(){
+		return (this.optFindExecType() == ExecType.SPARK
+			|| (this instanceof DataOp && ((DataOp)this).hasOnlyRDD()));
+	}
+
 	/**
 	 * Set parse information.
 	 *
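
The two helpers above back the refined exec-type decisions in BinaryOp and UnaryOp: an input only counts as small when it is a scalar or a vector that fits within a single block. A standalone restatement of the predicate for illustration, assuming the default blocksize of 1000 in place of ConfigurationManager.getBlocksize():

// Illustrative restatement of Hop.isScalarOrVectorBellowBlockSize(); not part of
// the commit. Assumes dimensions are known; the real method also checks dimsKnown().
public class BlockSizePredicateDemo {
	static final int BLOCKSIZE = 1000; // assumed default of ConfigurationManager.getBlocksize()

	static boolean isScalarOrVectorBelowBlockSize(boolean scalar, long rows, long cols) {
		return scalar
			|| (rows == 1 && cols < BLOCKSIZE)  // short row vector
			|| (cols == 1 && rows < BLOCKSIZE); // short column vector
	}

	public static void main(String[] args) {
		System.out.println(isScalarOrVectorBelowBlockSize(false, 1, 500));  // true: fits in one block
		System.out.println(isScalarOrVectorBelowBlockSize(false, 5000, 1)); // false: spans multiple blocks
	}
}
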
src/main/java/org/apache/sysds/hops/UnaryOp.java (24 additions & 10 deletions)

@@ -366,7 +366,11 @@ protected double computeOutputMemEstimate( long dim1, long dim2, long nnz )
 	} else {
 		sparsity = OptimizerUtils.getSparsity(dim1, dim2, nnz);
 	}
-	return OptimizerUtils.estimateSizeExactSparsity(dim1, dim2, sparsity);
+
+	if(getDataType() == DataType.FRAME)
+		return OptimizerUtils.estimateSizeExactFrame(dim1, dim2);
+	else
+		return OptimizerUtils.estimateSizeExactSparsity(dim1, dim2, sparsity);
 }

 @Override
@@ -463,6 +467,13 @@ public boolean isMetadataOperation() {
 		|| _op == OpOp1.CAST_AS_LIST;
 	}

+	private boolean isDisallowedSparkOps(){
+		return isCumulativeUnaryOperation()
+			|| isCastUnaryOperation()
+			|| _op==OpOp1.MEDIAN
+			|| _op==OpOp1.IQM;
+	}
+
 	@Override
 	protected ExecType optFindExecType(boolean transitive)
 	{
@@ -493,19 +504,22 @@ else if ( getInput().get(0).areDimsBelowThreshold() || getInput().get(0).isVector() )
 		checkAndSetInvalidCPDimsAndSize();
 	}

+
 	//spark-specific decision refinement (execute unary w/ spark input and
 	//single parent also in spark because it's likely cheap and reduces intermediates)
-	if( _etype == ExecType.CP && _etypeForced != ExecType.CP
-		&& getInput().get(0).optFindExecType() == ExecType.SPARK
-		&& getDataType().isMatrix()
-		&& !isCumulativeUnaryOperation() && !isCastUnaryOperation()
-		&& _op!=OpOp1.MEDIAN && _op!=OpOp1.IQM
-		&& !(getInput().get(0) instanceof DataOp) //input is not checkpoint
-		&& getInput().get(0).getParent().size()==1 ) //unary is only parent
-	{
+	if(_etype == ExecType.CP // currently CP instruction
+		&& _etype != ExecType.SPARK /// currently not SP.
+		&& _etypeForced != ExecType.CP // not forced as CP instruction
+		&& getInput(0).hasSparkOutput() // input is a spark instruction
+		&& (getDataType().isMatrix() || getDataType().isFrame()) // output is a matrix or frame
+		&& !isDisallowedSparkOps() // is invalid spark instruction
+		// && !(getInput().get(0) instanceof DataOp) // input is not checkpoint
+		// && getInput(0).getParent().size() <= 1// unary is only parent
+	) {
 		//pull unary operation into spark
 		_etype = ExecType.SPARK;
 	}
+

 	//mark for recompile (forever)
 	setRequiresRecompileIfNecessary();
@@ -520,7 +534,7 @@ && getInput().get(0).getParent().size()==1 ) //unary is only parent
 	} else {
 		setRequiresRecompileIfNecessary();
 	}
-
+
 	return _etype;
 }
src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlock.java

@@ -58,6 +58,7 @@
 import org.apache.sysds.runtime.compress.lib.CLALibMMChain;
 import org.apache.sysds.runtime.compress.lib.CLALibMatrixMult;
 import org.apache.sysds.runtime.compress.lib.CLALibMerge;
+import org.apache.sysds.runtime.compress.lib.CLALibReorg;
 import org.apache.sysds.runtime.compress.lib.CLALibReplace;
 import org.apache.sysds.runtime.compress.lib.CLALibReshape;
 import org.apache.sysds.runtime.compress.lib.CLALibRexpand;
@@ -72,7 +73,6 @@
 import org.apache.sysds.runtime.data.DenseBlock;
 import org.apache.sysds.runtime.data.SparseBlock;
 import org.apache.sysds.runtime.data.SparseRow;
-import org.apache.sysds.runtime.functionobjects.SwapIndex;
 import org.apache.sysds.runtime.instructions.InstructionUtils;
 import org.apache.sysds.runtime.instructions.cp.CM_COV_Object;
 import org.apache.sysds.runtime.instructions.cp.ScalarObject;
@@ -226,6 +226,7 @@ public void allocateColGroup(AColGroup cg) {
 	 * @param colGroups new ColGroups in the MatrixBlock
 	 */
 	public void allocateColGroupList(List<AColGroup> colGroups) {
+		cachedMemorySize = -1;
 		_colGroups = colGroups;
 	}

@@ -351,7 +352,6 @@ public long recomputeNonZeros(int k) {
 		List<Future<Long>> tasks = new ArrayList<>();
 		for(AColGroup g : _colGroups)
 			tasks.add(pool.submit(() -> g.getNumberNonZeros(rlen)));
-
 		long nnz = 0;
 		for(Future<Long> t : tasks)
 			nnz += t.get();
@@ -398,7 +398,6 @@ public long estimateSizeInMemory() {
 	public long estimateCompressedSizeInMemory() {

 		if(cachedMemorySize <= -1L) {
-
 			long total = baseSizeInMemory();
 			// take into consideration duplicate dictionaries
 			Set<IDictionary> dicts = new HashSet<>();
@@ -413,7 +412,6 @@ public long estimateCompressedSizeInMemory() {
 			}
 			cachedMemorySize = total;
 			return total;
-
 		}
 		else
 			return cachedMemorySize;
@@ -635,21 +633,7 @@ public MatrixBlock replaceOperations(MatrixValue result, double pattern, double replacement)

 	@Override
 	public MatrixBlock reorgOperations(ReorgOperator op, MatrixValue ret, int startRow, int startColumn, int length) {
-		if(op.fn instanceof SwapIndex && this.getNumColumns() == 1) {
-			MatrixBlock tmp = decompress(op.getNumThreads());
-			long nz = tmp.setNonZeros(tmp.getNonZeros());
-			tmp = new MatrixBlock(tmp.getNumColumns(), tmp.getNumRows(), tmp.getDenseBlockValues());
-			tmp.setNonZeros(nz);
-			return tmp;
-		}
-		else {
-			// Allow transpose to be compressed output. In general we need to have a transposed flag on
-			// the compressed matrix. https://issues.apache.org/jira/browse/SYSTEMDS-3025
-			String message = op.getClass().getSimpleName() + " -- " + op.fn.getClass().getSimpleName();
-			MatrixBlock tmp = getUncompressed(message, op.getNumThreads());
-			return tmp.reorgOperations(op, ret, startRow, startColumn, length);
-		}
-
+		return CLALibReorg.reorg(this, op, (MatrixBlock) ret, startRow, startColumn, length);
 	}

 	public boolean isOverlapping() {
@@ -1002,6 +986,10 @@ public MatrixBlock getUncompressed() {
 		return getUncompressed((String) null);
 	}

+	public MatrixBlock getUncompressed(int k){
+		return getUncompressed((String) null, k);
+	}
+
 	public MatrixBlock getUncompressed(String operation) {
 		return getUncompressed(operation,
 			ConfigurationManager.isParallelMatrixOperations() ? InfrastructureAnalyzer.getLocalParallelism() : 1);
@@ -1213,8 +1201,8 @@ public void examSparsity(boolean allowCSR, int k) {
 	}

 	@Override
-	public void sparseToDense(int k) {
-		// do nothing
+	public MatrixBlock sparseToDense(int k) {
+		return this; // do nothing
 	}

 	@Override
(Diff truncated: the remaining 39 of the 46 changed files are not shown.)
