Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/main/java/org/apache/sysds/common/InstructionType.java
Original file line number Diff line number Diff line change
Expand Up @@ -89,5 +89,5 @@ public enum InstructionType {
PMM,
MatrixReshape,
Write,
Init,
Init, Tee,
}
1 change: 1 addition & 0 deletions src/main/java/org/apache/sysds/common/Opcodes.java
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,7 @@ public enum Opcodes {
READ("read", InstructionType.Variable),
WRITE("write", InstructionType.Variable, InstructionType.Write),
CREATEVAR("createvar", InstructionType.Variable),
TEE("tee", InstructionType.Tee),

//Reorg instruction opcodes
TRANSPOSE("r'", InstructionType.Reorg),
Expand Down
113 changes: 66 additions & 47 deletions src/main/java/org/apache/sysds/hops/DataOp.java
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
import org.apache.sysds.common.Types.ExecType;
import org.apache.sysds.lops.LopsException;
import org.apache.sysds.lops.Sql;
import org.apache.sysds.lops.Tee;
import org.apache.sysds.parser.DataExpression;
import static org.apache.sysds.parser.DataExpression.FED_RANGES;
import org.apache.sysds.runtime.controlprogram.caching.MatrixObject.UpdateType;
Expand All @@ -60,6 +61,8 @@ public class DataOp extends Hop {

private boolean _recompileRead = true;

private boolean _isTeeOp = false;

/**
* List of "named" input parameters. They are maintained as a hashmap:
* parameter names (String) are mapped as indices (Integer) into getInput()
Expand All @@ -73,6 +76,10 @@ public class DataOp extends Hop {
private DataOp() {
//default constructor for clone
}

/**
 * Marks this DataOp as a tee operation. When set, constructLops()
 * builds a {@code Tee} lop over the first input instead of the
 * regular {@code Data} lop (see the tee branch in constructLops()).
 *
 * @param isTeeOp true to compile this op into a Tee lop
 */
public void setIsTeeOp(boolean isTeeOp) {
this._isTeeOp = isTeeOp;
}

/**
* READ operation for Matrix w/ dim1, dim2.
Expand Down Expand Up @@ -251,57 +258,65 @@ public Lop constructLops()

ExecType et = optFindExecType();
Lop l = null;

// construct lops for all input parameters
HashMap<String, Lop> inputLops = new HashMap<>();
for (Entry<String, Integer> cur : _paramIndexMap.entrySet()) {
inputLops.put(cur.getKey(), getInput().get(cur.getValue()).constructLops());

if (_isTeeOp) {
Tee teeLop = new Tee(getInput().get(0).constructLops(),
getDataType(), getValueType());
setLineNumbers(teeLop);
setLops(teeLop);
setOutputDimensions(teeLop);
}
else {

// Create the lop
switch(_op)
{
case TRANSIENTREAD:
l = new Data(_op, null, inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
setOutputDimensions(l);
break;

case PERSISTENTREAD:
l = new Data(_op, null, inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
l.getOutputParameters().setDimensions(getDim1(), getDim2(), _inBlocksize, getNnz(), getUpdateType());
break;

case PERSISTENTWRITE:
case FUNCTIONOUTPUT:
l = new Data(_op, getInput().get(0).constructLops(), inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
((Data)l).setExecType(et);
setOutputDimensions(l);
break;

case TRANSIENTWRITE:
l = new Data(_op, getInput().get(0).constructLops(), inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
setOutputDimensions(l);
break;

case SQLREAD:
l = new Sql(inputLops, getDataType(), getValueType());
break;

case FEDERATED:
l = new Federated(inputLops, getDataType(), getValueType());
break;

default:
throw new LopsException("Invalid operation type for Data LOP: " + _op);
// construct lops for all input parameters
HashMap<String, Lop> inputLops = new HashMap<>();
for (Entry<String, Integer> cur : _paramIndexMap.entrySet()) {
inputLops.put(cur.getKey(), getInput().get(cur.getValue()).constructLops());
}

// Create the lop
switch (_op) {
case TRANSIENTREAD:
l = new Data(_op, null, inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
setOutputDimensions(l);
break;

case PERSISTENTREAD:
l = new Data(_op, null, inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
l.getOutputParameters().setDimensions(getDim1(), getDim2(), _inBlocksize, getNnz(), getUpdateType());
break;

case PERSISTENTWRITE:
case FUNCTIONOUTPUT:
l = new Data(_op, getInput().get(0).constructLops(), inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
((Data) l).setExecType(et);
setOutputDimensions(l);
break;

case TRANSIENTWRITE:
l = new Data(_op, getInput().get(0).constructLops(), inputLops, getName(), null,
getDataType(), getValueType(), getFileFormat());
setOutputDimensions(l);
break;

case SQLREAD:
l = new Sql(inputLops, getDataType(), getValueType());
break;

case FEDERATED:
l = new Federated(inputLops, getDataType(), getValueType());
break;

default:
throw new LopsException("Invalid operation type for Data LOP: " + _op);
}
setLineNumbers(l);
setLops(l);
}

setLineNumbers(l);
setLops(l);

//add reblock/checkpoint lops if necessary
constructAndSetLopsDataFlowProperties();

Expand Down Expand Up @@ -346,6 +361,9 @@ public boolean isFederatedDataOp(){
/**
 * Returns a compact textual representation of this data operation,
 * composed of the operation type, an optional " tee" marker when the
 * tee flag is set, and the variable name.
 *
 * @return operation string, e.g. "TRANSIENTREAD tee X"
 */
public String getOpString() {
	// StringBuilder instead of repeated String '+=' (avoids the
	// 'new String("")' anti-pattern and intermediate String objects)
	StringBuilder sb = new StringBuilder();
	sb.append(_op.toString());
	if (_isTeeOp) {
		sb.append(" tee");
	}
	sb.append(" ").append(getName());
	return sb.toString();
}
Expand Down Expand Up @@ -536,6 +554,7 @@ public Object clone() throws CloneNotSupportedException
ret._inFormat = _inFormat;
ret._inBlocksize = _inBlocksize;
ret._recompileRead = _recompileRead;
ret._isTeeOp = _isTeeOp; // copy the Tee flag
ret._paramIndexMap = (HashMap<String, Integer>) _paramIndexMap.clone();
//note: no deep cp of params since read-only

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ public ProgramRewriter(boolean staticRewrites, boolean dynamicRewrites)
//add static HOP DAG rewrite rules
_dagRuleSet.add( new RewriteRemoveReadAfterWrite() ); //dependency: before blocksize
_dagRuleSet.add( new RewriteBlockSizeAndReblock() );
_dagRuleSet.add( new RewriteInjectOOCTee() );
if( OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION )
_dagRuleSet.add( new RewriteRemoveUnnecessaryCasts() );
if( OptimizerUtils.ALLOW_COMMON_SUBEXPRESSION_ELIMINATION )
Expand Down
Loading