From 7ad718173bd9d7a7ef226d1333861248f1b326d8 Mon Sep 17 00:00:00 2001 From: e-strauss Date: Wed, 18 Feb 2026 01:08:15 +0100 Subject: [PATCH 1/5] better logging --- .../java/org/apache/sysds/hops/DataOp.java | 14 +- .../apache/sysds/hops/cost/CostEstimator.java | 15 +- .../hops/cost/CostEstimatorStaticRuntime.java | 14 +- .../hops/rewrite/RewriteInjectOOCTee.java | 19 +- .../CompressedMatrixBlockFactory.java | 148 +++++++------- .../controlprogram/caching/CacheableData.java | 41 ++-- .../federated/FederatedWorkerHandler.java | 13 +- .../controlprogram/paramserv/ParamServer.java | 23 +-- ...turnParameterizedBuiltinCPInstruction.java | 7 +- .../instructions/fed/InitFEDInstruction.java | 18 +- .../sysds/runtime/io/MatrixReaderFactory.java | 13 +- .../runtime/lineage/LineageRewriteReuse.java | 48 ++--- .../sketch/countdistinctapprox/KMVSketch.java | 24 +-- .../encode/ColumnEncoderDummycode.java | 7 +- .../transform/encode/CompressedEncode.java | 32 ++-- .../sysds/utils/ParameterizedLogger.java | 159 ++++++++++++++++ .../utils/ParameterizedLoggerTest.java | 180 ++++++++++++++++++ 17 files changed, 516 insertions(+), 259 deletions(-) create mode 100644 src/main/java/org/apache/sysds/utils/ParameterizedLogger.java create mode 100644 src/test/java/org/apache/sysds/test/component/utils/ParameterizedLoggerTest.java diff --git a/src/main/java/org/apache/sysds/hops/DataOp.java b/src/main/java/org/apache/sysds/hops/DataOp.java index 7b912bd39ed..9a104b981fa 100644 --- a/src/main/java/org/apache/sysds/hops/DataOp.java +++ b/src/main/java/org/apache/sysds/hops/DataOp.java @@ -19,13 +19,14 @@ package org.apache.sysds.hops; +import static org.apache.sysds.parser.DataExpression.FED_RANGES; + import java.util.HashMap; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types.DataType; +import org.apache.sysds.common.Types.ExecType; import 
org.apache.sysds.common.Types.FileFormat; import org.apache.sysds.common.Types.OpOpData; import org.apache.sysds.common.Types.ValueType; @@ -35,22 +36,21 @@ import org.apache.sysds.lops.Data; import org.apache.sysds.lops.Federated; import org.apache.sysds.lops.Lop; -import org.apache.sysds.common.Types.ExecType; import org.apache.sysds.lops.LopsException; import org.apache.sysds.lops.Sql; import org.apache.sysds.lops.Tee; import org.apache.sysds.parser.DataExpression; -import static org.apache.sysds.parser.DataExpression.FED_RANGES; import org.apache.sysds.runtime.controlprogram.caching.MatrixObject.UpdateType; import org.apache.sysds.runtime.meta.DataCharacteristics; import org.apache.sysds.runtime.util.LocalFileUtils; +import org.apache.sysds.utils.ParameterizedLogger; /** * A DataOp can be either a persistent read/write or transient read/write - writes will always have at least one input, * but all types can have parameters (e.g., for csv literals of delimiter, header, etc). */ public class DataOp extends Hop { - private static final Log LOG = LogFactory.getLog(DataOp.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(DataOp.class); private OpOpData _op; private String _fileName = null; @@ -130,9 +130,7 @@ public DataOp(String l, DataType dt, ValueType vt, String s = e.getKey(); Hop input = e.getValue(); getInput().add(input); - if (LOG.isDebugEnabled()){ - LOG.debug(String.format("%15s - %s",s,input)); - } + LOG.debug("{%15s} - {%s}", s, input); input.getParent().add(this); _paramIndexMap.put(s, index); diff --git a/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java b/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java index ffd3ff1bb82..d4d6ba6c1c9 100644 --- a/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java +++ b/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java @@ -19,8 +19,6 @@ package org.apache.sysds.hops.cost; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.sysds.common.Opcodes; import org.apache.sysds.conf.ConfigurationManager; import org.apache.sysds.hops.OptimizerUtils; @@ -54,6 +52,7 @@ import org.apache.sysds.runtime.matrix.operators.CMOperator; import org.apache.sysds.runtime.matrix.operators.CMOperator.AggregateOperationTypes; import org.apache.sysds.runtime.meta.DataCharacteristics; +import org.apache.sysds.utils.ParameterizedLogger; import java.util.ArrayList; import java.util.HashMap; @@ -61,7 +60,7 @@ public abstract class CostEstimator { - protected static final Log LOG = LogFactory.getLog(CostEstimator.class.getName()); + protected static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(CostEstimator.class); private static final int DEFAULT_NUMITER = 15; @@ -155,18 +154,16 @@ else if( pb instanceof BasicProgramBlock ) //awareness of recursive functions, missing program if( !memoFunc.contains(fkey) && pb.getProgram()!=null ) { - if(LOG.isDebugEnabled()) - LOG.debug("Begin Function "+fkey); - + LOG.debug("Begin Function {}", fkey); + memoFunc.add(fkey); Program prog = pb.getProgram(); FunctionProgramBlock fpb = prog.getFunctionProgramBlock( finst.getNamespace(), finst.getFunctionName()); ret += rGetTimeEstimate(fpb, stats, memoFunc, recursive); memoFunc.remove(fkey); - - if(LOG.isDebugEnabled()) - LOG.debug("End Function "+fkey); + + LOG.debug("End Function {}", fkey); } } } diff --git a/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java b/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java index da2788e8c6c..80ca9c20c35 100644 --- a/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java +++ b/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java @@ -85,9 +85,8 @@ protected double getCPInstTimeEstimate( Instruction inst, VarStats[] vs, String[ } vs[1]._inmem = true; } - if( LOG.isDebugEnabled() && ltime!=0 ) { - LOG.debug("Cost["+cpinst.getOpcode()+" - read] = 
"+ltime); - } + if(ltime != 0) + LOG.debug("Cost[{} - read] = {}", cpinst.getOpcode(), ltime); //exec time CP instruction String opcode = (cpinst instanceof FunctionCallCPInstruction) ? InstructionUtils.getOpCode(cpinst.toString()) : cpinst.getOpcode(); @@ -99,9 +98,8 @@ protected double getCPInstTimeEstimate( Instruction inst, VarStats[] vs, String[ if( inst instanceof VariableCPInstruction && ((VariableCPInstruction)inst).getOpcode().equals(Opcodes.WRITE.toString()) ) wtime += getHDFSWriteTime(vs[2].getRows(), vs[2].getCols(), vs[2].getSparsity(), ((VariableCPInstruction)inst).getInput3().getName() ); - if( LOG.isDebugEnabled() && wtime!=0 ) { - LOG.debug("Cost["+cpinst.getOpcode()+" - write] = "+wtime); - } + if(wtime != 0) + LOG.debug("Cost[{} - write] = {}", cpinst.getOpcode(), wtime); //total costs double costs = ltime + etime + wtime; @@ -261,8 +259,8 @@ private static double getInstTimeEstimate( String opcode, boolean inMR, long d1m double nflops = getNFLOP(opcode, inMR, d1m, d1n, d1s, d2m, d2n, d2s, d3m, d3n, d3s, args); double time = nflops / DEFAULT_FLOPS; - if( LOG.isDebugEnabled() ) - LOG.debug("Cost["+opcode+"] = "+time+"s, "+nflops+" flops ("+d1m+","+d1n+","+d1s+","+d2m+","+d2n+","+d2s+","+d3m+","+d3n+","+d3s+")."); + LOG.debug("Cost[{}] = {}s, {} flops ({},{},{},{},{},{},{},{},{}).", + opcode, time, nflops, d1m, d1n, d1s, d2m, d2n, d2s, d3m, d3n, d3s); return time; } diff --git a/src/main/java/org/apache/sysds/hops/rewrite/RewriteInjectOOCTee.java b/src/main/java/org/apache/sysds/hops/rewrite/RewriteInjectOOCTee.java index 7abfb15d1d4..5af0cf0d75f 100644 --- a/src/main/java/org/apache/sysds/hops/rewrite/RewriteInjectOOCTee.java +++ b/src/main/java/org/apache/sysds/hops/rewrite/RewriteInjectOOCTee.java @@ -26,6 +26,7 @@ import org.apache.sysds.hops.Hop; import org.apache.sysds.hops.ReorgOp; import org.apache.sysds.parser.StatementBlock; +import org.apache.sysds.utils.ParameterizedLogger; import java.util.ArrayList; import java.util.HashMap; @@ -51,6 
+52,7 @@ * {@code TeeOp}, and safely rewire the graph. */ public class RewriteInjectOOCTee extends StatementBlockRewriteRule { + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(RewriteInjectOOCTee.class); public static boolean APPLY_ONLY_XtX_PATTERN = false; @@ -138,10 +140,7 @@ private void applyTopDownTeeRewrite(Hop sharedInput) { } int consumerCount = sharedInput.getParent().size(); - if (LOG.isDebugEnabled()) { - LOG.debug("Inject tee for hop " + sharedInput.getHopID() + " (" - + sharedInput.getName() + "), consumers=" + consumerCount); - } + LOG.debug("Inject tee for hop {} ({}), consumers={}", sharedInput.getHopID(), sharedInput.getName(), consumerCount); // Take a defensive copy of consumers before modifying the graph ArrayList consumers = new ArrayList<>(sharedInput.getParent()); @@ -161,10 +160,7 @@ private void applyTopDownTeeRewrite(Hop sharedInput) { handledHop.put(sharedInput.getHopID(), teeOp); rewrittenHops.add(sharedInput.getHopID()); - if (LOG.isDebugEnabled()) { - LOG.debug("Created tee hop " + teeOp.getHopID() + " -> " - + teeOp.getName()); - } + LOG.debug("Created tee hop {} -> {}", teeOp.getHopID(), teeOp.getName()); } @SuppressWarnings("unused") @@ -276,11 +272,8 @@ private void removeRedundantTeeChains(Hop hop) { if (HopRewriteUtils.isData(hop, OpOpData.TEE) && hop.getInput().size() == 1) { Hop teeInput = hop.getInput().get(0); if (HopRewriteUtils.isData(teeInput, OpOpData.TEE)) { - if (LOG.isDebugEnabled()) { - LOG.debug("Remove redundant tee hop " + hop.getHopID() - + " (" + hop.getName() + ") -> " + teeInput.getHopID() - + " (" + teeInput.getName() + ")"); - } + LOG.debug("Remove redundant tee hop {} ({}) -> {} ({})", + hop.getHopID(), hop.getName(), teeInput.getHopID(), teeInput.getName()); HopRewriteUtils.rewireAllParentChildReferences(hop, teeInput); HopRewriteUtils.removeAllChildReferences(hop); } diff --git a/src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlockFactory.java 
b/src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlockFactory.java index 4c48effb4df..d2e3c9c10df 100644 --- a/src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlockFactory.java +++ b/src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlockFactory.java @@ -27,8 +27,6 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.runtime.compress.cocode.CoCoderFactory; import org.apache.sysds.runtime.compress.colgroup.AColGroup; import org.apache.sysds.runtime.compress.colgroup.AColGroupValue; @@ -55,6 +53,7 @@ import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.util.CommonThreadPool; import org.apache.sysds.utils.DMLCompressionStatistics; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.stats.Timing; /** @@ -62,7 +61,7 @@ */ public class CompressedMatrixBlockFactory { - private static final Log LOG = LogFactory.getLog(CompressedMatrixBlockFactory.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(CompressedMatrixBlockFactory.class); /** Timing object to measure the time of each phase in the compression */ private final Timing time = new Timing(true); @@ -336,7 +335,7 @@ else if(mb instanceof CompressedMatrixBlock && ((CompressedMatrixBlock) mb).isOv if(CompressedMatrixBlock.debug) { final double afterComp = mb.sum(k).getDouble(0, 0); final double deltaSum = Math.abs(orgSum - afterComp); - LOG.debug("compression Sum: Before:" + orgSum + " after: " + afterComp + " |delta|: " + deltaSum); + LOG.debug("compression Sum: Before:{} after: {} |delta|: {}", orgSum, afterComp, deltaSum); } return new ImmutablePair<>(res, _stats); @@ -351,8 +350,8 @@ private void classifyPhase() { if(LOG.isTraceEnabled()) { LOG.trace("Logging all individual columns estimated cost:"); 
for(CompressedSizeInfoColGroup g : compressionGroups.getInfo()) - LOG.trace(String.format("Cost: %8.0f Size: %16.0f %15s", costEstimator.getCost(g), g.getMinSize(), - g.getColumns())); + LOG.trace("Cost: {%8.0f} Size: {%16.0f} {%15s}", costEstimator.getCost(g), g.getMinSize(), + g.getColumns()); } _stats.estimatedSizeCols = compressionGroups.memoryEstimate(); @@ -376,16 +375,14 @@ private void classifyPhase() { else { // abort compression compressionGroups = null; - if(LOG.isInfoEnabled()) { - LOG.info("Aborting before co-code, because the compression looks bad"); - LOG.info("Threshold was set to : " + threshold + " but it was above original " + _stats.originalCost); - LOG.info("Original size : " + _stats.originalSize); - LOG.info("single col size : " + _stats.estimatedSizeCols); - LOG.debug(String.format("--compressed size: %16d", _stats.originalSize)); - if(!(costEstimator instanceof MemoryCostEstimator)) { - LOG.info("original cost : " + _stats.originalCost); - LOG.info("single col cost : " + _stats.estimatedCostCols); - } + LOG.info("Aborting before co-code, because the compression looks bad"); + LOG.info("Threshold was set to : {} but it was above original {}", threshold, _stats.originalCost); + LOG.info("Original size : {}", _stats.originalSize); + LOG.info("single col size : {}", _stats.estimatedSizeCols); + LOG.debug("compressed size: {%16d}", _stats.originalSize); + if(!(costEstimator instanceof MemoryCostEstimator)) { + LOG.info("original cost : {}", _stats.originalCost); + LOG.info("single col cost : {}", _stats.estimatedCostCols); } } } @@ -403,15 +400,13 @@ private void coCodePhase() { if(_stats.estimatedCostCoCoded > _stats.originalCost) { // abort compression compressionGroups = null; - if(LOG.isInfoEnabled()) { - LOG.info("Aborting after co-code, because the compression looks bad"); - LOG.info("co-code size : " + _stats.estimatedSizeCoCoded); - LOG.info("original size : " + _stats.originalSize); - if(!(costEstimator instanceof MemoryCostEstimator)) 
{ - LOG.info("original cost : " + _stats.originalCost); - LOG.info("single col cost : " + _stats.estimatedCostCols); - LOG.info("co-code cost : " + _stats.estimatedCostCoCoded); - } + LOG.info("Aborting after co-code, because the compression looks bad"); + LOG.info("co-code size : {}", _stats.estimatedSizeCoCoded); + LOG.info("original size : {}", _stats.originalSize); + if(!(costEstimator instanceof MemoryCostEstimator)) { + LOG.info("original cost : {}", _stats.originalCost); + LOG.info("single col cost : {}", _stats.estimatedCostCols); + LOG.info("co-code cost : {}", _stats.estimatedCostCoCoded); } } } @@ -478,15 +473,15 @@ private void finalizePhase() { _stats.setColGroupsCounts(res.getColGroups()); if(_stats.compressedCost > _stats.originalCost) { - LOG.info("--dense size: " + _stats.denseSize); - LOG.info("--original size: " + _stats.originalSize); - LOG.info("--compressed size: " + _stats.compressedSize); - LOG.info("--compression ratio: " + _stats.getRatio()); - LOG.info("--original Cost: " + _stats.originalCost); - LOG.info("--Compressed Cost: " + _stats.compressedCost); - LOG.info("--Cost Ratio: " + _stats.getCostRatio()); - LOG.debug("--col groups types " + _stats.getGroupsTypesString()); - LOG.debug("--col groups sizes " + _stats.getGroupsSizesString()); + LOG.info("--dense size: {}", _stats.denseSize); + LOG.info("--original size: {}", _stats.originalSize); + LOG.info("--compressed size: {}", _stats.compressedSize); + LOG.info("--compression ratio: {}", _stats.getRatio()); + LOG.info("--original Cost: {}", _stats.originalCost); + LOG.info("--Compressed Cost: {}", _stats.compressedCost); + LOG.info("--Cost Ratio: {}", _stats.getCostRatio()); + LOG.debug("--col groups types {}", _stats.getGroupsTypesString()); + LOG.debug("--col groups sizes {}", _stats.getGroupsSizesString()); logLengths(); LOG.info("Abort block compression because cost ratio is less than 1. 
"); res = null; @@ -504,7 +499,7 @@ private void finalizePhase() { } private Pair abortCompression() { - LOG.warn("Compression aborted at phase: " + phase); + LOG.warn("Compression aborted at phase: {}", phase); if(mb instanceof CompressedMatrixBlock && mb.getInMemorySize() > _stats.denseSize) { MatrixBlock ucmb = ((CompressedMatrixBlock) mb).getUncompressed("Decompressing for abort: ", k); return new ImmutablePair<>(ucmb, _stats); @@ -532,13 +527,11 @@ private Pair abortCompression() { } private void logInit() { - if(LOG.isDebugEnabled()) { - LOG.debug("--Seed used for comp : " + compSettings.seed); - LOG.debug(String.format("--number columns to compress: %10d", mb.getNumColumns())); - LOG.debug(String.format("--number rows to compress : %10d", mb.getNumRows())); - LOG.debug(String.format("--sparsity : %10.5f", mb.getSparsity())); - LOG.debug(String.format("--nonZeros : %10d", mb.getNonZeros())); - } + LOG.debug("--Seed used for comp : {}", compSettings.seed); + LOG.debug("--number columns to compress: {%10d}", mb.getNumColumns()); + LOG.debug("--number rows to compress : {%10d}", mb.getNumRows()); + LOG.debug("--sparsity : {%10.5f}", mb.getSparsity()); + LOG.debug("--nonZeros : {%10d}", mb.getNonZeros()); } private void logPhase() { @@ -552,20 +545,20 @@ private void logPhase() { else { switch(phase) { case 0: - LOG.debug("--compression phase " + phase + " Classify : " + getLastTimePhase()); - LOG.debug("--Individual Columns Estimated Compression: " + _stats.estimatedSizeCols); + LOG.debug("--compression phase {} Classify : {}", phase, getLastTimePhase()); + LOG.debug("--Individual Columns Estimated Compression: {}", _stats.estimatedSizeCols); if(mb instanceof CompressedMatrixBlock) { LOG.debug("--Recompressing already compressed MatrixBlock"); } break; case 1: - LOG.debug("--compression phase " + phase + " Grouping : " + getLastTimePhase()); - LOG.debug("Grouping using: " + compSettings.columnPartitioner); - LOG.debug("Cost Calculated using: " + costEstimator); 
- LOG.debug("--Cocoded Columns estimated Compression:" + _stats.estimatedSizeCoCoded); + LOG.debug("--compression phase {} Grouping : {}", phase, getLastTimePhase()); + LOG.debug("Grouping using: {}", compSettings.columnPartitioner); + LOG.debug("Cost Calculated using: {}", costEstimator); + LOG.debug("--Cocoded Columns estimated Compression:{}", _stats.estimatedSizeCoCoded); if(compressionGroups.getInfo().size() < 1000) { - LOG.debug("--Cocoded Columns estimated nr distinct:" + compressionGroups.getEstimatedDistinct()); - LOG.debug("--Cocoded Columns nr columns :" + compressionGroups.getNrColumnsString()); + LOG.debug("--Cocoded Columns estimated nr distinct:{}", compressionGroups.getEstimatedDistinct()); + LOG.debug("--Cocoded Columns nr columns :{}", compressionGroups.getNrColumnsString()); } else { LOG.debug( @@ -573,33 +566,32 @@ private void logPhase() { } break; case 2: - LOG.debug("--compression phase " + phase + " Transpose : " + getLastTimePhase()); - LOG.debug("Did transpose: " + compSettings.transposed); + LOG.debug("--compression phase {} Transpose : {}", phase, getLastTimePhase()); + LOG.debug("Did transpose: {}", compSettings.transposed); break; case 3: - LOG.debug("--compression phase " + phase + " Compress : " + getLastTimePhase()); - LOG.debug("--compressed initial actual size:" + _stats.compressedInitialSize); + LOG.debug("--compression phase {} Compress : {}", phase, getLastTimePhase()); + LOG.debug("--compressed initial actual size:{}", _stats.compressedInitialSize); break; case 4: default: - LOG.debug("--num col groups: " + res.getColGroups().size()); - LOG.debug("--compression phase " + phase + " Cleanup : " + getLastTimePhase()); - LOG.debug("--col groups types " + _stats.getGroupsTypesString()); - LOG.debug("--col groups sizes " + _stats.getGroupsSizesString()); - LOG.debug("--input was compressed " + (mb instanceof CompressedMatrixBlock)); - LOG.debug(String.format("--dense size: %16d", _stats.denseSize)); - 
LOG.debug(String.format("--sparse size: %16d", _stats.sparseSize)); - LOG.debug(String.format("--original size: %16d", _stats.originalSize)); - LOG.debug(String.format("--compressed size: %16d", _stats.compressedSize)); - LOG.debug(String.format("--compression ratio: %4.3f", _stats.getRatio())); - LOG.debug(String.format("--Dense ratio: %4.3f", _stats.getDenseRatio())); + LOG.debug("--num col groups: {}", res.getColGroups().size()); + LOG.debug("--compression phase {} Cleanup : {}", phase, getLastTimePhase()); + LOG.debug("--col groups types {}", _stats.getGroupsTypesString()); + LOG.debug("--col groups sizes {}", _stats.getGroupsSizesString()); + LOG.debug("--input was compressed {}", (mb instanceof CompressedMatrixBlock)); + LOG.debug("--dense size: {%16d}", _stats.denseSize); + LOG.debug("--sparse size: {%16d}", _stats.sparseSize); + LOG.debug("--original size: {%16d}", _stats.originalSize); + LOG.debug("--compressed size: {%16d}", _stats.compressedSize); + LOG.debug("--compression ratio: {%4.3f}", _stats.getRatio()); + LOG.debug("--Dense ratio: {%4.3f}", _stats.getDenseRatio()); if(!(costEstimator instanceof MemoryCostEstimator)) { - LOG.debug(String.format("--original cost: %5.2E", _stats.originalCost)); - LOG.debug(String.format("--single col cost: %5.2E", _stats.estimatedCostCols)); - LOG.debug(String.format("--cocode cost: %5.2E", _stats.estimatedCostCoCoded)); - LOG.debug(String.format("--actual cost: %5.2E", _stats.compressedCost)); - LOG.debug( - String.format("--relative cost: %1.4f", (_stats.compressedCost / _stats.originalCost))); + LOG.debug("--original cost: {%5.2E}", _stats.originalCost); + LOG.debug("--single col cost: {%5.2E}", _stats.estimatedCostCols); + LOG.debug("--cocode cost: {%5.2E}", _stats.estimatedCostCoCoded); + LOG.debug("--actual cost: {%5.2E}", _stats.compressedCost); + LOG.debug("--relative cost: {%1.4f}", (_stats.compressedCost / _stats.originalCost)); } logLengths(); } @@ -615,20 +607,18 @@ private void logLengths() { 
for(AColGroup colGroup : res.getColGroups()) lengths[i++] = colGroup.getNumValues(); - LOG.debug("--compressed colGroup dictionary sizes: " + Arrays.toString(lengths)); - LOG.debug("--compressed colGroup nr columns : " + constructNrColumnString(res.getColGroups())); + LOG.debug("--compressed colGroup dictionary sizes: {}", Arrays.toString(lengths)); + LOG.debug("--compressed colGroup nr columns : {}", constructNrColumnString(res.getColGroups())); } if(LOG.isTraceEnabled()) { for(AColGroup colGroup : res.getColGroups()) { if(colGroup.estimateInMemorySize() < 1000) LOG.trace(colGroup); else { - LOG.trace( - "--colGroups type : " + colGroup.getClass().getSimpleName() + " size: " - + colGroup.estimateInMemorySize() - + ((colGroup instanceof AColGroupValue) ? " numValues :" - + ((AColGroupValue) colGroup).getNumValues() : "") - + " colIndexes : " + colGroup.getColIndices()); + LOG.trace("--colGroups type : {} size: {}{} colIndexes : {}", + colGroup.getClass().getSimpleName(), colGroup.estimateInMemorySize(), + (colGroup instanceof AColGroupValue) ? 
" numValues :" + ((AColGroupValue) colGroup).getNumValues() + " " : "", + colGroup.getColIndices()); } } } diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java b/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java index 36637ee8959..8b00aba0380 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java @@ -28,8 +28,6 @@ import java.util.stream.LongStream; import org.apache.commons.lang3.mutable.MutableBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types.DataType; @@ -66,6 +64,7 @@ import org.apache.sysds.runtime.util.HDFSTool; import org.apache.sysds.runtime.util.LocalFileUtils; import org.apache.sysds.runtime.util.UtilFunctions; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.Statistics; import org.apache.sysds.utils.stats.InfrastructureAnalyzer; @@ -85,7 +84,7 @@ public abstract class CacheableData> extends Data private static final long serialVersionUID = -413810592207212835L; /** Global logging instance for all subclasses of CacheableData */ - protected static final Log LOG = LogFactory.getLog(CacheableData.class.getName()); + protected static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(CacheableData.class); // global constant configuration parameters public static final long CACHING_THRESHOLD = (long)Math.max(4*1024, //obj not s.t. 
caching @@ -902,15 +901,13 @@ public synchronized void exportData(String fName, String outputFormat, FileForma * @param formatProperties file format properties */ public synchronized void exportData (String fName, String outputFormat, int replication, FileFormatProperties formatProperties) { - if( LOG.isTraceEnabled() ) - LOG.trace("Export data "+hashCode()+" "+fName); + LOG.trace("Export data {} {}", hashCode(), fName); long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0; //prevent concurrent modifications if ( !isAvailableToRead() ) throw new DMLRuntimeException("MatrixObject not available to read."); - if( LOG.isTraceEnabled() ) - LOG.trace("Exporting " + this.getDebugName() + " to " + fName + " in format " + outputFormat); + LOG.trace("Exporting {} to {} in format {}", this.getDebugName(), fName, outputFormat); if( DMLScript.USE_ACCELERATOR && _gpuObjects != null ) { boolean copiedFromGPU = false; @@ -1040,8 +1037,7 @@ else if( getRDDHandle()!=null && getRDDHandle().isPending() else { //CASE 4: data already in hdfs (do nothing, no need for export) - if( LOG.isTraceEnabled() ) - LOG.trace(this.getDebugName() + ": Skip export to hdfs since data already exists."); + LOG.trace("{}: Skip export to hdfs since data already exists.", this.getDebugName()); } _hdfsFileExists = true; @@ -1073,11 +1069,9 @@ protected boolean isBlobPresent() { */ protected void restoreBlobIntoMemory() { String cacheFilePathAndName = getCacheFilePathAndName(); - long begin = LOG.isTraceEnabled() ? System.currentTimeMillis() : 0; + long begin = LOG.currentTimeMillisIfTraceEnabled(); - if( LOG.isTraceEnabled() ) - LOG.trace ("CACHE: Restoring matrix... " + hashCode() + " HDFS path: " + - (_hdfsFileName == null ? "null" : _hdfsFileName) + ", Restore from path: " + cacheFilePathAndName); + LOG.trace ("CACHE: Restoring matrix... 
{} HDFS path: {} Restore from path: {}", hashCode(), _hdfsFileName, cacheFilePathAndName); if (_data != null) throw new DMLRuntimeException(cacheFilePathAndName + " : Cannot restore on top of existing in-memory data."); @@ -1093,8 +1087,7 @@ protected void restoreBlobIntoMemory() { if (_data == null) throw new DMLRuntimeException (cacheFilePathAndName + " : Restore failed."); - if( LOG.isTraceEnabled() ) - LOG.trace("Restoring matrix - COMPLETED ... " + (System.currentTimeMillis()-begin) + " msec."); + LOG.trace("Restoring matrix - COMPLETED ... {} msec.", LOG.currentTimeMillisIfTraceEnabled()-begin); } protected abstract T readBlobFromCache(String fname) @@ -1107,10 +1100,8 @@ protected abstract T readBlobFromCache(String fname) */ public final void freeEvictedBlob() { String cacheFilePathAndName = getCacheFilePathAndName(); - long begin = LOG.isTraceEnabled() ? System.currentTimeMillis() : 0; - if( LOG.isTraceEnabled() ) - LOG.trace("CACHE: Freeing evicted matrix... " + hashCode() + " HDFS path: " + - (_hdfsFileName == null ? "null" : _hdfsFileName) + " Eviction path: " + cacheFilePathAndName); + long begin = LOG.currentTimeMillisIfTraceEnabled(); + LOG.trace("CACHE: Freeing evicted matrix... {} HDFS path: {} Eviction path: {}", hashCode(), _hdfsFileName, cacheFilePathAndName); if(isCachingActive()) { if (OptimizerUtils.isUMMEnabled()) @@ -1119,8 +1110,7 @@ public final void freeEvictedBlob() { LazyWriteBuffer.deleteBlock(cacheFilePathAndName); } - if( LOG.isTraceEnabled() ) - LOG.trace("Freeing evicted matrix - COMPLETED ... " + (System.currentTimeMillis()-begin) + " msec."); + LOG.trace("Freeing evicted matrix - COMPLETED ... 
{} msec.", LOG.currentTimeMillisIfTraceEnabled()-begin); } protected boolean isBelowCachingThreshold() { @@ -1172,8 +1162,7 @@ protected abstract T readBlobFromStream(OOCStream stream) // Federated read protected T readBlobFromFederated(FederationMap fedMap) throws IOException { - if( LOG.isDebugEnabled() ) //common if instructions keep federated outputs - LOG.debug("Pulling data from federated sites"); + LOG.debug("Pulling data from federated sites"); MetaDataFormat iimd = (MetaDataFormat) _metaData; DataCharacteristics dc = iimd.getDataCharacteristics(); return readBlobFromFederated(fedMap, dc.getDims()); @@ -1292,8 +1281,7 @@ protected void acquire (boolean isModify, boolean restore) { throw new DMLRuntimeException("MODIFY-MODIFY not allowed."); } - if( LOG.isTraceEnabled() ) - LOG.trace("Acquired lock on " + getDebugName() + ", status: " + _cacheStatus.name() ); + LOG.trace("Acquired lock on {} status: {}", getDebugName(), _cacheStatus.name()); } @@ -1328,8 +1316,7 @@ protected void release(boolean cacheNoWrite) break; } - if( LOG.isTraceEnabled() ) - LOG.trace("Released lock on " + getDebugName() + ", status: " + _cacheStatus.name()); + LOG.trace("Released lock on {} status: {}", getDebugName(), _cacheStatus.name()); } diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java b/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java index 2cd9e8abf4e..44db0435d1e 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java @@ -27,8 +27,6 @@ import java.util.Arrays; import java.util.concurrent.CompletableFuture; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.sysds.api.DMLScript; @@ -75,6 +73,7 @@ import 
org.apache.sysds.runtime.meta.MatrixCharacteristics; import org.apache.sysds.runtime.meta.MetaDataAll; import org.apache.sysds.runtime.meta.MetaDataFormat; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.Statistics; import org.apache.sysds.utils.stats.InfrastructureAnalyzer; import org.apache.sysds.utils.stats.ParamServStatistics; @@ -90,7 +89,7 @@ * execution contexts at the federated sites too */ public class FederatedWorkerHandler extends ChannelInboundHandlerAdapter { - private static final Log LOG = LogFactory.getLog(FederatedWorkerHandler.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(FederatedWorkerHandler.class); /** The Federated Lookup Table of the current Federated Worker. */ private final FederatedLookupTable _flt; @@ -290,11 +289,9 @@ private static void printStatistics() { } private static void logRequests(FederatedRequest request, int nrRequest, int totalRequests) { - if(LOG.isDebugEnabled()) { - LOG.debug("Executing command " + (nrRequest + 1) + "/" + totalRequests + ": " + request.getType().name()); - if(LOG.isTraceEnabled()) - LOG.trace("full command: " + request.toString()); - } + LOG.debug("Executing command {}/{}: {}", nrRequest + 1, totalRequests, request.getType().name()); + if(LOG.isTraceEnabled()) + LOG.trace("full command: " + request); } private FederatedResponse executeCommand(FederatedRequest request, ExecutionContextMap ecm, EventStageModel eventStage) diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java b/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java index 333c889b7c1..020ebc6d2d8 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java @@ -30,8 +30,6 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.NotImplementedException; 
-import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types.DataType; import org.apache.sysds.parser.DMLProgram; @@ -48,12 +46,13 @@ import org.apache.sysds.runtime.instructions.cp.ListObject; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.matrix.operators.RightScalarOperator; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.stats.ParamServStatistics; import org.apache.sysds.utils.stats.Timing; public abstract class ParamServer { - protected static final Log LOG = LogFactory.getLog(ParamServer.class.getName()); + protected static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(ParamServer.class); protected static final boolean ACCRUE_BSP_GRADIENTS = true; // worker input queues and global model @@ -203,10 +202,8 @@ protected synchronized void updateGlobalModel(int workerID, ListObject params) { protected synchronized void updateGlobalGradients(int workerID, ListObject gradients) { try { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("Successfully pulled the gradients [size:%d kb] of worker_%d.", - gradients.getDataSize() / 1024, workerID)); - } + LOG.debug("Successfully pulled the gradients [size:{} kb] of worker_{}.", + gradients.getDataSize() / 1024, workerID); switch(_updateType) { case BSP: { @@ -298,8 +295,7 @@ private void performGlobalGradientUpdate() { // Broadcast the updated model broadcastModel(_finishedStates); resetFinishedStates(); - if(LOG.isDebugEnabled()) - LOG.debug("Global parameter is broadcasted successfully."); + LOG.debug("Global parameter is broadcasted successfully."); } private void tagStragglers() { @@ -353,10 +349,8 @@ protected ListObject updateLocalModel(ExecutionContext ec, ListObject gradients, protected synchronized void updateAverageModel(int workerID, ListObject model) { try { - if(LOG.isDebugEnabled()) { - 
LOG.debug(String.format("Successfully pulled the models [size:%d kb] of worker_%d.", - model.getDataSize() / 1024, workerID)); - } + LOG.debug("Successfully pulled the models [size:{} kb] of worker_{}.", + model.getDataSize() / 1024, workerID); Timing tAgg = DMLScript.STATISTICS ? new Timing(true) : null; switch(_updateType) { @@ -443,8 +437,7 @@ protected void updateAndBroadcastModel(ListObject new_model, Timing tAgg, boolea broadcastModel(true); else broadcastModel(workerBroadcastMask); - if(LOG.isDebugEnabled()) - LOG.debug("Global parameter is broadcasted successfully "); + LOG.debug("Global parameter is broadcasted successfully"); } protected ListObject weightModels(ListObject params, int numWorkers) { diff --git a/src/main/java/org/apache/sysds/runtime/instructions/cp/MultiReturnParameterizedBuiltinCPInstruction.java b/src/main/java/org/apache/sysds/runtime/instructions/cp/MultiReturnParameterizedBuiltinCPInstruction.java index 98101348da0..15c3c7ce8ed 100644 --- a/src/main/java/org/apache/sysds/runtime/instructions/cp/MultiReturnParameterizedBuiltinCPInstruction.java +++ b/src/main/java/org/apache/sysds/runtime/instructions/cp/MultiReturnParameterizedBuiltinCPInstruction.java @@ -37,8 +37,11 @@ import org.apache.sysds.runtime.matrix.operators.Operator; import org.apache.sysds.runtime.transform.encode.EncoderFactory; import org.apache.sysds.runtime.transform.encode.MultiColumnEncoder; +import org.apache.sysds.utils.ParameterizedLogger; public class MultiReturnParameterizedBuiltinCPInstruction extends ComputationCPInstruction { + private static final ParameterizedLogger LOG = + ParameterizedLogger.getLogger(MultiReturnParameterizedBuiltinCPInstruction.class); protected final ArrayList _outputs; protected final boolean _metaReturn; @@ -106,9 +109,7 @@ public void processInstruction(ExecutionContext ec) { ec.setMatrixOutput(getOutput(0).getName(), data); ec.setFrameOutput(getOutput(1).getName(), meta); - if(LOG.isDebugEnabled()) - // debug the size of the output 
metadata. - LOG.debug("Memory size of metadata: " + meta.getInMemorySize()); + LOG.debug("Memory size of metadata: {}", meta.getInMemorySize()); } @Override diff --git a/src/main/java/org/apache/sysds/runtime/instructions/fed/InitFEDInstruction.java b/src/main/java/org/apache/sysds/runtime/instructions/fed/InitFEDInstruction.java index bf2e6b43718..342653be014 100644 --- a/src/main/java/org/apache/sysds/runtime/instructions/fed/InitFEDInstruction.java +++ b/src/main/java/org/apache/sysds/runtime/instructions/fed/InitFEDInstruction.java @@ -35,8 +35,6 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types; import org.apache.sysds.conf.ConfigurationManager; @@ -64,10 +62,11 @@ import org.apache.sysds.runtime.lineage.LineageTraceable; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.meta.DataCharacteristics; +import org.apache.sysds.utils.ParameterizedLogger; public class InitFEDInstruction extends FEDInstruction implements LineageTraceable { - private static final Log LOG = LogFactory.getLog(InitFEDInstruction.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(InitFEDInstruction.class); public static final String FED_MATRIX_IDENTIFIER = "matrix"; public static final String FED_FRAME_IDENTIFIER = "frame"; @@ -139,7 +138,7 @@ private void processFedInit(ExecutionContext ec){ if( dat instanceof StringObject ) { String address = ((StringObject) dat).getStringValue(); if(addCheck.contains(address)) - LOG.warn("Federated data contains address duplicates: " + addresses); + LOG.warn("Federated data contains address duplicates: {}", addresses); addCheck.add(address); } @@ -235,7 +234,7 @@ public void processFromLocalFedInit(ExecutionContext ec) { if(dat instanceof StringObject) { String 
address = ((StringObject) dat).getStringValue(); if(addCheck.contains(address)) - LOG.warn("Federated data contains address duplicates: " + addresses); + LOG.warn("Federated data contains address duplicates: {}", addresses); addCheck.add(address); } @@ -423,8 +422,7 @@ public static void federateMatrix(CacheableData output, List> idResponse : idResponses) { // wait for initialization and check dimensions FederatedResponse re = idResponse.getRight().get(timeout, TimeUnit.SECONDS); @@ -447,8 +445,7 @@ public static void federateMatrix(CacheableData output, List> workers) { @@ -511,8 +508,7 @@ public static void federateFrame(FrameObject output, List rewriteTsmmCbind (Instruction curr, Execut DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteTsmmCbind APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteTsmmCbind APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); // cleanup buffer pool @@ -209,8 +207,7 @@ private static ArrayList rewriteTsmmCbindOnes (Instruction curr, Ex DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteTsmmCbindOnes APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteTsmmCbindOnes APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -254,8 +251,7 @@ private static ArrayList rewriteTsmmRbind (Instruction curr, Execut DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteTsmmRbind APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteTsmmRbind APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -318,8 +314,7 @@ private static ArrayList rewriteTsmm2Cbind (Instruction curr, Execu DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime 
instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteTsmm2Cbind APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteTsmm2Cbind APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -389,8 +384,7 @@ private static ArrayList rewriteTsmm2CbindSameLeft (Instruction cur DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteTsmm2CbindSameLeft APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteTsmm2CbindSameLeft APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -435,8 +429,7 @@ private static ArrayList rewriteMatMulRbindLeft (Instruction curr, DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteMetMulRbindLeft APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteMetMulRbindLeft APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -481,8 +474,7 @@ private static ArrayList rewriteMatMulCbindRight (Instruction curr, DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteMatMulCbindRight APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteMatMulCbindRight APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -516,8 +508,7 @@ private static ArrayList rewriteMatMulCbindRightOnes (Instruction c DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteMatMulCbindRightOnes APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteMatMulCbindRightOnes APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -573,8 +564,7 @@ private static ArrayList rewriteElementMulRbind (Instruction curr, DataOp lrwWrite = 
HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteElementMulRbind APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteElementMulRbind APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -630,8 +620,7 @@ private static ArrayList rewriteElementMulCbind (Instruction curr, DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteElementMulCbind APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteElementMulCbind APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -687,8 +676,7 @@ private static ArrayList rewriteAggregateCbind (Instruction curr, E DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteElementMulCbind APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteElementMulCbind APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -740,8 +728,7 @@ private static ArrayList rewriteIndexingMatMul (Instruction curr, E DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewriteIndexingMatMul APPLIED"); + LOG.debug("LINEAGE REWRITE rewriteIndexingMatMul APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); // Keep reuse enabled _disableReuse = false; @@ -801,8 +788,7 @@ private static ArrayList rewritePcaTsmm(Instruction curr, Execution DataOp lrwWrite = HopRewriteUtils.createTransientWrite(LR_VAR, lrwHop); // Generate runtime instructions - if (LOG.isDebugEnabled()) - LOG.debug("LINEAGE REWRITE rewritePcaTsmm APPLIED"); + LOG.debug("LINEAGE REWRITE rewritePcaTsmm APPLIED"); ArrayList inst = genInst(lrwWrite, lrwec); _disableReuse = true; @@ -1221,8 +1207,8 @@ private static 
ArrayList genInst(Hop hops, ExecutionContext ec) { ArrayList newInst = Recompiler.recompileHopsDag(hops, ec.getVariables(), null, true, true, 0); if (LOG.isDebugEnabled()) { LOG.debug("COMPENSATION PLAN: "); - LOG.debug("EXPLAIN LINEAGE REWRITE (HOP) \n" + Explain.explain(hops,1)); - LOG.debug("EXPLAIN LINEAGE REWRITE (INSTRUCTION) \n" + Explain.explain(newInst,1)); + LOG.debug("EXPLAIN LINEAGE REWRITE (HOP) \n{}", Explain.explain(hops,1)); + LOG.debug("EXPLAIN LINEAGE REWRITE (INSTRUCTION) \n{}", Explain.explain(newInst,1)); } return newInst; } diff --git a/src/main/java/org/apache/sysds/runtime/matrix/data/sketch/countdistinctapprox/KMVSketch.java b/src/main/java/org/apache/sysds/runtime/matrix/data/sketch/countdistinctapprox/KMVSketch.java index 29d1706f9e1..52289cc5330 100644 --- a/src/main/java/org/apache/sysds/runtime/matrix/data/sketch/countdistinctapprox/KMVSketch.java +++ b/src/main/java/org/apache/sysds/runtime/matrix/data/sketch/countdistinctapprox/KMVSketch.java @@ -20,8 +20,6 @@ package org.apache.sysds.runtime.matrix.data.sketch.countdistinctapprox; import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.runtime.DMLRuntimeException; import org.apache.sysds.runtime.compress.CompressedMatrixBlock; import org.apache.sysds.runtime.data.DenseBlock; @@ -32,6 +30,7 @@ import org.apache.sysds.runtime.matrix.operators.CountDistinctOperatorTypes; import org.apache.sysds.runtime.matrix.operators.Operator; import org.apache.sysds.utils.Hash; +import org.apache.sysds.utils.ParameterizedLogger; /** * KMV synopsis(for k minimum values) Distinct-Value Estimation @@ -45,7 +44,7 @@ */ public class KMVSketch extends CountDistinctSketch { - private static final Log LOG = LogFactory.getLog(KMVSketch.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(KMVSketch.class); public KMVSketch(Operator op) { super(op); @@ 
-73,13 +72,10 @@ public MatrixBlock getValue(MatrixBlock blkIn) { SmallestPriorityQueue spq = getKSmallestHashes(blkIn, k, M); - if(LOG.isDebugEnabled()) { - LOG.debug("M not forced to int size: " + tmp); - LOG.debug("M: " + M); - LOG.debug("M: " + M); - LOG.debug("kth smallest hash:" + spq.peek()); - LOG.debug("spq: " + spq); - } + LOG.debug("M not forced to int size: {}", tmp); + LOG.debug("M: {}", M); + LOG.debug("kth smallest hash: {}", spq.peek()); + LOG.debug("spq: {}", spq); long res = countDistinctValuesKMV(spq, k, M, D); @@ -197,11 +193,9 @@ private long countDistinctValuesKMV(SmallestPriorityQueue spq, int k, int M, lon double estimate = (k - 1) / U_k; double ceilEstimate = Math.min(estimate, D); - if(LOG.isDebugEnabled()) { - LOG.debug("U_k : " + U_k); - LOG.debug("Estimate: " + estimate); - LOG.debug("Ceil worst case: " + D); - } + LOG.debug("U_k : {}", U_k); + LOG.debug("Estimate: {}", estimate); + LOG.debug("Ceil worst case: {}", D); res = Math.round(ceilEstimate); } diff --git a/src/main/java/org/apache/sysds/runtime/transform/encode/ColumnEncoderDummycode.java b/src/main/java/org/apache/sysds/runtime/transform/encode/ColumnEncoderDummycode.java index 616a6a7ce8b..318b4189f50 100644 --- a/src/main/java/org/apache/sysds/runtime/transform/encode/ColumnEncoderDummycode.java +++ b/src/main/java/org/apache/sysds/runtime/transform/encode/ColumnEncoderDummycode.java @@ -35,10 +35,12 @@ import org.apache.sysds.runtime.frame.data.FrameBlock; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.util.DependencyTask; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.stats.TransformStatistics; public class ColumnEncoderDummycode extends ColumnEncoder { private static final long serialVersionUID = 5832130477659116489L; + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(ColumnEncoderDummycode.class); /** The number of columns outputted from this column group. 
*/ public int _domainSize = -1; @@ -224,10 +226,7 @@ else if(columnEncoder instanceof ColumnEncoderFeatureHash){ if(distinct != -1) { _domainSize = Math.max(1, distinct); - if(LOG.isDebugEnabled()){ - - LOG.debug("DummyCoder for column: " + _colID + " has domain size: " + _domainSize); - } + LOG.debug("DummyCoder for column: {} has domain size: {}", _colID, _domainSize); } } } diff --git a/src/main/java/org/apache/sysds/runtime/transform/encode/CompressedEncode.java b/src/main/java/org/apache/sysds/runtime/transform/encode/CompressedEncode.java index c3903cef006..325815075a0 100644 --- a/src/main/java/org/apache/sysds/runtime/transform/encode/CompressedEncode.java +++ b/src/main/java/org/apache/sysds/runtime/transform/encode/CompressedEncode.java @@ -29,8 +29,6 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.common.Types.ValueType; import org.apache.sysds.runtime.compress.CompressedMatrixBlock; import org.apache.sysds.runtime.compress.colgroup.AColGroup; @@ -64,10 +62,11 @@ import org.apache.sysds.runtime.transform.encode.ColumnEncoderBin.BinMethod; import org.apache.sysds.runtime.util.CommonThreadPool; import org.apache.sysds.runtime.util.UtilFunctions; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.stats.Timing; public class CompressedEncode { - protected static final Log LOG = LogFactory.getLog(CompressedEncode.class.getName()); + protected static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(CompressedEncode.class); /** Row parallelization threshold for parallel creation of AMapToData for compression */ public static int ROW_PARALLELIZATION_THRESHOLD = 10000; @@ -205,9 +204,8 @@ private int shiftGroups(List groups) { private AColGroup encode(ColumnEncoderComposite c) throws Exception { final Timing t = new Timing(); AColGroup g = 
executeEncode(c); - if(LOG.isDebugEnabled()) - LOG.debug(String.format("Encode: columns: %4d estimateDistinct: %6d distinct: %6d size: %6d time: %10f", - c._colID, c._estNumDistincts, g.getNumValues(), g.estimateInMemorySize(), t.stop())); + LOG.debug("Encode: columns: {%4d} estimateDistinct: {%6d} distinct: {%6d} size: {%6d} time: {%10f}", + c._colID, c._estNumDistincts, g.getNumValues(), g.estimateInMemorySize(), t.stop()); return g; } @@ -721,8 +719,7 @@ private AColGroup combine(List ucg) throws Interrupte nnz.addAndGet(combinedNNZ); ret.setNonZeros(combinedNNZ); - if(LOG.isDebugEnabled()) - LOG.debug("Combining of : " + ucg.size() + " uncompressed columns Time: " + t.stop()); + LOG.debug("Combining of : {} uncompressed columns Time: {}", ucg.size(), t.stop()); return ColGroupUncompressed.create(ret, combinedCols); } @@ -769,16 +766,13 @@ private final long putIntoRowBlock(List ucg, int jl, } private void logging(MatrixBlock mb) { - if(LOG.isDebugEnabled()) { - LOG.debug(String.format("Uncompressed transform encode Dense size: %16d", mb.estimateSizeDenseInMemory())); - LOG.debug(String.format("Uncompressed transform encode Sparse size: %16d", mb.estimateSizeSparseInMemory())); - LOG.debug(String.format("Compressed transform encode size: %16d", mb.estimateSizeInMemory())); - - double ratio = Math.min(mb.estimateSizeDenseInMemory(), mb.estimateSizeSparseInMemory()) / - mb.estimateSizeInMemory(); - double denseRatio = mb.estimateSizeDenseInMemory() / mb.estimateSizeInMemory(); - LOG.debug(String.format("Compression ratio: %10.3f", ratio)); - LOG.debug(String.format("Dense ratio: %10.3f", denseRatio)); - } + LOG.debug("Uncompressed transform encode Dense size: {%16d}", mb.estimateSizeDenseInMemory()); + LOG.debug("Uncompressed transform encode Sparse size: {%16d}", mb.estimateSizeSparseInMemory()); + LOG.debug("Compressed transform encode size: {%16d}", mb.estimateSizeInMemory()); + double ratio = Math.min(mb.estimateSizeDenseInMemory(), 
mb.estimateSizeSparseInMemory()) / + mb.estimateSizeInMemory(); + double denseRatio = mb.estimateSizeDenseInMemory() / mb.estimateSizeInMemory(); + LOG.debug("Compression ratio: {%10.3f}", ratio); + LOG.debug("Dense ratio: {%10.3f}", denseRatio); } } diff --git a/src/main/java/org/apache/sysds/utils/ParameterizedLogger.java b/src/main/java/org/apache/sysds/utils/ParameterizedLogger.java new file mode 100644 index 00000000000..4240c463917 --- /dev/null +++ b/src/main/java/org/apache/sysds/utils/ParameterizedLogger.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.sysds.utils; + +import java.util.IllegalFormatException; +import java.util.Locale; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Logger adapter for commons-logging with lightweight '{}' parameter formatting. + */ +public class ParameterizedLogger { + private final Log _log; + + private ParameterizedLogger(Log log) { + _log = log; + } + + public static ParameterizedLogger getLogger(Class cls) { + return new ParameterizedLogger(LogFactory.getLog(cls.getName())); + } + + public long currentTimeMillisIfTraceEnabled() { + return _log.isTraceEnabled() ? 
System.currentTimeMillis() : 0; + } + + public boolean isTraceEnabled() { + return _log.isTraceEnabled(); + } + + public boolean isDebugEnabled() { + return _log.isDebugEnabled(); + } + + + public void trace(String pattern, Object... args) { + if(_log.isTraceEnabled()) + _log.trace(format(pattern, args)); + } + + public void trace(Object message) { + _log.trace(message); + } + + public void debug(String pattern, Object... args) { + if(_log.isDebugEnabled()) + _log.debug(format(pattern, args)); + } + + public void debug(Object message) { + _log.debug(message); + } + + public void debug(Object message, Throwable t) { + _log.debug(message, t); + } + + public void info(String pattern, Object... args) { + if(_log.isInfoEnabled()) + _log.info(format(pattern, args)); + } + + public void info(Object message) { + _log.info(message); + } + + public void warn(String pattern, Object... args) { + if(_log.isWarnEnabled()) + _log.warn(format(pattern, args)); + } + + public void warn(Object message) { + _log.warn(message); + } + + + public void error(String pattern, Object... args) { + if(_log.isErrorEnabled()) + _log.error(format(pattern, args)); + } + + public void error(Object message) { + _log.error(message); + } + + public void error(Object message, Throwable t) { + _log.error(message, t); + } + + private static String format(String pattern, Object... 
args) { + if(pattern == null) + return "null"; + if(args == null || args.length == 0) + return pattern; + + StringBuilder sb = new StringBuilder(pattern.length() + args.length * 16); + int argIx = 0; + int start = 0; + int search = 0; + int open; + while((open = pattern.indexOf('{', search)) >= 0 && argIx < args.length) { + int close = pattern.indexOf('}', open + 1); + if(close < 0) + break; + + boolean defaultPlaceholder = close == open + 1; + boolean stringFormatPlaceholder = !defaultPlaceholder && pattern.charAt(open + 1) == '%'; + if(!defaultPlaceholder && !stringFormatPlaceholder) { + search = open + 1; + continue; + } + + sb.append(pattern, start, open); + if(defaultPlaceholder) + sb.append(String.valueOf(args[argIx++])); + else + sb.append(formatArgument(pattern.substring(open + 1, close), args[argIx++])); + start = close + 1; + search = start; + } + sb.append(pattern, start, pattern.length()); + + while(argIx < args.length) { + sb.append(" ["); + sb.append(String.valueOf(args[argIx++])); + sb.append(']'); + } + + return sb.toString(); + } + + private static String formatArgument(String format, Object arg) { + try { + return String.format(Locale.ROOT, format, arg); + } + catch(IllegalFormatException ex) { + return String.valueOf(arg); + } + } +} diff --git a/src/test/java/org/apache/sysds/test/component/utils/ParameterizedLoggerTest.java b/src/test/java/org/apache/sysds/test/component/utils/ParameterizedLoggerTest.java new file mode 100644 index 00000000000..b3ff8df9155 --- /dev/null +++ b/src/test/java/org/apache/sysds/test/component/utils/ParameterizedLoggerTest.java @@ -0,0 +1,180 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.sysds.test.component.utils; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; + +import org.apache.commons.logging.Log; +import org.apache.sysds.utils.ParameterizedLogger; +import org.junit.Test; + +public class ParameterizedLoggerTest { + @Test + public void testGetLogger() { + assertNotNull(ParameterizedLogger.getLogger(ParameterizedLoggerTest.class)); + } + + @Test + public void testCurrentTimeMillisIfTraceEnabled() throws Exception { + Log log = mock(Log.class); + ParameterizedLogger plog = newLogger(log); + + when(log.isTraceEnabled()).thenReturn(false, true); + assertEquals(0, plog.currentTimeMillisIfTraceEnabled()); + long ts = plog.currentTimeMillisIfTraceEnabled(); + assertTrue(ts > 0); + } + + @Test + public void testIsTraceEnabledAndIsDebugEnabled() throws Exception { + Log log = mock(Log.class); + ParameterizedLogger plog = newLogger(log); + + 
when(log.isTraceEnabled()).thenReturn(true, false); + when(log.isDebugEnabled()).thenReturn(false, true); + assertTrue(plog.isTraceEnabled()); + assertEquals(false, plog.isDebugEnabled()); + + assertEquals(false, plog.isTraceEnabled()); + assertTrue(plog.isDebugEnabled()); + } + + @Test + public void testTracePatternAndObject() throws Exception { + Log log = mock(Log.class); + ParameterizedLogger plog = newLogger(log); + + when(log.isTraceEnabled()).thenReturn(false, true); + plog.trace("value {}", 7); + verify(log, never()).trace(any()); + + plog.trace("value {}", 7); + verify(log).trace(eq("value 7")); + + plog.trace("raw"); + verify(log).trace(eq("raw")); + } + + @Test + public void testDebugMethods() throws Exception { + Log log = mock(Log.class); + ParameterizedLogger plog = newLogger(log); + + when(log.isDebugEnabled()).thenReturn(false, true); + plog.debug("x={%5d}", 3); + verify(log, never()).debug(any()); + + plog.debug("x={%5d}", 3); + verify(log).debug(eq("x= 3")); + + plog.debug("dbg"); + verify(log).debug(eq("dbg")); + + Throwable t = new RuntimeException("boom"); + plog.debug("dbg2", t); + verify(log).debug(eq("dbg2"), same(t)); + } + + @Test + public void testInfoAndWarnPatternAndObject() throws Exception { + Log log = mock(Log.class); + ParameterizedLogger plog = newLogger(log); + + when(log.isInfoEnabled()).thenReturn(false, true); + plog.info("i {}", 1); + verify(log, never()).info(any()); + + plog.info("i {}", 1); + verify(log).info(eq("i 1")); + + plog.info("info"); + verify(log).info(eq("info")); + + when(log.isWarnEnabled()).thenReturn(false, true); + plog.warn("w {}", 2); + verify(log, never()).warn(any()); + + plog.warn("w {}", 2); + verify(log).warn(eq("w 2")); + + plog.warn("warn"); + verify(log).warn(eq("warn")); + } + + @Test + public void testErrorMethods() throws Exception { + Log log = mock(Log.class); + ParameterizedLogger plog = newLogger(log); + + when(log.isErrorEnabled()).thenReturn(false, true); + plog.error("e {}", 1); + 
verify(log, never()).error(any()); + + plog.error("e {}", 1); + verify(log).error(eq("e 1")); + + plog.error("err"); + verify(log).error(eq("err")); + + Throwable t = new IllegalStateException("x"); + plog.error("err2", t); + verify(log).error(eq("err2"), same(t)); + } + + @Test + public void testFormatPlainAndStringFormattedPlaceholders() throws Exception { + assertEquals("a=7 b= 7 c= 3.14000 d= hi", + invokeFormat("a={} b={%7d} c={%10.5f} d={%16s}", 7, 7, 3.14, "hi")); + } + + @Test + public void testFormatFallbackForInvalidFormatter() throws Exception { + assertEquals("x=12", invokeFormat("x={%q}", 12)); + } + + @Test + public void testFormatNonPlaceholderBracesAndExtraArgs() throws Exception { + assertEquals("prefix {skip} 1 [2]", invokeFormat("prefix {skip} {}", 1, 2)); + } + + private static ParameterizedLogger newLogger(Log log) throws Exception { + Constructor ctor = ParameterizedLogger.class.getDeclaredConstructor(Log.class); + ctor.setAccessible(true); + return ctor.newInstance(log); + } + + private static String invokeFormat(String pattern, Object... 
args) throws Exception { + Method method = ParameterizedLogger.class.getDeclaredMethod("format", String.class, Object[].class); + method.setAccessible(true); + return (String) method.invoke(null, pattern, args); + } +} From d61285db3c7902b1dc08ad396d257ac909b48548 Mon Sep 17 00:00:00 2001 From: e-strauss Date: Sun, 22 Feb 2026 13:37:24 +0100 Subject: [PATCH 2/5] refactor from isInfoEnabled to the new logging util --- .../java/org/apache/sysds/api/DMLScript.java | 16 ++---- .../sysds/hops/codegen/SpoofCompiler.java | 52 +++++++++---------- .../paramserv/FederatedPSControlThread.java | 13 ++--- .../controlprogram/paramserv/ParamServer.java | 21 +++----- .../cp/CompressionCPInstruction.java | 14 ++--- .../cp/ParamservBuiltinCPInstruction.java | 25 ++++----- 6 files changed, 58 insertions(+), 83 deletions(-) diff --git a/src/main/java/org/apache/sysds/api/DMLScript.java b/src/main/java/org/apache/sysds/api/DMLScript.java index 81acb9deacd..105170c4f6d 100644 --- a/src/main/java/org/apache/sysds/api/DMLScript.java +++ b/src/main/java/org/apache/sysds/api/DMLScript.java @@ -39,8 +39,7 @@ import org.apache.commons.cli.AlreadySelectedException; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.sysds.common.Types.ExecMode; @@ -178,7 +177,7 @@ public class DMLScript public static boolean VALIDATOR_IGNORE_ISSUES = false; public static String _uuid = IDHandler.createDistributedUniqueID(); - private static final Log LOG = LogFactory.getLog(DMLScript.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(DMLScript.class); /////////////////////////////// // public external interface @@ -621,14 +620,9 @@ private static void printInvocationInfo(String fnameScript, String 
fnameOptConfi } private static void printStartExecInfo(String dmlScriptString) { - boolean info = LOG.isInfoEnabled(); - boolean debug = LOG.isDebugEnabled(); - if(info) - LOG.info("BEGIN DML run " + getDateTime()); - if(debug) - LOG.debug("DML script: \n" + dmlScriptString); - if(info) - LOG.info("Process id: " + IDHandler.getProcessID()); + LOG.info("BEGIN DML run {}", getDateTime()); + LOG.debug("DML script: \n{}", dmlScriptString); + LOG.info("Process id: {}", IDHandler.getProcessID()); } private static void registerForMonitoring() { diff --git a/src/main/java/org/apache/sysds/hops/codegen/SpoofCompiler.java b/src/main/java/org/apache/sysds/hops/codegen/SpoofCompiler.java index 307205dbc89..ba14642da14 100644 --- a/src/main/java/org/apache/sysds/hops/codegen/SpoofCompiler.java +++ b/src/main/java/org/apache/sysds/hops/codegen/SpoofCompiler.java @@ -19,11 +19,23 @@ package org.apache.sysds.hops.codegen; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map.Entry; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types.AggOp; import org.apache.sysds.common.Types.DataType; @@ -97,24 +109,11 @@ import org.apache.sysds.runtime.matrix.data.Pair; import org.apache.sysds.utils.Explain; import org.apache.sysds.utils.NativeHelper; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.stats.CodegenStatistics; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import 
java.util.Arrays; -import java.util.Collections; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.Map.Entry; -import java.util.jar.JarEntry; -import java.util.jar.JarFile; - public class SpoofCompiler { - private static final Log LOG = LogFactory.getLog(SpoofCompiler.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(SpoofCompiler.class); //internal configuration flags public static CompilerType JAVA_COMPILER = CompilerType.JANINO; @@ -539,21 +538,20 @@ public static ArrayList optimize(ArrayList roots, boolean recompile) } //explain debug output cplans or generated source code - if( LOG.isInfoEnabled() || DMLScript.EXPLAIN.isHopsType(recompile) ) { - LOG.info("Codegen EXPLAIN (generated cplan for HopID: " + cplan.getKey() + - ", line "+tmp.getValue().getBeginLine() + ", hash="+tmp.getValue().hashCode()+"):"); + if( DMLScript.EXPLAIN.isHopsType(recompile) ) { + LOG.info("Codegen EXPLAIN (generated cplan for HopID: {}, line {}, hash={}):", + cplan.getKey(), tmp.getValue().getBeginLine(), tmp.getValue().hashCode()); LOG.info(tmp.getValue().getClassname() + Explain.explainCPlan(cplan.getValue().getValue())); } - if( LOG.isInfoEnabled() || DMLScript.EXPLAIN.isRuntimeType(recompile) ) { - LOG.info("JAVA Codegen EXPLAIN (generated code for HopID: " + cplan.getKey() + - ", line "+tmp.getValue().getBeginLine() + ", hash="+tmp.getValue().hashCode()+"):"); + if( DMLScript.EXPLAIN.isRuntimeType(recompile) ) { + LOG.info("JAVA Codegen EXPLAIN (generated code for HopID: {}, line {}, hash={}):", + cplan.getKey(), tmp.getValue().getBeginLine(), tmp.getValue().hashCode()); LOG.info(CodegenUtils.printWithLineNumber(src)); - - if(API == GeneratorAPI.CUDA) { - LOG.info("CUDA Codegen EXPLAIN (generated code for HopID: " + cplan.getKey() + - ", line " + tmp.getValue().getBeginLine() + ", hash=" + tmp.getValue().hashCode() + 
"):"); + if(API == GeneratorAPI.CUDA) { + LOG.info("CUDA Codegen EXPLAIN (generated code for HopID: {}, line {}, hash={}):", + cplan.getKey(), tmp.getValue().getBeginLine(), tmp.getValue().hashCode()); LOG.info(CodegenUtils.printWithLineNumber(src_cuda)); } } diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/FederatedPSControlThread.java b/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/FederatedPSControlThread.java index 58cfa448f98..81449a70d41 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/FederatedPSControlThread.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/FederatedPSControlThread.java @@ -34,8 +34,7 @@ import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.api.DMLScript; import org.apache.sysds.parser.DataIdentifier; import org.apache.sysds.parser.Statement; @@ -77,7 +76,7 @@ public class FederatedPSControlThread extends PSWorker implements Callable { private static final long serialVersionUID = 6846648059569648791L; - protected static final Log LOG = LogFactory.getLog(ParamServer.class.getName()); + protected static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(ParamServer.class); private FederatedData _featuresData; private FederatedData _labelsData; @@ -147,11 +146,9 @@ public void setup(double weightingFactor) { if(_runtimeBalancing == PSRuntimeBalancing.BASELINE) _cycleStartAt0 = true; - if( LOG.isInfoEnabled() ) { - LOG.info("Setup config for worker " + this.getWorkerName()); - LOG.info("Batch size: " + _batchSize + " possible batches: " + _possibleBatchesPerLocalEpoch - + " batches to run: " + _numBatchesPerEpoch + " weighting factor: " + _weightingFactor); - } + LOG.info("Setup config for worker {}", this.getWorkerName()); + 
LOG.info("Batch size: {} possible batches: {} batches to run: {} weighting factor: {}", + _batchSize, _possibleBatchesPerLocalEpoch, _numBatchesPerEpoch, _weightingFactor); // serialize program // create program blocks for the instruction filtering diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java b/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java index 020ebc6d2d8..890f75bb0d2 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/paramserv/ParamServer.java @@ -227,8 +227,7 @@ protected synchronized void updateGlobalGradients(int workerID, ListObject gradi ((_freq == Statement.PSFrequency.EPOCH && ((float) ++_syncCounter % _numWorkers) == 0) || (_freq == Statement.PSFrequency.BATCH && ((float) ++_syncCounter / _numWorkers) % _numBatchesPerEpoch == 0)) || (_freq == Statement.PSFrequency.NBATCHES)) { - if(LOG.isInfoEnabled()) - LOG.info("[+] PARAMSERV: completed PSEUDO EPOCH (ASP) " + _epochCounter); + LOG.info("[+] PARAMSERV: completed PSEUDO EPOCH (ASP) {}", _epochCounter); time_epoch(); @@ -280,8 +279,7 @@ private void performGlobalGradientUpdate() { } if(finishedEpoch()) { - if(LOG.isInfoEnabled()) - LOG.info("[+] PARAMSERV: completed EPOCH " + _epochCounter); + LOG.info("[+] PARAMSERV: completed EPOCH {}", _epochCounter); time_epoch(); @@ -423,8 +421,7 @@ protected void updateAndBroadcastModel(ListObject new_model, Timing tAgg, boolea if(_numBatchesPerEpoch != -1 && (_freq == Statement.PSFrequency.EPOCH || (_freq == Statement.PSFrequency.BATCH && ++_syncCounter % _numBatchesPerEpoch == 0))) { - if(LOG.isInfoEnabled()) - LOG.info("[+] PARAMSERV: completed EPOCH " + _epochCounter); + LOG.info("[+] PARAMSERV: completed EPOCH {}", _epochCounter); time_epoch(); if(_validationPossible) { validate(); @@ -543,11 +540,10 @@ private void time_epoch() { double current_total_validation_time = 
ParamServStatistics.getValidationTime(); double time_to_epoch = current_total_execution_time - current_total_validation_time; - if (LOG.isInfoEnabled()) - if(_validationPossible) - LOG.info("[+] PARAMSERV: epoch timer (excl. validation): " + time_to_epoch / 1000 + " secs."); - else - LOG.info("[+] PARAMSERV: epoch timer: " + time_to_epoch / 1000 + " secs."); + if(_validationPossible) + LOG.info("[+] PARAMSERV: epoch timer (excl. validation): {} secs.", time_to_epoch / 1000); + else + LOG.info("[+] PARAMSERV: epoch timer: {} secs.", time_to_epoch / 1000); } } @@ -569,8 +565,7 @@ private void validate() { ParamservUtils.cleanupListObject(_ec, Statement.PS_MODEL); // Log validation results - if (LOG.isInfoEnabled()) - LOG.info("[+] PARAMSERV: validation-loss: " + loss + " validation-accuracy: " + accuracy); + LOG.info("[+] PARAMSERV: validation-loss: {} validation-accuracy: {}", loss, accuracy); if(tValidate != null) ParamServStatistics.accValidationTime((long) tValidate.stop()); diff --git a/src/main/java/org/apache/sysds/runtime/instructions/cp/CompressionCPInstruction.java b/src/main/java/org/apache/sysds/runtime/instructions/cp/CompressionCPInstruction.java index 52e0ad81540..b8c85c2af27 100644 --- a/src/main/java/org/apache/sysds/runtime/instructions/cp/CompressionCPInstruction.java +++ b/src/main/java/org/apache/sysds/runtime/instructions/cp/CompressionCPInstruction.java @@ -23,8 +23,7 @@ import java.util.List; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.hops.OptimizerUtils; import org.apache.sysds.runtime.compress.CompressedMatrixBlockFactory; import org.apache.sysds.runtime.compress.CompressionStatistics; @@ -39,7 +38,7 @@ import org.apache.sysds.runtime.matrix.operators.Operator; public class CompressionCPInstruction extends ComputationCPInstruction { - private static final Log LOG = 
LogFactory.getLog(CompressionCPInstruction.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(CompressionCPInstruction.class); private final int _singletonLookupID; private final int _numThreads; @@ -197,8 +196,7 @@ private void processMatrixBlockCompression(ExecutionContext ec, MatrixBlock in, if(LOG.isTraceEnabled()) LOG.trace(compResult.getRight()); MatrixBlock out = compResult.getLeft(); - if(LOG.isInfoEnabled()) - LOG.info("Compression output class: " + out.getClass().getSimpleName()); + LOG.info("Compression output class: {}", out.getClass().getSimpleName()); // Set output and release input ec.releaseMatrixInput(input1.getName()); ec.setMatrixOutput(output.getName(), out); @@ -216,8 +214,7 @@ private void processMatrixBlockQuantizationFusedCompression(ExecutionContext ec, if(LOG.isTraceEnabled()) LOG.trace(compResult.getRight()); MatrixBlock out = compResult.getLeft(); - if(LOG.isInfoEnabled()) - LOG.info("Compression output class: " + out.getClass().getSimpleName()); + LOG.info("Compression output class: {}", out.getClass().getSimpleName()); // Set output and release input ec.releaseMatrixInput(input1.getName()); ec.releaseMatrixInput(input2.getName()); @@ -229,8 +226,7 @@ private void processMatrixBlockQuantizationFusedCompression(ExecutionContext ec, if(LOG.isTraceEnabled()) LOG.trace(compResult.getRight()); MatrixBlock out = compResult.getLeft(); - if(LOG.isInfoEnabled()) - LOG.info("Compression output class: " + out.getClass().getSimpleName()); + LOG.info("Compression output class: {}", out.getClass().getSimpleName()); // Set output and release input ec.releaseMatrixInput(input1.getName()); if (input2.isMatrix()) { diff --git a/src/main/java/org/apache/sysds/runtime/instructions/cp/ParamservBuiltinCPInstruction.java b/src/main/java/org/apache/sysds/runtime/instructions/cp/ParamservBuiltinCPInstruction.java index be3bf9de11c..825d67aee0d 100644 --- 
a/src/main/java/org/apache/sysds/runtime/instructions/cp/ParamservBuiltinCPInstruction.java +++ b/src/main/java/org/apache/sysds/runtime/instructions/cp/ParamservBuiltinCPInstruction.java @@ -56,8 +56,6 @@ import java.util.stream.IntStream; import org.apache.commons.lang3.concurrent.BasicThreadFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.spark.network.server.TransportServer; import org.apache.spark.util.LongAccumulator; import org.apache.sysds.api.DMLScript; @@ -92,12 +90,13 @@ import org.apache.sysds.runtime.controlprogram.paramserv.rpc.PSRpcFactory; import org.apache.sysds.runtime.matrix.operators.Operator; import org.apache.sysds.runtime.util.ProgramConverter; +import org.apache.sysds.utils.ParameterizedLogger; import org.apache.sysds.utils.stats.InfrastructureAnalyzer; import org.apache.sysds.utils.stats.ParamServStatistics; import org.apache.sysds.utils.stats.Timing; public class ParamservBuiltinCPInstruction extends ParameterizedBuiltinCPInstruction { - private static final Log LOG = LogFactory.getLog(ParamservBuiltinCPInstruction.class.getName()); + private static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(ParamservBuiltinCPInstruction.class); public static final int DEFAULT_BATCH_SIZE = 64; private static final PSFrequency DEFAULT_UPDATE_FREQUENCY = PSFrequency.EPOCH; @@ -159,14 +158,12 @@ private void runFederated(ExecutionContext ec) { int nbatches = getNbatches(); int numBackupWorkers = getNumBackupWorkers(); - if( LOG.isInfoEnabled() ) { - LOG.info("[+] Update Type: " + updateType); - LOG.info("[+] Frequency: " + freq); - LOG.info("[+] Data Partitioning: " + federatedPSScheme); - LOG.info("[+] Runtime Balancing: " + runtimeBalancing); - LOG.info("[+] Weighting: " + weighting); - LOG.info("[+] Seed: " + seed); - } + LOG.info("[+] Update Type: {}", updateType); + LOG.info("[+] Frequency: {}", freq); + LOG.info("[+] Data Partitioning: {}", federatedPSScheme); + 
LOG.info("[+] Runtime Balancing: {}", runtimeBalancing); + LOG.info("[+] Weighting: {}", weighting); + LOG.info("[+] Seed: {}", seed); if (tSetup != null) ParamServStatistics.accSetupTime((long) tSetup.stop()); @@ -612,10 +609,8 @@ private void partitionLocally(PSScheme scheme, ExecutionContext ec, List pfs = result.pFeatures; List pls = result.pLabels; if (pfs.size() < workers.size()) { - if (LOG.isWarnEnabled()) { - LOG.warn(String.format("There is only %d batches of data but has %d workers. " - + "Hence, reset the number of workers with %d.", pfs.size(), workers.size(), pfs.size())); - } + LOG.warn("There is only {} batches of data but has {} workers. " + + "Hence, reset the number of workers with {}.", pfs.size(), workers.size(), pfs.size()); if (getUpdateType().isSBP() && pfs.size() <= getNumBackupWorkers()) { throw new DMLRuntimeException( "Effective number of workers is smaller or equal to the number of backup workers." From 5852fc8ca7954c412428a77e0656d6455e06d2a2 Mon Sep 17 00:00:00 2001 From: e-strauss Date: Sun, 22 Feb 2026 14:45:37 +0100 Subject: [PATCH 3/5] reverted changes in unused files --- .../org/apache/sysds/hops/cost/CostEstimator.java | 15 +++++++++------ .../hops/cost/CostEstimatorStaticRuntime.java | 14 ++++++++------ 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java b/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java index d4d6ba6c1c9..ffd3ff1bb82 100644 --- a/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java +++ b/src/main/java/org/apache/sysds/hops/cost/CostEstimator.java @@ -19,6 +19,8 @@ package org.apache.sysds.hops.cost; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.sysds.common.Opcodes; import org.apache.sysds.conf.ConfigurationManager; import org.apache.sysds.hops.OptimizerUtils; @@ -52,7 +54,6 @@ import org.apache.sysds.runtime.matrix.operators.CMOperator; import 
org.apache.sysds.runtime.matrix.operators.CMOperator.AggregateOperationTypes; import org.apache.sysds.runtime.meta.DataCharacteristics; -import org.apache.sysds.utils.ParameterizedLogger; import java.util.ArrayList; import java.util.HashMap; @@ -60,7 +61,7 @@ public abstract class CostEstimator { - protected static final ParameterizedLogger LOG = ParameterizedLogger.getLogger(CostEstimator.class); + protected static final Log LOG = LogFactory.getLog(CostEstimator.class.getName()); private static final int DEFAULT_NUMITER = 15; @@ -154,16 +155,18 @@ else if( pb instanceof BasicProgramBlock ) //awareness of recursive functions, missing program if( !memoFunc.contains(fkey) && pb.getProgram()!=null ) { - LOG.debug("Begin Function {}", fkey); - + if(LOG.isDebugEnabled()) + LOG.debug("Begin Function "+fkey); + memoFunc.add(fkey); Program prog = pb.getProgram(); FunctionProgramBlock fpb = prog.getFunctionProgramBlock( finst.getNamespace(), finst.getFunctionName()); ret += rGetTimeEstimate(fpb, stats, memoFunc, recursive); memoFunc.remove(fkey); - - LOG.debug("End Function {}", fkey); + + if(LOG.isDebugEnabled()) + LOG.debug("End Function "+fkey); } } } diff --git a/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java b/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java index 80ca9c20c35..da2788e8c6c 100644 --- a/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java +++ b/src/main/java/org/apache/sysds/hops/cost/CostEstimatorStaticRuntime.java @@ -85,8 +85,9 @@ protected double getCPInstTimeEstimate( Instruction inst, VarStats[] vs, String[ } vs[1]._inmem = true; } - if(ltime != 0) - LOG.debug("Cost[{} - read] = {}", cpinst.getOpcode(), ltime); + if( LOG.isDebugEnabled() && ltime!=0 ) { + LOG.debug("Cost["+cpinst.getOpcode()+" - read] = "+ltime); + } //exec time CP instruction String opcode = (cpinst instanceof FunctionCallCPInstruction) ? 
InstructionUtils.getOpCode(cpinst.toString()) : cpinst.getOpcode(); @@ -98,8 +99,9 @@ protected double getCPInstTimeEstimate( Instruction inst, VarStats[] vs, String[ if( inst instanceof VariableCPInstruction && ((VariableCPInstruction)inst).getOpcode().equals(Opcodes.WRITE.toString()) ) wtime += getHDFSWriteTime(vs[2].getRows(), vs[2].getCols(), vs[2].getSparsity(), ((VariableCPInstruction)inst).getInput3().getName() ); - if(wtime != 0) - LOG.debug("Cost[{} - write] = {}", cpinst.getOpcode(), wtime); + if( LOG.isDebugEnabled() && wtime!=0 ) { + LOG.debug("Cost["+cpinst.getOpcode()+" - write] = "+wtime); + } //total costs double costs = ltime + etime + wtime; @@ -259,8 +261,8 @@ private static double getInstTimeEstimate( String opcode, boolean inMR, long d1m double nflops = getNFLOP(opcode, inMR, d1m, d1n, d1s, d2m, d2n, d2s, d3m, d3n, d3s, args); double time = nflops / DEFAULT_FLOPS; - LOG.debug("Cost[{}] = {}s, {} flops ({},{},{},{},{},{},{},{},{}).", - opcode, time, nflops, d1m, d1n, d1s, d2m, d2n, d2s, d3m, d3n, d3s); + if( LOG.isDebugEnabled() ) + LOG.debug("Cost["+opcode+"] = "+time+"s, "+nflops+" flops ("+d1m+","+d1n+","+d1s+","+d2m+","+d2n+","+d2s+","+d3m+","+d3n+","+d3s+")."); return time; } From 9f29ea0d4307725d5f85ffa4afd09376986d7d3b Mon Sep 17 00:00:00 2001 From: e-strauss Date: Sun, 22 Feb 2026 15:02:05 +0100 Subject: [PATCH 4/5] minor --- .../sysds/runtime/controlprogram/caching/CacheableData.java | 6 ++++-- .../controlprogram/federated/FederatedWorkerHandler.java | 3 +-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java b/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java index 8b00aba0380..ba72e2fb101 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java @@ -1071,7 +1071,8 @@ protected void 
restoreBlobIntoMemory() { String cacheFilePathAndName = getCacheFilePathAndName(); long begin = LOG.currentTimeMillisIfTraceEnabled(); - LOG.trace ("CACHE: Restoring matrix... {} HDFS path: {} Restore from path: {}", hashCode(), _hdfsFileName, cacheFilePathAndName); + LOG.trace ("CACHE: Restoring matrix... {} HDFS path: {} Restore from path: {}", + hashCode(), _hdfsFileName, cacheFilePathAndName); if (_data != null) throw new DMLRuntimeException(cacheFilePathAndName + " : Cannot restore on top of existing in-memory data."); @@ -1087,7 +1088,8 @@ protected void restoreBlobIntoMemory() { if (_data == null) throw new DMLRuntimeException (cacheFilePathAndName + " : Restore failed."); - LOG.trace("Restoring matrix - COMPLETED ... {} msec.", LOG.currentTimeMillisIfTraceEnabled()-begin); + LOG.trace("Restoring matrix - COMPLETED ... {} msec.", + LOG.currentTimeMillisIfTraceEnabled()-begin); } protected abstract T readBlobFromCache(String fname) diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java b/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java index 44db0435d1e..877dea524c3 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedWorkerHandler.java @@ -290,8 +290,7 @@ private static void printStatistics() { private static void logRequests(FederatedRequest request, int nrRequest, int totalRequests) { LOG.debug("Executing command {}/{}: {}", nrRequest + 1, totalRequests, request.getType().name()); - if(LOG.isTraceEnabled()) - LOG.trace("full command: " + request); + LOG.trace("full command: {}", request); } private FederatedResponse executeCommand(FederatedRequest request, ExecutionContextMap ecm, EventStageModel eventStage) From 506b17b9ea2f6f531d07db1bed710e8d88f7ed92 Mon Sep 17 00:00:00 2001 From: e-strauss Date: Sun, 22 Feb 2026 15:23:46 +0100 
Subject: [PATCH 5/5] code style --- .../sysds/runtime/controlprogram/caching/CacheableData.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java b/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java index ba72e2fb101..1c19a151d68 100644 --- a/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java +++ b/src/main/java/org/apache/sysds/runtime/controlprogram/caching/CacheableData.java @@ -1072,7 +1072,7 @@ protected void restoreBlobIntoMemory() { long begin = LOG.currentTimeMillisIfTraceEnabled(); LOG.trace ("CACHE: Restoring matrix... {} HDFS path: {} Restore from path: {}", - hashCode(), _hdfsFileName, cacheFilePathAndName); + hashCode(), _hdfsFileName, cacheFilePathAndName); if (_data != null) throw new DMLRuntimeException(cacheFilePathAndName + " : Cannot restore on top of existing in-memory data."); @@ -1089,7 +1089,7 @@ protected void restoreBlobIntoMemory() { throw new DMLRuntimeException (cacheFilePathAndName + " : Restore failed."); LOG.trace("Restoring matrix - COMPLETED ... {} msec.", - LOG.currentTimeMillisIfTraceEnabled()-begin); + LOG.currentTimeMillisIfTraceEnabled()-begin); } protected abstract T readBlobFromCache(String fname)