blob: b3d9fc77addc51af4646e9e10cf2ff6e1c08e1bd [file] [log] [blame]
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.compress.colgroup;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import org.apache.sysds.runtime.compress.CompressionSettings;
import org.apache.sysds.runtime.compress.utils.AbstractBitmap;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.matrix.operators.ScalarOperator;
/**
 * Class to encapsulate information about a column group that is encoded with dense dictionary encoding (DDC) using 2
 * byte codes (char), i.e., supporting up to 65536 distinct value tuples.
 */
public class ColGroupDDC2 extends ColGroupDDC {
	private static final long serialVersionUID = -3995768285207071013L;

	/** Per-row dictionary codes; one char (2 bytes) per row. */
	private char[] _data;

	protected ColGroupDDC2() {
		super();
	}

	/**
	 * Main constructor: encodes the uncompressed bitmap into per-row 2-byte dictionary codes.
	 *
	 * @param colIndices indices of the columns contained in this group
	 * @param numRows    number of rows in the column group
	 * @param ubm        uncompressed bitmap with distinct value tuples and their row offsets
	 * @param cs         compression settings
	 */
	protected ColGroupDDC2(int[] colIndices, int numRows, AbstractBitmap ubm, CompressionSettings cs) {
		super(colIndices, numRows, ubm, cs);
		_data = new char[numRows];

		int numVals = ubm.getNumValues();
		int numCols = ubm.getNumColumns();

		// materialize zero values, if necessary (rows without any offset are implicit zeros)
		if(ubm.getNumOffsets() < (long) numRows * numCols) {
			int zeroIx = containsAllZeroValue();
			if(zeroIx < 0) {
				// append an all-zero tuple to the dictionary and use its code as default
				zeroIx = numVals;
				_dict = IDictionary.materializeZeroValue(_dict, numCols);
			}
			Arrays.fill(_data, (char) zeroIx);
		}

		// iterate over values and write dictionary codes for all covered rows
		for(int i = 0; i < numVals; i++) {
			int[] tmpList = ubm.getOffsetsList(i).extractValues();
			int tmpListSize = ubm.getNumOffsets(i);
			for(int k = 0; k < tmpListSize; k++)
				_data[tmpList[k]] = (char) i;
		}
	}

	/**
	 * Internal constructor, to be used when copying a DDC ColGroup, and for scalar operations.
	 *
	 * @param colIndices indices of the columns contained in this group
	 * @param numRows    number of rows
	 * @param values     dictionary values
	 * @param data       per-row dictionary codes (stored by reference, not copied)
	 */
	protected ColGroupDDC2(int[] colIndices, int numRows, double[] values, char[] data) {
		super(colIndices, numRows, values);
		_data = data;
	}

	@Override
	protected ColGroupType getColGroupType() {
		// bug fix: previously returned DDC1, mislabeling this 2-byte-code group;
		// this class serializes chars (see write/readFields) and sizes via
		// estimateInMemorySizeDDC2, so DDC2 is the correct tag
		return ColGroupType.DDC2;
	}

	/**
	 * Getter method to get the data, contained in The DDC ColGroup.
	 *
	 * Not safe if modifications is made to the byte list.
	 *
	 * @return The contained data
	 */
	public char[] getData() {
		return _data;
	}

	@Override
	protected int getIndex(int r) {
		return _data[r];
	}

	@Override
	protected int getIndex(int r, int colIx) {
		return _data[r] * getNumCols() + colIx;
	}

	@Override
	protected double getData(int r, double[] dictionary) {
		// note: reads from _dict, ignoring the passed dictionary argument
		return _dict.getValue(_data[r]);
	}

	@Override
	protected double getData(int r, int colIx, double[] dictionary) {
		// note: reads from _dict, ignoring the passed dictionary argument
		return _dict.getValue(_data[r] * getNumCols() + colIx);
	}

	@Override
	protected void setData(int r, int code) {
		_data[r] = (char) code;
	}

	@Override
	protected int getCode(int r) {
		return _data[r];
	}

	@Override
	public void write(DataOutput out) throws IOException {
		super.write(out);
		// write data; length is implied by _numRows (see readFields), so it is not serialized
		for(int i = 0; i < _numRows; i++)
			out.writeChar(_data[i]);
	}

	@Override
	public void readFields(DataInput in) throws IOException {
		super.readFields(in);
		// read data, sized by the _numRows deserialized in super.readFields
		_data = new char[_numRows];
		for(int i = 0; i < _numRows; i++)
			_data[i] = in.readChar();
	}

	@Override
	public long getExactSizeOnDisk() {
		// 2 bytes per char code; widen before multiply to avoid int overflow for huge groups
		return super.getExactSizeOnDisk() + 2L * _data.length;
	}

	@Override
	public long estimateInMemorySize() {
		return ColGroupSizes.estimateInMemorySizeDDC2(getNumCols(), getNumValues(), _data.length, isLossy());
	}

	@Override
	public void decompressToBlock(MatrixBlock target, int rl, int ru) {
		int ncol = getNumCols();
		double[] values = getValues();
		for(int i = rl; i < ru; i++)
			for(int j = 0; j < ncol; j++)
				target.appendValue(i, _colIndexes[j], values[_data[i] * ncol + j]);
		// note: append ok because final sort per row
	}

	@Override
	public void decompressToBlock(MatrixBlock target, int colpos) {
		int nrow = getNumRows();
		int ncol = getNumCols();
		double[] c = target.getDenseBlockValues();
		double[] values = getValues();
		int nnz = 0;
		for(int i = 0; i < nrow; i++)
			nnz += ((c[i] = values[_data[i] * ncol + colpos]) != 0) ? 1 : 0;
		target.setNonZeros(nnz);
	}

	@Override
	public int[] getCounts(int[] counts) {
		return getCounts(0, getNumRows(), counts);
	}

	@Override
	public int[] getCounts(int rl, int ru, int[] counts) {
		final int numVals = getNumValues();
		Arrays.fill(counts, 0, numVals, 0);
		for(int i = rl; i < ru; i++)
			counts[_data[i]]++;
		return counts;
	}

	@Override
	public void countNonZerosPerRow(int[] rnnz, int rl, int ru) {
		final int ncol = getNumCols();
		final int numVals = getNumValues();
		final double[] values = getValues();
		// pre-aggregate nnz per value tuple
		int[] counts = new int[numVals];
		for(int k = 0, valOff = 0; k < numVals; k++, valOff += ncol)
			for(int j = 0; j < ncol; j++)
				counts[k] += (values[valOff + j] != 0) ? 1 : 0;
		// scan data and add counts to output rows
		for(int i = rl; i < ru; i++)
			rnnz[i - rl] += counts[_data[i]];
	}

	@Override
	public void rightMultByVector(MatrixBlock vector, MatrixBlock result, int rl, int ru) {
		double[] b = ColGroupConverter.getDenseVector(vector);
		double[] c = result.getDenseBlockValues();
		final int numCols = getNumCols();
		final int numVals = getNumValues();
		// prepare reduced rhs w/ relevant values
		double[] sb = new double[numCols];
		for(int j = 0; j < numCols; j++) {
			sb[j] = b[_colIndexes[j]];
		}
		// pre-aggregate all distinct values
		double[] vals = preaggValues(numVals, sb);
		// iterate over codes and add to output
		for(int i = rl; i < ru; i++)
			c[i] += vals[_data[i]];
	}

	@Override
	public void leftMultByRowVector(MatrixBlock vector, MatrixBlock result) {
		double[] a = ColGroupConverter.getDenseVector(vector);
		double[] c = result.getDenseBlockValues();
		final int nrow = getNumRows();
		final int ncol = getNumCols();
		final int numVals = getNumValues();
		if(8 * numVals < getNumRows()) {
			// iterate over codes and pre-aggregate inputs per code;
			// temporary array also avoids false sharing in multi-threaded environments
			double[] vals = allocDVector(numVals, true);
			for(int i = 0; i < nrow; i++) {
				vals[_data[i]] += a[i];
			}
			// post-scaling of pre-aggregate with distinct values
			postScaling(vals, c);
		}
		else // general case
		{
			// iterate over codes, compute all, and add to the result
			double[] values = getValues();
			for(int i = 0; i < nrow; i++) {
				double aval = a[i];
				if(aval != 0)
					for(int j = 0, valOff = _data[i] * ncol; j < ncol; j++)
						c[_colIndexes[j]] += aval * values[valOff + j];
			}
		}
	}

	@Override
	public ColGroup scalarOperation(ScalarOperator op) {
		// fast path: sparse-safe and -unsafe operations;
		// as zeros are represented in the dictionary, it is sufficient to simply apply the scalar op
		return new ColGroupDDC2(_colIndexes, _numRows, applyScalarOp(op), _data);
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append(super.toString());
		sb.append(" DataLength: ").append(_data.length);
		return sb.toString();
	}
}