Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -315,17 +315,6 @@ public ReadOnlyMemChunk query(
// get sorted tv list is synchronized so different query can get right sorted list reference
TVList vectorTvListCopy = vectorMemChunk.getSortedTvListForQuery(columns);
int curSize = vectorTvListCopy.size();
// return normal ReadOnlyMemChunk for query one measurement in vector
if (columns.size() == 1) {
return new ReadOnlyMemChunk(
measurementIdList.get(0),
partialVectorSchema.getValueTSDataTypeList().get(0),
partialVectorSchema.getValueTSEncodingList().get(0),
vectorTvListCopy,
null,
curSize,
deletionList);
}
return new ReadOnlyMemChunk(partialVectorSchema, vectorTvListCopy, curSize, deletionList);
} else {
if (!checkPath(deviceId, measurement)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -186,41 +186,10 @@ private void initVectorChunkMeta(IMeasurementSchema schema)
while (iterator.hasNextTimeValuePair()) {
TimeValuePair timeValuePair = iterator.nextTimeValuePair();
timeStatistics.update(timeValuePair.getTimestamp());
for (int i = 0; i < schema.getValueTSDataTypeList().size(); i++) {
if (timeValuePair.getValue().getVector()[i] == null) {
continue;
}
switch (schema.getValueTSDataTypeList().get(i)) {
case BOOLEAN:
valueStatistics[i].update(
timeValuePair.getTimestamp(),
timeValuePair.getValue().getVector()[i].getBoolean());
break;
case TEXT:
valueStatistics[i].update(
timeValuePair.getTimestamp(),
timeValuePair.getValue().getVector()[i].getBinary());
break;
case FLOAT:
valueStatistics[i].update(
timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getFloat());
break;
case INT32:
valueStatistics[i].update(
timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getInt());
break;
case INT64:
valueStatistics[i].update(
timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getLong());
break;
case DOUBLE:
valueStatistics[i].update(
timeValuePair.getTimestamp(),
timeValuePair.getValue().getVector()[i].getDouble());
break;
default:
throw new QueryProcessException("Unsupported data type:" + dataType);
}
if (schema.getValueTSDataTypeList().size() == 1) {
updateValueStatisticsForSingleColumn(schema, valueStatistics, timeValuePair);
} else {
updateValueStatistics(schema, valueStatistics, timeValuePair);
}
}
}
Expand All @@ -235,6 +204,76 @@ private void initVectorChunkMeta(IMeasurementSchema schema)
cachedMetaData = vectorChunkMetadata;
}

/**
 * Updates the statistics of the single value column from a scalar TimeValuePair.
 *
 * <p>When querying one measurement inside a Vector, the TimeValuePair is not a
 * vector type, so the value is read directly (getBoolean/getInt/...) instead of
 * through {@code getVector()}.
 *
 * @param schema vector schema; only the first value data type is consulted
 * @param valueStatistics per-column statistics; only index 0 is updated
 * @param timeValuePair scalar time/value pair folded into the statistics
 * @throws QueryProcessException if the column's data type is not supported
 */
private void updateValueStatisticsForSingleColumn(
    IMeasurementSchema schema, Statistics[] valueStatistics, TimeValuePair timeValuePair)
    throws QueryProcessException {
  switch (schema.getValueTSDataTypeList().get(0)) {
    case BOOLEAN:
      valueStatistics[0].update(
          timeValuePair.getTimestamp(), timeValuePair.getValue().getBoolean());
      break;
    case TEXT:
      valueStatistics[0].update(
          timeValuePair.getTimestamp(), timeValuePair.getValue().getBinary());
      break;
    case FLOAT:
      valueStatistics[0].update(
          timeValuePair.getTimestamp(), timeValuePair.getValue().getFloat());
      break;
    case INT32:
      valueStatistics[0].update(timeValuePair.getTimestamp(), timeValuePair.getValue().getInt());
      break;
    case INT64:
      valueStatistics[0].update(timeValuePair.getTimestamp(), timeValuePair.getValue().getLong());
      break;
    case DOUBLE:
      valueStatistics[0].update(
          timeValuePair.getTimestamp(), timeValuePair.getValue().getDouble());
      break;
    default:
      // Report the actual unsupported column type, not the enclosing chunk's
      // dataType field (which is the vector's own type, not the column's).
      throw new QueryProcessException(
          "Unsupported data type:" + schema.getValueTSDataTypeList().get(0));
  }
}

/**
 * Updates the per-column statistics from a vector TimeValuePair.
 *
 * <p>Columns whose slot in the vector is {@code null} (no value at this
 * timestamp) are skipped so their statistics stay untouched.
 *
 * @param schema vector schema supplying the data type of each value column
 * @param valueStatistics per-column statistics, parallel to the schema's type list
 * @param timeValuePair vector time/value pair folded into the statistics
 * @throws QueryProcessException if any column's data type is not supported
 */
private void updateValueStatistics(
    IMeasurementSchema schema, Statistics[] valueStatistics, TimeValuePair timeValuePair)
    throws QueryProcessException {
  for (int i = 0; i < schema.getValueTSDataTypeList().size(); i++) {
    // Skip columns that have no value at this timestamp.
    if (timeValuePair.getValue().getVector()[i] == null) {
      continue;
    }
    switch (schema.getValueTSDataTypeList().get(i)) {
      case BOOLEAN:
        valueStatistics[i].update(
            timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getBoolean());
        break;
      case TEXT:
        valueStatistics[i].update(
            timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getBinary());
        break;
      case FLOAT:
        valueStatistics[i].update(
            timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getFloat());
        break;
      case INT32:
        valueStatistics[i].update(
            timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getInt());
        break;
      case INT64:
        valueStatistics[i].update(
            timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getLong());
        break;
      case DOUBLE:
        valueStatistics[i].update(
            timeValuePair.getTimestamp(), timeValuePair.getValue().getVector()[i].getDouble());
        break;
      default:
        // Report the actual unsupported column type, not the enclosing chunk's
        // dataType field (which does not identify the offending column).
        throw new QueryProcessException(
            "Unsupported data type:" + schema.getValueTSDataTypeList().get(i));
    }
  }
}

/** Returns the data type of this chunk (for a vector chunk, the vector's own type). */
public TSDataType getDataType() {
return dataType;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
package org.apache.iotdb.db.query.reader.chunk;

import org.apache.iotdb.tsfile.file.metadata.IChunkMetadata;
import org.apache.iotdb.tsfile.file.metadata.VectorChunkMetadata;
import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
import org.apache.iotdb.tsfile.read.TimeValuePair;
import org.apache.iotdb.tsfile.read.common.BatchData;
Expand All @@ -45,8 +47,15 @@ public MemPageReader(

@Override
public BatchData getAllSatisfiedPageData(boolean ascending) throws IOException {
BatchData batchData =
BatchDataFactory.createBatchData(chunkMetadata.getDataType(), ascending, false);
TSDataType dataType;
if (chunkMetadata instanceof VectorChunkMetadata
&& ((VectorChunkMetadata) chunkMetadata).getValueChunkMetadataList().size() == 1) {
dataType =
((VectorChunkMetadata) chunkMetadata).getValueChunkMetadataList().get(0).getDataType();
} else {
dataType = chunkMetadata.getDataType();
}
BatchData batchData = BatchDataFactory.createBatchData(dataType, ascending, false);
while (timeValuePairIterator.hasNextTimeValuePair()) {
TimeValuePair timeValuePair = timeValuePairIterator.nextTimeValuePair();
if (valueFilter == null
Expand Down