Draft: Add support for histogram field and aggregations #20373
Draft: deshsidd wants to merge 2 commits into opensearch-project:main from deshsidd:sid/histogram-field
Changes from all commits (2 commits)
server/src/main/java/org/opensearch/index/fielddata/HistogramIndexFieldData.java (new file, 62 additions)
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.index.fielddata;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.opensearch.common.util.BigArrays;
import org.opensearch.search.DocValueFormat;
import org.opensearch.search.MultiValueMode;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.aggregations.support.ValuesSourceType;
import org.opensearch.search.sort.BucketedSort;
import org.opensearch.search.sort.SortOrder;

public class HistogramIndexFieldData implements IndexFieldData<HistogramLeafFieldData> {
    private final String fieldName;

    public HistogramIndexFieldData(String fieldName) {
        this.fieldName = fieldName;
    }

    @Override
    public String getFieldName() {
        return fieldName;
    }

    @Override
    public ValuesSourceType getValuesSourceType() {
        return CoreValuesSourceType.HISTOGRAM;
    }

    @Override
    public HistogramLeafFieldData load(LeafReaderContext context) {
        return new HistogramLeafFieldData(context.reader(), fieldName);
    }

    @Override
    public HistogramLeafFieldData loadDirect(LeafReaderContext context) throws Exception {
        return load(context);
    }

    @Override
    public SortField wideSortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
        return IndexFieldData.super.wideSortField(missingValue, sortMode, nested, reverse);
    }

    @Override
    public BucketedSort newBucketedSort(
        BigArrays bigArrays,
        Object missingValue,
        MultiValueMode sortMode,
        XFieldComparatorSource.Nested nested,
        SortOrder sortOrder,
        DocValueFormat format,
        int bucketSize,
        BucketedSort.ExtraData extra
    ) {
        return null;
    }

    @Override
    public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
        throw new UnsupportedOperationException("Histogram fields do not support sorting");
    }
}
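Note that this file references two supporting types that are not part of this changeset: HistogramValues (used unqualified, so presumably also in org.opensearch.index.fielddata) and Histogram (imported from org.opensearch.indices.fielddata). A minimal sketch of what they might look like, inferred only from how they are used in this diff; the real definitions in the PR may differ:

import java.io.IOException;

// Sketch only: inferred shape of the per-document histogram doc-values iterator.
abstract class HistogramValues {
    /** Advance to the given doc id; true if the document has a histogram. */
    public abstract boolean advanceExact(int doc) throws IOException;

    /** The decoded histogram for the current document. */
    public abstract Histogram histogram() throws IOException;
}

// Sketch only: inferred shape of the decoded histogram, parallel arrays of
// bucket values and their counts.
class Histogram {
    private final double[] values;
    private final long[] counts;

    Histogram(double[] values, long[] counts) {
        this.values = values;
        this.counts = counts;
    }

    public double[] getValues() {
        return values;
    }

    public long[] getCounts() {
        return counts;
    }
}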
server/src/main/java/org/opensearch/index/fielddata/HistogramLeafFieldData.java (new file, 76 additions)
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.index.fielddata;

import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.util.BytesRef;
import org.opensearch.indices.fielddata.Histogram;

import java.io.IOException;
import java.nio.ByteBuffer;

public class HistogramLeafFieldData implements LeafFieldData {
    private final LeafReader reader;
    private final String fieldName;

    public HistogramLeafFieldData(LeafReader reader, String fieldName) {
        this.reader = reader;
        this.fieldName = fieldName;
    }

    public HistogramValues getHistogramValues() throws IOException {
        final BinaryDocValues values = DocValues.getBinary(reader, fieldName);
        return new HistogramValues() {
            @Override
            public boolean advanceExact(int doc) throws IOException {
                return values.advanceExact(doc);
            }

            @Override
            public Histogram histogram() throws IOException {
                BytesRef bytesRef = values.binaryValue();
                return decodeHistogram(bytesRef);
            }
        };
    }

    private Histogram decodeHistogram(BytesRef bytesRef) {
        ByteBuffer buffer = ByteBuffer.wrap(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        int size = buffer.getInt();
        double[] values = new double[size];
        long[] counts = new long[size];

        for (int i = 0; i < size; i++) {
            values[i] = buffer.getDouble();
            counts[i] = buffer.getLong();
        }

        return new Histogram(values, counts);
    }

    @Override
    public void close() {}

    @Override
    public ScriptDocValues<?> getScriptValues() {
        return null;
    }

    @Override
    public SortedBinaryDocValues getBytesValues() {
        return null;
    }

    @Override
    public long ramBytesUsed() {
        return 0;
    }
}
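decodeHistogram assumes a fixed binary layout: a 4-byte int bucket count followed by one (double value, long count) pair per bucket. The mapper-side encoder is not part of this diff; the following is only an illustrative sketch of that assumed layout, not the PR's actual encoding code:

import java.nio.ByteBuffer;

import org.apache.lucene.util.BytesRef;

// Sketch only: produces the byte layout that decodeHistogram above expects.
// ByteBuffer uses big-endian order by default on both sides, so no explicit
// order needs to be set for the round trip to match.
final class HistogramEncoderSketch {
    static BytesRef encode(double[] values, long[] counts) {
        assert values.length == counts.length;
        // 4 bytes for the bucket count, then 8 + 8 bytes per bucket.
        ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES + values.length * (Double.BYTES + Long.BYTES));
        buffer.putInt(values.length);
        for (int i = 0; i < values.length; i++) {
            buffer.putDouble(values[i]);
            buffer.putLong(counts[i]);
        }
        return new BytesRef(buffer.array());
    }
}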
server/src/main/java/org/opensearch/index/fielddata/HistogramValuesSource.java (new file, 134 additions)
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.index.fielddata;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.BytesRef;
import org.opensearch.indices.fielddata.Histogram;
import org.opensearch.search.aggregations.support.ValuesSource;

import java.io.IOException;
import java.nio.ByteBuffer;

public class HistogramValuesSource extends ValuesSource.Numeric {
    private final HistogramIndexFieldData indexFieldData;

    public HistogramValuesSource(HistogramIndexFieldData indexFieldData) {
        this.indexFieldData = indexFieldData;
    }

    public HistogramIndexFieldData getHistogramFieldData() {
        return indexFieldData;
    }

    @Override
    public boolean isFloatingPoint() {
        return true;
    }

    @Override
    public boolean isBigInteger() {
        return false;
    }

    @Override
    public SortedNumericDocValues longValues(LeafReaderContext context) throws IOException {
        throw new UnsupportedOperationException("Histogram fields only support double values");
    }

    @Override
    public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException {
        final HistogramLeafFieldData leafFieldData = indexFieldData.load(context);
        final HistogramValues histogramValues = leafFieldData.getHistogramValues();

        return new SortedNumericDoubleValues() {
            private double[] currentValues;
            private int currentValueIndex;

            @Override
            public boolean advanceExact(int doc) throws IOException {
                if (histogramValues.advanceExact(doc)) {
                    Histogram histogram = histogramValues.histogram();
                    currentValues = histogram.getValues();
                    currentValueIndex = 0;
                    return currentValues.length > 0;
                }
                currentValues = null;
                return false;
            }

            @Override
            public double nextValue() throws IOException {
                if (currentValues == null || currentValueIndex >= currentValues.length) {
                    throw new IllegalStateException("Cannot call nextValue() when there are no more values");
                }
                return currentValues[currentValueIndex++];
            }

            @Override
            public int docValueCount() {
                return currentValues == null ? 0 : currentValues.length;
            }
        };
    }

    /**
     * Returns the counts values for the histogram buckets as doubles.
     * These represent the frequency/count per bucket.
     */
    public SortedNumericDoubleValues getCounts(LeafReaderContext context) throws IOException {
        final HistogramLeafFieldData leafFieldData = indexFieldData.load(context);
        final HistogramValues histogramValues = leafFieldData.getHistogramValues();

        return new SortedNumericDoubleValues() {
            private long[] currentCounts;
            private int currentIndex;

            @Override
            public boolean advanceExact(int doc) throws IOException {
                if (histogramValues.advanceExact(doc)) {
                    Histogram histogram = histogramValues.histogram();
                    currentCounts = histogram.getCounts();
                    currentIndex = 0;
                    return currentCounts.length > 0;
                }
                currentCounts = null;
                return false;
            }

            @Override
            public double nextValue() throws IOException {
                if (currentCounts == null || currentIndex >= currentCounts.length) {
                    throw new IllegalStateException("Cannot call nextValue() when there are no more count values");
                }
                return (double) currentCounts[currentIndex++];
            }

            @Override
            public int docValueCount() {
                return currentCounts == null ? 0 : currentCounts.length;
            }
        };
    }

    private double decodeMin(BytesRef bytesRef) {
        ByteBuffer buffer = ByteBuffer.wrap(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        int size = buffer.getInt();
        if (size > 0) {
            return buffer.getDouble();
        }
        return Double.NaN;
    }

    @Override
    public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
        return null;
    }
}
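For context, a hedged sketch of how an aggregator might consume this values source on a per-segment basis, iterating bucket values and their counts in parallel. The consumer class here is hypothetical and not part of the PR; only doubleValues and getCounts come from the diff above:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.index.fielddata.HistogramValuesSource;
import org.opensearch.index.fielddata.SortedNumericDoubleValues;

// Sketch only: hypothetical per-segment consumption of HistogramValuesSource.
// `source`, `ctx`, and `doc` are assumed to be supplied by the aggregation framework.
final class HistogramConsumerSketch {
    static double weightedSum(HistogramValuesSource source, LeafReaderContext ctx, int doc) throws IOException {
        SortedNumericDoubleValues values = source.doubleValues(ctx);
        SortedNumericDoubleValues counts = source.getCounts(ctx);
        double sum = 0;
        if (values.advanceExact(doc) && counts.advanceExact(doc)) {
            int buckets = values.docValueCount();
            for (int i = 0; i < buckets; i++) {
                // Bucket value and its count are read in lockstep from the two iterators.
                sum += values.nextValue() * counts.nextValue();
            }
        }
        return sum;
    }
}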
Review comment: Update the javadoc to reflect the new return type. The javadoc still references HistogramValue, but the method now returns Histogram; update the comment to match the actual return type.
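A possible wording for the corrected comment, assuming the method in question is HistogramValues#histogram() (a sketch; the exact file and method are not shown in this excerpt):

    /**
     * Returns the {@link Histogram} decoded for the current document.
     */
    Histogram histogram() throws IOException;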