Add support for additional grouping
Added parameter "additional_grouping" for the geohashcell facet. When this
parameter is provided, the region counts are additionally broken down
by the terms in that field.
gornik committed Oct 22, 2013
1 parent 313eee7 commit 8249e44
Showing 10 changed files with 301 additions and 46 deletions.
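Before the per-file diffs, a hedged sketch of what a search request using the new parameter might look like, built with Elasticsearch's XContentBuilder. Only "additional_grouping" comes from the commit message; the facet name, the facet type keyword ("geohashcell"), the other parameter names ("field", "level"), and the field names ("location", "category") are illustrative assumptions, not confirmed by this diff.

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;

public class AdditionalGroupingRequestSketch {
    public static void main(String[] args) throws IOException {
        // Illustrative facet body; every name except "additional_grouping" is assumed.
        XContentBuilder facet = XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("regions")                           // facet name (arbitrary)
                        .startObject("geohashcell")                   // assumed facet type keyword
                            .field("field", "location")               // assumed geo_point field parameter
                            .field("level", 5)                        // assumed geohash level parameter
                            .field("additional_grouping", "category") // parameter added by this commit
                        .endObject()
                    .endObject()
                .endObject();
        System.out.println(facet.string());
    }
}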
GeoHashCellEntry.java
@@ -8,7 +8,7 @@

public class GeoHashCellEntry implements GeoHashCellFacetEntry {

private final GeoHashCell cell;
protected final GeoHashCell cell;

public GeoHashCellEntry(GeoHashCell cell) {
this.cell = cell;
@@ -29,20 +29,23 @@ public boolean equals(Object o) {
return cell.equals(that.cell);
}

public static GeoHashCellEntry createEntry(GeoHashCell cell) {
return createEntry(cell, null);
}

public static GeoHashCellEntry createEntry(GeoHashCell cell, String additionalGroupingValue) {
return additionalGroupingValue == null || additionalGroupingValue.isEmpty()
? new GeoHashCellEntry(cell)
: new GeoHashCellWithGroupingEntry(cell, additionalGroupingValue);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, AtomicLong value) throws IOException {
GeoPoint center = cell.getCenter();
builder.startObject();
builder.field("lat", center.lat());
builder.field("lon", center.lon());
builder.field("count", value);
builder.endObject();
return builder;
public GeoHashCellWithGroupings createCellWithGroupings(AtomicLong value) {
return new GeoHashCellWithGroupings(cell, value.get());
}

@Override
public String getKey() {
return cell.toString();
}
}
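A minimal usage sketch of the createEntry factory added above. The GeoHashCell(GeoPoint, int) constructor is taken from its use in GeoHashCellFacetExecutor; the sketch class, coordinates, level, and grouping term are illustrative.

package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.geo.GeoPoint;

public class CreateEntrySketch {
    public static void main(String[] args) {
        // Cell construction as done in GeoHashCellFacetExecutor.Collector.collect().
        GeoHashCell cell = new GeoHashCell(new GeoPoint(52.23, 21.01), 5);

        GeoHashCellFacetEntry plain   = GeoHashCellEntry.createEntry(cell);          // no grouping value
        GeoHashCellFacetEntry empty   = GeoHashCellEntry.createEntry(cell, "");      // empty value also yields the plain entry
        GeoHashCellFacetEntry grouped = GeoHashCellEntry.createEntry(cell, "news");  // yields GeoHashCellWithGroupingEntry

        System.out.println(plain.getKey() + " / " + empty.getKey() + " / " + grouped.getKey());
    }
}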
GeoHashCellFacet.java
@@ -33,33 +33,36 @@ public class GeoHashCellFacet extends InternalFacet {
private Map<GeoHashCellFacetEntry, AtomicLong> counts = Maps.newHashMap();
private final String fieldName;
private final int userLevel;
private final String additionalGrouping;
private final String additionalGroupingField;

private GeoHashCellFacetResult results;

/**
* Creates the facet instance.
* @param facetName Name of the facet.
* @param counts Map of the group counts.
* @param results Aggregated region counts, optionally broken down by the additional grouping field.
* @param fieldName Name of the field used for grouping.
* @param mapBox Current map viewport.
* @param userLevel Level for geo hash grouping.
* @param additionalGrouping
* @param additionalGroupingField Name of the field used for the additional per-term breakdown (may be null or empty).
*/
public GeoHashCellFacet(String facetName, Map<GeoHashCellFacetEntry, AtomicLong> counts,
public GeoHashCellFacet(String facetName, GeoHashCellFacetResult results,
String fieldName, MapBox mapBox,
int userLevel, String additionalGrouping) {
int userLevel, String additionalGroupingField) {
super(facetName);
this.counts = counts;
this.results = results;
this.fieldName = fieldName;
this.mapBox = mapBox;
this.userLevel = userLevel;
this.additionalGrouping = additionalGrouping;
this.additionalGroupingField = additionalGroupingField;
}

private GeoHashCellFacet() {
this.fieldName = null;
this.mapBox = null;
this.userLevel = 0;
this.additionalGrouping = null;
this.results = new GeoHashCellFacetResult();
this.additionalGroupingField = null;
}


@@ -92,14 +95,7 @@ public Facet reduce(ReduceContext reduceContext) {
if (facet instanceof GeoHashCellFacet) {
GeoHashCellFacet hashCellFacet = (GeoHashCellFacet) facet;

for (Map.Entry<GeoHashCellFacetEntry, AtomicLong> entry : hashCellFacet.counts.entrySet()) {
if (geoHashCellFacet.counts.containsKey(entry.getKey())) {
geoHashCellFacet.counts.get(entry.getKey()).addAndGet(entry.getValue().longValue());
}
else {
geoHashCellFacet.counts.put(entry.getKey(), entry.getValue());
}
}
geoHashCellFacet.results = geoHashCellFacet.results.reduce(hashCellFacet.results);
}
}

@@ -121,9 +117,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(GeoHashCellFacetParser.ParamName.LEVEL, mapBox.getLevel(userLevel));
builder.field("base_map_level", mapBox.getBaseLevel());
builder.startArray("regions");
for (Map.Entry<GeoHashCellFacetEntry, AtomicLong> entry: counts.entrySet()) {
entry.getKey().toXContent(builder, entry.getValue());
}

results.toXContent(builder);

builder.endArray();
builder.endObject();
return builder;
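The reduce step above now folds each shard's GeoHashCellFacetResult into the first facet's result. A hedged sketch of that merge, assuming the GeoHashCell(GeoPoint, int) constructor used by the executor and assuming ResultWithGroupings.merge (not shown in this diff) sums the counts; the sketch class and values are illustrative.

package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.geo.GeoPoint;

import java.util.concurrent.atomic.AtomicLong;

public class ReduceSketch {
    public static void main(String[] args) {
        GeoHashCell cell = new GeoHashCell(new GeoPoint(52.23, 21.01), 5);

        // Two single-entry results, as two shards might report for the same cell.
        GeoHashCellFacetResult shardA = new GeoHashCellFacetResult(
                GeoHashCellEntry.createEntry(cell, "news"), new AtomicLong(3));
        GeoHashCellFacetResult shardB = new GeoHashCellFacetResult(
                GeoHashCellEntry.createEntry(cell, "sports"), new AtomicLong(2));

        // Both single-entry results presumably share the cell's key, so reduce()
        // combines them via ResultWithGroupings.merge(), giving a region count of 5
        // with per-term counts {news: 3, sports: 2}.
        GeoHashCellFacetResult merged = shardA.reduce(shardB);
    }
}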
GeoHashCellFacetEntry.java
@@ -1,10 +1,14 @@
package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

public interface GeoHashCellFacetEntry {
XContentBuilder toXContent(XContentBuilder builder, AtomicLong value) throws IOException;
ResultWithGroupings createCellWithGroupings(AtomicLong value);

String getKey();
}
GeoHashCellFacetExecutor.java
@@ -1,8 +1,16 @@
package org.elasticsearch.plugin.geohashcellfacet;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.text.BytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
@@ -11,45 +19,47 @@
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

public class GeoHashCellFacetExecutor extends FacetExecutor {
private final String fieldName;
private final int userLevel;
private final String additionalGrouping;
private final String additionalGroupingField;
private final IndexFieldData additionalGroupingFieldData;
private Map<GeoHashCellFacetEntry, AtomicLong> counts = Maps.newHashMap();
private final IndexGeoPointFieldData indexFieldData;
private final IndexFieldData additionalGroupingFieldData;
private final MapBox mapBox;

public GeoHashCellFacetExecutor(String fieldName,
SearchContext searchContext,
MapBox mapBox,
int userLevel, String additionalGrouping) {
int userLevel, String additionalGroupingField) {
this.fieldName = fieldName;
this.mapBox = mapBox;
this.userLevel = userLevel;
this.additionalGrouping = additionalGrouping;
this.additionalGroupingField = additionalGroupingField;

this.indexFieldData = searchContext
.fieldData()
.getForField(searchContext.smartNameFieldMapper(fieldName));

if (additionalGrouping != null && !additionalGrouping.isEmpty()) {
if (additionalGroupingField != null && !additionalGroupingField.isEmpty()) {
this.additionalGroupingFieldData = searchContext
.fieldData()
.getForField(searchContext.smartNameFieldMapper(additionalGrouping));
.getForField(searchContext.smartNameFieldMapper(additionalGroupingField));
} else {
this.additionalGroupingFieldData = null;
}
}

@Override
public InternalFacet buildFacet(String facetName) {
GeoHashCellFacetResult results = GeoHashCellFacetResult.createFromEntryCounts(counts);
return new GeoHashCellFacet(
facetName, counts, fieldName,
mapBox, userLevel, additionalGrouping);
facetName, results, fieldName,
mapBox, userLevel, additionalGroupingField);
}

@Override
Expand All @@ -60,15 +70,19 @@ public Collector collector() {
final class Collector extends FacetExecutor.Collector {

protected GeoPointValues values;
protected String additionalGroupingValue;
private BytesValues additionalValues;

@Override
public void setNextReader(AtomicReaderContext context)
throws IOException {
values = indexFieldData.load(context).getGeoPointValues();

if (additionalGroupingFieldData != null)
additionalGroupingValue = additionalGroupingFieldData.load(context).toString();
if (additionalGroupingFieldData != null) {
additionalValues = additionalGroupingFieldData.load(context).getBytesValues();

} else {
additionalValues = null;
}
}

@Override
@@ -87,14 +101,26 @@ public void collect(int docId) throws IOException {
continue;

GeoHashCell cell = new GeoHashCell(point, mapBox.getLevel(userLevel));
GeoHashCellEntry entry = GeoHashCellEntry.createEntry(cell, additionalGroupingValue);
increment(entry, 1L);

if (additionalValues != null && additionalValues.hasValue(docId)) {
final BytesValues.Iter iter = additionalValues.getIter(docId);
while (iter.hasNext()) {

BytesRef bytesRef = iter.next();
Text text = new BytesText(new BytesArray(bytesRef));

increment(GeoHashCellEntry.createEntry(cell, text.string()), 1L);
}
}
else {
increment(GeoHashCellEntry.createEntry(cell), 1L);
}
}
}

@Override
public void postCollection() {
// do nothing

}

private void increment(GeoHashCellFacetEntry entry, Long value) {
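A hedged, self-contained illustration of the multi-valued branch in collect() above: one document whose grouping field holds two terms contributes one grouped entry per term for its cell, mirroring the executor's counts map and its increment() helper. The sketch class, coordinates, and terms are illustrative.

package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.geo.GeoPoint;

import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

public class CollectSketch {
    public static void main(String[] args) {
        Map<GeoHashCellFacetEntry, AtomicLong> counts = Maps.newHashMap();

        // One document at this point, with two terms in the additional grouping field.
        GeoHashCell cell = new GeoHashCell(new GeoPoint(52.23, 21.01), 5);
        for (String term : new String[] { "news", "sports" }) {
            GeoHashCellFacetEntry entry = GeoHashCellEntry.createEntry(cell, term);
            if (!counts.containsKey(entry)) {
                counts.put(entry, new AtomicLong(0));
            }
            counts.get(entry).addAndGet(1L);
        }

        System.out.println(counts.size()); // 2 distinct (cell, term) entries from a single document
    }
}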
GeoHashCellFacetResult.java (new file)
@@ -0,0 +1,67 @@
package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

public class GeoHashCellFacetResult {

private final Map<String, ResultWithGroupings> counts;

public GeoHashCellFacetResult() {
this.counts = Maps.newHashMap();
}

public GeoHashCellFacetResult(GeoHashCellFacetEntry entry, AtomicLong value) {
Map<String, ResultWithGroupings> counts = Maps.newHashMap();
counts.put(entry.getKey(), entry.createCellWithGroupings(value));

this.counts = counts;
}

private GeoHashCellFacetResult(Map<String, ResultWithGroupings> counts) {
this.counts = Maps.newHashMap(counts);
}

public GeoHashCellFacetResult reduce(GeoHashCellFacetResult other) {

Map<String, ResultWithGroupings> countsCopy = Maps.newHashMap(this.counts);

for (Map.Entry<String, ResultWithGroupings> pair : other.counts.entrySet()) {
String key = pair.getKey();
ResultWithGroupings value = pair.getValue();

if (countsCopy.containsKey(key)) {
ResultWithGroupings oldValue = countsCopy.get(key);
countsCopy.remove(key);
countsCopy.put(key, oldValue.merge(value));
}
else {
countsCopy.put(key, value);
}
}

return new GeoHashCellFacetResult(countsCopy);
}

public static GeoHashCellFacetResult createFromEntryCounts(Map<GeoHashCellFacetEntry, AtomicLong> counts) {
GeoHashCellFacetResult result = new GeoHashCellFacetResult();

for (Map.Entry<GeoHashCellFacetEntry, AtomicLong> entry : counts.entrySet()) {
result = result.reduce(new GeoHashCellFacetResult(entry.getKey(), entry.getValue()));
}

return result;
}

public void toXContent(XContentBuilder builder) throws IOException {
for (Map.Entry<String, ResultWithGroupings> entry : counts.entrySet()) {
entry.getValue().toXContent(builder);
}
}
}
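For reference, buildFacet() in the executor feeds its raw counts map through createFromEntryCounts above. A hedged sketch of that conversion; the sketch class and values are illustrative, and the cell constructor follows the executor's usage.

package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.geo.GeoPoint;

import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

public class CreateFromEntryCountsSketch {
    public static void main(String[] args) {
        GeoHashCell cell = new GeoHashCell(new GeoPoint(52.23, 21.01), 5);

        Map<GeoHashCellFacetEntry, AtomicLong> counts = Maps.newHashMap();
        counts.put(GeoHashCellEntry.createEntry(cell, "news"), new AtomicLong(3));
        counts.put(GeoHashCellEntry.createEntry(cell, "sports"), new AtomicLong(2));

        // Each (entry, count) pair becomes a single-entry result and is folded in
        // via reduce(); pairs whose entries produce the same key are merged
        // through ResultWithGroupings.merge().
        GeoHashCellFacetResult result = GeoHashCellFacetResult.createFromEntryCounts(counts);
    }
}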


GeoHashCellWithGroupingEntry.java
@@ -1,5 +1,10 @@
package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.collect.Maps;

import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

public class GeoHashCellWithGroupingEntry extends GeoHashCellEntry {

private final String additionalGroupingValue;
@@ -20,6 +25,13 @@ public boolean equals(Object o) {
return additionalGroupingValue.equals(that.additionalGroupingValue);
}

@Override
public GeoHashCellWithGroupings createCellWithGroupings(AtomicLong value) {
Map<String, AtomicLong> counts = Maps.newHashMap();
counts.put(additionalGroupingValue, value);
return new GeoHashCellWithGroupings(cell, value.get(), counts);
}

@Override
public int hashCode() {
int result = super.hashCode();
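A hedged sketch contrasting the override above with the plain entry's version, assuming the cell constructor used by the executor; the sketch class, term, and count are illustrative, and GeoHashCellWithGroupings' internals are not part of this diff.

package org.elasticsearch.plugin.geohashcellfacet;

import org.elasticsearch.common.geo.GeoPoint;

import java.util.concurrent.atomic.AtomicLong;

public class CreateCellWithGroupingsSketch {
    public static void main(String[] args) {
        GeoHashCell cell = new GeoHashCell(new GeoPoint(52.23, 21.01), 5);
        AtomicLong count = new AtomicLong(7);

        // Plain entry: carries the total count only.
        GeoHashCellWithGroupings plain =
                GeoHashCellEntry.createEntry(cell).createCellWithGroupings(count);

        // Grouping entry: same total plus a one-element per-term map {"news" -> count}.
        GeoHashCellWithGroupings grouped =
                GeoHashCellEntry.createEntry(cell, "news").createCellWithGroupings(count);
    }
}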