fix: allow contrib-only filters in contrib extraction endpoint(s), fixes
tyrasd committed Aug 17, 2023
1 parent 073db36 commit aef201c
Showing 2 changed files with 38 additions and 23 deletions.
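In practice, the fix means contribution-specific filters such as changeset:... can now be used on the /contributions extraction endpoints. A minimal sketch of such a request with Java's built-in HTTP client (the base URL is an assumption; the bbox, filter, properties and time values mirror the new test added in this commit):

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class ContributionFilterExample {
  public static void main(String[] args) throws Exception {
    // Base URL is assumed; the query parameters mirror the new test in this commit.
    String filter = URLEncoder.encode("id:way/25316163 and changeset:14227603",
        StandardCharsets.UTF_8);
    URI uri = URI.create("https://api.ohsome.org/v1/contributions/bbox"
        + "?bboxes=8.67,49.39,8.71,49.42&clipGeometry=true"
        + "&filter=" + filter
        + "&properties=metadata,contributionTypes&time=2012-12-10,2017-12-11");
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(HttpRequest.newBuilder(uri).GET().build(), HttpResponse.BodyHandlers.ofString());
    System.out.println(response.body()); // GeoJSON FeatureCollection of contribution features
  }
}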
First changed file (the DataRequestExecutor class):
@@ -24,6 +24,7 @@
 import org.heigit.ohsome.oshdb.api.db.OSHDBIgnite.ComputeMode;
 import org.heigit.ohsome.oshdb.api.mapreducer.MapReducer;
 import org.heigit.ohsome.oshdb.filter.FilterExpression;
+import org.heigit.ohsome.oshdb.util.mappable.OSHDBMapReducible;
 import org.heigit.ohsome.oshdb.util.mappable.OSMContribution;
 import org.heigit.ohsome.oshdb.util.mappable.OSMEntitySnapshot;
 import org.heigit.ohsome.oshdb.util.tagtranslator.TagTranslator;
@@ -61,20 +62,7 @@ public DataRequestExecutor(RequestResource requestResource, ElementsGeometry ele
    */
   public void extract() throws Exception {
     inputProcessor.getProcessingData().setFullHistory(true);
-    InputProcessor snapshotInputProcessor = new InputProcessor(servletRequest, true, false);
-    snapshotInputProcessor.getProcessingData().setFullHistory(true);
-    MapReducer<OSMEntitySnapshot> mapRedSnapshot = null;
-    MapReducer<OSMContribution> mapRedContribution = null;
-    if (DbConnData.db instanceof OSHDBIgnite) {
-      // on ignite: Use AffinityCall backend, which is the only one properly supporting streaming
-      // of result data, without buffering the whole result in memory before returning the result.
-      // This allows to write data out to the client via a chunked HTTP response.
-      mapRedSnapshot = snapshotInputProcessor.processParameters(ComputeMode.AFFINITY_CALL);
-      mapRedContribution = inputProcessor.processParameters(ComputeMode.AFFINITY_CALL);
-    } else {
-      mapRedSnapshot = snapshotInputProcessor.processParameters();
-      mapRedContribution = inputProcessor.processParameters();
-    }
+    final MapReducer<List<OSMContribution>> mapRedContributions = getMapReducer(inputProcessor);
     RequestParameters requestParameters = processingData.getRequestParameters();
     String[] time = inputProcessor.splitParamOnComma(
         inputProcessor.createEmptyArrayIfNull(servletRequest.getParameterValues("time")));
@@ -102,17 +90,10 @@ public void extract() throws Exception {
         .format(DateTimeFormatter.ISO_DATE_TIME);
     String endTimestamp = IsoDateTimeParser.parseIsoDateTime(requestParameters.getTime()[1])
         .format(DateTimeFormatter.ISO_DATE_TIME);
-    MapReducer<List<OSMContribution>> mapRedContributions = mapRedContribution.groupByEntity();
-    MapReducer<List<OSMEntitySnapshot>> mapRedSnapshots = mapRedSnapshot.groupByEntity();
-    Optional<FilterExpression> filter = processingData.getFilterExpression();
-    if (filter.isPresent()) {
-      mapRedSnapshots = mapRedSnapshots.filter(filter.get());
-      mapRedContributions = mapRedContributions.filter(filter.get());
-    }
     final boolean isContainingSimpleFeatureTypes = processingData.isContainingSimpleFeatureTypes();
     DataExtractionTransformer dataExtractionTransformer = new DataExtractionTransformer(
-        startTimestamp, endTimestamp, filter.orElse(null), isContributionsEndpoint,
-        isContributionsLatestEndpoint,
+        startTimestamp, endTimestamp, processingData.getFilterExpression().orElse(null),
+        isContributionsEndpoint, isContributionsLatestEndpoint,
         clipGeometries, includeTags, includeOSMMetadata, includeContributionTypes, utils, exeUtils,
         keysInt, elementsGeometry, simpleFeatureTypes,
         isContainingSimpleFeatureTypes);
@@ -128,6 +109,9 @@ public void extract() throws Exception {
         metadata, "FeatureCollection", Collections.emptyList());
     MapReducer<Feature> snapshotPreResult = null;
     if (!isContributionsEndpoint) {
+      InputProcessor snapshotInputProcessor = new InputProcessor(servletRequest, true, false);
+      snapshotInputProcessor.getProcessingData().setFullHistory(true);
+      MapReducer<List<OSMEntitySnapshot>> mapRedSnapshots = getMapReducer(snapshotInputProcessor);
       // handles cases where valid_from = t_start, valid_to = t_end; i.e. non-modified data
       snapshotPreResult = mapRedSnapshots
           .filter(snapshots -> snapshots.size() == 2)
@@ -146,4 +130,21 @@ public void extract() throws Exception {
           Stream.concat(contributionStream, snapshotStream));
     }
   }
+
+  private <X extends OSHDBMapReducible> MapReducer<List<X>> getMapReducer(
+      InputProcessor inputProcessor) throws Exception {
+    MapReducer<X> mapRed;
+    if (DbConnData.db instanceof OSHDBIgnite) {
+      // on ignite: Use AffinityCall backend, which is the only one properly supporting streaming
+      // of result data, without buffering the whole result in memory before returning the result.
+      // This allows to write data out to the client via a chunked HTTP response.
+      mapRed = inputProcessor.processParameters(ComputeMode.AFFINITY_CALL);
+    } else {
+      mapRed = inputProcessor.processParameters();
+    }
+    MapReducer<List<X>> mapRedGrouped = mapRed.groupByEntity();
+    Optional<FilterExpression> filter = processingData.getFilterExpression();
+    return filter.map(mapRedGrouped::filter)
+        .orElse(mapRedGrouped);
+  }
 }
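Since getMapReducer is generic over the OSHDBMapReducible view type, one helper now serves both extraction modes. A minimal sketch of the resulting call pattern (names as in the diff above, shown only to illustrate the type inference and why contribution-only filters no longer reach snapshot data):

// Target typing picks the view: X = OSMContribution for the contributions reducer ...
MapReducer<List<OSMContribution>> contributions = getMapReducer(inputProcessor);
// ... and X = OSMEntitySnapshot for the snapshot reducer, which is only built when the
// request is not a contributions extraction, so a contribution-only filter (e.g. changeset:)
// is never evaluated against the snapshot view.
MapReducer<List<OSMEntitySnapshot>> snapshots = getMapReducer(snapshotInputProcessor);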
Second changed file (a test class):
@@ -460,6 +460,20 @@ public void contributionTypesPropertiesParameterTest() {
     assertEquals("14227603", feature.get("properties").get("@contributionChangesetId").asText());
   }
 
+  @Test
+  public void contributionsChangesetFilterTest() {
+    TestRestTemplate restTemplate = new TestRestTemplate();
+    ResponseEntity<JsonNode> response = restTemplate.getForEntity(server + port
+        + "/contributions/bbox?bboxes=8.67,49.39,8.71,49.42&clipGeometry=true&"
+        + "filter=id:way/25316163 and changeset:14227603&"
+        + "properties=metadata,contributionTypes&time=2012-12-10,2017-12-11",
+        JsonNode.class);
+    var features = response.getBody().get("features");
+    assertEquals(1, features.size());
+    assertEquals("14227603",
+        features.get(0).get("properties").get("@contributionChangesetId").asText());
+  }
+
   /*
    * ./contributions/latest tests
    */