Skip to content

Commit

Permalink
Working implementation of reverseResolve (single per resource).
Browse files Browse the repository at this point in the history
  • Loading branch information
piotrszul committed Oct 19, 2023
1 parent f879e9e commit 982cd7f
Show file tree
Hide file tree
Showing 33 changed files with 346 additions and 190 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ public Dataset<Row> enforce(@Nonnull final ResourceType subjectResource,

// Build a new expression parser, and parse all the column expressions within the query.
final ResourceCollection inputContext = ResourceCollection
.build(getFhirContext(), getDataSource().read(subjectResource), subjectResource
.build(getFhirContext(), subjectResource
);
return filterDataset(inputContext, filters, dataset, dataset.col("id"), Column::or);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@
import au.csiro.pathling.view.FromSelection;
import au.csiro.pathling.view.PrimitiveSelection;
import au.csiro.pathling.view.Selection;
import au.csiro.pathling.view.ViewContext;
import au.csiro.pathling.view.ExecutionContext;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import com.google.common.collect.Streams;
Expand Down Expand Up @@ -176,8 +176,8 @@ void setUp() {
}


ViewContext newContext() {
return new ViewContext(spark, fhirContext, dataSource);
ExecutionContext newContext() {
return new ExecutionContext(spark, fhirContext, dataSource);
}

@Test
Expand Down Expand Up @@ -222,36 +222,7 @@ void testExtractView() {
System.out.println(resultDataset.queryExecution().optimizedPlan());

}


@Test
void testExtractWithResolutions() {
  // Fixed typo in method name ("Resoutions" -> "Resolutions").
  // Verifies that an extract query containing a reverseResolve() expression
  // parses into an ExtractView and evaluates to a dataset, printing the
  // view tree and Spark plans for inspection.

  // Column expressions for the extract: two simple element paths plus a
  // reverse-resolved traversal from Patient to Condition.
  final List<String> expressions = List.of(
      "id",
      "gender",
      "reverseResolve(Condition.subject).clinicalStatus"
  );

  System.out.println("### Expressions: ###");
  expressions.forEach(System.out::println);

  // Build the request against the Patient resource; no filters, no limit.
  final ExtractRequest extractRequest = ExtractRequest.fromUserInput(
      ResourceType.PATIENT,
      Optional.of(expressions),
      Optional.empty(),
      Optional.empty()
  );

  // Parse the request into an executable view and evaluate it.
  final QueryParser queryParser = new QueryParser(new Parser());
  final ExtractView extractView = queryParser.toView(extractRequest);
  System.out.println("## Extract view ##");
  extractView.printTree();
  final Dataset<Row> resultDataset = extractView.evaluate(newContext());

  // Show all rows untruncated and dump the logical/physical plans so the
  // join produced by reverseResolve can be inspected manually.
  resultDataset.show(false);
  System.out.println(resultDataset.logicalPlan());
  System.out.println(resultDataset.queryExecution().executedPlan());
}


@Test
void testAggregation() {

Expand Down Expand Up @@ -417,7 +388,7 @@ void testAggregationFull() {
groupByCollections.forEach(c -> System.out.println(c.getFhirType()));

final DatasetResult<Collection> datasetCollectionResult = groupByCollections.stream()
.map(k -> (DatasetResult<Collection>) DatasetResult.of(k))
.map(k -> (DatasetResult<Collection>) DatasetResult.pureOne(k))
.reduce(DatasetResult.empty(), DatasetResult::andThen);

System.out.println("### Grouping Collection Result: ###");
Expand Down Expand Up @@ -467,4 +438,34 @@ void mockResource(final ResourceType... resourceTypes) {
TestHelpers.mockResource(dataSource, spark, resourceTypes);
}



@Test
// Verifies that an extract query mixing plain element paths with two
// different reverseResolve() expressions (one plain traversal, one ending
// in an aggregate count()) parses into an ExtractView and evaluates.
// NOTE(review): this test only prints its output (view tree, rows, Spark
// plans) and makes no assertions — it documents/exercises behaviour rather
// than verifying a specific result.
void testExtractViewWithReverseResolve() {

// Column expressions: two simple Patient elements plus two reverse
// resolutions — Patient <- Condition.subject and Patient <- Observation.subject.
final List<String> expressions = List.of(
"id",
"gender",
"reverseResolve(Condition.subject).clinicalStatus",
"reverseResolve(Observation.subject).id.count()"
);

System.out.println("### Expressions: ###");
expressions.forEach(System.out::println);
// Build the request against the Patient resource; no filters, no limit.
final ExtractRequest extractRequest = ExtractRequest.fromUserInput(
ResourceType.PATIENT,
Optional.of(expressions),
Optional.empty(),
Optional.empty()
);

// Parse into an executable view, then evaluate against a fresh context
// (newContext() wraps the shared spark/fhirContext/dataSource fixtures).
final QueryParser queryParser = new QueryParser(new Parser());
final ExtractView extractView = queryParser.toView(extractRequest);
System.out.println("## Extract view ##");
extractView.printTree();
final Dataset<Row> resultDataset = extractView.evaluate(newContext());
// Show all rows untruncated and dump both plans so the joins introduced
// by the two reverseResolve expressions can be inspected manually.
resultDataset.show(false);
System.out.println(resultDataset.logicalPlan());
System.out.println(resultDataset.queryExecution().executedPlan());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -85,12 +85,13 @@ void setUp() {
@SuppressWarnings("SameParameterValue")
void setSubjectResource(@Nonnull final ResourceType resourceType) {
final ResourceCollection subjectResource = ResourceCollection
.build(fhirContext, dataSource.read(resourceType), resourceType);
.build(fhirContext, resourceType);

evaluationContext = new EvaluationContextBuilder(spark, fhirContext)
.dataset(dataSource.read(resourceType))
.resource(subjectResource)
.inputContext(subjectResource)
.dataSource(dataSource)
.build();
}

Expand All @@ -106,7 +107,7 @@ void mockResource(final ResourceType... resourceTypes) {
protected FhirPathAssertion assertThatResultOf(@Nonnull final ResourceType resourceType,
@Nonnull final String expression) {
final ResourceCollection subjectResource = ResourceCollection
.build(fhirContext, dataSource.read(resourceType), resourceType);
.build(fhirContext, resourceType);

final EvaluationContext evaluationContext = new EvaluationContextBuilder(spark, fhirContext)
.dataset(dataSource.read(resourceType))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
import au.csiro.pathling.test.builders.DatasetBuilder;
import au.csiro.pathling.test.helpers.TerminologyServiceHelpers;
import au.csiro.pathling.test.helpers.TerminologyServiceHelpers.TranslateExpectations;
import org.apache.spark.sql.execution.SparkPlan;
import org.apache.spark.sql.types.DataTypes;
import org.hl7.fhir.r4.model.Enumerations.ResourceType;
import org.junit.jupiter.api.Assertions;
Expand All @@ -57,7 +58,7 @@ public class ParserTest extends AbstractParserTest {

@SuppressWarnings("SameParameterValue")
private <T extends Throwable> T assertThrows(final Class<T> errorType, final String expression) {
return Assertions.assertThrows(errorType, () -> parser.evaluate(expression, evaluationContext));
return Assertions.assertThrows(errorType, () -> parser.evaluatePure(expression, evaluationContext));

Check failure on line 61 in fhir-server/src/test/java/au/csiro/pathling/fhirpath/parser/ParserTest.java

View workflow job for this annotation

GitHub Actions / FHIR server test report 3

ParserTest.testTraversalToUnsupportedReferenceChild

Unexpected exception type thrown, expected: <au.csiro.pathling.errors.InvalidUserInputError> but was: <java.lang.IllegalStateException>
Raw output
org.opentest4j.AssertionFailedError: Unexpected exception type thrown, expected: <au.csiro.pathling.errors.InvalidUserInputError> but was: <java.lang.IllegalStateException>
	at org.junit.jupiter.api.AssertionFailureBuilder.build(AssertionFailureBuilder.java:151)
	at org.junit.jupiter.api.AssertThrows.assertThrows(AssertThrows.java:67)
	at org.junit.jupiter.api.AssertThrows.assertThrows(AssertThrows.java:35)
	at org.junit.jupiter.api.Assertions.assertThrows(Assertions.java:3083)
	at au.csiro.pathling.fhirpath.parser.ParserTest.assertThrows(ParserTest.java:61)
	at au.csiro.pathling.fhirpath.parser.ParserTest.testTraversalToUnsupportedReferenceChild(ParserTest.java:840)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at org.junit.platform.commons.util.ReflectionUtils.invokeMethod(ReflectionUtils.java:727)
	at org.junit.jupiter.engine.execution.MethodInvocation.proceed(MethodInvocation.java:60)
	at org.junit.jupiter.engine.execution.InvocationInterceptorChain$ValidatingInvocation.proceed(InvocationInterceptorChain.java:131)
	at org.junit.jupiter.engine.extension.TimeoutExtension.intercept(TimeoutExtension.java:156)
	at org.junit.jupiter.engine.extension.TimeoutExtension.interceptTestableMethod(TimeoutExtension.java:147)
	at org.junit.jupiter.engine.extension.TimeoutExtension.interceptTestMethod(TimeoutExtension.java:86)
	at org.junit.jupiter.engine.execution.InterceptingExecutableInvoker$ReflectiveInterceptorCall.lambda$ofVoidMethod$0(InterceptingExecutableInvoker.java:103)
	at org.junit.jupiter.engine.execution.InterceptingExecutableInvoker.lambda$invoke$0(InterceptingExecutableInvoker.java:93)
	at org.junit.jupiter.engine.execution.InvocationInterceptorChain$InterceptedInvocation.proceed(InvocationInterceptorChain.java:106)
	at org.junit.jupiter.engine.execution.InvocationInterceptorChain.proceed(InvocationInterceptorChain.java:64)
	at org.junit.jupiter.engine.execution.InvocationInterceptorChain.chainAndInvoke(InvocationInterceptorChain.java:45)
	at org.junit.jupiter.engine.execution.InvocationInterceptorChain.invoke(InvocationInterceptorChain.java:37)
	at org.junit.jupiter.engine.execution.InterceptingExecutableInvoker.invoke(InterceptingExecutableInvoker.java:92)
	at org.junit.jupiter.engine.execution.InterceptingExecutableInvoker.invoke(InterceptingExecutableInvoker.java:86)
	at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.lambda$invokeTestMethod$7(TestMethodTestDescriptor.java:217)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.invokeTestMethod(TestMethodTestDescriptor.java:213)
	at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.execute(TestMethodTestDescriptor.java:138)
	at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.execute(TestMethodTestDescriptor.java:68)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$6(NodeTestTask.java:151)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:141)
	at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$9(NodeTestTask.java:139)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:138)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:95)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
	at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:41)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$6(NodeTestTask.java:155)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:141)
	at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$9(NodeTestTask.java:139)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:138)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:95)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
	at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:41)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$6(NodeTestTask.java:155)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:141)
	at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$9(NodeTestTask.java:139)
	at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:138)
	at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:95)
	at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.submit(SameThreadHierarchicalTestExecutorService.java:35)
	at org.junit.platform.engine.support.hierarchical.HierarchicalTestExecutor.execute(HierarchicalTestExecutor.java:57)
	at org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine.execute(HierarchicalTestEngine.java:54)
	at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:147)
	at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:127)
	at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:90)
	at org.junit.platform.launcher.core.EngineExecutionOrchestrator.lambda$execute$0(EngineExecutionOrchestrator.java:55)
	at org.junit.platform.launcher.core.EngineExecutionOrchestrator.withInterceptedStreams(EngineExecutionOrchestrator.java:102)
	at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:54)
	at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:114)
	at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:86)
	at org.junit.platform.launcher.core.DefaultLauncherSession$DelegatingLauncher.execute(DefaultLauncherSession.java:86)
	at org.apache.maven.surefire.junitplatform.LazyLauncher.execute(LazyLauncher.java:56)
	at org.apache.maven.surefire.junitplatform.JUnitPlatformProvider.lambda$execute$1(JUnitPlatformProvider.java:191)
	at java.base/java.util.Iterator.forEachRemaining(Iterator.java:133)
	at org.apache.maven.surefire.junitplatform.JUnitPlatformProvider.execute(JUnitPlatformProvider.java:186)
	at org.apache.maven.surefire.junitplatform.JUnitPlatformProvider.invokeAllTests(JUnitPlatformProvider.java:148)
	at org.apache.maven.surefire.junitplatform.JUnitPlatformProvider.invoke(JUnitPlatformProvider.java:118)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:385)
	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:162)
	at org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:507)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:495)
Caused by: java.lang.IllegalStateException: Cannot get pure value from transformed result
	at au.csiro.pathling.view.DatasetResult$One.getPureValue(DatasetResult.java:154)
	at au.csiro.pathling.fhirpath.parser.Parser.evaluatePure(Parser.java:53)
	at au.csiro.pathling.fhirpath.parser.ParserTest.lambda$assertThrows$0(ParserTest.java:61)
	at org.junit.jupiter.api.AssertThrows.assertThrows(AssertThrows.java:53)
	... 77 more
}

private TranslateExpectations setupMockTranslationFor_195662009_444814009(
Expand Down Expand Up @@ -245,6 +246,23 @@ void testCodingOperations() {
// .hasCodingValue(expectedCodingWithVersion);
// }


@Test
// Smoke test for a bare reverseResolve() traversal: evaluates
// "reverseResolve(Condition.subject).code.coding" against mocked Patient
// and Condition resources and prints the executed Spark plan.
// NOTE(review): the type/cardinality assertions are commented out, so this
// test currently only checks that evaluation completes without error —
// presumably a work-in-progress while reverseResolve is being implemented.
void testBasicReverseResolve() {
// Register mock datasets for both the subject and the foreign resource
// so the reverse resolution has data on both sides of the join.
mockResource(ResourceType.PATIENT, ResourceType.CONDITION);

final SparkPlan p = assertThatResultOf(
"reverseResolve(Condition.subject).code.coding")
//.isElementPath(IntegerCollection.class)
//.isSingular()
.selectOrderedResult()
.debugAllRows()
.getDataset().queryExecution().executedPlan();

// Dump the physical plan for manual inspection of the generated join.
System.out.println(p);
}


@Test
void testCountWithReverseResolve() {
assertThatResultOf("reverseResolve(Condition.subject).code.coding.count()")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import au.csiro.pathling.query.QueryParser;
import au.csiro.pathling.terminology.TerminologyServiceFactory;
import au.csiro.pathling.view.AggregationView;
import au.csiro.pathling.view.ViewContext;
import au.csiro.pathling.view.ExecutionContext;
import ca.uhn.fhir.context.FhirContext;
import java.util.Collections;
import java.util.List;
Expand Down Expand Up @@ -81,8 +81,8 @@ public ResultWithExpressions buildQuery(@Nonnull final AggregateRequest query) {
Collections.emptyList(), Collections.emptyList());
}

protected ViewContext newContext() {
return new ViewContext(sparkSession, fhirContext, dataSource);
protected ExecutionContext newContext() {
return new ExecutionContext(sparkSession, fhirContext, dataSource);
}

//
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import au.csiro.pathling.query.QueryParser;
import au.csiro.pathling.terminology.TerminologyServiceFactory;
import au.csiro.pathling.view.ExtractView;
import au.csiro.pathling.view.ViewContext;
import au.csiro.pathling.view.ExecutionContext;
import ca.uhn.fhir.context.FhirContext;
import java.util.Optional;
import javax.annotation.Nonnull;
Expand Down Expand Up @@ -64,8 +64,8 @@ public Dataset<Row> buildQuery(@Nonnull final ExtractRequest query,
return query.getLimit().map(resultDataset::limit).orElse(resultDataset);
}

protected ViewContext newContext() {
return new ViewContext(sparkSession, fhirContext, dataSource);
protected ExecutionContext newContext() {
return new ExecutionContext(sparkSession, fhirContext, dataSource);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ class This<I extends Collection> implements FhirPath<I> {
@Override
public DatasetResult.One<I> eval(@Nonnull final I input,
@Nonnull final EvaluationContext context) {
return DatasetResult.of(input);
return DatasetResult.pureOne(input);
}

@Override
Expand Down Expand Up @@ -105,7 +105,7 @@ class Composite<I extends Collection> implements FhirPath<I> {
public DatasetResult.One<I> eval(@Nonnull final I input,
@Nonnull final EvaluationContext context) {
return elements.stream()
.reduce(DatasetResult.of(input),
.reduce(DatasetResult.pureOne(input),
(acc, element) -> element.eval(acc.getValue(), context).withTransformOf(acc),
(a, b) -> b);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,17 +30,12 @@
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import java.util.EnumSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import lombok.Getter;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.model.Enumerations.FHIRDefinedType;
Expand All @@ -55,35 +50,24 @@
@Getter
public class ResourceCollection extends Collection {

/**
* A mapping between the names of elements in the resource and the corresponding {@link Column}.
*/
@Nonnull
private final Map<String, Column> elementsToColumns;

/**
* The {@link ResourceDefinition} for this resource type.
*/
@Nonnull
private final ResourceDefinition resourceDefinition;

/**
* The {@link Dataset} containing the resource data.
*/
@Nonnull
private final Dataset<Row> dataset;
final Optional<String> prefix;

protected ResourceCollection(@Nonnull final Column column,
@Nonnull final Optional<FhirPathType> type,
@Nonnull final Optional<FHIRDefinedType> fhirType,
@Nonnull final Optional<? extends NodeDefinition> definition,
@Nonnull final Map<String, Column> elementsToColumns,
@Nonnull final ResourceDefinition resourceDefinition,
@Nonnull final Dataset<Row> dataset) {
@Nonnull final Optional<String> prefix) {
super(column, type, fhirType, definition);
this.elementsToColumns = elementsToColumns;
this.resourceDefinition = resourceDefinition;
this.dataset = dataset;
this.prefix = prefix;
}

@Nonnull
Expand All @@ -99,29 +83,30 @@ private static Optional<FHIRDefinedType> getFhirType(@Nonnull final ResourceType
* Build a new ResourcePath using the supplied {@link FhirContext} and {@link DataSource}.
*
* @param fhirContext the {@link FhirContext} to use for sourcing the resource definition
* @param dataset the {@link Dataset} that contains the resource data
* @param resourceType the type of the resource
* @return A shiny new ResourcePath
*/
@Nonnull
public static ResourceCollection build(@Nonnull final FhirContext fhirContext,
@Nonnull final Dataset<Row> dataset, @Nonnull final ResourceType resourceType) {
@Nonnull final ResourceType resourceType, @Nonnull final Optional<String> prefix) {

// Get the resource definition from HAPI.
final String resourceCode = resourceType.toCode();
final RuntimeResourceDefinition hapiDefinition = fhirContext.getResourceDefinition(
resourceCode);
final ResourceDefinition definition = new ResourceDefinition(resourceType, hapiDefinition);

//noinspection ReturnOfNull
final Map<String, Column> elementsToColumns = Stream.of(dataset.columns())
.collect(Collectors.toUnmodifiableMap(Function.identity(), dataset::col));

// We use a literal column as the resource value - the actual value is not important.
// But the non-null value indicates that the resource should be included in any result.
return new ResourceCollection(functions.lit(true), Optional.empty(),
getFhirType(resourceType), Optional.of(definition), elementsToColumns, definition,
dataset);
getFhirType(resourceType), Optional.of(definition), definition, prefix);
}


@Nonnull
public static ResourceCollection build(@Nonnull final FhirContext fhirContext,
@Nonnull final ResourceType resourceType) {
return build(fhirContext, resourceType, Optional.empty());
}

/**
Expand Down Expand Up @@ -149,7 +134,14 @@ public static Set<ResourceType> supportedResourceTypes() {
*/
@Nonnull
public Optional<Column> getElementColumn(@Nonnull final String elementName) {
return Optional.ofNullable(elementsToColumns.get(elementName));
// TODO: do more checking or not (either based on schema or definition)
// return resourceDefinition
// .getChildElement(elementName)
// .map(ChildDefinition::getName)
// .map(functions::col);
return prefix
.map(p -> p + "_" + elementName).or(() -> Optional.of(elementName))
.map(functions::col);
}

@Nonnull
Expand Down Expand Up @@ -178,13 +170,12 @@ protected Collection traverseElement(@Nonnull final ElementDefinition childDef)
functions.when(getCtx().getValue().isNotNull(), value),
childDef)).get();
}



@Nonnull
@Override
public Collection copyWith(@Nonnull final Column newValue) {
return new ResourceCollection(newValue, getType(), getFhirType(), getDefinition(),
elementsToColumns, resourceDefinition, dataset);
resourceDefinition, prefix);
}

@Nonnull
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,6 @@ public static ResourceCollection reverseResolve(@Nonnull final Collection input,

final ResourceCollection foreignResource = ResourceCollection.build(
evaluationContext.getFhirContext(),
dataset,
foreignResourceType);

final Column referenceColumn = foreignResource.traverse(
Expand All @@ -271,7 +270,7 @@ public static ResourceCollection reverseResolve(@Nonnull final Collection input,
final Dataset<Row> joinedDataset = dataset.join(foreignDataset,
dataset.col("id_versioned").equalTo(foreignDataset.col("ref_xxx")), "left_outer");

return ResourceCollection.build(evaluationContext.getFhirContext(), joinedDataset,
return ResourceCollection.build(evaluationContext.getFhirContext(),
foreignResourceType);

}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
import au.csiro.pathling.fhirpath.path.Paths.EvalFunction;
import au.csiro.pathling.fhirpath.path.Paths.This;
import au.csiro.pathling.fhirpath.path.Paths.Traversal;
import au.csiro.pathling.fhirpath.path.ReverseResolvePath;

/**
* This class is invoked on the right-hand side of the invocation expression, and can optionally be
Expand Down Expand Up @@ -94,7 +95,11 @@ public FhirPath<Collection> visitFunctionInvocation(
.collect(toList())
).orElse(Collections.emptyList());

return new EvalFunction(functionIdentifier, arguments);
if ("reverseResolve".equals(functionIdentifier)) {
return ReverseResolvePath.from(arguments);
} else {
return new EvalFunction(functionIdentifier, arguments);
}
}

@Override
Expand Down
Loading

0 comments on commit 982cd7f

Please sign in to comment.