diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml
index 9f7ed00f27aa..86d82d9379e8 100644
--- a/hapi-deployable-pom/pom.xml
+++ b/hapi-deployable-pom/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml
index 1214bb7c3ee0..83c2140b245b 100644
--- a/hapi-fhir-android/pom.xml
+++ b/hapi-fhir-android/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml
index 303de59fc8f6..d5f8eefd21d9 100644
--- a/hapi-fhir-base/pom.xml
+++ b/hapi-fhir-base/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java
index 3c11e702124e..7feea8a15d0d 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java
@@ -265,30 +265,48 @@ public static IBase addIssueWithMessageId(
}
public static void addDetailsToIssue(FhirContext theFhirContext, IBase theIssue, String theSystem, String theCode) {
+ addDetailsToIssue(theFhirContext, theIssue, theSystem, theCode, null);
+ }
+
+ public static void addDetailsToIssue(
+ FhirContext theFhirContext, IBase theIssue, String theSystem, String theCode, String theText) {
BaseRuntimeElementCompositeDefinition<?> issueElement =
(BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition(theIssue.getClass());
BaseRuntimeChildDefinition detailsChildDef = issueElement.getChildByName("details");
-
- BaseRuntimeElementCompositeDefinition<?> codingDef =
- (BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition("Coding");
- ICompositeType coding = (ICompositeType) codingDef.newInstance();
-
- // System
- IPrimitiveType<?> system =
- (IPrimitiveType<?>) theFhirContext.getElementDefinition("uri").newInstance();
- system.setValueAsString(theSystem);
- codingDef.getChildByName("system").getMutator().addValue(coding, system);
-
- // Code
- IPrimitiveType<?> code =
- (IPrimitiveType<?>) theFhirContext.getElementDefinition("code").newInstance();
- code.setValueAsString(theCode);
- codingDef.getChildByName("code").getMutator().addValue(coding, code);
BaseRuntimeElementCompositeDefinition<?> ccDef =
(BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition("CodeableConcept");
-
ICompositeType codeableConcept = (ICompositeType) ccDef.newInstance();
- ccDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding);
+
+ if (isNotBlank(theSystem) || isNotBlank(theCode)) {
+ BaseRuntimeElementCompositeDefinition<?> codingDef =
+ (BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition("Coding");
+ ICompositeType coding = (ICompositeType) codingDef.newInstance();
+
+ // System
+ if (isNotBlank(theSystem)) {
+ IPrimitiveType<?> system = (IPrimitiveType<?>)
+ theFhirContext.getElementDefinition("uri").newInstance();
+ system.setValueAsString(theSystem);
+ codingDef.getChildByName("system").getMutator().addValue(coding, system);
+ }
+
+ // Code
+ if (isNotBlank(theCode)) {
+ IPrimitiveType<?> code = (IPrimitiveType<?>)
+ theFhirContext.getElementDefinition("code").newInstance();
+ code.setValueAsString(theCode);
+ codingDef.getChildByName("code").getMutator().addValue(coding, code);
+ }
+
+ ccDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding);
+ }
+
+ if (isNotBlank(theText)) {
+ IPrimitiveType<?> textElem = (IPrimitiveType<?>)
+ ccDef.getChildByName("text").getChildByName("text").newInstance(theText);
+ ccDef.getChildByName("text").getMutator().addValue(codeableConcept, textElem);
+ }
+
detailsChildDef.getMutator().addValue(theIssue, codeableConcept);
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopLimitAccumulator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopLimitAccumulator.java
new file mode 100644
index 000000000000..06de80765bbb
--- /dev/null
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopLimitAccumulator.java
@@ -0,0 +1,69 @@
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.util;
+
+import jakarta.annotation.Nonnull;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Stream;
+
+/**
+ * This class collects items from a stream up to a given limit and knows whether there are
+ * still more items beyond that limit.
+ *
+ * @param <T> the type of object being streamed
+ */
+public class StopLimitAccumulator<T> {
+ private final boolean isTruncated;
+ private final List<T> myList;
+
+ private StopLimitAccumulator(List<T> theList, boolean theIsTruncated) {
+ myList = Collections.unmodifiableList(theList);
+ isTruncated = theIsTruncated;
+ }
+
+ public static <T> StopLimitAccumulator<T> fromStreamAndLimit(@Nonnull Stream<T> theItemStream, long theLimit) {
+ assert theLimit > 0;
+ AtomicBoolean isBeyondLimit = new AtomicBoolean(false);
+ List<T> accumulator = new ArrayList<>();
+
+ theItemStream
+ .limit(theLimit + 1) // Fetch one extra item to see if there are any more items past our limit
+ .forEach(item -> {
+ if (accumulator.size() < theLimit) {
+ accumulator.add(item);
+ } else {
+ isBeyondLimit.set(true);
+ }
+ });
+ return new StopLimitAccumulator<>(accumulator, isBeyondLimit.get());
+ }
+
+ public boolean isTruncated() {
+ return isTruncated;
+ }
+
+ public List<T> getItemList() {
+ return myList;
+ }
+}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopLimitAccumulatorTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopLimitAccumulatorTest.java
new file mode 100644
index 000000000000..3f826424e3f1
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopLimitAccumulatorTest.java
@@ -0,0 +1,69 @@
+package ca.uhn.fhir.util;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+import java.util.stream.Stream;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+class StopLimitAccumulatorTest {
+
+ @Test
+ void testFromStreamAndLimit_withNoTruncation() {
+ // setup
+ Stream<Integer> stream = Stream.of(1, 2, 3, 4, 5);
+ int limit = 5;
+
+ // execute
+ StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
+
+ // verify
+ assertFalse(accumulator.isTruncated(), "The result should not be truncated");
+ assertEquals(List.of(1, 2, 3, 4, 5), accumulator.getItemList(), "The list should contain all items within the limit");
+ }
+
+ @Test
+ void testFromStreamAndLimit_withTruncation() {
+ // setup
+ Stream<Integer> stream = Stream.of(1, 2, 3, 4, 5, 6, 7);
+ int limit = 5;
+
+ // execute
+ StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
+
+ // verify
+ assertTrue(accumulator.isTruncated(), "The result should be truncated");
+ assertEquals(List.of(1, 2, 3, 4, 5), accumulator.getItemList(), "The list should contain only the items within the limit");
+ }
+
+ @Test
+ void testFromStreamAndLimit_withEmptyStream() {
+ // setup
+ Stream<Integer> stream = Stream.empty();
+ int limit = 5;
+
+ // execute
+ StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
+
+ // verify
+ assertFalse(accumulator.isTruncated(), "The result should not be truncated for an empty stream");
+ assertTrue(accumulator.getItemList().isEmpty(), "The list should be empty");
+ }
+
+ @Test
+ void testImmutabilityOfItemList() {
+ // setup
+ Stream<Integer> stream = Stream.of(1, 2, 3);
+ int limit = 3;
+
+ StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
+
+ // execute and Assert
+ List<Integer> itemList = accumulator.getItemList();
+ assertThrows(UnsupportedOperationException.class, () -> itemList.add(4), "The list should be immutable");
+ }
+}
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index 8eb733fffe3e..8f793eb84569 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml
index 872153e1c12b..e5ecadca0f08 100644
--- a/hapi-fhir-checkstyle/pom.xml
+++ b/hapi-fhir-checkstyle/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index 8634287b5442..91893f54d9fd 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index 641e9b220d91..a6be9c682ce3 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index d20c929503f0..69bc9c4daee4 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-client-apache-http5/pom.xml b/hapi-fhir-client-apache-http5/pom.xml
index 0b07128d239e..2b6900fa63b8 100644
--- a/hapi-fhir-client-apache-http5/pom.xml
+++ b/hapi-fhir-client-apache-http5/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index 03ef0f6d47b5..2e56ce4f53e6 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index c08fae28c8d1..e35e959013db 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index 3d10a57bf71e..11b14ba36945 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index c1c00c2ee1a1..8a1dae57e7f6 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index 96fcb0df0c20..1421595fc568 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index 497b40cb9c25..9a86748436ba 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index 07cd3ce9cc0f..dadc903e1465 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml
index d6636beece11..528e488ee930 100644
--- a/hapi-fhir-jpa/pom.xml
+++ b/hapi-fhir-jpa/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 756c6520f3ef..0f9f573c8204 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.7.15-SNAPSHOT</version>
+ <version>7.7.16-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImpl.java
index 06114707f5ec..5579d912d095 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImpl.java
@@ -31,11 +31,13 @@
import ca.uhn.fhir.jpa.api.pid.TypedResourcePid;
import ca.uhn.fhir.jpa.api.pid.TypedResourceStream;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
+import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
@@ -46,6 +48,7 @@
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
+import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Date;
import java.util.function.Supplier;
@@ -56,6 +59,8 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
private final IResourceTableDao myResourceTableDao;
+ private final IResourceLinkDao myResourceLinkDao;
+
private final MatchUrlService myMatchUrlService;
private final DaoRegistry myDaoRegistry;
@@ -71,11 +76,13 @@ public boolean isAllResourceTypeSupported() {
public Batch2DaoSvcImpl(
IResourceTableDao theResourceTableDao,
+ IResourceLinkDao theResourceLinkDao,
MatchUrlService theMatchUrlService,
DaoRegistry theDaoRegistry,
FhirContext theFhirContext,
IHapiTransactionService theTransactionService) {
myResourceTableDao = theResourceTableDao;
+ myResourceLinkDao = theResourceLinkDao;
myMatchUrlService = theMatchUrlService;
myDaoRegistry = theDaoRegistry;
myFhirContext = theFhirContext;
@@ -95,6 +102,11 @@ public IResourcePidStream fetchResourceIdStream(
}
}
+ @Override
+ public Stream<IdDt> streamSourceIdsThatReferenceTargetId(IIdType theTargetId) {
+ return myResourceLinkDao.streamSourceIdsForTargetFhirId(theTargetId.getResourceType(), theTargetId.getIdPart());
+ }
+
private Stream<TypedResourcePid> streamResourceIdsWithUrl(
Date theStart, Date theEnd, String theUrl, RequestPartitionId theRequestPartitionId) {
validateUrl(theUrl);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java
index df3bfd089a47..73b288359147 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java
@@ -44,12 +44,18 @@ public class Batch2SupportConfig {
@Bean
public IBatch2DaoSvc batch2DaoSvc(
IResourceTableDao theResourceTableDao,
+ IResourceLinkDao theResourceLinkDao,
MatchUrlService theMatchUrlService,
DaoRegistry theDaoRegistry,
FhirContext theFhirContext,
IHapiTransactionService theTransactionService) {
return new Batch2DaoSvcImpl(
- theResourceTableDao, theMatchUrlService, theDaoRegistry, theFhirContext, theTransactionService);
+ theResourceTableDao,
+ theResourceLinkDao,
+ theMatchUrlService,
+ theDaoRegistry,
+ theFhirContext,
+ theTransactionService);
}
@Bean
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
index c99844050d79..ce4bfd98c26d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
@@ -19,9 +19,11 @@
*/
package ca.uhn.fhir.jpa.config;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobSubmitterImpl;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.support.IValidationSupport;
@@ -55,6 +57,7 @@
import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
+import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
@@ -175,6 +178,7 @@
import ca.uhn.fhir.jpa.validation.ResourceLoaderImpl;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
@@ -930,7 +934,31 @@ public CacheTagDefinitionDao tagDefinitionDao(
}
@Bean
- public IReplaceReferencesSvc replaceReferencesSvc(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
- return new ReplaceReferencesSvcImpl(theFhirContext, theDaoRegistry);
+ public Batch2TaskHelper batch2TaskHelper() {
+ return new Batch2TaskHelper();
+ }
+
+ @Bean
+ public IReplaceReferencesSvc replaceReferencesSvc(
+ DaoRegistry theDaoRegistry,
+ HapiTransactionService theHapiTransactionService,
+ IResourceLinkDao theResourceLinkDao,
+ IJobCoordinator theJobCoordinator,
+ ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundle,
+ Batch2TaskHelper theBatch2TaskHelper,
+ JpaStorageSettings theStorageSettings) {
+ return new ReplaceReferencesSvcImpl(
+ theDaoRegistry,
+ theHapiTransactionService,
+ theResourceLinkDao,
+ theJobCoordinator,
+ theReplaceReferencesPatchBundle,
+ theBatch2TaskHelper,
+ theStorageSettings);
+ }
+
+ @Bean
+ public ReplaceReferencesPatchBundleSvc replaceReferencesPatchBundleSvc(DaoRegistry theDaoRegistry) {
+ return new ReplaceReferencesPatchBundleSvc(theDaoRegistry);
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java
index 58f8197c0758..a92ce4942746 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java
@@ -19,17 +19,26 @@
*/
package ca.uhn.fhir.jpa.config.r4;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.IDaoRegistry;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.config.GeneratedDaoAndResourceProviderConfigR4;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.graphql.GraphQLProviderWithIntrospection;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.provider.IReplaceReferencesSvc;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider;
+import ca.uhn.fhir.jpa.provider.merge.PatientMergeProvider;
+import ca.uhn.fhir.jpa.provider.merge.ResourceMergeService;
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
import ca.uhn.fhir.jpa.term.TermVersionAdapterSvcR4;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
@@ -96,4 +105,30 @@ public ITermLoaderSvc termLoaderService(
ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) {
return new TermLoaderSvcImpl(theDeferredStorageSvc, theCodeSystemStorageSvc);
}
+
+ @Bean
+ public ResourceMergeService resourceMergeService(
+ DaoRegistry theDaoRegistry,
+ IReplaceReferencesSvc theReplaceReferencesSvc,
+ HapiTransactionService theHapiTransactionService,
+ IRequestPartitionHelperSvc theRequestPartitionHelperSvc,
+ IJobCoordinator theJobCoordinator,
+ Batch2TaskHelper theBatch2TaskHelper,
+ JpaStorageSettings theStorageSettings) {
+
+ return new ResourceMergeService(
+ theStorageSettings,
+ theDaoRegistry,
+ theReplaceReferencesSvc,
+ theHapiTransactionService,
+ theRequestPartitionHelperSvc,
+ theJobCoordinator,
+ theBatch2TaskHelper);
+ }
+
+ @Bean
+ public PatientMergeProvider patientMergeProvider(
+ FhirContext theFhirContext, DaoRegistry theDaoRegistry, ResourceMergeService theResourceMergeService) {
+ return new PatientMergeProvider(theFhirContext, theDaoRegistry, theResourceMergeService);
+ }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java
index 8c2ddcd522b9..cd4980b32b8e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java
@@ -21,12 +21,14 @@
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
+import ca.uhn.fhir.model.primitive.IdDt;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
+import java.util.stream.Stream;
public interface IResourceLinkDao extends JpaRepository<ResourceLink, Long>, IHapiFhirJpaRepository {
@@ -46,4 +48,30 @@ public interface IResourceLinkDao extends JpaRepository, IHa
*/
@Query("SELECT t FROM ResourceLink t LEFT JOIN FETCH t.myTargetResource tr WHERE t.myId in :pids")
List<ResourceLink> findByPidAndFetchTargetDetails(@Param("pids") List<JpaPid> thePids);
+
+ /**
+ * Stream Resource Ids of all resources that have a reference to the provided resource id
+ *
+ * @param theTargetResourceType the resource type part of the id
+ * @param theTargetResourceFhirId the value part of the id
+ * @return
+ */
+ @Query(
+ "SELECT DISTINCT new ca.uhn.fhir.model.primitive.IdDt(t.mySourceResourceType, t.mySourceResource.myFhirId) FROM ResourceLink t WHERE t.myTargetResourceType = :resourceType AND t.myTargetResource.myFhirId = :resourceFhirId")
+ Stream<IdDt> streamSourceIdsForTargetFhirId(
+ @Param("resourceType") String theTargetResourceType,
+ @Param("resourceFhirId") String theTargetResourceFhirId);
+
+ /**
+ * Count the number of resources that have a reference to the provided resource id
+ *
+ * @param theTargetResourceType the resource type part of the id
+ * @param theTargetResourceFhirId the value part of the id
+ * @return
+ */
+ @Query(
+ "SELECT COUNT(DISTINCT t.mySourceResourcePid) FROM ResourceLink t WHERE t.myTargetResourceType = :resourceType AND t.myTargetResource.myFhirId = :resourceFhirId")
+ Integer countResourcesTargetingFhirTypeAndFhirId(
+ @Param("resourceType") String theTargetResourceType,
+ @Param("resourceFhirId") String theTargetResourceFhirId);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java
index d48e77668753..136eee4120fe 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java
@@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.provider;
+import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
import ca.uhn.fhir.jpa.api.dao.PatientEverythingParameters;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
@@ -42,6 +43,7 @@
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.springframework.beans.factory.annotation.Autowired;
import java.util.Arrays;
import java.util.List;
@@ -50,6 +52,8 @@
public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> extends BaseJpaResourceProvider<T> {
+ @Autowired
+ private FhirContext myFhirContext;
/**
* Patient/123/$everything
*/
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/IReplaceReferencesSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/IReplaceReferencesSvc.java
index fb1112c63541..fc96b377af27 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/IReplaceReferencesSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/IReplaceReferencesSvc.java
@@ -19,13 +19,25 @@
*/
package ca.uhn.fhir.jpa.provider;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseParameters;
+import org.hl7.fhir.instance.model.api.IIdType;
/**
- * Contract for service which replaces references
+ * Find all references to a source resource and replace them with references to the provided target
*/
public interface IReplaceReferencesSvc {
- IBaseParameters replaceReferences(String theSourceRefId, String theTargetRefId, RequestDetails theRequest);
+ /**
+ * Find all references to a source resource and replace them with references to the provided target
+ */
+ IBaseParameters replaceReferences(
+ ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails);
+
+ /**
+ * To support $merge preview mode, provide a count of how many references would be updated if replaceReferences
+ * was called
+ */
+ Integer countResourcesReferencingResource(IIdType theResourceId, RequestDetails theRequestDetails);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java
index 9954a7e7c7d2..b0e882237629 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java
@@ -19,30 +19,46 @@
*/
package ca.uhn.fhir.jpa.provider;
+import ca.uhn.fhir.batch2.jobs.merge.MergeResourceHelper;
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.model.api.annotation.Description;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.ParametersUtil;
+import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static software.amazon.awssdk.utils.StringUtils.isBlank;
public final class JpaSystemProvider<T, MT> extends BaseJpaSystemProvider<T, MT> {
+ @Autowired
+ private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Description(
"Marks all currently existing resources of a given type, or all resources of all types, for reindexing.")
@@ -145,13 +161,59 @@ public IBaseBundle transaction(RequestDetails theRequestDetails, @TransactionPar
@Operation(name = ProviderConstants.OPERATION_REPLACE_REFERENCES, global = true)
@Description(
value =
- "This operation searches for all references matching the provided id and updates them to references to the provided newReferenceTargetId.",
+ "This operation searches for all references matching the provided id and updates them to references to the provided target-reference-id.",
shortDefinition = "Repoints referencing resources to another resources instance")
public IBaseParameters replaceReferences(
- @OperationParam(name = ProviderConstants.PARAM_SOURCE_REFERENCE_ID) String theSourceId,
- @OperationParam(name = ProviderConstants.PARAM_TARGET_REFERENCE_ID) String theTargetId,
- RequestDetails theRequest) {
+ @OperationParam(
+ name = ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID,
+ min = 1,
+ typeName = "string")
+ IPrimitiveType<String> theSourceId,
+ @OperationParam(
+ name = ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID,
+ min = 1,
+ typeName = "string")
+ IPrimitiveType<String> theTargetId,
+ @OperationParam(
+ name = ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT,
+ typeName = "unsignedInt")
+ IPrimitiveType<Integer> theResourceLimit,
+ ServletRequestDetails theServletRequest) {
+ startRequest(theServletRequest);
- return getReplaceReferencesSvc().replaceReferences(theSourceId, theTargetId, theRequest);
+ try {
+ validateReplaceReferencesParams(theSourceId.getValue(), theTargetId.getValue());
+
+ int resourceLimit = MergeResourceHelper.setResourceLimitFromParameter(myStorageSettings, theResourceLimit);
+
+ IdDt sourceId = new IdDt(theSourceId.getValue());
+ IdDt targetId = new IdDt(theTargetId.getValue());
+ RequestPartitionId partitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(
+ theServletRequest, ReadPartitionIdRequestDetails.forRead(targetId));
+ ReplaceReferencesRequest replaceReferencesRequest =
+ new ReplaceReferencesRequest(sourceId, targetId, resourceLimit, partitionId);
+ IBaseParameters retval =
+ getReplaceReferencesSvc().replaceReferences(replaceReferencesRequest, theServletRequest);
+ if (ParametersUtil.getNamedParameter(getContext(), retval, OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK)
+ .isPresent()) {
+ HttpServletResponse response = theServletRequest.getServletResponse();
+ response.setStatus(HttpServletResponse.SC_ACCEPTED);
+ }
+ return retval;
+ } finally {
+ endRequest(theServletRequest);
+ }
+ }
+
+ private static void validateReplaceReferencesParams(String theSourceId, String theTargetId) {
+ if (isBlank(theSourceId)) {
+ throw new InvalidRequestException(Msg.code(2583) + "Parameter '"
+ + OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID + "' is blank");
+ }
+
+ if (isBlank(theTargetId)) {
+ throw new InvalidRequestException(Msg.code(2584) + "Parameter '"
+ + OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID + "' is blank");
+ }
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ReplaceReferencesSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ReplaceReferencesSvcImpl.java
index a0f2fddfae7d..af1262c86554 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ReplaceReferencesSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ReplaceReferencesSvcImpl.java
@@ -19,222 +19,141 @@
*/
package ca.uhn.fhir.jpa.provider;
-import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesJobParameters;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.model.api.Include;
+import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PatchTypeEnum;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.param.StringParam;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
-import ca.uhn.fhir.util.ResourceReferenceInfo;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
+import ca.uhn.fhir.util.StopLimitAccumulator;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseParameters;
-import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r4.model.CodeType;
+import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Parameters;
-import org.hl7.fhir.r4.model.Reference;
-import org.hl7.fhir.r4.model.Resource;
-import org.hl7.fhir.r4.model.StringType;
-import org.hl7.fhir.r4.model.Type;
-
-import java.security.InvalidParameterException;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-import static ca.uhn.fhir.jpa.patch.FhirPatch.OPERATION_REPLACE;
-import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_OPERATION;
-import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_PATH;
-import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_TYPE;
-import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_VALUE;
-import static ca.uhn.fhir.rest.api.Constants.PARAM_ID;
-import static ca.uhn.fhir.rest.server.provider.ProviderConstants.PARAM_SOURCE_REFERENCE_ID;
-import static ca.uhn.fhir.rest.server.provider.ProviderConstants.PARAM_TARGET_REFERENCE_ID;
-import static software.amazon.awssdk.utils.StringUtils.isBlank;
+import org.hl7.fhir.r4.model.Task;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-public class ReplaceReferencesSvcImpl implements IReplaceReferencesSvc {
+import java.util.stream.Stream;
- private final FhirContext myFhirContext;
- private final DaoRegistry myDaoRegistry;
+import static ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesAppCtx.JOB_REPLACE_REFERENCES;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;
- public ReplaceReferencesSvcImpl(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
- myFhirContext = theFhirContext;
+public class ReplaceReferencesSvcImpl implements IReplaceReferencesSvc {
+ private static final Logger ourLog = LoggerFactory.getLogger(ReplaceReferencesSvcImpl.class);
+ public static final String RESOURCE_TYPES_SYSTEM = "http://hl7.org/fhir/ValueSet/resource-types";
+ private final DaoRegistry myDaoRegistry;
+ private final HapiTransactionService myHapiTransactionService;
+ private final IResourceLinkDao myResourceLinkDao;
+ private final IJobCoordinator myJobCoordinator;
+ private final ReplaceReferencesPatchBundleSvc myReplaceReferencesPatchBundleSvc;
+ private final Batch2TaskHelper myBatch2TaskHelper;
+ private final JpaStorageSettings myStorageSettings;
+
+ public ReplaceReferencesSvcImpl(
+ DaoRegistry theDaoRegistry,
+ HapiTransactionService theHapiTransactionService,
+ IResourceLinkDao theResourceLinkDao,
+ IJobCoordinator theJobCoordinator,
+ ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc,
+ Batch2TaskHelper theBatch2TaskHelper,
+ JpaStorageSettings theStorageSettings) {
myDaoRegistry = theDaoRegistry;
+ myHapiTransactionService = theHapiTransactionService;
+ myResourceLinkDao = theResourceLinkDao;
+ myJobCoordinator = theJobCoordinator;
+ myReplaceReferencesPatchBundleSvc = theReplaceReferencesPatchBundleSvc;
+ myBatch2TaskHelper = theBatch2TaskHelper;
+ myStorageSettings = theStorageSettings;
}
@Override
- public IBaseParameters replaceReferences(String theSourceRefId, String theTargetRefId, RequestDetails theRequest) {
-
- validateParameters(theSourceRefId, theTargetRefId);
- IIdType sourceRefId = new IdDt(theSourceRefId);
- IIdType targetRefId = new IdDt(theTargetRefId);
-
- // todo jm: this could be problematic depending on referenceing object set size, however we are adding
- // batch job option to handle that case as part of this feature
- List extends IBaseResource> referencingResources = findReferencingResourceIds(sourceRefId, theRequest);
-
- return replaceReferencesInTransaction(referencingResources, sourceRefId, targetRefId, theRequest);
- }
-
- private IBaseParameters replaceReferencesInTransaction(
- List extends IBaseResource> theReferencingResources,
- IIdType theCurrentTargetId,
- IIdType theNewTargetId,
- RequestDetails theRequest) {
-
- Parameters resultParams = new Parameters();
- // map resourceType -> map resourceId -> patch Parameters
- Map> parametersMap =
- buildPatchParameterMap(theReferencingResources, theCurrentTargetId, theNewTargetId);
-
- for (Map.Entry> mapEntry : parametersMap.entrySet()) {
- String resourceType = mapEntry.getKey();
- IFhirResourceDao> resDao = myDaoRegistry.getResourceDao(resourceType);
- if (resDao == null) {
- throw new InternalErrorException(
- Msg.code(2588) + "No DAO registered for resource type: " + resourceType);
- }
-
- // patch each resource of resourceType
- patchResourceTypeResources(mapEntry, resDao, resultParams, theRequest);
- }
-
- return resultParams;
- }
-
- private void patchResourceTypeResources(
- Map.Entry> mapEntry,
- IFhirResourceDao> resDao,
- Parameters resultParams,
- RequestDetails theRequest) {
-
- for (Map.Entry idParamMapEntry :
- mapEntry.getValue().entrySet()) {
- IIdType resourceId = idParamMapEntry.getKey();
- Parameters parameters = idParamMapEntry.getValue();
-
- MethodOutcome result =
- resDao.patch(resourceId, null, PatchTypeEnum.FHIR_PATCH_JSON, null, parameters, theRequest);
-
- resultParams.addParameter().setResource((Resource) result.getOperationOutcome());
+ public IBaseParameters replaceReferences(
+ ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails) {
+ theReplaceReferencesRequest.validateOrThrowInvalidParameterException();
+
+ if (theRequestDetails.isPreferAsync()) {
+ return replaceReferencesPreferAsync(theReplaceReferencesRequest, theRequestDetails);
+ } else {
+ return replaceReferencesPreferSync(theReplaceReferencesRequest, theRequestDetails);
}
}
- private Map> buildPatchParameterMap(
- List extends IBaseResource> theReferencingResources,
- IIdType theCurrentReferencedResourceId,
- IIdType theNewReferencedResourceId) {
- Map> paramsMap = new HashMap<>();
-
- for (IBaseResource referencingResource : theReferencingResources) {
- // resource can have more than one reference to the same target resource
- for (ResourceReferenceInfo refInfo :
- myFhirContext.newTerser().getAllResourceReferences(referencingResource)) {
-
- addReferenceToMapIfForSource(
- theCurrentReferencedResourceId,
- theNewReferencedResourceId,
- referencingResource,
- refInfo,
- paramsMap);
- }
- }
- return paramsMap;
+ @Override
+ public Integer countResourcesReferencingResource(IIdType theResourceId, RequestDetails theRequestDetails) {
+ return myHapiTransactionService
+ .withRequest(theRequestDetails)
+ .execute(() -> myResourceLinkDao.countResourcesTargetingFhirTypeAndFhirId(
+ theResourceId.getResourceType(), theResourceId.getIdPart()));
}
- private void addReferenceToMapIfForSource(
- IIdType theCurrentReferencedResourceId,
- IIdType theNewReferencedResourceId,
- IBaseResource referencingResource,
- ResourceReferenceInfo refInfo,
- Map> paramsMap) {
- if (!refInfo.getResourceReference()
- .getReferenceElement()
- .toUnqualifiedVersionless()
- .getValueAsString()
- .equals(theCurrentReferencedResourceId
- .toUnqualifiedVersionless()
- .getValueAsString())) {
-
- // not a reference to the resource being replaced
- return;
- }
-
- Parameters.ParametersParameterComponent paramComponent = createReplaceReferencePatchOperation(
- referencingResource.fhirType() + "." + refInfo.getName(),
- new Reference(
- theNewReferencedResourceId.toUnqualifiedVersionless().getValueAsString()));
-
- paramsMap
- // preserve order, in case it could matter
- .computeIfAbsent(referencingResource.fhirType(), k -> new LinkedHashMap<>())
- .computeIfAbsent(referencingResource.getIdElement(), k -> new Parameters())
- .addParameter(paramComponent);
+ private IBaseParameters replaceReferencesPreferAsync(
+ ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails) {
+
+ Task task = myBatch2TaskHelper.startJobAndCreateAssociatedTask(
+ myDaoRegistry.getResourceDao(Task.class),
+ theRequestDetails,
+ myJobCoordinator,
+ JOB_REPLACE_REFERENCES,
+ new ReplaceReferencesJobParameters(
+ theReplaceReferencesRequest, myStorageSettings.getDefaultTransactionEntriesForWrite()));
+
+ Parameters retval = new Parameters();
+ task.setIdElement(task.getIdElement().toUnqualifiedVersionless());
+ task.getMeta().setVersionId(null);
+ retval.addParameter()
+ .setName(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK)
+ .setResource(task);
+ return retval;
}
+ /**
+ * Try to perform the operation synchronously. However, if there is more than a page of results, fall back to asynchronous operation
+ */
@Nonnull
- private Parameters.ParametersParameterComponent createReplaceReferencePatchOperation(
- String thePath, Type theValue) {
-
- Parameters.ParametersParameterComponent operation = new Parameters.ParametersParameterComponent();
- operation.setName(PARAMETER_OPERATION);
- operation.addPart().setName(PARAMETER_TYPE).setValue(new CodeType(OPERATION_REPLACE));
- operation.addPart().setName(PARAMETER_PATH).setValue(new StringType(thePath));
- operation.addPart().setName(PARAMETER_VALUE).setValue(theValue);
- return operation;
- }
-
- private List extends IBaseResource> findReferencingResourceIds(
- IIdType theSourceRefIdParam, RequestDetails theRequest) {
- IFhirResourceDao> dao = getDao(theSourceRefIdParam.getResourceType());
- if (dao == null) {
- throw new InternalErrorException(
- Msg.code(2582) + "Couldn't obtain DAO for resource type" + theSourceRefIdParam.getResourceType());
+ private IBaseParameters replaceReferencesPreferSync(
+ ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails) {
+
+ // TODO KHS get partition from request
+ StopLimitAccumulator<IdDt> accumulator = myHapiTransactionService
+ .withRequest(theRequestDetails)
+ .execute(() -> getAllPidsWithLimit(theReplaceReferencesRequest));
+
+ if (accumulator.isTruncated()) {
+ throw new PreconditionFailedException(Msg.code(2597) + "Number of resources with references to "
+ + theReplaceReferencesRequest.sourceId
+ + " exceeds the resource-limit "
+ + theReplaceReferencesRequest.resourceLimit
+ + ". Submit the request asynchronously by adding the HTTP Header 'Prefer: respond-async'.");
}
- SearchParameterMap parameterMap = new SearchParameterMap();
- parameterMap.add(PARAM_ID, new StringParam(theSourceRefIdParam.getValue()));
- parameterMap.addRevInclude(new Include("*"));
- return dao.search(parameterMap, theRequest).getAllResources();
- }
+ Bundle result = myReplaceReferencesPatchBundleSvc.patchReferencingResources(
+ theReplaceReferencesRequest, accumulator.getItemList(), theRequestDetails);
- private IFhirResourceDao> getDao(String theResourceName) {
- return myDaoRegistry.getResourceDao(theResourceName);
+ Parameters retval = new Parameters();
+ retval.addParameter()
+ .setName(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME)
+ .setResource(result);
+ return retval;
}
- private void validateParameters(String theSourceRefIdParam, String theTargetRefIdParam) {
- if (isBlank(theSourceRefIdParam)) {
- throw new InvalidParameterException(
- Msg.code(2583) + "Parameter '" + PARAM_SOURCE_REFERENCE_ID + "' is blank");
- }
-
- if (isBlank(theTargetRefIdParam)) {
- throw new InvalidParameterException(
- Msg.code(2584) + "Parameter '" + PARAM_TARGET_REFERENCE_ID + "' is blank");
- }
-
- IIdType sourceId = new IdDt(theSourceRefIdParam);
- if (isBlank(sourceId.getResourceType())) {
- throw new InvalidParameterException(
- Msg.code(2585) + "'" + PARAM_SOURCE_REFERENCE_ID + "' must be a resource type qualified id");
- }
-
- IIdType targetId = new IdDt(theTargetRefIdParam);
- if (isBlank(targetId.getResourceType())) {
- throw new InvalidParameterException(
- Msg.code(2586) + "'" + PARAM_TARGET_REFERENCE_ID + "' must be a resource type qualified id");
- }
+ private @Nonnull StopLimitAccumulator<IdDt> getAllPidsWithLimit(
+ ReplaceReferencesRequest theReplaceReferencesRequest) {
- if (!targetId.getResourceType().equals(sourceId.getResourceType())) {
- throw new InvalidParameterException(
- Msg.code(2587) + "Source and target id parameters must be for the same resource type");
- }
+ Stream<IdDt> idStream = myResourceLinkDao.streamSourceIdsForTargetFhirId(
+ theReplaceReferencesRequest.sourceId.getResourceType(),
+ theReplaceReferencesRequest.sourceId.getIdPart());
+ StopLimitAccumulator<IdDt> accumulator =
+ StopLimitAccumulator.fromStreamAndLimit(idStream, theReplaceReferencesRequest.resourceLimit);
+ return accumulator;
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/BaseMergeOperationInputParameters.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/BaseMergeOperationInputParameters.java
new file mode 100644
index 000000000000..4fdd8c77d431
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/BaseMergeOperationInputParameters.java
@@ -0,0 +1,147 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.provider.merge;
+
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.util.CanonicalIdentifier;
+import org.hl7.fhir.instance.model.api.IBaseReference;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.r4.model.Patient;
+
+import java.util.List;
+
+/**
+ * See Patient $merge spec
+ */
+public abstract class BaseMergeOperationInputParameters {
+
+ private List<CanonicalIdentifier> mySourceResourceIdentifiers;
+ private List<CanonicalIdentifier> myTargetResourceIdentifiers;
+ private IBaseReference mySourceResource;
+ private IBaseReference myTargetResource;
+ private boolean myPreview;
+ private boolean myDeleteSource;
+ private IBaseResource myResultResource;
+ private final int myResourceLimit;
+
+ protected BaseMergeOperationInputParameters(int theResourceLimit) {
+ myResourceLimit = theResourceLimit;
+ }
+
+ public abstract String getSourceResourceParameterName();
+
+ public abstract String getTargetResourceParameterName();
+
+ public abstract String getSourceIdentifiersParameterName();
+
+ public abstract String getTargetIdentifiersParameterName();
+
+ public abstract String getResultResourceParameterName();
+
+ public List<CanonicalIdentifier> getSourceIdentifiers() {
+ return mySourceResourceIdentifiers;
+ }
+
+ public boolean hasAtLeastOneSourceIdentifier() {
+ return mySourceResourceIdentifiers != null && !mySourceResourceIdentifiers.isEmpty();
+ }
+
+ public void setSourceResourceIdentifiers(List<CanonicalIdentifier> theSourceIdentifiers) {
+ this.mySourceResourceIdentifiers = theSourceIdentifiers;
+ }
+
+ public List<CanonicalIdentifier> getTargetIdentifiers() {
+ return myTargetResourceIdentifiers;
+ }
+
+ public boolean hasAtLeastOneTargetIdentifier() {
+ return myTargetResourceIdentifiers != null && !myTargetResourceIdentifiers.isEmpty();
+ }
+
+ public void setTargetResourceIdentifiers(List<CanonicalIdentifier> theTargetIdentifiers) {
+ this.myTargetResourceIdentifiers = theTargetIdentifiers;
+ }
+
+ public boolean getPreview() {
+ return myPreview;
+ }
+
+ public void setPreview(boolean thePreview) {
+ this.myPreview = thePreview;
+ }
+
+ public boolean getDeleteSource() {
+ return myDeleteSource;
+ }
+
+ public void setDeleteSource(boolean theDeleteSource) {
+ this.myDeleteSource = theDeleteSource;
+ }
+
+ public IBaseResource getResultResource() {
+ return myResultResource;
+ }
+
+ public void setResultResource(IBaseResource theResultResource) {
+ this.myResultResource = theResultResource;
+ }
+
+ public IBaseReference getSourceResource() {
+ return mySourceResource;
+ }
+
+ public void setSourceResource(IBaseReference theSourceResource) {
+ this.mySourceResource = theSourceResource;
+ }
+
+ public IBaseReference getTargetResource() {
+ return myTargetResource;
+ }
+
+ public void setTargetResource(IBaseReference theTargetResource) {
+ this.myTargetResource = theTargetResource;
+ }
+
+ public int getResourceLimit() {
+ return myResourceLimit;
+ }
+
+ public MergeJobParameters asMergeJobParameters(
+ FhirContext theFhirContext,
+ JpaStorageSettings theStorageSettings,
+ Patient theSourceResource,
+ Patient theTargetResource,
+ RequestPartitionId thePartitionId) {
+ MergeJobParameters retval = new MergeJobParameters();
+ if (getResultResource() != null) {
+ retval.setResultResource(theFhirContext.newJsonParser().encodeResourceToString(getResultResource()));
+ }
+ retval.setDeleteSource(getDeleteSource());
+ retval.setBatchSize(theStorageSettings.getDefaultTransactionEntriesForWrite());
+ retval.setSourceId(new FhirIdJson(theSourceResource.getIdElement().toVersionless()));
+ retval.setTargetId(new FhirIdJson(theTargetResource.getIdElement().toVersionless()));
+ retval.setPartitionId(thePartitionId);
+ return retval;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeOperationOutcome.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeOperationOutcome.java
new file mode 100644
index 000000000000..4953b667e6ec
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeOperationOutcome.java
@@ -0,0 +1,65 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.provider.merge;
+
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+/**
+ * See Patient $merge spec
+ */
+public class MergeOperationOutcome {
+ private IBaseOperationOutcome myOperationOutcome;
+ private int myHttpStatusCode;
+ private IBaseResource myUpdatedTargetResource;
+ private IBaseResource myTask;
+
+ public IBaseOperationOutcome getOperationOutcome() {
+ return myOperationOutcome;
+ }
+
+ public void setOperationOutcome(IBaseOperationOutcome theOperationOutcome) {
+ this.myOperationOutcome = theOperationOutcome;
+ }
+
+ public int getHttpStatusCode() {
+ return myHttpStatusCode;
+ }
+
+ public void setHttpStatusCode(int theHttpStatusCode) {
+ this.myHttpStatusCode = theHttpStatusCode;
+ }
+
+ public IBaseResource getUpdatedTargetResource() {
+ return myUpdatedTargetResource;
+ }
+
+ public void setUpdatedTargetResource(IBaseResource theUpdatedTargetResource) {
+ this.myUpdatedTargetResource = theUpdatedTargetResource;
+ }
+
+ public IBaseResource getTask() {
+ return myTask;
+ }
+
+ public void setTask(IBaseResource theTask) {
+ this.myTask = theTask;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeValidationResult.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeValidationResult.java
new file mode 100644
index 000000000000..da020d659fea
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeValidationResult.java
@@ -0,0 +1,45 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.provider.merge;
+
+import org.hl7.fhir.r4.model.Patient;
+
+class MergeValidationResult {
+ final Patient sourceResource;
+ final Patient targetResource;
+ final boolean isValid;
+ final Integer httpStatusCode;
+
+ private MergeValidationResult(
+ boolean theIsValid, Integer theHttpStatusCode, Patient theSourceResource, Patient theTargetResource) {
+ isValid = theIsValid;
+ httpStatusCode = theHttpStatusCode;
+ sourceResource = theSourceResource;
+ targetResource = theTargetResource;
+ }
+
+ public static MergeValidationResult invalidResult(int theHttpStatusCode) {
+ return new MergeValidationResult(false, theHttpStatusCode, null, null);
+ }
+
+ public static MergeValidationResult validResult(Patient theSourceResource, Patient theTargetResource) {
+ return new MergeValidationResult(true, null, theSourceResource, theTargetResource);
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeValidationService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeValidationService.java
new file mode 100644
index 000000000000..f64dca05c78d
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/MergeValidationService.java
@@ -0,0 +1,462 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.provider.merge;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.param.TokenAndListParam;
+import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.util.CanonicalIdentifier;
+import ca.uhn.fhir.util.OperationOutcomeUtil;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.instance.model.api.IBaseReference;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_400_BAD_REQUEST;
+import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_422_UNPROCESSABLE_ENTITY;
+
+/**
+ * Supporting class that validates input parameters to {@link ResourceMergeService}.
+ */
+class MergeValidationService {
+ private final FhirContext myFhirContext;
+ private final IFhirResourceDao myPatientDao;
+
+ public MergeValidationService(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
+ myFhirContext = theFhirContext;
+ myPatientDao = theDaoRegistry.getResourceDao(Patient.class);
+ }
+
+ MergeValidationResult validate(
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ RequestDetails theRequestDetails,
+ MergeOperationOutcome theMergeOutcome) {
+
+ IBaseOperationOutcome operationOutcome = theMergeOutcome.getOperationOutcome();
+
+ if (!validateMergeOperationParameters(theMergeOperationParameters, operationOutcome)) {
+ return MergeValidationResult.invalidResult(STATUS_HTTP_400_BAD_REQUEST);
+ }
+
+ // cast to Patient, since we only support merging Patient resources for now
+ Patient sourceResource =
+ (Patient) resolveSourceResource(theMergeOperationParameters, theRequestDetails, operationOutcome);
+
+ if (sourceResource == null) {
+ return MergeValidationResult.invalidResult(STATUS_HTTP_422_UNPROCESSABLE_ENTITY);
+ }
+
+ // cast to Patient, since we only support merging Patient resources for now
+ Patient targetResource =
+ (Patient) resolveTargetResource(theMergeOperationParameters, theRequestDetails, operationOutcome);
+
+ if (targetResource == null) {
+ return MergeValidationResult.invalidResult(STATUS_HTTP_422_UNPROCESSABLE_ENTITY);
+ }
+
+ if (!validateSourceAndTargetAreSuitableForMerge(sourceResource, targetResource, operationOutcome)) {
+ return MergeValidationResult.invalidResult(STATUS_HTTP_422_UNPROCESSABLE_ENTITY);
+ }
+
+ if (!validateResultResourceIfExists(
+ theMergeOperationParameters, targetResource, sourceResource, operationOutcome)) {
+ return MergeValidationResult.invalidResult(STATUS_HTTP_400_BAD_REQUEST);
+ }
+ return MergeValidationResult.validResult(sourceResource, targetResource);
+ }
+
+ private boolean validateResultResourceIfExists(
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ Patient theResolvedTargetResource,
+ Patient theResolvedSourceResource,
+ IBaseOperationOutcome theOperationOutcome) {
+
+ if (theMergeOperationParameters.getResultResource() == null) {
+ // result resource is not provided, no further validation is needed
+ return true;
+ }
+
+ boolean retval = true;
+
+ Patient theResultResource = (Patient) theMergeOperationParameters.getResultResource();
+
+ // validate the result resource's id as same as the target resource
+ if (!theResolvedTargetResource.getIdElement().toVersionless().equals(theResultResource.getIdElement())) {
+ String msg = String.format(
+ "'%s' must have the same versionless id as the actual resolved target resource '%s'. "
+ + "The actual resolved target resource's id is: '%s'",
+ theMergeOperationParameters.getResultResourceParameterName(),
+ theResultResource.getIdElement(),
+ theResolvedTargetResource.getIdElement().toVersionless().getValue());
+ addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
+ retval = false;
+ }
+
+ // validate the result resource contains the identifiers provided in the target identifiers param
+ if (theMergeOperationParameters.hasAtLeastOneTargetIdentifier()
+ && !hasAllIdentifiers(theResultResource, theMergeOperationParameters.getTargetIdentifiers())) {
+ String msg = String.format(
+ "'%s' must have all the identifiers provided in %s",
+ theMergeOperationParameters.getResultResourceParameterName(),
+ theMergeOperationParameters.getTargetIdentifiersParameterName());
+ addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
+ retval = false;
+ }
+
+ // if the source resource is not being deleted, the result resource must have a replaces link to the source
+ // resource
+ // if the source resource is being deleted, the result resource must not have a replaces link to the source
+ // resource
+ if (!validateResultResourceReplacesLinkToSourceResource(
+ theResultResource,
+ theResolvedSourceResource,
+ theMergeOperationParameters.getResultResourceParameterName(),
+ theMergeOperationParameters.getDeleteSource(),
+ theOperationOutcome)) {
+ retval = false;
+ }
+
+ return retval;
+ }
+
+ private void addErrorToOperationOutcome(IBaseOperationOutcome theOutcome, String theDiagnosticMsg, String theCode) {
+ OperationOutcomeUtil.addIssue(myFhirContext, theOutcome, "error", theDiagnosticMsg, null, theCode);
+ }
+
+ private boolean hasAllIdentifiers(Patient theResource, List theIdentifiers) {
+
+ List identifiersInResource = theResource.getIdentifier();
+ for (CanonicalIdentifier identifier : theIdentifiers) {
+ boolean identifierFound = identifiersInResource.stream()
+ .anyMatch(i -> i.getSystem()
+ .equals(identifier.getSystemElement().getValueAsString())
+ && i.getValue().equals(identifier.getValueElement().getValueAsString()));
+
+ if (!identifierFound) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private boolean validateResultResourceReplacesLinkToSourceResource(
+ Patient theResultResource,
+ Patient theResolvedSourceResource,
+ String theResultResourceParameterName,
+ boolean theDeleteSource,
+ IBaseOperationOutcome theOperationOutcome) {
+ // the result resource must have the replaces link set to the source resource
+ List replacesLinkToSourceResource = getLinksToResource(
+ theResultResource, Patient.LinkType.REPLACES, theResolvedSourceResource.getIdElement());
+
+ if (theDeleteSource) {
+ if (!replacesLinkToSourceResource.isEmpty()) {
+ String msg = String.format(
+ "'%s' must not have a 'replaces' link to the source resource "
+ + "when the source resource will be deleted, as the link may prevent deleting the source "
+ + "resource.",
+ theResultResourceParameterName);
+ addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
+ return false;
+ }
+ } else {
+ if (replacesLinkToSourceResource.isEmpty()) {
+ String msg = String.format(
+ "'%s' must have a 'replaces' link to the source resource.", theResultResourceParameterName);
+ addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
+ return false;
+ }
+
+ if (replacesLinkToSourceResource.size() > 1) {
+ String msg = String.format(
+ "'%s' has multiple 'replaces' links to the source resource. There should be only one.",
+ theResultResourceParameterName);
+ addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private List getLinksToResource(
+ Patient theResource, Patient.LinkType theLinkType, IIdType theResourceId) {
+ List links = getLinksOfTypeWithNonNullReference(theResource, theLinkType);
+ return links.stream()
+ .filter(r -> theResourceId.toVersionless().getValue().equals(r.getReference()))
+ .collect(Collectors.toList());
+ }
+
+ private List getLinksOfTypeWithNonNullReference(Patient theResource, Patient.LinkType theLinkType) {
+ List links = new ArrayList<>();
+ if (theResource.hasLink()) {
+ for (Patient.PatientLinkComponent link : theResource.getLink()) {
+ if (theLinkType.equals(link.getType()) && link.hasOther()) {
+ links.add(link.getOther());
+ }
+ }
+ }
+ return links;
+ }
+
+ private boolean validateSourceAndTargetAreSuitableForMerge(
+ Patient theSourceResource, Patient theTargetResource, IBaseOperationOutcome outcome) {
+
+ if (theSourceResource.getId().equalsIgnoreCase(theTargetResource.getId())) {
+ String msg = "Source and target resources are the same resource.";
+ // What is the right code to use in these cases?
+ addErrorToOperationOutcome(outcome, msg, "invalid");
+ return false;
+ }
+
+ if (theTargetResource.hasActive() && !theTargetResource.getActive()) {
+ String msg = "Target resource is not active, it must be active to be the target of a merge operation.";
+ addErrorToOperationOutcome(outcome, msg, "invalid");
+ return false;
+ }
+
+ List replacedByLinksInTarget =
+ getLinksOfTypeWithNonNullReference(theTargetResource, Patient.LinkType.REPLACEDBY);
+ if (!replacedByLinksInTarget.isEmpty()) {
+ String ref = replacedByLinksInTarget.get(0).getReference();
+ String msg = String.format(
+ "Target resource was previously replaced by a resource with reference '%s', it "
+ + "is not a suitable target for merging.",
+ ref);
+ addErrorToOperationOutcome(outcome, msg, "invalid");
+ return false;
+ }
+
+ List replacedByLinksInSource =
+ getLinksOfTypeWithNonNullReference(theSourceResource, Patient.LinkType.REPLACEDBY);
+ if (!replacedByLinksInSource.isEmpty()) {
+ String ref = replacedByLinksInSource.get(0).getReference();
+ String msg = String.format(
+ "Source resource was previously replaced by a resource with reference '%s', it "
+ + "is not a suitable source for merging.",
+ ref);
+ addErrorToOperationOutcome(outcome, msg, "invalid");
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Validates the merge operation parameters and adds validation errors to the outcome
+ *
+ * @param theMergeOperationParameters the merge operation parameters
+ * @param theOutcome the outcome to add validation errors to
+ * @return true if the parameters are valid, false otherwise
+ */
+ private boolean validateMergeOperationParameters(
+ BaseMergeOperationInputParameters theMergeOperationParameters, IBaseOperationOutcome theOutcome) {
+ List errorMessages = new ArrayList<>();
+ if (!theMergeOperationParameters.hasAtLeastOneSourceIdentifier()
+ && theMergeOperationParameters.getSourceResource() == null) {
+ String msg = String.format(
+ "There are no source resource parameters provided, include either a '%s', or a '%s' parameter.",
+ theMergeOperationParameters.getSourceResourceParameterName(),
+ theMergeOperationParameters.getSourceIdentifiersParameterName());
+ errorMessages.add(msg);
+ }
+
+ // Spec has conflicting information about this case
+ if (theMergeOperationParameters.hasAtLeastOneSourceIdentifier()
+ && theMergeOperationParameters.getSourceResource() != null) {
+ String msg = String.format(
+ "Source resource must be provided either by '%s' or by '%s', not both.",
+ theMergeOperationParameters.getSourceResourceParameterName(),
+ theMergeOperationParameters.getSourceIdentifiersParameterName());
+ errorMessages.add(msg);
+ }
+
+ if (!theMergeOperationParameters.hasAtLeastOneTargetIdentifier()
+ && theMergeOperationParameters.getTargetResource() == null) {
+ String msg = String.format(
+ "There are no target resource parameters provided, include either a '%s', or a '%s' parameter.",
+ theMergeOperationParameters.getTargetResourceParameterName(),
+ theMergeOperationParameters.getTargetIdentifiersParameterName());
+ errorMessages.add(msg);
+ }
+
+ // Spec has conflicting information about this case
+ if (theMergeOperationParameters.hasAtLeastOneTargetIdentifier()
+ && theMergeOperationParameters.getTargetResource() != null) {
+ String msg = String.format(
+ "Target resource must be provided either by '%s' or by '%s', not both.",
+ theMergeOperationParameters.getTargetResourceParameterName(),
+ theMergeOperationParameters.getTargetIdentifiersParameterName());
+ errorMessages.add(msg);
+ }
+
+ Reference sourceRef = (Reference) theMergeOperationParameters.getSourceResource();
+ if (sourceRef != null && !sourceRef.hasReference()) {
+ String msg = String.format(
+ "Reference specified in '%s' parameter does not have a reference element.",
+ theMergeOperationParameters.getSourceResourceParameterName());
+ errorMessages.add(msg);
+ }
+
+ Reference targetRef = (Reference) theMergeOperationParameters.getTargetResource();
+ if (targetRef != null && !targetRef.hasReference()) {
+ String msg = String.format(
+ "Reference specified in '%s' parameter does not have a reference element.",
+ theMergeOperationParameters.getTargetResourceParameterName());
+ errorMessages.add(msg);
+ }
+
+ if (!errorMessages.isEmpty()) {
+ for (String validationError : errorMessages) {
+ addErrorToOperationOutcome(theOutcome, validationError, "required");
+ }
+ // there are validation errors
+ return false;
+ }
+
+ // no validation errors
+ return true;
+ }
+
+ private IBaseResource resolveSourceResource(
+ BaseMergeOperationInputParameters theOperationParameters,
+ RequestDetails theRequestDetails,
+ IBaseOperationOutcome theOutcome) {
+ return resolveResource(
+ theOperationParameters.getSourceResource(),
+ theOperationParameters.getSourceIdentifiers(),
+ theRequestDetails,
+ theOutcome,
+ theOperationParameters.getSourceResourceParameterName(),
+ theOperationParameters.getSourceIdentifiersParameterName());
+ }
+
+ private IBaseResource resolveTargetResource(
+ BaseMergeOperationInputParameters theOperationParameters,
+ RequestDetails theRequestDetails,
+ IBaseOperationOutcome theOutcome) {
+ return resolveResource(
+ theOperationParameters.getTargetResource(),
+ theOperationParameters.getTargetIdentifiers(),
+ theRequestDetails,
+ theOutcome,
+ theOperationParameters.getTargetResourceParameterName(),
+ theOperationParameters.getTargetIdentifiersParameterName());
+ }
+
+ private IBaseResource resolveResource(
+ IBaseReference theReference,
+ List theIdentifiers,
+ RequestDetails theRequestDetails,
+ IBaseOperationOutcome theOutcome,
+ String theOperationReferenceParameterName,
+ String theOperationIdentifiersParameterName) {
+ if (theReference != null) {
+ return resolveResourceByReference(
+ theReference, theRequestDetails, theOutcome, theOperationReferenceParameterName);
+ }
+
+ return resolveResourceByIdentifiers(
+ theIdentifiers, theRequestDetails, theOutcome, theOperationIdentifiersParameterName);
+ }
+
+ private IBaseResource resolveResourceByIdentifiers(
+ List theIdentifiers,
+ RequestDetails theRequestDetails,
+ IBaseOperationOutcome theOutcome,
+ String theOperationParameterName) {
+
+ SearchParameterMap searchParameterMap = new SearchParameterMap();
+ TokenAndListParam tokenAndListParam = new TokenAndListParam();
+ for (CanonicalIdentifier identifier : theIdentifiers) {
+ TokenParam tokenParam = new TokenParam(
+ identifier.getSystemElement().getValueAsString(),
+ identifier.getValueElement().getValueAsString());
+ tokenAndListParam.addAnd(tokenParam);
+ }
+ searchParameterMap.add("identifier", tokenAndListParam);
+ searchParameterMap.setCount(2);
+
+ IBundleProvider bundle = myPatientDao.search(searchParameterMap, theRequestDetails);
+ List resources = bundle.getAllResources();
+ if (resources.isEmpty()) {
+ String msg = String.format(
+ "No resources found matching the identifier(s) specified in '%s'", theOperationParameterName);
+ addErrorToOperationOutcome(theOutcome, msg, "not-found");
+ return null;
+ }
+ if (resources.size() > 1) {
+ String msg = String.format(
+ "Multiple resources found matching the identifier(s) specified in '%s'", theOperationParameterName);
+ addErrorToOperationOutcome(theOutcome, msg, "multiple-matches");
+ return null;
+ }
+
+ return resources.get(0);
+ }
+
+ private IBaseResource resolveResourceByReference(
+ IBaseReference theReference,
+ RequestDetails theRequestDetails,
+ IBaseOperationOutcome theOutcome,
+ String theOperationParameterName) {
+ // TODO Emre: why does IBaseReference not have getIdentifier or hasReference methods?
+ // casting it to r4.Reference for now
+ Reference r4ref = (Reference) theReference;
+
+ IIdType theResourceId = new IdType(r4ref.getReferenceElement().getValue());
+ IBaseResource resource;
+ try {
+ resource = myPatientDao.read(theResourceId.toVersionless(), theRequestDetails);
+ } catch (ResourceNotFoundException e) {
+ String msg = String.format(
+ "Resource not found for the reference specified in '%s' parameter", theOperationParameterName);
+ addErrorToOperationOutcome(theOutcome, msg, "not-found");
+ return null;
+ }
+
+ if (theResourceId.hasVersionIdPart()
+ && !theResourceId
+ .getVersionIdPart()
+ .equals(resource.getIdElement().getVersionIdPart())) {
+ String msg = String.format(
+ "The reference in '%s' parameter has a version specified, "
+ + "but it is not the latest version of the resource",
+ theOperationParameterName);
+ addErrorToOperationOutcome(theOutcome, msg, "conflict");
+ return null;
+ }
+
+ return resource;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/PatientMergeOperationInputParameters.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/PatientMergeOperationInputParameters.java
new file mode 100644
index 000000000000..d1f98bc1d6cd
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/PatientMergeOperationInputParameters.java
@@ -0,0 +1,60 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.provider.merge;
+
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_RESULT_PATIENT;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER;
+
+/**
+ * See Patient $merge spec
+ */
+public class PatientMergeOperationInputParameters extends BaseMergeOperationInputParameters {
+ public PatientMergeOperationInputParameters(int theResourceLimit) {
+ super(theResourceLimit);
+ }
+
+ @Override
+ public String getSourceResourceParameterName() {
+ return OPERATION_MERGE_PARAM_SOURCE_PATIENT;
+ }
+
+ @Override
+ public String getTargetResourceParameterName() {
+ return OPERATION_MERGE_PARAM_TARGET_PATIENT;
+ }
+
+ @Override
+ public String getSourceIdentifiersParameterName() {
+ return OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER;
+ }
+
+ @Override
+ public String getTargetIdentifiersParameterName() {
+ return OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER;
+ }
+
+ @Override
+ public String getResultResourceParameterName() {
+ return OPERATION_MERGE_PARAM_RESULT_PATIENT;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/PatientMergeProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/PatientMergeProvider.java
new file mode 100644
index 000000000000..eb379cab5f53
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/PatientMergeProvider.java
@@ -0,0 +1,165 @@
+package ca.uhn.fhir.jpa.provider.merge;
+
+import ca.uhn.fhir.batch2.jobs.merge.MergeResourceHelper;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
+import ca.uhn.fhir.rest.annotation.Operation;
+import ca.uhn.fhir.rest.annotation.OperationParam;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.util.CanonicalIdentifier;
+import ca.uhn.fhir.util.ParametersUtil;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import org.hl7.fhir.instance.model.api.IBaseParameters;
+import org.hl7.fhir.instance.model.api.IBaseReference;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.r4.model.Patient;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_RESULT;
+
+public class PatientMergeProvider extends BaseJpaResourceProvider {
+
+ private final FhirContext myFhirContext;
+ private final ResourceMergeService myResourceMergeService;
+
+ public PatientMergeProvider(
+ FhirContext theFhirContext, DaoRegistry theDaoRegistry, ResourceMergeService theResourceMergeService) {
+ super(theDaoRegistry.getResourceDao("Patient"));
+ myFhirContext = theFhirContext;
+ assert myFhirContext.getVersion().getVersion() == FhirVersionEnum.R4;
+ myResourceMergeService = theResourceMergeService;
+ }
+
+ @Override
+ public Class getResourceType() {
+ return Patient.class;
+ }
+
+ /**
+ * /Patient/$merge
+ */
+ @Operation(
+ name = ProviderConstants.OPERATION_MERGE,
+ canonicalUrl = "http://hl7.org/fhir/OperationDefinition/Patient-merge")
+ public IBaseParameters patientMerge(
+ HttpServletRequest theServletRequest,
+ HttpServletResponse theServletResponse,
+ ServletRequestDetails theRequestDetails,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER)
+ List theSourcePatientIdentifier,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER)
+ List theTargetPatientIdentifier,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT, max = 1)
+ IBaseReference theSourcePatient,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT, max = 1)
+ IBaseReference theTargetPatient,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_PREVIEW, typeName = "boolean", max = 1)
+ IPrimitiveType thePreview,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_DELETE_SOURCE, typeName = "boolean", max = 1)
+ IPrimitiveType theDeleteSource,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_RESULT_PATIENT, max = 1)
+ IBaseResource theResultPatient,
+ @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_BATCH_SIZE, typeName = "unsignedInt")
+ IPrimitiveType theResourceLimit) {
+
+ startRequest(theServletRequest);
+
+ try {
+ int resourceLimit = MergeResourceHelper.setResourceLimitFromParameter(myStorageSettings, theResourceLimit);
+
+ BaseMergeOperationInputParameters mergeOperationParameters = buildMergeOperationInputParameters(
+ theSourcePatientIdentifier,
+ theTargetPatientIdentifier,
+ theSourcePatient,
+ theTargetPatient,
+ thePreview,
+ theDeleteSource,
+ theResultPatient,
+ resourceLimit);
+
+ MergeOperationOutcome mergeOutcome =
+ myResourceMergeService.merge(mergeOperationParameters, theRequestDetails);
+
+ theServletResponse.setStatus(mergeOutcome.getHttpStatusCode());
+ return buildMergeOperationOutputParameters(myFhirContext, mergeOutcome, theRequestDetails.getResource());
+ } finally {
+ endRequest(theServletRequest);
+ }
+ }
+
+ private IBaseParameters buildMergeOperationOutputParameters(
+ FhirContext theFhirContext, MergeOperationOutcome theMergeOutcome, IBaseResource theInputParameters) {
+
+ IBaseParameters retVal = ParametersUtil.newInstance(theFhirContext);
+ ParametersUtil.addParameterToParameters(
+ theFhirContext, retVal, ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_INPUT, theInputParameters);
+
+ ParametersUtil.addParameterToParameters(
+ theFhirContext,
+ retVal,
+ ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_OUTCOME,
+ theMergeOutcome.getOperationOutcome());
+
+ if (theMergeOutcome.getUpdatedTargetResource() != null) {
+ ParametersUtil.addParameterToParameters(
+ theFhirContext,
+ retVal,
+ OPERATION_MERGE_OUTPUT_PARAM_RESULT,
+ theMergeOutcome.getUpdatedTargetResource());
+ }
+
+ if (theMergeOutcome.getTask() != null) {
+ ParametersUtil.addParameterToParameters(
+ theFhirContext,
+ retVal,
+ ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_TASK,
+ theMergeOutcome.getTask());
+ }
+ return retVal;
+ }
+
+ private BaseMergeOperationInputParameters buildMergeOperationInputParameters(
+ List theSourcePatientIdentifier,
+ List theTargetPatientIdentifier,
+ IBaseReference theSourcePatient,
+ IBaseReference theTargetPatient,
+ IPrimitiveType thePreview,
+ IPrimitiveType theDeleteSource,
+ IBaseResource theResultPatient,
+ int theResourceLimit) {
+ BaseMergeOperationInputParameters mergeOperationParameters =
+ new PatientMergeOperationInputParameters(theResourceLimit);
+ if (theSourcePatientIdentifier != null) {
+ List sourceResourceIdentifiers = theSourcePatientIdentifier.stream()
+ .map(CanonicalIdentifier::fromIdentifier)
+ .collect(Collectors.toList());
+ mergeOperationParameters.setSourceResourceIdentifiers(sourceResourceIdentifiers);
+ }
+ if (theTargetPatientIdentifier != null) {
+ List targetResourceIdentifiers = theTargetPatientIdentifier.stream()
+ .map(CanonicalIdentifier::fromIdentifier)
+ .collect(Collectors.toList());
+ mergeOperationParameters.setTargetResourceIdentifiers(targetResourceIdentifiers);
+ }
+ mergeOperationParameters.setSourceResource(theSourcePatient);
+ mergeOperationParameters.setTargetResource(theTargetPatient);
+ mergeOperationParameters.setPreview(thePreview != null && thePreview.getValue());
+ mergeOperationParameters.setDeleteSource(theDeleteSource != null && theDeleteSource.getValue());
+
+ if (theResultPatient != null) {
+ // pass in a copy of the result patient as we don't want it to be modified. It will be
+ // returned back to the client as part of the response.
+ mergeOperationParameters.setResultResource(((Patient) theResultPatient).copy());
+ }
+
+ return mergeOperationParameters;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/ResourceMergeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/ResourceMergeService.java
new file mode 100644
index 000000000000..8ea68fe5514b
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/merge/ResourceMergeService.java
@@ -0,0 +1,265 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.provider.merge;
+
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
+import ca.uhn.fhir.batch2.jobs.merge.MergeResourceHelper;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.provider.IReplaceReferencesSvc;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
+import ca.uhn.fhir.util.OperationOutcomeUtil;
+import org.hl7.fhir.instance.model.api.IBase;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Task;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static ca.uhn.fhir.batch2.jobs.merge.MergeAppCtx.JOB_MERGE;
+import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_200_OK;
+import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_202_ACCEPTED;
+import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_500_INTERNAL_ERROR;
+
+/**
+ * Service for the FHIR $merge operation. Currently only supports Patient/$merge. The plan is to expand to other resource types.
+ */
+public class ResourceMergeService {
+ private static final Logger ourLog = LoggerFactory.getLogger(ResourceMergeService.class);
+
+ // Collaborators: DAOs for Patient/Task, the reference-replacement service, partition helper,
+ // and the batch2 machinery used for the asynchronous merge path.
+ private final FhirContext myFhirContext;
+ private final JpaStorageSettings myStorageSettings;
+ private final IFhirResourceDao myPatientDao;
+ private final IReplaceReferencesSvc myReplaceReferencesSvc;
+ private final IHapiTransactionService myHapiTransactionService;
+ private final IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
+ private final IFhirResourceDao myTaskDao;
+ private final IJobCoordinator myJobCoordinator;
+ private final MergeResourceHelper myMergeResourceHelper;
+ private final Batch2TaskHelper myBatch2TaskHelper;
+ private final MergeValidationService myMergeValidationService;
+
+ /**
+ * Constructor. Resolves the Patient and Task DAOs from the registry and derives the
+ * FhirContext from the Patient DAO; also builds the internal merge helper and
+ * validation service from the supplied registry.
+ */
+ public ResourceMergeService(
+ JpaStorageSettings theStorageSettings,
+ DaoRegistry theDaoRegistry,
+ IReplaceReferencesSvc theReplaceReferencesSvc,
+ IHapiTransactionService theHapiTransactionService,
+ IRequestPartitionHelperSvc theRequestPartitionHelperSvc,
+ IJobCoordinator theJobCoordinator,
+ Batch2TaskHelper theBatch2TaskHelper) {
+ myStorageSettings = theStorageSettings;
+
+ myPatientDao = theDaoRegistry.getResourceDao(Patient.class);
+ myTaskDao = theDaoRegistry.getResourceDao(Task.class);
+ myReplaceReferencesSvc = theReplaceReferencesSvc;
+ myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
+ myJobCoordinator = theJobCoordinator;
+ myBatch2TaskHelper = theBatch2TaskHelper;
+ myFhirContext = myPatientDao.getContext();
+ myHapiTransactionService = theHapiTransactionService;
+ myMergeResourceHelper = new MergeResourceHelper(myPatientDao);
+ myMergeValidationService = new MergeValidationService(myFhirContext, theDaoRegistry);
+ }
+
+ /**
+ * Perform the $merge operation. If the number of resources to be changed exceeds the provided batch size,
+ * then switch to async mode. See the Patient $merge spec
+ * for details on what the difference is between synchronous and asynchronous mode.
+ *
+ * @param theMergeOperationParameters the merge operation parameters
+ * @param theRequestDetails the request details
+ * @return the merge outcome containing OperationOutcome and HTTP status code
+ */
+ public MergeOperationOutcome merge(
+ BaseMergeOperationInputParameters theMergeOperationParameters, RequestDetails theRequestDetails) {
+
+ MergeOperationOutcome mergeOutcome = new MergeOperationOutcome();
+ IBaseOperationOutcome operationOutcome = OperationOutcomeUtil.newInstance(myFhirContext);
+ mergeOutcome.setOperationOutcome(operationOutcome);
+ // default to 200 OK, would be changed to another code during processing as required
+ mergeOutcome.setHttpStatusCode(STATUS_HTTP_200_OK);
+ try {
+ validateAndMerge(theMergeOperationParameters, theRequestDetails, mergeOutcome);
+ } catch (Exception e) {
+ // Any failure is reported back as an OperationOutcome issue rather than propagated:
+ // server-response exceptions keep their own status code, everything else maps to 500.
+ ourLog.error("Resource merge failed", e);
+ if (e instanceof BaseServerResponseException) {
+ mergeOutcome.setHttpStatusCode(((BaseServerResponseException) e).getStatusCode());
+ } else {
+ mergeOutcome.setHttpStatusCode(STATUS_HTTP_500_INTERNAL_ERROR);
+ }
+ OperationOutcomeUtil.addIssue(myFhirContext, operationOutcome, "error", e.getMessage(), null, "exception");
+ }
+ return mergeOutcome;
+ }
+
+ /**
+ * Validates the input parameters and, when valid, dispatches to either the preview path
+ * or the actual merge. On validation failure, only the HTTP status code is set here;
+ * the validation service has already added its issues to the outcome.
+ */
+ private void validateAndMerge(
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ RequestDetails theRequestDetails,
+ MergeOperationOutcome theMergeOutcome) {
+
+ // TODO KHS remove the outparameter and instead accumulate issues in the validation result
+ MergeValidationResult mergeValidationResult =
+ myMergeValidationService.validate(theMergeOperationParameters, theRequestDetails, theMergeOutcome);
+
+ if (mergeValidationResult.isValid) {
+ Patient sourceResource = mergeValidationResult.sourceResource;
+ Patient targetResource = mergeValidationResult.targetResource;
+
+ if (theMergeOperationParameters.getPreview()) {
+ handlePreview(
+ sourceResource,
+ targetResource,
+ theMergeOperationParameters,
+ theRequestDetails,
+ theMergeOutcome);
+ } else {
+ doMerge(
+ theMergeOperationParameters,
+ sourceResource,
+ targetResource,
+ theRequestDetails,
+ theMergeOutcome);
+ }
+ } else {
+ theMergeOutcome.setHttpStatusCode(mergeValidationResult.httpStatusCode);
+ }
+ }
+
+ /**
+ * Preview mode: performs no writes. Counts the resources referencing the source, computes
+ * what the target would look like after the merge, and reports both in the outcome.
+ */
+ private void handlePreview(
+ Patient theSourceResource,
+ Patient theTargetResource,
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ RequestDetails theRequestDetails,
+ MergeOperationOutcome theMergeOutcome) {
+
+ Integer referencingResourceCount = myReplaceReferencesSvc.countResourcesReferencingResource(
+ theSourceResource.getIdElement().toVersionless(), theRequestDetails);
+
+ // in preview mode, we should also return what the target would look like
+ Patient theResultResource = (Patient) theMergeOperationParameters.getResultResource();
+ Patient targetPatientAsIfUpdated = myMergeResourceHelper.prepareTargetPatientForUpdate(
+ theTargetResource, theSourceResource, theResultResource, theMergeOperationParameters.getDeleteSource());
+ theMergeOutcome.setUpdatedTargetResource(targetPatientAsIfUpdated);
+
+ // adding +2 because the source and the target resources would be updated as well
+ String diagnosticsMsg = String.format("Merge would update %d resources", referencingResourceCount + 2);
+ String detailsText = "Preview only merge operation - no issues detected";
+ addInfoToOperationOutcome(theMergeOutcome.getOperationOutcome(), diagnosticsMsg, detailsText);
+ }
+
+ /**
+ * Dispatches to the synchronous or asynchronous merge path. Async is chosen when the client
+ * asked for it via the Prefer header (see {@code RequestDetails#isPreferAsync}); the partition
+ * is resolved from the target resource's id for either path.
+ */
+ private void doMerge(
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ Patient theSourceResource,
+ Patient theTargetResource,
+ RequestDetails theRequestDetails,
+ MergeOperationOutcome theMergeOutcome) {
+
+ RequestPartitionId partitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(
+ theRequestDetails, ReadPartitionIdRequestDetails.forRead(theTargetResource.getIdElement()));
+
+ if (theRequestDetails.isPreferAsync()) {
+ doMergeAsync(
+ theMergeOperationParameters,
+ theSourceResource,
+ theTargetResource,
+ theRequestDetails,
+ theMergeOutcome,
+ partitionId);
+ } else {
+ doMergeSync(
+ theMergeOperationParameters,
+ theSourceResource,
+ theTargetResource,
+ theRequestDetails,
+ theMergeOutcome,
+ partitionId);
+ }
+ }
+
+ /**
+ * Synchronous merge: replaces all references to the source with the target in the current
+ * request, then updates the source and target resources, and reports success in the outcome.
+ * NOTE(review): the replace-references service is presumably responsible for raising an error
+ * when the reference count exceeds the resource limit - confirm against its implementation.
+ */
+ private void doMergeSync(
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ Patient theSourceResource,
+ Patient theTargetResource,
+ RequestDetails theRequestDetails,
+ MergeOperationOutcome theMergeOutcome,
+ RequestPartitionId partitionId) {
+
+ ReplaceReferencesRequest replaceReferencesRequest = new ReplaceReferencesRequest(
+ theSourceResource.getIdElement(),
+ theTargetResource.getIdElement(),
+ theMergeOperationParameters.getResourceLimit(),
+ partitionId);
+
+ myReplaceReferencesSvc.replaceReferences(replaceReferencesRequest, theRequestDetails);
+
+ Patient updatedTarget = myMergeResourceHelper.updateMergedResourcesAfterReferencesReplaced(
+ myHapiTransactionService,
+ theSourceResource,
+ theTargetResource,
+ (Patient) theMergeOperationParameters.getResultResource(),
+ theMergeOperationParameters.getDeleteSource(),
+ theRequestDetails);
+ theMergeOutcome.setUpdatedTargetResource(updatedTarget);
+
+ String detailsText = "Merge operation completed successfully.";
+ addInfoToOperationOutcome(theMergeOutcome.getOperationOutcome(), null, detailsText);
+ }
+
+ /**
+ * Asynchronous merge: starts a batch2 merge job with an associated Task resource, returns the
+ * Task (stripped to an unqualified versionless id, with no meta version) in the outcome, and
+ * sets HTTP 202 Accepted.
+ */
+ private void doMergeAsync(
+ BaseMergeOperationInputParameters theMergeOperationParameters,
+ Patient theSourceResource,
+ Patient theTargetResource,
+ RequestDetails theRequestDetails,
+ MergeOperationOutcome theMergeOutcome,
+ RequestPartitionId thePartitionId) {
+
+ MergeJobParameters mergeJobParameters = theMergeOperationParameters.asMergeJobParameters(
+ myFhirContext, myStorageSettings, theSourceResource, theTargetResource, thePartitionId);
+
+ Task task = myBatch2TaskHelper.startJobAndCreateAssociatedTask(
+ myTaskDao, theRequestDetails, myJobCoordinator, JOB_MERGE, mergeJobParameters);
+
+ // return the task with a clean, versionless id so clients can poll it
+ task.setIdElement(task.getIdElement().toUnqualifiedVersionless());
+ task.getMeta().setVersionId(null);
+ theMergeOutcome.setTask(task);
+ theMergeOutcome.setHttpStatusCode(STATUS_HTTP_202_ACCEPTED);
+
+ String detailsText = "Merge request is accepted, and will be processed asynchronously. See"
+ + " task resource returned in this response for details.";
+ addInfoToOperationOutcome(theMergeOutcome.getOperationOutcome(), null, detailsText);
+ }
+
+ /**
+ * Adds an "information" severity issue to the given OperationOutcome, with the given
+ * diagnostics message (may be null) and details text.
+ */
+ private void addInfoToOperationOutcome(
+ IBaseOperationOutcome theOutcome, String theDiagnosticMsg, String theDetailsText) {
+ IBase issue =
+ OperationOutcomeUtil.addIssue(myFhirContext, theOutcome, "information", theDiagnosticMsg, null, null);
+ OperationOutcomeUtil.addDetailsToIssue(myFhirContext, issue, null, null, theDetailsText);
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/merge/ResourceMergeServiceTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/merge/ResourceMergeServiceTest.java
new file mode 100644
index 000000000000..093f7c466d30
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/merge/ResourceMergeServiceTest.java
@@ -0,0 +1,1493 @@
+package ca.uhn.fhir.jpa.provider.merge;
+
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
+import ca.uhn.fhir.batch2.jobs.parameters.BatchJobParametersWithTaskId;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.provider.IReplaceReferencesSvc;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.model.api.IQueryParameterType;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.util.CanonicalIdentifier;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.r4.model.OperationOutcome;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.Task;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.mockito.stubbing.OngoingStubbing;
+import org.testcontainers.shaded.org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.Collections;
+import java.util.List;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.isA;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
+public class ResourceMergeServiceTest {
+ private static final Integer PAGE_SIZE = 1024;
+
+ private static final String MISSING_SOURCE_PARAMS_MSG =
+ "There are no source resource parameters provided, include either a 'source-patient', or a 'source-patient-identifier' parameter.";
+ private static final String MISSING_TARGET_PARAMS_MSG =
+ "There are no target resource parameters provided, include either a 'target-patient', or a 'target-patient-identifier' parameter.";
+ private static final String BOTH_SOURCE_PARAMS_PROVIDED_MSG =
+ "Source resource must be provided either by 'source-patient' or by 'source-patient-identifier', not both.";
+ private static final String BOTH_TARGET_PARAMS_PROVIDED_MSG =
+ "Target resource must be provided either by 'target-patient' or by 'target-patient-identifier', not both.";
+ private static final String SUCCESSFUL_SYNC_MERGE_MSG = "Merge operation completed successfully";
+ private static final String SUCCESSFUL_ASYNC_MERGE_MSG = "Merge request is accepted, and will be " +
+ "processed asynchronously. See task resource returned in this response for details.";
+
+ private static final String SOURCE_PATIENT_TEST_ID = "Patient/123";
+ private static final String SOURCE_PATIENT_TEST_ID_WITH_VERSION_1= SOURCE_PATIENT_TEST_ID + "/_history/1";
+ private static final String SOURCE_PATIENT_TEST_ID_WITH_VERSION_2= SOURCE_PATIENT_TEST_ID + "/_history/2";
+ private static final String TARGET_PATIENT_TEST_ID = "Patient/456";
+ private static final String TARGET_PATIENT_TEST_ID_WITH_VERSION_1 = TARGET_PATIENT_TEST_ID + "/_history/1";
+ private static final String TARGET_PATIENT_TEST_ID_WITH_VERSION_2 = TARGET_PATIENT_TEST_ID + "/_history/2";
+ public static final String PRECONDITION_FAILED_MESSAGE = "bad wolf";
+
+ @Mock
+ DaoRegistry myDaoRegistryMock;
+
+ @Mock
+ IFhirResourceDaoPatient myPatientDaoMock;
+
+ @Mock
+ IFhirResourceDaoPatient myTaskDaoMock;
+
+ @Mock
+ IReplaceReferencesSvc myReplaceReferencesSvcMock;
+
+ @Mock
+ RequestDetails myRequestDetailsMock;
+
+ @Mock
+ IHapiTransactionService myTransactionServiceMock;
+
+ @Mock
+ IRequestPartitionHelperSvc myRequestPartitionHelperSvcMock;
+
+ @Mock
+ IJobCoordinator myJobCoordinatorMock;
+
+ @Mock
+ Batch2TaskHelper myBatch2TaskHelperMock;
+
+ @Mock
+ RequestPartitionId myRequestPartitionIdMock;
+
+ @Mock
+ private JpaStorageSettings myStorageSettingsMock;
+
+ private ResourceMergeService myResourceMergeService;
+
+ private final FhirContext myFhirContext = FhirContext.forR4Cached();
+
+ private Patient myCapturedSourcePatientForUpdate;
+
+ private Patient myCapturedTargetPatientForUpdate;
+
+ // Wires the service under test with all mocked collaborators. The DAO registry is stubbed
+ // to return the Patient/Task DAO mocks, and the Patient DAO supplies the (real) FhirContext.
+ @BeforeEach
+ void setup() {
+ when(myDaoRegistryMock.getResourceDao(eq(Patient.class))).thenReturn(myPatientDaoMock);
+ when(myDaoRegistryMock.getResourceDao(eq(Task.class))).thenReturn(myTaskDaoMock);
+ when(myPatientDaoMock.getContext()).thenReturn(myFhirContext);
+ myResourceMergeService = new ResourceMergeService(
+ myStorageSettingsMock,
+ myDaoRegistryMock,
+ myReplaceReferencesSvcMock,
+ myTransactionServiceMock,
+ myRequestPartitionHelperSvcMock,
+ myJobCoordinatorMock,
+ myBatch2TaskHelperMock);
+ }
+
+ // SUCCESS CASES
+ // Sync merge without a result resource: source identifiers must be copied onto the target
+ // (marked OLD), without duplicating identifiers the target already has.
+ @Test
+ void testMerge_WithoutResultResource_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+
+ //the identifiers should be copied from the source to the target, without creating duplicates on the target
+ sourcePatient.addIdentifier(new Identifier().setSystem("sysSource").setValue("valS1"));
+ sourcePatient.addIdentifier(new Identifier().setSystem("sysSource").setValue("valS2"));
+ sourcePatient.addIdentifier(new Identifier().setSystem("sysCommon").setValue("valCommon"));
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ targetPatient.addIdentifier(new Identifier().setSystem("sysCommon").setValue("valCommon"));
+ targetPatient.addIdentifier(new Identifier().setSystem("sysTarget").setValue("valT1"));
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+ setupDaoMockForSuccessfulSourcePatientUpdate(sourcePatient, new Patient());
+ Patient patientReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(targetPatient, patientReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedSourcePatient();
+ // the identifiers copied over from the source should be marked as OLD
+ List expectedIdentifiers = List.of(
+ new Identifier().setSystem("sysCommon").setValue("valCommon"),
+ new Identifier().setSystem("sysTarget").setValue("valT1"),
+ new Identifier().setSystem("sysSource").setValue("valS1").setUse(Identifier.IdentifierUse.OLD),
+ new Identifier().setSystem("sysSource").setValue("valS2").setUse(Identifier.IdentifierUse.OLD));
+ verifyUpdatedTargetPatient(true, expectedIdentifiers);
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ // Sync merge where the target is already active: the merge still succeeds and the target
+ // remains active after the update.
+ @Test
+ void testMerge_WithoutResultResource_TargetSetToActiveExplicitly_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ targetPatient.setActive(true);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+ setupDaoMockForSuccessfulSourcePatientUpdate(sourcePatient, new Patient());
+ Patient patientReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(targetPatient, patientReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedSourcePatient();
+ verifyUpdatedTargetPatient(true, Collections.emptyList());
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ // Sync merge with a client-supplied result resource: the result resource is used as the
+ // target update, and source identifiers must NOT be copied onto it.
+ @Test
+ void testMerge_WithResultResource_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ resultPatient.addLink().setType(Patient.LinkType.REPLACES).setOther(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ //when result resource exists, the identifiers should not be copied. so we don't expect this identifier when
+ //target is updated
+ sourcePatient.addIdentifier(new Identifier().setSystem("sysSource").setValue("valS1"));
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ setupDaoMockForSuccessfulSourcePatientUpdate(sourcePatient, new Patient());
+ Patient patientToBeReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(resultPatient, patientToBeReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientToBeReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedSourcePatient();
+ verifyUpdatedTargetPatient(true, Collections.emptyList());
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ // Target resolved by identifiers (not reference), with a result resource that already carries
+ // all of the target's identifiers: merge succeeds and those identifiers are preserved.
+ @Test
+ void testMerge_WithResultResource_ResultHasAllTargetIdentifiers_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResourceIdentifiers(List.of(
+ new CanonicalIdentifier().setSystem("sys").setValue("val1"),
+ new CanonicalIdentifier().setSystem("sys").setValue("val2")
+ ));
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ resultPatient.addLink().setType(Patient.LinkType.REPLACES).setOther(new Reference(SOURCE_PATIENT_TEST_ID));
+ resultPatient.addIdentifier().setSystem("sys").setValue("val1");
+ resultPatient.addIdentifier().setSystem("sys").setValue("val2");
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockSearchForIdentifiers(List.of(List.of(targetPatient)));
+
+ setupDaoMockForSuccessfulSourcePatientUpdate(sourcePatient, new Patient());
+ Patient patientToBeReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(resultPatient, patientToBeReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientToBeReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedSourcePatient();
+ List expectedIdentifiers = List.of(
+ new Identifier().setSystem("sys").setValue("val1"),
+ new Identifier().setSystem("sys").setValue("val2")
+ );
+ verifyUpdatedTargetPatient(true, expectedIdentifiers);
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ // Sync merge with delete-source=true: the source patient is deleted (DAO delete is stubbed)
+ // instead of being updated, and only the target is updated.
+ @Test
+ void testMerge_WithDeleteSourceTrue_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setDeleteSource(true);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ when(myPatientDaoMock.delete(new IdType(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1), myRequestDetailsMock)).thenReturn(new DaoMethodOutcome());
+ Patient patientToBeReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(targetPatient, patientToBeReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientToBeReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedTargetPatient(false, Collections.emptyList());
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ // Sync merge combining delete-source=true with a result resource: source is deleted and the
+ // result resource is used for the target update.
+ @Test
+ void testMerge_WithDeleteSourceTrue_And_WithResultResource_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setDeleteSource(true);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ mergeOperationParameters.setResultResource(resultPatient);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ when(myPatientDaoMock.delete(new IdType(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1), myRequestDetailsMock)).thenReturn(new DaoMethodOutcome());
+ Patient patientToBeReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(resultPatient, patientToBeReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientToBeReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedTargetPatient(false, Collections.emptyList());
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ // Preview mode: no updates are performed; the outcome reports the would-be updated target and
+ // a diagnostics message with the reference count + 2 (source and target themselves).
+ @Test
+ void testMerge_WithPreviewTrue_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(true);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ when(myReplaceReferencesSvcMock.countResourcesReferencingResource(new IdType(SOURCE_PATIENT_TEST_ID),
+ myRequestDetailsMock)).thenReturn(10);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(200);
+ assertThat(mergeOutcome.getUpdatedTargetResource()).isEqualTo(targetPatient);
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
+ assertThat(issue.getDetails().getText()).contains("Preview only merge operation - no issues detected");
+ assertThat(issue.getDiagnostics()).contains("Merge would update 12 resources");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ // Versioned references: when the supplied reference versions match the current resource
+ // versions, the merge proceeds successfully.
+ @Test
+ void testMerge_ResolvesResourcesByReferenceThatHasVersions_CurrentResourceVersionAreTheSame_Success() {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID_WITH_VERSION_2));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID_WITH_VERSION_2));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_2);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_2);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+ setupDaoMockForSuccessfulSourcePatientUpdate(sourcePatient, new Patient());
+ Patient patientToBeReturnedFromDaoAfterTargetUpdate = new Patient();
+ setupDaoMockForSuccessfulTargetPatientUpdate(targetPatient, patientToBeReturnedFromDaoAfterTargetUpdate);
+ setupTransactionServiceMock();
+ setupReplaceReferencesForSuccessForSync();
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySuccessfulOutcomeForSync(mergeOutcome, patientToBeReturnedFromDaoAfterTargetUpdate);
+ verifyUpdatedSourcePatient();
+ verifyUpdatedTargetPatient(true, Collections.emptyList());
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ // Async path chosen via the Prefer header (isPreferAsync=true): a batch2 job/Task is started
+ // and returned; exercised for all combinations of result-resource and delete-source.
+ @ParameterizedTest
+ @CsvSource({
+ "true, false",
+ "false, true",
+ "true, true",
+ "false, false"
+ })
+ void testMerge_AsyncBecauseOfPreferHeader_Success(boolean theWithResultResource, boolean theWithDeleteSource) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ mergeOperationParameters.setDeleteSource(theWithDeleteSource);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ when(myRequestDetailsMock.isPreferAsync()).thenReturn(true);
+ when(myRequestPartitionHelperSvcMock.determineReadPartitionForRequest(eq(myRequestDetailsMock), any())).thenReturn(myRequestPartitionIdMock);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+
+ Patient resultResource = null;
+ if (theWithResultResource) {
+ resultResource = createValidResultPatient(theWithDeleteSource);
+ mergeOperationParameters.setResultResource(resultResource);
+ }
+
+ Task task = new Task();
+ setupBatch2JobTaskHelperMock(task);
+
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ verifySuccessfulOutcomeForAsync(mergeOutcome, task);
+ verifyBatch2JobTaskHelperMockInvocation(resultResource, theWithDeleteSource);
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "true, false",
+ "false, true",
+ "true, true",
+ "false, false"
+ })
+ void testMerge_AsyncBecauseOfLargeNumberOfRefs_Success(boolean theWithResultResource,
+ boolean theWithDeleteSource) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ mergeOperationParameters.setDeleteSource(theWithDeleteSource);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ when(myRequestDetailsMock.isPreferAsync()).thenReturn(false);
+ when(myRequestPartitionHelperSvcMock.determineReadPartitionForRequest(eq(myRequestDetailsMock), any())).thenReturn(myRequestPartitionIdMock);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ if (theWithResultResource) {
+ Patient resultResource = createValidResultPatient(theWithDeleteSource);
+ mergeOperationParameters.setResultResource(resultResource);
+ }
+
+ when(myReplaceReferencesSvcMock.replaceReferences(any(), any())).thenThrow(new PreconditionFailedException(PRECONDITION_FAILED_MESSAGE));
+
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ verifyFailedOutcome(mergeOutcome);
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ private void verifyFailedOutcome(MergeOperationOutcome theMergeOutcome) {
+ assertThat(theMergeOutcome.getHttpStatusCode()).isEqualTo(PreconditionFailedException.STATUS_CODE);
+ OperationOutcome operationOutcome = (OperationOutcome) theMergeOutcome.getOperationOutcome();
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ assertThat(operationOutcome.getIssueFirstRep().getDiagnostics()).isEqualTo(PRECONDITION_FAILED_MESSAGE);
+ }
+
+ // ERROR CASES
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_UnhandledServerResponseExceptionThrown_UsesStatusCodeOfTheException(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ ForbiddenOperationException ex = new ForbiddenOperationException("this is the exception message");
+ when(myPatientDaoMock.read(any(), eq(myRequestDetailsMock))).thenThrow(ex);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(403);
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("this is the exception message");
+ assertThat(issue.getCode().toCode()).isEqualTo("exception");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_UnhandledExceptionThrown_Uses500StatusCode(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ RuntimeException ex = new RuntimeException("this is the exception message");
+ when(myPatientDaoMock.read(any(), eq(myRequestDetailsMock))).thenThrow(ex);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(500);
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("this is the exception message");
+ assertThat(issue.getCode().toCode()).isEqualTo("exception");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_MissingSourcePatientParams_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains(MISSING_SOURCE_PARAMS_MSG);
+ assertThat(issue.getCode().toCode()).isEqualTo("required");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_MissingTargetPatientParams_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains(MISSING_TARGET_PARAMS_MSG);
+ assertThat(issue.getCode().toCode()).isEqualTo("required");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_MissingBothSourceAndTargetPatientParams_ReturnsErrorsWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+ assertThat(operationOutcome.getIssue()).hasSize(2);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue1 = operationOutcome.getIssue().get(0);
+ OperationOutcome.OperationOutcomeIssueComponent issue2 = operationOutcome.getIssue().get(1);
+ assertThat(issue1.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue1.getDiagnostics()).contains(MISSING_SOURCE_PARAMS_MSG);
+ assertThat(issue1.getCode().toCode()).isEqualTo("required");
+ assertThat(issue2.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue2.getDiagnostics()).contains(MISSING_TARGET_PARAMS_MSG);
+ assertThat(issue2.getCode().toCode()).isEqualTo("required");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_BothSourceResourceAndSourceIdentifierParamsProvided_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setSourceResourceIdentifiers(List.of(new CanonicalIdentifier().setSystem("sys").setValue( "val")));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains(BOTH_SOURCE_PARAMS_PROVIDED_MSG);
+ assertThat(issue.getCode().toCode()).isEqualTo("required");
+
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_BothTargetResourceAndTargetIdentifiersParamsProvided_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResourceIdentifiers(List.of(new CanonicalIdentifier().setSystem("sys").setValue( "val")));
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains(BOTH_TARGET_PARAMS_PROVIDED_MSG);
+ assertThat(issue.getCode().toCode()).isEqualTo("required");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_SourceResourceParamHasNoReferenceElement_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference());
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Reference specified in 'source-patient' parameter does not have a reference element.");
+ assertThat(issue.getCode().toCode()).isEqualTo("required");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesInputParameters_TargetResourceParamHasNoReferenceElement_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference());
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Reference specified in 'target-patient' parameter does not have " +
+ "a reference element.");
+ assertThat(issue.getCode().toCode()).isEqualTo("required");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesSourceResourceByReference_ResourceNotFound_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ when(myPatientDaoMock.read(new IdType(SOURCE_PATIENT_TEST_ID), myRequestDetailsMock)).thenThrow(ResourceNotFoundException.class);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Resource not found for the reference specified in 'source-patient'");
+ assertThat(issue.getCode().toCode()).isEqualTo("not-found");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesTargetResourceByReference_ResourceNotFound_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ when(myPatientDaoMock.read(new IdType(TARGET_PATIENT_TEST_ID), myRequestDetailsMock)).thenThrow(ResourceNotFoundException.class);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Resource not found for the reference specified in 'target-patient'");
+ assertThat(issue.getCode().toCode()).isEqualTo("not-found");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesSourceResourceByIdentifiers_NoMatchFound_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResourceIdentifiers(List.of(
+ new CanonicalIdentifier().setSystem("sys").setValue("val1"),
+ new CanonicalIdentifier().setSystem("sys").setValue("val2")));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ setupDaoMockSearchForIdentifiers(List.of(Collections.emptyList()));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySearchParametersOnDaoSearchInvocations(List.of(List.of("sys|val1","sys|val2")));
+
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("No resources found matching the identifier(s) specified in 'source-patient-identifier'");
+ assertThat(issue.getCode().toCode()).isEqualTo("not-found");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesSourceResourceByIdentifiers_MultipleMatchesFound_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResourceIdentifiers(List.of(new CanonicalIdentifier().setSystem("sys").setValue("val1")));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ setupDaoMockSearchForIdentifiers(List.of(
+ List.of(
+ createPatient("Patient/match-1"),
+ createPatient("Patient/match-2"))
+ ));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySearchParametersOnDaoSearchInvocations(List.of(List.of("sys|val1")));
+
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Multiple resources found matching the identifier(s) specified in" +
+ " 'source-patient-identifier'");
+ assertThat(issue.getCode().toCode()).isEqualTo("multiple-matches");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesTargetResourceByIdentifiers_NoMatchFound_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResourceIdentifiers(List.of(
+ new CanonicalIdentifier().setSystem("sys").setValue("val1"),
+ new CanonicalIdentifier().setSystem("sys").setValue("val2")));
+ setupDaoMockSearchForIdentifiers(List.of(Collections.emptyList()));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySearchParametersOnDaoSearchInvocations(List.of(List.of("sys|val1", "sys|val2")));
+
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("No resources found matching the identifier(s) specified in " +
+ "'target-patient-identifier'");
+ assertThat(issue.getCode().toCode()).isEqualTo("not-found");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesTargetResourceByIdentifiers_MultipleMatchesFound_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResourceIdentifiers(List.of(new CanonicalIdentifier().setSystem("sys").setValue("val1")));
+ setupDaoMockSearchForIdentifiers(List.of(
+ List.of(
+ createPatient("Patient/match-1"),
+ createPatient("Patient/match-2"))
+ ));
+
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ verifySearchParametersOnDaoSearchInvocations(List.of(List.of("sys|val1")));
+
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Multiple resources found matching the identifier(s) specified in 'target-patient-identifier'");
+ assertThat(issue.getCode().toCode()).isEqualTo("multiple-matches");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesSourceResourceByReferenceThatHasVersion_CurrentResourceVersionIsDifferent_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ // make the resolved source patient have a more recent version than the one specified in the reference
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_2);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("The reference in 'source-patient' parameter has a version specified, but it is not the latest version of the resource");
+ assertThat(issue.getCode().toCode()).isEqualTo("conflict");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ResolvesTargetResourceByReferenceThatHasVersion_CurrentResourceVersionIsDifferent_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID_WITH_VERSION_1));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ // make the resolved target patient have a more recent version than the one specified in the reference
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_2);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("The reference in 'target-patient' parameter has a version " +
+ "specified, but it is not the latest version of the resource");
+ assertThat(issue.getCode().toCode()).isEqualTo("conflict");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_SourceAndTargetResolvesToSameResource_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResourceIdentifiers(List.of(new CanonicalIdentifier().setSystem("sys").setValue("val1")));
+ mergeOperationParameters.setTargetResourceIdentifiers(List.of(new CanonicalIdentifier().setSystem("sys").setValue("val2")));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ Patient targetPatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ setupDaoMockSearchForIdentifiers(List.of(List.of(sourcePatient), List.of(targetPatient)));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+
+ verifySearchParametersOnDaoSearchInvocations(List.of(List.of("sys|val1"), List.of("sys|val2")));
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Source and target resources are the same resource.");
+
+ //TODO: enable this
+ //verifyNoMoreInteractions(myDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_TargetResourceIsInactive_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ targetPatient.setActive(false);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Target resource is not active, it must be active to be the target of a merge operation");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_TargetResourceWasPreviouslyReplacedByAnotherResource_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ addReplacedByLink(targetPatient, "Patient/replacing-res-id");
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Target resource was previously replaced by a resource with " +
+ "reference 'Patient/replacing-res-id', it is " +
+ "not a suitable target for merging.");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_SourceResourceWasPreviouslyReplacedByAnotherResource_ReturnsErrorWith422Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ addReplacedByLink(sourcePatient, "Patient/replacing-res-id");
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(422);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("Source resource was previously replaced by a resource with " +
+ "reference 'Patient/replacing-res-id', it is not a suitable source for merging.");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesResultResource_ResultResourceHasDifferentIdThanTargetResource_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ Patient resultPatient = createPatient("Patient/not-the-target-id");
+ addReplacesLink(resultPatient, SOURCE_PATIENT_TEST_ID);
+ mergeOperationParameters.setResultResource(resultPatient);
+
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("'result-patient' must have the same versionless id " +
+ "as the actual" +
+ " resolved target resource 'Patient/not-the-target-id'. The actual resolved target resource's id is: '" + TARGET_PATIENT_TEST_ID +"'");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesResultResource_ResultResourceDoesNotHaveAllIdentifiersProvidedInTargetIdentifiers_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResourceIdentifiers(List.of(
+ new CanonicalIdentifier().setSystem("sysA").setValue("val1"),
+ new CanonicalIdentifier().setSystem("sysB").setValue("val2")
+ ));
+
+ // the result patient has only one of the identifiers that were provided in the target identifiers
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ resultPatient.addIdentifier().setSystem("sysA").setValue("val1");
+ resultPatient.addIdentifier().setSystem("sysC").setValue("val2");
+ addReplacesLink(resultPatient, SOURCE_PATIENT_TEST_ID);
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockSearchForIdentifiers(List.of(List.of(targetPatient)));
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("'result-patient' must have all the identifiers provided in target-patient-identifier");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesResultResource_ResultResourceHasNoReplacesLinkAtAll_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("'result-patient' must have a 'replaces' link to the source resource.");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesResultResource_ResultResourceHasNoReplacesLinkToSource_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ addReplacesLink(resultPatient, "Patient/not-the-source-id");
+
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("'result-patient' must have a 'replaces' link to the source resource.");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesResultResource_ResultResourceHasReplacesLinkAndDeleteSourceIsTrue_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+ mergeOperationParameters.setDeleteSource(true);
+
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ addReplacesLink(resultPatient, SOURCE_PATIENT_TEST_ID);
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("'result-patient' must not have a 'replaces' link to the source resource when the source resource will be deleted, as the link may prevent deleting the source resource.");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void testMerge_ValidatesResultResource_ResultResourceHasRedundantReplacesLinksToSource_ReturnsErrorWith400Status(boolean thePreview) {
+ // Given
+ BaseMergeOperationInputParameters mergeOperationParameters = new PatientMergeOperationInputParameters(PAGE_SIZE);
+ mergeOperationParameters.setPreview(thePreview);
+ mergeOperationParameters.setSourceResource(new Reference(SOURCE_PATIENT_TEST_ID));
+ mergeOperationParameters.setTargetResource(new Reference(TARGET_PATIENT_TEST_ID));
+
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ //add the link twice
+ addReplacesLink(resultPatient, SOURCE_PATIENT_TEST_ID);
+ addReplacesLink(resultPatient, SOURCE_PATIENT_TEST_ID);
+
+ mergeOperationParameters.setResultResource(resultPatient);
+ Patient sourcePatient = createPatient(SOURCE_PATIENT_TEST_ID_WITH_VERSION_1);
+ Patient targetPatient = createPatient(TARGET_PATIENT_TEST_ID_WITH_VERSION_1);
+ setupDaoMockForSuccessfulRead(sourcePatient);
+ setupDaoMockForSuccessfulRead(targetPatient);
+
+ // When
+ MergeOperationOutcome mergeOutcome = myResourceMergeService.merge(mergeOperationParameters, myRequestDetailsMock);
+
+ // Then
+ OperationOutcome operationOutcome = (OperationOutcome) mergeOutcome.getOperationOutcome();
+ assertThat(mergeOutcome.getHttpStatusCode()).isEqualTo(400);
+
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.ERROR);
+ assertThat(issue.getDiagnostics()).contains("'result-patient' has multiple 'replaces' links to the source resource. There should be only one.");
+
+ verifyNoMoreInteractions(myPatientDaoMock);
+ }
+
+ private void verifySuccessfulOutcomeForSync(MergeOperationOutcome theMergeOutcome, Patient theExpectedTargetResource) {
+ assertThat(theMergeOutcome.getHttpStatusCode()).isEqualTo(200);
+
+ OperationOutcome operationOutcome = (OperationOutcome) theMergeOutcome.getOperationOutcome();
+ assertThat(theMergeOutcome.getUpdatedTargetResource()).isEqualTo(theExpectedTargetResource);
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
+ assertThat(issue.getDetails().getText()).contains(SUCCESSFUL_SYNC_MERGE_MSG);
+ }
+
+ private void verifySuccessfulOutcomeForAsync(MergeOperationOutcome theMergeOutcome, Task theExpectedTask) {
+ assertThat(theMergeOutcome.getHttpStatusCode()).isEqualTo(202);
+ assertThat(theMergeOutcome.getTask()).isEqualTo(theExpectedTask);
+ assertThat(theMergeOutcome.getUpdatedTargetResource()).isNull();
+ OperationOutcome operationOutcome = (OperationOutcome) theMergeOutcome.getOperationOutcome();
+ assertThat(theMergeOutcome.getUpdatedTargetResource()).isNull();
+ assertThat(operationOutcome.getIssue()).hasSize(1);
+ OperationOutcome.OperationOutcomeIssueComponent issue = operationOutcome.getIssueFirstRep();
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
+ assertThat(issue.getDetails().getText()).contains(SUCCESSFUL_ASYNC_MERGE_MSG);
+
+ }
+
+ private Patient createPatient(String theId) {
+ Patient patient = new Patient();
+ patient.setId(theId);
+ return patient;
+ }
+
+ private Patient createValidResultPatient(boolean theDeleteSource) {
+
+ Patient resultPatient = createPatient(TARGET_PATIENT_TEST_ID);
+ if (!theDeleteSource) {
+ addReplacesLink(resultPatient, SOURCE_PATIENT_TEST_ID);
+ }
+ return resultPatient;
+ }
+
+ private void addReplacedByLink(Patient thePatient, String theReplacingResourceId) {
+ thePatient.addLink().setType(Patient.LinkType.REPLACEDBY).setOther(new Reference(theReplacingResourceId));
+ }
+
+ private void addReplacesLink(Patient patient, String theReplacedResourceId) {
+ patient.addLink().setType(Patient.LinkType.REPLACES).setOther(new Reference(theReplacedResourceId));
+ }
+
+ private void setupTransactionServiceMock() {
+ IHapiTransactionService.IExecutionBuilder executionBuilderMock =
+ mock(IHapiTransactionService.IExecutionBuilder.class);
+ when(myTransactionServiceMock.withRequest(myRequestDetailsMock)).thenReturn(executionBuilderMock);
+ doAnswer(invocation -> {
+ Runnable runnable = invocation.getArgument(0);
+ runnable.run();
+ return null;
+ }).when(executionBuilderMock).execute(isA(Runnable.class));
+ }
+
+ private void setupDaoMockForSuccessfulRead(Patient resource) {
+ assertThat(resource.getIdElement()).isNotNull();
+ //dao reads the versionless id
+ when(myPatientDaoMock.read(resource.getIdElement().toVersionless(), myRequestDetailsMock)).thenReturn(resource);
+ }
+
+
+ /**
+ * Sets up the dao mock to return the given list of resources for each invocation of the search method
+ * @param theMatchingResourcesOnInvocations list containing the list of resources the search should return on each
+ * invocation of the search method, i.e. one list per invocation
+ */
+ private void setupDaoMockSearchForIdentifiers(List> theMatchingResourcesOnInvocations) {
+
+ OngoingStubbing ongoingStubbing = null;
+ for (List matchingResources : theMatchingResourcesOnInvocations) {
+ IBundleProvider bundleProviderMock = mock(IBundleProvider.class);
+ when(bundleProviderMock.getAllResources()).thenReturn(matchingResources);
+ if (ongoingStubbing == null) {
+ ongoingStubbing = when(myPatientDaoMock.search(any(), eq(myRequestDetailsMock))).thenReturn(bundleProviderMock);
+ }
+ else {
+ ongoingStubbing.thenReturn(bundleProviderMock);
+ }
+
+ }
+ }
+
+ private void verifyUpdatedSourcePatient() {
+ assertThat(myCapturedSourcePatientForUpdate.getLink()).hasSize(1);
+ assertThat(myCapturedSourcePatientForUpdate.getLinkFirstRep().getType()).isEqualTo(Patient.LinkType.REPLACEDBY);
+ assertThat(myCapturedSourcePatientForUpdate.getLinkFirstRep().getOther().getReference()).isEqualTo(TARGET_PATIENT_TEST_ID);
+ }
+
+ private void setupDaoMockForSuccessfulSourcePatientUpdate(Patient thePatientExpectedAsInput,
+ Patient thePatientToReturnInDaoOutcome) {
+ DaoMethodOutcome daoMethodOutcome = new DaoMethodOutcome();
+ daoMethodOutcome.setResource(thePatientToReturnInDaoOutcome);
+ when(myPatientDaoMock.update(thePatientExpectedAsInput, myRequestDetailsMock))
+ .thenAnswer(t -> {
+ myCapturedSourcePatientForUpdate = t.getArgument(0);
+
+ DaoMethodOutcome outcome = new DaoMethodOutcome();
+ outcome.setResource(thePatientToReturnInDaoOutcome);
+ return outcome;
+ });
+ }
+
+ private void verifyUpdatedTargetPatient(boolean theExpectLinkToSourcePatient, List theExpectedIdentifiers) {
+ if (theExpectLinkToSourcePatient) {
+ assertThat(myCapturedTargetPatientForUpdate.getLink()).hasSize(1);
+ assertThat(myCapturedTargetPatientForUpdate.getLinkFirstRep().getType()).isEqualTo(Patient.LinkType.REPLACES);
+ assertThat(myCapturedTargetPatientForUpdate.getLinkFirstRep().getOther().getReference()).isEqualTo(SOURCE_PATIENT_TEST_ID);
+ }
+ else {
+ assertThat(myCapturedTargetPatientForUpdate.getLink()).isEmpty();
+ }
+
+
+ assertThat(myCapturedTargetPatientForUpdate.getIdentifier()).hasSize(theExpectedIdentifiers.size());
+ for (int i = 0; i < theExpectedIdentifiers.size(); i++) {
+ Identifier expectedIdentifier = theExpectedIdentifiers.get(i);
+ Identifier actualIdentifier = myCapturedTargetPatientForUpdate.getIdentifier().get(i);
+ assertThat(expectedIdentifier.equalsDeep(actualIdentifier)).isTrue();
+ }
+
+ }
+
+ private void setupReplaceReferencesForSuccessForSync() {
+ // set the count to less than the page size for sync processing
+ when(myReplaceReferencesSvcMock.replaceReferences(isA(ReplaceReferencesRequest.class),
+ eq(myRequestDetailsMock))).thenReturn(new Parameters());
+ }
+
+ private void setupBatch2JobTaskHelperMock(Task theTaskToReturn) {
+ when(myBatch2TaskHelperMock.startJobAndCreateAssociatedTask(
+ eq(myTaskDaoMock),
+ eq(myRequestDetailsMock),
+ eq(myJobCoordinatorMock),
+ eq("MERGE"),
+ any())).thenReturn(theTaskToReturn);
+ }
+
+ private void verifyBatch2JobTaskHelperMockInvocation(@Nullable Patient theResultResource,
+ boolean theDeleteSource) {
+ ArgumentCaptor jobParametersCaptor =
+ ArgumentCaptor.forClass(BatchJobParametersWithTaskId.class);
+ verify(myBatch2TaskHelperMock).startJobAndCreateAssociatedTask(
+ eq(myTaskDaoMock),
+ eq(myRequestDetailsMock),
+ eq(myJobCoordinatorMock),
+ eq("MERGE"),
+ jobParametersCaptor.capture());
+
+ assertThat(jobParametersCaptor.getValue()).isInstanceOf(MergeJobParameters.class);
+ MergeJobParameters capturedJobParams = (MergeJobParameters) jobParametersCaptor.getValue();
+ assertThat(capturedJobParams.getBatchSize()).isEqualTo(PAGE_SIZE);
+ assertThat(capturedJobParams.getSourceId().toString()).isEqualTo(SOURCE_PATIENT_TEST_ID);
+ assertThat(capturedJobParams.getTargetId().toString()).isEqualTo(TARGET_PATIENT_TEST_ID);
+ assertThat(capturedJobParams.getDeleteSource()).isEqualTo(theDeleteSource);
+ assertThat(capturedJobParams.getPartitionId()).isEqualTo(myRequestPartitionIdMock);
+ if (theResultResource != null) {
+ assertThat(capturedJobParams.getResultResource()).isEqualTo(myFhirContext.newJsonParser().encodeResourceToString(theResultResource));
+ }
+ else {
+ assertThat(capturedJobParams.getResultResource()).isNull();
+ }
+ }
+
+ private void setupDaoMockForSuccessfulTargetPatientUpdate(Patient thePatientExpectedAsInput,
+ Patient thePatientToReturnInDaoOutcome) {
+ DaoMethodOutcome daoMethodOutcome = new DaoMethodOutcome();
+ daoMethodOutcome.setResource(thePatientToReturnInDaoOutcome);
+ when(myPatientDaoMock.update(thePatientExpectedAsInput, myRequestDetailsMock))
+ .thenAnswer(t -> {
+ myCapturedTargetPatientForUpdate = t.getArgument(0);
+ DaoMethodOutcome outcome = new DaoMethodOutcome();
+ outcome.setResource(thePatientToReturnInDaoOutcome);
+ return outcome;
+ });
+ }
+
+ private void verifySearchParametersOnDaoSearchInvocations(List> theExpectedIdentifierParams) {
+ ArgumentCaptor captor = ArgumentCaptor.forClass(SearchParameterMap.class);
+ verify(myPatientDaoMock, times(theExpectedIdentifierParams.size())).search(captor.capture(), eq(myRequestDetailsMock));
+ List maps = captor.getAllValues();
+ assertThat(maps).hasSameSizeAs(theExpectedIdentifierParams);
+ for (int i = 0; i < maps.size(); i++) {
+ verifySearchParameterOnSingleDaoSearchInvocation(maps.get(i), theExpectedIdentifierParams.get(i));
+ }
+
+ }
+
+ private void verifySearchParameterOnSingleDaoSearchInvocation(SearchParameterMap capturedMap,
+ List theExpectedIdentifierParams) {
+ List> actualIdentifierParams = capturedMap.get("identifier");
+ assertThat(actualIdentifierParams).hasSameSizeAs(theExpectedIdentifierParams);
+ for (int i = 0; i < theExpectedIdentifierParams.size(); i++) {
+ assertThat(actualIdentifierParams.get(i)).hasSize(1);
+ assertThat(actualIdentifierParams.get(i).get(0).getValueAsQueryToken(myFhirContext)).isEqualTo(theExpectedIdentifierParams.get(i));
+ }
+ }
+}
+
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
index 294dac1aab27..8a9a38fdabb8 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml
index 1d310f05d698..0e23db4edbc1 100644
--- a/hapi-fhir-jpaserver-hfql/pom.xml
+++ b/hapi-fhir-jpaserver-hfql/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml
index e9a1ae702fe9..e975d0820298 100644
--- a/hapi-fhir-jpaserver-ips/pom.xml
+++ b/hapi-fhir-jpaserver-ips/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index 76b147a3fc21..771ff69dc498 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 537c53f65ff0..56dc6e50e32a 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/BaseSubscriptionSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/BaseSubscriptionSettings.java
index 4d22cd7bdcbd..d27142d8a93c 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/BaseSubscriptionSettings.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/BaseSubscriptionSettings.java
@@ -55,12 +55,12 @@ public abstract class BaseSubscriptionSettings {
* If this is enabled (default is {@literal false}), changes to Subscription resource would be put on queue immediately.
* Reducing delay between creation of the Subscription and Activation.
*
- * @since 7.8.0
+ * @since 8.0.0
*/
private boolean mySubscriptionChangeQueuedImmediately = false;
/**
- * @since 7.8.0
+ * @since 8.0.0
*
* Regex To perform validation on the endpoint URL for Subscription of type RESTHOOK.
*/
@@ -289,7 +289,7 @@ public boolean hasRestHookEndpointUrlValidationRegex() {
* If this is enabled (default is {@literal false}), changes to Subscription resource would be put on queue immediately.
* Reducing delay between creation of the Subscription and Activation.
*
- * @since 7.8.0
+ * @since 8.0.0
*/
public boolean isSubscriptionChangeQueuedImmediately() {
return mySubscriptionChangeQueuedImmediately;
@@ -299,7 +299,7 @@ public boolean isSubscriptionChangeQueuedImmediately() {
* If this is enabled (default is {@literal false}), changes to Subscription resource would be put on queue immediately.
* Reducing delay between creation of the Subscription and Activation.
*
- * @since 7.8.0
+ * @since 8.0.0
*/
public void setSubscriptionChangeQueuedImmediately(boolean theSubscriptionChangeQueuedImmediately) {
mySubscriptionChangeQueuedImmediately = theSubscriptionChangeQueuedImmediately;
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index 2957e4b357fd..c4f8781e796e 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 5805778dc2c3..e9f3e8f80693 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml
index 38d2a47c79ff..35f2cd910c8d 100644
--- a/hapi-fhir-jpaserver-test-dstu2/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml
index 77bbc6d21254..e5a444ebdd32 100644
--- a/hapi-fhir-jpaserver-test-dstu3/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml
index 783fdbaa7dc4..5a96f279d0d0 100644
--- a/hapi-fhir-jpaserver-test-r4/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImplTest.java
index d34e1c030d48..93ef792ed469 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImplTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2DaoSvcImplTest.java
@@ -43,7 +43,7 @@ class Batch2DaoSvcImplTest extends BaseJpaR4Test {
@BeforeEach
void beforeEach() {
- mySvc = new Batch2DaoSvcImpl(myResourceTableDao, myMatchUrlService, myDaoRegistry, myFhirContext, myIHapiTransactionService);
+ mySvc = new Batch2DaoSvcImpl(myResourceTableDao, myResourceLinkDao, myMatchUrlService, myDaoRegistry, myFhirContext, myIHapiTransactionService);
}
@Test
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/merge/MergeBatchTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/merge/MergeBatchTest.java
new file mode 100644
index 000000000000..196fb407462c
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/merge/MergeBatchTest.java
@@ -0,0 +1,114 @@
+package ca.uhn.fhir.jpa.provider.merge;
+
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper;
+import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
+import ca.uhn.fhir.jpa.test.Batch2JobHelper;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Task;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+
+import static ca.uhn.fhir.batch2.jobs.merge.MergeAppCtx.JOB_MERGE;
+import static org.awaitility.Awaitility.await;
+
+public class MergeBatchTest extends BaseJpaR4Test {
+
+ @Autowired
+ private IJobCoordinator myJobCoordinator;
+ @Autowired
+ private DaoRegistry myDaoRegistry;
+ @Autowired
+ private Batch2JobHelper myBatch2JobHelper;
+
+ SystemRequestDetails mySrd = new SystemRequestDetails();
+
+ private ReplaceReferencesTestHelper myTestHelper;
+
+ @Override
+ @BeforeEach
+ public void before() throws Exception {
+ super.before();
+
+ myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
+ myTestHelper.beforeEach();
+
+ mySrd.setRequestPartitionId(RequestPartitionId.allPartitions());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "true,true",
+ "false,true",
+ "true,false",
+ "false,false"
+ })
+ public void testHappyPath(boolean theDeleteSource, boolean theWithResultResource) {
+ IIdType taskId = createTask();
+
+ MergeJobParameters jobParams = new MergeJobParameters();
+ jobParams.setSourceId(new FhirIdJson(myTestHelper.getSourcePatientId()));
+ jobParams.setTargetId(new FhirIdJson(myTestHelper.getTargetPatientId()));
+ jobParams.setTaskId(taskId);
+ jobParams.setDeleteSource(theDeleteSource);
+ if (theWithResultResource) {
+ String encodedResultPatient = myFhirContext.newJsonParser().encodeResourceToString(myTestHelper.createResultPatient(theDeleteSource));
+ jobParams.setResultResource(encodedResultPatient);
+ }
+
+ JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_MERGE, jobParams);
+ Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
+ JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(jobStartResponse);
+
+ Bundle patchResultBundle = myTestHelper.validateCompletedTask(jobInstance, taskId);
+ ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle, ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES,
+ List.of(
+ "Observation", "Encounter", "CarePlan"));
+
+
+ myTestHelper.assertAllReferencesUpdated();
+ myTestHelper.assertSourcePatientUpdatedOrDeleted(theDeleteSource);
+ myTestHelper.assertTargetPatientUpdated(theDeleteSource,
+ myTestHelper.getExpectedIdentifiersForTargetAfterMerge(theWithResultResource));
+ }
+
+ @Test
+ void testMergeJob_JobFails_ErrorHandlerSetsAssociatedTaskStatusToFailed() {
+ IIdType taskId = createTask();
+
+ MergeJobParameters jobParams = new MergeJobParameters();
+ //use a source that does not exist to force the job to fail
+ jobParams.setSourceId(new FhirIdJson("Patient", "doesnotexist"));
+ jobParams.setTargetId(new FhirIdJson(myTestHelper.getTargetPatientId()));
+ jobParams.setTaskId(taskId);
+
+ JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_MERGE, jobParams);
+ Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
+ myBatch2JobHelper.awaitJobFailure(jobStartResponse);
+
+ await().until(() -> {
+ myBatch2JobHelper.runMaintenancePass();
+ return myTaskDao.read(taskId, mySrd).getStatus().equals(Task.TaskStatus.FAILED);
+ });
+ }
+
+ private IIdType createTask() {
+ Task task = new Task();
+ task.setStatus(Task.TaskStatus.INPROGRESS);
+ return myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
+ }
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMergeR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMergeR4Test.java
new file mode 100644
index 000000000000..f781f54befc8
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMergeR4Test.java
@@ -0,0 +1,390 @@
+package ca.uhn.fhir.jpa.provider.r4;
+
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
+import ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper;
+import ca.uhn.fhir.jpa.test.Batch2JobHelper;
+import ca.uhn.fhir.parser.StrictErrorHandler;
+import ca.uhn.fhir.rest.gclient.IOperationUntypedWithInput;
+import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import jakarta.annotation.Nonnull;
+import jakarta.servlet.http.HttpServletResponse;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Coding;
+import org.hl7.fhir.r4.model.Encounter;
+import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.r4.model.OperationOutcome;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.Resource;
+import org.hl7.fhir.r4.model.Task;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.RegisterExtension;
+import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static ca.uhn.fhir.jpa.provider.ReplaceReferencesSvcImpl.RESOURCE_TYPES_SYSTEM;
+import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER;
+import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER_RESPOND_ASYNC;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_INPUT;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_OUTCOME;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_RESULT;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_TASK;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_RESULT_PATIENT;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.awaitility.Awaitility.await;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class PatientMergeR4Test extends BaseResourceProviderR4Test {
+ static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(PatientMergeR4Test.class);
+
+ @RegisterExtension
+ MyExceptionHandler ourExceptionHandler = new MyExceptionHandler();
+
+ @Autowired
+ Batch2JobHelper myBatch2JobHelper;
+
+ ReplaceReferencesTestHelper myTestHelper;
+
+ @Override
+ @AfterEach
+ public void after() throws Exception {
+ super.after();
+
+ myStorageSettings.setDefaultTransactionEntriesForWrite(new JpaStorageSettings().getDefaultTransactionEntriesForWrite());
+ myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis());
+ }
+
+ @Override
+ @BeforeEach
+ public void before() throws Exception {
+ super.before();
+ myStorageSettings.setReuseCachedSearchResultsForMillis(null);
+ myStorageSettings.setAllowMultipleDelete(true);
+ myFhirContext.setParserErrorHandler(new StrictErrorHandler());
+
+ myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
+ myTestHelper.beforeEach();
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ // withDelete, withInputResultPatient, withPreview, isAsync
+ "true, true, true, false",
+ "true, false, true, false",
+ "false, true, true, false",
+ "false, false, true, false",
+ "true, true, false, false",
+ "true, false, false, false",
+ "false, true, false, false",
+ "false, false, false, false",
+
+ "true, true, true, true",
+ "true, false, true, true",
+ "false, true, true, true",
+ "false, false, true, true",
+ "true, true, false, true",
+ "true, false, false, true",
+ "false, true, false, true",
+ "false, false, false, true",
+ })
+ public void testMerge(boolean withDelete, boolean withInputResultPatient, boolean withPreview, boolean isAsync) {
+ // setup
+
+ ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = new ReplaceReferencesTestHelper.PatientMergeInputParameters();
+ myTestHelper.setSourceAndTarget(inParams);
+ inParams.deleteSource = withDelete;
+ if (withInputResultPatient) {
+ inParams.resultPatient = myTestHelper.createResultPatient(withDelete);
+ }
+ if (withPreview) {
+ inParams.preview = true;
+ }
+
+ Parameters inParameters = inParams.asParametersResource();
+
+ // exec
+ Parameters outParams = callMergeOperation(inParameters, isAsync);
+
+ // validate
+ // in async mode, there will be an additional task resource in the output params
+ assertThat(outParams.getParameter()).hasSizeBetween(3, 4);
+
+ // Assert input
+ Parameters input = (Parameters) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_INPUT).getResource();
+ if (withInputResultPatient) { // if the following assert fails, check that these two patients are identical
+ Patient p1 = (Patient) inParameters.getParameter(OPERATION_MERGE_PARAM_RESULT_PATIENT).getResource();
+ Patient p2 = (Patient) input.getParameter(OPERATION_MERGE_PARAM_RESULT_PATIENT).getResource();
+ ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(p1));
+ ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(p2));
+ }
+ assertTrue(input.equalsDeep(inParameters));
+
+
+ List expectedIdentifiersOnTargetAfterMerge =
+ myTestHelper.getExpectedIdentifiersForTargetAfterMerge(withInputResultPatient);
+
+		// Assert Task in async mode, unless it is preview in which case we don't return a task
+ if (isAsync && !withPreview) {
+ assertThat(getLastHttpStatusCode()).isEqualTo(HttpServletResponse.SC_ACCEPTED);
+
+ Task task = (Task) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_TASK).getResource();
+ assertNull(task.getIdElement().getVersionIdPart());
+ ourLog.info("Got task {}", task.getId());
+ String jobId = myTestHelper.getJobIdFromTask(task);
+ myBatch2JobHelper.awaitJobCompletion(jobId);
+
+ Task taskWithOutput = myTaskDao.read(task.getIdElement(), mySrd);
+ assertThat(taskWithOutput.getStatus()).isEqualTo(Task.TaskStatus.COMPLETED);
+ ourLog.info("Complete Task: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(taskWithOutput));
+
+ Task.TaskOutputComponent taskOutput = taskWithOutput.getOutputFirstRep();
+
+ // Assert on the output type
+ Coding taskType = taskOutput.getType().getCodingFirstRep();
+ assertEquals(RESOURCE_TYPES_SYSTEM, taskType.getSystem());
+ assertEquals("Bundle", taskType.getCode());
+
+ List containedResources = taskWithOutput.getContained();
+ assertThat(containedResources)
+ .hasSize(1)
+ .element(0)
+ .isInstanceOf(Bundle.class);
+
+ Bundle containedBundle = (Bundle) containedResources.get(0);
+
+ Reference outputRef = (Reference) taskOutput.getValue();
+ Bundle patchResultBundle = (Bundle) outputRef.getResource();
+ assertTrue(containedBundle.equalsDeep(patchResultBundle));
+ ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle,
+ ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES,
+ List.of("Observation", "Encounter", "CarePlan"));
+
+ OperationOutcome outcome = (OperationOutcome) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_OUTCOME).getResource();
+ assertThat(outcome.getIssue())
+ .hasSize(1)
+ .element(0)
+ .satisfies(issue -> {
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
+ assertThat(issue.getDetails().getText()).isEqualTo("Merge request is accepted, and will be " +
+ "processed asynchronously. See task resource returned in this response for details.");
+ });
+
+ } else { // Synchronous case
+ // Assert outcome
+ OperationOutcome outcome = (OperationOutcome) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_OUTCOME).getResource();
+
+ if (withPreview) {
+ assertThat(outcome.getIssue())
+ .hasSize(1)
+ .element(0)
+ .satisfies(issue -> {
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
+ assertThat(issue.getDetails().getText()).isEqualTo("Preview only merge operation - no issues detected");
+ assertThat(issue.getDiagnostics()).isEqualTo("Merge would update 25 resources");
+ });
+ } else {
+ assertThat(outcome.getIssue())
+ .hasSize(1)
+ .element(0)
+ .satisfies(issue -> {
+ assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
+ assertThat(issue.getDetails().getText()).isEqualTo("Merge operation completed successfully.");
+ });
+ }
+
+ // Assert Merged Patient
+ Patient mergedPatient = (Patient) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_RESULT).getResource();
+ List identifiers = mergedPatient.getIdentifier();
+
+			// TODO ED We can also validate that the result patient returned here has the same id as the target patient.
+			// And maybe in the non-preview case, we should also read the target patient from the db and assert it equals the result returned.
+ myTestHelper.assertIdentifiers(identifiers, expectedIdentifiersOnTargetAfterMerge);
+ }
+
+ // Check that the linked resources were updated
+ if (withPreview) {
+ myTestHelper.assertNothingChanged();
+ } else {
+ myTestHelper.assertAllReferencesUpdated(withDelete);
+ myTestHelper.assertSourcePatientUpdatedOrDeleted(withDelete);
+ myTestHelper.assertTargetPatientUpdated(withDelete, expectedIdentifiersOnTargetAfterMerge);
+ }
+ }
+
+ @Test
+ void testMerge_smallResourceLimit() {
+ ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = new ReplaceReferencesTestHelper.PatientMergeInputParameters();
+ myTestHelper.setSourceAndTarget(inParams);
+
+ inParams.resourceLimit = 5;
+ Parameters inParameters = inParams.asParametersResource();
+
+ // exec
+ assertThatThrownBy(() -> callMergeOperation(inParameters, false))
+ .isInstanceOf(PreconditionFailedException.class)
+ .satisfies(ex -> assertThat(extractFailureMessage((BaseServerResponseException) ex)).isEqualTo("HAPI-2597: Number of resources with references to "+ myTestHelper.getSourcePatientId() + " exceeds the resource-limit 5. Submit the request asynchronsly by adding the HTTP Header 'Prefer: respond-async'."));
+ }
+
+ @Test
+ void testMerge_SourceResourceCannotBeDeletedBecauseAnotherResourceReferencingSourceWasAddedWhileJobIsRunning_JobFails() {
+ ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = new ReplaceReferencesTestHelper.PatientMergeInputParameters();
+ myTestHelper.setSourceAndTarget(inParams);
+ inParams.deleteSource = true;
+ //using a small batch size that would result in multiple chunks to ensure that
+		//the job runs a bit slowly so that we have some time to add a resource that references the source
+ //after the first step
+ myStorageSettings.setDefaultTransactionEntriesForWrite(5);
+ Parameters inParameters = inParams.asParametersResource();
+
+ // exec
+ Parameters outParams = callMergeOperation(inParameters, true);
+ Task task = (Task) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_TASK).getResource();
+ assertNull(task.getIdElement().getVersionIdPart());
+ ourLog.info("Got task {}", task.getId());
+ String jobId = myTestHelper.getJobIdFromTask(task);
+
+ // wait for first step of the job to finish
+ await()
+ .until(() -> {
+ myBatch2JobHelper.runMaintenancePass();
+ String currentGatedStepId = myJobCoordinator.getInstance(jobId).getCurrentGatedStepId();
+ return !"query-ids".equals(currentGatedStepId);
+ });
+
+ Encounter enc = new Encounter();
+ enc.setStatus(Encounter.EncounterStatus.ARRIVED);
+ enc.getSubject().setReferenceElement(myTestHelper.getSourcePatientId());
+ myEncounterDao.create(enc, mySrd);
+
+ myBatch2JobHelper.awaitJobFailure(jobId);
+
+
+ Task taskAfterJobFailure = myTaskDao.read(task.getIdElement().toVersionless(), mySrd);
+ assertThat(taskAfterJobFailure.getStatus()).isEqualTo(Task.TaskStatus.FAILED);
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ // withDelete, withInputResultPatient, withPreview
+ "true, true, true",
+ "true, false, true",
+ "false, true, true",
+ "false, false, true",
+ "true, true, false",
+ "true, false, false",
+ "false, true, false",
+ "false, false, false",
+ })
+ public void testMultipleTargetMatchesFails(boolean withDelete, boolean withInputResultPatient, boolean withPreview) {
+ ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = myTestHelper.buildMultipleTargetMatchParameters(withDelete, withInputResultPatient, withPreview);
+
+ Parameters inParameters = inParams.asParametersResource();
+
+ assertUnprocessibleEntityWithMessage(inParameters, "Multiple resources found matching the identifier(s) specified in 'target-patient-identifier'");
+ }
+
+
+ @ParameterizedTest
+ @CsvSource({
+ // withDelete, withInputResultPatient, withPreview
+ "true, true, true",
+ "true, false, true",
+ "false, true, true",
+ "false, false, true",
+ "true, true, false",
+ "true, false, false",
+ "false, true, false",
+ "false, false, false",
+ })
+ public void testMultipleSourceMatchesFails(boolean withDelete, boolean withInputResultPatient, boolean withPreview) {
+ ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = myTestHelper.buildMultipleSourceMatchParameters(withDelete, withInputResultPatient, withPreview);
+
+ Parameters inParameters = inParams.asParametersResource();
+
+ assertUnprocessibleEntityWithMessage(inParameters, "Multiple resources found matching the identifier(s) specified in 'source-patient-identifier'");
+ }
+
+ @Test
+ void test_MissingRequiredParameters_Returns400BadRequest() {
+ assertThatThrownBy(() -> callMergeOperation(new Parameters())
+ ).isInstanceOf(InvalidRequestException.class)
+ .extracting(InvalidRequestException.class::cast)
+ .extracting(BaseServerResponseException::getStatusCode)
+ .isEqualTo(400);
+ }
+
+ private void assertUnprocessibleEntityWithMessage(Parameters inParameters, String theExpectedMessage) {
+ assertThatThrownBy(() ->
+ callMergeOperation(inParameters))
+ .isInstanceOf(UnprocessableEntityException.class)
+ .extracting(UnprocessableEntityException.class::cast)
+ .extracting(this::extractFailureMessage)
+ .isEqualTo(theExpectedMessage);
+ }
+
+ private void callMergeOperation(Parameters inParameters) {
+ this.callMergeOperation(inParameters, false);
+ }
+
+ private Parameters callMergeOperation(Parameters inParameters, boolean isAsync) {
+ IOperationUntypedWithInput request = myClient.operation()
+ .onType("Patient")
+ .named(OPERATION_MERGE)
+ .withParameters(inParameters);
+
+ if (isAsync) {
+ request.withAdditionalHeader(HEADER_PREFER, HEADER_PREFER_RESPOND_ASYNC);
+ }
+
+ return request
+ .returnResourceType(Parameters.class)
+ .execute();
+ }
+
+ class MyExceptionHandler implements TestExecutionExceptionHandler {
+ @Override
+ public void handleTestExecutionException(ExtensionContext theExtensionContext, Throwable theThrowable) throws Throwable {
+ if (theThrowable instanceof BaseServerResponseException) {
+ BaseServerResponseException ex = (BaseServerResponseException) theThrowable;
+ String message = extractFailureMessage(ex);
+ throw ex.getClass().getDeclaredConstructor(String.class, Throwable.class).newInstance(message, ex);
+ }
+ throw theThrowable;
+ }
+ }
+
+ private @Nonnull String extractFailureMessage(BaseServerResponseException ex) {
+ String body = ex.getResponseBody();
+ if (body != null) {
+ Parameters outParams = myFhirContext.newJsonParser().parseResource(Parameters.class, body);
+ OperationOutcome outcome = (OperationOutcome) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_OUTCOME).getResource();
+ return outcome.getIssue().stream()
+ .map(OperationOutcome.OperationOutcomeIssueComponent::getDiagnostics)
+ .collect(Collectors.joining(", "));
+ } else {
+ return "null";
+ }
+ }
+
+ @Override
+ protected boolean verboseClientLogging() {
+ return true;
+ }
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ReplaceReferencesR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ReplaceReferencesR4Test.java
new file mode 100644
index 000000000000..3e01dc25e662
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ReplaceReferencesR4Test.java
@@ -0,0 +1,160 @@
+package ca.uhn.fhir.jpa.provider.r4;
+
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
+import ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
+import jakarta.servlet.http.HttpServletResponse;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Coding;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.Resource;
+import org.hl7.fhir.r4.model.Task;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+
+import java.util.List;
+
+import static ca.uhn.fhir.jpa.provider.ReplaceReferencesSvcImpl.RESOURCE_TYPES_SYSTEM;
+import static ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper.EXPECTED_SMALL_BATCHES;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ReplaceReferencesR4Test extends BaseResourceProviderR4Test {
+ ReplaceReferencesTestHelper myTestHelper;
+
+ @Override
+ @AfterEach
+ public void after() throws Exception {
+ super.after();
+ myStorageSettings.setDefaultTransactionEntriesForWrite(new JpaStorageSettings().getDefaultTransactionEntriesForWrite());
+ }
+
+ @Override
+ @BeforeEach
+ public void before() throws Exception {
+ super.before();
+
+ myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
+ myTestHelper.beforeEach();
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {false, true})
+ void testReplaceReferences(boolean isAsync) {
+ // exec
+ Parameters outParams = myTestHelper.callReplaceReferences(myClient, isAsync);
+
+ assertThat(outParams.getParameter()).hasSize(1);
+
+ Bundle patchResultBundle;
+ if (isAsync) {
+ assertThat(getLastHttpStatusCode()).isEqualTo(HttpServletResponse.SC_ACCEPTED);
+
+ Task task = (Task) outParams.getParameter(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK).getResource();
+ assertNull(task.getIdElement().getVersionIdPart());
+ ourLog.info("Got task {}", task.getId());
+
+ JobInstance jobInstance = awaitJobCompletion(task);
+
+ patchResultBundle = myTestHelper.validateCompletedTask(jobInstance, task.getIdElement());
+ } else {
+ patchResultBundle = (Bundle) outParams.getParameter(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME).getResource();
+ }
+
+ // validate
+ ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle,
+ ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES, List.of(
+ "Observation", "Encounter", "CarePlan"));
+
+ // Check that the linked resources were updated
+
+ myTestHelper.assertAllReferencesUpdated();
+ }
+
+ private JobInstance awaitJobCompletion(Task task) {
+ String jobId = myTestHelper.getJobIdFromTask(task);
+ return myBatch2JobHelper.awaitJobCompletion(jobId);
+ }
+
+ @Test
+ void testReplaceReferencesSmallResourceLimitSync() {
+ assertThatThrownBy(() -> myTestHelper.callReplaceReferencesWithResourceLimit(myClient, false, ReplaceReferencesTestHelper.SMALL_BATCH_SIZE))
+ .isInstanceOf(PreconditionFailedException.class)
+ .hasMessage("HTTP 412 Precondition Failed: HAPI-2597: Number of resources with references to " + myTestHelper.getSourcePatientId() + " exceeds the resource-limit 5. Submit the request asynchronsly by adding the HTTP Header 'Prefer: respond-async'.");
+ }
+
+ @Test
+ void testReplaceReferencesSmallTransactionEntriesSize() {
+ myStorageSettings.setDefaultTransactionEntriesForWrite(5);
+
+ // exec
+ Parameters outParams = myTestHelper.callReplaceReferencesWithResourceLimit(myClient, true, ReplaceReferencesTestHelper.SMALL_BATCH_SIZE);
+
+ assertThat(getLastHttpStatusCode()).isEqualTo(HttpServletResponse.SC_ACCEPTED);
+
+ assertThat(outParams.getParameter()).hasSize(1);
+
+ Bundle patchResultBundle;
+ Task task = (Task) outParams.getParameter(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK).getResource();
+ assertNull(task.getIdElement().getVersionIdPart());
+ ourLog.info("Got task {}", task.getId());
+
+ awaitJobCompletion(task);
+
+ Task taskWithOutput = myTaskDao.read(task.getIdElement(), mySrd);
+ ourLog.info("Complete Task: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(taskWithOutput));
+
+ assertThat(taskWithOutput.getOutput()).as("task " + task.getId() + " has size " + EXPECTED_SMALL_BATCHES).hasSize(EXPECTED_SMALL_BATCHES);
+ List containedResources = taskWithOutput.getContained();
+
+ assertThat(containedResources)
+ .hasSize(EXPECTED_SMALL_BATCHES)
+ .element(0)
+ .isInstanceOf(Bundle.class);
+
+ int entriesLeft = ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES;
+ for (int i = 1; i < EXPECTED_SMALL_BATCHES; i++) {
+
+ Task.TaskOutputComponent taskOutput = taskWithOutput.getOutput().get(i);
+
+ // Assert on the output type
+ Coding taskType = taskOutput.getType().getCodingFirstRep();
+ assertEquals(RESOURCE_TYPES_SYSTEM, taskType.getSystem());
+ assertEquals("Bundle", taskType.getCode());
+
+ Bundle containedBundle = (Bundle) containedResources.get(i);
+
+ Reference outputRef = (Reference) taskOutput.getValue();
+ patchResultBundle = (Bundle) outputRef.getResource();
+ assertTrue(containedBundle.equalsDeep(patchResultBundle));
+
+ // validate
+ entriesLeft -= ReplaceReferencesTestHelper.SMALL_BATCH_SIZE;
+ int expectedNumberOfEntries = Math.min(entriesLeft, ReplaceReferencesTestHelper.SMALL_BATCH_SIZE);
+ ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle, expectedNumberOfEntries, List.of("Observation",
+ "Encounter", "CarePlan"));
+ }
+
+ // Check that the linked resources were updated
+
+ myTestHelper.assertAllReferencesUpdated();
+ }
+
+ // TODO ED we should add some tests for the invalid request error cases (and assert 4xx status code)
+
+ @Override
+ protected boolean verboseClientLogging() {
+ return true;
+ }
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/replacereferences/ReplaceReferencesBatchTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/replacereferences/ReplaceReferencesBatchTest.java
new file mode 100644
index 000000000000..6779dfef9ac8
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/replacereferences/ReplaceReferencesBatchTest.java
@@ -0,0 +1,96 @@
+package ca.uhn.fhir.jpa.replacereferences;
+
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
+import ca.uhn.fhir.jpa.test.Batch2JobHelper;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Task;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+
+import static ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesAppCtx.JOB_REPLACE_REFERENCES;
+import static org.awaitility.Awaitility.await;
+
+public class ReplaceReferencesBatchTest extends BaseJpaR4Test {
+
+ @Autowired
+ private IJobCoordinator myJobCoordinator;
+ @Autowired
+ private DaoRegistry myDaoRegistry;
+ @Autowired
+ private Batch2JobHelper myBatch2JobHelper;
+
+ SystemRequestDetails mySrd = new SystemRequestDetails();
+
+ private ReplaceReferencesTestHelper myTestHelper;
+
+ @Override
+ @BeforeEach
+ public void before() throws Exception {
+ super.before();
+
+ myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
+ myTestHelper.beforeEach();
+
+ mySrd.setRequestPartitionId(RequestPartitionId.allPartitions());
+ }
+
+ @Test
+ public void testHappyPath() {
+ IIdType taskId = createReplaceReferencesTask();
+
+ ReplaceReferencesJobParameters jobParams = new ReplaceReferencesJobParameters();
+ jobParams.setSourceId(new FhirIdJson(myTestHelper.getSourcePatientId()));
+ jobParams.setTargetId(new FhirIdJson(myTestHelper.getTargetPatientId()));
+ jobParams.setTaskId(taskId);
+
+ JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_REPLACE_REFERENCES, jobParams);
+ Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
+ JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(jobStartResponse);
+
+ Bundle patchResultBundle = myTestHelper.validateCompletedTask(jobInstance, taskId);
+ ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle, ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES, List.of(
+ "Observation", "Encounter", "CarePlan"));
+
+ myTestHelper.assertAllReferencesUpdated();
+ }
+
+
+ @Test
+ void testReplaceReferencesJob_JobFails_ErrorHandlerSetsAssociatedTaskStatusToFailed() {
+ IIdType taskId = createReplaceReferencesTask();
+
+ ReplaceReferencesJobParameters jobParams = new ReplaceReferencesJobParameters();
+ jobParams.setSourceId(new FhirIdJson(myTestHelper.getSourcePatientId()));
+ //use a target that does not exist to force the job to fail
+ jobParams.setTargetId(new FhirIdJson("Patient", "doesnotexist"));
+ jobParams.setTaskId(taskId);
+
+ JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_REPLACE_REFERENCES, jobParams);
+ Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
+ myBatch2JobHelper.awaitJobFailure(jobStartResponse);
+
+ await().until(() -> {
+ myBatch2JobHelper.runMaintenancePass();
+ return myTaskDao.read(taskId, mySrd).getStatus().equals(Task.TaskStatus.FAILED);
+ });
+ }
+
+ private IIdType createReplaceReferencesTask() {
+ Task task = new Task();
+ task.setStatus(Task.TaskStatus.INPROGRESS);
+ return myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
+ }
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml
index 89eff81d8ac3..569c06704c48 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml
+++ b/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml
@@ -16,8 +16,14 @@
-
+
+
+
+
+
+
+
diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml
index 030df7804176..87bdf253448a 100644
--- a/hapi-fhir-jpaserver-test-r4b/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4b/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml
index 6f025d7b32a1..9ee03e7f63e7 100644
--- a/hapi-fhir-jpaserver-test-r5/pom.xml
+++ b/hapi-fhir-jpaserver-test-r5/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index 6337d9647557..4c7a5a99a7a1 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java
index 64b24b42e239..523f3ec80547 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java
@@ -25,13 +25,17 @@
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
+import ca.uhn.fhir.jpa.provider.merge.PatientMergeProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.api.EncodingEnum;
+import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IGenericClient;
+import ca.uhn.fhir.rest.client.api.IHttpRequest;
+import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor;
@@ -74,6 +78,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
@RegisterExtension
protected RestfulServerExtension myServer;
+ private MyHttpCodeClientIntercepter myLastHttpResponseCodeCapture = new MyHttpCodeClientIntercepter();
+
@RegisterExtension
protected RestfulServerConfigurerExtension myServerConfigurer = new RestfulServerConfigurerExtension(() -> myServer)
.withServerBeforeAll(s -> {
@@ -94,6 +100,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
s.registerProvider(myAppCtx.getBean(SubscriptionTriggeringProvider.class));
s.registerProvider(myAppCtx.getBean(TerminologyUploaderProvider.class));
s.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class));
+ s.registerProvider(myAppCtx.getBean(PatientMergeProvider.class));
s.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class));
@@ -127,8 +134,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
myClient.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof LoggingInterceptor);
if (shouldLogClient()) {
- myClient.registerInterceptor(new LoggingInterceptor());
+ myClient.registerInterceptor(new LoggingInterceptor(verboseClientLogging()));
}
+
+ myClient.registerInterceptor(myLastHttpResponseCodeCapture);
});
@Autowired
@@ -157,6 +166,10 @@ protected boolean shouldLogClient() {
return true;
}
+ protected boolean verboseClientLogging() {
+ return false;
+ }
+
protected List toNameList(Bundle resp) {
List names = new ArrayList<>();
for (BundleEntryComponent next : resp.getEntry()) {
@@ -172,6 +185,10 @@ protected List toNameList(Bundle resp) {
return names;
}
+ protected int getLastHttpStatusCode() {
+ return myLastHttpResponseCodeCapture.getLastHttpStatusCode();
+ }
+
public static int getNumberOfParametersByName(Parameters theParameters, String theName) {
int retVal = 0;
@@ -241,4 +258,21 @@ protected List searchAndReturnUnqualifiedVersionlessIdValues(String uri)
return ids;
}
+
+ private class MyHttpCodeClientIntercepter implements IClientInterceptor {
+
+ private int myLastHttpStatusCode;
+
+ @Override
+ public void interceptRequest(IHttpRequest theRequest) {}
+
+ @Override
+ public void interceptResponse(IHttpResponse theResponse) throws IOException {
+ myLastHttpStatusCode = theResponse.getStatus();
+ }
+
+ public int getLastHttpStatusCode() {
+ return myLastHttpStatusCode;
+ }
+ }
}
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/replacereferences/ReplaceReferencesTestHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/replacereferences/ReplaceReferencesTestHelper.java
new file mode 100644
index 000000000000..794bdc9b6f79
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/replacereferences/ReplaceReferencesTestHelper.java
@@ -0,0 +1,483 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.replacereferences;
+
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceResultsJson;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
+import ca.uhn.fhir.jpa.api.dao.PatientEverythingParameters;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import ca.uhn.fhir.rest.client.api.IGenericClient;
+import ca.uhn.fhir.rest.gclient.IOperationUntypedWithInputAndPartialOutput;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import ca.uhn.fhir.util.JsonUtil;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.BooleanType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.CarePlan;
+import org.hl7.fhir.r4.model.Coding;
+import org.hl7.fhir.r4.model.Encounter;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.r4.model.IntegerType;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.OperationOutcome;
+import org.hl7.fhir.r4.model.Organization;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.Resource;
+import org.hl7.fhir.r4.model.StringType;
+import org.hl7.fhir.r4.model.Task;
+import org.hl7.fhir.r4.model.Type;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import static ca.uhn.fhir.jpa.provider.ReplaceReferencesSvcImpl.RESOURCE_TYPES_SYSTEM;
+import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER;
+import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER_RESPOND_ASYNC;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.HAPI_BATCH_JOB_ID_SYSTEM;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ReplaceReferencesTestHelper {
+ private static final Logger ourLog = LoggerFactory.getLogger(ReplaceReferencesTestHelper.class);
+
+ static final Identifier pat1IdentifierA =
+ new Identifier().setSystem("SYS1A").setValue("VAL1A");
+ static final Identifier pat1IdentifierB =
+ new Identifier().setSystem("SYS1B").setValue("VAL1B");
+ static final Identifier pat2IdentifierA =
+ new Identifier().setSystem("SYS2A").setValue("VAL2A");
+ static final Identifier pat2IdentifierB =
+ new Identifier().setSystem("SYS2B").setValue("VAL2B");
+ static final Identifier patBothIdentifierC =
+ new Identifier().setSystem("SYSC").setValue("VALC");
+ public static final int TOTAL_EXPECTED_PATCHES = 23;
+ public static final int SMALL_BATCH_SIZE = 5;
+ public static final int EXPECTED_SMALL_BATCHES = (TOTAL_EXPECTED_PATCHES + SMALL_BATCH_SIZE - 1) / SMALL_BATCH_SIZE;
+ private final IFhirResourceDaoPatient myPatientDao;
+ private final IFhirResourceDao myTaskDao;
+ private final IFhirResourceDao myOrganizationDao;
+ private final IFhirResourceDao myEncounterDao;
+ private final IFhirResourceDao myCarePlanDao;
+ private final IFhirResourceDao myObservationDao;
+
+ private IIdType myOrgId;
+ private IIdType mySourcePatientId;
+ private IIdType mySourceCarePlanId;
+ private IIdType mySourceEncId1;
+ private IIdType mySourceEncId2;
+ private ArrayList mySourceObsIds;
+ private IIdType myTargetPatientId;
+ private IIdType myTargetEnc1;
+
+ private final FhirContext myFhirContext;
+ private final SystemRequestDetails mySrd = new SystemRequestDetails();
+
+ public ReplaceReferencesTestHelper(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
+ myFhirContext = theFhirContext;
+ myPatientDao = (IFhirResourceDaoPatient) theDaoRegistry.getResourceDao(Patient.class);
+ myTaskDao = theDaoRegistry.getResourceDao(Task.class);
+ myOrganizationDao = theDaoRegistry.getResourceDao(Organization.class);
+ myEncounterDao = theDaoRegistry.getResourceDao(Encounter.class);
+ myCarePlanDao = theDaoRegistry.getResourceDao(CarePlan.class);
+ myObservationDao = theDaoRegistry.getResourceDao(Observation.class);
+ }
+
+ public void beforeEach() throws Exception {
+
+ Organization org = new Organization();
+ org.setName("an org");
+ myOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
+ ourLog.info("OrgId: {}", myOrgId);
+
+ Patient patient1 = new Patient();
+ patient1.getManagingOrganization().setReferenceElement(myOrgId);
+ patient1.addIdentifier(pat1IdentifierA);
+ patient1.addIdentifier(pat1IdentifierB);
+ patient1.addIdentifier(patBothIdentifierC);
+ mySourcePatientId = myPatientDao.create(patient1, mySrd).getId().toUnqualifiedVersionless();
+
+ Patient patient2 = new Patient();
+ patient2.addIdentifier(pat2IdentifierA);
+ patient2.addIdentifier(pat2IdentifierB);
+ patient2.addIdentifier(patBothIdentifierC);
+ patient2.getManagingOrganization().setReferenceElement(myOrgId);
+ myTargetPatientId = myPatientDao.create(patient2, mySrd).getId().toUnqualifiedVersionless();
+
+ Encounter enc1 = new Encounter();
+ enc1.setStatus(Encounter.EncounterStatus.CANCELLED);
+ enc1.getSubject().setReferenceElement(mySourcePatientId);
+ enc1.getServiceProvider().setReferenceElement(myOrgId);
+ mySourceEncId1 = myEncounterDao.create(enc1, mySrd).getId().toUnqualifiedVersionless();
+
+ Encounter enc2 = new Encounter();
+ enc2.setStatus(Encounter.EncounterStatus.ARRIVED);
+ enc2.getSubject().setReferenceElement(mySourcePatientId);
+ enc2.getServiceProvider().setReferenceElement(myOrgId);
+ mySourceEncId2 = myEncounterDao.create(enc2, mySrd).getId().toUnqualifiedVersionless();
+
+ CarePlan carePlan = new CarePlan();
+ carePlan.setStatus(CarePlan.CarePlanStatus.ACTIVE);
+ carePlan.getSubject().setReferenceElement(mySourcePatientId);
+ mySourceCarePlanId = myCarePlanDao.create(carePlan, mySrd).getId().toUnqualifiedVersionless();
+
+ Encounter targetEnc1 = new Encounter();
+ targetEnc1.setStatus(Encounter.EncounterStatus.ARRIVED);
+ targetEnc1.getSubject().setReferenceElement(myTargetPatientId);
+ targetEnc1.getServiceProvider().setReferenceElement(myOrgId);
+ this.myTargetEnc1 = myEncounterDao.create(targetEnc1, mySrd).getId().toUnqualifiedVersionless();
+
+ mySourceObsIds = new ArrayList<>();
+ for (int i = 0; i < 20; i++) {
+ Observation obs = new Observation();
+ obs.getSubject().setReferenceElement(mySourcePatientId);
+ obs.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obsId = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
+ mySourceObsIds.add(obsId);
+ }
+ }
+
+ public void setSourceAndTarget(PatientMergeInputParameters inParams) {
+ inParams.sourcePatient = new Reference().setReferenceElement(mySourcePatientId);
+ inParams.targetPatient = new Reference().setReferenceElement(myTargetPatientId);
+ }
+
+ public Patient createResultPatient(boolean theDeleteSource) {
+ Patient resultPatient = new Patient();
+ resultPatient.setIdElement((IdType) myTargetPatientId);
+ resultPatient.addIdentifier(pat1IdentifierA);
+ if (!theDeleteSource) {
+ // add the link only if we are not deleting the source
+ Patient.PatientLinkComponent link = resultPatient.addLink();
+ link.setOther(new Reference(mySourcePatientId));
+ link.setType(Patient.LinkType.REPLACES);
+ }
+ return resultPatient;
+ }
+
+ public Patient readSourcePatient() {
+ return myPatientDao.read(mySourcePatientId, mySrd);
+ }
+
+ public Patient readTargetPatient() {
+ return myPatientDao.read(myTargetPatientId, mySrd);
+ }
+
+ public IIdType getTargetPatientId() {
+ return myTargetPatientId;
+ }
+
+ private Set getTargetEverythingResourceIds() {
+ PatientEverythingParameters everythingParams = new PatientEverythingParameters();
+ everythingParams.setCount(new IntegerType(100));
+
+ IBundleProvider bundleProvider =
+ myPatientDao.patientInstanceEverything(null, mySrd, everythingParams, myTargetPatientId);
+
+ assertNull(bundleProvider.getNextPageId());
+
+ return bundleProvider.getAllResources().stream()
+ .map(IBaseResource::getIdElement)
+ .map(IIdType::toUnqualifiedVersionless)
+ .collect(Collectors.toSet());
+ }
+
+ public String getJobIdFromTask(Task task) {
+ assertThat(task.getIdentifier())
+ .hasSize(1)
+ .element(0)
+ .extracting(Identifier::getSystem)
+ .isEqualTo(HAPI_BATCH_JOB_ID_SYSTEM);
+
+ return task.getIdentifierFirstRep().getValue();
+ }
+
+ public Parameters callReplaceReferences(IGenericClient theFhirClient, boolean theIsAsync) {
+ return callReplaceReferencesWithResourceLimit(theFhirClient, theIsAsync, null);
+ }
+
+ public Parameters callReplaceReferencesWithResourceLimit(
+ IGenericClient theFhirClient, boolean theIsAsync, Integer theResourceLimit) {
+ IOperationUntypedWithInputAndPartialOutput request = theFhirClient
+ .operation()
+ .onServer()
+ .named(OPERATION_REPLACE_REFERENCES)
+ .withParameter(
+ Parameters.class,
+ ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID,
+ new StringType(mySourcePatientId.getValue()))
+ .andParameter(
+ ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID,
+ new StringType(myTargetPatientId.getValue()));
+ if (theResourceLimit != null) {
+ request.andParameter(
+ ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT, new IntegerType(theResourceLimit));
+ }
+
+ if (theIsAsync) {
+ request.withAdditionalHeader(HEADER_PREFER, HEADER_PREFER_RESPOND_ASYNC);
+ }
+
+ return request.returnResourceType(Parameters.class).execute();
+ }
+
+ public void assertAllReferencesUpdated() {
+ assertAllReferencesUpdated(false);
+ }
+
+ public void assertAllReferencesUpdated(boolean theWithDelete) {
+
+ Set actual = getTargetEverythingResourceIds();
+
+ ourLog.info("Found IDs: {}", actual);
+
+ if (theWithDelete) {
+ assertThat(actual).doesNotContain(mySourcePatientId);
+ }
+ assertThat(actual).contains(mySourceEncId1);
+ assertThat(actual).contains(mySourceEncId2);
+ assertThat(actual).contains(myOrgId);
+ assertThat(actual).contains(mySourceCarePlanId);
+ assertThat(actual).containsAll(mySourceObsIds);
+ assertThat(actual).contains(myTargetPatientId);
+ assertThat(actual).contains(myTargetEnc1);
+ }
+
+ public void assertNothingChanged() {
+ Set actual = getTargetEverythingResourceIds();
+
+ ourLog.info("Found IDs: {}", actual);
+
+ assertThat(actual).doesNotContain(mySourcePatientId);
+ assertThat(actual).doesNotContain(mySourceEncId1);
+ assertThat(actual).doesNotContain(mySourceEncId2);
+ assertThat(actual).contains(myOrgId);
+ assertThat(actual).doesNotContain(mySourceCarePlanId);
+ assertThat(actual).doesNotContainAnyElementsOf(mySourceObsIds);
+ assertThat(actual).contains(myTargetPatientId);
+ assertThat(actual).contains(myTargetEnc1);
+
+	// TODO ED: should we also assert here that the source still has all the references it had before the
+	// operation, in addition to validating that the target doesn't contain the references.
+ }
+
+ public PatientMergeInputParameters buildMultipleTargetMatchParameters(
+ boolean theWithDelete, boolean theWithInputResultPatient, boolean theWithPreview) {
+ PatientMergeInputParameters inParams = new PatientMergeInputParameters();
+ inParams.sourcePatient = new Reference().setReferenceElement(mySourcePatientId);
+ inParams.targetPatientIdentifier = patBothIdentifierC;
+ inParams.deleteSource = theWithDelete;
+ if (theWithInputResultPatient) {
+ inParams.resultPatient = createResultPatient(theWithDelete);
+ }
+ if (theWithPreview) {
+ inParams.preview = true;
+ }
+ return inParams;
+ }
+
+ public PatientMergeInputParameters buildMultipleSourceMatchParameters(
+ boolean theWithDelete, boolean theWithInputResultPatient, boolean theWithPreview) {
+ PatientMergeInputParameters inParams = new PatientMergeInputParameters();
+ inParams.sourcePatientIdentifier = patBothIdentifierC;
+ inParams.targetPatient = new Reference().setReferenceElement(mySourcePatientId);
+ inParams.deleteSource = theWithDelete;
+ if (theWithInputResultPatient) {
+ inParams.resultPatient = createResultPatient(theWithDelete);
+ }
+ if (theWithPreview) {
+ inParams.preview = true;
+ }
+ return inParams;
+ }
+
+ public IIdType getSourcePatientId() {
+ return mySourcePatientId;
+ }
+
+ public static class PatientMergeInputParameters {
+ public Type sourcePatient;
+ public Type sourcePatientIdentifier;
+ public Type targetPatient;
+ public Type targetPatientIdentifier;
+ public Patient resultPatient;
+ public Boolean preview;
+ public Boolean deleteSource;
+ public Integer resourceLimit;
+
+ public Parameters asParametersResource() {
+ Parameters inParams = new Parameters();
+ if (sourcePatient != null) {
+ inParams.addParameter().setName("source-patient").setValue(sourcePatient);
+ }
+ if (sourcePatientIdentifier != null) {
+ inParams.addParameter().setName("source-patient-identifier").setValue(sourcePatientIdentifier);
+ }
+ if (targetPatient != null) {
+ inParams.addParameter().setName("target-patient").setValue(targetPatient);
+ }
+ if (targetPatientIdentifier != null) {
+ inParams.addParameter().setName("target-patient-identifier").setValue(targetPatientIdentifier);
+ }
+ if (resultPatient != null) {
+ inParams.addParameter().setName("result-patient").setResource(resultPatient);
+ }
+ if (preview != null) {
+ inParams.addParameter().setName("preview").setValue(new BooleanType(preview));
+ }
+ if (deleteSource != null) {
+ inParams.addParameter().setName("delete-source").setValue(new BooleanType(deleteSource));
+ }
+ if (resourceLimit != null) {
+ inParams.addParameter().setName("batch-size").setValue(new IntegerType(resourceLimit));
+ }
+ return inParams;
+ }
+ }
+
+ public static void validatePatchResultBundle(
+ Bundle patchResultBundle, int theTotalExpectedPatches, List theExpectedResourceTypes) {
+ String resourceMatchString = "(" + String.join("|", theExpectedResourceTypes) + ")";
+ Pattern expectedPatchIssuePattern =
+ Pattern.compile("Successfully patched resource \"" + resourceMatchString + "/\\d+/_history/\\d+\".");
+ assertThat(patchResultBundle.getEntry())
+ .hasSize(theTotalExpectedPatches)
+ .allSatisfy(entry -> assertThat(entry.getResponse().getOutcome())
+ .isInstanceOf(OperationOutcome.class)
+ .extracting(OperationOutcome.class::cast)
+ .extracting(OperationOutcome::getIssue)
+ .satisfies(issues -> assertThat(issues)
+ .hasSize(1)
+ .element(0)
+ .extracting(OperationOutcome.OperationOutcomeIssueComponent::getDiagnostics)
+ .satisfies(
+ diagnostics -> assertThat(diagnostics).matches(expectedPatchIssuePattern))));
+ }
+
+ public Bundle validateCompletedTask(JobInstance theJobInstance, IIdType theTaskId) {
+ validateJobReport(theJobInstance, theTaskId);
+
+ Bundle patchResultBundle;
+ Task taskWithOutput = myTaskDao.read(theTaskId, mySrd);
+ assertThat(taskWithOutput.getStatus()).isEqualTo(Task.TaskStatus.COMPLETED);
+ ourLog.info(
+ "Complete Task: {}",
+ myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(taskWithOutput));
+
+ Task.TaskOutputComponent taskOutput = taskWithOutput.getOutputFirstRep();
+
+ // Assert on the output type
+ Coding taskType = taskOutput.getType().getCodingFirstRep();
+ assertEquals(RESOURCE_TYPES_SYSTEM, taskType.getSystem());
+ assertEquals("Bundle", taskType.getCode());
+
+ List containedResources = taskWithOutput.getContained();
+ assertThat(containedResources).hasSize(1).element(0).isInstanceOf(Bundle.class);
+
+ Bundle containedBundle = (Bundle) containedResources.get(0);
+
+ Reference outputRef = (Reference) taskOutput.getValue();
+ patchResultBundle = (Bundle) outputRef.getResource();
+ // ourLog.info("containedBundle: {}",
+ // myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(containedBundle));
+ // ourLog.info("patchResultBundle: {}",
+ // myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(patchResultBundle));
+ assertTrue(containedBundle.equalsDeep(patchResultBundle));
+ return patchResultBundle;
+ }
+
+ private void validateJobReport(JobInstance theJobInstance, IIdType theTaskId) {
+ String report = theJobInstance.getReport();
+ ReplaceReferenceResultsJson replaceReferenceResultsJson =
+ JsonUtil.deserialize(report, ReplaceReferenceResultsJson.class);
+ IdDt resultTaskId = replaceReferenceResultsJson.getTaskId().asIdDt();
+ assertEquals(theTaskId.getIdPart(), resultTaskId.getIdPart());
+ }
+
+ public List getExpectedIdentifiersForTargetAfterMerge(boolean theWithInputResultPatient) {
+
+ List expectedIdentifiersOnTargetAfterMerge = null;
+ if (theWithInputResultPatient) {
+ expectedIdentifiersOnTargetAfterMerge =
+ List.of(new Identifier().setSystem("SYS1A").setValue("VAL1A"));
+ } else {
+ // the identifiers copied over from source should be marked as old
+ expectedIdentifiersOnTargetAfterMerge = List.of(
+ new Identifier().setSystem("SYS2A").setValue("VAL2A"),
+ new Identifier().setSystem("SYS2B").setValue("VAL2B"),
+ new Identifier().setSystem("SYSC").setValue("VALC"),
+ new Identifier().setSystem("SYS1A").setValue("VAL1A").copy().setUse(Identifier.IdentifierUse.OLD),
+ new Identifier().setSystem("SYS1B").setValue("VAL1B").copy().setUse(Identifier.IdentifierUse.OLD));
+ }
+ return expectedIdentifiersOnTargetAfterMerge;
+ }
+
+ public void assertSourcePatientUpdatedOrDeleted(boolean withDelete) {
+ if (withDelete) {
+ assertThrows(ResourceGoneException.class, () -> readSourcePatient());
+ } else {
+ Patient source = readSourcePatient();
+ assertThat(source.getLink()).hasSize(1);
+ Patient.PatientLinkComponent link = source.getLink().get(0);
+ assertThat(link.getOther().getReferenceElement()).isEqualTo(getTargetPatientId());
+ assertThat(link.getType()).isEqualTo(Patient.LinkType.REPLACEDBY);
+ }
+ }
+
+ public void assertTargetPatientUpdated(boolean withDelete, List theExpectedIdentifiers) {
+ Patient target = readTargetPatient();
+ if (!withDelete) {
+ assertThat(target.getLink()).hasSize(1);
+ Patient.PatientLinkComponent link = target.getLink().get(0);
+ assertThat(link.getOther().getReferenceElement()).isEqualTo(getSourcePatientId());
+ assertThat(link.getType()).isEqualTo(Patient.LinkType.REPLACES);
+ }
+		// assert that the expected identifiers are found on the target
+ assertIdentifiers(target.getIdentifier(), theExpectedIdentifiers);
+ }
+
+ public void assertIdentifiers(List theActualIdentifiers, List theExpectedIdentifiers) {
+ assertThat(theActualIdentifiers).hasSize(theExpectedIdentifiers.size());
+ for (int i = 0; i < theExpectedIdentifiers.size(); i++) {
+ Identifier expectedIdentifier = theExpectedIdentifiers.get(i);
+ Identifier actualIdentifier = theActualIdentifiers.get(i);
+ assertThat(expectedIdentifier.equalsDeep(actualIdentifier)).isTrue();
+ }
+ }
+}
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
index 2b3dc840048d..894bae0815fc 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
@@ -22,6 +22,8 @@
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobMaintenanceService;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.merge.MergeAppCtx;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesAppCtx;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
@@ -224,7 +226,9 @@
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {
- TestR4Config.class
+ TestR4Config.class,
+ ReplaceReferencesAppCtx.class, // Batch job
+ MergeAppCtx.class // Batch job
})
public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuilder {
public static final String MY_VALUE_SET = "my-value-set";
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java
index 472b0dfdad87..ee51e31f65f1 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java
@@ -80,11 +80,11 @@ public class TestR5Config {
* and catch any potential deadlocks caused by database connection
* starvation
*
- * A minimum of 2 is necessary for most transactions,
- * so 2 will be our limit
+ * A minimum of 3 is necessary for most transactions,
+ * so 3 will be our minimum
*/
if (ourMaxThreads == null) {
- ourMaxThreads = (int) (Math.random() * 6.0) + 2;
+ ourMaxThreads = (int) (Math.random() * 6.0) + 3;
if (HapiTestSystemProperties.isSingleDbConnectionEnabled()) {
ourMaxThreads = 1;
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index 4c1300e5fe0b..145a5aed18d8 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml
index f5386ced33ac..ceee8a42231d 100644
--- a/hapi-fhir-server-cds-hooks/pom.xml
+++ b/hapi-fhir-server-cds-hooks/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index 48232abdc495..9e3545a2e00f 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java
index 212716bc02f7..dc3748815e63 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java
@@ -22,7 +22,6 @@
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.mdm.model.CanonicalEID;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.CanonicalIdentifier;
import org.hl7.fhir.instance.model.api.IBase;
@@ -31,23 +30,7 @@ public final class IdentifierUtil {
private IdentifierUtil() {}
public static CanonicalIdentifier identifierDtFromIdentifier(IBase theIdentifier) {
- CanonicalIdentifier retval = new CanonicalIdentifier();
-
- // TODO add other fields like "use" etc
- if (theIdentifier instanceof org.hl7.fhir.dstu3.model.Identifier) {
- org.hl7.fhir.dstu3.model.Identifier ident = (org.hl7.fhir.dstu3.model.Identifier) theIdentifier;
- retval.setSystem(ident.getSystem()).setValue(ident.getValue());
- } else if (theIdentifier instanceof org.hl7.fhir.r4.model.Identifier) {
- org.hl7.fhir.r4.model.Identifier ident = (org.hl7.fhir.r4.model.Identifier) theIdentifier;
- retval.setSystem(ident.getSystem()).setValue(ident.getValue());
- } else if (theIdentifier instanceof org.hl7.fhir.r5.model.Identifier) {
- org.hl7.fhir.r5.model.Identifier ident = (org.hl7.fhir.r5.model.Identifier) theIdentifier;
- retval.setSystem(ident.getSystem()).setValue(ident.getValue());
- } else {
- throw new InternalErrorException(Msg.code(1486) + "Expected 'Identifier' type but was '"
- + theIdentifier.getClass().getName() + "'");
- }
- return retval;
+ return ca.uhn.fhir.util.CanonicalIdentifier.fromIdentifier(theIdentifier);
}
/**
diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml
index 94ff32d0d56e..8dcbfe673d8f 100644
--- a/hapi-fhir-server-openapi/pom.xml
+++ b/hapi-fhir-server-openapi/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index 03e901047879..2c7cd90992dc 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java
index e3f6d852f709..88a14e5e2fff 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java
@@ -23,9 +23,11 @@
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
+import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
@@ -609,4 +611,10 @@ public boolean isRetry() {
public void setRetry(boolean theRetry) {
myRetry = theRetry;
}
+
+ public boolean isPreferAsync() {
+ String prefer = getHeader(Constants.HEADER_PREFER);
+ PreferHeader preferHeader = RestfulServerUtils.parsePreferHeader(prefer);
+ return preferHeader.getRespondAsync();
+ }
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java
index 46e14d53476a..cb2d3f010e55 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java
@@ -78,6 +78,13 @@ public SystemRequestDetails(RequestDetails theDetails) {
}
}
+	// TODO KHS: use this everywhere we create an SRD with only one partition
+ public static SystemRequestDetails forRequestPartitionId(RequestPartitionId thePartitionId) {
+ SystemRequestDetails retVal = new SystemRequestDetails();
+ retVal.setRequestPartitionId(thePartitionId);
+ return retVal;
+ }
+
public RequestPartitionId getRequestPartitionId() {
return myRequestPartitionId;
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java
index 8477293b90be..9dfd75ee4be7 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java
@@ -791,6 +791,17 @@ public static boolean respectPreferHeader(RestOperationTypeEnum theRestOperation
@Nonnull
public static PreferHeader parsePreferHeader(IRestfulServer> theServer, String theValue) {
+ PreferHeader retVal = parsePreferHeader(theValue);
+
+ if (retVal.getReturn() == null && theServer != null && theServer.getDefaultPreferReturn() != null) {
+ retVal.setReturn(theServer.getDefaultPreferReturn());
+ }
+
+ return retVal;
+ }
+
+ @Nonnull
+ public static PreferHeader parsePreferHeader(String theValue) {
PreferHeader retVal = new PreferHeader();
if (isNotBlank(theValue)) {
@@ -825,11 +836,6 @@ public static PreferHeader parsePreferHeader(IRestfulServer> theServer, String
}
}
}
-
- if (retVal.getReturn() == null && theServer != null && theServer.getDefaultPreferReturn() != null) {
- retVal.setReturn(theServer.getDefaultPreferReturn());
- }
-
return retVal;
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
index 0042aadcbdd9..6635ec031ff8 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
@@ -277,16 +277,27 @@ public Object invokeServer(IRestfulServer> theServer, RequestDetails theReques
When we write directly to an HttpServletResponse, the invocation returns null. However, we still want to invoke
the SERVER_OUTGOING_RESPONSE pointcut.
*/
+
+ // if the response status code is set by the method, respect it. Otherwise, use the default 200.
+ int responseCode = Constants.STATUS_HTTP_200_OK;
+ if (theRequest instanceof ServletRequestDetails) {
+ HttpServletResponse servletResponse = ((ServletRequestDetails) theRequest).getServletResponse();
+ if (servletResponse != null && servletResponse.getStatus() > 0) {
+ responseCode = servletResponse.getStatus();
+ }
+ }
+
if (response == null) {
ResponseDetails responseDetails = new ResponseDetails();
- responseDetails.setResponseCode(Constants.STATUS_HTTP_200_OK);
+ responseDetails.setResponseCode(responseCode);
callOutgoingResponseHook(theRequest, responseDetails);
return null;
} else {
Set summaryMode = RestfulServerUtils.determineSummaryMode(theRequest);
ResponseDetails responseDetails = new ResponseDetails();
responseDetails.setResponseResource(response);
- responseDetails.setResponseCode(Constants.STATUS_HTTP_200_OK);
+ responseDetails.setResponseCode(responseCode);
+
if (!callOutgoingResponseHook(theRequest, responseDetails)) {
return null;
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java
index a0df066badeb..2568b0514c39 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java
@@ -132,7 +132,7 @@ public class ProviderConstants {
public static final String OPERATION_META = "$meta";
/**
- * Operation name for the $expunge operation
+ * Operation name for the $expunge operation
*/
public static final String OPERATION_EXPUNGE = "$expunge";
@@ -253,10 +253,50 @@ public class ProviderConstants {
/**
* Parameter for source reference of the "$replace-references" operation
*/
- public static final String PARAM_SOURCE_REFERENCE_ID = "sourceReferenceId";
+ public static final String OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID = "source-reference-id";
/**
* Parameter for target reference of the "$replace-references" operation
*/
- public static final String PARAM_TARGET_REFERENCE_ID = "targetReferenceId";
+ public static final String OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID = "target-reference-id";
+
+ /**
+ * If the request is being performed synchronously and the number of resources that need to change
+ * exceeds this amount, the operation will fail with 412 Precondition Failed.
+ */
+ public static final String OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT = "resource-limit";
+
+ /**
+ * $replace-references output Parameters names
+ */
+ public static final String OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK = "task";
+
+ public static final String OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME = "outcome";
+
+ /**
+ * Operation name for the Resource "$merge" operation
+ * HAPI FHIR's use of this operation is based on https://www.hl7.org/fhir/patient-operation-merge.html
+ */
+ public static final String OPERATION_MERGE = "$merge";
+ /**
+ * Patient $merge operation parameters
+ */
+ public static final String OPERATION_MERGE_PARAM_SOURCE_PATIENT = "source-patient";
+
+ public static final String OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER = "source-patient-identifier";
+ public static final String OPERATION_MERGE_PARAM_TARGET_PATIENT = "target-patient";
+ public static final String OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER = "target-patient-identifier";
+ public static final String OPERATION_MERGE_PARAM_RESULT_PATIENT = "result-patient";
+ public static final String OPERATION_MERGE_PARAM_BATCH_SIZE = "batch-size";
+ public static final String OPERATION_MERGE_PARAM_PREVIEW = "preview";
+ public static final String OPERATION_MERGE_PARAM_DELETE_SOURCE = "delete-source";
+ public static final String OPERATION_MERGE_OUTPUT_PARAM_INPUT = "input";
+ public static final String OPERATION_MERGE_OUTPUT_PARAM_OUTCOME = OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME;
+ public static final String OPERATION_MERGE_OUTPUT_PARAM_RESULT = "result";
+ public static final String OPERATION_MERGE_OUTPUT_PARAM_TASK = OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;
+
+ public static final String HAPI_BATCH_JOB_ID_SYSTEM = "http://hapifhir.io/batch/jobId";
+ public static final String OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT_STRING = "512";
+ public static final Integer OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT =
+ Integer.parseInt(OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT_STRING);
}
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
index 925c5b26064c..f09365ac6fee 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
index 85ffd69b093d..6184ea3fbb65 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
@@ -21,7 +21,7 @@
ca.uhn.hapi.fhir
hapi-fhir-caching-api
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
index b3d1a76d6744..dde87cdb57ce 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
index e0cafcc2d812..2462783195d7 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir
ca.uhn.hapi.fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../../pom.xml
diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml
index 8fb9053f275c..e34064f72bff 100644
--- a/hapi-fhir-serviceloaders/pom.xml
+++ b/hapi-fhir-serviceloaders/pom.xml
@@ -5,7 +5,7 @@
hapi-deployable-pom
ca.uhn.hapi.fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
index 8e10c83d05c8..01ed191200d7 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
index 673f4ba06c8c..aef431beb382 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
hapi-fhir-spring-boot-sample-client-apache
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
index 3f72bcf0f545..7d6073019dab 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
index 5e76752983ee..1205f85c25b7 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
index b3230500c339..450044fa3f5c 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
index 6224ffffc240..a5505c221013 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml
index 941d8d153969..ff840b5a7010 100644
--- a/hapi-fhir-spring-boot/pom.xml
+++ b/hapi-fhir-spring-boot/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml
index e856ba0eccba..028910783b62 100644
--- a/hapi-fhir-sql-migrate/pom.xml
+++ b/hapi-fhir-sql-migrate/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml
index af7d8a4ba3fd..8f172b4ffc0d 100644
--- a/hapi-fhir-storage-batch2-jobs/pom.xml
+++ b/hapi-fhir-storage-batch2-jobs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java
index ced5e026f228..5e94aca6bb71 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java
@@ -37,6 +37,6 @@
DeleteExpungeAppCtx.class,
BulkExportAppCtx.class,
TermCodeSystemJobConfig.class,
- BulkImportPullConfig.class,
+ BulkImportPullConfig.class
})
public class Batch2JobsConfig {}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
index 56de694bc493..f898b388e4cc 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
@@ -30,7 +30,6 @@
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Scope;
@Configuration
public class BulkExportAppCtx {
@@ -145,7 +144,6 @@ public ExpandResourceAndWriteBinaryStep expandResourceAndWriteBinaryStep() {
}
@Bean
- @Scope("prototype")
public BulkExportCreateReportStep createReportStep() {
return new BulkExportCreateReportStep();
}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java
index 65d7d2af177f..35b86c0130f0 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java
@@ -47,6 +47,22 @@ public class BulkExportCreateReportStep
private Map> myResourceToBinaryIds;
+ @Nonnull
+ @Override
+ public ChunkOutcome consume(
+ ChunkExecutionDetails theChunkDetails) {
+ BulkExportBinaryFileId fileId = theChunkDetails.getData();
+ if (myResourceToBinaryIds == null) {
+ myResourceToBinaryIds = new HashMap<>();
+ }
+
+ myResourceToBinaryIds.putIfAbsent(fileId.getResourceType(), new ArrayList<>());
+
+ myResourceToBinaryIds.get(fileId.getResourceType()).add(fileId.getBinaryId());
+
+ return ChunkOutcome.SUCCESS();
+ }
+
@Nonnull
@Override
public RunOutcome run(
@@ -79,22 +95,6 @@ public RunOutcome run(
return RunOutcome.SUCCESS;
}
- @Nonnull
- @Override
- public ChunkOutcome consume(
- ChunkExecutionDetails theChunkDetails) {
- BulkExportBinaryFileId fileId = theChunkDetails.getData();
- if (myResourceToBinaryIds == null) {
- myResourceToBinaryIds = new HashMap<>();
- }
-
- myResourceToBinaryIds.putIfAbsent(fileId.getResourceType(), new ArrayList<>());
-
- myResourceToBinaryIds.get(fileId.getResourceType()).add(fileId.getBinaryId());
-
- return ChunkOutcome.SUCCESS();
- }
-
private static String getOriginatingRequestUrl(
@Nonnull StepExecutionDetails theStepExecutionDetails,
BulkExportJobResults results) {
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeAppCtx.java
new file mode 100644
index 000000000000..fe24006edd9d
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeAppCtx.java
@@ -0,0 +1,100 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.merge;
+
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencePatchOutcomeJson;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceResultsJson;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceUpdateStep;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesErrorHandler;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesQueryIdsStep;
+import ca.uhn.fhir.batch2.model.JobDefinition;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
+import org.hl7.fhir.r4.model.Task;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class MergeAppCtx {
+ public static final String JOB_MERGE = "MERGE";
+
+ @Bean
+ public JobDefinition merge(
+ ReplaceReferencesQueryIdsStep theMergeQueryIds,
+ ReplaceReferenceUpdateStep theMergeUpdateStep,
+ MergeUpdateTaskReducerStep theMergeUpdateTaskReducerStep,
+ ReplaceReferencesErrorHandler theMergeErrorHandler) {
+ return JobDefinition.newBuilder()
+ .setJobDefinitionId(JOB_MERGE)
+ .setJobDescription("Merge Resources")
+ .setJobDefinitionVersion(1)
+ .gatedExecution()
+ .setParametersType(MergeJobParameters.class)
+ .addFirstStep(
+ "query-ids",
+ "Query IDs of resources that link to the source resource",
+ FhirIdListWorkChunkJson.class,
+ theMergeQueryIds)
+ .addIntermediateStep(
+ "replace-references",
+ "Update all references from pointing to source to pointing to target",
+ ReplaceReferencePatchOutcomeJson.class,
+ theMergeUpdateStep)
+ .addFinalReducerStep(
+ "update-task",
+ "Waits for replace reference work to complete and updates Task.",
+ ReplaceReferenceResultsJson.class,
+ theMergeUpdateTaskReducerStep)
+ .errorHandler(theMergeErrorHandler)
+ .build();
+ }
+
+ @Bean
+ public ReplaceReferencesQueryIdsStep mergeQueryIdsStep(
+ HapiTransactionService theHapiTransactionService, IBatch2DaoSvc theBatch2DaoSvc) {
+ return new ReplaceReferencesQueryIdsStep<>(theHapiTransactionService, theBatch2DaoSvc);
+ }
+
+ @Bean
+ public ReplaceReferenceUpdateStep mergeUpdateStep(
+ FhirContext theFhirContext, ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc) {
+ return new ReplaceReferenceUpdateStep<>(theFhirContext, theReplaceReferencesPatchBundleSvc);
+ }
+
+ @Bean
+ public MergeUpdateTaskReducerStep mergeUpdateTaskStep(
+ DaoRegistry theDaoRegistry, IHapiTransactionService theHapiTransactionService) {
+ return new MergeUpdateTaskReducerStep(theDaoRegistry, theHapiTransactionService);
+ }
+
+ @Bean
+ public ReplaceReferencesErrorHandler mergeErrorHandler(
+ DaoRegistry theDaoRegistry, Batch2TaskHelper theBatch2TaskHelper) {
+ IFhirResourceDao taskDao = theDaoRegistry.getResourceDao(Task.class);
+ return new ReplaceReferencesErrorHandler<>(theBatch2TaskHelper, taskDao);
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeJobParameters.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeJobParameters.java
new file mode 100644
index 000000000000..7bbbeab55ec5
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeJobParameters.java
@@ -0,0 +1,47 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.merge;
+
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesJobParameters;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class MergeJobParameters extends ReplaceReferencesJobParameters {
+ @JsonProperty("deleteSource")
+ private boolean myDeleteSource;
+
+ @JsonProperty("resultResource")
+ private String myResultResource;
+
+ public void setResultResource(String theResultResource) {
+ myResultResource = theResultResource;
+ }
+
+ public String getResultResource() {
+ return myResultResource;
+ }
+
+ public boolean getDeleteSource() {
+ return myDeleteSource;
+ }
+
+ public void setDeleteSource(boolean theDeleteSource) {
+ this.myDeleteSource = theDeleteSource;
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeResourceHelper.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeResourceHelper.java
new file mode 100644
index 000000000000..4ad5960a83cb
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeResourceHelper.java
@@ -0,0 +1,187 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.merge;
+
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import jakarta.annotation.Nullable;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.hl7.fhir.r4.model.Identifier;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
+
+/**
+ * This class contains code that is used to update source and target resources after the references are replaced.
+ * This is the common functionality that is used in the sync case and in the async case as the reduction step.
+ */
+public class MergeResourceHelper {
+
+ private final IFhirResourceDao myPatientDao;
+
+ public MergeResourceHelper(IFhirResourceDao theDao) {
+ myPatientDao = theDao;
+ }
+
+ public static int setResourceLimitFromParameter(
+ JpaStorageSettings theStorageSettings, IPrimitiveType theResourceLimit) {
+ int retval = defaultIfNull(
+ IPrimitiveType.toValueOrNull(theResourceLimit),
+ ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT);
+ if (retval > theStorageSettings.getMaxTransactionEntriesForWrite()) {
+ retval = theStorageSettings.getMaxTransactionEntriesForWrite();
+ }
+ return retval;
+ }
+
+ public void updateMergedResourcesAfterReferencesReplaced(
+ IHapiTransactionService myHapiTransactionService,
+ IIdType theSourceResourceId,
+ IIdType theTargetResourceId,
+ @Nullable Patient theResultResource,
+ boolean theDeleteSource,
+ RequestDetails theRequestDetails) {
+ Patient sourceResource = myPatientDao.read(theSourceResourceId, theRequestDetails);
+ Patient targetResource = myPatientDao.read(theTargetResourceId, theRequestDetails);
+
+ updateMergedResourcesAfterReferencesReplaced(
+ myHapiTransactionService,
+ sourceResource,
+ targetResource,
+ theResultResource,
+ theDeleteSource,
+ theRequestDetails);
+ }
+
+ public Patient updateMergedResourcesAfterReferencesReplaced(
+ IHapiTransactionService myHapiTransactionService,
+ Patient theSourceResource,
+ Patient theTargetResource,
+ @Nullable Patient theResultResource,
+ boolean theDeleteSource,
+ RequestDetails theRequestDetails) {
+
+ AtomicReference targetPatientAfterUpdate = new AtomicReference<>();
+ myHapiTransactionService.withRequest(theRequestDetails).execute(() -> {
+ Patient patientToUpdate = prepareTargetPatientForUpdate(
+ theTargetResource, theSourceResource, theResultResource, theDeleteSource);
+
+ targetPatientAfterUpdate.set(updateResource(patientToUpdate, theRequestDetails));
+
+ if (theDeleteSource) {
+ deleteResource(theSourceResource, theRequestDetails);
+ } else {
+ prepareSourcePatientForUpdate(theSourceResource, theTargetResource);
+ updateResource(theSourceResource, theRequestDetails);
+ }
+ });
+
+ return targetPatientAfterUpdate.get();
+ }
+
+ public Patient prepareTargetPatientForUpdate(
+ Patient theTargetResource,
+ Patient theSourceResource,
+ @Nullable Patient theResultResource,
+ boolean theDeleteSource) {
+
+ // if the client provided a result resource as input then use it to update the target resource
+ if (theResultResource != null) {
+ return theResultResource;
+ }
+
+ // client did not provide a result resource, we should update the target resource,
+ // add the replaces link to the target resource, if the source resource is not to be deleted
+ if (!theDeleteSource) {
+ theTargetResource
+ .addLink()
+ .setType(Patient.LinkType.REPLACES)
+ .setOther(new Reference(theSourceResource.getIdElement().toVersionless()));
+ }
+
+ // copy all identifiers from the source to the target
+ copyIdentifiersAndMarkOld(theSourceResource, theTargetResource);
+
+ return theTargetResource;
+ }
+
+ private void prepareSourcePatientForUpdate(Patient theSourceResource, Patient theTargetResource) {
+ theSourceResource.setActive(false);
+ theSourceResource
+ .addLink()
+ .setType(Patient.LinkType.REPLACEDBY)
+ .setOther(new Reference(theTargetResource.getIdElement().toVersionless()));
+ }
+
+ /**
+ * Copies each identifier from theSourceResource to theTargetResource, after checking that theTargetResource does
+ * not already contain the source identifier. Marks the copied identifiers as old.
+ *
+ * @param theSourceResource the source resource to copy identifiers from
+ * @param theTargetResource the target resource to copy identifiers to
+ */
+ private void copyIdentifiersAndMarkOld(Patient theSourceResource, Patient theTargetResource) {
+ if (theSourceResource.hasIdentifier()) {
+ List sourceIdentifiers = theSourceResource.getIdentifier();
+ List targetIdentifiers = theTargetResource.getIdentifier();
+ for (Identifier sourceIdentifier : sourceIdentifiers) {
+ if (!containsIdentifier(targetIdentifiers, sourceIdentifier)) {
+ Identifier copyOfSrcIdentifier = sourceIdentifier.copy();
+ copyOfSrcIdentifier.setUse(Identifier.IdentifierUse.OLD);
+ theTargetResource.addIdentifier(copyOfSrcIdentifier);
+ }
+ }
+ }
+ }
+
+ /**
+ * Checks if theIdentifiers contains theIdentifier using equalsDeep
+ *
+ * @param theIdentifiers the list of identifiers
+ * @param theIdentifier the identifier to check
+ * @return true if theIdentifiers contains theIdentifier, false otherwise
+ */
+ private boolean containsIdentifier(List theIdentifiers, Identifier theIdentifier) {
+ for (Identifier identifier : theIdentifiers) {
+ if (identifier.equalsDeep(theIdentifier)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private Patient updateResource(Patient theResource, RequestDetails theRequestDetails) {
+ DaoMethodOutcome outcome = myPatientDao.update(theResource, theRequestDetails);
+ return (Patient) outcome.getResource();
+ }
+
+ private void deleteResource(Patient theResource, RequestDetails theRequestDetails) {
+ myPatientDao.delete(theResource.getIdElement(), theRequestDetails);
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeUpdateTaskReducerStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeUpdateTaskReducerStep.java
new file mode 100644
index 000000000000..67928769d254
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/merge/MergeUpdateTaskReducerStep.java
@@ -0,0 +1,75 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.merge;
+
+import ca.uhn.fhir.batch2.api.IJobDataSink;
+import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
+import ca.uhn.fhir.batch2.api.RunOutcome;
+import ca.uhn.fhir.batch2.api.StepExecutionDetails;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencePatchOutcomeJson;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceResultsJson;
+import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceUpdateTaskReducerStep;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import jakarta.annotation.Nonnull;
+import org.hl7.fhir.r4.model.Patient;
+
+public class MergeUpdateTaskReducerStep extends ReplaceReferenceUpdateTaskReducerStep {
+ private final IHapiTransactionService myHapiTransactionService;
+
+ public MergeUpdateTaskReducerStep(DaoRegistry theDaoRegistry, IHapiTransactionService theHapiTransactionService) {
+ super(theDaoRegistry);
+ this.myHapiTransactionService = theHapiTransactionService;
+ }
+
+ @Nonnull
+ @Override
+ public RunOutcome run(
+ @Nonnull StepExecutionDetails theStepExecutionDetails,
+ @Nonnull IJobDataSink theDataSink)
+ throws JobExecutionFailedException {
+
+ MergeJobParameters mergeJobParameters = theStepExecutionDetails.getParameters();
+ SystemRequestDetails requestDetails =
+ SystemRequestDetails.forRequestPartitionId(mergeJobParameters.getPartitionId());
+
+ Patient resultResource = null;
+ if (mergeJobParameters.getResultResource() != null) {
+ resultResource =
+ myFhirContext.newJsonParser().parseResource(Patient.class, mergeJobParameters.getResultResource());
+ }
+
+ IFhirResourceDao patientDao = myDaoRegistry.getResourceDao(Patient.class);
+
+ MergeResourceHelper helper = new MergeResourceHelper(patientDao);
+
+ helper.updateMergedResourcesAfterReferencesReplaced(
+ myHapiTransactionService,
+ mergeJobParameters.getSourceId().asIdDt(),
+ mergeJobParameters.getTargetId().asIdDt(),
+ resultResource,
+ mergeJobParameters.getDeleteSource(),
+ requestDetails);
+
+ return super.run(theStepExecutionDetails, theDataSink);
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencePatchOutcomeJson.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencePatchOutcomeJson.java
new file mode 100644
index 000000000000..2c0b63b40473
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencePatchOutcomeJson.java
@@ -0,0 +1,44 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.model.api.IModelJson;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.hl7.fhir.r4.model.Bundle;
+
+public class ReplaceReferencePatchOutcomeJson implements IModelJson {
+ @JsonProperty("patchResponseBundle")
+ String myPatchResponseBundle;
+
+ public ReplaceReferencePatchOutcomeJson() {}
+
+ public ReplaceReferencePatchOutcomeJson(FhirContext theFhirContext, Bundle theResult) {
+ myPatchResponseBundle = theFhirContext.newJsonParser().encodeResourceToString(theResult);
+ }
+
+ public String getPatchResponseBundle() {
+ return myPatchResponseBundle;
+ }
+
+ public void setPatchResponseBundle(String thePatchResponseBundle) {
+ myPatchResponseBundle = thePatchResponseBundle;
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceResultsJson.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceResultsJson.java
new file mode 100644
index 000000000000..ab8451db9e83
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceResultsJson.java
@@ -0,0 +1,39 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.model.api.IModelJson;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class ReplaceReferenceResultsJson implements IModelJson {
+ @JsonProperty("taskId")
+ private FhirIdJson myTaskId;
+
+ public ReplaceReferenceResultsJson() {}
+
+ public void setTaskId(FhirIdJson theTaskId) {
+ myTaskId = theTaskId;
+ }
+
+ public FhirIdJson getTaskId() {
+ return myTaskId;
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceUpdateStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceUpdateStep.java
new file mode 100644
index 000000000000..36fbd7969d1f
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceUpdateStep.java
@@ -0,0 +1,75 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.api.IJobDataSink;
+import ca.uhn.fhir.batch2.api.IJobStepWorker;
+import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
+import ca.uhn.fhir.batch2.api.RunOutcome;
+import ca.uhn.fhir.batch2.api.StepExecutionDetails;
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import jakarta.annotation.Nonnull;
+import org.hl7.fhir.r4.model.Bundle;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class ReplaceReferenceUpdateStep
+ implements IJobStepWorker {
+
+ private final FhirContext myFhirContext;
+ private final ReplaceReferencesPatchBundleSvc myReplaceReferencesPatchBundleSvc;
+
+ public ReplaceReferenceUpdateStep(
+ FhirContext theFhirContext, ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc) {
+ myFhirContext = theFhirContext;
+ myReplaceReferencesPatchBundleSvc = theReplaceReferencesPatchBundleSvc;
+ }
+
+ @Nonnull
+ @Override
+ public RunOutcome run(
+ @Nonnull StepExecutionDetails theStepExecutionDetails,
+ @Nonnull IJobDataSink theDataSink)
+ throws JobExecutionFailedException {
+
+ ReplaceReferencesJobParameters params = theStepExecutionDetails.getParameters();
+ ReplaceReferencesRequest replaceReferencesRequest = params.asReplaceReferencesRequest();
+ List fhirIds = theStepExecutionDetails.getData().getFhirIds().stream()
+ .map(FhirIdJson::asIdDt)
+ .collect(Collectors.toList());
+
+ SystemRequestDetails requestDetails = SystemRequestDetails.forRequestPartitionId(params.getPartitionId());
+
+ Bundle result = myReplaceReferencesPatchBundleSvc.patchReferencingResources(
+ replaceReferencesRequest, fhirIds, requestDetails);
+
+ ReplaceReferencePatchOutcomeJson data = new ReplaceReferencePatchOutcomeJson(myFhirContext, result);
+ theDataSink.accept(data);
+
+ return new RunOutcome(result.getEntry().size());
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceUpdateTaskReducerStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceUpdateTaskReducerStep.java
new file mode 100644
index 000000000000..11497d7af013
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferenceUpdateTaskReducerStep.java
@@ -0,0 +1,110 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.api.ChunkExecutionDetails;
+import ca.uhn.fhir.batch2.api.IJobDataSink;
+import ca.uhn.fhir.batch2.api.IReductionStepWorker;
+import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
+import ca.uhn.fhir.batch2.api.RunOutcome;
+import ca.uhn.fhir.batch2.api.StepExecutionDetails;
+import ca.uhn.fhir.batch2.model.ChunkOutcome;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import jakarta.annotation.Nonnull;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Coding;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.Task;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ReplaceReferenceUpdateTaskReducerStep
+		implements IReductionStepWorker {
+	public static final String RESOURCE_TYPES_SYSTEM = "http://hl7.org/fhir/resource-types";
+
+	protected final FhirContext myFhirContext;
+	protected final DaoRegistry myDaoRegistry;
+	private final IFhirResourceDao myTaskDao;
+
+	private final List myPatchOutputBundles = new ArrayList<>();
+
+	public ReplaceReferenceUpdateTaskReducerStep(DaoRegistry theDaoRegistry) {
+		myDaoRegistry = theDaoRegistry;
+		myTaskDao = myDaoRegistry.getResourceDao(Task.class);
+		myFhirContext = theDaoRegistry.getFhirContext();
+	}
+
+	@Nonnull
+	@Override
+	public ChunkOutcome consume(ChunkExecutionDetails theChunkDetails) {
+		ReplaceReferencePatchOutcomeJson result = theChunkDetails.getData();
+		Bundle patchOutputBundle =
+				myFhirContext.newJsonParser().parseResource(Bundle.class, result.getPatchResponseBundle());
+		myPatchOutputBundles.add(patchOutputBundle);
+		return ChunkOutcome.SUCCESS();
+	}
+
+	@Nonnull
+	@Override
+	public RunOutcome run(
+			@Nonnull StepExecutionDetails theStepExecutionDetails,
+			@Nonnull IJobDataSink theDataSink)
+			throws JobExecutionFailedException {
+
+		try {
+			ReplaceReferencesJobParameters params = theStepExecutionDetails.getParameters();
+			SystemRequestDetails requestDetails = SystemRequestDetails.forRequestPartitionId(params.getPartitionId());
+			Task task = myTaskDao.read(params.getTaskId().asIdDt(), requestDetails);
+
+			task.setStatus(Task.TaskStatus.COMPLETED);
+			// TODO KHS this Task will probably be too large for large jobs.
+			// Revisit this model once we support
+			// Provenance resources.
+			myPatchOutputBundles.forEach(outputBundle -> {
+				Task.TaskOutputComponent output = task.addOutput();
+				Coding coding = output.getType().getCodingFirstRep();
+				coding.setSystem(RESOURCE_TYPES_SYSTEM);
+				coding.setCode("Bundle");
+				Reference outputBundleReference =
+						new Reference("#" + outputBundle.getIdElement().getIdPart());
+				output.setValue(outputBundleReference);
+				task.addContained(outputBundle);
+			});
+
+			myTaskDao.update(task, requestDetails);
+
+			ReplaceReferenceResultsJson result = new ReplaceReferenceResultsJson();
+			result.setTaskId(params.getTaskId());
+			theDataSink.accept(result);
+
+			return new RunOutcome(myPatchOutputBundles.size());
+		} finally {
+			// Reusing the same reducer instance across jobs is confusing and error-prone. We need to fix this.
+			// See https://github.com/hapifhir/hapi-fhir/pull/6551
+			// TODO KHS add new methods to the api called init() and cleanup() that are called by the api so we can move
+			// this finally block out
+			myPatchOutputBundles.clear();
+		}
+	}
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesAppCtx.java
new file mode 100644
index 000000000000..9fd55181d1f4
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesAppCtx.java
@@ -0,0 +1,95 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
+import ca.uhn.fhir.batch2.model.JobDefinition;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
+import org.hl7.fhir.r4.model.Task;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class ReplaceReferencesAppCtx {
+ public static final String JOB_REPLACE_REFERENCES = "REPLACE_REFERENCES";
+
+ @Bean
+ public JobDefinition replaceReferencesJobDefinition(
+ ReplaceReferencesQueryIdsStep theReplaceReferencesQueryIds,
+ ReplaceReferenceUpdateStep theReplaceReferenceUpdateStep,
+ ReplaceReferenceUpdateTaskReducerStep
+ theReplaceReferenceUpdateTaskReducerStep,
+ ReplaceReferencesErrorHandler theReplaceReferencesErrorHandler) {
+ return JobDefinition.newBuilder()
+ .setJobDefinitionId(JOB_REPLACE_REFERENCES)
+ .setJobDescription("Replace References")
+ .setJobDefinitionVersion(1)
+ .gatedExecution()
+ .setParametersType(ReplaceReferencesJobParameters.class)
+ .addFirstStep(
+ "query-ids",
+ "Query IDs of resources that link to the source resource",
+ FhirIdListWorkChunkJson.class,
+ theReplaceReferencesQueryIds)
+ .addIntermediateStep(
+ "replace-references",
+ "Update all references from pointing to source to pointing to target",
+ ReplaceReferencePatchOutcomeJson.class,
+ theReplaceReferenceUpdateStep)
+ .addFinalReducerStep(
+ "update-task",
+ "Waits for replace reference work to complete and updates Task.",
+ ReplaceReferenceResultsJson.class,
+ theReplaceReferenceUpdateTaskReducerStep)
+ .errorHandler(theReplaceReferencesErrorHandler)
+ .build();
+ }
+
+ @Bean
+ public ReplaceReferencesQueryIdsStep replaceReferencesQueryIdsStep(
+ HapiTransactionService theHapiTransactionService, IBatch2DaoSvc theBatch2DaoSvc) {
+ return new ReplaceReferencesQueryIdsStep<>(theHapiTransactionService, theBatch2DaoSvc);
+ }
+
+ @Bean
+ public ReplaceReferenceUpdateStep replaceReferenceUpdateStep(
+ FhirContext theFhirContext, ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc) {
+ return new ReplaceReferenceUpdateStep<>(theFhirContext, theReplaceReferencesPatchBundleSvc);
+ }
+
+ @Bean
+ public ReplaceReferenceUpdateTaskReducerStep replaceReferenceUpdateTaskStep(
+ DaoRegistry theDaoRegistry) {
+ return new ReplaceReferenceUpdateTaskReducerStep<>(theDaoRegistry);
+ }
+
+ @Bean
+ public ReplaceReferencesErrorHandler replaceReferencesErrorHandler(
+ DaoRegistry theDaoRegistry, Batch2TaskHelper theBatch2TaskHelper) {
+ IFhirResourceDao taskDao = theDaoRegistry.getResourceDao(Task.class);
+ return new ReplaceReferencesErrorHandler<>(theBatch2TaskHelper, taskDao);
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesErrorHandler.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesErrorHandler.java
new file mode 100644
index 000000000000..76dd8643e835
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesErrorHandler.java
@@ -0,0 +1,54 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.api.IJobCompletionHandler;
+import ca.uhn.fhir.batch2.api.JobCompletionDetails;
+import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import org.hl7.fhir.r4.model.Task;
+
+/**
+ * This class is the completion/error handler for the ReplaceReferences and Merge batch jobs.
+ * When a job completes or fails, it updates the status of the Task resource associated with the job.
+ */
+public class ReplaceReferencesErrorHandler
+ implements IJobCompletionHandler {
+
+ private final Batch2TaskHelper myBatch2TaskHelper;
+ private final IFhirResourceDao myTaskDao;
+
+ public ReplaceReferencesErrorHandler(Batch2TaskHelper theBatch2TaskHelper, IFhirResourceDao theTaskDao) {
+ myBatch2TaskHelper = theBatch2TaskHelper;
+ myTaskDao = theTaskDao;
+ }
+
+ @Override
+ public void jobComplete(JobCompletionDetails theDetails) {
+
+ PT jobParameters = theDetails.getParameters();
+
+ SystemRequestDetails requestDetails =
+ SystemRequestDetails.forRequestPartitionId(jobParameters.getPartitionId());
+
+ myBatch2TaskHelper.updateTaskStatusOnJobCompletion(myTaskDao, requestDetails, theDetails);
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesJobParameters.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesJobParameters.java
new file mode 100644
index 000000000000..193c8b21bc0a
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesJobParameters.java
@@ -0,0 +1,96 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.batch2.jobs.parameters.BatchJobParametersWithTaskId;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class ReplaceReferencesJobParameters extends BatchJobParametersWithTaskId {
+
+ @JsonProperty("sourceId")
+ private FhirIdJson mySourceId;
+
+ @JsonProperty("targetId")
+ private FhirIdJson myTargetId;
+
+ @JsonProperty(
+ value = "batchSize",
+ defaultValue = ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT_STRING,
+ required = false)
+ private int myBatchSize;
+
+ @JsonProperty("partitionId")
+ private RequestPartitionId myPartitionId;
+
+ public ReplaceReferencesJobParameters() {}
+
+ public ReplaceReferencesJobParameters(ReplaceReferencesRequest theReplaceReferencesRequest, int theBatchSize) {
+ mySourceId = new FhirIdJson(theReplaceReferencesRequest.sourceId);
+ myTargetId = new FhirIdJson(theReplaceReferencesRequest.targetId);
+ // Note theReplaceReferencesRequest.resourceLimit is only used for the synchronous case. It is ignored in this
+ // async case.
+ myBatchSize = theBatchSize;
+ myPartitionId = theReplaceReferencesRequest.partitionId;
+ }
+
+ public FhirIdJson getSourceId() {
+ return mySourceId;
+ }
+
+ public void setSourceId(FhirIdJson theSourceId) {
+ mySourceId = theSourceId;
+ }
+
+ public FhirIdJson getTargetId() {
+ return myTargetId;
+ }
+
+ public void setTargetId(FhirIdJson theTargetId) {
+ myTargetId = theTargetId;
+ }
+
+ public int getBatchSize() {
+ if (myBatchSize <= 0) {
+ myBatchSize = JpaStorageSettings.DEFAULT_TRANSACTION_ENTRIES_FOR_WRITE;
+ }
+ return myBatchSize;
+ }
+
+ public void setBatchSize(int theBatchSize) {
+ myBatchSize = theBatchSize;
+ }
+
+ public RequestPartitionId getPartitionId() {
+ return myPartitionId;
+ }
+
+ public void setPartitionId(RequestPartitionId thePartitionId) {
+ myPartitionId = thePartitionId;
+ }
+
+ public ReplaceReferencesRequest asReplaceReferencesRequest() {
+ return new ReplaceReferencesRequest(mySourceId.asIdDt(), myTargetId.asIdDt(), myBatchSize, myPartitionId);
+ }
+}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesQueryIdsStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesQueryIdsStep.java
new file mode 100644
index 000000000000..2e551cb518b4
--- /dev/null
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/replacereferences/ReplaceReferencesQueryIdsStep.java
@@ -0,0 +1,89 @@
+/*-
+ * #%L
+ * hapi-fhir-storage-batch2-jobs
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.replacereferences;
+
+import ca.uhn.fhir.batch2.api.IJobDataSink;
+import ca.uhn.fhir.batch2.api.IJobStepWorker;
+import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
+import ca.uhn.fhir.batch2.api.RunOutcome;
+import ca.uhn.fhir.batch2.api.StepExecutionDetails;
+import ca.uhn.fhir.batch2.api.VoidModel;
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.util.StreamUtil;
+import jakarta.annotation.Nonnull;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Stream;
+
+public class ReplaceReferencesQueryIdsStep
+ implements IJobStepWorker {
+
+ private final HapiTransactionService myHapiTransactionService;
+ private final IBatch2DaoSvc myBatch2DaoSvc;
+
+ public ReplaceReferencesQueryIdsStep(
+ HapiTransactionService theHapiTransactionService, IBatch2DaoSvc theBatch2DaoSvc) {
+ myHapiTransactionService = theHapiTransactionService;
+ myBatch2DaoSvc = theBatch2DaoSvc;
+ }
+
+ @Nonnull
+ @Override
+ public RunOutcome run(
+ @Nonnull StepExecutionDetails theStepExecutionDetails,
+ @Nonnull IJobDataSink theDataSink)
+ throws JobExecutionFailedException {
+ ReplaceReferencesJobParameters params = theStepExecutionDetails.getParameters();
+
+		// Warning: the meaning of source/target in the resource link table is
+		// reversed from their meaning in the replace-references request,
+		// which can be confusing.
+
+ AtomicInteger totalCount = new AtomicInteger();
+ myHapiTransactionService
+ .withSystemRequestOnPartition(params.getPartitionId())
+ .execute(() -> {
+ Stream stream = myBatch2DaoSvc
+ .streamSourceIdsThatReferenceTargetId(
+ params.getSourceId().asIdDt())
+ .map(FhirIdJson::new);
+
+ StreamUtil.partition(stream, params.getBatchSize())
+ .forEach(chunk ->
+ totalCount.addAndGet(processChunk(theDataSink, chunk, params.getPartitionId())));
+ });
+
+ return new RunOutcome(totalCount.get());
+ }
+
+ private int processChunk(
+ IJobDataSink theDataSink,
+ List theChunk,
+ RequestPartitionId theRequestPartitionId) {
+ FhirIdListWorkChunkJson data = new FhirIdListWorkChunkJson(theChunk, theRequestPartitionId);
+ theDataSink.accept(data);
+ return theChunk.size();
+ }
+}
diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml
index 132506ac72eb..6abe157add62 100644
--- a/hapi-fhir-storage-batch2-test-utilities/pom.xml
+++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml
index 2c6054a32a5a..f5f8ee533b7a 100644
--- a/hapi-fhir-storage-batch2/pom.xml
+++ b/hapi-fhir-storage-batch2/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java
index 9c82b3b38257..4beada5a8cdb 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java
@@ -24,7 +24,8 @@
import jakarta.annotation.Nonnull;
/**
- * Reduction step worker.
+ * Reduction step worker. Once all chunks from the previous step have completed, consume() will first be called on
+ * all chunks, and then finally run() will be called on this step.
* @param Job Parameter Type
* @param Input Parameter type (real input for step is ListResult of IT
* @param Output Job Report Type
@@ -32,6 +33,9 @@
public interface IReductionStepWorker
extends IJobStepWorker {
+ // TODO KHS create an abstract superclass under this that enforces the one-at-a-time contract
+ // (this contract is currently baked into the implementations inconsistently)
+
/**
*
* If an exception is thrown, the workchunk will be marked as failed.
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java
index d8ea1eefe55e..f4eba370003d 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java
@@ -33,6 +33,7 @@
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
+import ca.uhn.fhir.batch2.progress.JobInstanceStatusUpdater;
import ca.uhn.fhir.batch2.util.BatchJobOpenTelemetryUtils;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
@@ -56,6 +57,7 @@
import org.springframework.transaction.annotation.Propagation;
import java.util.Collections;
+import java.util.Date;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -85,6 +87,7 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS
private final Semaphore myCurrentlyExecuting = new Semaphore(1);
private final AtomicReference myCurrentlyFinalizingInstanceId = new AtomicReference<>();
private final JobDefinitionRegistry myJobDefinitionRegistry;
+ private final JobInstanceStatusUpdater myJobInstanceStatusUpdater;
private Timer myHeartbeatTimer;
/**
@@ -97,6 +100,7 @@ public ReductionStepExecutorServiceImpl(
myJobPersistence = theJobPersistence;
myTransactionService = theTransactionService;
myJobDefinitionRegistry = theJobDefinitionRegistry;
+ myJobInstanceStatusUpdater = new JobInstanceStatusUpdater(theJobDefinitionRegistry);
myReducerExecutor = Executors.newSingleThreadExecutor(new CustomizableThreadFactory("batch2-reducer"));
}
@@ -232,7 +236,8 @@ ReductionStepChunkProcessingResponse executeReductionStep(
executeInTransactionWithSynchronization(() -> {
myJobPersistence.updateInstance(instance.getInstanceId(), theInstance -> {
- theInstance.setStatus(StatusEnum.FAILED);
+ theInstance.setEndTime(new Date());
+ myJobInstanceStatusUpdater.updateInstanceStatus(theInstance, StatusEnum.FAILED);
return true;
});
return null;
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/FhirIdJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/FhirIdJson.java
new file mode 100644
index 000000000000..f379dbe08f48
--- /dev/null
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/FhirIdJson.java
@@ -0,0 +1,98 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch2 Task Processor
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.chunk;
+
+import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.model.primitive.IdDt;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.hl7.fhir.instance.model.api.IIdType;
+
+public class FhirIdJson implements IModelJson {
+
+ @JsonProperty("type")
+ private String myResourceType;
+
+ @JsonProperty("id")
+ private String myFhirId;
+
+ // Jackson needs an empty constructor
+ public FhirIdJson() {}
+
+ public FhirIdJson(String theResourceType, String theFhirId) {
+ myResourceType = theResourceType;
+ myFhirId = theFhirId;
+ }
+
+ public FhirIdJson(IIdType theFhirId) {
+ myResourceType = theFhirId.getResourceType();
+ myFhirId = theFhirId.getIdPart();
+ }
+
+ @Override
+ public String toString() {
+ return myResourceType + "/" + myFhirId;
+ }
+
+ public String getResourceType() {
+ return myResourceType;
+ }
+
+ public FhirIdJson setResourceType(String theResourceType) {
+ myResourceType = theResourceType;
+ return this;
+ }
+
+ public String getFhirId() {
+ return myFhirId;
+ }
+
+ public FhirIdJson setFhirId(String theFhirId) {
+ myFhirId = theFhirId;
+ return this;
+ }
+
+ @Override
+ public boolean equals(Object theO) {
+ if (this == theO) return true;
+
+ if (theO == null || getClass() != theO.getClass()) return false;
+
+ FhirIdJson id = (FhirIdJson) theO;
+
+ return new EqualsBuilder()
+ .append(myResourceType, id.myResourceType)
+ .append(myFhirId, id.myFhirId)
+ .isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 37)
+ .append(myResourceType)
+ .append(myFhirId)
+ .toHashCode();
+ }
+
+ public IdDt asIdDt() {
+ return new IdDt(myResourceType, myFhirId);
+ }
+}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/FhirIdListWorkChunkJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/FhirIdListWorkChunkJson.java
new file mode 100644
index 000000000000..77064f586991
--- /dev/null
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/FhirIdListWorkChunkJson.java
@@ -0,0 +1,95 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch2 Task Processor
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.chunk;
+
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.model.api.IModelJson;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+public class FhirIdListWorkChunkJson implements IModelJson {
+
+ @JsonProperty("requestPartitionId")
+ private RequestPartitionId myRequestPartitionId;
+
+ @JsonProperty("fhirIds")
+ private List myFhirIds;
+
+ /**
+ * Constructor
+ */
+ public FhirIdListWorkChunkJson() {
+ super();
+ }
+
+ /**
+ * Constructor
+ */
+ public FhirIdListWorkChunkJson(Collection theFhirIds, RequestPartitionId theRequestPartitionId) {
+ this();
+ getFhirIds().addAll(theFhirIds);
+ myRequestPartitionId = theRequestPartitionId;
+ }
+
+ public FhirIdListWorkChunkJson(int theBatchSize, RequestPartitionId theRequestPartitionId) {
+ myFhirIds = new ArrayList<>(theBatchSize);
+ myRequestPartitionId = theRequestPartitionId;
+ }
+
+ public RequestPartitionId getRequestPartitionId() {
+ return myRequestPartitionId;
+ }
+
+ public List getFhirIds() {
+ if (myFhirIds == null) {
+ myFhirIds = new ArrayList<>();
+ }
+ return myFhirIds;
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
+ .append("ids", myFhirIds)
+ .append("requestPartitionId", myRequestPartitionId)
+ .toString();
+ }
+
+ public int size() {
+ return getFhirIds().size();
+ }
+
+ public boolean isEmpty() {
+ return getFhirIds().isEmpty();
+ }
+
+ public void add(FhirIdJson theFhirId) {
+ getFhirIds().add(theFhirId);
+ }
+
+ public void clear() {
+ getFhirIds().clear();
+ }
+}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/BatchJobParametersWithTaskId.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/BatchJobParametersWithTaskId.java
new file mode 100644
index 000000000000..222b1e3d817f
--- /dev/null
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/BatchJobParametersWithTaskId.java
@@ -0,0 +1,38 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch2 Task Processor
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.jobs.parameters;
+
+import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
+import ca.uhn.fhir.model.api.BaseBatchJobParameters;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.hl7.fhir.instance.model.api.IIdType;
+
+public class BatchJobParametersWithTaskId extends BaseBatchJobParameters {
+ @JsonProperty("taskId")
+ private FhirIdJson myTaskId;
+
+ public void setTaskId(IIdType theTaskId) {
+ myTaskId = new FhirIdJson(theTaskId);
+ }
+
+ public FhirIdJson getTaskId() {
+ return myTaskId;
+ }
+}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2TaskHelper.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2TaskHelper.java
new file mode 100644
index 000000000000..4b3b61430f6c
--- /dev/null
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/util/Batch2TaskHelper.java
@@ -0,0 +1,92 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch2 Task Processor
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.batch2.util;
+
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.api.JobCompletionDetails;
+import ca.uhn.fhir.batch2.jobs.parameters.BatchJobParametersWithTaskId;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.batch2.model.StatusEnum;
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import org.hl7.fhir.r4.model.Task;
+
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.HAPI_BATCH_JOB_ID_SYSTEM;
+
+/**
+ * Start a job and create a Task resource that tracks the status of the job. This is designed in a way that
+ * it could be used by any Batch 2 job.
+ */
+public class Batch2TaskHelper {
+
+ public Task startJobAndCreateAssociatedTask(
+ IFhirResourceDao theTaskDao,
+ RequestDetails theRequestDetails,
+ IJobCoordinator theJobCoordinator,
+ String theJobDefinitionId,
+ BatchJobParametersWithTaskId theJobParams) {
+ Task task = new Task();
+ task.setStatus(Task.TaskStatus.INPROGRESS);
+ theTaskDao.create(task, theRequestDetails);
+
+ theJobParams.setTaskId(task.getIdElement().toUnqualifiedVersionless());
+
+ JobInstanceStartRequest request = new JobInstanceStartRequest(theJobDefinitionId, theJobParams);
+ Batch2JobStartResponse jobStartResponse = theJobCoordinator.startInstance(theRequestDetails, request);
+
+ task.addIdentifier().setSystem(HAPI_BATCH_JOB_ID_SYSTEM).setValue(jobStartResponse.getInstanceId());
+ theTaskDao.update(task, theRequestDetails);
+
+ return task;
+ }
+
+ public void updateTaskStatusOnJobCompletion(
+ IFhirResourceDao theTaskDao,
+ RequestDetails theRequestDetails,
+ JobCompletionDetails extends BatchJobParametersWithTaskId> theJobCompletionDetails) {
+
+ BatchJobParametersWithTaskId jobParams = theJobCompletionDetails.getParameters();
+
+ StatusEnum jobStatus = theJobCompletionDetails.getInstance().getStatus();
+ Task.TaskStatus taskStatus;
+ switch (jobStatus) {
+ case COMPLETED:
+ taskStatus = Task.TaskStatus.COMPLETED;
+ break;
+ case FAILED:
+ taskStatus = Task.TaskStatus.FAILED;
+ break;
+ case CANCELLED:
+ taskStatus = Task.TaskStatus.CANCELLED;
+ break;
+ default:
+ throw new IllegalStateException(Msg.code(2595)
+ + String.format(
+ "Cannot handle job status '%s'. COMPLETED, FAILED or CANCELLED were expected",
+ jobStatus));
+ }
+
+ Task task = theTaskDao.read(jobParams.getTaskId().asIdDt(), theRequestDetails);
+ task.setStatus(taskStatus);
+ theTaskDao.update(task, theRequestDetails);
+ }
+}
diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml
index ec45cf1cccb9..ad335ec75b5b 100644
--- a/hapi-fhir-storage-cr/pom.xml
+++ b/hapi-fhir-storage-cr/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml
index 6c2980b347ef..34019955f1b3 100644
--- a/hapi-fhir-storage-mdm/pom.xml
+++ b/hapi-fhir-storage-mdm/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml
index 0e8dcfe6bb56..7c0e16c142e3 100644
--- a/hapi-fhir-storage-test-utilities/pom.xml
+++ b/hapi-fhir-storage-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml
index e3d63bd3ac20..d565fdcc2f1d 100644
--- a/hapi-fhir-storage/pom.xml
+++ b/hapi-fhir-storage/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java
index 16a5ebe86798..bf9aebd5234b 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java
@@ -117,6 +117,26 @@ public class JpaStorageSettings extends StorageSettings {
private static final boolean DEFAULT_PREVENT_INVALIDATING_CONDITIONAL_MATCH_CRITERIA = false;
private static final long DEFAULT_REST_DELETE_BY_URL_RESOURCE_ID_THRESHOLD = 10000;
+ /**
+ * If we are batching write operations in transactions, what should the maximum number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ public static final String DEFAULT_MAX_TRANSACTION_ENTRIES_FOR_WRITE_STRING = "10000";
+
+ public static final int DEFAULT_MAX_TRANSACTION_ENTRIES_FOR_WRITE =
+ Integer.parseInt(DEFAULT_MAX_TRANSACTION_ENTRIES_FOR_WRITE_STRING);
+
+ /**
+ * If we are batching write operations in transactions, what should the default number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ public static final String DEFAULT_TRANSACTION_ENTRIES_FOR_WRITE_STRING = "1024";
+
+ public static final int DEFAULT_TRANSACTION_ENTRIES_FOR_WRITE =
+ Integer.parseInt(DEFAULT_TRANSACTION_ENTRIES_FOR_WRITE_STRING);
+
/**
* Do not change default of {@code 0}!
*
@@ -392,6 +412,20 @@ public class JpaStorageSettings extends StorageSettings {
@Beta
private boolean myIncludeHashIdentityForTokenSearches = false;
+ /**
+ * If we are batching write operations in transactions, what should the maximum number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ private int myMaxTransactionEntriesForWrite = DEFAULT_MAX_TRANSACTION_ENTRIES_FOR_WRITE;
+
+ /**
+ * If we are batching write operations in transactions, what should the default number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ private int myDefaultTransactionEntriesForWrite = DEFAULT_TRANSACTION_ENTRIES_FOR_WRITE;
+
/**
* Constructor
*/
@@ -2650,6 +2684,42 @@ public void setRestDeleteByUrlResourceIdThreshold(long theRestDeleteByUrlResourc
myRestDeleteByUrlResourceIdThreshold = theRestDeleteByUrlResourceIdThreshold;
}
+ /**
+ * If we are batching write operations in transactions, what should the maximum number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ public int getMaxTransactionEntriesForWrite() {
+ return myMaxTransactionEntriesForWrite;
+ }
+
+ /**
+ * If we are batching write operations in transactions, what should the maximum number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ public void setMaxTransactionEntriesForWrite(int theMaxTransactionEntriesForWrite) {
+ myMaxTransactionEntriesForWrite = theMaxTransactionEntriesForWrite;
+ }
+
+ /**
+ * If we are batching write operations in transactions, what should the default number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ public int getDefaultTransactionEntriesForWrite() {
+ return myDefaultTransactionEntriesForWrite;
+ }
+
+ /**
+ * If we are batching write operations in transactions, what should the default number of write operations per
+ * transaction be?
+ * @since 8.0.0
+ */
+ public void setDefaultTransactionEntriesForWrite(int theDefaultTransactionEntriesForWrite) {
+ myDefaultTransactionEntriesForWrite = theDefaultTransactionEntriesForWrite;
+ }
+
public enum StoreMetaSourceInformationEnum {
NONE(false, false),
SOURCE_URI(true, false),
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java
index a49e1eb4a00e..06629ec11f0d 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java
@@ -47,7 +47,7 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry {
private ApplicationContext myAppCtx;
@Autowired
- private FhirContext myContext;
+ private FhirContext myFhirContext;
private volatile Map> myResourceNameToResourceDao;
private volatile IFhirSystemDao, ?> mySystemDao;
@@ -65,7 +65,7 @@ public DaoRegistry() {
*/
public DaoRegistry(FhirContext theFhirContext) {
super();
- myContext = theFhirContext;
+ myFhirContext = theFhirContext;
}
public void setSupportedResourceTypes(Collection theSupportedResourceTypes) {
@@ -128,7 +128,7 @@ public IFhirResourceDao getResourceDaoIfExists(Clas
@Nullable
public IFhirResourceDao getResourceDaoOrNull(Class theResourceType) {
- String resourceName = myContext.getResourceType(theResourceType);
+ String resourceName = myFhirContext.getResourceType(theResourceType);
try {
return (IFhirResourceDao) getResourceDao(resourceName);
} catch (InvalidRequestException e) {
@@ -175,7 +175,7 @@ private void initializeMaps(Collection theResourceDaos) {
for (IFhirResourceDao nextResourceDao : theResourceDaos) {
Class resourceType = nextResourceDao.getResourceType();
assert resourceType != null;
- RuntimeResourceDefinition nextResourceDef = myContext.getResourceDefinition(resourceType);
+ RuntimeResourceDefinition nextResourceDef = myFhirContext.getResourceDefinition(resourceType);
if (mySupportedResourceTypes == null || mySupportedResourceTypes.contains(nextResourceDef.getName())) {
myResourceNameToResourceDao.put(nextResourceDef.getName(), nextResourceDao);
}
@@ -183,7 +183,7 @@ private void initializeMaps(Collection theResourceDaos) {
}
public void register(IFhirResourceDao theResourceDao) {
- RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceDao.getResourceType());
+ RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theResourceDao.getResourceType());
String resourceName = resourceDef.getName();
myResourceNameToResourceDao.put(resourceName, theResourceDao);
}
@@ -192,12 +192,12 @@ public IFhirResourceDao getDaoOrThrowException(Class extends IBaseResource> th
IFhirResourceDao retVal = getResourceDao(theClass);
if (retVal == null) {
List supportedResourceNames = myResourceNameToResourceDao.keySet().stream()
- .map(t -> myContext.getResourceType(t))
+ .map(t -> myFhirContext.getResourceType(t))
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException(Msg.code(573)
+ "Unable to process request, this server does not know how to handle resources of type "
- + myContext.getResourceType(theClass) + " - Can handle: " + supportedResourceNames);
+ + myFhirContext.getResourceType(theClass) + " - Can handle: " + supportedResourceNames);
}
return retVal;
}
@@ -225,4 +225,10 @@ private List toCollection(String[] theResourceTypes) {
public Set getRegisteredDaoTypes() {
return Collections.unmodifiableSet(myResourceNameToResourceDao.keySet());
}
+
+ // TODO KHS find all the places where FhirContext and DaoRegistry are both passed into constructors and
+ // remove the FhirContext parameter and pull it from the DaoRegistry parameter
+ public FhirContext getFhirContext() {
+ return myFhirContext;
+ }
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java
index cd303ca4fe54..31cb9a10aae3 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java
@@ -24,10 +24,13 @@
import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
import ca.uhn.fhir.jpa.api.pid.IResourcePidStream;
import ca.uhn.fhir.jpa.api.pid.ListWrappingPidStream;
+import ca.uhn.fhir.model.primitive.IdDt;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
+import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Date;
+import java.util.stream.Stream;
public interface IBatch2DaoSvc {
@@ -76,4 +79,13 @@ default IResourcePidStream fetchResourceIdStream(
return new ListWrappingPidStream(fetchResourceIdsPage(
theStart, theEnd, 20000 /* ResourceIdListStep.DEFAULT_PAGE_SIZE */, theTargetPartitionId, theUrl));
}
+
+ /**
+	 * Stream the Resource Ids of all resources that have a reference to the provided resource id
+	 * @param theTargetId the id of the resource we are searching for references to
+	 * @return a stream of the ids of the referencing (source) resources
+ */
+ default Stream streamSourceIdsThatReferenceTargetId(IIdType theTargetId) {
+ throw new UnsupportedOperationException(Msg.code(2594) + "Not implemented unless explicitly overridden");
+ }
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java
index fad65b9c1e37..e3a47a751ee0 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java
@@ -45,6 +45,8 @@ public class BulkExportHelperService {
@Autowired
private FhirContext myContext;
+ public BulkExportHelperService() {}
+
/**
* Given the parameters, create the search parameter map based on type filters and the _since parameter.
*
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/replacereferences/ReplaceReferencesPatchBundleSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/replacereferences/ReplaceReferencesPatchBundleSvc.java
new file mode 100644
index 000000000000..e01057ced108
--- /dev/null
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/replacereferences/ReplaceReferencesPatchBundleSvc.java
@@ -0,0 +1,129 @@
+/*-
+ * #%L
+ * HAPI FHIR Storage api
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.replacereferences;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.util.BundleBuilder;
+import ca.uhn.fhir.util.ResourceReferenceInfo;
+import jakarta.annotation.Nonnull;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.CodeType;
+import org.hl7.fhir.r4.model.Meta;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.StringType;
+import org.hl7.fhir.r4.model.Type;
+
+import java.util.List;
+import java.util.UUID;
+
+import static ca.uhn.fhir.jpa.patch.FhirPatch.OPERATION_REPLACE;
+import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_OPERATION;
+import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_PATH;
+import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_TYPE;
+import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_VALUE;
+
+public class ReplaceReferencesPatchBundleSvc {
+
+ private final FhirContext myFhirContext;
+ private final DaoRegistry myDaoRegistry;
+
+ public ReplaceReferencesPatchBundleSvc(DaoRegistry theDaoRegistry) {
+ myDaoRegistry = theDaoRegistry;
+ myFhirContext = theDaoRegistry.getFhirContext();
+ }
+
+ /**
+ * Build a bundle of PATCH entries that make the requested reference updates
+ * @param theReplaceReferencesRequest source and target for reference switch
+	 * @param theResourceIds the ids of the resources to create the patch entries for (they will all have references to the source resource)
+	 * @param theRequestDetails the request details to propagate to the DAO read and transaction calls
+	 * @return the transaction response Bundle resulting from executing the bundle of PATCH entries
+ */
+ public Bundle patchReferencingResources(
+ ReplaceReferencesRequest theReplaceReferencesRequest,
+ List theResourceIds,
+ RequestDetails theRequestDetails) {
+ Bundle patchBundle = buildPatchBundle(theReplaceReferencesRequest, theResourceIds, theRequestDetails);
+ IFhirSystemDao systemDao = myDaoRegistry.getSystemDao();
+ Bundle result = systemDao.transaction(theRequestDetails, patchBundle);
+
+ result.setId(UUID.randomUUID().toString());
+ return result;
+ }
+
+ private Bundle buildPatchBundle(
+ ReplaceReferencesRequest theReplaceReferencesRequest,
+ List theResourceIds,
+ RequestDetails theRequestDetails) {
+ BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
+
+ theResourceIds.forEach(referencingResourceId -> {
+ IFhirResourceDao> dao = myDaoRegistry.getResourceDao(referencingResourceId.getResourceType());
+ IBaseResource resource = dao.read(referencingResourceId, theRequestDetails);
+ Parameters patchParams = buildPatchParams(theReplaceReferencesRequest, resource);
+ IIdType resourceId = resource.getIdElement();
+ bundleBuilder.addTransactionFhirPatchEntry(resourceId, patchParams);
+ });
+ return bundleBuilder.getBundleTyped();
+ }
+
+ private @Nonnull Parameters buildPatchParams(
+ ReplaceReferencesRequest theReplaceReferencesRequest, IBaseResource referencingResource) {
+ Parameters params = new Parameters();
+
+ myFhirContext.newTerser().getAllResourceReferences(referencingResource).stream()
+ .filter(refInfo -> matches(
+ refInfo,
+ theReplaceReferencesRequest.sourceId)) // We only care about references to our source resource
+ .map(refInfo -> createReplaceReferencePatchOperation(
+ referencingResource.fhirType() + "." + refInfo.getName(),
+ new Reference(theReplaceReferencesRequest.targetId.getValueAsString())))
+ .forEach(params::addParameter); // Add each operation to parameters
+ return params;
+ }
+
+ private static boolean matches(ResourceReferenceInfo refInfo, IIdType theSourceId) {
+ return refInfo.getResourceReference()
+ .getReferenceElement()
+ .toUnqualifiedVersionless()
+ .getValueAsString()
+ .equals(theSourceId.getValueAsString());
+ }
+
+ @Nonnull
+ private Parameters.ParametersParameterComponent createReplaceReferencePatchOperation(
+ String thePath, Type theValue) {
+
+ Parameters.ParametersParameterComponent operation = new Parameters.ParametersParameterComponent();
+ operation.setName(PARAMETER_OPERATION);
+ operation.addPart().setName(PARAMETER_TYPE).setValue(new CodeType(OPERATION_REPLACE));
+ operation.addPart().setName(PARAMETER_PATH).setValue(new StringType(thePath));
+ operation.addPart().setName(PARAMETER_VALUE).setValue(theValue);
+ return operation;
+ }
+}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/replacereferences/ReplaceReferencesRequest.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/replacereferences/ReplaceReferencesRequest.java
new file mode 100644
index 000000000000..d3855a24d805
--- /dev/null
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/replacereferences/ReplaceReferencesRequest.java
@@ -0,0 +1,78 @@
+/*-
+ * #%L
+ * HAPI FHIR Storage api
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.replacereferences;
+
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import jakarta.annotation.Nonnull;
+import org.hl7.fhir.instance.model.api.IIdType;
+
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID;
+import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID;
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
+public class ReplaceReferencesRequest {
+ /**
+	 * Source id, normalized to unqualified versionless form in the constructor
+ */
+ @Nonnull
+ public final IIdType sourceId;
+
+ /**
+ * Unqualified target id
+ */
+ @Nonnull
+ public final IIdType targetId;
+
+ public final int resourceLimit;
+
+ public final RequestPartitionId partitionId;
+
+ public ReplaceReferencesRequest(
+ @Nonnull IIdType theSourceId,
+ @Nonnull IIdType theTargetId,
+ int theResourceLimit,
+ RequestPartitionId thePartitionId) {
+ sourceId = theSourceId.toUnqualifiedVersionless();
+ targetId = theTargetId.toUnqualifiedVersionless();
+ resourceLimit = theResourceLimit;
+ partitionId = thePartitionId;
+ }
+
+ public void validateOrThrowInvalidParameterException() {
+ if (isBlank(sourceId.getResourceType())) {
+ throw new InvalidRequestException(
+ Msg.code(2585) + "'" + OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID
+ + "' must be a resource type qualified id");
+ }
+
+ if (isBlank(targetId.getResourceType())) {
+ throw new InvalidRequestException(
+ Msg.code(2586) + "'" + OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID
+ + "' must be a resource type qualified id");
+ }
+
+ if (!targetId.getResourceType().equals(sourceId.getResourceType())) {
+ throw new InvalidRequestException(
+ Msg.code(2587) + "Source and target id parameters must be for the same resource type");
+ }
+ }
+}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java
index b48e6567fdc5..19fd96c434d4 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java
@@ -24,8 +24,10 @@
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.model.primitive.UriDt;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.hl7.fhir.instance.model.api.IBase;
import java.util.List;
@@ -92,4 +94,24 @@ public boolean equals(Object theO) {
public int hashCode() {
return new HashCodeBuilder(17, 37).append(mySystem).append(myValue).toHashCode();
}
+
+ public static CanonicalIdentifier fromIdentifier(IBase theIdentifier) {
+ CanonicalIdentifier retval = new CanonicalIdentifier();
+
+ // TODO add other fields like "use" etc
+ if (theIdentifier instanceof org.hl7.fhir.dstu3.model.Identifier) {
+ org.hl7.fhir.dstu3.model.Identifier ident = (org.hl7.fhir.dstu3.model.Identifier) theIdentifier;
+ retval.setSystem(ident.getSystem()).setValue(ident.getValue());
+ } else if (theIdentifier instanceof org.hl7.fhir.r4.model.Identifier) {
+ org.hl7.fhir.r4.model.Identifier ident = (org.hl7.fhir.r4.model.Identifier) theIdentifier;
+ retval.setSystem(ident.getSystem()).setValue(ident.getValue());
+ } else if (theIdentifier instanceof org.hl7.fhir.r5.model.Identifier) {
+ org.hl7.fhir.r5.model.Identifier ident = (org.hl7.fhir.r5.model.Identifier) theIdentifier;
+ retval.setSystem(ident.getSystem()).setValue(ident.getValue());
+ } else {
+ throw new InternalErrorException(Msg.code(1486) + "Expected 'Identifier' type but was '"
+ + theIdentifier.getClass().getName() + "'");
+ }
+ return retval;
+ }
}
diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml
index 9aa50c1080cb..901e94989500 100644
--- a/hapi-fhir-structures-dstu2.1/pom.xml
+++ b/hapi-fhir-structures-dstu2.1/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml
index 88a52660638b..821818595997 100644
--- a/hapi-fhir-structures-dstu2/pom.xml
+++ b/hapi-fhir-structures-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml
index 048cc7351c16..dee2018d312c 100644
--- a/hapi-fhir-structures-dstu3/pom.xml
+++ b/hapi-fhir-structures-dstu3/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml
index 90f1c63fd951..9897236804fe 100644
--- a/hapi-fhir-structures-hl7org-dstu2/pom.xml
+++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml
index 45715423d8dc..56f110cd4732 100644
--- a/hapi-fhir-structures-r4/pom.xml
+++ b/hapi-fhir-structures-r4/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/ConsentInterceptorTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/ConsentInterceptorTest.java
index 186d14542323..73a1f9ba1609 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/ConsentInterceptorTest.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/interceptor/ConsentInterceptorTest.java
@@ -65,6 +65,7 @@
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isA;
+import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.timeout;
@@ -138,6 +139,7 @@ public void testOutcomeSuccess() throws IOException {
when(myConsentSvc.startOperation(any(), any())).thenReturn(ConsentOutcome.PROCEED);
when(myConsentSvc.canSeeResource(any(), any(), any())).thenReturn(ConsentOutcome.PROCEED);
when(myConsentSvc.willSeeResource(any(), any(), any())).thenReturn(ConsentOutcome.PROCEED);
+ doNothing().when(myConsentSvc).completeOperationSuccess(any(), any());
HttpGet httpGet = new HttpGet("http://localhost:" + myPort + "/Patient");
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/OperationOutcomeUtilTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/OperationOutcomeUtilTest.java
index b1cba4a3be4a..88529113162e 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/OperationOutcomeUtilTest.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/OperationOutcomeUtilTest.java
@@ -4,6 +4,8 @@
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -44,6 +46,36 @@ public void testAddIssueWithMessageId() {
assertThat(oo.getIssueFirstRep().getDetails()).as("OO.issue.details is empty").isNotNull();
}
+ @ParameterizedTest
+ @CsvSource(value = {
+ "system, code, text",
+ "system, code, null",
+ "system, null, text",
+ "null, code, text",
+ "system, null, null",
+ "null, code, null ",
+ "null, null, text",
+ "null, null, null ",
+ }, nullValues={"null"})
+ public void testAddDetailsToIssue(String theSystem, String theCode, String theText) {
+
+ OperationOutcome oo = (OperationOutcome) OperationOutcomeUtil.newInstance(myCtx);
+ OperationOutcomeUtil.addIssue(myCtx, oo, "error", "Help i'm a bug",null, null);
+
+ OperationOutcomeUtil.addDetailsToIssue(myCtx, oo.getIssueFirstRep(), theSystem, theCode, theText);
+
+ assertThat(oo.getIssueFirstRep().getDetails().getText()).isEqualTo(theText);
+ if (theCode != null || theSystem != null) {
+ assertThat(oo.getIssueFirstRep().getDetails().getCoding()).hasSize(1);
+ assertThat(oo.getIssueFirstRep().getDetails().getCodingFirstRep().getSystem()).isEqualTo(theSystem);
+ assertThat(oo.getIssueFirstRep().getDetails().getCodingFirstRep().getCode()).isEqualTo(theCode);
+ }
+ else {
+		// both code and system are null, so no coding should be present
+ assertThat(oo.getIssueFirstRep().getDetails().getCoding()).isEmpty();
+ }
+ }
+
@Test
public void hasIssuesOfSeverity_noMatchingIssues() {
OperationOutcome oo = new OperationOutcome();
diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml
index 4d234e03c116..98cfe1bd0b93 100644
--- a/hapi-fhir-structures-r4b/pom.xml
+++ b/hapi-fhir-structures-r4b/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml
index 961aa4c015ba..9ea5a9380e4c 100644
--- a/hapi-fhir-structures-r5/pom.xml
+++ b/hapi-fhir-structures-r5/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml
index 3f20fb916d44..f48a38d1efcd 100644
--- a/hapi-fhir-test-utilities/pom.xml
+++ b/hapi-fhir-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/validation/ValidationTestUtil.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/validation/ValidationTestUtil.java
index 56a3235fea13..0ad2d570a360 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/validation/ValidationTestUtil.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/validation/ValidationTestUtil.java
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
package ca.uhn.fhir.test.utilities.validation;
import ca.uhn.fhir.rest.api.MethodOutcome;
diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml
index e36bb384c964..8dc3380dddac 100644
--- a/hapi-fhir-testpage-overlay/pom.xml
+++ b/hapi-fhir-testpage-overlay/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml
index 24ddd7da033f..d8804878cd59 100644
--- a/hapi-fhir-validation-resources-dstu2.1/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml
index 617648dc364c..df7f21cbbef9 100644
--- a/hapi-fhir-validation-resources-dstu2/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml
index 5ea89fc67fc2..7b487a8622a1 100644
--- a/hapi-fhir-validation-resources-dstu3/pom.xml
+++ b/hapi-fhir-validation-resources-dstu3/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml
index 1dad8ee8663d..2103ecad9c3c 100644
--- a/hapi-fhir-validation-resources-r4/pom.xml
+++ b/hapi-fhir-validation-resources-r4/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml
index 136214b8ded4..b6d795ebe7bf 100644
--- a/hapi-fhir-validation-resources-r4b/pom.xml
+++ b/hapi-fhir-validation-resources-r4b/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml
index 0d974f262eea..77f2a2869177 100644
--- a/hapi-fhir-validation-resources-r5/pom.xml
+++ b/hapi-fhir-validation-resources-r5/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml
index 58a8c15a5d2b..6ed7a2f03ce7 100644
--- a/hapi-fhir-validation/pom.xml
+++ b/hapi-fhir-validation/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml
index a93556ea13e9..677a3b69e770 100644
--- a/hapi-tinder-plugin/pom.xml
+++ b/hapi-tinder-plugin/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml
index 568fbaef8792..9ef1aeb85123 100644
--- a/hapi-tinder-test/pom.xml
+++ b/hapi-tinder-test/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../pom.xml
diff --git a/pom.xml b/pom.xml
index 7b54f4fb5303..4cd901610e85 100644
--- a/pom.xml
+++ b/pom.xml
@@ -8,7 +8,7 @@
ca.uhn.hapi.fhir
hapi-fhir
pom
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
HAPI-FHIR
An open-source implementation of the FHIR specification in Java.
@@ -2669,7 +2669,7 @@
ca.uhn.hapi.fhir
hapi-tinder-plugin
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
index 90e79a595876..b62ed06354cc 100644
--- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
+++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
index 9635a9280ae8..0616cff36858 100644
--- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
index 0dfe0e34ed4a..6263da7433f7 100644
--- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 7.7.15-SNAPSHOT
+ 7.7.16-SNAPSHOT
../../pom.xml