From d11f6ac6d0b5447f22de1c741d867e7c54c4beae Mon Sep 17 00:00:00 2001 From: PBobylev Date: Tue, 6 Aug 2024 19:40:39 +0500 Subject: [PATCH] MODSOURCE-752: fix kafka headers and event model --- .../ParsedRecordChunksKafkaHandler.java | 2 +- .../consumers/QuickMarcKafkaHandler.java | 6 +- .../java/org/folio/dao/RecordDaoImpl.java | 12 +-- .../rest/impl/SourceStorageBatchImpl.java | 23 ++--- .../org/folio/services/RecordServiceImpl.java | 55 ++++++----- .../RecordDomainEventPublisher.java | 35 +++---- .../SourceRecordDomainEventType.java | 5 + .../AbstractPostProcessingEventHandler.java | 66 ++++++------- .../AbstractUpdateModifyEventHandler.java | 49 +++++----- .../services/util/EventHandlingUtil.java | 26 ++--- .../AuthorityDomainKafkaHandlerTest.java | 4 +- .../java/org/folio/dao/RecordDaoImplTest.java | 24 +++-- .../AuthorityLinkChunkKafkaHandlerTest.java | 12 ++- .../MarcAuthorityDeleteEventHandlerTest.java | 27 +++--- .../MarcAuthorityMatchEventHandlerTest.java | 45 +++++---- ...AuthorityUpdateModifyEventHandlerTest.java | 45 ++++----- .../MarcBibUpdateModifyEventHandlerTest.java | 13 +-- .../MarcHoldingsMatchEventHandlerTest.java | 47 +++++---- ...cHoldingsUpdateModifyEventHandlerTest.java | 43 ++++---- .../services/QuickMarcKafkaHandlerTest.java | 35 ++++--- .../org/folio/services/RecordServiceTest.java | 64 ++++++------ .../RecordDomainEventPublisherUnitTest.java | 54 ++++------- ...thorityPostProcessingEventHandlerTest.java | 41 ++++---- ...oldingsPostProcessingEventHandlerTest.java | 38 ++++---- ...nstancePostProcessingEventHandlerTest.java | 67 +++++++------ ...rcIndexersVersionDeletionVerticleTest.java | 25 +++-- .../DataImportConsumersVerticleTest.java | 47 +++++---- ramls/source-record-domain-event.json | 97 ------------------- ramls/source-record-storage-records.raml | 1 - 29 files changed, 432 insertions(+), 576 deletions(-) create mode 100644 mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java delete mode 100644 ramls/source-record-domain-event.json diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java index 4dec329df..2e4c6a163 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/ParsedRecordChunksKafkaHandler.java @@ -86,7 +86,7 @@ public Future handle(KafkaConsumerRecord targetRecord) { LOGGER.debug("handle:: RecordCollection has been received with event: '{}', jobExecutionId '{}', chunkId: '{}', starting processing... 
chunkNumber '{}'-'{}'", event.getEventType(), jobExecutionId, chunkId, chunkNumber, key); setUserMetadata(recordCollection, userId); - return recordService.saveRecords(recordCollection, toOkapiHeaders(kafkaHeaders, null)) + return recordService.saveRecords(recordCollection, toOkapiHeaders(kafkaHeaders)) .compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, event.getEventType(), targetRecord)); } catch (Exception e) { LOGGER.warn("handle:: RecordCollection processing has failed with errors jobExecutionId '{}', chunkId: '{}', chunkNumber '{}'-'{}'", diff --git a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java index a54182612..136e2ea57 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/consumers/QuickMarcKafkaHandler.java @@ -2,7 +2,7 @@ import static org.folio.dao.util.QMEventTypes.QM_ERROR; import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.EventHandlingUtil.createProducer; import static org.folio.services.util.EventHandlingUtil.createProducerRecord; import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; @@ -65,12 +65,12 @@ public Future handle(KafkaConsumerRecord consumerRecord) log.trace("handle:: Handling kafka consumerRecord {}", consumerRecord); var kafkaHeaders = consumerRecord.headers(); - var okapiHeaders = toOkapiHeaders(kafkaHeaders, null); + var okapiHeaders = toOkapiHeaders(kafkaHeaders); return getEventPayload(consumerRecord) .compose(eventPayload -> { String snapshotId = eventPayload.getOrDefault(SNAPSHOT_ID_KEY, UUID.randomUUID().toString()); - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); return getRecordDto(eventPayload) .compose(recordDto -> recordService.updateSourceRecord(recordDto, snapshotId, okapiHeaders)) .compose(updatedRecord -> { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java index 6a9f89a63..c8a940bb5 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/dao/RecordDaoImpl.java @@ -15,7 +15,6 @@ import static org.folio.dao.util.RecordDaoUtil.getExternalId; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jooq.Tables.ERROR_RECORDS_LB; import static org.folio.rest.jooq.Tables.MARC_RECORDS_LB; import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; @@ -23,6 +22,7 @@ import static org.folio.rest.jooq.Tables.RECORDS_LB; import static org.folio.rest.jooq.Tables.SNAPSHOTS_LB; import static org.folio.rest.jooq.enums.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.rest.util.QueryParamUtil.toRecordType; import static org.jooq.impl.DSL.condition; import static 
org.jooq.impl.DSL.countDistinct; @@ -733,7 +733,7 @@ public Future> getRecordByCondition(ReactiveClassicGenericQuery @Override public Future saveRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.trace("saveRecord:: Saving {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> saveRecord(txQE, record, okapiHeaders)); } @@ -748,7 +748,7 @@ public Future saveRecord(ReactiveClassicGenericQueryExecutor txQE, Recor @Override public Future saveRecords(RecordCollection recordCollection, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); logRecordCollection("saveRecords:: Saving", recordCollection, tenantId); Promise finalPromise = Promise.promise(); Context context = Vertx.currentContext(); @@ -966,7 +966,7 @@ public Future saveRecords(RecordCollection recordCollectio @Override public Future updateRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.trace("updateRecord:: Updating {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> getRecordById(txQE, record.getId()) .compose(optionalRecord -> optionalRecord @@ -1078,7 +1078,7 @@ public Future calculateGeneration(ReactiveClassicGenericQueryExecutor t @Override public Future updateParsedRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.trace("updateParsedRecord:: Updating {} record {} for tenant {}", record.getRecordType(), record.getId(), tenantId); return getQueryExecutor(tenantId).transaction(txQE -> GenericCompositeFuture.all(Lists.newArrayList( @@ -1090,7 +1090,7 @@ public Future updateParsedRecord(Record record, Map updateParsedRecords(RecordCollection recordCollection, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); logRecordCollection("updateParsedRecords:: Updating", recordCollection, tenantId); Promise promise = Promise.promise(); Context context = Vertx.currentContext(); diff --git a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java index 557258640..7a128b607 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/rest/impl/SourceStorageBatchImpl.java @@ -1,12 +1,17 @@ package org.folio.rest.impl; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import io.vertx.core.AsyncResult; +import io.vertx.core.Context; +import io.vertx.core.Future; +import io.vertx.core.Handler; +import io.vertx.core.Vertx; import java.util.List; import java.util.Map; - import javax.ws.rs.core.Response; - +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.folio.dataimport.util.ExceptionHelper; import org.folio.rest.jaxrs.model.FetchParsedRecordsBatchRequest; import org.folio.rest.jaxrs.model.RecordCollection; @@ -17,14 +22,6 @@ import org.folio.spring.SpringContextUtil; import 
org.springframework.beans.factory.annotation.Autowired; -import io.vertx.core.AsyncResult; -import io.vertx.core.Context; -import io.vertx.core.Future; -import io.vertx.core.Handler; -import io.vertx.core.Vertx; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - public class SourceStorageBatchImpl implements SourceStorageBatch { private static final Logger LOG = LogManager.getLogger(); @@ -59,7 +56,7 @@ public void postSourceStorageBatchVerifiedRecords(List marcBibIds, Map okapiHeaders, Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { - okapiHeaders.put(TENANT, tenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, tenantId); try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); recordService.saveRecords(entity, okapiHeaders) @@ -85,7 +82,7 @@ public void postSourceStorageBatchRecords(RecordCollection entity, Map okapiHeaders, Handler> asyncResultHandler, Context vertxContext) { vertxContext.runOnContext(v -> { - okapiHeaders.put(TENANT, tenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, tenantId); try { MetadataUtil.populateMetadata(entity.getRecords(), okapiHeaders); recordService.updateParsedRecords(entity, okapiHeaders) diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java index 1eabfc84a..d8d3ce82f 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/RecordServiceImpl.java @@ -14,12 +14,20 @@ import static org.folio.dao.util.RecordDaoUtil.getExternalIdsConditionWithQualifier; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_FOUND_TEMPLATE; import static org.folio.dao.util.SnapshotDaoUtil.SNAPSHOT_NOT_STARTED_MESSAGE_TEMPLATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.rest.util.QueryParamUtil.toRecordType; import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; import static org.folio.services.util.AdditionalFieldsUtil.getFieldFromMarcRecord; +import io.reactivex.Flowable; +import io.vertx.core.AsyncResult; +import io.vertx.core.Future; +import io.vertx.core.Promise; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.pgclient.PgException; +import io.vertx.sqlclient.Row; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -32,41 +40,22 @@ import java.util.stream.Collectors; import javax.ws.rs.BadRequestException; import javax.ws.rs.NotFoundException; - -import io.reactivex.Flowable; -import io.vertx.core.AsyncResult; -import io.vertx.core.Future; -import io.vertx.core.Promise; -import io.vertx.core.json.JsonArray; -import io.vertx.core.json.JsonObject; -import io.vertx.pgclient.PgException; -import io.vertx.sqlclient.Row; import net.sf.jsqlparser.JSQLParserException; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.folio.dao.RecordDao; import org.folio.dao.util.IdType; -import org.folio.dao.util.ParsedRecordDaoUtil; import org.folio.dao.util.MatchField; +import org.folio.dao.util.ParsedRecordDaoUtil; import org.folio.dao.util.RecordDaoUtil; import 
org.folio.dao.util.RecordType; import org.folio.dao.util.SnapshotDaoUtil; import org.folio.okapi.common.GenericCompositeFuture; import org.folio.processing.value.ListValue; -import org.folio.rest.jaxrs.model.Filter; -import org.folio.rest.jaxrs.model.RecordIdentifiersDto; -import org.folio.rest.jaxrs.model.RecordMatchingDto; -import org.folio.rest.jaxrs.model.RecordsIdentifiersCollection; -import org.folio.services.exceptions.DuplicateRecordException; -import org.folio.services.util.AdditionalFieldsUtil; -import org.folio.services.util.TypeConnection; -import org.jooq.Condition; -import org.jooq.OrderField; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.folio.dao.RecordDao; import org.folio.rest.jaxrs.model.FetchParsedRecordsBatchRequest; import org.folio.rest.jaxrs.model.FieldRange; +import org.folio.rest.jaxrs.model.Filter; import org.folio.rest.jaxrs.model.MarcBibCollection; import org.folio.rest.jaxrs.model.ParsedRecord; import org.folio.rest.jaxrs.model.ParsedRecordDto; @@ -74,15 +63,25 @@ import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.RecordCollection; +import org.folio.rest.jaxrs.model.RecordIdentifiersDto; +import org.folio.rest.jaxrs.model.RecordMatchingDto; import org.folio.rest.jaxrs.model.RecordsBatchResponse; +import org.folio.rest.jaxrs.model.RecordsIdentifiersCollection; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.rest.jaxrs.model.SourceRecord; import org.folio.rest.jaxrs.model.SourceRecordCollection; import org.folio.rest.jaxrs.model.StrippedParsedRecordCollection; import org.folio.rest.jooq.enums.RecordState; +import org.folio.services.exceptions.DuplicateRecordException; +import org.folio.services.util.AdditionalFieldsUtil; +import org.folio.services.util.TypeConnection; import org.folio.services.util.parser.ParseFieldsResult; import org.folio.services.util.parser.ParseLeaderResult; import org.folio.services.util.parser.SearchExpressionParser; +import org.jooq.Condition; +import org.jooq.OrderField; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; @Service public class RecordServiceImpl implements RecordService { @@ -125,7 +124,7 @@ public Future> getRecordById(String id, String tenantId) { @Override public Future saveRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); LOG.debug("saveRecord:: Saving record with id: {} for tenant: {}", record.getId(), tenantId); ensureRecordHasId(record); ensureRecordHasSuppressDiscovery(record); @@ -168,7 +167,7 @@ public Future saveRecords(RecordCollection recordCollectio } List setMatchedIdsFutures = new ArrayList<>(); recordCollection.getRecords().forEach(record -> setMatchedIdsFutures.add(setMatchedIdForRecord(record, - okapiHeaders.get(TENANT)))); + okapiHeaders.get(OKAPI_TENANT_HEADER)))); return GenericCompositeFuture.all(setMatchedIdsFutures) .compose(ar -> ar.succeeded() ? 
recordDao.saveRecords(recordCollection, okapiHeaders) @@ -189,7 +188,7 @@ public Future updateRecordGeneration(String matchedId, Record record, Ma } record.setId(UUID.randomUUID().toString()); - return recordDao.getRecordByMatchedId(matchedId, okapiHeaders.get(TENANT)) + return recordDao.getRecordByMatchedId(matchedId, okapiHeaders.get(OKAPI_TENANT_HEADER)) .map(r -> r.orElseThrow(() -> new NotFoundException(format(RECORD_WITH_GIVEN_MATCHED_ID_NOT_FOUND, matchedId)))) .compose(v -> saveRecord(record, okapiHeaders)) .recover(throwable -> { @@ -316,7 +315,7 @@ public Future updateSourceRecord(ParsedRecordDto parsedRecordDto, String .withAdditionalInfo(parsedRecordDto.getAdditionalInfo()) .withMetadata(parsedRecordDto.getMetadata()), existingRecord.withState(Record.State.OLD), okapiHeaders))) .orElse(Future.failedFuture(new NotFoundException( - format(RECORD_NOT_FOUND_TEMPLATE, parsedRecordDto.getId()))))), okapiHeaders.get(TENANT)); + format(RECORD_NOT_FOUND_TEMPLATE, parsedRecordDto.getId()))))), okapiHeaders.get(OKAPI_TENANT_HEADER)); } @Override @@ -346,7 +345,7 @@ public Future getMatchedRecordsIdentifiers(RecordM @Override public Future deleteRecordById(String id, IdType idType, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); return recordDao.getRecordByExternalId(id, idType, tenantId) .map(recordOptional -> recordOptional.orElseThrow(() -> new NotFoundException(format(NOT_FOUND_MESSAGE, Record.class.getSimpleName(), id)))) .map(record -> { diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java index 15d5289c7..ed4758847 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/RecordDomainEventPublisher.java @@ -1,22 +1,19 @@ package org.folio.services.domainevent; import static java.util.Objects.isNull; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.okapi.common.XOkapiHeaders.TOKEN; -import static org.folio.okapi.common.XOkapiHeaders.URL; -import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED; -import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; +import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_CREATED; +import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_UPDATED; -import io.vertx.core.json.Json; import io.vertx.kafka.client.producer.KafkaHeader; import java.util.List; import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.folio.services.kafka.KafkaSender; import org.folio.rest.jaxrs.model.Record; -import org.folio.rest.jaxrs.model.SourceRecordDomainEvent; -import org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; +import org.folio.services.kafka.KafkaSender; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import 
org.springframework.stereotype.Component; @@ -40,15 +37,15 @@ public void publishRecordUpdated(Record updated, Map okapiHeader publishRecord(updated, okapiHeaders, SOURCE_RECORD_UPDATED); } - private void publishRecord(Record aRecord, Map okapiHeaders, EventType eventType) { + private void publishRecord(Record aRecord, Map okapiHeaders, SourceRecordDomainEventType eventType) { if (!domainEventsEnabled || notValidForPublishing(aRecord)) { return; } try { var kafkaHeaders = getKafkaHeaders(okapiHeaders, aRecord.getRecordType()); var key = aRecord.getId(); - kafkaSender.sendEventToKafka(okapiHeaders.get(TENANT), getEvent(aRecord, eventType), eventType.value(), - kafkaHeaders, key); + kafkaSender.sendEventToKafka(okapiHeaders.get(OKAPI_TENANT_HEADER), aRecord.getRawRecord().getContent(), + eventType.name(), kafkaHeaders, key); } catch (Exception e) { LOG.error("Exception during Record domain event sending", e); } @@ -73,19 +70,11 @@ private boolean notValidForPublishing(Record aRecord) { private List getKafkaHeaders(Map okapiHeaders, Record.RecordType recordType) { return List.of( - KafkaHeader.header(URL, okapiHeaders.get(URL)), - KafkaHeader.header(TENANT, okapiHeaders.get(TENANT)), - KafkaHeader.header(TOKEN, okapiHeaders.get(TOKEN)), + KafkaHeader.header(OKAPI_URL_HEADER, okapiHeaders.get(OKAPI_URL_HEADER)), + KafkaHeader.header(OKAPI_TENANT_HEADER, okapiHeaders.get(OKAPI_TENANT_HEADER)), + KafkaHeader.header(OKAPI_TOKEN_HEADER, okapiHeaders.get(OKAPI_TOKEN_HEADER)), KafkaHeader.header(RECORD_TYPE, recordType.value()) ); } - private String getEvent(Record eventRecord, EventType type) { - var event = new SourceRecordDomainEvent() - .withId(eventRecord.getId()) - .withEventType(type) - .withEventPayload(eventRecord.getRawRecord().getContent()); - return Json.encode(event); - } - } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java new file mode 100644 index 000000000..983d3fe25 --- /dev/null +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/domainevent/SourceRecordDomainEventType.java @@ -0,0 +1,5 @@ +package org.folio.services.domainevent; + +public enum SourceRecordDomainEventType { + SOURCE_RECORD_CREATED, SOURCE_RECORD_UPDATED +} diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java index 58a8a630b..85a263b77 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/AbstractPostProcessingEventHandler.java @@ -1,12 +1,40 @@ package org.folio.services.handlers; +import static java.lang.String.format; +import static org.apache.commons.lang.StringUtils.isEmpty; +import static org.apache.commons.lang.StringUtils.isNotEmpty; +import static org.folio.dao.util.MarcUtil.reorderMarcRecordFields; +import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId; +import static org.folio.dao.util.RecordDaoUtil.filterRecordByNotSnapshotId; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; +import static 
org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; +import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; +import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; +import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; +import static org.folio.services.util.AdditionalFieldsUtil.fillHrIdFieldInMarcRecord; +import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; +import static org.folio.services.util.AdditionalFieldsUtil.isFieldsFillingNeeded; +import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; +import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; +import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; +import static org.folio.services.util.RestUtil.retrieveOkapiConnectionParams; + import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; import io.vertx.kafka.client.producer.KafkaHeader; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -32,36 +60,6 @@ import org.folio.services.util.TypeConnection; import org.jooq.Condition; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.atomic.AtomicInteger; - -import static java.lang.String.format; -import static org.apache.commons.lang.StringUtils.isEmpty; -import static org.apache.commons.lang.StringUtils.isNotEmpty; -import static org.folio.dao.util.MarcUtil.reorderMarcRecordFields; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByExternalId; -import static org.folio.dao.util.RecordDaoUtil.filterRecordByNotSnapshotId; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; -import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; -import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; -import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; -import static org.folio.services.util.AdditionalFieldsUtil.TAG_999; -import static org.folio.services.util.AdditionalFieldsUtil.addFieldToMarcRecord; -import static org.folio.services.util.AdditionalFieldsUtil.fillHrIdFieldInMarcRecord; -import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; -import static org.folio.services.util.AdditionalFieldsUtil.isFieldsFillingNeeded; -import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; -import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; -import static org.folio.services.util.EventHandlingUtil.sendEventToKafka; -import static 
org.folio.services.util.EventHandlingUtil.toOkapiHeaders; -import static org.folio.services.util.RestUtil.retrieveOkapiConnectionParams; - public abstract class AbstractPostProcessingEventHandler implements EventHandler { private static final String USER_ID_HEADER = "userId"; @@ -254,8 +252,8 @@ private Future updatePreviousRecordsState(String externalId, String snapsh Condition condition = filterRecordByNotSnapshotId(snapshotId) .and(filterRecordByExternalId(externalId)); - return recordService.getRecords(condition, getDbType(), new ArrayList<>(), 0, 999, okapiHeaders.get(TENANT)) - .compose(recordCollection -> { + return recordService.getRecords(condition, getDbType(), new ArrayList<>(), 0, 999, + okapiHeaders.get(OKAPI_TENANT_HEADER)).compose(recordCollection -> { Promise result = Promise.promise(); @SuppressWarnings("squid:S3740") List> futures = new ArrayList<>(); @@ -315,7 +313,7 @@ private void executeHridManipulation(Record record, JsonObject externalEntity) { * @return - Future with Record result */ private Future saveRecord(Record record, Map okapiHeaders) { - var tenantId = okapiHeaders.get(TENANT); + var tenantId = okapiHeaders.get(OKAPI_TENANT_HEADER); return recordService.getRecordById(record.getId(), tenantId) .compose(r -> { if (r.isPresent()) { @@ -354,7 +352,7 @@ private Future saveRecordForCentralTenant(DataImportEventPayload dataImp LOG.info("handle:: Processing AbstractPostProcessingEventHandler - saving record by jobExecutionId: {} for the central tenantId: {}", jobExecutionId, centralTenantId); var okapiHeaders = toOkapiHeaders(dataImportEventPayload); if (centralTenantId != null) { - okapiHeaders.put(TENANT, centralTenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, centralTenantId); return snapshotService.copySnapshotToOtherTenant(record.getSnapshotId(), dataImportEventPayload.getTenant(), centralTenantId) .compose(f -> saveRecord(record, okapiHeaders)); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java index 5b6f38a36..09c1b9d49 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/handlers/actions/AbstractUpdateModifyEventHandler.java @@ -1,10 +1,33 @@ package org.folio.services.handlers.actions; +import static java.lang.String.format; +import static java.util.Objects.isNull; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID; +import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; +import static org.folio.services.util.AdditionalFieldsUtil.addControlledFieldToMarcRecord; +import static org.folio.services.util.AdditionalFieldsUtil.fill035FieldInMarcRecordIfNotExists; +import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; +import static org.folio.services.util.AdditionalFieldsUtil.normalize035; +import static org.folio.services.util.AdditionalFieldsUtil.remove003FieldIfNeeded; +import static 
org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; +import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; +import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; + import io.vertx.core.Future; import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import java.util.function.Function; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -27,30 +50,6 @@ import org.folio.services.caches.MappingParametersSnapshotCache; import org.folio.services.util.RestUtil; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - -import static java.lang.String.format; -import static java.util.Objects.isNull; -import static java.util.Objects.nonNull; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.folio.ActionProfile.Action.UPDATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; -import static org.folio.services.handlers.match.AbstractMarcMatchEventHandler.CENTRAL_TENANT_ID; -import static org.folio.services.util.AdditionalFieldsUtil.HR_ID_FROM_FIELD; -import static org.folio.services.util.AdditionalFieldsUtil.addControlledFieldToMarcRecord; -import static org.folio.services.util.AdditionalFieldsUtil.fill035FieldInMarcRecordIfNotExists; -import static org.folio.services.util.AdditionalFieldsUtil.getValueFromControlledField; -import static org.folio.services.util.AdditionalFieldsUtil.normalize035; -import static org.folio.services.util.AdditionalFieldsUtil.remove003FieldIfNeeded; -import static org.folio.services.util.AdditionalFieldsUtil.remove035WithActualHrId; -import static org.folio.services.util.AdditionalFieldsUtil.updateLatestTransactionDate; -import static org.folio.services.util.EventHandlingUtil.toOkapiHeaders; - public abstract class AbstractUpdateModifyEventHandler implements EventHandler { private static final Logger LOG = LogManager.getLogger(); @@ -122,7 +121,7 @@ public CompletableFuture handle(DataImportEventPayload p String centralTenantId = payload.getContext().get(CENTRAL_TENANT_ID); var okapiHeaders = toOkapiHeaders(payload); if (centralTenantId != null) { - okapiHeaders.put(TENANT, centralTenantId); + okapiHeaders.put(OKAPI_TENANT_HEADER, centralTenantId); return snapshotService.copySnapshotToOtherTenant(changedRecord.getSnapshotId(), payload.getTenant(), centralTenantId) .compose(snapshot -> recordService.saveRecord(changedRecord, okapiHeaders)); } diff --git a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java index 5f74b924b..8b836df5d 100644 --- a/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java +++ b/mod-source-record-storage-server/src/main/java/org/folio/services/util/EventHandlingUtil.java @@ -2,10 +2,9 @@ import static java.util.Arrays.stream; import static java.util.Objects.nonNull; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.okapi.common.XOkapiHeaders.TOKEN; -import static org.folio.okapi.common.XOkapiHeaders.URL; -import 
static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; import static org.folio.services.domainevent.RecordDomainEventPublisher.RECORD_DOMAIN_EVENT_TOPIC; import static org.folio.services.util.KafkaUtil.extractHeaderValue; @@ -30,6 +29,7 @@ import org.folio.rest.jaxrs.model.Event; import org.folio.rest.jaxrs.model.EventMetadata; import org.folio.rest.tools.utils.ModuleName; +import org.folio.services.domainevent.SourceRecordDomainEventType; public final class EventHandlingUtil { @@ -107,7 +107,7 @@ public static String constructModuleName() { } public static String createTopicName(String eventType, String tenantId, KafkaConfig kafkaConfig) { - if (stream(EventType.values()).anyMatch(et -> et.value().equals(eventType))) { + if (stream(SourceRecordDomainEventType.values()).anyMatch(et -> et.name().equals(eventType))) { return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), tenantId, RECORD_DOMAIN_EVENT_TOPIC); } return KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(), @@ -124,17 +124,21 @@ public static KafkaProducer createProducer(String eventType, Kaf public static Map toOkapiHeaders(DataImportEventPayload eventPayload) { var okapiHeaders = new HashMap(); - okapiHeaders.put(URL, eventPayload.getOkapiUrl()); - okapiHeaders.put(TENANT, eventPayload.getTenant()); - okapiHeaders.put(TOKEN, eventPayload.getToken()); + okapiHeaders.put(OKAPI_URL_HEADER, eventPayload.getOkapiUrl()); + okapiHeaders.put(OKAPI_TENANT_HEADER, eventPayload.getTenant()); + okapiHeaders.put(OKAPI_TOKEN_HEADER, eventPayload.getToken()); return okapiHeaders; } + public static Map toOkapiHeaders(List kafkaHeaders) { + return toOkapiHeaders(kafkaHeaders, null); + } + public static Map toOkapiHeaders(List kafkaHeaders, String eventTenantId) { var okapiHeaders = new HashMap(); - okapiHeaders.put(URL, extractHeaderValue(URL, kafkaHeaders)); - okapiHeaders.put(TENANT, nonNull(eventTenantId) ? eventTenantId : extractHeaderValue(TENANT, kafkaHeaders)); - okapiHeaders.put(TOKEN, extractHeaderValue(TOKEN, kafkaHeaders)); + okapiHeaders.put(OKAPI_URL_HEADER, extractHeaderValue(OKAPI_URL_HEADER, kafkaHeaders)); + okapiHeaders.put(OKAPI_TENANT_HEADER, nonNull(eventTenantId) ? 
eventTenantId : extractHeaderValue(OKAPI_TENANT_HEADER, kafkaHeaders)); + okapiHeaders.put(OKAPI_TOKEN_HEADER, extractHeaderValue(OKAPI_TOKEN_HEADER, kafkaHeaders)); return okapiHeaders; } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java index 099849baa..4de699fa8 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/consumers/AuthorityDomainKafkaHandlerTest.java @@ -1,7 +1,7 @@ package org.folio.consumers; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import com.fasterxml.jackson.databind.ObjectMapper; import io.vertx.core.json.Json; @@ -84,7 +84,7 @@ record = new Record() .withRecordType(MARC_AUTHORITY) .withRawRecord(rawRecord) .withParsedRecord(parsedRecord); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onSuccess(ar -> async.complete()) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java index 22fdc8e4f..8fac17c66 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/dao/RecordDaoImplTest.java @@ -1,11 +1,21 @@ package org.folio.dao; +import static org.folio.dao.RecordDaoImpl.INDEXERS_DELETION_LOCK_NAMESPACE_ID; +import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; +import static org.folio.rest.jaxrs.model.Record.State.DELETED; +import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; + import com.fasterxml.jackson.databind.ObjectMapper; import io.vertx.core.Future; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.UUID; import org.folio.TestMocks; import org.folio.TestUtil; import org.folio.dao.util.AdvisoryLockUtil; @@ -22,7 +32,6 @@ import org.folio.services.AbstractLBServiceTest; import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.util.TypeConnection; - import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -31,17 +40,6 @@ import org.mockito.MockitoAnnotations; import org.springframework.test.util.ReflectionTestUtils; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.UUID; - -import static org.folio.dao.RecordDaoImpl.INDEXERS_DELETION_LOCK_NAMESPACE_ID; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; -import static org.folio.rest.jaxrs.model.Record.State.DELETED; -import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; - @RunWith(VertxUnitRunner.class) public class RecordDaoImplTest extends 
AbstractLBServiceTest { @@ -92,7 +90,7 @@ public void setUp(TestContext context) throws IOException { .withExternalIdsHolder(new ExternalIdsHolder() .withInstanceId(UUID.randomUUID().toString())); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordDao.saveRecord(record, okapiHeaders)) .compose(savedSnapshot -> recordDao.saveRecord(deletedRecord, okapiHeaders)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java index eeac85ac7..870b83ac4 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/AuthorityLinkChunkKafkaHandlerTest.java @@ -4,8 +4,10 @@ import static org.folio.EntityLinksKafkaTopic.INSTANCE_AUTHORITY; import static org.folio.EntityLinksKafkaTopic.LINKS_STATS; import static org.folio.RecordStorageKafkaTopic.MARC_BIB; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.LinkUpdateReport.Status.FAIL; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; @@ -84,9 +86,9 @@ public class AuthorityLinkChunkKafkaHandlerTest extends AbstractLBServiceTest { private static final String USER_ID = UUID.randomUUID().toString(); private static final ObjectMapper objectMapper = new ObjectMapper(); private static final Map OKAPI_HEADERS = Map.of( - XOkapiHeaders.URL, OKAPI_URL, - XOkapiHeaders.TENANT, TENANT_ID, - XOkapiHeaders.TOKEN, TOKEN, + OKAPI_URL_HEADER, OKAPI_URL, + OKAPI_TENANT_HEADER, TENANT_ID, + OKAPI_TOKEN_HEADER, TOKEN, XOkapiHeaders.USER_ID, USER_ID ); private final RawRecord rawRecord = new RawRecord().withId(RECORD_ID) @@ -145,7 +147,7 @@ record = new Record() .withSnapshotId(snapshot.getJobExecutionId()) .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(ERROR_INSTANCE_ID)); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .compose(savedRecord -> recordService.saveRecord(secondRecord, okapiHeaders)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java index bffdc9ada..789309018 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityDeleteEventHandlerTest.java @@ -1,11 +1,23 @@ package org.folio.services; +import static org.folio.ActionProfile.Action.DELETE; +import static org.folio.ActionProfile.Action.UPDATE; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED; 
+import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; + import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.Date; +import java.util.HashMap; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.dao.RecordDaoImpl; @@ -23,22 +35,9 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; - -import java.io.IOException; -import java.util.Date; -import java.util.HashMap; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import static org.folio.ActionProfile.Action.DELETE; -import static org.folio.ActionProfile.Action.UPDATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED; -import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; - @RunWith(VertxUnitRunner.class) public class MarcAuthorityDeleteEventHandlerTest extends AbstractLBServiceTest { @@ -96,7 +95,7 @@ public void shouldDeleteRecord(TestContext context) { .withFolioRecord(ActionProfile.FolioRecord.MARC_AUTHORITY) ) ); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders) // when .onSuccess(ar -> eventHandler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java index 3198628a6..bd659ad58 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityMatchEventHandlerTest.java @@ -1,5 +1,16 @@ package org.folio.services; +import static java.util.Collections.singletonList; +import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MATCHED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED; +import static org.folio.rest.jaxrs.model.MatchExpression.DataValueType.VALUE_FROM_RECORD; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MATCH_PROFILE; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; + import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import io.vertx.core.json.Json; @@ -7,7 +18,13 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import 
java.util.List; import java.util.Map; +import java.util.UUID; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.MatchDetail; @@ -37,24 +54,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.UUID; - -import static java.util.Collections.singletonList; -import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_MATCHED; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_NOT_MATCHED; -import static org.folio.rest.jaxrs.model.MatchExpression.DataValueType.VALUE_FROM_RECORD; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.rest.jaxrs.model.ProfileType.MATCH_PROFILE; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; - @RunWith(VertxUnitRunner.class) public class MarcAuthorityMatchEventHandlerTest extends AbstractLBServiceTest { @@ -175,7 +174,7 @@ public void shouldMatchBy999ffsField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -225,7 +224,7 @@ public void shouldMatchBy001Field(TestContext context) { new Field().withLabel("recordSubfield").withValue("") ))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -276,7 +275,7 @@ public void shouldMatchBy010aField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -324,7 +323,7 @@ public void shouldNotMatchBy999ffsField(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -370,7 +369,7 @@ public void shouldNotMatchBy001Field(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java 
b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java index d9ec3939f..05b2c97de 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcAuthorityUpdateModifyEventHandlerTest.java @@ -1,29 +1,17 @@ package org.folio.services; import static com.github.tomakehurst.wiremock.client.WireMock.get; - import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_AUTHORITY; import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate; -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.common.Slf4jNotifier; @@ -38,15 +26,16 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.JobProfile; @@ -68,7 +57,15 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.caches.MappingParametersSnapshotCache; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.actions.MarcAuthorityUpdateModifyEventHandler; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -186,7 +183,7 @@ record = new Record() .withParsedRecord(parsedRecord); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); - var 
okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) .compose(v -> recordService.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java index 5c3a2c126..1b8f5e6dd 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcBibUpdateModifyEventHandlerTest.java @@ -11,15 +11,14 @@ import static org.apache.commons.lang3.RandomUtils.nextInt; import static org.folio.ActionProfile.Action.MODIFY; import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; import static org.folio.rest.jaxrs.model.MappingDetail.MarcMappingOption.UPDATE; - import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; import com.fasterxml.jackson.core.JsonProcessingException; @@ -36,7 +35,6 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; - import java.io.IOException; import java.util.Collections; import java.util.Date; @@ -48,7 +46,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.InstanceLinkDtoCollection; @@ -293,10 +290,10 @@ record = new Record() ReactiveClassicGenericQueryExecutor queryExecutorCentralTenant = postgresClientFactory.getQueryExecutor(CENTRAL_TENANT_ID); SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshot) - .compose(v -> recordService.saveRecord(record, Map.of(TENANT, TENANT_ID))) + .compose(v -> recordService.saveRecord(record, Map.of(OKAPI_TENANT_HEADER, TENANT_ID))) .compose(v -> SnapshotDaoUtil.save(queryExecutorLocalTenant, snapshotForRecordUpdate)) .compose(v -> SnapshotDaoUtil.save(queryExecutorCentralTenant, snapshot_2)) - .compose(v -> recordService.saveRecord(record_2, Map.of(TENANT, CENTRAL_TENANT_ID))) + .compose(v -> recordService.saveRecord(record_2, Map.of(OKAPI_TENANT_HEADER, CENTRAL_TENANT_ID))) .onComplete(context.asyncAssertSuccess()); } @@ -713,7 +710,7 @@ public void shouldNotUpdateBibFieldWhen500ErrorGetEntityLinkRequest(TestContext .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId)) .withMetadata(new Metadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), secondSnapshot) .compose(v -> recordService.saveRecord(secondRecord, okapiHeaders)) .compose(v -> 
SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshotForRecordUpdate)) @@ -891,7 +888,7 @@ private void verifyBibRecordUpdate(String incomingParsedContent, String expected .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId)) .withMetadata(new Metadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), secondSnapshot) .compose(v -> recordService.saveRecord(secondRecord, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshotForRecordUpdate)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java index b319404bf..d0e430146 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsMatchEventHandlerTest.java @@ -1,9 +1,8 @@ package org.folio.services; import static java.util.Collections.singletonList; - import static org.folio.MatchDetail.MatchCriterion.EXACTLY_MATCHES; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_NOT_MATCHED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDING_RECORD_CREATED; @@ -12,14 +11,6 @@ import static org.folio.rest.jaxrs.model.ProfileType.MATCH_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_HOLDING; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; - import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import io.vertx.core.json.Json; @@ -27,16 +18,13 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.MatchDetail; @@ -55,7 +43,16 @@ import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.match.MarcHoldingsMatchEventHandler; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; @RunWith(VertxUnitRunner.class) public class MarcHoldingsMatchEventHandlerTest 
extends AbstractLBServiceTest { @@ -177,7 +174,7 @@ public void shouldMatchBy999ffsField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -227,7 +224,7 @@ public void shouldMatchBy001Field(TestContext context) { new Field().withLabel("recordSubfield").withValue("") ))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -278,7 +275,7 @@ public void shouldMatchBy010aField(TestContext context) { ))) )))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -326,7 +323,7 @@ public void shouldNotMatchBy999ffsField(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(existingSavedRecord -> handler.handle(dataImportEventPayload) @@ -372,7 +369,7 @@ public void shouldNotMatchBy001Field(TestContext context) { new Field().withLabel("indicator2").withValue(""), new Field().withLabel("recordSubfield").withValue("a")))))))); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(existingRecord, okapiHeaders) .onComplete(context.asyncAssertSuccess()) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java index defffe2bc..3dbb9b299 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/MarcHoldingsUpdateModifyEventHandlerTest.java @@ -1,9 +1,7 @@ package org.folio.services; import static com.github.tomakehurst.wiremock.client.WireMock.get; - import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDINGS_RECORD_UPDATED; import static org.folio.rest.jaxrs.model.EntityType.MARC_HOLDINGS; @@ -12,19 +10,9 @@ import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static 
org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate; -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.common.Slf4jNotifier; @@ -39,15 +27,16 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.JobProfile; @@ -69,7 +58,15 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.caches.MappingParametersSnapshotCache; +import org.folio.services.domainevent.RecordDomainEventPublisher; import org.folio.services.handlers.actions.MarcHoldingsUpdateModifyEventHandler; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -187,7 +184,7 @@ record = new Record() .withParsedRecord(parsedRecord); ReactiveClassicGenericQueryExecutor queryExecutor = postgresClientFactory.getQueryExecutor(TENANT_ID); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(queryExecutor, snapshot) .compose(v -> recordService.saveRecord(record, okapiHeaders)) .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate)) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java index 8726b2bf8..9a79e9587 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/QuickMarcKafkaHandlerTest.java @@ -1,12 +1,27 @@ package org.folio.services; +import static org.folio.dao.util.QMEventTypes.QM_ERROR; +import static org.folio.dao.util.QMEventTypes.QM_RECORD_UPDATED; +import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; +import static org.folio.kafka.KafkaTopicNameHelper.formatTopicName; +import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; + import com.fasterxml.jackson.databind.ObjectMapper; import 
io.vertx.core.Future; import io.vertx.core.json.Json; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.TimeUnit; import net.mguenther.kafka.junit.KeyValue; import net.mguenther.kafka.junit.ObserveKeyValues; import net.mguenther.kafka.junit.SendKeyValues; @@ -35,22 +50,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import static org.folio.dao.util.QMEventTypes.QM_ERROR; -import static org.folio.dao.util.QMEventTypes.QM_RECORD_UPDATED; -import static org.folio.dao.util.QMEventTypes.QM_SRS_MARC_RECORD_UPDATED; -import static org.folio.kafka.KafkaTopicNameHelper.formatTopicName; -import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; - @RunWith(VertxUnitRunner.class) public class QuickMarcKafkaHandlerTest extends AbstractLBServiceTest { @@ -96,7 +95,7 @@ record = new Record() .withRecordType(MARC_BIB) .withRawRecord(rawRecord) .withParsedRecord(parsedRecord); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onSuccess(ar -> async.complete()) @@ -120,7 +119,7 @@ public void shouldUpdateParsedRecordAndSendRecordUpdatedEvent(TestContext contex ParsedRecord parsedRecord = record.getParsedRecord(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordService.saveRecord(record, okapiHeaders); ParsedRecordDto parsedRecordDto = new ParsedRecordDto() diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java index 131ec2c5f..24f1e02ca 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/RecordServiceTest.java @@ -1,7 +1,7 @@ package org.folio.services; import static java.util.Comparator.comparing; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; +import static org.folio.dataimport.util.RestUtil.OKAPI_TENANT_HEADER; import static org.folio.rest.jooq.Tables.RECORDS_LB; import static org.folio.services.RecordServiceImpl.INDICATOR; import static org.folio.services.RecordServiceImpl.SUBFIELD_S; @@ -370,7 +370,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFrom999field(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -416,7 +416,7 @@ public void 
shouldFailDuringUpdateRecordGenerationIfIncomingMatchedIdNotEqualToM .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.updateRecordGeneration(matchedId, record, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); @@ -454,7 +454,7 @@ public void shouldFailDuringUpdateRecordGenerationIfRecordWithIdAsIncomingMatche .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.updateRecordGeneration(matchedId, record, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); @@ -510,7 +510,7 @@ public void shouldFailUpdateRecordGenerationIfDuplicateError(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record1, okapiHeaders).onComplete(record1Saved -> { if (record1Saved.failed()) { @@ -574,7 +574,7 @@ public void shouldUpdateRecordGeneration(TestContext context) { .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record1, okapiHeaders).onComplete(record1Saved -> { if (record1Saved.failed()) { @@ -632,7 +632,7 @@ public void shouldUpdateRecordGenerationByMatchId(TestContext context) { .withMetadata(mock.getMetadata()); var async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(recordToSave, okapiHeaders).onComplete(savedRecord -> { if (savedRecord.failed()) { @@ -716,7 +716,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context .withExternalIdsHolder(new ExternalIdsHolder().withInstanceId(UUID.randomUUID().toString())) .withMetadata(original.getMetadata()); Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -743,7 +743,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromRecordId(TestContext context public void shouldSaveEdifactRecordAndNotSet999Field(TestContext context) { Async async = context.async(); Record record = TestMocks.getRecords(Record.RecordType.EDIFACT); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(record, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -797,7 +797,7 @@ public void shouldSaveMarcBibRecordWithMatchedIdFromExistingSourceRecord(TestCon .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(externalIdsHolder) .withMetadata(original.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, 
TENANT_ID); recordService.saveRecord(record1, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -876,7 +876,7 @@ public void shouldFailToSaveRecord(TestContext context) { .withAdditionalInfo(valid.getAdditionalInfo()) .withExternalIdsHolder(valid.getExternalIdsHolder()) .withMetadata(valid.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(invalid, okapiHeaders).onComplete(save -> { context.assertTrue(save.failed()); String expected = "Invalid UUID string: " + fakeSnapshotId; @@ -909,7 +909,7 @@ public void shouldSaveMarcBibRecordsWithExpectedErrors(TestContext context) { public void shouldUpdateMarcRecord(TestContext context) { Async async = context.async(); Record original = TestMocks.getRecord(0); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders).onComplete(save -> { if (save.failed()) { context.fail(save.cause()); @@ -962,7 +962,7 @@ public void shouldUpdateRecordState(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) @@ -999,7 +999,7 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { .withAdditionalInfo(original.getAdditionalInfo()) .withExternalIdsHolder(original.getExternalIdsHolder()) .withMetadata(original.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) @@ -1028,7 +1028,7 @@ public void shouldUpdateMarcAuthorityRecordStateToDeleted(TestContext context) { public void shouldUpdateEdifactRecord(TestContext context) { Async async = context.async(); Record original = TestMocks.getEdifactRecord(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(original, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1074,7 +1074,7 @@ public void shouldUpdateEdifactRecord(TestContext context) { public void shouldFailToUpdateRecord(TestContext context) { Async async = context.async(); Record record = TestMocks.getRecord(0); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.getRecordById(record.getMatchedId(), TENANT_ID).onComplete(get -> { if (get.failed()) { @@ -1188,7 +1188,7 @@ public void shouldGetMarcBibSourceRecordByMatchedIdNotEqualToId(TestContext cont .withAdditionalInfo(expected.getAdditionalInfo()) .withExternalIdsHolder(expected.getExternalIdsHolder()) .withMetadata(expected.getMetadata()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders) .compose(ar -> recordService.updateSourceRecord(parsedRecordDto, snapshotId, okapiHeaders)) @@ -1277,7 +1277,7 @@ public void shouldGetFormattedMarcHoldingsRecord(TestContext context) { public void shouldGetFormattedEdifactRecord(TestContext context) { Async async = context.async(); 
Record expected = TestMocks.getEdifactRecord(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1300,7 +1300,7 @@ public void shouldGetFormattedDeletedRecord(TestContext context) { Async async = context.async(); Record expected = TestMocks.getMarcBibRecord(); expected.setState(State.DELETED); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1347,7 +1347,7 @@ public void shouldDeleteMarcAuthorityRecordsBySnapshotId(TestContext context) { public void shouldUpdateSourceRecord(TestContext context) { Async async = context.async(); Record expected = TestMocks.getRecord(0); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1414,7 +1414,7 @@ public void shouldThrowExceptionWhenSavedDuplicateRecord(TestContext context) { RecordCollection recordCollection = new RecordCollection() .withRecords(expected) .withTotalRecords(expected.size()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); List> futures = List.of(recordService.saveRecords(recordCollection, okapiHeaders), recordService.saveRecords(recordCollection, okapiHeaders)); @@ -1548,7 +1548,7 @@ private void streamRecordsBySnapshotId(TestContext context, String snapshotId, R private void getMarcRecordById(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1569,7 +1569,7 @@ private void getMarcRecordById(TestContext context, Record expected) { private void saveMarcRecord(TestContext context, Record expected, Record.RecordType marcBib) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1595,7 +1595,7 @@ private void saveMarcRecord(TestContext context, Record expected, Record.RecordT private void saveMarcRecordWithGenerationGreaterThanZero(TestContext context, Record expected, Record.RecordType marcBib) { Async async = context.async(); expected.setGeneration(1); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1628,7 +1628,7 @@ private void saveMarcRecords(TestContext context, Record.RecordType marcBib) { RecordCollection recordCollection = new RecordCollection() .withRecords(expected) .withTotalRecords(expected.size()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecords(recordCollection, okapiHeaders).onComplete(batch -> { if (batch.failed()) { context.fail(batch.cause()); @@ -1659,7 +1659,7 @@ private void saveMarcRecordsWithExpectedErrors(TestContext context, Record.Recor RecordCollection recordCollection = new RecordCollection() .withRecords(expected) .withTotalRecords(expected.size()); - var okapiHeaders = 
Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.saveRecords(recordCollection, okapiHeaders).onComplete(batch -> { if (batch.failed()) { context.fail(batch.cause()); @@ -1883,7 +1883,7 @@ private void getMarcSourceRecordsByListOfIdsThatAreDeleted(TestContext context, private void getMarcSourceRecordById(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -1938,7 +1938,7 @@ private void updateParsedMarcRecords(TestContext context, Record.RecordType reco List expected = updated.stream() .map(Record::getParsedRecord) .collect(Collectors.toList()); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordService.updateParsedRecords(recordCollection, okapiHeaders).onComplete(update -> { if (update.failed()) { context.fail(update.cause()); @@ -1967,7 +1967,7 @@ private void updateParsedMarcRecords(TestContext context, Record.RecordType reco private void updateParsedMarcRecordsAndGetOnlyActualRecord(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { context.assertTrue(save.succeeded()); @@ -1987,7 +1987,7 @@ private void updateParsedMarcRecordsAndGetOnlyActualRecord(TestContext context, private void getFormattedMarcRecord(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -2009,7 +2009,7 @@ private void getFormattedMarcRecord(TestContext context, Record expected) { private void updateSuppressFromDiscoveryForMarcRecord(TestContext context, Record expected) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(expected, okapiHeaders).onComplete(save -> { if (save.failed()) { @@ -2085,7 +2085,7 @@ private void deleteMarcRecordsBySnapshotId(TestContext context, String snapshotI } private CompositeFuture saveRecords(List records) { - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); return GenericCompositeFuture.all(records.stream() .map(record -> recordService.saveRecord(record, okapiHeaders)) .collect(Collectors.toList()) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java index 3a2a918fd..56160f14c 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/domainevent/RecordDomainEventPublisherUnitTest.java @@ -1,10 +1,10 @@ package org.folio.services.domainevent; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.okapi.common.XOkapiHeaders.TOKEN; -import static org.folio.okapi.common.XOkapiHeaders.URL; -import static 
org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_CREATED; -import static org.folio.rest.jaxrs.model.SourceRecordDomainEvent.EventType.SOURCE_RECORD_UPDATED; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TOKEN_HEADER; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER; +import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_CREATED; +import static org.folio.services.domainevent.SourceRecordDomainEventType.SOURCE_RECORD_UPDATED; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -35,7 +35,7 @@ public void publishRecordCreated_shouldSendNoEvents_ifDomainEventsAreNotEnabled( // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", false); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordCreated(aRecord, headers); @@ -49,7 +49,7 @@ public void publishRecordUpdated_shouldSendNoEvents_ifDomainEventsAreNotEnabled( // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", false); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordUpdated(aRecord, headers); @@ -63,7 +63,7 @@ public void publishRecordCreated_shouldSendNoEvents_ifRecordHasNoType() { // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordCreated(aRecord, headers); @@ -77,7 +77,7 @@ public void publishRecordUpdated_shouldSendNoEvents_ifRecordHasNoType() { // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record(); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordUpdated(aRecord, headers); @@ -91,7 +91,7 @@ public void publishRecordCreated_shouldSendNoEvents_ifRecordContainsNoParsedCont // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record().withRecordType(Record.RecordType.MARC_BIB); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordCreated(aRecord, headers); @@ -105,7 +105,7 @@ public void publishRecordUpdated_shouldSendNoEvents_ifRecordContainsNoParsedCont // given ReflectionTestUtils.setField(publisher, "domainEventsEnabled", true); var aRecord = new Record().withRecordType(Record.RecordType.MARC_BIB); - var headers = Map.of(TENANT, "TENANT", URL, "OKAPI_URL", TOKEN, "TOKEN"); + var headers = Map.of(OKAPI_TENANT_HEADER, "TENANT", OKAPI_URL_HEADER, "OKAPI_URL", OKAPI_TOKEN_HEADER, "TOKEN"); // when publisher.publishRecordUpdated(aRecord, headers); @@ -123,24 +123,18 @@ public void 
publishRecordCreated_shouldSendEvent_ifRecordIsValid() { .withId(UUID.randomUUID().toString()) .withRecordType(Record.RecordType.MARC_BIB) .withRawRecord(new RawRecord().withContent(rawContent)); - var tenantId = "TENANT"; + var tenantId = "OKAPI_TENANT_HEADER"; var okapiUrl = "OKAPI_URL"; var token = "TOKEN"; - var givenHeaders = Map.of(TENANT, tenantId, URL, okapiUrl, TOKEN, token); + var givenHeaders = Map.of(OKAPI_TENANT_HEADER, tenantId, OKAPI_URL_HEADER, okapiUrl, OKAPI_TOKEN_HEADER, token); var expectedHeaders = getKafkaHeaders(okapiUrl, tenantId, token, aRecord); - var eventType = SOURCE_RECORD_CREATED.value(); - var expectedPayload = "{" - + "\"id\":\"" + aRecord.getId() + "\"" - + ",\"eventType\":\"" + eventType + "\"" - + ",\"eventPayload\":\"" + rawContent + "\"" - + "}"; + var eventType = SOURCE_RECORD_CREATED.name(); // when publisher.publishRecordCreated(aRecord, givenHeaders); // then - verify(kafkaSender).sendEventToKafka(tenantId, expectedPayload, eventType, expectedHeaders, - aRecord.getId()); + verify(kafkaSender).sendEventToKafka(tenantId, rawContent, eventType, expectedHeaders, aRecord.getId()); } @Test @@ -155,28 +149,22 @@ public void publishRecordUpdated_shouldSendEvent_ifRecordIsValid() { var tenantId = "TENANT"; var okapiUrl = "OKAPI_URL"; var token = "TOKEN"; - var givenHeaders = Map.of(TENANT, tenantId, URL, okapiUrl, TOKEN, token); + var givenHeaders = Map.of(OKAPI_TENANT_HEADER, tenantId, OKAPI_URL_HEADER, okapiUrl, OKAPI_TOKEN_HEADER, token); var expectedHeaders = getKafkaHeaders(okapiUrl, tenantId, token, aRecord); - var eventType = SOURCE_RECORD_UPDATED.value(); - var expectedPayload = "{" - + "\"id\":\"" + aRecord.getId() + "\"" - + ",\"eventType\":\"" + eventType + "\"" - + ",\"eventPayload\":\"" + rawContent + "\"" - + "}"; + var eventType = SOURCE_RECORD_UPDATED.name(); // when publisher.publishRecordUpdated(aRecord, givenHeaders); // then - verify(kafkaSender).sendEventToKafka(tenantId, expectedPayload, eventType, expectedHeaders, - aRecord.getId()); + verify(kafkaSender).sendEventToKafka(tenantId, rawContent, eventType, expectedHeaders, aRecord.getId()); } private List getKafkaHeaders(String okapiUrl, String tenantId, String token, Record aRecord) { return List.of( - KafkaHeader.header(URL, okapiUrl), - KafkaHeader.header(TENANT, tenantId), - KafkaHeader.header(TOKEN, token), + KafkaHeader.header(OKAPI_URL_HEADER, okapiUrl), + KafkaHeader.header(OKAPI_TENANT_HEADER, tenantId), + KafkaHeader.header(OKAPI_TOKEN_HEADER, token), KafkaHeader.header("folio.srs.recordType", aRecord.getRecordType().value()) ); } diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java index f2fe2dd79..03330d0ba 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/AuthorityPostProcessingEventHandlerTest.java @@ -1,25 +1,16 @@ package org.folio.services.handlers; import static com.github.tomakehurst.wiremock.client.WireMock.get; - -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING; import static
org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_AUTHORITY_UPDATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_CREATED; import static org.folio.rest.jaxrs.model.EntityType.AUTHORITY; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_AUTHORITY; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; -import java.io.IOException; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.matching.RegexPattern; @@ -31,14 +22,13 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.rest.jaxrs.model.Metadata; -import org.folio.services.RecordService; -import org.folio.services.SnapshotService; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; @@ -49,11 +39,18 @@ import org.folio.rest.jaxrs.model.ExternalIdsHolder; import org.folio.rest.jaxrs.model.MappingMetadataDto; import org.folio.rest.jaxrs.model.MarcFieldProtectionSetting; +import org.folio.rest.jaxrs.model.Metadata; import org.folio.rest.jaxrs.model.ParsedRecord; import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper; import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; +import org.folio.services.RecordService; +import org.folio.services.SnapshotService; import org.folio.services.util.AdditionalFieldsUtil; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; @RunWith(VertxUnitRunner.class) public class AuthorityPostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest { @@ -88,7 +85,7 @@ public void shouldSetAuthorityIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -246,7 +243,7 @@ public void shouldSetAuthorityIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_AUTHORITY_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> 
handler.handle(dataImportEventPayload) @@ -433,7 +430,7 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java index 135149f41..a2a2b1dc1 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/HoldingsPostProcessingEventHandlerTest.java @@ -1,7 +1,6 @@ package org.folio.services.handlers; import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_HOLDING_RECORD_CREATED; import static org.folio.rest.jaxrs.model.EntityType.HOLDINGS; @@ -9,15 +8,9 @@ import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_HOLDING; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.matching.RegexPattern; @@ -29,26 +22,31 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import org.folio.rest.jaxrs.model.MappingMetadataDto; -import org.folio.services.RecordService; -import org.folio.services.SnapshotService; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; import org.folio.TestUtil; import org.folio.kafka.KafkaConfig; import org.folio.processing.mapping.defaultmapper.processor.parameters.MappingParameters; +import org.folio.rest.jaxrs.model.MappingMetadataDto; import org.folio.rest.jaxrs.model.MarcFieldProtectionSetting; import org.folio.rest.jaxrs.model.ParsedRecord; import org.folio.rest.jaxrs.model.ProfileSnapshotWrapper; import org.folio.rest.jaxrs.model.RawRecord; import org.folio.rest.jaxrs.model.Record; +import org.folio.services.RecordService; +import org.folio.services.SnapshotService; import org.folio.services.util.AdditionalFieldsUtil; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; 
+import org.junit.runner.RunWith; @RunWith(VertxUnitRunner.class) public class HoldingsPostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest { @@ -83,7 +81,7 @@ public void shouldSetHoldingsIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -197,7 +195,7 @@ public void shouldSetHoldingsIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -368,7 +366,7 @@ public void shouldSetHoldingsHridToParsedRecordWhenContentHasNotField001(TestCon createDataImportEventPayload(payloadContext, DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -441,7 +439,7 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java index 7ce917c14..6d8860fbc 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/services/handlers/InstancePostProcessingEventHandlerTest.java @@ -1,5 +1,25 @@ package org.folio.services.handlers; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.EntityType.INSTANCE; +import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; +import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; +import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static 
org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.services.handlers.InstancePostProcessingEventHandler.POST_PROCESSING_RESULT_EVENT; +import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.matching.RegexPattern; @@ -12,7 +32,13 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.MappingProfile; @@ -42,33 +68,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static org.folio.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_ORDER_CREATED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.EntityType.INSTANCE; -import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; -import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; -import static org.folio.services.handlers.InstancePostProcessingEventHandler.POST_PROCESSING_RESULT_EVENT; -import static org.folio.services.util.AdditionalFieldsUtil.TAG_005; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - @RunWith(VertxUnitRunner.class) public class InstancePostProcessingEventHandlerTest extends AbstractPostProcessingEventHandlerTest { @@ -114,7 +113,7 @@ public void shouldSetInstanceIdToRecord(TestContext context) { createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -418,7 +417,7 @@ public void shouldSaveIncomingRecordAndMarkExistingAsOldWhenIncomingRecordHasSam DataImportEventPayload 
dataImportEventPayload = createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordDao.saveRecord(existingRecord, okapiHeaders) .compose(v -> Future.fromCompletionStage(handler.handle(dataImportEventPayload))); @@ -472,7 +471,7 @@ public void checkGeneration035FiledAfterUpdateMarcBib(TestContext context) throw DataImportEventPayload dataImportEventPayload = createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_UPDATED_READY_FOR_POST_PROCESSING); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordDao.saveRecord(existingRecord, okapiHeaders) .compose(v -> Future.fromCompletionStage(handler.handle(dataImportEventPayload))); @@ -516,7 +515,7 @@ public void shouldSetInstanceIdToParsedRecordWhenContentHasField999(TestContext createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -688,7 +687,7 @@ public void shouldSetInstanceHridToParsedRecordWhenContentHasNotField001(TestCon createDataImportEventPayload(payloadContext, DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(rec -> handler.handle(dataImportEventPayload) @@ -761,7 +760,7 @@ public void shouldReturnFailedFutureWhenParsedRecordHasNoFields(TestContext cont .withToken(TOKEN); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) @@ -894,7 +893,7 @@ public void shouldFillEventPayloadWithPostProcessingFlagIfOrderEventExists(TestC dataImportEventPayload.getContext().put(POST_PROCESSING_RESULT_EVENT, DI_ORDER_CREATED_READY_FOR_POST_PROCESSING.value()); CompletableFuture future = new CompletableFuture<>(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); recordDao.saveRecord(record, okapiHeaders) .onFailure(future::completeExceptionally) .onSuccess(record -> handler.handle(dataImportEventPayload) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java index 34c902477..30c7b3293 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/MarcIndexersVersionDeletionVerticleTest.java @@ -1,10 +1,19 @@ package org.folio.verticle; +import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; +import static org.folio.rest.jaxrs.model.Record.State.OLD; 
+import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.jooq.impl.DSL.field; +import static org.jooq.impl.DSL.name; +import static org.jooq.impl.DSL.table; + import io.vertx.core.Future; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; import java.util.Map; +import java.util.UUID; import org.folio.TestMocks; import org.folio.dao.RecordDao; import org.folio.dao.RecordDaoImpl; @@ -23,19 +32,9 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; - -import java.util.UUID; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static org.folio.rest.jaxrs.model.Record.State.ACTUAL; -import static org.folio.rest.jaxrs.model.Record.State.OLD; -import static org.folio.rest.jooq.Tables.MARC_RECORDS_TRACKING; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.name; -import static org.jooq.impl.DSL.table; - @RunWith(VertxUnitRunner.class) public class MarcIndexersVersionDeletionVerticleTest extends AbstractLBServiceTest { @@ -73,7 +72,7 @@ public void setUp(TestContext context) { .withRawRecord(TestMocks.getRecord(0).getRawRecord().withId(recordId)) .withParsedRecord(TestMocks.getRecord(0).getParsedRecord().withId(recordId)); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); SnapshotDaoUtil.save(postgresClientFactory.getQueryExecutor(TENANT_ID), snapshot) .compose(savedSnapshot -> recordService.saveRecord(record, okapiHeaders)) .onComplete(save -> { @@ -99,7 +98,7 @@ public void cleanUp(TestContext context) { public void shouldDeleteOldVersionsOfMarcIndexers(TestContext context) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); // performs record update in the DB that leads to new indexers creation with incremented version // so that previous existing indexers become old and should be deleted Future future = recordService.updateRecord(record, okapiHeaders) @@ -119,7 +118,7 @@ public void shouldDeleteOldVersionsOfMarcIndexers(TestContext context) { public void shouldDeleteMarcIndexersRelatedToRecordInOldState(TestContext context) { Async async = context.async(); - var okapiHeaders = Map.of(TENANT, TENANT_ID); + var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID); Future future = recordService.updateRecord(record.withState(OLD), okapiHeaders) .compose(v -> existMarcIndexersByRecordId(record.getId())) .onSuccess(context::assertTrue) diff --git a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java index c9f7556b0..9f266b3d0 100644 --- a/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java +++ b/mod-source-record-storage-server/src/test/java/org/folio/verticle/consumers/DataImportConsumersVerticleTest.java @@ -3,35 +3,25 @@ import static com.github.tomakehurst.wiremock.client.WireMock.get; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Collections.singletonList; -import static org.folio.ActionProfile.Action.UPDATE; -import static org.folio.okapi.common.XOkapiHeaders.TENANT; -import static 
org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; -import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; -import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; -import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; -import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate; -import static org.junit.Assert.assertEquals; - import static org.folio.ActionProfile.Action.DELETE; +import static org.folio.ActionProfile.Action.UPDATE; import static org.folio.consumers.DataImportKafkaHandler.PROFILE_SNAPSHOT_ID_KEY; import static org.folio.consumers.ParsedRecordChunksKafkaHandler.JOB_EXECUTION_ID_HEADER; import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_MARC_FOR_DELETE_RECEIVED; import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_AUTHORITY_RECORD_DELETED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_CREATED; +import static org.folio.rest.jaxrs.model.DataImportEventTypes.DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING; +import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC; import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE; import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE; +import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE; import static org.folio.rest.jaxrs.model.Record.RecordType.MARC_BIB; +import static org.folio.rest.util.OkapiConnectionParams.OKAPI_TENANT_HEADER; +import static org.folio.services.MarcBibUpdateModifyEventHandlerTest.getParsedContentWithoutLeaderAndDate; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.client.WireMock; import com.github.tomakehurst.wiremock.common.Slf4jNotifier; @@ -44,15 +34,17 @@ import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.TimeUnit; import net.mguenther.kafka.junit.KeyValue; import net.mguenther.kafka.junit.ObserveKeyValues; import net.mguenther.kafka.junit.SendKeyValues; -import org.folio.services.domainevent.RecordDomainEventPublisher; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; - import org.folio.ActionProfile; import org.folio.JobProfile; import org.folio.MappingProfile; @@ -77,6 +69,11 @@ import org.folio.rest.jaxrs.model.Record; import org.folio.rest.jaxrs.model.Snapshot; import org.folio.services.AbstractLBServiceTest; +import org.folio.services.domainevent.RecordDomainEventPublisher; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -150,7 +147,7 @@ record = new Record() ReactiveClassicGenericQueryExecutor 
     RecordDaoImpl recordDao = new RecordDaoImpl(postgresClientFactory, recordDomainEventPublisher);

-    var okapiHeaders = Map.of(TENANT, TENANT_ID);
+    var okapiHeaders = Map.of(OKAPI_TENANT_HEADER, TENANT_ID);
     SnapshotDaoUtil.save(queryExecutor, snapshot)
       .compose(v -> recordDao.saveRecord(record, okapiHeaders))
       .compose(v -> SnapshotDaoUtil.save(queryExecutor, snapshotForRecordUpdate))
diff --git a/ramls/source-record-domain-event.json b/ramls/source-record-domain-event.json
deleted file mode 100644
index 24fa5f1e4..000000000
--- a/ramls/source-record-domain-event.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-04/schema#",
-  "description": "Source record domain event data model",
-  "javaType": "org.folio.rest.jaxrs.model.SourceRecordDomainEvent",
-  "type": "object",
-  "additionalProperties": false,
-  "properties": {
-    "id": {
-      "description": "UUID",
-      "type": "string",
-      "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$"
-    },
-    "eventType": {
-      "type": "string",
-      "enum": ["SOURCE_RECORD_CREATED", "SOURCE_RECORD_UPDATED"],
-      "description": "Source record domain event type"
-    },
-    "sourceRecordDomainEventMetadata": {
-      "description": "Event metadata",
-      "type": "object",
-      "additionalProperties": false,
-      "properties": {
-        "eventTTL": {
-          "description": "Time-to-live (TTL) for event in minutes",
-          "type": "integer"
-        },
-        "correlationId": {
-          "description": "Id to track related events, can be a meaningful string or a UUID",
-          "type": "string"
-        },
-        "originalEventId": {
-          "description": "Id of the event that started the sequence of related events",
-          "type": "string",
-          "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$"
-        },
-        "publisherCallback": {
-          "description": "Allows a publisher to provide a callback endpoint or an error Event Type to be notified that despite the fact that there are subscribers for such an event type no one has received the event within the specified period of time",
-          "type": "object",
-          "properties": {
-            "endpoint": {
-              "description": "Callback endpoint",
-              "type": "string"
-            },
-            "eventType": {
-              "description": "Error Event Type",
-              "type": "string"
-            }
-          }
-        },
-        "createdDate": {
-          "description": "Timestamp when event was created",
-          "type": "string",
-          "format": "date-time"
-        },
-        "publishedDate": {
-          "description": "Timestamp when event was initially published to the underlying topic",
-          "type": "string",
-          "format": "date-time"
-        },
-        "createdBy": {
-          "description": "Username of the user whose action caused an event",
-          "type": "string"
-        },
-        "publishedBy": {
-          "description": "Name and version of the module that published an event",
-          "type": "string"
-        }
-      },
-      "required": [
-        "eventTTL",
-        "publishedBy"
-      ]
-    },
-    "eventPayload": {
-      "type": "string",
-      "description": "The source record JSON string"
-    },
-    "tenant": {
-      "description": "Tenant id",
-      "type": "string"
-    },
-    "ts": {
-      "description": "Message timestamp",
-      "type": "string",
-      "format": "date-time"
-    }
-  },
-  "excludedFromEqualsAndHashCode": [
-    "eventMetadata",
-    "tenant",
-    "ts"
-  ],
-  "required": [
-    "id",
-    "eventType"
-  ]
-}
diff --git a/ramls/source-record-storage-records.raml b/ramls/source-record-storage-records.raml
index 964125f86..1d7a99083 100644
--- a/ramls/source-record-storage-records.raml
+++ b/ramls/source-record-storage-records.raml
@@ -29,7 +29,6 @@
 types:
   linkUpdateReport: !include raml-storage/schemas/mod-source-record-storage/linkUpdateReport.json
   recordMatchingDto: !include raml-storage/schemas/dto/recordMatchingRqDto.json
   recordsIdentifiersCollection: !include raml-storage/schemas/dto/recordsIdentifiersCollection.json
-  sourceRecordDomainEvent: !include source-record-domain-event.json
 traits:
   validate: !include raml-storage/raml-util/traits/validation.raml