From 4691cb845bede7baa61a25ac189d681a34fa06d5 Mon Sep 17 00:00:00 2001 From: Ruslan Lavrov Date: Fri, 7 Mar 2025 22:42:07 +0200 Subject: [PATCH 01/19] Updated dependencies --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 27b0a00b..798d288d 100644 --- a/pom.xml +++ b/pom.xml @@ -63,7 +63,7 @@ org.folio folio-kafka-wrapper - 3.3.0-SNAPSHOT + 3.3.0 org.folio From b7e1ceb2e8d7bcc6195ffdba5671ef75bd5974eb Mon Sep 17 00:00:00 2001 From: Ruslan Lavrov Date: Fri, 7 Mar 2025 23:18:50 +0200 Subject: [PATCH 02/19] Update NEWS --- NEWS.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index 0bbc775d..97535547 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,11 +1,14 @@ -## 202X-XX-XX 4.4.0-SNAPSHOT +## 2025-03-07 v4.4.0 * [MODDICORE-433](https://folio-org.atlassian.net/browse/MODDICORE-433) Add userId to event header and allow to send events with null token * [MODDICORE-432](https://folio-org.atlassian.net/browse/MODDICORE-432) Vendor details are empty with code that contains brackets during order creation * [MODINVSTOR-1342](https://folio-org.atlassian.net/browse/MODINVSTOR-1342) Add "deleted" field to Instance schema +* [MODDICORE-439](https://folio-org.atlassian.net/browse/MODDICORE-439) Add job execution id to kafka headers +* [MODINV-1140](https://folio-org.atlassian.net/browse/MODINV-1140) Create set_deleted normalization function to account for Leader 05 value * [MODDICORE-438](https://folio-org.atlassian.net/browse/MODDICORE-438) Add MARC fields 147/447/547 to authority schema * [MODDICORE-440](https://folio-org.atlassian.net/browse/MODDICORE-440) Mode of Issuance not being set with MARC to Instance Mapping * [MODDICORE-443](https://folio-org.atlassian.net/browse/MODDICORE-443) Add MARC fields 180/480/580 to authority schema * [MODDICORE-442](https://folio-org.atlassian.net/browse/MODDICORE-442) Update to data-import-processing-core Java 21 +* [MODDICORE-428](https://folio-org.atlassian.net/browse/MODDICORE-428) Error appeared when in donor field for order field mapping profile user set 2 incorrect values * [MODDICORE-367](https://folio-org.atlassian.net/browse/MODDICORE-367) Add MARC fields 1XX/4XX/5XX to authority schema ## 2024-10-28 v4.3.0 From e4632d75a1f99eaecd9a84de6edc7d3cb541f2ad Mon Sep 17 00:00:00 2001 From: Ruslan Lavrov Date: Fri, 7 Mar 2025 23:22:52 +0200 Subject: [PATCH 03/19] [maven-release-plugin] prepare release v4.4.0 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 798d288d..2ce028c5 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.0-SNAPSHOT + 4.4.0 jar data-import-processing-core @@ -195,7 +195,7 @@ https://github.com/folio-org/data-import-processing-core scm:git:git@github.com:folio-org/data-import-processing-core.git scm:git:git@github.com:folio-org/data-import-processing-core.git - HEAD + v4.4.0 From 42595d59361a804f0b81a5e5e0827dfdd6b8b6a9 Mon Sep 17 00:00:00 2001 From: Kateryna_Senchenko Date: Thu, 13 Mar 2025 11:07:54 +0200 Subject: [PATCH 04/19] release branch --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 2ce028c5..47030d11 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.0 + 4.4.1-SNAPSHOT jar data-import-processing-core From 29a31c19d51d5acb99f0db85a5ff45d4c2aa1d4d Mon Sep 17 00:00:00 2001 From: Javokhir Abdullaev <101543142+JavokhirAbdullayev@users.noreply.github.com> Date: 
Fri, 28 Mar 2025 18:21:18 +0500 Subject: [PATCH 05/19] MODMARCMIG-50 MARC Migration failing (#395) * MODMARCMIG-50 MARC Migration failing * fix * fix code smell (cherry picked from commit fc513686ea572990cdbdd23b3dc99e2e0e6225b7) --- .../functions/NormalizationFunction.java | 6 ++++ .../mapping/InstanceMappingTest.java | 33 +++++++++++++++++++ .../mapping/instance/empty_008_field.mrc | 1 + 3 files changed, 40 insertions(+) create mode 100644 src/test/resources/org/folio/processing/mapping/instance/empty_008_field.mrc diff --git a/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java b/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java index 3c3e4ebd..8b66907c 100644 --- a/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java +++ b/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java @@ -58,6 +58,9 @@ public enum NormalizationFunction implements Function subfield @Override public String apply(RuleExecutionContext context) { String subFieldValue = context.getSubFieldValue(); + if (subFieldValue == null || subFieldValue.isEmpty()) { + return EMPTY_STRING; + } char sixthChar = subFieldValue.charAt(6); List dateTypes = context.getMappingParameters().getInstanceDateTypes(); if (dateTypes == null || dateTypes.isEmpty()) { diff --git a/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java b/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java index ebb42904..15516c8d 100644 --- a/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java +++ b/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java @@ -70,6 +70,7 @@ public class InstanceMappingTest { private static final String BIB_WITH_REPEATED_600_SUBFIELD_AND_EMPTY_INDICATOR = "src/test/resources/org/folio/processing/mapping/instance/6xx_subjects_without_indicators.mrc"; private static final String BIB_WITH_008_DATE = "src/test/resources/org/folio/processing/mapping/instance/008_date.mrc"; private static final String BIB_WITHOUT_008_DATE = "src/test/resources/org/folio/processing/mapping/instance/008_empty_date.mrc"; + private static final String BIB_WITH_EMPTY_008_FIELD = "src/test/resources/org/folio/processing/mapping/instance/empty_008_field.mrc"; private static final String BIB_WITH_DELETED_LEADER = "src/test/resources/org/folio/processing/mapping/instance/deleted_leader.mrc"; private static final String BIB_WITH_RESOURCE_TYPE_SUBFIELD_VALUE = "src/test/resources/org/folio/processing/mapping/instance/336_subfields_mapping.mrc"; private static final String BIB_WITH_720_FIELDS = "src/test/resources/org/folio/processing/mapping/instance/720_fields_samples.mrc"; @@ -691,6 +692,38 @@ public void testMarcToInstanceWithEmpty008Date() throws IOException { assertEquals("77a09c3c-37bd-4ad3-aae4-9d86fc1b33d8", mappedInstances.get(0).getDates().getDateTypeId()); } + @Test + public void testMarcToInstanceWithEmpty008Field() throws IOException { + MarcReader reader = new MarcStreamReader(new ByteArrayInputStream(TestUtil.readFileFromPath( + BIB_WITH_EMPTY_008_FIELD).getBytes(StandardCharsets.UTF_8))); + JsonObject mappingRules = new JsonObject(TestUtil.readFileFromPath(DEFAULT_MAPPING_RULES_PATH)); + String rawInstanceDateTypes = TestUtil.readFileFromPath(DEFAULT_INSTANCE_DATE_TYPES_PATH); + List instanceDateTypes = List.of(new ObjectMapper().readValue(rawInstanceDateTypes, InstanceDateType[].class)); + 
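+    // Guard under test (added in NormalizationFunction above): when the 008 control field
+    // value is missing, apply() now returns an empty string instead of letting charAt(6)
+    // throw StringIndexOutOfBoundsException, so the mapped Instance simply carries no dates.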
+ + ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); + List mappedInstances = new ArrayList<>(); + while (reader.hasNext()) { + ByteArrayOutputStream os = new ByteArrayOutputStream(); + MarcJsonWriter writer = new MarcJsonWriter(os); + Record targetRecord = reader.next(); + writer.write(targetRecord); + JsonObject marc = new JsonObject(os.toString()); + Instance instance = mapper.mapRecord(marc, new MappingParameters().withInstanceDateTypes(instanceDateTypes), mappingRules); + mappedInstances.add(instance); + Validator validator = factory.getValidator(); + Set> violations = validator.validate(instance); + assertTrue(violations.isEmpty()); + } + assertFalse(mappedInstances.isEmpty()); + assertEquals(1, mappedInstances.size()); + + Instance mappedInstance = mappedInstances.get(0); + assertNotNull(mappedInstance.getId()); + + assertNull(mappedInstances.get(0).getDates()); + } + @Test public void testMarcToInstanceWithRepeatableSubjectsMappedWithTypeButWithoutIndicators() throws IOException { final String FIRST_SUBJECT_TYPE_ID = "d6488f88-1e74-40ce-81b5-b19a928ff5b1"; diff --git a/src/test/resources/org/folio/processing/mapping/instance/empty_008_field.mrc b/src/test/resources/org/folio/processing/mapping/instance/empty_008_field.mrc new file mode 100644 index 00000000..41c0815a --- /dev/null +++ b/src/test/resources/org/folio/processing/mapping/instance/empty_008_field.mrc @@ -0,0 +1 @@ +01300cam 2200313Ma 4500001001200000003000600012005001700018006001900035007001500054008000100069040009800070050002300168072002500191082001800216100002400234245006500258260003100323300005800354336002600412337002600438338003600464490009600500500004000596504006900636588002600705710002300731776013400754830009800888ocm85820197OCoLC20160514041104.1m o d cr ||||||||||| aDG1bengepncDG1dDG1dOCLCQdE7BdOCLCFdOCLCOdOCLCQdYDXCPdN$TdIDEBKdOCLCQdCOOdOCLCQ 4aQA278.5b.J27 1991 7aMATx0290202bisacsh04a519.5/3542201 aJackson, J. Edward.12aA user's guide to principal components /cJ. Edward Jackson. aNew York :bWiley,c?1991. a1 online resource (xvii, 569 pages) :billustrations. atextbtxt2rdacontent acomputerbc2rdamedia aonline resourcebcr2rdacarrier1 aWiley series in probability and mathematical statistics. Applied probability and statistics a"A Wiley-Interscience publication." aIncludes bibliographical references (pages 497-550) and indexes.0 aPrint version record.2 aJohn Wiley & Sons.08iPrint version:aJackson, J. Edward.tUser's guide to principal components.dNew York : Wiley, ?1991z0471622672w(DLC) 90028108 0aWiley series in probability and mathematical statistics.pApplied probability and statistics. 
\ No newline at end of file From d42845b788bf0e58b9cc90c44b0dce76b0b03b30 Mon Sep 17 00:00:00 2001 From: Javokhir Abdullaev <101543142+JavokhirAbdullayev@users.noreply.github.com> Date: Fri, 4 Apr 2025 14:03:13 +0500 Subject: [PATCH 06/19] Skip mapping of invalid 008 field (#396) * code review * change testing marc field * change testing marc field * change testing marc field (cherry picked from commit 5f35c923bc6451f56cb622987eb794c5abf60778) --- .../processor/functions/NormalizationFunction.java | 6 +++--- .../org/folio/processing/mapping/InstanceMappingTest.java | 4 ++-- .../folio/processing/mapping/instance/008_invalid_field.mrc | 1 + 3 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 src/test/resources/org/folio/processing/mapping/instance/008_invalid_field.mrc diff --git a/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java b/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java index 8b66907c..9a8618d8 100644 --- a/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java +++ b/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java @@ -58,7 +58,7 @@ public enum NormalizationFunction implements Function subfield @Override public String apply(RuleExecutionContext context) { String subFieldValue = context.getSubFieldValue(); - if (subFieldValue == null || subFieldValue.isEmpty()) { + if (subFieldValue == null || subFieldValue.length() < 7) { return EMPTY_STRING; } - char sixthChar = subFieldValue.charAt(6); List dateTypes = context.getMappingParameters().getInstanceDateTypes(); if (dateTypes == null || dateTypes.isEmpty()) { return StringUtils.EMPTY; } + char sixthChar = subFieldValue.charAt(6); String defaultDateTypeId = findDateTypeId(dateTypes, StringUtils.EMPTY); return matchInstanceDateTypeViaCode(sixthChar, dateTypes, defaultDateTypeId); } diff --git a/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java b/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java index 15516c8d..02ebe8ee 100644 --- a/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java +++ b/src/test/java/org/folio/processing/mapping/InstanceMappingTest.java @@ -70,7 +70,7 @@ public class InstanceMappingTest { private static final String BIB_WITH_REPEATED_600_SUBFIELD_AND_EMPTY_INDICATOR = "src/test/resources/org/folio/processing/mapping/instance/6xx_subjects_without_indicators.mrc"; private static final String BIB_WITH_008_DATE = "src/test/resources/org/folio/processing/mapping/instance/008_date.mrc"; private static final String BIB_WITHOUT_008_DATE = "src/test/resources/org/folio/processing/mapping/instance/008_empty_date.mrc"; - private static final String BIB_WITH_EMPTY_008_FIELD = "src/test/resources/org/folio/processing/mapping/instance/empty_008_field.mrc"; + private static final String BIB_WITH_INVALID_008_FIELD = "src/test/resources/org/folio/processing/mapping/instance/008_invalid_field.mrc"; private static final String BIB_WITH_DELETED_LEADER = "src/test/resources/org/folio/processing/mapping/instance/deleted_leader.mrc"; private static final String BIB_WITH_RESOURCE_TYPE_SUBFIELD_VALUE = "src/test/resources/org/folio/processing/mapping/instance/336_subfields_mapping.mrc"; private static final String BIB_WITH_720_FIELDS = "src/test/resources/org/folio/processing/mapping/instance/720_fields_samples.mrc"; @@ -695,7 +695,7 @@ public void 
testMarcToInstanceWithEmpty008Date() throws IOException { @Test public void testMarcToInstanceWithEmpty008Field() throws IOException { MarcReader reader = new MarcStreamReader(new ByteArrayInputStream(TestUtil.readFileFromPath( - BIB_WITH_EMPTY_008_FIELD).getBytes(StandardCharsets.UTF_8))); + BIB_WITH_INVALID_008_FIELD).getBytes(StandardCharsets.UTF_8))); JsonObject mappingRules = new JsonObject(TestUtil.readFileFromPath(DEFAULT_MAPPING_RULES_PATH)); String rawInstanceDateTypes = TestUtil.readFileFromPath(DEFAULT_INSTANCE_DATE_TYPES_PATH); List instanceDateTypes = List.of(new ObjectMapper().readValue(rawInstanceDateTypes, InstanceDateType[].class)); diff --git a/src/test/resources/org/folio/processing/mapping/instance/008_invalid_field.mrc b/src/test/resources/org/folio/processing/mapping/instance/008_invalid_field.mrc new file mode 100644 index 00000000..f5c94826 --- /dev/null +++ b/src/test/resources/org/folio/processing/mapping/instance/008_invalid_field.mrc @@ -0,0 +1 @@ +01304cam 2200313Ma 4500001001200000003000600012005001700018006001900035007001500054008000500069040009800074050002300172072002500195082001800220100002400238245006500262260003100327300005800358336002600416337002600442338003600468490009600504500004000600504006900640588002600709710002300735776013400758830009800892ocm85820197OCoLC20160514041104.1m o d cr |||||||||||None aDG1bengepncDG1dDG1dOCLCQdE7BdOCLCFdOCLCOdOCLCQdYDXCPdN$TdIDEBKdOCLCQdCOOdOCLCQ 4aQA278.5b.J27 1991 7aMATx0290202bisacsh04a519.5/3542201 aJackson, J. Edward.12aA user's guide to principal components /cJ. Edward Jackson. aNew York :bWiley,c?1991. a1 online resource (xvii, 569 pages) :billustrations. atextbtxt2rdacontent acomputerbc2rdamedia aonline resourcebcr2rdacarrier1 aWiley series in probability and mathematical statistics. Applied probability and statistics a"A Wiley-Interscience publication." aIncludes bibliographical references (pages 497-550) and indexes.0 aPrint version record.2 aJohn Wiley & Sons.08iPrint version:aJackson, J. Edward.tUser's guide to principal components.dNew York : Wiley, ?1991z0471622672w(DLC) 90028108 0aWiley series in probability and mathematical statistics.pApplied probability and statistics. 
\ No newline at end of file From 9ddbc87f5dea8488b2bd96efe2893fadf03cbf3a Mon Sep 17 00:00:00 2001 From: Aliaksandr Fedasiuk Date: Thu, 17 Apr 2025 18:40:09 +0500 Subject: [PATCH 07/19] MODDATAIMP-1189: Fix update of MARC bibliographic fails with "NullPointerException" (#397) * MODDATAIMP-1189: Fix update of MARC bibliographic fails with "NullPointerException" (cherry picked from commit 3d32588fea16bdcd36f4af56f5bfd15705677a36) --- .../folio/processing/events/EventManager.java | 18 ++-- .../processor/EventProcessorImpl.java | 4 +- .../writer/marc/MarcRecordModifier.java | 84 +++++++++++++------ .../processing/events/AbstractRestTest.java | 8 ++ .../events/EventManagerUnitTest.java | 35 ++++++-- .../writer/marc/MarcRecordModifierTest.java | 38 +++++++++ src/test/resources/log4j2.properties | 18 ++++ 7 files changed, 165 insertions(+), 40 deletions(-) create mode 100644 src/test/resources/log4j2.properties diff --git a/src/main/java/org/folio/processing/events/EventManager.java b/src/main/java/org/folio/processing/events/EventManager.java index 25170df1..de88153a 100644 --- a/src/main/java/org/folio/processing/events/EventManager.java +++ b/src/main/java/org/folio/processing/events/EventManager.java @@ -41,12 +41,12 @@ */ public final class EventManager { + private static final Logger LOGGER = LogManager.getLogger(EventManager.class); + public static final String POST_PROCESSING_INDICATOR = "POST_PROCESSING"; public static final String POST_PROCESSING_RESULT_EVENT_KEY = "POST_PROCESSING_RESULT_EVENT"; public static final String OL_ACCUMULATIVE_RESULTS = "OL_ACCUMULATIVE_RESULTS"; - private static final Logger LOGGER = LogManager.getLogger(EventManager.class); - private static final EventProcessor eventProcessor = new EventProcessorImpl(); private static final List eventPublisher = new ArrayList<>(Arrays.asList(new RestEventPublisher())); @@ -67,6 +67,7 @@ static List getEventPublishers() { * @return future with event payload after handling */ public static CompletableFuture handleEvent(DataImportEventPayload eventPayload, ProfileSnapshotWrapper jobProfileSnapshot) { + LOGGER.trace("handleEvent:: Event type: {}, event payload: {}", eventPayload.getEventType(), eventPayload); CompletableFuture future = new CompletableFuture<>(); try { setCurrentNodeIfRoot(eventPayload, jobProfileSnapshot); @@ -89,21 +90,24 @@ public static CompletableFuture handleEvent(DataImportEv } private static void setCurrentNodeIfRoot(DataImportEventPayload eventPayload, ProfileSnapshotWrapper jobProfileSnapshot) { + LOGGER.trace("setCurrentNodeIfRoot:: Event type: {}, event payload: {}", eventPayload.getEventType(), eventPayload); if (eventPayload.getCurrentNode() == null || eventPayload.getCurrentNode().getContentType() == JOB_PROFILE) { List jobProfileChildren = jobProfileSnapshot.getChildSnapshotWrappers(); if (isNotEmpty(jobProfileChildren)) { - eventPayload.setCurrentNode(jobProfileChildren.get(0)); + eventPayload.setCurrentNode(jobProfileChildren.getFirst()); } eventPayload.setCurrentNodePath(new ArrayList<>(Collections.singletonList(jobProfileSnapshot.getId()))); } } private static CompletableFuture publishEventIfNecessary(DataImportEventPayload eventPayload, ProfileSnapshotWrapper jobProfileSnapshot, Throwable processThrowable) { + LOGGER.trace("publishEventIfNecessary:: Event type: {}, event payload: {}", eventPayload.getEventType(), eventPayload, processThrowable); if (processThrowable instanceof EventHandlerNotFoundException || (Objects.nonNull(processThrowable) && processThrowable.getCause() instanceof 
DuplicateEventException)) { return CompletableFuture.completedFuture(false); } - return eventPublisher.get(0).publish(prepareEventPayload(eventPayload, jobProfileSnapshot, processThrowable)) + LOGGER.trace("publishEventIfNecessary:: eventPublisher = {}", eventPublisher.getFirst().getClass().getSimpleName()); + return eventPublisher.getFirst().publish(prepareEventPayload(eventPayload, jobProfileSnapshot, processThrowable)) .thenApply(sentEvent -> true); } @@ -167,7 +171,7 @@ private static Optional findNext(DataImportEventPayload } if (currentNode.getContentType() == ACTION_PROFILE) { if (isNotEmpty(currentNode.getChildSnapshotWrappers())) { - return Optional.of(currentNode.getChildSnapshotWrappers().get(0)); + return Optional.of(currentNode.getChildSnapshotWrappers().getFirst()); } else { return findParent(currentNode.getId(), jobProfileSnapshot) .flatMap(actionParent -> getNextChildProfile(currentNode, actionParent)); @@ -212,6 +216,7 @@ private static DataImportEventPayload prepareErrorEventPayload(DataImportEventPa * @return true handlers is registered */ public static boolean registerEventHandler(T eventHandler) { + LOGGER.trace("registerEventHandler:: Registering event handler: {}", eventHandler.getClass()); return eventProcessor.getEventHandlers().add(eventHandler); } @@ -222,6 +227,7 @@ public static boolean registerEventHandler(T eventHandl * @param vertx - vertx instance */ public static void registerKafkaEventPublisher(KafkaConfig kafkaConfig, Vertx vertx, int maxDistributionNum) { + LOGGER.trace("registerKafkaEventPublisher:: Registering kafka event publisher"); eventPublisher.forEach(p -> { LOGGER.info("registerKafkaEventPublisher {}", p.toString()); if(p instanceof KafkaEventPublisher publisher) { @@ -240,6 +246,7 @@ public static void registerKafkaEventPublisher(KafkaConfig kafkaConfig, Vertx ve * Performs registration for rest event publisher in publishers list */ public static void registerRestEventPublisher() { + LOGGER.trace("registerRestEventPublisher:: Registering rest event publisher"); eventPublisher.clear(); eventPublisher.add(new RestEventPublisher()); } @@ -248,6 +255,7 @@ public static void registerRestEventPublisher() { * Clears the registry of event handlers. 
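   * Tests typically pair this with registerRestEventPublisher() to reset handler and
   * publisher state between cases (see EventManagerUnitTest#beforeTest).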
*/ public static void clearEventHandlers() { + LOGGER.trace("clearEventHandlers:: Clearing event handlers"); eventProcessor.getEventHandlers().clear(); } } diff --git a/src/main/java/org/folio/processing/events/services/processor/EventProcessorImpl.java b/src/main/java/org/folio/processing/events/services/processor/EventProcessorImpl.java index 970010e6..fd39d08e 100644 --- a/src/main/java/org/folio/processing/events/services/processor/EventProcessorImpl.java +++ b/src/main/java/org/folio/processing/events/services/processor/EventProcessorImpl.java @@ -25,6 +25,7 @@ public class EventProcessorImpl implements EventProcessor { @Override public CompletableFuture process(DataImportEventPayload eventPayload) { + LOG.debug("process:: Processing event payload {}", eventPayload); CompletableFuture future = new CompletableFuture<>(); try { Optional optionalEventHandler = eventHandlers.stream() @@ -40,6 +41,7 @@ public CompletableFuture process(DataImportEventPayload .whenComplete((payload, throwable) -> { logEventProcessingTime(eventType, startTime, eventPayload); if (throwable != null) { + LOG.warn("process:: Failed to process event payload", throwable); future.completeExceptionally(throwable); } else { future.complete(payload); @@ -86,7 +88,7 @@ private void logEventProcessingTime(String eventType, long startTime, DataImport private String getLastEvent(DataImportEventPayload eventPayload) { final var eventsChain = eventPayload.getEventsChain(); - return eventsChain.get(eventsChain.size() - 1); + return eventsChain.getLast(); } private DataImportEventPayload updatePayloadIfNeeded(DataImportEventPayload dataImportEventPayload) { diff --git a/src/main/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifier.java b/src/main/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifier.java index e35d3e3b..3c9df8bf 100644 --- a/src/main/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifier.java +++ b/src/main/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifier.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.Comparator; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -143,7 +144,7 @@ public void processUpdateMappingOption(List marcMappingRules) isNotEmpty(detail.getField().getIndicator1()) ? detail.getField().getIndicator1().charAt(0) : BLANK_SUBFIELD_CODE; char ind2 = isNotEmpty(detail.getField().getIndicator2()) ? 
detail.getField().getIndicator2().charAt(0) : BLANK_SUBFIELD_CODE; - String subfieldCode = detail.getField().getSubfields().get(0).getSubfield(); + String subfieldCode = detail.getField().getSubfields().getFirst().getSubfield(); Stream incomingDataFields = incomingMarcRecord.getDataFields().stream() .filter(field -> fieldMatches(field, fieldTag, ind1, ind2, subfieldCode.charAt(0))); @@ -315,7 +316,7 @@ private void processAddAction(MarcMappingDetail detail) { String fieldTag = detail.getField().getField(); if (Verifier.isControlField(fieldTag)) { ControlField controlField = - marcFactory.newControlField(fieldTag, detail.getField().getSubfields().get(0).getData().getText()); + marcFactory.newControlField(fieldTag, detail.getField().getSubfields().getFirst().getData().getText()); addControlFieldInNumericalOrder(controlField); } else { char ind1 = @@ -390,13 +391,13 @@ private void processDeleteAction(MarcMappingDetail detail) { for (VariableField field : marcRecordToChange.getVariableFields(fieldTag)) { marcRecordToChange.removeVariableField(field); } - } else if (detail.getField().getSubfields().get(0).getSubfield().charAt(0) == ANY_CHAR) { + } else if (detail.getField().getSubfields().getFirst().getSubfield().charAt(0) == ANY_CHAR) { marcRecordToChange.getDataFields().stream() .filter(field -> fieldMatches(field, fieldTag, ind1, ind2)) .toList() .forEach(fieldToDelete -> marcRecordToChange.removeVariableField(fieldToDelete)); } else { - char subfieldCode = detail.getField().getSubfields().get(0).getSubfield().charAt(0); + char subfieldCode = detail.getField().getSubfields().getFirst().getSubfield().charAt(0); marcRecordToChange.getDataFields().stream() .filter(field -> fieldMatches(field, fieldTag, ind1, ind2)) .map(targetField -> { @@ -420,7 +421,7 @@ private boolean fieldMatches(DataField field, String tag, char ind1, char ind2) } private void processEditAction(MarcMappingDetail mappingDetail) { - MarcSubfield subfieldRule = mappingDetail.getField().getSubfields().get(0); + MarcSubfield subfieldRule = mappingDetail.getField().getSubfields().getFirst(); switch (subfieldRule.getSubaction()) { case INSERT: processInsert(subfieldRule, mappingDetail); @@ -443,8 +444,8 @@ private void processInsert(MarcSubfield ruleSubfield, MarcMappingDetail mappingR char ind2 = isNotEmpty(mappingRule.getField().getIndicator2()) ? 
mappingRule.getField().getIndicator2().charAt(0) : BLANK_SUBFIELD_CODE; - String dataToInsert = mappingRule.getField().getSubfields().get(0).getData().getText(); - MarcSubfield.Position dataPosition = mappingRule.getField().getSubfields().get(0).getPosition(); + String dataToInsert = mappingRule.getField().getSubfields().getFirst().getData().getText(); + MarcSubfield.Position dataPosition = mappingRule.getField().getSubfields().getFirst().getPosition(); List fieldsToEdit = marcRecordToChange.getDataFields().stream() .filter(field -> fieldMatches(field, tag, ind1, ind2)) @@ -468,8 +469,8 @@ private void processInsert(MarcSubfield ruleSubfield, MarcMappingDetail mappingR private void processReplace(MarcMappingDetail mappingRule) { String tag = mappingRule.getField().getField().substring(0, 3); - String dataToReplace = mappingRule.getField().getSubfields().get(0).getData().getFind(); - String replacementData = mappingRule.getField().getSubfields().get(0).getData().getReplaceWith(); + String dataToReplace = mappingRule.getField().getSubfields().getFirst().getData().getFind(); + String replacementData = mappingRule.getField().getSubfields().getFirst().getData().getReplaceWith(); if (LDR_TAG.equals(tag)) { Range positions = getControlFieldDataPosition(mappingRule.getField().getField()); @@ -508,7 +509,7 @@ private void processReplace(MarcMappingDetail mappingRule) { private void processRemove(MarcMappingDetail mappingRule) { String tag = mappingRule.getField().getField().substring(0, 3); - String dataToRemove = mappingRule.getField().getSubfields().get(0).getData().getText(); + String dataToRemove = mappingRule.getField().getSubfields().getFirst().getData().getText(); if (Verifier.isControlField(tag)) { Range positions = getControlFieldDataPosition(mappingRule.getField().getField()); @@ -555,7 +556,7 @@ private void replaceDataInDataFields(String tag, String dataToReplace, String re char ind2 = isNotEmpty(mappingRule.getField().getIndicator2()) ? mappingRule.getField().getIndicator2().charAt(0) : BLANK_SUBFIELD_CODE; - char subfieldCode = mappingRule.getField().getSubfields().get(0).getSubfield().charAt(0); + char subfieldCode = mappingRule.getField().getSubfields().getFirst().getSubfield().charAt(0); marcRecordToChange.getDataFields().stream() .filter(field -> fieldMatches(field, tag, ind1, ind2, subfieldCode)) @@ -622,7 +623,7 @@ private void moveDataToNewField(List sourceFields, MarcSubfield subfi String newFieldTag = newFieldRule.getField(); char srcSubfieldCode = subfieldRule.getSubfield().charAt(0); char newSubfieldCode = - newFieldRule.getSubfields().isEmpty() ? srcSubfieldCode : newFieldRule.getSubfields().get(0).getSubfield().charAt(0); + newFieldRule.getSubfields().isEmpty() ? srcSubfieldCode : newFieldRule.getSubfields().getFirst().getSubfield().charAt(0); for (DataField sourceField : sourceFields) { char newFieldInd1 = @@ -652,7 +653,7 @@ private void moveDataToExistingField(List sourceFields, MarcSubfield char existingFieldInd2 = isEmpty(subfieldRule.getData().getMarcField().getIndicator2()) ? 
BLANK_SUBFIELD_CODE : subfieldRule.getData().getMarcField().getIndicator2().charAt(0); char srcSubfieldCode = subfieldRule.getSubfield().charAt(0); - char existingFieldSfCode = subfieldRule.getData().getMarcField().getSubfields().get(0).getSubfield().charAt(0); + char existingFieldSfCode = subfieldRule.getData().getMarcField().getSubfields().getFirst().getSubfield().charAt(0); List existingFields = marcRecordToChange.getDataFields().stream() .filter(field -> fieldMatches(field, existingFieldTag, existingFieldInd1, existingFieldInd2)) @@ -815,7 +816,7 @@ protected boolean updateSubfields(String subfieldCode, List tmpFields if (existingSubfields.isEmpty()) { allSubfields.addAll(newSubfields); } else { - var indexOfFirstSubfield = allSubfields.indexOf(existingSubfields.get(0)); + var indexOfFirstSubfield = allSubfields.indexOf(existingSubfields.getFirst()); // replace all existed subfields to all new subfields allSubfields.removeIf(subfield -> subfield.getCode() == subfieldChar); allSubfields.addAll(indexOfFirstSubfield, newSubfields); @@ -889,18 +890,51 @@ private boolean isNotProtected(ControlField field) { } private boolean isNotProtected(DataField field) { + LOGGER.trace("isNotProtected:: field {}", field.getTag()); return applicableProtectionSettings.stream() - .filter(setting -> setting.getField().equals(ANY_STRING) || setting.getField().equals(field.getTag())) - .filter(setting -> setting.getIndicator1().equals(ANY_STRING) - || (isNotEmpty(setting.getIndicator1()) ? setting.getIndicator1().charAt(0) : BLANK_SUBFIELD_CODE) - == field.getIndicator1()) - .filter(setting -> setting.getIndicator2().equals(ANY_STRING) - || (isNotEmpty(setting.getIndicator2()) ? setting.getIndicator2().charAt(0) : BLANK_SUBFIELD_CODE) - == field.getIndicator2()) - .filter( - setting -> setting.getSubfield().equals(ANY_STRING) || field.getSubfield(setting.getSubfield().charAt(0)) != null) - .noneMatch(setting -> setting.getData().equals(ANY_STRING) || setting.getData() - .equals(field.getSubfield(setting.getSubfield().charAt(0)).getData())); + .filter(setting -> matchesField(setting, field)) + .filter(setting -> matchesIndicator1(setting, field)) + .filter(setting -> matchesIndicator2(setting, field)) + .filter(setting -> hasSubfield(setting, field)) + .noneMatch(setting -> matchesData(setting, field)); + } + + private boolean matchesField(MarcFieldProtectionSetting setting, DataField field) { + LOGGER.trace("matchesField:: field={}, setting: field={}", field.getTag(), setting.getField()); + return setting.getField().equals(ANY_STRING) || setting.getField().equals(field.getTag()); + } + + private boolean matchesIndicator1(MarcFieldProtectionSetting setting, DataField field) { + LOGGER.trace("matchesIndicator1:: field={} | setting: indicator1={}", field.getTag(), setting.getIndicator1()); + return setting.getIndicator1().equals(ANY_STRING) || + (isNotEmpty(setting.getIndicator1()) ? setting.getIndicator1().charAt(0) : BLANK_SUBFIELD_CODE) == field.getIndicator1(); + } + + private boolean matchesIndicator2(MarcFieldProtectionSetting setting, DataField field) { + LOGGER.trace("matchesIndicator2:: field={} | setting: indicator2={}", field.getTag(), setting.getIndicator2()); + return setting.getIndicator2().equals(ANY_STRING) || + (isNotEmpty(setting.getIndicator2()) ? 
setting.getIndicator2().charAt(0) : BLANK_SUBFIELD_CODE) == field.getIndicator2(); + } + + private boolean hasSubfield(MarcFieldProtectionSetting setting, DataField field) { + LOGGER.trace("hasSubfield:: field={} | setting: subfield={}", field.getTag(), setting.getSubfield()); + return setting.getSubfield().equals(ANY_STRING) || field.getSubfield(setting.getSubfield().charAt(0)) != null; + } + + private boolean matchesData(MarcFieldProtectionSetting setting, DataField field) { + LOGGER.trace("matchesData:: field={} | setting: subfield={}, data={}", field.getTag(), setting.getSubfield(), setting.getData()); + if (setting.getSubfield().charAt(0) == ANY_CHAR) { + return field.getSubfields().stream().anyMatch(subfield -> dataMatches(setting, subfield)); + } else { + return Optional.ofNullable(field.getSubfield(setting.getSubfield().charAt(0))) + .map(subfield -> dataMatches(setting, subfield)) + .orElse(false); + } + } + + private boolean dataMatches(MarcFieldProtectionSetting setting, Subfield subfield) { + LOGGER.trace("dataMatches:: subfield: code={}, data={} | setting: subfield={}, data={}", subfield.getCode(), subfield.getData(), setting.getSubfield(), setting.getData()); + return setting.getData().equals(ANY_STRING) || setting.getData().equals(subfield.getData()); } private boolean isControlFieldsContains(List controlFields, ControlField controlField) { diff --git a/src/test/java/org/folio/processing/events/AbstractRestTest.java b/src/test/java/org/folio/processing/events/AbstractRestTest.java index 11992a9b..bcfc195f 100644 --- a/src/test/java/org/folio/processing/events/AbstractRestTest.java +++ b/src/test/java/org/folio/processing/events/AbstractRestTest.java @@ -3,6 +3,8 @@ import com.github.tomakehurst.wiremock.common.Slf4jNotifier; import com.github.tomakehurst.wiremock.core.WireMockConfiguration; import com.github.tomakehurst.wiremock.junit.WireMockRule; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.junit.Rule; import java.io.IOException; @@ -10,6 +12,9 @@ import java.util.concurrent.ThreadLocalRandom; public abstract class AbstractRestTest { + + private static final Logger LOGGER = LogManager.getLogger(AbstractRestTest.class); + protected final String TENANT_ID = "diku"; protected final String TOKEN = "token"; private int PORT = nextFreePort(); @@ -22,6 +27,8 @@ public abstract class AbstractRestTest { .notifier(new Slf4jNotifier(true))); public static int nextFreePort() { + LOGGER.trace("nextFreePort:: creating random port"); + int maxTries = 10000; int port = ThreadLocalRandom.current().nextInt(49152 , 65535); while (true) { @@ -38,6 +45,7 @@ public static int nextFreePort() { } public static boolean isLocalPortFree(int port) { + LOGGER.trace("isLocalPortFree:: checking if port {} is free", port); try { new ServerSocket(port).close(); return true; diff --git a/src/test/java/org/folio/processing/events/EventManagerUnitTest.java b/src/test/java/org/folio/processing/events/EventManagerUnitTest.java index fe82a9c8..aceb11cb 100644 --- a/src/test/java/org/folio/processing/events/EventManagerUnitTest.java +++ b/src/test/java/org/folio/processing/events/EventManagerUnitTest.java @@ -5,6 +5,8 @@ import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.folio.ActionProfile; import org.folio.DataImportEventPayload; import org.folio.JobProfile; @@ -51,16 +53,19 @@ 
@RunWith(VertxUnitRunner.class) public class EventManagerUnitTest extends AbstractRestTest { + private static final Logger LOGGER = LogManager.getLogger(EventManagerUnitTest.class); private final String PUBLISH_SERVICE_URL = "/pubsub/publish"; @Before public void beforeTest() { EventManager.clearEventHandlers(); + EventManager.registerRestEventPublisher(); WireMock.stubFor(WireMock.post(PUBLISH_SERVICE_URL).willReturn(WireMock.noContent())); } @Test public void shouldHandleEvent(TestContext testContext) { + LOGGER.info("test:: shouldHandleEvent"); Async async = testContext.async(); // given EventManager.registerEventHandler(new CreateInstanceEventHandler()); @@ -95,9 +100,10 @@ public void shouldHandleEvent(TestContext testContext) { .withOkapiUrl(OKAPI_URL) .withToken(TOKEN) .withContext(new HashMap<>()) - .withCurrentNode(profileSnapshot.getChildSnapshotWrappers().get(0)); + .withCurrentNode(profileSnapshot.getChildSnapshotWrappers().getFirst()); // when - EventManager.handleEvent(eventPayload, profileSnapshot).whenComplete((nextEventContext, throwable) -> { + EventManager.handleEvent(eventPayload, profileSnapshot) + .whenComplete((nextEventContext, throwable) -> { // then testContext.assertNull(throwable); testContext.assertEquals(1, nextEventContext.getEventsChain().size()); @@ -112,6 +118,7 @@ public void shouldHandleEvent(TestContext testContext) { @Test public void shouldHandleLastEvent(TestContext testContext) { + LOGGER.info("test:: shouldHandleLastEvent"); Async async = testContext.async(); // given EventManager.registerEventHandler(new CreateInstanceEventHandler()); @@ -134,7 +141,7 @@ public void shouldHandleLastEvent(TestContext testContext) { .withOkapiUrl(OKAPI_URL) .withToken(TOKEN) .withContext(new HashMap<>()) - .withCurrentNode(jobProfileSnapshot.getChildSnapshotWrappers().get(0)); + .withCurrentNode(jobProfileSnapshot.getChildSnapshotWrappers().getFirst()); // when EventManager.handleEvent(eventPayload, jobProfileSnapshot).whenComplete((nextEventContext, throwable) -> { // then @@ -151,6 +158,7 @@ public void shouldHandleLastEvent(TestContext testContext) { @Test public void shouldIgnoreEventIfNoHandlersDefined(TestContext testContext) { + LOGGER.info("test:: shouldIgnoreEventIfNoHandlersDefined"); Async async = testContext.async(); // given ProfileSnapshotWrapper profileSnapshot = new ProfileSnapshotWrapper() @@ -167,7 +175,7 @@ public void shouldIgnoreEventIfNoHandlersDefined(TestContext testContext) { .withOkapiUrl(OKAPI_URL) .withToken(TOKEN) .withContext(new HashMap<>()) - .withCurrentNode(profileSnapshot.getChildSnapshotWrappers().get(0)); + .withCurrentNode(profileSnapshot.getChildSnapshotWrappers().getFirst()); // when EventManager.handleEvent(eventPayload, profileSnapshot).whenComplete((nextEventContext, throwable) -> { @@ -181,6 +189,7 @@ public void shouldIgnoreEventIfNoHandlersDefined(TestContext testContext) { @Test public void shouldHandleAsErrorEventIfHandlerCompletedExceptionally(TestContext testContext) { + LOGGER.info("test:: shouldHandleAsErrorEventIfHandlerCompletedExceptionally"); Async async = testContext.async(); // given EventManager.registerEventHandler(new FailExceptionallyHandler()); @@ -199,7 +208,7 @@ public void shouldHandleAsErrorEventIfHandlerCompletedExceptionally(TestContext .withOkapiUrl(OKAPI_URL) .withToken(TOKEN) .withContext(new HashMap<>()) - .withCurrentNode(jobProfileSnapshot.getChildSnapshotWrappers().get(0)); + .withCurrentNode(jobProfileSnapshot.getChildSnapshotWrappers().getFirst()); // when 
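+    // The FailExceptionallyHandler registered above completes exceptionally; EventManager is
+    // expected to turn that failure into an error event payload and publish it rather than
+    // letting the returned future fail.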
EventManager.handleEvent(eventPayload, jobProfileSnapshot).whenComplete((nextEventContext, throwable) -> { // then @@ -212,6 +221,7 @@ public void shouldHandleAsErrorEventIfHandlerCompletedExceptionally(TestContext @Test public void shouldHandleFirstEventInJobProfile(TestContext testContext) { + LOGGER.info("test:: shouldHandleFirstEventInJobProfile"); Async async = testContext.async(); // given String jobProfileId = UUID.randomUUID().toString(); @@ -255,12 +265,13 @@ public void shouldHandleFirstEventInJobProfile(TestContext testContext) { @Test public void shouldHandleAndSetToCurrentNodeAction2Wrapper(TestContext testContext) { + LOGGER.info("test:: shouldHandleAndSetToCurrentNodeAction2Wrapper"); Async async = testContext.async(); // given CreateInstanceEventHandler createInstanceHandler = Mockito.spy(new CreateInstanceEventHandler()); Mockito.doAnswer(invocationOnMock -> { DataImportEventPayload payload = invocationOnMock.getArgument(0); - payload.setCurrentNode(payload.getCurrentNode().getChildSnapshotWrappers().get(0)); + payload.setCurrentNode(payload.getCurrentNode().getChildSnapshotWrappers().getFirst()); return invocationOnMock.callRealMethod(); }).when(createInstanceHandler).handle(any(DataImportEventPayload.class)); @@ -330,6 +341,7 @@ public void shouldHandleAndSetToCurrentNodeAction2Wrapper(TestContext testContex @Test public void shouldHandleAndSetToCurrentNodeAction1Wrapper(TestContext testContext) { + LOGGER.info("test:: shouldHandleAndSetToCurrentNodeAction1Wrapper"); Async async = testContext.async(); // given EventHandler matchInstanceHandler = Mockito.mock(EventHandler.class); @@ -387,13 +399,14 @@ public void shouldHandleAndSetToCurrentNodeAction1Wrapper(TestContext testContex @Test public void shouldHandleEventInCascadingProfilesAndSwitchNode(TestContext testContext) { + LOGGER.info("test:: shouldHandleEventInCascadingProfilesAndSwitchNode"); Async async = testContext.async(); // given EventHandler updateInstanceHandler = Mockito.mock(EventHandler.class); Mockito.doAnswer(invocationOnMock -> { DataImportEventPayload payload = invocationOnMock.getArgument(0); - payload.setCurrentNode(payload.getCurrentNode().getChildSnapshotWrappers().get(0)); + payload.setCurrentNode(payload.getCurrentNode().getChildSnapshotWrappers().getFirst()); return CompletableFuture.completedFuture(payload.withEventType(DI_INVENTORY_INSTANCE_UPDATED.value())); }).when(updateInstanceHandler).handle(any(DataImportEventPayload.class)); @@ -528,12 +541,13 @@ public void shouldHandleEventInCascadingProfilesAndSwitchNode(TestContext testCo @Test public void shouldHandleAndSetToCurrentNodeMatchWrapper2(TestContext testContext) { + LOGGER.info("test:: shouldHandleAndSetToCurrentNodeMatchWrapper2"); Async async = testContext.async(); // given EventHandler updateInstanceHandler = Mockito.mock(EventHandler.class); Mockito.doAnswer(invocationOnMock -> { DataImportEventPayload payload = invocationOnMock.getArgument(0); - payload.setCurrentNode(payload.getCurrentNode().getChildSnapshotWrappers().get(0)); + payload.setCurrentNode(payload.getCurrentNode().getChildSnapshotWrappers().getFirst()); return CompletableFuture.completedFuture(payload.withEventType(DI_INVENTORY_INSTANCE_UPDATED.value())); }).when(updateInstanceHandler).handle(any(DataImportEventPayload.class)); Mockito.when(updateInstanceHandler.isEligible(any(DataImportEventPayload.class))).thenReturn(true); @@ -593,6 +607,7 @@ public void shouldHandleAndSetToCurrentNodeMatchWrapper2(TestContext testContext @Test public void 
shouldHandleEventAndPreparePayloadForPostProcessing(TestContext testContext) { + LOGGER.info("test:: shouldHandleEventAndPreparePayloadForPostProcessing"); Async async = testContext.async(); // given String jobProfileId = UUID.randomUUID().toString(); @@ -633,6 +648,7 @@ public void shouldHandleEventAndPreparePayloadForPostProcessing(TestContext test @Test public void shouldPerformEventPostProcessingAndPreparePayloadAfterPostProcessing(TestContext testContext) { + LOGGER.info("test:: shouldPerformEventPostProcessingAndPreparePayloadAfterPostProcessing"); Async async = testContext.async(); // given String jobProfileId = UUID.randomUUID().toString(); @@ -678,6 +694,7 @@ public void shouldPerformEventPostProcessingAndPreparePayloadAfterPostProcessing @Test public void shouldClearExtraOLKeyFromPayload(TestContext testContext) { + LOGGER.info("test:: shouldClearExtraOLKeyFromPayload"); Async async = testContext.async(); // given EventManager.registerEventHandler(new CreateInstanceEventHandler()); @@ -702,7 +719,7 @@ public void shouldClearExtraOLKeyFromPayload(TestContext testContext) { .withOkapiUrl(OKAPI_URL) .withToken(TOKEN) .withContext(extraOLKey) - .withCurrentNode(jobProfileSnapshot.getChildSnapshotWrappers().get(0)); + .withCurrentNode(jobProfileSnapshot.getChildSnapshotWrappers().getFirst()); // when EventManager.handleEvent(eventPayload, jobProfileSnapshot).whenComplete((nextEventContext, throwable) -> { // then diff --git a/src/test/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifierTest.java b/src/test/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifierTest.java index bcb085c4..28b047a4 100644 --- a/src/test/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifierTest.java +++ b/src/test/java/org/folio/processing/mapping/mapper/writer/marc/MarcRecordModifierTest.java @@ -2033,6 +2033,44 @@ public void shouldRetainExistingRepeatableDataFieldAndAddIncomingWhenExistingIsP testUpdateRecord(incomingParsedContent, existingParsedContent, expectedParsedContent, mappingParameters); } + @Test + public void shouldRetainExistingRepeatableDataFieldAndAddIncomingWhenExistingIsProtectedAndSomeIncomingFieldIsSameWithMatchesMultipleSubfieldProtectionSettings() { + String incomingParsedContent = "{\"leader\":\"00129nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"655\":{\"subfields\":[{\"a\":\"Catalogs1.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst015\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"655\":{\"subfields\":[{\"a\":\"Periodicals.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst01411641\"}],\"ind1\":\" \",\"ind2\":\"7\"}}]}"; + String existingParsedContent = "{\"leader\":\"00129nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"655\":{\"subfields\":[{\"a\":\"Catalogs0.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst014\"}],\"ind1\":\" \",\"ind2\":\"7\"}}]}"; + String expectedParsedContent = "{\"leader\":\"00200nam 22000731a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"655\":{\"subfields\":[{\"a\":\"Catalogs0.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst014\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"655\":{\"subfields\":[{\"a\":\"Catalogs1.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst015\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"655\":{\"subfields\":[{\"a\":\"Periodicals.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst01411641\"}],\"ind1\":\" \",\"ind2\":\"7\"}}]}"; + + List protectionSettings = List.of( + new MarcFieldProtectionSetting() + .withField("655") + .withIndicator1("*") + .withIndicator2("*") + .withSubfield("*") //any subfield + .withData("fast")); + + 
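+    // Subfield "*" makes the setting apply to any 655 field containing some subfield whose
+    // data equals "fast" (handled by matchesData above); the previous noneMatch chain
+    // dereferenced field.getSubfield('*'), which is null here, producing the
+    // NullPointerException this patch fixes.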
MappingParameters mappingParameters = new MappingParameters() + .withMarcFieldProtectionSettings(protectionSettings); + testUpdateRecord(incomingParsedContent, existingParsedContent, expectedParsedContent, mappingParameters); + } + + @Test + public void shouldRetainExistingRepeatableDataFieldAndAddIncomingWhenExistingIsProtectedAndIncomingFieldIsSameWithMatchesSubfieldProtectionSettings() { + String incomingParsedContent = "{\"leader\":\"00129nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"655\":{\"subfields\":[{\"a\":\"Catalogs1.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst015\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"655\":{\"subfields\":[{\"a\":\"Periodicals.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst01411641\"}],\"ind1\":\" \",\"ind2\":\"7\"}}]}"; + String existingParsedContent = "{\"leader\":\"00129nam 22000611a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"655\":{\"subfields\":[{\"a\":\"Catalogs0.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst014\"}],\"ind1\":\" \",\"ind2\":\"7\"}}]}"; + String expectedParsedContent = "{\"leader\":\"00200nam 22000731a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"655\":{\"subfields\":[{\"a\":\"Catalogs0.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst014\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"655\":{\"subfields\":[{\"a\":\"Catalogs1.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst015\"}],\"ind1\":\" \",\"ind2\":\"7\"}},{\"655\":{\"subfields\":[{\"a\":\"Periodicals.\"},{\"2\":\"fast\"},{\"0\":\"(OCoLC)fst01411641\"}],\"ind1\":\" \",\"ind2\":\"7\"}}]}"; + + List protectionSettings = List.of( + new MarcFieldProtectionSetting() + .withField("655") + .withIndicator1("*") + .withIndicator2("*") + .withSubfield("2") //selected subfield + .withData("fast")); + + MappingParameters mappingParameters = new MappingParameters() + .withMarcFieldProtectionSettings(protectionSettings); + testUpdateRecord(incomingParsedContent, existingParsedContent, expectedParsedContent, mappingParameters); + } + @Test public void shouldRetainExistingRepeatableFieldWhenExistingIsProtectedAndHasNoIncomingFieldWithSameTag() { // 950 is repeatable field diff --git a/src/test/resources/log4j2.properties b/src/test/resources/log4j2.properties new file mode 100644 index 00000000..2f71b05c --- /dev/null +++ b/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +filters = threshold + +filter.threshold.type = ThresholdFilter +filter.threshold.level = INFO + +appenders = console + +packages = org.folio.okapi.common.logging + +appender.console.type = Console +appender.console.name = STDOUT + +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %-20.20C{1} [%reqId] %m%n + +rootLogger.level = INFO +rootLogger.appenderRefs = INFO +rootLogger.appenderRef.stdout.ref = STDOUT From a3e563ea23ad0950b5afffde75aec3c181d775ed Mon Sep 17 00:00:00 2001 From: Kateryna_Senchenko Date: Fri, 18 Apr 2025 17:14:18 +0300 Subject: [PATCH 08/19] update news --- NEWS.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/NEWS.md b/NEWS.md index 97535547..cf7aaeb1 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,7 @@ +## 2025-04-18 v4.4.1 +* [MODDICORE-452](https://folio-org.atlassian.net/browse/MODDICORE-452) Fix MARC Migration failings due to records missing values +* [MODDICORE-450](https://folio-org.atlassian.net/browse/MODDICORE-450) Fix NullPointerException during Update of MARC bibliographic action processing + ## 2025-03-07 v4.4.0 * [MODDICORE-433](https://folio-org.atlassian.net/browse/MODDICORE-433) Add userId to event header and allow to send events with 
null token * [MODDICORE-432](https://folio-org.atlassian.net/browse/MODDICORE-432) Vendor details are empty with code that contains brackets during order creation From ee90971bd6dc77840a68f5b40e527a14ecf6c8dc Mon Sep 17 00:00:00 2001 From: Kateryna_Senchenko Date: Fri, 18 Apr 2025 17:15:19 +0300 Subject: [PATCH 09/19] [maven-release-plugin] prepare release v4.4.1 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 47030d11..9ed9f4b3 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.1-SNAPSHOT + 4.4.1 jar data-import-processing-core @@ -195,7 +195,7 @@ https://github.com/folio-org/data-import-processing-core scm:git:git@github.com:folio-org/data-import-processing-core.git scm:git:git@github.com:folio-org/data-import-processing-core.git - v4.4.0 + v4.4.1 From 08198287f5e75cd4ab834fa9c5904fc45c70001c Mon Sep 17 00:00:00 2001 From: Kateryna_Senchenko Date: Fri, 18 Apr 2025 17:15:19 +0300 Subject: [PATCH 10/19] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 9ed9f4b3..6050d065 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.1 + 4.4.2-SNAPSHOT jar data-import-processing-core @@ -195,7 +195,7 @@ https://github.com/folio-org/data-import-processing-core scm:git:git@github.com:folio-org/data-import-processing-core.git scm:git:git@github.com:folio-org/data-import-processing-core.git - v4.4.1 + v4.4.0 From 1505d478d5b1100f51cd6a85c8c4aa37114355ed Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Sat, 26 Apr 2025 18:12:03 +0200 Subject: [PATCH 11/19] MODDICORE-454: Upgrade all dependencies to supported versions for Sunflower https://folio-org.atlassian.net/browse/MODDICORE-454 Purpose Upgrade all dependencies to supported versions for Sunflower. Fix security vulnerabilities in Netty in graalvm. Approach Bump versions in pom.xml. Upgrade all dependencies to supported versions for Sunflower. Upgrading Vertx from 4.5.11 to 4.5.14 indirectly upgrades Netty fixing these security vulnerabilities: * https://github.com/netty/netty/security/advisories/GHSA-4g8c-wm8x-jfhw CVE-2025-24970 netty-handler: SslHandler doesn't correctly validate packets which can lead to native crash when using native SSLEngine * https://github.com/netty/netty/security/advisories/GHSA-389x-839f-4rhx CVE-2025-25193 netty-common: Denial of Service attack on windows app using Netty, again Upgrading org.graalvm.js:js and org.graalvm.js:js-scriptengine from 23.0.0 to 24.2.1 fixes these security vulnerabilities: * https://github.com/advisories/GHSA-7f6c-8chx-2vm5 CVE-2025-21587 Timing Attack in security-libs/javax.net.ssl that exposes information from a TLS handshake via side channel. * https://github.com/advisories/GHSA-qh4r-w9x4-6fq2 CVE-2025-25193 Buffer Overflow in hotspot/compiler due to improper handling of buffers in addnode.cpp * https://github.com/advisories/GHSA-3fvx-r9r8-xq97 CVE-2025-30698 Heap-based Buffer Overflow in the Graphics.copyArea functionality in client-libs/2d. An attacker can manipulate memory and potentially execute code. TODOS and Open Questions - [x] Check logging. Learning Before making the first release for a flower release bump all dependency versions to a supported version. 
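A quick sanity check for the transitive Netty upgrade (a suggested verification step, not part of this change) is to inspect the resolved dependency tree:

    mvn dependency:tree -Dincludes=io.netty

This lists the io.netty artifacts pulled in through Vert.x 4.5.14 and confirms the patched Netty version is the one actually on the classpath.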
(cherry picked from commit c40ade8c9c5b1b2b6f133cf607fdab66afb16603) --- README.md | 2 +- pom.xml | 83 ++++++++----------- .../functions/NormalizationFunction.java | 2 +- .../events/utils/PomReaderUtilTest.java | 2 +- 4 files changed, 37 insertions(+), 52 deletions(-) diff --git a/README.md b/README.md index 1f912641..661e1ffa 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # data-import-processing-core -Copyright (C) 2019-2023 The Open Library Foundation +Copyright (C) 2019-2025 The Open Library Foundation This software is distributed under the terms of the Apache License, Version 2.0. See the file "[LICENSE](LICENSE)" for more information. diff --git a/pom.xml b/pom.xml index 6050d065..4ff6e327 100644 --- a/pom.xml +++ b/pom.xml @@ -21,11 +21,9 @@ UTF-8 2.6 - 2.0.6 - 2.14.2 + 2.0.17 4.13.2 - 4.5.11 - 3.0.1 + 3.12.1 35.4.0 org.folio.processing.mapping.defaultmapper.** **/OkapiConnectionParams.java @@ -47,7 +45,14 @@ io.vertx vertx-stack-depchain - ${vertx.version} + 4.5.14 + pom + import + + + org.junit + junit-bom + 5.12.2 pom import @@ -63,7 +68,7 @@ org.folio folio-kafka-wrapper - 3.3.0 + 3.3.1 org.folio @@ -73,12 +78,11 @@ org.marc4j marc4j - 2.9.2 + 2.9.6 io.vertx vertx-web-client - ${vertx.version} javax.ws.rs @@ -93,42 +97,36 @@ com.fasterxml.jackson.core jackson-databind - ${jackson.version} org.apache.maven maven-model - 3.8.7 + 3.9.9 commons-io commons-io - 2.14.0 - - - commons-collections - commons-collections - 3.2.2 - - - org.apache.commons - commons-collections4 - 4.4 + 2.19.0 com.google.guava guava - 32.0.0-jre + 33.4.8-jre org.graalvm.js js - 23.0.0 + 24.2.1 + pom org.graalvm.js js-scriptengine - 23.0.0 + 24.2.1 + + + io.vertx + vertx-kafka-client @@ -140,11 +138,10 @@ io.vertx vertx-unit - ${vertx.version} test - com.github.tomakehurst + org.wiremock wiremock ${wiremock.version} test @@ -152,25 +149,18 @@ org.mockito mockito-core - 5.2.0 - test - - - net.bytebuddy - byte-buddy - 1.17.1 + 5.17.0 test org.junit.jupiter junit-jupiter-api - 5.9.2 test org.hamcrest - hamcrest-junit - 2.0.0.0 + hamcrest + 3.0 test @@ -182,13 +172,8 @@ org.junit.jupiter junit-jupiter-params - 5.9.2 test - - io.vertx - vertx-kafka-client - @@ -203,7 +188,7 @@ org.codehaus.mojo exec-maven-plugin - 3.0.0 + 3.5.0 git submodule update @@ -227,7 +212,7 @@ org.codehaus.mojo properties-maven-plugin - 1.0.0 + 1.2.1 set-system-properties @@ -249,7 +234,7 @@ org.jsonschema2pojo jsonschema2pojo-maven-plugin - 0.4.37 + 0.5.1 ${jsonschema2pojo_output_dir} @@ -329,7 +314,7 @@ org.apache.maven.plugins maven-enforcer-plugin - 3.0.0-M2 + 3.5.0 enforce-maven @@ -349,7 +334,7 @@ maven-compiler-plugin - 3.11.0 + 3.14.0 21 -Xlint:unchecked @@ -359,7 +344,7 @@ org.apache.maven.plugins maven-shade-plugin - 3.2.4 + 3.6.0 package @@ -377,7 +362,7 @@ org.apache.maven.plugins maven-release-plugin - 2.5.3 + 3.1.1 clean verify v@{project.version} @@ -389,7 +374,7 @@ org.apache.maven.plugins maven-surefire-plugin - 2.22.2 + 3.5.3 false diff --git a/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java b/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java index 9a8618d8..d3f0b486 100644 --- a/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java +++ b/src/main/java/org/folio/processing/mapping/defaultmapper/processor/functions/NormalizationFunction.java @@ -42,7 +42,7 @@ import java.util.stream.IntStream; import static io.netty.util.internal.StringUtil.EMPTY_STRING; 
-import static org.apache.commons.collections.CollectionUtils.isEmpty; +import static org.apache.commons.collections4.CollectionUtils.isEmpty; import static org.apache.commons.lang.StringUtils.isEmpty; import static org.apache.commons.lang3.math.NumberUtils.INTEGER_ZERO; diff --git a/src/test/java/org/folio/processing/events/utils/PomReaderUtilTest.java b/src/test/java/org/folio/processing/events/utils/PomReaderUtilTest.java index b2b3c122..bb2ce73f 100644 --- a/src/test/java/org/folio/processing/events/utils/PomReaderUtilTest.java +++ b/src/test/java/org/folio/processing/events/utils/PomReaderUtilTest.java @@ -50,7 +50,7 @@ void readFromJar() throws IOException, XmlPullParserException { pom.readIt(null, "META-INF/maven/io.vertx"); // force reading from Jar // first dependency in main pom - assertThat(pom.getModuleName(), is("vertx_ext_parent")); + assertThat(pom.getModuleName(), is("vertx_parent")); } @Test From 594510ff452216c68f2f7cbe811feed99099721f Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Sat, 26 Apr 2025 23:15:42 +0200 Subject: [PATCH 12/19] MODDICORE-454: Replace kafka-junit https://folio-org.atlassian.net/browse/MODDICORE-454 (cherry picked from commit 37b13c56b108a69a2e86f2a85480922f8b17e5ba) --- pom.xml | 15 +++-- ramls/settings/fund.json | 1 + .../java/org/folio/processing/TestUtil.java | 4 +- .../processing/events/EventManagerTest.java | 24 +++---- .../publisher/KafkaEventPublisherTest.java | 67 +++++++++++-------- 5 files changed, 63 insertions(+), 48 deletions(-) diff --git a/pom.xml b/pom.xml index 4ff6e327..49f0e618 100644 --- a/pom.xml +++ b/pom.xml @@ -89,6 +89,11 @@ javax.ws.rs-api 2.1.1 + + org.glassfish.jersey.core + jersey-common + 2.46 + org.slf4j slf4j-api @@ -164,14 +169,14 @@ test - net.mguenther.kafka - kafka-junit - 3.6.0 + org.junit.jupiter + junit-jupiter-params test - org.junit.jupiter - junit-jupiter-params + org.testcontainers + kafka + 1.20.6 test diff --git a/ramls/settings/fund.json b/ramls/settings/fund.json index 38684719..82b9ae8b 100644 --- a/ramls/settings/fund.json +++ b/ramls/settings/fund.json @@ -85,6 +85,7 @@ "type": "array", "items": { "type": "object", + "javaType": "org.folio.FundLocation", "properties": { "locationId": { "description": "UUID of the associated location", diff --git a/src/test/java/org/folio/processing/TestUtil.java b/src/test/java/org/folio/processing/TestUtil.java index 6550dce8..3eedd7bd 100644 --- a/src/test/java/org/folio/processing/TestUtil.java +++ b/src/test/java/org/folio/processing/TestUtil.java @@ -1,7 +1,7 @@ package org.folio.processing; import org.apache.commons.io.FileUtils; - +import org.testcontainers.utility.DockerImageName; import java.io.File; import java.io.IOException; @@ -10,6 +10,8 @@ */ public final class TestUtil { + public static final DockerImageName KAFKA_CONTAINER_NAME = DockerImageName.parse("apache/kafka-native:3.8.0"); + public static String readFileFromPath(String path) throws IOException { return new String(FileUtils.readFileToByteArray(new File(path))); } diff --git a/src/test/java/org/folio/processing/events/EventManagerTest.java b/src/test/java/org/folio/processing/events/EventManagerTest.java index 4dd73aff..a3eae088 100644 --- a/src/test/java/org/folio/processing/events/EventManagerTest.java +++ b/src/test/java/org/folio/processing/events/EventManagerTest.java @@ -4,35 +4,33 @@ import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.VertxUnitRunner; -import net.mguenther.kafka.junit.EmbeddedKafkaCluster; import 
org.folio.kafka.KafkaConfig; +import org.folio.processing.TestUtil; import org.junit.BeforeClass; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; - -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; +import org.testcontainers.kafka.KafkaContainer; @RunWith(VertxUnitRunner.class) public class EventManagerTest { private static final String KAFKA_ENV = "folio"; + @ClassRule + public static KafkaContainer kafkaContainer = new KafkaContainer(TestUtil.KAFKA_CONTAINER_NAME); + private static KafkaConfig kafkaConfig; + @Rule public RunTestOnContext rule = new RunTestOnContext(); - public static EmbeddedKafkaCluster kafkaCluster; - private static KafkaConfig kafkaConfig; @BeforeClass public static void setUpClass() { - kafkaCluster = provisionWith(defaultClusterConfig()); - kafkaCluster.start(); - String[] hostAndPort = kafkaCluster.getBrokerList().split(":"); kafkaConfig = KafkaConfig.builder() - .kafkaHost(hostAndPort[0]) - .kafkaPort(hostAndPort[1]) - .envId(KAFKA_ENV) - .build(); + .kafkaHost(kafkaContainer.getHost()) + .kafkaPort(kafkaContainer.getFirstMappedPort() + "") + .envId(KAFKA_ENV) + .build(); } @Test diff --git a/src/test/java/org/folio/processing/events/services/publisher/KafkaEventPublisherTest.java b/src/test/java/org/folio/processing/events/services/publisher/KafkaEventPublisherTest.java index 420ec02c..8828cb18 100644 --- a/src/test/java/org/folio/processing/events/services/publisher/KafkaEventPublisherTest.java +++ b/src/test/java/org/folio/processing/events/services/publisher/KafkaEventPublisherTest.java @@ -3,25 +3,26 @@ import io.vertx.core.Vertx; import io.vertx.core.json.Json; import io.vertx.ext.unit.junit.VertxUnitRunner; -import net.mguenther.kafka.junit.EmbeddedKafkaCluster; -import net.mguenther.kafka.junit.ObserveKeyValues; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.KafkaConsumer; import org.folio.DataImportEventPayload; import org.folio.kafka.KafkaConfig; import org.folio.kafka.KafkaTopicNameHelper; +import org.folio.processing.TestUtil; import org.folio.rest.jaxrs.model.Event; import org.junit.BeforeClass; +import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; - +import org.testcontainers.kafka.KafkaContainer; +import java.time.Duration; import java.util.HashMap; import java.util.List; +import java.util.Properties; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith; -import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig; import static org.folio.DataImportEventTypes.DI_COMPLETED; import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace; import static org.junit.Assert.assertEquals; @@ -35,30 +36,35 @@ public class KafkaEventPublisherTest { private static final String TENANT_ID = "diku"; private static final String TOKEN = "stub-token"; - public static EmbeddedKafkaCluster kafkaCluster; - + @ClassRule + public static KafkaContainer kafkaContainer = new KafkaContainer(TestUtil.KAFKA_CONTAINER_NAME); private static KafkaConfig kafkaConfig; + private static Properties consumerConfig = new Properties(); private Vertx vertx = Vertx.vertx(); @BeforeClass public static void setUpClass() { - 
kafkaCluster = provisionWith(defaultClusterConfig()); - kafkaCluster.start(); - String[] hostAndPort = kafkaCluster.getBrokerList().split(":"); kafkaConfig = KafkaConfig.builder() - .kafkaHost(hostAndPort[0]) - .kafkaPort(hostAndPort[1]) + .kafkaHost(kafkaContainer.getHost()) + .kafkaPort(kafkaContainer.getFirstMappedPort() + "") .envId(KAFKA_ENV) .build(); + kafkaConfig.getConsumerProps().forEach((key, value) -> { + if (value != null) { + consumerConfig.put(key, value); + } + }); + consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "test"); } @Test public void shouldPublishPayload() throws Exception { + var tenant = "shouldPublishPayload"; try(KafkaEventPublisher eventPublisher = new KafkaEventPublisher(kafkaConfig, vertx, 100)) { DataImportEventPayload eventPayload = new DataImportEventPayload() .withEventType(DI_COMPLETED.value()) .withOkapiUrl(OKAPI_URL) - .withTenant(TENANT_ID) + .withTenant(tenant) .withToken(TOKEN) .withContext(new HashMap<>() {{ put("recordId", UUID.randomUUID().toString()); @@ -67,13 +73,8 @@ public void shouldPublishPayload() throws Exception { CompletableFuture future = eventPublisher.publish(eventPayload); - String topicToObserve = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV, getDefaultNameSpace(), TENANT_ID, DI_COMPLETED.value()); - List observedValues = kafkaCluster.observeValues(ObserveKeyValues.on(topicToObserve, 1) - .observeFor(30, TimeUnit.SECONDS) - .build()); - - Event obtainedEvent = Json.decodeValue(observedValues.get(0), Event.class); - DataImportEventPayload actualPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class); + String topicToObserve = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV, getDefaultNameSpace(), tenant, DI_COMPLETED.value()); + DataImportEventPayload actualPayload = Json.decodeValue(getEventPayload(topicToObserve), DataImportEventPayload.class); assertEquals(eventPayload, actualPayload); assertFalse(future.isCompletedExceptionally()); @@ -82,11 +83,12 @@ public void shouldPublishPayload() throws Exception { @Test public void shouldPublishPayloadIfTokenIsNull() throws Exception { + var tenant = "shouldPublishPayloadIfTokenIsNull"; try(KafkaEventPublisher eventPublisher = new KafkaEventPublisher(kafkaConfig, vertx, 100)) { DataImportEventPayload eventPayload = new DataImportEventPayload() .withEventType(DI_COMPLETED.value()) .withOkapiUrl(OKAPI_URL) - .withTenant(TENANT_ID) + .withTenant(tenant) .withToken(null) .withContext(new HashMap<>() {{ put("recordId", UUID.randomUUID().toString()); @@ -96,13 +98,8 @@ public void shouldPublishPayloadIfTokenIsNull() throws Exception { CompletableFuture future = eventPublisher.publish(eventPayload); - String topicToObserve = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV, getDefaultNameSpace(), TENANT_ID, DI_COMPLETED.value()); - List observedValues = kafkaCluster.observeValues(ObserveKeyValues.on(topicToObserve, 1) - .observeFor(30, TimeUnit.SECONDS) - .build()); - - Event obtainedEvent = Json.decodeValue(observedValues.get(observedValues.size() - 1), Event.class); - DataImportEventPayload actualPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class); + String topicToObserve = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV, getDefaultNameSpace(), tenant, DI_COMPLETED.value()); + DataImportEventPayload actualPayload = Json.decodeValue(getEventPayload(topicToObserve), DataImportEventPayload.class); assertEquals(eventPayload, actualPayload); assertFalse(future.isCompletedExceptionally()); @@ -171,4 +168,16 @@ public void 
shouldReturnFailedFutureWhenChunkIdIsNull() throws Exception { future.get(); } } + + private String getEventPayload(String topicToObserve) { + try (var kafkaConsumer = new KafkaConsumer(consumerConfig)) { + kafkaConsumer.subscribe(List.of(topicToObserve)); + var records = kafkaConsumer.poll(Duration.ofSeconds(30)); + if (records.isEmpty()) { + throw new IllegalStateException("Expected Kafka event at " + topicToObserve + " but got none"); + } + Event obtainedEvent = Json.decodeValue(records.iterator().next().value(), Event.class); + return obtainedEvent.getEventPayload(); + } + } } From 2dfc75f6ae2576f7f687489a5a08df435d8eca1d Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Mon, 28 Apr 2025 23:18:02 +0200 Subject: [PATCH 13/19] NEWS for 4.4.2 --- NEWS.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/NEWS.md b/NEWS.md index cf7aaeb1..83f10927 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,6 @@ +## 2025-04-29 v4.4.2 +* [MODDICORE-454](https://folio-org.atlassian.net/browse/MODDICORE-454) Upgrade all dependencies to supported versions for Sunflower + ## 2025-04-18 v4.4.1 * [MODDICORE-452](https://folio-org.atlassian.net/browse/MODDICORE-452) Fix MARC Migration failings due to records missing values * [MODDICORE-450](https://folio-org.atlassian.net/browse/MODDICORE-450) Fix NullPointerException during Update of MARC bibliographic action processing From 19ef38776f505d72aa605808956dacf8f3b6f31a Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Mon, 28 Apr 2025 23:19:01 +0200 Subject: [PATCH 14/19] [maven-release-plugin] prepare release v4.4.2 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 49f0e618..2c54e44f 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.2-SNAPSHOT + 4.4.2 jar data-import-processing-core @@ -185,7 +185,7 @@ https://github.com/folio-org/data-import-processing-core scm:git:git@github.com:folio-org/data-import-processing-core.git scm:git:git@github.com:folio-org/data-import-processing-core.git - v4.4.0 + v4.4.2 From 0d74fc3e462e0ffdb9eafcb0b49de4227c0796b3 Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Mon, 28 Apr 2025 23:19:01 +0200 Subject: [PATCH 15/19] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 2c54e44f..be026501 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.2 + 4.4.3-SNAPSHOT jar data-import-processing-core @@ -185,7 +185,7 @@ https://github.com/folio-org/data-import-processing-core scm:git:git@github.com:folio-org/data-import-processing-core.git scm:git:git@github.com:folio-org/data-import-processing-core.git - v4.4.2 + v4.4.0 From 6f90c0760fdce220859c2d4067f949866ccfd2a0 Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Wed, 30 Apr 2025 16:08:29 +0200 Subject: [PATCH 16/19] MODDICORE-455: Disable dependency reduced pom in maven-shade-plugin https://folio-org.atlassian.net/browse/MODDICORE-455 The software library data-import-processing-core must ship with all required dependencies, therefore maven-shade-plugin must not create a dependency reduced pom.
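For context: the single value added to pom.xml below is maven-shade-plugin's createDependencyReducedPom parameter. Sketched as XML (surrounding elements abbreviated, so the actual pom.xml may differ slightly), the configuration presumably reads:

  <plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <configuration>
      <!-- keep the full pom.xml so consumers of this library see all dependencies -->
      <createDependencyReducedPom>false</createDependencyReducedPom>
      <outputFile>${project.build.directory}/${project.artifactId}-fat.jar</outputFile>
    </configuration>
  </plugin>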
(cherry picked from commit 01ec759d89146e0fcce378925422763219dede63) --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index be026501..bd3b2bba 100644 --- a/pom.xml +++ b/pom.xml @@ -358,6 +358,7 @@ + false ${project.build.directory}/${project.artifactId}-fat.jar From f0c1ebc1ff2e2f42a12f9884713016ed4764495a Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Tue, 6 May 2025 12:43:45 +0200 Subject: [PATCH 17/19] NEWS for 4.4.3 --- NEWS.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/NEWS.md b/NEWS.md index 83f10927..789acba6 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,6 @@ +## 2025-05-06 v4.4.3 +* [MODDICORE-455](https://folio-org.atlassian.net/browse/MODDICORE-455) Disable dependency reduced pom in maven-shade-plugin + ## 2025-04-29 v4.4.2 * [MODDICORE-454](https://folio-org.atlassian.net/browse/MODDICORE-454) Upgrade all dependencies to supported versions for Sunflower From b546db5242857da39b2590a90e1bd825186d6ca7 Mon Sep 17 00:00:00 2001 From: Julian Ladisch Date: Tue, 6 May 2025 12:44:59 +0200 Subject: [PATCH 18/19] [maven-release-plugin] prepare release v4.4.3 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index bd3b2bba..7a0d2752 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ org.folio data-import-processing-core - 4.4.3-SNAPSHOT + 4.4.3 jar data-import-processing-core @@ -185,7 +185,7 @@ https://github.com/folio-org/data-import-processing-core scm:git:git@github.com:folio-org/data-import-processing-core.git scm:git:git@github.com:folio-org/data-import-processing-core.git - v4.4.0 + v4.4.3 From 9508ec6120fce72993f5cd6a592b0fed13cc31c6 Mon Sep 17 00:00:00 2001 From: Olamide Kolawole Date: Mon, 29 Sep 2025 08:18:00 -0500 Subject: [PATCH 19/19] MODDICORE-457 Use index-friendly CQL for identifier matching The previous CQL query for matching instances by identifier was causing significant performance issues when there were many values to filter. The generated query (`identifiers="\"identifierTypeId\":\"...\""` AND ...) performed a string-based search on the entire JSONB object without using the index in some cases. 
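For example (values taken from the updated tests), matching the value ybp7406411 against identifier type 439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef previously generated:

  identifiers="\"identifierTypeId\":\"439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef\"" AND (identifiers="\"value\":\"ybp7406411\"")

The new builder instead emits one relation-modifier condition per incoming value, which PostgreSQL can answer from the GIN index; list values produce one such condition per value, joined with OR:

  identifiers =/@identifierTypeId=439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef "ybp7406411"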
--- .../loader/query/LoadQueryBuilder.java | 85 ++++- .../matching/loader/LoadQueryBuilderTest.java | 312 +++++++++++++++++- 2 files changed, 375 insertions(+), 22 deletions(-) diff --git a/src/main/java/org/folio/processing/matching/loader/query/LoadQueryBuilder.java b/src/main/java/org/folio/processing/matching/loader/query/LoadQueryBuilder.java index d3663ea4..e347e601 100644 --- a/src/main/java/org/folio/processing/matching/loader/query/LoadQueryBuilder.java +++ b/src/main/java/org/folio/processing/matching/loader/query/LoadQueryBuilder.java @@ -1,6 +1,9 @@ package org.folio.processing.matching.loader.query; +import io.vertx.core.json.Json; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.folio.MatchDetail; import org.folio.processing.value.StringValue; import org.folio.processing.value.Value; @@ -8,6 +11,7 @@ import org.folio.rest.jaxrs.model.Field; import org.folio.rest.jaxrs.model.MatchExpression; +import java.util.ArrayList; import java.util.List; import static org.folio.processing.value.Value.ValueType.DATE; @@ -23,11 +27,23 @@ public class LoadQueryBuilder { private LoadQueryBuilder() { } + private static final Logger LOGGER = LogManager.getLogger(LoadQueryBuilder.class); private static final String JSON_PATH_SEPARATOR = "."; private static final String IDENTIFIER_TYPE_ID = "identifierTypeId"; private static final String IDENTIFIER_TYPE_VALUE = "instance.identifiers[].value"; - private static final String IDENTIFIER_CQL_QUERY = "identifiers =/@value/@identifierTypeId=\"%s\" %s"; - private static final String WHERE_CLAUSE_CONSTRUCTOR_MATCH_CRITERION = "WHERE_CLAUSE_CONSTRUCTOR"; + /** + * CQL query template to find an instance by a specific identifier. + *

<p> + * This query leverages a relation modifier ({@code @}) to efficiently search within the 'identifiers' JSON array. + * <ul> + * <li>{@code @identifierTypeId=%s}: Filters array elements to only include those where the 'identifierTypeId' + * matches the first placeholder.</li> + * <li>{@code "%s"}: The search term (the identifier's value) is then matched against the 'value' subfield + * of the filtered elements.</li> + * </ul>
+ * This syntax allows PostgreSQL to use the GIN index on the field consistently, improving query performance. */ + private static final String IDENTIFIER_INDIVIDUAL_CQL_QUERY = "identifiers =/@identifierTypeId=%s \"%s\""; /** * Builds LoadQuery, @@ -39,13 +55,13 @@ private LoadQueryBuilder() { * @param matchDetail match detail * @return LoadQuery or null if query cannot be built */ - public static LoadQuery build(Value value, MatchDetail matchDetail) { + public static LoadQuery build(Value value, MatchDetail matchDetail) { if (value != null && (value.getType() == STRING || value.getType() == LIST || value.getType() == DATE)) { MatchExpression matchExpression = matchDetail.getExistingMatchExpression(); if (matchExpression != null && matchExpression.getDataValueType() == VALUE_FROM_RECORD) { List fields = matchExpression.getFields(); if (fields != null && !fields.isEmpty()) { - String fieldPath = fields.get(0).getValue(); + String fieldPath = fields.getFirst().getValue(); String tableName = StringUtils.substringBefore(fieldPath, JSON_PATH_SEPARATOR); String fieldName = StringUtils.substringAfter(fieldPath, JSON_PATH_SEPARATOR); QueryHolder mainQuery = new QueryHolder(value, matchDetail.getMatchCriterion()) @@ -61,13 +77,16 @@ public static LoadQuery build(Value value, MatchDetail matchDetail) { mainQuery.applyAdditionalCondition(additionalQuery); // TODO provide all the requirements for MODDATAIMP-592 and refactor code block below if(checkIfIdentifierTypeExists(matchDetail, fieldPath, additionalField.getLabel())) { - MatchingCondition matchingCondition = - MatchingCondition.valueOf(WHERE_CLAUSE_CONSTRUCTOR_MATCH_CRITERION); - String condition = matchingCondition.constructCqlQuery(value); - mainQuery.setCqlQuery(String.format(IDENTIFIER_CQL_QUERY, additionalField.getValue(), condition)); + String cqlQuery = buildIdentifierCqlQuery(value, additionalField.getValue(), matchDetail.getMatchCriterion()); + mainQuery.setCqlQuery(cqlQuery); mainQuery.setSqlQuery(StringUtils.EMPTY); + } else { + LOGGER.debug("LoadQueryBuilder::build - Additional field does not match identifier type criteria: {} fieldPath: {}", + additionalField.getLabel(), fieldPath); } } + LOGGER.debug(() -> String.format("LoadQueryBuilder::build - Built LoadQuery for VALUE: ~| %s |~ MATCHDETAIL: ~| %s |~ CQL: ~| %s |~", + Json.encode(value), Json.encode(matchDetail), mainQuery.getCqlQuery())); return new DefaultJsonLoadQuery(tableName, mainQuery.getSqlQuery(), mainQuery.getCqlQuery()); } } @@ -77,8 +96,54 @@ public static LoadQuery build(Value value, MatchDetail matchDetail) { private static boolean checkIfIdentifierTypeExists(MatchDetail matchDetail, String fieldPath, String additionalFieldPath) { return matchDetail.getIncomingRecordType() == EntityType.MARC_BIBLIOGRAPHIC && matchDetail.getExistingRecordType() == EntityType.INSTANCE && - matchDetail.getMatchCriterion() == MatchDetail.MatchCriterion.EXACTLY_MATCHES && fieldPath.equals(IDENTIFIER_TYPE_VALUE) && - additionalFieldPath.equals(IDENTIFIER_TYPE_ID); + (matchDetail.getMatchCriterion() == MatchDetail.MatchCriterion.EXACTLY_MATCHES || + matchDetail.getMatchCriterion() == MatchDetail.MatchCriterion.EXISTING_VALUE_CONTAINS_INCOMING_VALUE) && + fieldPath.equals(IDENTIFIER_TYPE_VALUE) && additionalFieldPath.equals(IDENTIFIER_TYPE_ID); + } + + /** + * Builds CQL query for identifier matching with an individual condition for each value, joined with OR + * + * @param value the value to match against (can be STRING or LIST) + * @param identifierTypeId the identifier type ID + * @param matchCriterion the match criterion to determine if wildcards should be applied + * @return CQL query string with individual conditions joined with OR + */ + private static String buildIdentifierCqlQuery(Value value, String identifierTypeId, MatchDetail.MatchCriterion matchCriterion) { + if (value.getType() == STRING) { + String escapedValue = escapeCqlValue(value.getValue().toString()); + if (matchCriterion == MatchDetail.MatchCriterion.EXISTING_VALUE_CONTAINS_INCOMING_VALUE) { + escapedValue = "*" + escapedValue + "*"; + } + return String.format(IDENTIFIER_INDIVIDUAL_CQL_QUERY, identifierTypeId, escapedValue); + } else if (value.getType() == LIST) { + List conditions = new ArrayList<>(); + for (Object val : ((org.folio.processing.value.ListValue) value).getValue()) { + String escapedValue = escapeCqlValue(val.toString()); + if (matchCriterion == MatchDetail.MatchCriterion.EXISTING_VALUE_CONTAINS_INCOMING_VALUE) { + escapedValue = "*" + escapedValue + "*"; + } + conditions.add(String.format(IDENTIFIER_INDIVIDUAL_CQL_QUERY, identifierTypeId, escapedValue)); + } + return String.join(" OR ", conditions); + } + return ""; + } + + /** + * Escapes special characters in CQL values to prevent parsing errors + * + * @param value the value to escape + * @return escaped value safe for CQL queries + */ + private static String escapeCqlValue(String value) { + // Escape backslashes first, then other special characters + return value.replace("\\", "\\\\") + .replace("\"", "\\\"") + .replace("(", "\\(") + .replace(")", "\\)") + .replace("*", "\\*") + .replace("?", "\\?"); } } diff --git a/src/test/java/org/folio/processing/matching/loader/LoadQueryBuilderTest.java b/src/test/java/org/folio/processing/matching/loader/LoadQueryBuilderTest.java index 6ddf57aa..b01a2d99 100644 --- a/src/test/java/org/folio/processing/matching/loader/LoadQueryBuilderTest.java +++ b/src/test/java/org/folio/processing/matching/loader/LoadQueryBuilderTest.java @@ -70,6 +70,8 @@ public void shouldBuildQueryWhere_ExistingValueExactlyMatches_MultipleIncomingSt StringValue value = StringValue.of("ybp7406411"); MatchDetail matchDetail = new MatchDetail() .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) .withExistingMatchExpression(new MatchExpression() .withDataValueType(VALUE_FROM_RECORD) .withFields(Arrays.asList( @@ -80,11 +82,9 @@ LoadQuery result = LoadQueryBuilder.build(value, matchDetail); //then assertNotNull(result); - assertNotNull(result.getSql()); - String expectedSQLQuery = format("CROSS JOIN LATERAL jsonb_array_elements(instance.jsonb -> 'identifiers') fields(field) WHERE field ->> 'value' = 'ybp7406411' AND field ->> 'identifierTypeId' = '439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef'", value.getValue()); - assertEquals(expectedSQLQuery, result.getSql()); + assertEquals(StringUtils.EMPTY, result.getSql()); assertNotNull(result.getCql()); - String expectedCQLQuery = format("identifiers=\"\\\"identifierTypeId\\\":\\\"439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef\\\"\" AND (identifiers=\"\\\"value\\\":\\\"ybp7406411\\\"\")", value.getValue()); + String expectedCQLQuery = "identifiers =/@identifierTypeId=439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef \"ybp7406411\""; assertEquals(expectedCQLQuery, result.getCql()); } @@ -94,6 +94,8 @@ public void shouldBuildQueryWhere_ExistingValueExactlyMatches_MultipleIncomingLi ListValue value = ListValue.of(Arrays.asList("ybp7406411", "ybp74064123"));
MatchDetail matchDetail = new MatchDetail() .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) .withExistingMatchExpression(new MatchExpression() .withDataValueType(VALUE_FROM_RECORD) .withFields(Arrays.asList( @@ -104,11 +106,9 @@ public void shouldBuildQueryWhere_ExistingValueExactlyMatches_MultipleIncomingLi LoadQuery result = LoadQueryBuilder.build(value, matchDetail); //then assertNotNull(result); - assertNotNull(result.getSql()); - String expectedSQLQuery = format("CROSS JOIN LATERAL jsonb_array_elements(instance.jsonb -> 'identifiers') fields(field) WHERE (field ->> 'value' = 'ybp7406411' OR field ->> 'value' = 'ybp74064123') AND field ->> 'identifierTypeId' = '439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef'", value.getValue()); - assertEquals(expectedSQLQuery, result.getSql()); + assertEquals(StringUtils.EMPTY, result.getSql()); assertNotNull(result.getCql()); - String expectedCQLQuery = format("identifiers=\"\\\"identifierTypeId\\\":\\\"439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef\\\"\" AND (identifiers=\"\\\"value\\\":\\\"ybp7406411\\\"\" OR identifiers=\"\\\"value\\\":\\\"ybp74064123\\\"\")", value.getValue()); + String expectedCQLQuery = "identifiers =/@identifierTypeId=439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef \"ybp7406411\" OR identifiers =/@identifierTypeId=439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef \"ybp74064123\""; assertEquals(expectedCQLQuery, result.getCql()); } @@ -490,7 +490,7 @@ public void shouldBuildQuery_ExistingValueBeginsWith_IncomingListValue_WithQuali @Test public void shouldReturnNullIfPassedNullValue() { // given - Value value = null; + Value value = null; MatchDetail matchDetail = new MatchDetail() .withMatchCriterion(EXACTLY_MATCHES) .withExistingMatchExpression(new MatchExpression() @@ -507,7 +507,7 @@ public void shouldReturnNullIfPassedNullValue() { @Test public void shouldReturnNullIfPassedMissingValue() { // given - Value value = MissingValue.getInstance(); + Value value = MissingValue.getInstance(); MatchDetail matchDetail = new MatchDetail() .withMatchCriterion(EXACTLY_MATCHES) .withExistingMatchExpression(new MatchExpression() @@ -524,7 +524,7 @@ public void shouldReturnNullIfPassedMissingValue() { @Test public void shouldReturnNullIfMatchingByExistingStaticValue() { // given - Value value = MissingValue.getInstance(); + Value value = MissingValue.getInstance(); MatchDetail matchDetail = new MatchDetail() .withMatchCriterion(EXACTLY_MATCHES) .withExistingMatchExpression(new MatchExpression() @@ -691,8 +691,296 @@ public void shouldBuildQueryWhere_ExistingValueExactlyMatches_MultipleIncomingLi assertNotEquals(expectedSQLQuery, wrongResult.getSql()); assertNotNull(result.getCql()); assertNotNull(wrongResult.getCql()); - String expectedCQLQuery = format("identifiers =/@value/@identifierTypeId=\"%s\" \"%s\"",identifierTypeFieldValue, value.getValue()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"%s\"",identifierTypeFieldValue, value.getValue()); assertEquals(expectedCQLQuery, result.getCql()); assertNotEquals(expectedCQLQuery, wrongResult.getCql()); } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithParenthesesInValue() { + // given + StringValue value = StringValue.of("(OCoLC)1024095011"); + String identifierTypeFieldValue = "7e591197-f335-4afb-bc6d-a6d76ca3bace"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + 
.withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"\\(OCoLC\\)1024095011\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithQuotesInValue() { + // given + StringValue value = StringValue.of("test\"quote\"value"); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"test\\\"quote\\\"value\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithBackslashesInValue() { + // given + StringValue value = StringValue.of("path\\to\\resource"); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"path\\\\to\\\\resource\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithWildcardsInValue() { + // given + StringValue value = StringValue.of("test*value?"); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); 
+ //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"test\\*value\\?\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithMultipleSpecialCharacters() { + // given + StringValue value = StringValue.of("(test*)\\query?"); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"\\(test\\*\\)\\\\query\\?\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_ListWithSpecialCharacters() { + // given + ListValue value = ListValue.of(Arrays.asList("(OCoLC)123", "test*value", "path\\file")); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"\\(OCoLC\\)123\" OR identifiers =/@identifierTypeId=%s \"test\\*value\" OR identifiers =/@identifierTypeId=%s \"path\\\\file\"", identifierTypeFieldValue, identifierTypeFieldValue, identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithApostropheInValue() { + // given + StringValue value = StringValue.of("O'Reilly's Book"); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + // Apostrophes don't need escaping in CQL + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s 
\"O'Reilly's Book\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_EmptyValue() { + // given + StringValue value = StringValue.of(""); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_PreEscapedValue() { + // given + StringValue value = StringValue.of("already\\\\escaped"); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + // Should double-escape the already escaped backslashes + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"already\\\\\\\\escaped\"", identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_RealWorldExampleFromProblem() { + // Test the exact values from problem.md + ListValue value = ListValue.of(Arrays.asList( + "(CStRLIN)NYCX1604275S", + "(NIC)notisABP6388", + "366832", + "(OCoLC)1604275" + )); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXACTLY_MATCHES) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + //when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + //then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"\\(CStRLIN\\)NYCX1604275S\" OR identifiers =/@identifierTypeId=%s \"\\(NIC\\)notisABP6388\" OR identifiers =/@identifierTypeId=%s \"366832\" OR identifiers =/@identifierTypeId=%s \"\\(OCoLC\\)1604275\"", identifierTypeFieldValue, identifierTypeFieldValue, identifierTypeFieldValue, 
identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + + @Test + public void shouldBuildQueryWhere_IdentifierMatching_WithListValue_ContainsCriterion() { + // given + ListValue value = ListValue.of(Arrays.asList( + "(OCoLC)1349275037", + "9924655804502931", + "in00022912564" + )); + String identifierTypeFieldValue = "439bfbae-75bc-4f74-9fc7-b2a2d47ce3ef"; + MatchDetail matchDetail = new MatchDetail() + .withMatchCriterion(EXISTING_VALUE_CONTAINS_INCOMING_VALUE) + .withIncomingRecordType(EntityType.MARC_BIBLIOGRAPHIC) + .withExistingRecordType(EntityType.INSTANCE) + .withIncomingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("035"), + new Field().withLabel("indicator1").withValue(""), + new Field().withLabel("indicator2").withValue(""), + new Field().withLabel("recordSubfield").withValue("a")) + )) + .withExistingMatchExpression(new MatchExpression() + .withDataValueType(VALUE_FROM_RECORD) + .withFields(Arrays.asList( + new Field().withLabel("field").withValue("instance.identifiers[].value"), + new Field().withLabel("identifierTypeId").withValue(identifierTypeFieldValue)) + )); + + // when + LoadQuery result = LoadQueryBuilder.build(value, matchDetail); + + // then + assertNotNull(result); + assertEquals(StringUtils.EMPTY, result.getSql()); + // For EXISTING_VALUE_CONTAINS_INCOMING_VALUE with identifiers, the CQL should use wildcard matching + String expectedCQLQuery = format("identifiers =/@identifierTypeId=%s \"*\\(OCoLC\\)1349275037*\" OR identifiers =/@identifierTypeId=%s \"*9924655804502931*\" OR identifiers =/@identifierTypeId=%s \"*in00022912564*\"", + identifierTypeFieldValue, identifierTypeFieldValue, identifierTypeFieldValue); + assertEquals(expectedCQLQuery, result.getCql()); + } + }