
Commit

More logging.
janvanmansum committed Feb 3, 2025
1 parent 3c7e625 commit 9d278eb
Showing 5 changed files with 61 additions and 58 deletions.
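The common thread across all five files is that every log message is now prefixed with the deposit ID through an SLF4J "{}" placeholder instead of being written without context. The standalone sketch below illustrates that convention; it assumes slf4j-api plus a binding such as slf4j-simple on the classpath, and all class, field, and method names are illustrative rather than copied from the repository (the real log and depositId fields are declared outside the hunks shown here).

import java.util.UUID;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Minimal sketch of the logging convention introduced by this commit: every
// message starts with "[<depositId>]" via an SLF4J "{}" placeholder, so lines
// from concurrently processed deposits can be traced back to one deposit.
// All names here are illustrative, not taken from the repository.
public class DepositLoggingSketch {
    private static final Logger log = LoggerFactory.getLogger(DepositLoggingSketch.class);

    private final UUID depositId;

    public DepositLoggingSketch(UUID depositId) {
        this.depositId = depositId;
    }

    public void process(String bagName) {
        log.info("[{}] START processing bag: {}", depositId, bagName);
        // ... the actual ingest work would happen here ...
        log.info("[{}] END processing bag: {}", depositId, bagName);
    }

    public static void main(String[] args) {
        new DepositLoggingSketch(UUID.randomUUID()).process("example-bag");
    }
}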
14 changes: 7 additions & 7 deletions src/main/java/nl/knaw/dans/dvingest/core/DepositTask.java
@@ -55,39 +55,39 @@ public void run() {
try {
deposit.validate();
if (deposit.convertDansDepositIfNeeded() && onlyConvertDansDeposit) {
log.info("Only converting DANS deposit, LEAVING CONVERTED DEPOSIT IN PLACE");
log.info("[{}] Only converting DANS deposit, LEAVING CONVERTED DEPOSIT IN PLACE", deposit.getId());
return;
}
pid = deposit.getUpdatesDataset();

for (DataverseIngestBag bag : deposit.getBags()) {
log.info("START processing deposit / bag: {} / {}", deposit.getId(), bag);
log.info("[{}] START processing bag: {}", deposit.getId(), bag);
pid = bagProcessorFactory.createBagProcessor(deposit.getId(), bag).run(pid);
log.info("END processing deposit / bag: {} / {}", deposit.getId(), bag);
log.info("[{}] END processing bag: {}", deposit.getId(), bag);
}
deposit.onSuccess(pid, "Deposit processed successfully");
deposit.moveTo(outputDir.resolve("processed"));
}
catch (RejectedDepositException e) {
try {
log.error("Deposit rejected", e);
log.error("[{}] Deposit rejected: {}", deposit.getId(), e.getMessage());
deposit.onRejected(pid, e.getMessage());
deposit.moveTo(outputDir.resolve("rejected"));
status = Status.REJECTED;
}
catch (Exception e2) {
log.error("Failed to move deposit to rejected directory", e2);
log.error("[{}] Failed to move deposit to rejected directory", deposit.getId(), e2);
}
}
catch (Exception e) {
try {
log.error("Failed to ingest deposit", e);
log.error("[{}] Failed to ingest deposit", deposit.getId(), e);
deposit.onFailed(pid, e.getMessage());
deposit.moveTo(outputDir.resolve("failed"));
status = Status.FAILED;
}
catch (IOException ioException) {
log.error("Failed to move deposit to failed directory", ioException);
log.error("[{}] Failed to move deposit to failed directory", deposit.getId(), ioException);
}
}
}
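One detail worth noting in the rewritten error calls above: lines such as log.error("[{}] Failed to ingest deposit", deposit.getId(), e) pass two arguments for a single placeholder. SLF4J treats a trailing Throwable that has no placeholder of its own as the exception to log, so the deposit-ID prefix and the full stack trace both appear. A small sketch with an illustrative deposit ID, again assuming an SLF4J binding on the classpath:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Demonstrates the SLF4J rule DepositTask relies on: with one "{}" placeholder
// and two trailing arguments, the first argument fills the placeholder and the
// final Throwable is logged together with its stack trace.
public class ThrowableLoggingSketch {
    private static final Logger log = LoggerFactory.getLogger(ThrowableLoggingSketch.class);

    public static void main(String[] args) {
        String depositId = "0f1e2d3c";                          // illustrative value
        Exception e = new IllegalStateException("ingest failed");

        log.error("[{}] Failed to ingest deposit", depositId, e);
    }
}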
@@ -62,7 +62,7 @@ public String createDatasetVersion(String targetPid) throws IOException, Dataver
}

private void checkExpectations(@NonNull Expect expect, String targetPid) throws DataverseException, IOException {
log.debug("Start checking expectations for deposit {}", depositId);
log.debug("[{}] Start checking expectations", depositId);
if (expect.getState() != null && targetPid != null && !initLog.getExpect().getState().isCompleted()) {
checkDatasetState(expect, targetPid);
}
@@ -83,13 +83,13 @@ private void checkExpectations(@NonNull Expect expect, String targetPid) throws
else {
initLog.getExpect().getDatasetRoleAssignment().setCompleted(true);
}
log.debug("End checking expectations for deposit {}", depositId);
log.debug("[{}] End checking expectations", depositId);
}

private void checkDatasetState(@NonNull Expect expect, String targetPid) throws DataverseException, IOException {
var actualState = dataverseService.getDatasetState(targetPid);
if (expect.getState().name().equalsIgnoreCase(actualState)) {
log.debug("Expected state {} found for dataset {}", expect.getState(), targetPid);
log.debug("[{}] Expected state {} found for dataset {}", depositId, expect.getState(), targetPid);
initLog.getExpect().getState().setCompleted(true);
}
else {
@@ -100,7 +100,7 @@ private void checkDatasetState(@NonNull Expect expect, String targetPid) throws
private void checkDataverseRoleAssignment(@NonNull Expect expect) throws DataverseException, IOException {
var actualRoleAssignments = dataverseService.getRoleAssignmentsOnDataverse("root");
if (contains(actualRoleAssignments, expect.getDataverseRoleAssignment(), true)) {
log.debug("Expected role assignment found for dataverse root");
log.debug("[{}] Expected role assignment found for dataverse root", depositId);
initLog.getExpect().getDataverseRoleAssignment().setCompleted(true);
}
else {
@@ -112,7 +112,7 @@ private void checkDataverseRoleAssignment(@NonNull Expect expect) throws Dataver
private void checkDatasetRoleAssignment(@NonNull Expect expect, String targetPid) throws DataverseException, IOException {
var actualRoleAssignments = dataverseService.getRoleAssignmentsOnDataset(targetPid);
if (contains(actualRoleAssignments, expect.getDatasetRoleAssignment(), false)) {
log.debug("Expected role assignment found for dataset {}", targetPid);
log.debug("[{}] Expected role assignment found for dataset {}", depositId, targetPid);
initLog.getExpect().getDatasetRoleAssignment().setCompleted(true);
}
else {
@@ -136,7 +136,7 @@ private boolean roleCorrect(RoleAssignmentReadOnly ra, RoleAssignment expectedRo

private String createDatasetIfNeeded(String targetPid) throws IOException, DataverseException {
if (initLog.getCreate().isCompleted()) {
log.debug("Create task already completed for deposit {}", depositId);
log.debug("[{}] Dataset {} already created.", depositId, initLog.getTargetPid());
return initLog.getTargetPid();
}

@@ -148,7 +148,7 @@ private String createDatasetIfNeeded(String targetPid) throws IOException, Datav
pid = createOrImportDataset();
}
else {
log.debug("Target PID provided, dataset does not need to be created for deposit {}", depositId);
log.debug("[{}] Target PID provided; dataset {} does not need to be created.", depositId, targetPid);
pid = targetPid;

}
@@ -167,22 +167,22 @@ private String getImportPid() {
}

private @NonNull String importDataset(String pid) throws IOException, DataverseException {
log.debug("Start importing dataset for deposit {}", depositId);
log.debug("[{}] Start importing dataset with pid {}", depositId, pid);
dataverseService.importDataset(pid, dataset);
log.debug("End importing dataset for deposit {}", depositId);
log.debug("[{}] End importing dataset with pid {}", depositId, pid);
return pid;
}

private @NonNull String createDataset() throws IOException, DataverseException {
log.debug("Start creating dataset for deposit {}", depositId);
log.debug("[{}] Start creating dataset", depositId);
var pid = dataverseService.createDataset(dataset);
log.debug("End creating dataset for deposit {}", depositId);
log.debug("[{}] End creating dataset; assigned pid is {}", depositId, pid);
return pid;
}

private void updateDataset(String pid) throws IOException, DataverseException {
log.debug("Start updating dataset for deposit {}", depositId);
log.debug("[{}] Start updating dataset metadata.", depositId);
dataverseService.updateMetadata(pid, dataset.getDatasetVersion());
log.debug("End updating dataset for deposit {}", depositId);
log.debug("[{}] End updating dataset metadata.", depositId);
}
}
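The messages in this file are shaped by an init log that records which steps already ran (for example initLog.getCreate().isCompleted()), so a re-run logs "already created" instead of repeating the Dataverse call. The sketch below shows that checkpoint idea in isolation; CreateLog, the hard-coded PID, and the use of System.out (to keep the sketch dependency-free) are stand-ins, not the repository's real types.

// Sketch of the checkpoint pattern behind "[{}] Dataset {} already created.":
// a step consults a completion flag before running and records its result, so a
// restarted deposit skips the remote call and only logs what it skipped.
public class CreateStepSketch {

    static class CreateLog {
        boolean completed;
        String targetPid;
    }

    private final CreateLog createLog = new CreateLog();

    String createDatasetIfNeeded(String depositId) {
        if (createLog.completed) {
            System.out.printf("[%s] Dataset %s already created.%n", depositId, createLog.targetPid);
            return createLog.targetPid;
        }
        String pid = "doi:10.5072/FK2/EXAMPLE";  // stand-in for dataverseService.createDataset(...)
        System.out.printf("[%s] End creating dataset; assigned pid is %s%n", depositId, pid);
        createLog.targetPid = pid;
        createLog.completed = true;
        return pid;
    }

    public static void main(String[] args) {
        CreateStepSketch sketch = new CreateStepSketch();
        sketch.createDatasetIfNeeded("deposit-1");  // creates and logs the new pid
        sketch.createDatasetIfNeeded("deposit-1");  // only logs "already created"
    }
}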
@@ -38,68 +38,67 @@ public class MetadataEditor {
public void editMetadata(String pid) throws IOException, DataverseException {
this.pid = pid;
if (editMetadata == null) {
log.debug("No metadata found. Skipping metadata update.");
log.debug("[{}] No metadata to edit. Skipping metadata editing.", depositId);
editMetadataLog.completeAll();
return;
}
log.debug("Start updating metadata for deposit {}", depositId);
log.debug("[{}] Start editing metadata.", depositId);
addFieldValues();
replaceFieldValues();
deleteFieldValues();
log.debug("End updating metadata for deposit {}", depositId);
log.debug("[{}] End editing metadata.", depositId);
}

private void deleteFieldValues() throws IOException, DataverseException {
if (editMetadataLog.getDeleteFieldValues().isCompleted()) {
log.debug("Deletion of field values already completed. Skipping deletion.");
log.debug("[{}] Deletion of field values already completed.", depositId);
return;
}
if (editMetadata.getDeleteFieldValues().isEmpty()) {
log.debug("No field values to delete. Skipping deletion.");
log.debug("[{}] No field values to delete.", depositId);
}
else {
log.debug("Start deleting {} field values for deposit {}", depositId, editMetadata.getDeleteFieldValues().size());
log.debug("[{}] Start deleting {} field values.", depositId, editMetadata.getDeleteFieldValues().size());
for (var fieldValue : editMetadata.getDeleteFieldValues()) {
log.debug("Deleting field value: {}", fieldValue);
log.debug("[{}] Deleting field value: {}", depositId, fieldValue.getTypeName());
dataverseService.deleteDatasetMetadata(pid, editMetadata.getDeleteFieldValues());
}
log.debug("End deleting field values for deposit {}", depositId);
log.debug("[{}] End deleting field values.", depositId);
}
editMetadataLog.getDeleteFieldValues().setCompleted(true);
}

private void addFieldValues() throws IOException, DataverseException {
if (editMetadataLog.getAddFieldValues().isCompleted()) {
log.debug("Addition of field values already completed. Skipping addition.");
log.debug("[{}] Addition of field values already completed.", depositId);
return;
}
if (editMetadata.getAddFieldValues().isEmpty()) {
log.debug("No field values to add. Skipping addition.");
log.debug("[{}] No field values to add.", depositId);
}
else {
log.debug("Start adding {} field values for deposit {}", depositId, editMetadata.getAddFieldValues().size());
log.debug("[{}] Start adding {} field values.", depositId, editMetadata.getAddFieldValues().size());
for (var fieldValue : editMetadata.getAddFieldValues()) {
log.debug("Adding field value: {}", fieldValue);
log.debug("[{}] Adding field value: {}", depositId, fieldValue.getTypeName());
dataverseService.editMetadata(pid, editMetadata.getAddFieldValues(), false);
}
log.debug("End adding field values for deposit {}", depositId);
log.debug("[{}] End adding field values.", depositId);
}
editMetadataLog.getAddFieldValues().setCompleted(true);
}

private void replaceFieldValues() throws IOException, DataverseException {
if (editMetadataLog.getReplaceFieldValues().isCompleted()) {
log.debug("Replacement of field values already completed. Skipping replacement.");
log.debug("[{}] Replacement of field values already completed.", depositId);
return;
}
if (editMetadata.getReplaceFieldValues().isEmpty()) {
log.debug("No field values to replace. Skipping replacement.");
log.debug("[{}] No field values to replace.", depositId);
}
else {

log.debug("Start replacing {} field values for deposit {}", depositId, editMetadata.getReplaceFieldValues().size());
log.debug("Start replacing field values for deposit {}", depositId);
for (var fieldValue : editMetadata.getReplaceFieldValues()) {
log.debug("Replacing field value: {}", fieldValue);
log.debug("Replacing field value: {}", fieldValue.getTypeName());
dataverseService.editMetadata(pid, editMetadata.getReplaceFieldValues(), true);
}
log.debug("End replacing field values for deposit {}", depositId);
@@ -37,62 +37,62 @@ public class PermissionsEditor {

public void editPermissions(String pid) throws IOException, DataverseException {
if (editPermissions == null) {
log.debug("No permissions to edit for deposit {}", depositId);
log.debug("[{}] No permissions to edit.", depositId);
return;
}

this.pid = pid;
log.debug("Start updating permissions for deposit {}", depositId);
log.debug("[{}] Start updating permissions.", depositId);
deleteRoleAssignments();
addRoleAssignments();
log.debug("End updating permissions for deposit {}", depositId);
log.debug("[{}] End updating permissions.", depositId);
}

private void addRoleAssignments() throws IOException, DataverseException {
if (editPermissionsLog.getAddRoleAssignments().isCompleted()) {
log.debug("Adding of role assignments already completed. Skipping addition.");
log.debug("[{}] Addition of role assignments already completed.", depositId);
return;
}
if (editPermissions.getAddRoleAssignments().isEmpty()) {
log.debug("No role assignments to add. Skipping addition.");
log.debug("[{}] No role assignments to add.", depositId);
}
else {
log.debug("Start adding {} role assignments for deposit {}", depositId, editPermissions.getAddRoleAssignments().size());
log.debug("[{}] Start adding {} role assignments.", depositId, editPermissions.getAddRoleAssignments().size());
int numberCompleted = editPermissionsLog.getAddRoleAssignments().getNumberCompleted();
if (numberCompleted > 0) {
log.debug("Resuming adding role assignments from index {}", numberCompleted);
log.debug("[{}] Resuming adding role assignments from index {}.", depositId, numberCompleted);
}
for (int i = numberCompleted; i < editPermissions.getAddRoleAssignments().size(); i++) {
var roleAssignment = editPermissions.getAddRoleAssignments().get(i);
log.debug("Adding role assignment: {}", roleAssignment);
log.debug("[{}] Adding role assignment: {}={}", depositId, roleAssignment.getAssignee(), roleAssignment.getRole());
dataverseService.addRoleAssignment(pid, roleAssignment);
editPermissionsLog.getAddRoleAssignments().setNumberCompleted(i + 1);
}
log.debug("End adding role assignments for deposit {}", depositId);
log.debug("[{}] End adding role assignments.", depositId);
}
editPermissionsLog.getAddRoleAssignments().setCompleted(true);
}

private void deleteRoleAssignments() throws IOException, DataverseException {
if (editPermissionsLog.getDeleteRoleAssignments().isCompleted()) {
log.debug("Deletion of role assignments already completed. Skipping deletion.");
log.debug("[{}] Deletion of role assignments already completed.", depositId);
return;
}
if (editPermissions.getDeleteRoleAssignments().isEmpty()) {
log.debug("No role assignments to delete. Skipping deletion.");
log.debug("[{}] No role assignments to delete.", depositId);
}
else {
log.debug("Start deleting {} role assignments for deposit {}", depositId, editPermissions.getDeleteRoleAssignments().size());
log.debug("[{}] Start deleting {} role assignments.", depositId, editPermissions.getDeleteRoleAssignments().size());
int numberCompleted = editPermissionsLog.getDeleteRoleAssignments().getNumberCompleted();
if (numberCompleted > 0) {
log.debug("Resuming deleting role assignments from index {}", numberCompleted);
log.debug("[{}] Resuming deleting role assignments from index {}.", depositId, numberCompleted);
}
for (int i = numberCompleted; i < editPermissions.getDeleteRoleAssignments().size(); i++) {
var roleAssignment = editPermissions.getDeleteRoleAssignments().get(i);
log.debug("Deleting role assignment: {}", roleAssignment);
log.debug("[{}] Deleting role assignment: {}={}", depositId, roleAssignment.getAssignee(), roleAssignment.getRole());
dataverseService.deleteRoleAssignment(pid, roleAssignment);
}
log.debug("End deleting role assignments for deposit {}", depositId);
log.debug("[{}] End deleting role assignments.", depositId);
}
editPermissionsLog.getDeleteRoleAssignments().setCompleted(true);
}
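The "Resuming adding role assignments from index {}" message and its deletion counterpart come from loops that persist per-item progress in numberCompleted, so an interrupted deposit picks up where it stopped instead of re-adding earlier assignments. A self-contained sketch of that resume-from-index loop; StepLog, the assignment strings, and the commented-out service call are illustrative stand-ins.

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of the resumable loop referred to by "Resuming adding role assignments
// from index {}": progress is recorded as a count of completed items, so a
// re-run starts where the previous run stopped.
public class ResumableLoopSketch {
    private static final Logger log = LoggerFactory.getLogger(ResumableLoopSketch.class);

    static class StepLog {
        private int numberCompleted;
        private boolean completed;
        int getNumberCompleted() { return numberCompleted; }
        void setNumberCompleted(int n) { this.numberCompleted = n; }
        void setCompleted(boolean c) { this.completed = c; }
    }

    void addAll(String depositId, List<String> roleAssignments, StepLog stepLog) {
        int start = stepLog.getNumberCompleted();
        if (start > 0) {
            log.debug("[{}] Resuming adding role assignments from index {}.", depositId, start);
        }
        for (int i = start; i < roleAssignments.size(); i++) {
            log.debug("[{}] Adding role assignment: {}", depositId, roleAssignments.get(i));
            // dataverseService.addRoleAssignment(pid, roleAssignment) would be called here
            stepLog.setNumberCompleted(i + 1);  // persist progress after each item
        }
        stepLog.setCompleted(true);
    }

    public static void main(String[] args) {
        new ResumableLoopSketch().addAll("deposit-1",
            List.of("@user1=contributor", "@user2=curator"), new StepLog());
    }
}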
@@ -42,7 +42,7 @@ public class StateUpdater {

public void updateState(String pid, int numberOfFilesInDataset) throws DataverseException, IOException {
if (updateStateLog.isCompleted()) {
log.debug("Update action already completed. Skipping update.");
log.debug("[{}] State already updated.", depositId);
return;
}

@@ -59,16 +59,20 @@ else if (updateAction instanceof ReleaseMigratedAction) {
}

private void publishVersion(UpdateType updateType) throws DataverseException, IOException {
log.debug("Start publishing version for deposit {}", depositId);
log.debug("[{}] Start publishing version; dataset = {}; updateType = {}", depositId, pid, updateType);
dataverseService.publishDataset(pid, updateType);
log.debug("[{}] Waiting for dataset to reach released state; dataset = {}", depositId, pid);
dataverseService.waitForReleasedState(pid, numberOfFilesInDataset);
log.debug("End publishing version for deposit {}", depositId);
log.debug("[{}] Dataset reached released state; dataset = {}", depositId, pid);
log.debug("[{}] End publishing version; dataset = {}; updateType = {}", depositId, pid, updateType);
}

public void releaseMigrated(String date) throws DataverseException, IOException {
log.debug("Start releasing migrated version for deposit {}", depositId);
log.debug("[{}] Start releasing migrated dataset with pid and date: {} {}", depositId, pid, date);
dataverseService.releaseMigratedDataset(pid, date);
log.debug("[{}] Waiting for dataset to reach released state; dataset = {}", depositId, pid);
dataverseService.waitForReleasedState(pid, numberOfFilesInDataset);
log.debug("End releasing migrated version for deposit {}", depositId);
log.debug("[{}] Dataset reached released state; dataset = {}", depositId, pid);
log.debug("[{}] End releasing migrated dataset with pid and date: {} {}", depositId, pid, date);
}
}
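Both publish paths now log an explicit "Waiting for dataset to reach released state" line before dataverseService.waitForReleasedState(pid, numberOfFilesInDataset) and a confirmation afterwards. The implementation of that wait is not part of this diff; the sketch below is only a generic poll-until-state loop to show the kind of step those messages bracket.

import java.time.Duration;
import java.util.function.Supplier;

// Generic polling sketch for a "wait until the dataset is released" step.
// This is not the repository's waitForReleasedState implementation, only an
// illustration of polling a state source until it reports the expected value.
public class WaitForStateSketch {

    static void waitForState(Supplier<String> currentState, String expectedState,
            Duration pollInterval, int maxTries) throws InterruptedException {
        for (int attempt = 1; attempt <= maxTries; attempt++) {
            if (expectedState.equals(currentState.get())) {
                return;
            }
            Thread.sleep(pollInterval.toMillis());
        }
        throw new IllegalStateException("Dataset did not reach state " + expectedState);
    }

    public static void main(String[] args) throws InterruptedException {
        // Toy state source that becomes RELEASED after a few polls.
        int[] calls = { 0 };
        waitForState(() -> ++calls[0] < 3 ? "DRAFT" : "RELEASED", "RELEASED",
            Duration.ofMillis(100), 10);
        System.out.println("released");
    }
}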
