From 6eae5e4fec9be0435a91921881e0a64fab46dffd Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 15:12:32 -0500
Subject: [PATCH 001/366] implement batch processing of new versions to archive
---
.../dataverse/DatasetVersionServiceBean.java | 26 ++++++++-
.../edu/harvard/iq/dataverse/api/Admin.java | 57 +++++++++++++++++++
2 files changed, 82 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index e4eb6aac88e..ea6a05a2c3c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -987,7 +987,7 @@ public List<HashMap<String, Object>> getBasicDatasetVersionInfo(Dataset dataset)
- public HashMap getFileMetadataHistory(DataFile df){
+ public HashMap, ?> getFileMetadataHistory(DataFile df){
if (df == null){
throw new NullPointerException("DataFile 'df' cannot be null");
@@ -1165,4 +1165,28 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion)
return null;
}
+ /**
+ * Execute a query to return all DatasetVersions that have been released
+ * but not yet archived
+ *
+ * @return the list of unarchived released versions, or null if none are found
+ */
+ public List<DatasetVersion> getUnarchivedDatasetVersions(){
+
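+ // Find all published versions (those with a release time) that do not yet have an archival copy location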
+ String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;";
+
+ try{
+ TypedQuery<DatasetVersion> query = em.createQuery(queryString, DatasetVersion.class);
+ List<DatasetVersion> dsl = query.getResultList();
+ return dsl;
+
+ } catch (javax.persistence.NoResultException e) {
+ logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString);
+ return null;
+ } catch (EJBException e) {
+ logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage());
+ return null;
+ }
+ } // end getUnarchivedDatasetVersions
+
} // end class
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index b52665a7747..81fe1ecd2a9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -1722,6 +1722,63 @@ public void run() {
}
}
+
+ @GET
+ @Path("/archiveAllUnarchivedDataVersions")
+ public Response archiveAllUnarchivedDatasetVersions() {
+
+ try {
+ AuthenticatedUser au = findAuthenticatedUserOrDie();
+ // Note - the user is being set in the session so it becomes part of the
+ // DataverseRequest and is sent to the back-end command where it is used to get
+ // the API Token which is then used to retrieve files (e.g. via S3 direct
+ // downloads) to create the Bag
+ session.setUser(au);
+ List<DatasetVersion> dsl = datasetversionService.getUnarchivedDatasetVersions();
+ if (dsl != null) {
+ String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName);
+ AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0));
+
+ if (cmd != null) {
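+ // Run the submissions in a background thread so this API call can return immediately; progress and results are reported via the server log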
+ new Thread(new Runnable() {
+ public void run() {
+ int total = dsl.size();
+ int successes = 0;
+ int failures = 0;
+ for (DatasetVersion dv : dsl) {
+ try {
+ AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv);
+
+ dv = commandEngine.submit(cmd);
+ if (dv.getArchivalCopyLocation() != null) {
+ successes++;
+ logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: "
+ + dv.getArchivalCopyLocation());
+ } else {
+ failures++;
+ logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber());
+ }
+ } catch (CommandException ex) {
+ logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex);
+ }
+ logger.fine(successes + failures + " of " + total + " archive submissions complete");
+ }
+ logger.info("Archiving complete: " + successes + " Successes, " + failures + " Failures. See prior log messages for details.");
+ }
+ }).start();
+ return ok("Archiving all unarchived published dataset versions using " + cmd.getClass().getCanonicalName() + ". Processing can take significant time for large datasets/ large numbers of dataset versions. View log and/or check archive for results.");
+ } else {
+ logger.log(Level.SEVERE, "Could not find Archiver class: " + className);
+ return error(Status.INTERNAL_SERVER_ERROR, "Could not find Archiver class: " + className);
+ }
+ } else {
+ return error(Status.BAD_REQUEST, "No unarchived published dataset versions found");
+ }
+ } catch (WrappedResponse e1) {
+ return error(Status.UNAUTHORIZED, "api key required");
+ }
+ }
+
@DELETE
@Path("/clearMetricsCache")
public Response clearMetricsCache() {
From 8313404e6604daba3ee53d32d9b09e83ebaae9f2 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 15:26:19 -0500
Subject: [PATCH 002/366] add listonly and limit options, count commandEx as
failure
---
.../edu/harvard/iq/dataverse/api/Admin.java | 24 ++++++++++++++++++-
1 file changed, 23 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 81fe1ecd2a9..3c61d2e8919 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -46,6 +46,7 @@
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
@@ -1723,9 +1724,16 @@ public void run() {
}
+ /**
+ * Iteratively archives all unarchived dataset versions
+ *
+ * @param listonly don't archive, just list the unarchived versions
+ * @param limit the maximum number of versions to process
+ * @return
+ */
@GET
@Path("/archiveAllUnarchivedDataVersions")
- public Response archiveAllUnarchivedDatasetVersions() {
+ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit) {
try {
AuthenticatedUser au = findAuthenticatedUserOrDie();
@@ -1736,6 +1744,16 @@ public Response archiveAllUnarchivedDatasetVersions() {
session.setUser(au);
List<DatasetVersion> dsl = datasetversionService.getUnarchivedDatasetVersions();
if (dsl != null) {
+ if (listonly) {
+ logger.info("Unarchived versions found: ");
+ int current = 0;
+ for (DatasetVersion dv : dsl) {
+ if (limit != null && current > limit) {
+ break;
+ }
+ logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
+ }
+ }
String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName);
AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0));
@@ -1746,6 +1764,9 @@ public void run() {
int successes = 0;
int failures = 0;
for (DatasetVersion dv : dsl) {
+ if (limit != null && (successes + failures) > limit) {
+ break;
+ }
try {
AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv);
@@ -1759,6 +1780,7 @@ public void run() {
logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber());
}
} catch (CommandException ex) {
+ failures++;
logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex);
}
logger.fine(successes + failures + " of " + total + " archive submissions complete");
From 70d923ae08b80d6248acc062ec836ed5812fa645 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 15:36:50 -0500
Subject: [PATCH 003/366] send list in response for listonly
---
src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 3 +++
1 file changed, 3 insertions(+)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 3c61d2e8919..4fd3f43b127 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -1745,14 +1745,17 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool
List<DatasetVersion> dsl = datasetversionService.getUnarchivedDatasetVersions();
if (dsl != null) {
if (listonly) {
+ JsonArrayBuilder jab = Json.createArrayBuilder();
logger.info("Unarchived versions found: ");
int current = 0;
for (DatasetVersion dv : dsl) {
if (limit != null && current > limit) {
break;
}
+ jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
}
+ return ok(jab);
}
String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName);
AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0));
From 96d3723307c26668e5687f4ba61fb80d0d207a16 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 15:51:02 -0500
Subject: [PATCH 004/366] fix query
---
.../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index ea6a05a2c3c..344f8af3b87 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -1173,10 +1173,10 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion)
*/
public List<DatasetVersion> getUnarchivedDatasetVersions(){
- String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;";
+ String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL";
try{
- TypedQuery<DatasetVersion> query = em.createQuery(queryString, DatasetVersion.class);
+ TypedQuery<DatasetVersion> query = em.createQuery(queryString, DatasetVersion.class);
List dsl = query.getResultList();
return dsl;
From cb9f374e6452cffa5069ef941a0a5f65a8248ca7 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 16:00:54 -0500
Subject: [PATCH 005/366] case sensitive in query
---
.../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index 344f8af3b87..3f46a25c91e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -1173,7 +1173,7 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion)
*/
public List<DatasetVersion> getUnarchivedDatasetVersions(){
- String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL";
+ String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL";
try{
TypedQuery<DatasetVersion> query = em.createQuery(queryString, DatasetVersion.class);
From 76e23960219f7cdf0cde5bede1cf8fda55fddd9e Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 16:24:13 -0500
Subject: [PATCH 006/366] param to only archive latest version
---
.../edu/harvard/iq/dataverse/api/Admin.java | 38 +++++++++++--------
1 file changed, 22 insertions(+), 16 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 4fd3f43b127..e06289dfac8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -1729,11 +1729,12 @@ public void run() {
*
* @param listonly don't archive, just list the unarchived versions
* @param limit the maximum number of versions to process
+ * @param latestonly only archive the latest published version of each dataset
* @return
*/
@GET
@Path("/archiveAllUnarchivedDataVersions")
- public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit) {
+ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) {
try {
AuthenticatedUser au = findAuthenticatedUserOrDie();
@@ -1752,8 +1753,11 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool
if (limit != null && current > limit) {
break;
}
- jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
- logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
+ if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) {
+ jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
+ logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber());
+ current++;
+ }
}
return ok(jab);
}
@@ -1770,21 +1774,23 @@ public void run() {
if (limit != null && (successes + failures) > limit) {
break;
}
- try {
- AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv);
-
- dv = commandEngine.submit(cmd);
- if (dv.getArchivalCopyLocation() != null) {
- successes++;
- logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: "
- + dv.getArchivalCopyLocation());
- } else {
+ if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) {
+ try {
+ AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv);
+
+ dv = commandEngine.submit(cmd);
+ if (dv.getArchivalCopyLocation() != null) {
+ successes++;
+ logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: "
+ + dv.getArchivalCopyLocation());
+ } else {
+ failures++;
+ logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber());
+ }
+ } catch (CommandException ex) {
failures++;
- logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber());
+ logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex);
}
- } catch (CommandException ex) {
- failures++;
- logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex);
}
logger.fine(successes + failures + " of " + total + " archive submissions complete");
}
From 2e8d990ad4b75719c2d8e6b35a0f3d104822f3c3 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Mon, 21 Dec 2020 16:41:58 -0500
Subject: [PATCH 007/366] off by one in limit
---
src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index e06289dfac8..9f819ff13a5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -1750,7 +1750,7 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool
logger.info("Unarchived versions found: ");
int current = 0;
for (DatasetVersion dv : dsl) {
- if (limit != null && current > limit) {
+ if (limit != null && current >= limit) {
break;
}
if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) {
@@ -1771,7 +1771,7 @@ public void run() {
int successes = 0;
int failures = 0;
for (DatasetVersion dv : dsl) {
- if (limit != null && (successes + failures) > limit) {
+ if (limit != null && (successes + failures) >= limit) {
break;
}
if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) {
From b7968333b5950f44bbf086ebc1d020ee4ca4535f Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Wed, 23 Dec 2020 11:52:43 -0500
Subject: [PATCH 008/366] documentation
---
doc/sphinx-guides/source/installation/config.rst | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 4a877eabff7..5b9433d7c31 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -866,9 +866,9 @@ For example:
``cp /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json``
-.. _Archiving API Call:
+.. _Archiving API Calls:
-API Call
+API Calls
++++++++
Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing:
@@ -881,6 +881,18 @@ where:
``{version}`` is the friendly version number, e.g. "1.2".
+A batch API call is also available that will attempt to archive any currently unarchived dataset versions:
+
+``curl -H "X-Dataverse-key: " http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions``
+
+The call supports three optional query parameters that can be used in combination:
+
+``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any
+
+``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions)
+
+``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call.
+
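+For example, the following invocation (the key and parameter values here are illustrative) would list, but not archive, at most ten of the most recently published versions:
+
+``curl -H "X-Dataverse-key: <key>" "http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions?listonly=true&latestonly=true&limit=10"``
+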
The submitDataVersionToArchive API (and the workflow discussed below) attempt to archive the dataset version via an archive-specific method. For Chronopolis, a DuraCloud space named for the dataset (its DOI with ':' and '.' replaced with '-') is created and two files are uploaded to it: a version-specific datacite.xml metadata file and a BagIt bag containing the data and an OAI-ORE map file. (The datacite.xml file, stored outside the Bag as well as inside, is intended to aid in discovery while the ORE map file is 'complete', containing all user-entered metadata, and is intended as an archival record.)
In the Chronopolis case, since the transfer from the DuraCloud front-end to archival storage in Chronopolis can take significant time, it is currently up to the admin/curator to submit a 'snap-shot' of the space within DuraCloud and to monitor its successful transfer. Once transfer is complete the space should be deleted, at which point the Dataverse API call can be used to submit a Bag for other versions of the same Dataset. (The space is reused, so that archival copies of different Dataset versions correspond to different snapshots of the same DuraCloud space.)
From 006a4baff870ebd1c11c86caaacaf96511fadd0c Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 8 Jan 2021 12:28:55 -0500
Subject: [PATCH 009/366] Update
doc/sphinx-guides/source/installation/config.rst
Co-authored-by: Philip Durbin
---
doc/sphinx-guides/source/installation/config.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 5b9433d7c31..84ec0699d62 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -869,7 +869,7 @@ For example:
.. _Archiving API Calls:
API Calls
-++++++++
++++++++++
Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing:
From bba8ba0a13703410a9196713c6920150291d4643 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 8 Jan 2021 12:29:20 -0500
Subject: [PATCH 010/366] Update
doc/sphinx-guides/source/installation/config.rst
Co-authored-by: Philip Durbin
---
doc/sphinx-guides/source/installation/config.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 84ec0699d62..a997f0e353f 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -887,7 +887,7 @@ A batch API call is also available that will attempt to archive any currently un
The call supports three optional query parameters that can be used in combination:
-``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any
+``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any.
``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions)
From 011c97a4b73775cf152e0cf06127d8da9e8d2780 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 8 Jan 2021 12:29:46 -0500
Subject: [PATCH 011/366] Update
doc/sphinx-guides/source/installation/config.rst
Co-authored-by: Philip Durbin
---
doc/sphinx-guides/source/installation/config.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index a997f0e353f..67ee66af763 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -889,7 +889,7 @@ The call supports three optional query parameters that can be used in combinatio
``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any.
-``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions)
+``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions).
``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call.
From 1a1c28ccb7a6c0427f349cd8569c516bca43bf68 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 8 Jan 2021 13:10:22 -0500
Subject: [PATCH 012/366] updates per review
---
.../dataverse/DatasetVersionServiceBean.java | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index 3f46a25c91e..33cc236b902 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -986,8 +986,8 @@ public List<HashMap<String, Object>> getBasicDatasetVersionInfo(Dataset dataset)
} // end getBasicDatasetVersionInfo
-
- public HashMap, ?> getFileMetadataHistory(DataFile df){
+ //Not used?
+ public HashMap getFileMetadataHistory(DataFile df){
if (df == null){
throw new NullPointerException("DataFile 'df' cannot be null");
@@ -1175,18 +1175,18 @@ public List<DatasetVersion> getUnarchivedDatasetVersions(){
String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL";
- try{
+ try {
TypedQuery<DatasetVersion> query = em.createQuery(queryString, DatasetVersion.class);
List dsl = query.getResultList();
return dsl;
-
+
} catch (javax.persistence.NoResultException e) {
logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString);
return null;
- } catch (EJBException e) {
- logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage());
- return null;
- }
+ } catch (EJBException e) {
+ logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage());
+ return null;
+ }
} // end getUnarchivedDatasetVersions
-
+
} // end class
From a849d6cb4a037f971075ff6838bbe3023ed4d953 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Tue, 30 Nov 2021 09:44:41 -0500
Subject: [PATCH 013/366] #8191 update ui and bundle, etc.
---
.../edu/harvard/iq/dataverse/Dataset.java | 1 +
.../edu/harvard/iq/dataverse/DatasetPage.java | 15 +++++-
.../harvard/iq/dataverse/DatasetVersion.java | 28 +++++++++++
.../edu/harvard/iq/dataverse/Template.java | 1 +
.../iq/dataverse/TermsOfUseAndAccess.java | 13 ++++++
.../TermsOfUseAndAccessValidator.java | 46 ++++++++++++++++---
src/main/java/propertyFiles/Bundle.properties | 4 +-
src/main/webapp/dataset-license-terms.xhtml | 36 ++++++++++-----
src/main/webapp/editFilesFragment.xhtml | 45 +++++++++++-------
.../webapp/file-edit-popup-fragment.xhtml | 19 ++++++--
10 files changed, 166 insertions(+), 42 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 60466f96362..f1fe7b2b09d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -319,6 +319,7 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f
TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
terms.setDatasetVersion(dsv);
terms.setLicense(TermsOfUseAndAccess.License.CC0);
+ terms.setFileAccessRequest(true);
dsv.setTermsOfUseAndAccess(terms);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 90ca5ecb027..0367fca8591 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -394,6 +394,18 @@ public void setRsyncScript(String rsyncScript) {
public String getRsyncScriptFilename() {
return rsyncScriptFilename;
}
+
+ private Boolean hasRestrictedFiles = null;
+
+ public Boolean isHasRestrictedFiles(){
+ //cache in page to limit processing
+ if (hasRestrictedFiles != null){
+ return hasRestrictedFiles;
+ } else {
+ hasRestrictedFiles = workingVersion.isHasRestrictedFile();
+ return hasRestrictedFiles;
+ }
+ }
private String thumbnailString = null;
@@ -2054,7 +2066,8 @@ private String init(boolean initFull) {
previewTools = externalToolService.findFileToolsByType(ExternalTool.Type.PREVIEW);
datasetExploreTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.EXPLORE);
rowsPerPage = 10;
-
+ hasRestrictedFiles = workingVersion.isHasRestrictedFile();
+
return null;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index d53cf20491c..2a235e5fefb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -560,6 +560,13 @@ public boolean isHasNonPackageFile(){
// The presence of any non-package file means that HTTP Upload was used (no mixing allowed) so we just check the first file.
return !this.fileMetadatas.get(0).getDataFile().getContentType().equals(DataFileServiceBean.MIME_TYPE_PACKAGE_FILE);
}
+
+ public boolean isHasRestrictedFile(){
+ if (this.fileMetadatas.isEmpty()){
+ return false;
+ }
+ return this.fileMetadatas.stream().anyMatch(fm -> (fm.isRestricted()));
+ }
public void updateDefaultValuesFromTemplate(Template template) {
if (!template.getDatasetFields().isEmpty()) {
@@ -636,6 +643,11 @@ public void initDefaultValues() {
TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
terms.setDatasetVersion(this);
terms.setLicense(TermsOfUseAndAccess.License.CC0);
+ /*
+ Added for https://github.com/IQSS/dataverse/issues/8191
+ set File Access Request to true
+ */
+ terms.setFileAccessRequest(true);
this.setTermsOfUseAndAccess(terms);
}
@@ -1665,7 +1677,23 @@ public Set<ConstraintViolation> validate() {
}
}
}
+
+
+ TermsOfUseAndAccess toua = this.termsOfUseAndAccess;
+ //Only need to test Terms of Use and Access if there are restricted files
+ if (toua != null && this.isHasRestrictedFile()) {
+ Set<ConstraintViolation<TermsOfUseAndAccess>> constraintViolations = validator.validate(toua);
+ if (constraintViolations.size() > 0) {
+ ConstraintViolation<TermsOfUseAndAccess> violation = constraintViolations.iterator().next();
+ String message = "Constraint violation found in Terms of Use and Access. "
+ + "If Request Access to restricted files is disabled then Terms of Access must be provided.";
+ logger.info(message);
+ this.termsOfUseAndAccess.setValidationMessage(message);
+ returnSet.add(violation);
+ }
+ }
+
return returnSet;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java
index b01b0a2b792..5b9d7c82fe8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Template.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Template.java
@@ -326,6 +326,7 @@ public Template cloneNewTemplate(Template source) {
} else {
terms = new TermsOfUseAndAccess();
terms.setLicense(TermsOfUseAndAccess.defaultLicense);
+ terms.setFileAccessRequest(true);
}
newTemplate.setTermsOfUseAndAccess(terms);
return newTemplate;
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
index 72f4ab54ee8..04dd48ea473 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
@@ -14,6 +14,7 @@
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToOne;
+import javax.persistence.Transient;
/**
*
@@ -21,6 +22,7 @@
* @author skraffmi
*/
@Entity
+@ValidateTermsOfUseAndAccess
public class TermsOfUseAndAccess implements Serializable {
@Id
@@ -275,6 +277,17 @@ public enum License {
NONE, CC0
}
+ @Transient
+ private String validationMessage;
+
+ public String getValidationMessage() {
+ return validationMessage;
+ }
+
+ public void setValidationMessage(String validationMessage) {
+ this.validationMessage = validationMessage;
+ }
+
/**
* @todo What does the GUI use for a default license? What does the "native"
* API use? See also https://github.com/IQSS/dataverse/issues/1385
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
index dfa9e9f6c77..394d0f359ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
@@ -21,12 +21,46 @@ public void initialize(ValidateTermsOfUseAndAccess constraintAnnotation) {
@Override
public boolean isValid(TermsOfUseAndAccess value, ConstraintValidatorContext context) {
- //if both null invalid
- //if(value.getTemplate() == null && value.getDatasetVersion() == null) return false;
+ //must allow access requests or have terms of access filled in.
- //if both not null invalid
- //return !(value.getTemplate() != null && value.getDatasetVersion() != null);
- return true;
+ boolean valid = value.isFileAccessRequest() == true || (value.getTermsOfAccess() != null && !value.getTermsOfAccess().isEmpty()) ;
+ if (!valid) {
+ try {
+
+
+ if ( context != null) {
+ context.buildConstraintViolationWithTemplate( "If Request Access is false then Terms of Access must be provided.").addConstraintViolation();
+ }
+
+ String message = "Constraint violation found in Terms of Use and Access. "
+ + " If Request Access to restricted files is set to false then Terms of Access must be provided.";
+
+ value.setValidationMessage(message);
+ } catch (NullPointerException e) {
+ return false;
+ }
+ return false;
+ }
+
+
+ return valid;
+ }
+
+ public static boolean isTOUAValid(TermsOfUseAndAccess value, ConstraintValidatorContext context){
+
+ boolean valid = value.isFileAccessRequest() == true || (value.getTermsOfAccess() != null && !value.getTermsOfAccess().isEmpty());
+ if (!valid) {
+
+ if (context != null) {
+ context.buildConstraintViolationWithTemplate("If Request Access is false then Terms of Access must be provided.").addConstraintViolation();
+ }
+
+ String message = "Constraint violation found in Terms of Use and Access. "
+ + " If Request Access to restricted files is set to false then Terms of Access must be provided.";
+
+ value.setValidationMessage(message);
+ }
+ return valid;
}
-
+
}
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index fbbda5213ad..621b9116381 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -2124,8 +2124,8 @@ citationFrame.banner.countdownMessage.seconds=seconds
#file-edit-popup-fragment.xhtml #editFilesFragment.xhtml
dataset.access.accessHeader=Restrict Files and Add Dataset Terms of Access
-dataset.access.description=Restricting limits access to published files. You can add or edit Terms of Access for the dataset, and allow people to Request Access to restricted files.
-
+dataset.access.description=Restricting limits access to published files. Providing information about access to restricted files is required. By default people who want to use these files can request access to them. You can provide Terms of Access instead by unchecking the box and adding them. These settings can be changed when you edit the dataset. Learn about restricting files and dataset access in the User Guide.
+dataset.access.description.line.2=One of the following methods for communicating access must be active, and applies to all restricted files in this dataset.
#datasetFieldForEditFragment.xhtml
dataset.AddReplication=Add "Replication Data for" to Title
dataset.replicationDataFor=Replication Data for:
diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml
index c5bdc8638cf..cf623ec8c8a 100644
--- a/src/main/webapp/dataset-license-terms.xhtml
+++ b/src/main/webapp/dataset-license-terms.xhtml
@@ -277,26 +277,38 @@
@@ -370,7 +382,7 @@
data-toggle="tooltip" data-placement="auto right" data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title']}">
Date: Fri, 11 Feb 2022 17:24:15 -0500
Subject: [PATCH 037/366] #8191 remove out of date comment/code
---
.../edu/harvard/iq/dataverse/TermsOfUseAndAccess.java | 8 --------
1 file changed, 8 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
index c4f52d7ffca..a8616283332 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
@@ -300,14 +300,6 @@ public void setValidationMessage(String validationMessage) {
this.validationMessage = validationMessage;
}
- /**
- * @todo What does the GUI use for a default license? What does the "native"
- * API use? See also https://github.com/IQSS/dataverse/issues/1385
- */
- /*
- public static TermsOfUseAndAccess.License defaultLicense = TermsOfUseAndAccess.License.CC0;
- public static String CC0_URI = "https://creativecommons.org/publicdomain/zero/1.0/";
- */
@Override
public int hashCode() {
int hash = 0;
From 4764b8e5501e0ba1b4c5bf791e2dfc7533bc52fd Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 15 Feb 2022 15:19:19 -0500
Subject: [PATCH 038/366] read metadatalanguage if sent, set if exists and is
allowed
not sent - use default
not allowed - throw exception to return bad request
---
.../edu/harvard/iq/dataverse/util/json/JsonParser.java | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
index 4930d0a4e7b..5a49c56acbf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
@@ -301,7 +301,13 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException {
dataset.setAuthority(obj.getString("authority", null) == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Authority) : obj.getString("authority"));
dataset.setProtocol(obj.getString("protocol", null) == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Protocol) : obj.getString("protocol"));
dataset.setIdentifier(obj.getString("identifier",null));
-
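+ // A metadatalanguage that is not sent (null) is accepted and the installation default will apply; a value outside the allowed map is rejected below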
+ String mdl = obj.getString("metadatalanguage",null);
+ if(mdl==null || settingsService.getBaseMetadataLanguageMap(new HashMap<String, String>(), true).containsKey(mdl)) {
+ dataset.setMetadataLanguage(mdl);
+ } else {
+ throw new JsonParseException("Specified metadatalanguage not allowed.");
+ }
+
DatasetVersion dsv = new DatasetVersion();
dsv.setDataset(dataset);
dsv = parseDatasetVersion(obj.getJsonObject("datasetVersion"), dsv);
From 212c824079b31cb68ef3bbe69dc55d2fee0e6773 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 15 Feb 2022 15:40:27 -0500
Subject: [PATCH 039/366] add metadata language as schema.org/inLanguage for
import/export
---
.../edu/harvard/iq/dataverse/api/Dataverses.java | 12 +++++++++++-
.../edu/harvard/iq/dataverse/util/bagit/OREMap.java | 5 ++++-
.../harvard/iq/dataverse/util/json/JSONLDUtil.java | 3 +++
3 files changed, 18 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 0a4c76a54e3..a9c706996a7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -66,6 +66,7 @@
import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty;
import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
+import edu.harvard.iq.dataverse.util.json.JsonLDTerm;
import edu.harvard.iq.dataverse.util.json.JsonParseException;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief;
import java.io.StringReader;
@@ -110,6 +111,7 @@
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
+import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import javax.servlet.http.HttpServletResponse;
@@ -300,6 +302,11 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie
ds.setIdentifier(null);
ds.setProtocol(null);
ds.setGlobalIdCreateTime(null);
+
+ //Verify metadatalanguage is allowed
+ if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap<String, String>(), true).containsKey(ds.getMetadataLanguage())) {
+ throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed.");
+ }
Dataset managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u)));
return created("/datasets/" + managedDs.getId(),
@@ -479,8 +486,11 @@ public Response recreateDataset(String jsonLDBody, @PathParam("identifier") Stri
if(!datasetSvc.isIdentifierLocallyUnique(ds)) {
throw new BadRequestException("Cannot recreate a dataset whose PID is already in use");
}
-
+ //Verify metadatalanguage is allowed
+ if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap<String, String>(), true).containsKey(ds.getMetadataLanguage())) {
+ throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed.");
+ }
if (ds.getVersions().isEmpty()) {
return badRequest("Supplied json must contain a single dataset version.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
index 38a04b36314..ff9de5d8f25 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
@@ -214,7 +214,10 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
aggBuilder.add(JsonLDTerm.schemaOrg("includedInDataCatalog").getLabel(),
BrandingUtil.getRootDataverseCollectionName());
-
+
+ addIfNotNull(aggBuilder, JsonLDTerm.schemaOrg("inLanguage"), dataset.getMetadataLanguage());
+
+
// The aggregation aggregates aggregatedresources (Datafiles) which each have
// their own entry and metadata
JsonArrayBuilder aggResArrayBuilder = Json.createArrayBuilder();
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
index 3fdacbdc8de..62cd54387b8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
@@ -89,6 +89,9 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody,
+ "'. Make sure it is in valid form - see Dataverse Native API documentation.");
}
}
+
+ //Store the metadatalanguage if sent - the caller needs to check whether it is allowed (as with any GlobalID)
+ ds.setMetadataLanguage(jsonld.getString(JsonLDTerm.schemaOrg("inLanguage").getUrl(),null));
dsv = updateDatasetVersionMDFromJsonLD(dsv, jsonld, metadataBlockSvc, datasetFieldSvc, append, migrating, licenseSvc);
dsv.setDataset(ds);
From a156c528468180cdd86a9828f132d39d6c097f68 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 15 Feb 2022 17:13:53 -0500
Subject: [PATCH 040/366] i18n more fields
---
.../dataverse/export/ddi/DdiExportUtil.java | 50 ++++++++++---------
1 file changed, 26 insertions(+), 24 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index 10c02342867..a49e0a73fbf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -437,7 +437,7 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset
}
}
if (DatasetFieldConstant.kindOfData.equals(fieldDTO.getTypeName())) {
- writeMultipleElement(xmlw, "dataKind", fieldDTO);
+ writeMultipleElement(xmlw, "dataKind", fieldDTO, lang);
}
}
}
@@ -496,7 +496,7 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset
}
if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) {
writeFullElement(xmlw, "southBL", next.getSinglePrimitive());
- }
+ }
}
xmlw.writeEndElement();
@@ -510,20 +510,21 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset
if("socialscience".equals(key)){
for (FieldDTO fieldDTO : value.getFields()) {
if (DatasetFieldConstant.universe.equals(fieldDTO.getTypeName())) {
- writeMultipleElement(xmlw, "universe", fieldDTO);
+ writeMultipleElement(xmlw, "universe", fieldDTO, lang);
}
if (DatasetFieldConstant.unitOfAnalysis.equals(fieldDTO.getTypeName())) {
writeI18NElementList(xmlw, "anlyUnit", fieldDTO.getMultipleVocab(), "unitOfAnalysis", fieldDTO.getTypeClass(), "socialscience", lang);
}
- }
+ }
}
}
xmlw.writeEndElement(); //sumDscr
}
- private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO) throws XMLStreamException {
+ private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO, String lang) throws XMLStreamException {
for (String value : fieldDTO.getMultiplePrimitive()) {
- writeFullElement(xmlw, element, value);
+ //Write multiple lang vals for controlled vocab, otherwise don't include any lang tag
+ writeFullElement(xmlw, element, value, fieldDTO.getTypeClass().equals("controlledVocabulary") ? lang : null);
}
}
@@ -541,37 +542,37 @@ private static void writeDateElement(XMLStreamWriter xmlw, String element, Strin
private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO version, String lang) throws XMLStreamException{
xmlw.writeStartElement("method");
xmlw.writeStartElement("dataColl");
- writeI18NElement(xmlw, "timeMeth", version, DatasetFieldConstant.timeMethod,lang);
- writeFullElement(xmlw, "dataCollector", dto2Primitive(version, DatasetFieldConstant.dataCollector));
- writeFullElement(xmlw, "collectorTraining", dto2Primitive(version, DatasetFieldConstant.collectorTraining));
- writeFullElement(xmlw, "frequenc", dto2Primitive(version, DatasetFieldConstant.frequencyOfDataCollection));
+ writeI18NElement(xmlw, "timeMeth", version, DatasetFieldConstant.timeMethod,lang);
+ writeI18NElement(xmlw, "dataCollector", version, DatasetFieldConstant.dataCollector, lang);
+ writeI18NElement(xmlw, "collectorTraining", version, DatasetFieldConstant.collectorTraining, lang);
+ writeI18NElement(xmlw, "frequenc", version, DatasetFieldConstant.frequencyOfDataCollection, lang);
writeI18NElement(xmlw, "sampProc", version, DatasetFieldConstant.samplingProcedure, lang);
writeTargetSampleElement(xmlw, version);
- writeFullElement(xmlw, "deviat", dto2Primitive(version, DatasetFieldConstant.deviationsFromSampleDesign));
+ writeI18nElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang);
xmlw.writeStartElement("sources");
- writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources));
- writeFullElement(xmlw, "srcOrig", dto2Primitive(version, DatasetFieldConstant.originOfSources));
- writeFullElement(xmlw, "srcChar", dto2Primitive(version, DatasetFieldConstant.characteristicOfSources));
- writeFullElement(xmlw, "srcDocu", dto2Primitive(version, DatasetFieldConstant.accessToSources));
+ writeI18NElementList(xmlw, "dataSrc", version, DatasetFieldConstant.dataSources, lang);
+ writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang);
+ writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang);
+ writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang);
xmlw.writeEndElement(); //sources
writeI18NElement(xmlw, "collMode", version, DatasetFieldConstant.collectionMode, lang);
writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang);
- writeFullElement(xmlw, "collSitu", dto2Primitive(version, DatasetFieldConstant.dataCollectionSituation));
- writeFullElement(xmlw, "actMin", dto2Primitive(version, DatasetFieldConstant.actionsToMinimizeLoss));
- writeFullElement(xmlw, "conOps", dto2Primitive(version, DatasetFieldConstant.controlOperations));
- writeFullElement(xmlw, "weight", dto2Primitive(version, DatasetFieldConstant.weighting));
- writeFullElement(xmlw, "cleanOps", dto2Primitive(version, DatasetFieldConstant.cleaningOperations));
+ writeI18NElement(xmlw, "collSitu", version, DatasetFieldConstant.dataCollectionSituation, lang);
+ writeI18NElement(xmlw, "actMin", version, DatasetFieldConstant.actionsToMinimizeLoss, lang);
+ writeI18NElement(xmlw, "conOps", version, DatasetFieldConstant.controlOperations, lang);
+ writeI18NElement(xmlw, "weight", version, DatasetFieldConstant.weighting, lang);
+ writeI18NElement(xmlw, "cleanOps", version, DatasetFieldConstant.cleaningOperations, lang);
xmlw.writeEndElement(); //dataColl
xmlw.writeStartElement("anlyInfo");
//writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes));
- writeFullElement(xmlw, "respRate", dto2Primitive(version, DatasetFieldConstant.responseRate));
- writeFullElement(xmlw, "EstSmpErr", dto2Primitive(version, DatasetFieldConstant.samplingErrorEstimates));
- writeFullElement(xmlw, "dataAppr", dto2Primitive(version, DatasetFieldConstant.otherDataAppraisal));
+ writeI18NElement(xmlw, "respRate", version, DatasetFieldConstant.responseRate, lang);
+ writeI18NElement(xmlw, "EstSmpErr", version, DatasetFieldConstant.samplingErrorEstimates, lang);
+ writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang);
xmlw.writeEndElement(); //anlyInfo
writeNotesElement(xmlw, version);
@@ -1412,9 +1413,10 @@ private static void writeI18NElement(XMLStreamWriter xmlw, String name, DatasetV
String val = dto2Primitive(version, fieldTypeName);
Locale defaultLocale = Locale.getDefault();
// Get the language-specific value for the default language
+ // A null value is returned if this is not a CVV field
String localeVal = dto2Primitive(version, fieldTypeName, defaultLocale);
String requestedLocaleVal = null;
- if (lang != null && !defaultLocale.getLanguage().equals(lang)) {
+ if (lang != null && localeVal != null && !defaultLocale.getLanguage().equals(lang)) {
// Also get the value in the requested locale/lang if that's not the default
// lang.
requestedLocaleVal = dto2Primitive(version, fieldTypeName, new Locale(lang));
From 6c09f15a3b1238ab9461091c438955b8296b5a0e Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 17 Feb 2022 14:27:32 -0500
Subject: [PATCH 041/366] isControlledVocabularyField convenience method
---
src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java
index 9d79e68cca3..684a4d12b7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java
@@ -290,6 +290,10 @@ public Object getConvertedValue() {
}
}
+ public boolean isControlledVocabularyField() {
+ return getTypeClass().equals("controlledVocabulary");
+ }
+
@Override
public int hashCode() {
int hash = 3;
From d60d63bfc9e7e88c9a9268cd43a27b589fae933b Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 17 Feb 2022 14:29:34 -0500
Subject: [PATCH 042/366] handle keyword CVV vals
---
.../dataverse/export/ddi/DdiExportUtil.java | 135 +++++++++++++-----
1 file changed, 97 insertions(+), 38 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index a49e0a73fbf..cb5a10fd81e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -93,6 +93,7 @@ public class DdiExportUtil {
public static final String NOTE_TYPE_CONTENTTYPE = "DATAVERSE:CONTENTTYPE";
public static final String NOTE_SUBJECT_CONTENTTYPE = "Content/MIME Type";
+ public static final String CITATION_BLOCK_NAME = "citation";
public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) {
logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson));
@@ -524,7 +525,7 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset
private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO, String lang) throws XMLStreamException {
for (String value : fieldDTO.getMultiplePrimitive()) {
//Write multiple lang vals for controlled vocab, otherwise don't include any lang tag
- writeFullElement(xmlw, element, value, fieldDTO.getTypeClass().equals("controlledVocabulary") ? lang : null);
+ writeFullElement(xmlw, element, value, fieldDTO.isControlledVocabularyField() ? lang : null);
}
}
@@ -550,10 +551,10 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO
writeTargetSampleElement(xmlw, version);
- writeI18nElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang);
+ writeI18NElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang);
xmlw.writeStartElement("sources");
- writeI18NElementList(xmlw, "dataSrc", version, DatasetFieldConstant.dataSources, lang);
+ writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources));
writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang);
writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang);
writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang);
@@ -582,18 +583,20 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO
private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String lang) throws XMLStreamException{
//Key Words and Topic Classification
-
- xmlw.writeStartElement("subject");
+ Locale defaultLocale = Locale.getDefault();
+ xmlw.writeStartElement("subject");
for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
String key = entry.getKey();
MetadataBlockDTO value = entry.getValue();
- if ("citation".equals(key)) {
+ if (CITATION_BLOCK_NAME.equals(key)) {
for (FieldDTO fieldDTO : value.getFields()) {
- if (DatasetFieldConstant.subject.equals(fieldDTO.getTypeName())){
- writeI18NElementList(xmlw, "keyword", fieldDTO.getMultipleVocab(), "subject", fieldDTO.getTypeClass(), "citation", lang);
+ if (DatasetFieldConstant.subject.equals(fieldDTO.getTypeName())) {
+ writeI18NElementList(xmlw, "keyword", fieldDTO.getMultipleVocab(), "subject",
+ fieldDTO.getTypeClass(), "citation", lang);
}
-
+
if (DatasetFieldConstant.keyword.equals(fieldDTO.getTypeName())) {
+ boolean isCVV = false;
for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
String keywordValue = "";
String keywordVocab = "";
@@ -601,30 +604,57 @@ private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO
for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
FieldDTO next = iterator.next();
if (DatasetFieldConstant.keywordValue.equals(next.getTypeName())) {
- keywordValue = next.getSinglePrimitive();
+ if (next.isControlledVocabularyField()) {
+ isCVV = true;
+ }
+ keywordValue = next.getSinglePrimitive();
}
if (DatasetFieldConstant.keywordVocab.equals(next.getTypeName())) {
- keywordVocab = next.getSinglePrimitive();
+ keywordVocab = next.getSinglePrimitive();
}
if (DatasetFieldConstant.keywordVocabURI.equals(next.getTypeName())) {
- keywordURI = next.getSinglePrimitive();
+ keywordURI = next.getSinglePrimitive();
}
}
- if (!keywordValue.isEmpty()){
- xmlw.writeStartElement("keyword");
- if(!keywordVocab.isEmpty()){
- writeAttribute(xmlw,"vocab",keywordVocab);
+ if (!keywordValue.isEmpty()) {
+ xmlw.writeStartElement("keyword");
+ if (!keywordVocab.isEmpty()) {
+ writeAttribute(xmlw, "vocab", keywordVocab);
+ }
+ if (!keywordURI.isEmpty()) {
+ writeAttribute(xmlw, "vocabURI", keywordURI);
+ }
+ if (lang != null && isCVV) {
+ writeAttribute(xmlw, "xml:lang", defaultLocale.getLanguage());
+ xmlw.writeCharacters(ControlledVocabularyValue.getLocaleStrValue(keywordValue,
+ DatasetFieldConstant.keywordValue, CITATION_BLOCK_NAME, defaultLocale,
+ true));
+ } else {
+ xmlw.writeCharacters(keywordValue);
+ }
+ xmlw.writeEndElement(); // Keyword
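+ // If a non-default language was requested and this is a controlled vocabulary value, also write the translated value as a second, language-tagged keyword element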
+ if (lang != null && isCVV && !defaultLocale.getLanguage().equals(lang)) {
+ String translatedValue = ControlledVocabularyValue.getLocaleStrValue(keywordValue,
+ DatasetFieldConstant.keywordValue, CITATION_BLOCK_NAME, new Locale(lang),
+ false);
+ if (translatedValue != null) {
+ xmlw.writeStartElement("keyword");
+ if (!keywordVocab.isEmpty()) {
+ writeAttribute(xmlw, "vocab", keywordVocab);
+ }
+ if (!keywordURI.isEmpty()) {
+ writeAttribute(xmlw, "vocabURI", keywordURI);
+ }
+ writeAttribute(xmlw, "xml:lang", lang);
+ xmlw.writeCharacters(translatedValue);
+ xmlw.writeEndElement(); // Keyword
+ }
}
- if(!keywordURI.isEmpty()){
- writeAttribute(xmlw,"vocabURI",keywordURI);
- }
- xmlw.writeCharacters(keywordValue);
- xmlw.writeEndElement(); //Keyword
}
-
}
}
if (DatasetFieldConstant.topicClassification.equals(fieldDTO.getTypeName())) {
+ boolean isCVV = false;
for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
String topicClassificationValue = "";
String topicClassificationVocab = "";
@@ -632,34 +662,63 @@ private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO
for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
FieldDTO next = iterator.next();
if (DatasetFieldConstant.topicClassValue.equals(next.getTypeName())) {
- topicClassificationValue = next.getSinglePrimitive();
+ // Currently getSingleVocab() is the same as getSinglePrimitive() so this works
+ // for either case
+ topicClassificationValue = next.getSinglePrimitive();
+ if (next.isControlledVocabularyField()) {
+ isCVV = true;
+ }
}
if (DatasetFieldConstant.topicClassVocab.equals(next.getTypeName())) {
- topicClassificationVocab = next.getSinglePrimitive();
+ topicClassificationVocab = next.getSinglePrimitive();
}
if (DatasetFieldConstant.topicClassVocabURI.equals(next.getTypeName())) {
- topicClassificationURI = next.getSinglePrimitive();
+ topicClassificationURI = next.getSinglePrimitive();
}
}
- if (!topicClassificationValue.isEmpty()){
- xmlw.writeStartElement("topcClas");
- if(!topicClassificationVocab.isEmpty()){
- writeAttribute(xmlw,"vocab",topicClassificationVocab);
- }
- if(!topicClassificationURI.isEmpty()){
- writeAttribute(xmlw,"vocabURI",topicClassificationURI);
- }
- xmlw.writeCharacters(topicClassificationValue);
- xmlw.writeEndElement(); //topcClas
+ if (!topicClassificationValue.isEmpty()) {
+ xmlw.writeStartElement("topcClas");
+ if (!topicClassificationVocab.isEmpty()) {
+ writeAttribute(xmlw, "vocab", topicClassificationVocab);
+ }
+ if (!topicClassificationURI.isEmpty()) {
+ writeAttribute(xmlw, "vocabURI", topicClassificationURI);
+ }
+ if (lang != null && isCVV) {
+ writeAttribute(xmlw, "xml:lang", defaultLocale.getLanguage());
+ xmlw.writeCharacters(ControlledVocabularyValue.getLocaleStrValue(
+ topicClassificationValue, DatasetFieldConstant.topicClassValue,
+ CITATION_BLOCK_NAME, defaultLocale, true));
+ } else {
+ xmlw.writeCharacters(topicClassificationValue);
+ }
+ xmlw.writeEndElement(); // topcClas
+ if (lang != null && isCVV && !defaultLocale.getLanguage().equals(lang)) {
+ String translatedValue = ControlledVocabularyValue.getLocaleStrValue(
+ topicClassificationValue, DatasetFieldConstant.topicClassValue,
+ CITATION_BLOCK_NAME, new Locale(lang), false);
+ if (translatedValue != null) {
+ xmlw.writeStartElement("topcClas");
+ if (!topicClassificationVocab.isEmpty()) {
+ writeAttribute(xmlw, "vocab", topicClassificationVocab);
+ }
+ if (!topicClassificationURI.isEmpty()) {
+ writeAttribute(xmlw, "vocabURI", topicClassificationURI);
+ }
+ writeAttribute(xmlw, "xml:lang", lang);
+ xmlw.writeCharacters(translatedValue);
+ xmlw.writeEndElement(); // topcClas
+ }
+ }
}
}
}
}
}
- }
+ }
xmlw.writeEndElement(); // subject
}
-
+
private static void writeAuthorsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
@@ -1339,7 +1398,7 @@ private static String dto2Primitive(DatasetVersionDTO datasetVersionDTO, String
for (FieldDTO fieldDTO : value.getFields()) {
if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) {
String rawVal = fieldDTO.getSinglePrimitive();
- if (fieldDTO.getTypeClass().equals("controlledVocabulary")) {
+ if (fieldDTO.isControlledVocabularyField()) {
return ControlledVocabularyValue.getLocaleStrValue(rawVal, datasetFieldTypeName, value.getName(),
locale, false);
}
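For both keyword and topcClas, the exporter now writes the value once in the installation's default locale and then emits a second element tagged with xml:lang when the dataset's metadata language differs and a translation exists. A minimal sketch of that shape, omitting the isCVV guard and using a stand-in Translator for ControlledVocabularyValue.getLocaleStrValue (whose final boolean argument, as used above, selects whether the raw value is returned when no translation is found):

    import javax.xml.stream.XMLStreamException;
    import javax.xml.stream.XMLStreamWriter;
    import java.util.Locale;

    public class LocalizedElementSketch {

        /** Stand-in for ControlledVocabularyValue.getLocaleStrValue(...). */
        interface Translator {
            String translate(String rawValue, Locale locale, boolean fallBackToRaw);
        }

        // Default-locale element first, then a translated twin when the
        // dataset's metadata language (lang) differs and a translation exists.
        static void writeLocalized(XMLStreamWriter xmlw, String element, String rawValue,
                Locale defaultLocale, String lang, Translator t) throws XMLStreamException {
            xmlw.writeStartElement(element);
            xmlw.writeAttribute("xml:lang", defaultLocale.getLanguage());
            // true: fall back to the raw value if no translation is registered
            xmlw.writeCharacters(t.translate(rawValue, defaultLocale, true));
            xmlw.writeEndElement();

            if (lang != null && !defaultLocale.getLanguage().equals(lang)) {
                // false: returns null when no translation exists, so no element is written
                String translated = t.translate(rawValue, new Locale(lang), false);
                if (translated != null) {
                    xmlw.writeStartElement(element);
                    xmlw.writeAttribute("xml:lang", lang);
                    xmlw.writeCharacters(translated);
                    xmlw.writeEndElement();
                }
            }
        }
    }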
From d3d1aab1b86b2df52688ac9a280e19118cb29219 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 18 Feb 2022 12:19:39 -0500
Subject: [PATCH 043/366] typo (Thanks pdurbin) and de-merge issues breaking
 this PR out
---
.../harvard/iq/dataverse/SettingsWrapper.java | 48 +++------------
.../harvard/iq/dataverse/api/Dataverses.java | 4 +-
.../settings/SettingsServiceBean.java | 59 +++++++++++++++++++
3 files changed, 69 insertions(+), 42 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
index ec06e6bb91a..dcbec37fd7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
@@ -331,37 +331,20 @@ public Boolean isHasDropBoxKey() {
public boolean isLocalesConfigured() {
if (configuredLocales == null) {
- initLocaleSettings();
+ configuredLocales = new LinkedHashMap<>();
+ settingsService.initLocaleSettings(configuredLocales);
}
return configuredLocales.size() > 1;
}
public Map<String, String> getConfiguredLocales() {
if (configuredLocales == null) {
- initLocaleSettings();
+ configuredLocales = new LinkedHashMap<>();
+ settingsService.initLocaleSettings(configuredLocales);
}
return configuredLocales;
}
- private void initLocaleSettings() {
-
- configuredLocales = new LinkedHashMap<>();
-
- try {
- JSONArray entries = new JSONArray(getValueForKey(SettingsServiceBean.Key.Languages, "[]"));
- for (Object obj : entries) {
- JSONObject entry = (JSONObject) obj;
- String locale = entry.getString("locale");
- String title = entry.getString("title");
-
- configuredLocales.put(locale, title);
- }
- } catch (JSONException e) {
- //e.printStackTrace();
- // do we want to know? - probably not
- }
- }
-
public boolean isDoiInstallation() {
String protocol = getValueForKey(SettingsServiceBean.Key.Protocol);
if ("doi".equals(protocol)) {
@@ -490,31 +473,16 @@ public void validateEmbargoDate(FacesContext context, UIComponent component, Obj
Map<String, String> getBaseMetadataLanguageMap(boolean refresh) {
if (languageMap == null || refresh) {
- languageMap = new HashMap();
-
- /* If MetadataLanaguages is set, use it.
- * If not, we can't assume anything and should avoid assuming a metadata language
- */
- String mlString = getValueForKey(SettingsServiceBean.Key.MetadataLanguages,"");
-
- if(mlString.isEmpty()) {
- mlString="[]";
- }
- JsonReader jsonReader = Json.createReader(new StringReader(mlString));
- JsonArray languages = jsonReader.readArray();
- for(JsonValue jv: languages) {
- JsonObject lang = (JsonObject) jv;
- languageMap.put(lang.getString("locale"), lang.getString("title"));
- }
+ languageMap = settingsService.getBaseMetadataLanguageMap(languageMap, true);
}
return languageMap;
}
public Map<String, String> getMetadataLanguages(DvObjectContainer target) {
    Map<String, String> currentMap = new HashMap<String, String>();
- currentMap.putAll(getBaseMetadataLanguageMap(true));
- languageMap.put(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE, getDefaultMetadataLanguageLabel(target));
- return languageMap;
+ currentMap.putAll(getBaseMetadataLanguageMap(false));
+ currentMap.put(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE, getDefaultMetadataLanguageLabel(target));
+ return currentMap;
}
private String getDefaultMetadataLanguageLabel(DvObjectContainer target) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index a9c706996a7..5420762afd8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -305,7 +305,7 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie
//Verify metadatalanguage is allowed
if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap<String, String>(), true).containsKey(ds.getMetadataLanguage())) {
- throw new BadRequestException("Specified metadatalangauge (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed.");
+ throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed.");
}
Dataset managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u)));
@@ -489,7 +489,7 @@ public Response recreateDataset(String jsonLDBody, @PathParam("identifier") Stri
//Verify metadatalanguage is allowed
if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap<String, String>(), true).containsKey(ds.getMetadataLanguage())) {
- throw new BadRequestException("Specified metadatalangauge (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed.");
+ throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed.");
}
if (ds.getVersions().isEmpty()) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index efa944cf633..e13ea806dc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -9,12 +9,23 @@
import javax.ejb.Stateless;
import javax.inject.Named;
import javax.json.Json;
+import javax.json.JsonArray;
import javax.json.JsonObject;
+import javax.json.JsonReader;
+import javax.json.JsonValue;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
import java.io.StringReader;
+import java.util.HashMap;
import java.util.HashSet;
+import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -730,5 +741,53 @@ public Set<Setting> listAll() {
return new HashSet<>(em.createNamedQuery("Setting.findAll", Setting.class).getResultList());
}
+    public Map<String, String> getBaseMetadataLanguageMap(Map<String, String> languageMap, boolean refresh) {
+        if (languageMap == null || refresh) {
+            languageMap = new HashMap<String, String>();
+
+            /* If MetadataLanguages is set, use it.
+ * If not, we can't assume anything and should avoid assuming a metadata language
+ */
+ String mlString = getValueForKey(SettingsServiceBean.Key.MetadataLanguages,"");
+
+ if(mlString.isEmpty()) {
+ mlString="[]";
+ }
+ JsonReader jsonReader = Json.createReader(new StringReader(mlString));
+ JsonArray languages = jsonReader.readArray();
+ for(JsonValue jv: languages) {
+ JsonObject lang = (JsonObject) jv;
+ languageMap.put(lang.getString("locale"), lang.getString("title"));
+ }
+ }
+ return languageMap;
+ }
+    public void initLocaleSettings(Map<String, String> configuredLocales) {
+
+ try {
+ JSONArray entries = new JSONArray(getValueForKey(SettingsServiceBean.Key.Languages, "[]"));
+ for (Object obj : entries) {
+ JSONObject entry = (JSONObject) obj;
+ String locale = entry.getString("locale");
+ String title = entry.getString("title");
+
+ configuredLocales.put(locale, title);
+ }
+ } catch (JSONException e) {
+            // A malformed :Languages setting is silently ignored and
+            // configuredLocales is left as-is rather than failing the caller.
+ }
+ }
+
+
+    public Set<String> getConfiguredLanguages() {
+        Set<String> langs = new HashSet<String>();
+        langs.addAll(getBaseMetadataLanguageMap(new HashMap<String, String>(), true).keySet());
+        Map<String, String> configuredLocales = new LinkedHashMap<>();
+ initLocaleSettings(configuredLocales);
+ langs.addAll(configuredLocales.keySet());
+ return langs;
+ }
+
}
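Moving the language map construction into SettingsServiceBean lets the UI wrapper and the API share one allow-list. A minimal caller sketch, assuming an injected SettingsServiceBean and substituting IllegalArgumentException for the JAX-RS BadRequestException thrown in Dataverses.java:

    import java.util.HashMap;

    // Mirrors the verification in Dataverses.createDatasetFromJsonLd: a
    // non-null metadata language must be one of the configured keys.
    class MetadataLanguageCheck {
        static void verify(SettingsServiceBean settingsService, String mdLang) {
            if (mdLang != null
                    && !settingsService.getBaseMetadataLanguageMap(new HashMap<>(), true)
                            .containsKey(mdLang)) {
                throw new IllegalArgumentException("Specified metadatalanguage not allowed: " + mdLang);
            }
        }
    }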
From 36e077fa49e7483b895f21e26b727c18d0887f35 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 21 Feb 2022 16:21:24 -0500
Subject: [PATCH 044/366] Add debug info to test
---
src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index c08a71eea65..685c45e128e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -2326,7 +2326,7 @@ public void testReCreateDataset() {
// Get the semantic metadata
Response response = UtilIT.getDatasetJsonLDMetadata(datasetId, apiToken);
response.then().assertThat().statusCode(OK.getStatusCode());
-
+ response.prettyPeek();
String expectedString = getData(response.getBody().asString());
// Delete the dataset via native API
@@ -2337,6 +2337,7 @@ public void testReCreateDataset() {
// Now use the migrate API to recreate the dataset
// Now use the migrate API to recreate the dataset
response = UtilIT.recreateDatasetJsonLD(apiToken, dataverseAlias, expectedString);
+ response.prettyPeek();
String body = response.getBody().asString();
response.then().assertThat().statusCode(CREATED.getStatusCode());
From 99174b9acc5926dad348d2b10536b15afa963c93 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 21 Feb 2022 17:53:04 -0500
Subject: [PATCH 045/366] check undefined code, not null
---
.../java/edu/harvard/iq/dataverse/util/bagit/OREMap.java | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
index ff9de5d8f25..7ae14655e81 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
@@ -8,6 +8,7 @@
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetFieldType;
import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DvObjectContainer;
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
import edu.harvard.iq.dataverse.branding.BrandingUtil;
@@ -214,9 +215,10 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
aggBuilder.add(JsonLDTerm.schemaOrg("includedInDataCatalog").getLabel(),
BrandingUtil.getRootDataverseCollectionName());
-
- addIfNotNull(aggBuilder, JsonLDTerm.schemaOrg("inLanguage"), dataset.getMetadataLanguage());
-
+ String mdl = dataset.getMetadataLanguage();
+ if(!mdl.equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)) {
+ aggBuilder.add(JsonLDTerm.schemaOrg("inLanguage").getLabel(), mdl);
+ }
// The aggregation aggregates aggregatedresources (Datafiles) which each have
// their own entry and metadata
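One caveat with the new guard: if getMetadataLanguage() could still return null, mdl.equals(...) would throw a NullPointerException; the commit message suggests the getter now returns the sentinel code instead of null. A defensive variant of the lines above (an assumption, not what the patch ships):

    String mdl = dataset.getMetadataLanguage();
    // Explicit null check plus sentinel check: only record a real language.
    if (mdl != null && !DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE.equals(mdl)) {
        aggBuilder.add(JsonLDTerm.schemaOrg("inLanguage").getLabel(), mdl);
    }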
From 2163721e13034ec3598b34291852ca9d9d722ba4 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 21 Feb 2022 18:53:48 -0500
Subject: [PATCH 046/366] explicitly print
---
src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 ++
1 file changed, 2 insertions(+)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 685c45e128e..d72b754fdb6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -2334,6 +2334,8 @@ public void testReCreateDataset() {
deleteDatasetResponse.prettyPrint();
assertEquals(200, deleteDatasetResponse.getStatusCode());
+
+ logger.info("SENDING to reCreate Dataset: " + expectedString);
// Now use the migrate API to recreate the dataset
// Now use the migrate API to recreate the dataset
response = UtilIT.recreateDatasetJsonLD(apiToken, dataverseAlias, expectedString);
From 4549f0c224ecde4d099218ecaef430bd598634fb Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Wed, 23 Feb 2022 09:52:20 -0500
Subject: [PATCH 047/366] TDL Bundle text
---
src/main/java/propertyFiles/Bundle.properties | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 8abca8ff3fd..35d813586ce 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -145,7 +145,7 @@ contact.header=Contact {0}
contact.dataverse.header=Email Dataverse Contact
contact.dataset.header=Email Dataset Contact
contact.to=To
-contact.support=Support
+contact.support=TDL Dataverse Support
contact.from=From
contact.from.required=User email is required.
contact.from.invalid=Email is invalid.
@@ -317,9 +317,9 @@ login.System=Login System
login.forgot.text=Forgot your password?
login.builtin=Dataverse Account
login.institution=Institutional Account
-login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
+login.institution.blurb=Log in or sign up with your institutional account — learn more. If you are not affiliated with a TDR member institution (see dropdown menu), please use the Google Login option.
login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance.
-login.builtin.credential.usernameOrEmail=Username/Email
+login.builtin.credential.usernameOrEmail=Admin ID
login.builtin.credential.password=Password
login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
login.signup.blurb=Sign up for a Dataverse account.
@@ -335,12 +335,12 @@ login.button.orcid=Create or Connect your ORCID
# authentication providers
auth.providers.title=Other options
auth.providers.tip=You can convert a Dataverse account to use one of the options above. More information about account creation.
-auth.providers.title.builtin=Username/Email
+auth.providers.title.builtin=Admin ID
auth.providers.title.shib=Your Institution
auth.providers.title.orcid=ORCID
-auth.providers.title.google=Google
+auth.providers.title.google=Google (No TDR affiliation)
auth.providers.title.github=GitHub
-auth.providers.blurb=Log in or sign up with your {0} account — more information about account creation. Having trouble? Please contact {3} for assistance.
+auth.providers.blurb=Log in or sign up with your Google account — learn more. If you are not affiliated with a TDR member institution, please use the Google Login option. Having trouble? Please contact {3} for assistance.
auth.providers.persistentUserIdName.orcid=ORCID iD
auth.providers.persistentUserIdName.github=ID
auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers.
@@ -383,7 +383,7 @@ shib.welcomeExistingUserMessageDefaultInstitution=your institution
shib.dataverseUsername=Dataverse Username
shib.currentDataversePassword=Current Dataverse Password
shib.accountInformation=Account Information
-shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account.
+shib.offerToCreateNewAccount=Contact your TDR liaison to get help and training. Published content cannot be easily deleted.
shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account.
# oauth2/firstLogin.xhtml
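A note on the placeholder edits above: Bundle.properties values are resolved MessageFormat-style, so hardcoding Google in place of {0} in auth.providers.blurb is safe as long as {3} keeps its position in the caller's argument list. A small illustration (the argument values are hypothetical; only the placeholder mechanics are the point):

    import java.text.MessageFormat;

    // Arguments 0-2 go unused once {0} is hardcoded; {3} still resolves.
    String template = "Log in or sign up with your Google account. "
            + "Having trouble? Please contact {3} for assistance.";
    String rendered = MessageFormat.format(template, "Google", null, null, "support@example.edu");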
From 7279c800fab3745634a3f1b4c01ec5094574c9f5 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Wed, 23 Feb 2022 11:07:53 -0500
Subject: [PATCH 048/366] #8191 consolidate delete function
---
.../edu/harvard/iq/dataverse/DatasetPage.java | 44 +++++++------------
src/main/webapp/editFilesFragment.xhtml | 6 ++-
2 files changed, 21 insertions(+), 29 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 80917d58a1c..cb0539738c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -143,6 +143,7 @@
import edu.harvard.iq.dataverse.search.SearchServiceBean;
import edu.harvard.iq.dataverse.search.SearchUtil;
import edu.harvard.iq.dataverse.search.SolrClientService;
+import edu.harvard.iq.dataverse.util.FileMetadataUtil;
import java.util.Comparator;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -3374,7 +3375,16 @@ public String deleteFiles() throws CommandException{
}
deleteFiles(filesToDelete);
- String retVal = save();
+ String retVal;
+
+ if (editMode == EditMode.CREATE) {
+ workingVersion.setFileMetadatas(new ArrayList<>());
+ retVal = "";
+ } else {
+ retVal = save();
+ }
+
+
//And delete them only after the dataset is updated
for(Embargo emb: orphanedEmbargoes) {
embargoService.deleteById(emb.getId(), ((AuthenticatedUser)session.getUser()).getUserIdentifier());
@@ -3409,32 +3419,12 @@ private void deleteFiles(List<FileMetadata> filesToDelete) {
// So below we are deleting the metadata from the version; we are
// NOT adding the file to the filesToBeDeleted list that will be
// passed to the UpdateDatasetCommand. -- L.A. Aug 2017
- Iterator<FileMetadata> fmit = dataset.getEditVersion().getFileMetadatas().iterator();
- while (fmit.hasNext()) {
- FileMetadata fmd = fmit.next();
- if (markedForDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) {
- // And if this is an image file that happens to be assigned
- // as the dataset thumbnail, let's null the assignment here:
-
- if (fmd.getDataFile().equals(dataset.getThumbnailFile())) {
- dataset.setThumbnailFile(null);
- }
- /* It should not be possible to get here if this file
- is not in fact released! - so the code block below
- is not needed.
- //if not published then delete identifier
- if (!fmd.getDataFile().isReleased()){
- try{
- commandEngine.submit(new DeleteDataFileCommand(fmd.getDataFile(), dvRequestService.getDataverseRequest()));
- } catch (CommandException e){
- //this command is here to delete the identifier of unreleased files
- //if it fails then a reserved identifier may still be present on the remote provider
- }
- } */
- fmit.remove();
- break;
- }
- }
+
+ FileMetadataUtil.removeFileMetadataFromList(workingVersion.getFileMetadatas(), markedForDelete);
+
+ FileMetadataUtil.removeDataFileFromList(newFiles, markedForDelete.getDataFile());
+ FileUtil.deleteTempFile(markedForDelete.getDataFile(), dataset, ingestService);
+
}
}
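The shared helpers replace the inline iterator scan deleted above. A plausible sketch of FileMetadataUtil.removeFileMetadataFromList, inferred from the old loop's matching rule (the real utility class is not shown in this patch):

    import java.util.Iterator;
    import java.util.List;

    public final class FileMetadataUtilSketch {
        // Remove the FileMetadata whose DataFile shares a storage identifier
        // with the one marked for deletion, then stop: at most one entry goes.
        public static void removeFileMetadataFromList(List<FileMetadata> fmds, FileMetadata markedForDelete) {
            String sid = markedForDelete.getDataFile().getStorageIdentifier();
            for (Iterator<FileMetadata> it = fmds.iterator(); it.hasNext();) {
                if (sid.equals(it.next().getDataFile().getStorageIdentifier())) {
                    it.remove();
                    break;
                }
            }
        }
    }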
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index 6558bb47b38..f6b5157a1a5 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -434,8 +434,10 @@
-
- #{bundle['file.deleteFileDialog.tip']}
-
- #{bundle['file.deleteFileDialog.failed.tip']}
-
-
-