Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
e5cfcbc
Refactor: Datasets.validateDatasetFieldValues
GPortas Feb 19, 2025
01cb7b4
Changed: dataset field validation logic moved to new DatasetFieldVali…
GPortas Feb 19, 2025
77f94d0
Changed: updated testAddUpdateDatasetViaNativeAPI IT
GPortas Feb 19, 2025
09089cd
Changed: using new DatasetFieldServiceBean method for knowing require…
GPortas Feb 20, 2025
aa5690a
Added: DatasetFieldValidatorTest and minor renamings
GPortas Feb 21, 2025
ac02c79
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Feb 21, 2025
fe77364
Stash: UpdateDatasetFieldsCommand WIP
GPortas Feb 24, 2025
3038fca
Fixed: missing jakarta ejb and inject annotations for DatasetFieldsVa…
GPortas Feb 24, 2025
3778021
Refactor: using UpdateDatasetFieldsCommand in editVersionMetadata API…
GPortas Feb 24, 2025
d5ad310
Changed: renamed bundle string
GPortas Feb 24, 2025
e12b62c
Fixed: DatasetsIT assertion
GPortas Feb 24, 2025
dfab42c
Stash: refactoring UpdateDatasetFieldsCommand
GPortas Feb 24, 2025
6a227f5
Refactor: UpdateDatasetFieldsCommand
GPortas Feb 24, 2025
aa2d49e
Stash: UpdateDatasetFieldsCommandTest WIP
GPortas Feb 25, 2025
0f00413
Stash: UpdateDatasetFieldsCommandTest happy path tests WIP
GPortas Feb 25, 2025
baa674e
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Feb 25, 2025
585bd61
Added: test cases to UpdateDatasetFieldsCommandTest
GPortas Feb 25, 2025
8dcacc2
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Feb 26, 2025
9c22b58
Added: controlled vocabulary test cases to UpdateDatasetFieldsCommand…
GPortas Feb 26, 2025
eea544a
Changed: resetDatasetField for compound fields behavior in UpdateData…
GPortas Feb 26, 2025
a1a3222
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Feb 26, 2025
d2075cf
Changed: restored resetDatasetField for compound fields behavior in U…
GPortas Feb 26, 2025
079ce4f
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Feb 27, 2025
7288d8b
Changed: allowing empty controlled vocabulary fields in edit metadata…
GPortas Mar 1, 2025
57c9855
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Mar 1, 2025
64e457d
Added: test cases to testAddUpdateDatasetViaNativeAPI
GPortas Mar 1, 2025
39615fd
Added: new mechanism in AbstractApiBean to detect outdated dataset up…
GPortas Mar 2, 2025
3902ef0
Added: test cases to testAddUpdateDatasetViaNativeAPI for sourceInter…
GPortas Mar 2, 2025
52ea3a2
Added: docs for sourceInternalVersionNumber query param
GPortas Mar 2, 2025
0865db0
Added: docs for edit metadata endpoint fields removal
GPortas Mar 2, 2025
96d84c8
Added: release notes for #11243
GPortas Mar 2, 2025
663da6a
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Mar 4, 2025
11c6f3e
Fixed: wrong doc reference in docs
GPortas Mar 4, 2025
dcca973
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Mar 4, 2025
e597914
Merge branch 'develop' into 11243-editmetadata-api-empty-values
GPortas Mar 11, 2025
5b1c06d
Merge branch 'develop' of github.com:IQSS/dataverse into 11243-editme…
GPortas Mar 19, 2025
42a78aa
Refactor: more readable JSON strings in DatasetsIT
GPortas Mar 19, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions doc/release-notes/11243-editmetadata-api-extension.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
### Edit Dataset Metadata API extension

- This endpoint now allows removing fields (by sending empty values), as long as they are not required by the dataset.
- New ``sourceInternalVersionNumber`` optional query parameter, which prevents inconsistencies by managing updates that
may occur from other users while a dataset is being edited.
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
{
"fields": [
{
"typeName": "alternativeTitle",
"multiple": true,
"typeClass": "primitive",
"value": []
},
{
"typeName": "distributor",
"multiple": true,
"typeClass": "compound",
"value": [
{
"distributorName": {
"typeName": "distributorName",
"multiple": false,
"typeClass": "primitive",
"value": ""
},
"distributorAffiliation": {
"typeName": "distributorAffiliation",
"multiple": false,
"typeClass": "primitive",
"value": ""
}
}
]
},
        {
          "typeName": "author",
          "multiple": true,
          "typeClass": "compound",
          "value": [
            {
              "authorName": {
                "typeName": "authorName",
                "value": "Belicheck, Bill"
              },
              "authorAffiliation": {
                "typeName": "authorAffiliation",
                "value": ""
              }
            }
          ]
        }
]
}
24 changes: 24 additions & 0 deletions doc/sphinx-guides/source/api/native-api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2124,6 +2124,30 @@ The fully expanded example above (without environment variables) looks like this

For these edits your JSON file need only include those dataset fields which you would like to edit. A sample JSON file may be downloaded here: :download:`dataset-edit-metadata-sample.json <../_static/api/dataset-edit-metadata-sample.json>`

This endpoint also allows removing fields, as long as they are not required by the dataset. To remove a field, send an empty value (``""``) for individual fields. For multiple fields, send an empty array (``[]``). A sample JSON file for removing fields may be downloaded here: :download:`dataset-edit-metadata-delete-fields-sample.json <../_static/api/dataset-edit-metadata-delete-fields-sample.json>`

If another user updates the dataset version metadata before you send the update request, data inconsistencies may occur. To prevent this, you can use the optional ``sourceInternalVersionNumber`` query parameter. This parameter must include the internal version number corresponding to the dataset version being updated. Note that internal version numbers increase sequentially with each version update.

If this parameter is provided, the update will proceed only if the internal version number remains unchanged. Otherwise, the request will fail with an error.

Example using ``sourceInternalVersionNumber``:

.. code-block:: bash

export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
export SOURCE_INTERNAL_VERSION_NUMBER=5

curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true&sourceInternalVersionNumber=$SOURCE_INTERNAL_VERSION_NUMBER" --upload-file dataset-update-metadata.json

The fully expanded example above (without environment variables) looks like this:

.. code-block:: bash

curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true&sourceInternalVersionNumber=5" --upload-file dataset-update-metadata.json


Delete Dataset Metadata
~~~~~~~~~~~~~~~~~~~~~~~

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -923,6 +923,42 @@ public List<DatasetFieldType> findAllInMetadataBlockAndDataverse(MetadataBlock m
return em.createQuery(criteriaQuery).getResultList();
}

/**
 * Checks whether the given dataset field type is required for datasets in the given
 * dataverse: either a dataverse field-type input level marks it as required, or it is
 * required installation-wide.
 *
 * @param datasetFieldType the field type to check
 * @param dataverse        the dataverse whose input-level configuration is consulted
 * @return {@code true} if the field type is required in this dataverse, {@code false} otherwise
 */
public boolean isFieldRequiredInDataverse(DatasetFieldType datasetFieldType, Dataverse dataverse) {
    CriteriaBuilder cb = em.getCriteriaBuilder();
    CriteriaQuery<Long> query = cb.createQuery(Long.class);

    Root<Dataverse> dataverseRoot = query.from(Dataverse.class);
    Root<DatasetFieldType> fieldTypeRoot = query.from(DatasetFieldType.class);

    // LEFT JOIN so that field types with no input-level row for this dataverse are still considered.
    Join<Dataverse, DataverseFieldTypeInputLevel> inputLevelJoin =
            dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT);

    // Required via a dataverse-level input level for this field type.
    Predicate requiredByInputLevel = cb.and(
            cb.equal(fieldTypeRoot, inputLevelJoin.get("datasetFieldType")),
            cb.isTrue(inputLevelJoin.get("required"))
    );

    // Required installation-wide, independent of any dataverse configuration.
    Predicate requiredInstallationWide = buildFieldRequiredInTheInstallationPredicate(cb, fieldTypeRoot);

    // Restrict to the specific dataverse/field-type pair and count the matches.
    query.select(cb.count(fieldTypeRoot))
            .where(
                    cb.equal(dataverseRoot.get("id"), dataverse.getId()),
                    cb.equal(fieldTypeRoot.get("id"), datasetFieldType.getId()),
                    cb.or(requiredByInputLevel, requiredInstallationWide)
            );

    Long matches = em.createQuery(query).getSingleResult();
    return matches != null && matches > 0;
}

private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boolean onlyDisplayedOnCreate, CriteriaQuery<DatasetFieldType> criteriaQuery, CriteriaBuilder criteriaBuilder, Root<DatasetFieldType> datasetFieldTypeRoot, Root<MetadataBlock> metadataBlockRoot) {
Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);

Expand Down Expand Up @@ -960,7 +996,7 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
// Define a predicate to exclude DatasetFieldTypes that have no associated input level (i.e., the subquery does not return a result).
Predicate hasNoInputLevelPredicate = criteriaBuilder.not(criteriaBuilder.exists(subquery));

// Define a predicate to include the required fields in Dataverse.
// Define a predicate to include the required fields in the installation.
Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot);

// Define a predicate for displaying DatasetFieldTypes on create.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
import edu.harvard.iq.dataverse.dataset.DatasetFieldsValidator;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean;
Expand Down Expand Up @@ -189,6 +190,9 @@ public class EjbDataverseEngine {

@EJB
DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean;

@EJB
DatasetFieldsValidator datasetFieldsValidator;

@EJB
EjbDataverseEngineInner innerEngine;
Expand Down Expand Up @@ -531,6 +535,11 @@ public DataverseFeaturedItemServiceBean dataverseFeaturedItems() {
return dataverseFeaturedItemServiceBean;
}

/**
 * Exposes the injected {@link DatasetFieldsValidator} to commands executed by this engine.
 */
@Override
public DatasetFieldsValidator datasetFieldsValidator() {
    return this.datasetFieldsValidator;
}

@Override
public StorageUseServiceBean storageUse() {
return storageUseService;
Expand Down
13 changes: 9 additions & 4 deletions src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,7 @@

import java.io.InputStream;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.UUID;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
Expand Down Expand Up @@ -446,6 +443,14 @@ public Command<DatasetVersion> handleLatestPublished() {
return dsv;
}

/**
 * Ensures the caller-supplied internal dataset version number is still current, i.e. that
 * no newer version of the dataset exists than the one the caller based its edit on.
 *
 * @param dataset         the dataset whose latest version is compared
 * @param internalVersion the internal version number supplied by the caller
 * @throws WrappedResponse a 400 Bad Request when the latest version number is greater
 *                         than {@code internalVersion} (i.e. the caller's copy is outdated)
 */
protected void validateInternalVersionNumberIsNotOutdated(Dataset dataset, int internalVersion) throws WrappedResponse {
    long latestInternalVersion = dataset.getLatestVersion().getVersion();
    if (latestInternalVersion <= internalVersion) {
        return;
    }
    String message = BundleUtil.getStringFromBundle(
            "abstractApiBean.error.datasetInternalVersionNumberIsOutdated",
            Collections.singletonList(Integer.toString(internalVersion)));
    throw new WrappedResponse(badRequest(message));
}

protected DataFile findDataFileOrDie(String id) throws WrappedResponse {
DataFile datafile;
if (id.equals(PERSISTENT_ID_KEY)) {
Expand Down
Loading