Skip to content

Commit

Permalink
Merge pull request #11144 from IQSS/11130-update-dataverse-api-remove…
Browse files Browse the repository at this point in the history
…-metadatablock

Update Dataverse API removes metadatablocks if optional params are omitted
  • Loading branch information
ofahimIQSS authored Jan 14, 2025
2 parents 58fd3e9 + 82319e1 commit 5073daa
Show file tree
Hide file tree
Showing 9 changed files with 229 additions and 64 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
### Fixes unintended data deletion when optional fields are omitted in the update dataverse API

Omitting optional fields inputLevels, facetIds, or metadataBlockNames caused data to be deleted.
This fix no longer deletes data for these fields. Two new flags have been added to the ``metadataBlocks`` JSON object to signal the deletion of the data.
- ``inheritMetadataBlocksFromParent: true`` will remove ``metadataBlockNames`` and ``inputLevels`` if the JSON objects are omitted.
- ``inheritFacetsFromParent: true`` will remove ``facetIds`` if the JSON object is omitted.

For more information, see issue [#11130](https://github.com/IQSS/dataverse/issues/11130)
17 changes: 14 additions & 3 deletions doc/sphinx-guides/source/api/native-api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -128,12 +128,23 @@ Note that setting any of these fields overwrites the previous configuration.

When it comes to omitting these fields in the JSON:

- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent.
- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection.
- Omitting the entire ``metadataBlocks`` object in the request JSON would exclude the three sub-objects, resulting in the application of the two changes described above.
- Omitting ``facetIds`` or ``metadataBlockNames`` causes no change to the Dataverse collection. To delete the current configuration and inherit the corresponding configuration from its parent, include the flag ``inheritFacetsFromParent`` and/or ``inheritMetadataBlocksFromParent``, respectively.
- Omitting ``inputLevels`` causes no change to the Dataverse collection. Including the flag ``inheritMetadataBlocksFromParent`` will cause the custom ``inputLevels`` to be deleted and inherited from the parent.
- Omitting the entire ``metadataBlocks`` object in the request JSON would cause no change to the ``inputLevels``, ``facetIds`` or ``metadataBlockNames`` of the Dataverse collection.

To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs.

To force the configurations to be deleted and inherited from the parent's configuration include the following ``metadataBlocks`` object in your JSON

.. code-block:: json

    "metadataBlocks": {
      "inheritMetadataBlocksFromParent": true,
      "inheritFacetsFromParent": true
    }
.. note:: Including both the list ``metadataBlockNames`` and the flag ``"inheritMetadataBlocksFromParent": true`` will result in an error being returned {"status": "ERROR", "message": "Metadata block can not contain both metadataBlockNames and inheritMetadataBlocksFromParent: true"}. The same is true for ``facetIds`` and ``inheritFacetsFromParent``.

See also :ref:`collection-attributes-api`.

.. _view-dataverse:
Expand Down
65 changes: 52 additions & 13 deletions src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package edu.harvard.iq.dataverse.api;

import com.google.common.collect.Lists;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean;
Expand Down Expand Up @@ -195,7 +196,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod
List<DatasetFieldType> facets = parseFacets(body);

AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true));
dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO));
return ok(json(dataverse));

} catch (WrappedResponse ww) {
Expand All @@ -221,31 +222,60 @@ private DataverseDTO parseAndValidateUpdateDataverseRequestBody(String body) thr
}
}

/*
return null - ignore
return empty list - delete and inherit from parent
return non-empty list - update
*/
private List<DataverseFieldTypeInputLevel> parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse {
JsonObject metadataBlocksJson = getMetadataBlocksJson(body);
if (metadataBlocksJson == null) {
return null;
JsonArray inputLevelsArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("inputLevels") : null;

if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) {
return Lists.newArrayList(); // delete
}
JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels");
return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null;
return parseInputLevels(inputLevelsArray, dataverse);
}

/*
return null - ignore
return empty list - delete and inherit from parent
return non-empty list - update
*/
private List<MetadataBlock> parseMetadataBlocks(String body) throws WrappedResponse {
JsonObject metadataBlocksJson = getMetadataBlocksJson(body);
if (metadataBlocksJson == null) {
return null;
JsonArray metadataBlocksArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("metadataBlockNames") : null;

if (metadataBlocksArray != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) {
String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "metadataBlockNames", "inheritMetadataBlocksFromParent");
throw new WrappedResponse(badRequest(errorMessage));
}
if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) {
return Lists.newArrayList(); // delete and inherit from parent
}
JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames");
return metadataBlocksArray != null ? parseNewDataverseMetadataBlocks(metadataBlocksArray) : null;

return parseNewDataverseMetadataBlocks(metadataBlocksArray);
}

/*
return null - ignore
return empty list - delete and inherit from parent
return non-empty list - update
*/
private List<DatasetFieldType> parseFacets(String body) throws WrappedResponse {
JsonObject metadataBlocksJson = getMetadataBlocksJson(body);
if (metadataBlocksJson == null) {
return null;
JsonArray facetsArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("facetIds") : null;

if (facetsArray != null && metadataBlocksJson.containsKey("inheritFacetsFromParent") && metadataBlocksJson.getBoolean("inheritFacetsFromParent")) {
String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "facetIds", "inheritFacetsFromParent");
throw new WrappedResponse(badRequest(errorMessage));
}

if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritFacetsFromParent") && metadataBlocksJson.getBoolean("inheritFacetsFromParent")) {
return Lists.newArrayList(); // delete and inherit from parent
}
JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds");
return facetsArray != null ? parseFacets(facetsArray) : null;

return parseFacets(facetsArray);
}

private JsonObject getMetadataBlocksJson(String body) {
Expand Down Expand Up @@ -277,6 +307,9 @@ private Response handleEJBException(EJBException ex, String action) {
}

private List<MetadataBlock> parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse {
if (metadataBlockNamesArray == null) {
return null;
}
List<MetadataBlock> selectedMetadataBlocks = new ArrayList<>();
for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) {
MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString());
Expand Down Expand Up @@ -745,6 +778,9 @@ public Response updateInputLevels(@Context ContainerRequestContext crc, @PathPar
}

private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse {
if (inputLevelsArray == null) {
return null;
}
List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>();
for (JsonValue value : inputLevelsArray) {
JsonObject inputLevel = (JsonObject) value;
Expand All @@ -771,6 +807,9 @@ private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevel
}

private List<DatasetFieldType> parseFacets(JsonArray facetsArray) throws WrappedResponse {
if (facetsArray == null) {
return null;
}
List<DatasetFieldType> facets = new LinkedList<>();
for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) {
DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,13 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand<Dataverse>
private final List<DataverseFieldTypeInputLevel> inputLevels;
private final List<DatasetFieldType> facets;
protected final List<MetadataBlock> metadataBlocks;
private final boolean resetRelationsOnNullValues;

public AbstractWriteDataverseCommand(Dataverse dataverse,
Dataverse affectedDataverse,
DataverseRequest request,
List<DatasetFieldType> facets,
List<DataverseFieldTypeInputLevel> inputLevels,
List<MetadataBlock> metadataBlocks,
boolean resetRelationsOnNullValues) {
List<MetadataBlock> metadataBlocks) {
super(request, affectedDataverse);
this.dataverse = dataverse;
if (facets != null) {
Expand All @@ -45,7 +43,6 @@ public AbstractWriteDataverseCommand(Dataverse dataverse,
} else {
this.metadataBlocks = null;
}
this.resetRelationsOnNullValues = resetRelationsOnNullValues;
}

@Override
Expand All @@ -59,46 +56,61 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
return ctxt.dataverses().save(dataverse);
}

/*
metadataBlocks = null - ignore
metadataBlocks is empty - delete and inherit from parent
metadataBlocks is not empty - set with new updated values
*/
private void processMetadataBlocks() {
if (metadataBlocks != null && !metadataBlocks.isEmpty()) {
dataverse.setMetadataBlockRoot(true);
dataverse.setMetadataBlocks(metadataBlocks);
} else if (resetRelationsOnNullValues) {
dataverse.setMetadataBlockRoot(false);
dataverse.clearMetadataBlocks();
if (metadataBlocks != null) {
if (metadataBlocks.isEmpty()) {
dataverse.setMetadataBlockRoot(false);
dataverse.clearMetadataBlocks();
} else {
dataverse.setMetadataBlockRoot(true);
dataverse.setMetadataBlocks(metadataBlocks);
}
}
}

/*
facets = null - ignore
facets is empty - delete and inherit from parent
facets is not empty - set with new updated values
*/
private void processFacets(CommandContext ctxt) {
if (facets != null) {
ctxt.facets().deleteFacetsFor(dataverse);
dataverse.setDataverseFacets(new ArrayList<>());

if (!facets.isEmpty()) {
if (facets.isEmpty()) {
ctxt.facets().deleteFacetsFor(dataverse);
dataverse.setFacetRoot(false);
} else {
ctxt.facets().deleteFacetsFor(dataverse);
dataverse.setDataverseFacets(new ArrayList<>());
dataverse.setFacetRoot(true);
for (int i = 0; i < facets.size(); i++) {
ctxt.facets().create(i, facets.get(i), dataverse);
}
}

for (int i = 0; i < facets.size(); i++) {
ctxt.facets().create(i, facets.get(i), dataverse);
}
} else if (resetRelationsOnNullValues) {
ctxt.facets().deleteFacetsFor(dataverse);
dataverse.setFacetRoot(false);
}
}

/*
inputLevels = null - ignore
inputLevels is empty - delete
inputLevels is not empty - set with new updated values
*/
private void processInputLevels(CommandContext ctxt) {
if (inputLevels != null) {
if (!inputLevels.isEmpty()) {
if (inputLevels.isEmpty()) {
ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse);
} else {
dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels);
ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse);
inputLevels.forEach(inputLevel -> {
inputLevel.setDataverse(dataverse);
ctxt.fieldTypeInputLevels().create(inputLevel);
});
}
ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse);
inputLevels.forEach(inputLevel -> {
inputLevel.setDataverse(dataverse);
ctxt.fieldTypeInputLevels().create(inputLevel);
});
} else if (resetRelationsOnNullValues) {
ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created,
List<DatasetFieldType> facets,
List<DataverseFieldTypeInputLevel> inputLevels,
List<MetadataBlock> metadataBlocks) {
super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false);
super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse,
List<Dataverse> featuredDataverses,
DataverseRequest request,
List<DataverseFieldTypeInputLevel> inputLevels) {
this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false);
this(dataverse, facets, featuredDataverses, request, inputLevels, null, null);
}

public UpdateDataverseCommand(Dataverse dataverse,
Expand All @@ -41,9 +41,8 @@ public UpdateDataverseCommand(Dataverse dataverse,
DataverseRequest request,
List<DataverseFieldTypeInputLevel> inputLevels,
List<MetadataBlock> metadataBlocks,
DataverseDTO updatedDataverseDTO,
boolean resetRelationsOnNullValues) {
super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues);
DataverseDTO updatedDataverseDTO) {
super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks);
if (featuredDataverses != null) {
this.featuredDataverseList = new ArrayList<>(featuredDataverses);
} else {
Expand Down
1 change: 1 addition & 0 deletions src/main/java/propertyFiles/Bundle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -984,6 +984,7 @@ dataverse.inputlevels.error.cannotberequiredifnotincluded=The input level for th
dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}'
dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable
dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0}
dataverse.metadatablocks.error.containslistandinheritflag=Metadata block can not contain both {0} and {1}: true
dataverse.create.error.jsonparse=Error parsing Json: {0}
dataverse.create.error.jsonparsetodataverse=Error parsing the POSTed json into a dataverse: {0}
# rolesAndPermissionsFragment.xhtml
Expand Down
Loading

0 comments on commit 5073daa

Please sign in to comment.