Skip to content

Commit

Permalink
Changed: using new DatasetFieldServiceBean method for knowing require…
Browse files Browse the repository at this point in the history
…d dataset field types in DatasetFieldValidator
  • Loading branch information
GPortas committed Feb 20, 2025
1 parent 77f94d0 commit 09089cd
Show file tree
Hide file tree
Showing 3 changed files with 94 additions and 46 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -923,6 +923,42 @@ public List<DatasetFieldType> findAllInMetadataBlockAndDataverse(MetadataBlock m
return em.createQuery(criteriaQuery).getResultList();
}

/**
 * Reports whether the given field type is required when editing datasets in the
 * given dataverse: either an input level of that dataverse marks it required, or
 * it is required installation-wide.
 */
public boolean isFieldRequiredInDataverse(DatasetFieldType datasetFieldType, Dataverse dataverse) {
    CriteriaBuilder builder = em.getCriteriaBuilder();
    CriteriaQuery<Long> query = builder.createQuery(Long.class);

    Root<Dataverse> dataverseRoot = query.from(Dataverse.class);
    Root<DatasetFieldType> fieldTypeRoot = query.from(DatasetFieldType.class);

    // LEFT JOIN so a dataverse with no configured input levels still matches the
    // installation-wide branch of the OR below.
    Join<Dataverse, DataverseFieldTypeInputLevel> inputLevelJoin =
            dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT);

    // Branch 1: an input level of this dataverse flags the field type as required.
    Predicate requiredByInputLevel = builder.and(
            builder.equal(fieldTypeRoot, inputLevelJoin.get("datasetFieldType")),
            builder.isTrue(inputLevelJoin.get("required"))
    );

    // Branch 2: the field type is required across the whole installation.
    Predicate requiredInstallationWide =
            buildFieldRequiredInTheInstallationPredicate(builder, fieldTypeRoot);

    query.select(builder.count(fieldTypeRoot));
    query.where(
            builder.equal(dataverseRoot.get("id"), dataverse.getId()),
            builder.equal(fieldTypeRoot.get("id"), datasetFieldType.getId()),
            builder.or(requiredByInputLevel, requiredInstallationWide)
    );

    Long matches = em.createQuery(query).getSingleResult();

    return matches != null && matches > 0;
}

private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boolean onlyDisplayedOnCreate, CriteriaQuery<DatasetFieldType> criteriaQuery, CriteriaBuilder criteriaBuilder, Root<DatasetFieldType> datasetFieldTypeRoot, Root<MetadataBlock> metadataBlockRoot) {
Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);

Expand Down Expand Up @@ -953,7 +989,7 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
// Define a predicate to exclude DatasetFieldTypes that have no associated input level (i.e., the subquery does not return a result).
Predicate hasNoInputLevelPredicate = criteriaBuilder.not(criteriaBuilder.exists(subquery));

// Define a predicate to include the required fields in Dataverse.
// Define a predicate to include the required fields in the installation.
Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot);

// Define a predicate for displaying DatasetFieldTypes on create.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,17 +1,27 @@
package edu.harvard.iq.dataverse.api;

import edu.harvard.iq.dataverse.DatasetField;
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.util.BundleUtil;
import jakarta.inject.Inject;

import java.util.List;
import java.util.StringJoiner;

public class DatasetFieldValidator {

public static String validate(List<DatasetField> fields) {
@Inject
private DatasetFieldServiceBean datasetFieldService;

public String validateUpdatedFields(List<DatasetField> fields, DatasetVersion datasetVersion) {
StringJoiner errors = new StringJoiner(" ");

for (DatasetField dsf : fields) {
if (!datasetFieldService.isFieldRequiredInDataverse(dsf.getDatasetFieldType(), datasetVersion.getDataset().getOwner())) {
continue;
}

String fieldName = dsf.getDatasetFieldType().getDisplayName();

if (isEmptyMultipleValue(dsf)) {
Expand All @@ -30,24 +40,24 @@ public static String validate(List<DatasetField> fields) {
return errors.length() > 0 ? errors.toString() : "";
}

private static boolean isEmptyMultipleValue(DatasetField dsf) {
/**
 * True when the field allows multiple values yet carries no controlled
 * vocabulary values, no compound values, and no plain values.
 */
private boolean isEmptyMultipleValue(DatasetField dsf) {
    if (!dsf.getDatasetFieldType().isAllowMultiples()) {
        return false;
    }
    return dsf.getControlledVocabularyValues().isEmpty()
            && dsf.getDatasetFieldCompoundValues().isEmpty()
            && dsf.getDatasetFieldValues().isEmpty();
}

private static boolean isEmptyControlledVocabulary(DatasetField dsf) {
/**
 * True when the field is a controlled-vocabulary field whose single value is
 * the empty string. The type check must run first: non-CV fields have no
 * single controlled vocabulary value to inspect.
 */
private boolean isEmptyControlledVocabulary(DatasetField dsf) {
    if (!dsf.getDatasetFieldType().isControlledVocabulary()) {
        return false;
    }
    return dsf.getSingleControlledVocabularyValue().getStrValue().isEmpty();
}

private static boolean isEmptyCompoundValue(DatasetField dsf) {
/**
 * True when the field is a compound field that holds no compound values.
 */
private boolean isEmptyCompoundValue(DatasetField dsf) {
    if (!dsf.getDatasetFieldType().isCompound()) {
        return false;
    }
    return dsf.getDatasetFieldCompoundValues().isEmpty();
}

private static boolean isEmptySingleValue(DatasetField dsf) {
private boolean isEmptySingleValue(DatasetField dsf) {
return !dsf.getDatasetFieldType().isControlledVocabulary() &&
!dsf.getDatasetFieldType().isCompound() &&
dsf.getSingleValue().getValue().isEmpty();
Expand Down
82 changes: 42 additions & 40 deletions src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@
import org.apache.commons.lang3.StringUtils;
import org.eclipse.microprofile.openapi.annotations.Operation;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
Expand Down Expand Up @@ -193,6 +192,9 @@ public class Datasets extends AbstractApiBean {
@Inject
DatasetTypeServiceBean datasetTypeSvc;

@Inject
DatasetFieldValidator datasetFieldValidator;

/**
* Used to consolidate the way we parse and handle dataset versions.
* @param <T>
Expand Down Expand Up @@ -1065,18 +1067,18 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque
//Get the current draft or create a new version to update
DatasetVersion dsv = ds.getOrCreateEditVersion();
dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
List<DatasetField> fields = new LinkedList<>();
List<DatasetField> updatedFields = new LinkedList<>();
DatasetField singleField;

JsonArray fieldsJson = json.getJsonArray("fields");
if (fieldsJson == null) {
singleField = jsonParser().parseField(json, Boolean.FALSE);
fields.add(singleField);
updatedFields.add(singleField);
} else {
fields = jsonParser().parseMultipleFields(json);
updatedFields = jsonParser().parseMultipleFields(json);
}

String validationErrors = DatasetFieldValidator.validate(fields);
String validationErrors = datasetFieldValidator.validateUpdatedFields(updatedFields, dsv);
if (!validationErrors.isEmpty()) {
logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + validationErrors, validationErrors);
return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.processDatasetUpdate.parseError", List.of(validationErrors)));
Expand All @@ -1088,79 +1090,79 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque
// and compare to the version fields
//if exist add/replace values
//if not add entire dsf
for (DatasetField updateField : fields) {
for (DatasetField updatedField : updatedFields) {
boolean found = false;
for (DatasetField dsf : dsv.getDatasetFields()) {
if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
for (DatasetField datasetVersionField : dsv.getDatasetFields()) {
if (datasetVersionField.getDatasetFieldType().equals(updatedField.getDatasetFieldType())) {
found = true;
if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
if (datasetVersionField.isEmpty() || datasetVersionField.getDatasetFieldType().isAllowMultiples() || replaceData) {
List priorCVV = new ArrayList<>();
String cvvDisplay = "";

if (updateField.getDatasetFieldType().isControlledVocabulary()) {
cvvDisplay = dsf.getDisplayValue();
for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
if (updatedField.getDatasetFieldType().isControlledVocabulary()) {
cvvDisplay = datasetVersionField.getDisplayValue();
for (ControlledVocabularyValue cvvOld : datasetVersionField.getControlledVocabularyValues()) {
priorCVV.add(cvvOld);
}
}

if (replaceData) {
if (dsf.getDatasetFieldType().isAllowMultiples()) {
dsf.setDatasetFieldCompoundValues(new ArrayList<>());
dsf.setDatasetFieldValues(new ArrayList<>());
dsf.setControlledVocabularyValues(new ArrayList<>());
if (datasetVersionField.getDatasetFieldType().isAllowMultiples()) {
datasetVersionField.setDatasetFieldCompoundValues(new ArrayList<>());
datasetVersionField.setDatasetFieldValues(new ArrayList<>());
datasetVersionField.setControlledVocabularyValues(new ArrayList<>());
priorCVV.clear();
dsf.getControlledVocabularyValues().clear();
datasetVersionField.getControlledVocabularyValues().clear();
} else {
dsf.setSingleValue("");
dsf.setSingleControlledVocabularyValue(null);
datasetVersionField.setSingleValue("");
datasetVersionField.setSingleControlledVocabularyValue(null);
}
cvvDisplay="";
}
if (updateField.getDatasetFieldType().isControlledVocabulary()) {
if (dsf.getDatasetFieldType().isAllowMultiples()) {
for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
if (updatedField.getDatasetFieldType().isControlledVocabulary()) {
if (datasetVersionField.getDatasetFieldType().isAllowMultiples()) {
for (ControlledVocabularyValue cvv : updatedField.getControlledVocabularyValues()) {
if (!cvvDisplay.contains(cvv.getStrValue())) {
priorCVV.add(cvv);
}
}
dsf.setControlledVocabularyValues(priorCVV);
datasetVersionField.setControlledVocabularyValues(priorCVV);
} else {
dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
datasetVersionField.setSingleControlledVocabularyValue(updatedField.getSingleControlledVocabularyValue());
}
} else {
if (!updateField.getDatasetFieldType().isCompound()) {
if (dsf.getDatasetFieldType().isAllowMultiples()) {
for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
dfv.setDatasetField(dsf);
dsf.getDatasetFieldValues().add(dfv);
if (!updatedField.getDatasetFieldType().isCompound()) {
if (datasetVersionField.getDatasetFieldType().isAllowMultiples()) {
for (DatasetFieldValue dfv : updatedField.getDatasetFieldValues()) {
if (!datasetVersionField.getDisplayValue().contains(dfv.getDisplayValue())) {
dfv.setDatasetField(datasetVersionField);
datasetVersionField.getDatasetFieldValues().add(dfv);
}
}
} else {
dsf.setSingleValue(updateField.getValue());
datasetVersionField.setSingleValue(updatedField.getValue());
}
} else {
for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
dfcv.setParentDatasetField(dsf);
dsf.setDatasetVersion(dsv);
dsf.getDatasetFieldCompoundValues().add(dfcv);
for (DatasetFieldCompoundValue dfcv : updatedField.getDatasetFieldCompoundValues()) {
if (!datasetVersionField.getCompoundDisplayValue().contains(updatedField.getCompoundDisplayValue())) {
dfcv.setParentDatasetField(datasetVersionField);
datasetVersionField.setDatasetVersion(dsv);
datasetVersionField.getDatasetFieldCompoundValues().add(dfcv);
}
}
}
}
} else {
if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
if (!datasetVersionField.isEmpty() && !datasetVersionField.getDatasetFieldType().isAllowMultiples() || !replaceData) {
return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + datasetVersionField.getDatasetFieldType().getDisplayName() + ")");
}
}
break;
}
}
if (!found) {
updateField.setDatasetVersion(dsv);
dsv.getDatasetFields().add(updateField);
updatedField.setDatasetVersion(dsv);
dsv.getDatasetFields().add(updatedField);
}
}
DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
Expand Down

0 comments on commit 09089cd

Please sign in to comment.