Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions doc/release-notes/12258-publish-submit-contains-files.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
## Bug: Publish and Submit for review must contain files
When `requireFilesToPublishDataset` is set on a Dataverse collection, a dataset must contain files in order to be published or submitted for review. This fix makes sure that the dataset *version* being published contains files, not just the dataset. It also adds this check to the `Submit for Review` functionality.
4 changes: 4 additions & 0 deletions doc/sphinx-guides/source/api/changelog.rst
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ This API changelog is experimental and we would love feedback on its usefulness.
v6.11
-----

- The following API will now return ``403`` if the ``requireFilesToPublishDataset`` flag is set and the dataset version contains 0 files.

- **/api/datasets/{Id}/submitForReview**

- The Croissant :ref:`metadata export format <metadata-export-formats>` has been updated from version 1.0 to 1.1, which is reflected in the ``conformsTo`` property. The unused ``wd`` property has been dropped.

v6.10
Expand Down
2 changes: 1 addition & 1 deletion doc/sphinx-guides/source/api/native-api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -1299,7 +1299,7 @@ The following attributes are supported:
* ``description`` Description
* ``affiliation`` Affiliation
* ``filePIDsEnabled`` ("true" or "false") Restricted to use by superusers and only when the :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>` setting is true. Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
* ``requireFilesToPublishDataset`` ("true" or "false") Restricted to use by superusers. Defines if Dataset needs files in order to be published. If not set the determination will be made through inheritance by checking the owners of this collection. Publishing by a superusers will not be blocked.
* ``requireFilesToPublishDataset`` ("true" or "false") Restricted to use by superusers. Defines whether a dataset version needs files in order to be published or submitted for review. If not set, the determination will be made through inheritance by checking the owners of this collection. Publishing by a superuser will not be blocked.
* ``allowedDatasetTypes`` Restricted to use by superusers. By default "dataset" is implied. Pass a comma-separated list of dataset types (e.g. "dataset,software"). You cannot unset this attribute so if you want to delete a dataset type, set ``allowedDatasetTypes`` to a dataset type you won't be deleting. See also :ref:`dataset-types`.

See also :ref:`update-dataverse-api`.
Expand Down
28 changes: 27 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.impl.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import jakarta.ejb.EJB;
Expand Down Expand Up @@ -253,9 +254,34 @@ public boolean canIssueDeleteDatasetCommand(DvObject dvo){

// PUBLISH DATASET
/**
 * Whether the publish action should be offered for the given object.
 * Returns false early when the dataset's latest version has no files and the
 * owning collection's effective requireFilesToPublishDataset flag is set;
 * otherwise defers to the normal permission check for PublishDatasetCommand.
 */
public boolean canIssuePublishDatasetCommand(DvObject dvo){
    // Blocked when the user wants to publish, the 'requires files' flag is
    // effective, and the latest version contains zero files.
    if (isMissingRequiredFiles(dvo)) {
        return false;
    }
    return canIssueCommand(dvo, PublishDatasetCommand.class);
}


// SUBMIT DATASET FOR REVIEW
/**
 * Whether the submit-for-review action should be offered for the given object.
 * Same zero-files guard as publishing (see isMissingRequiredFiles), then the
 * normal permission check for SubmitDatasetForReviewCommand.
 */
public boolean canIssueSubmitDatasetForReviewCommand(DvObject dvo){
    if (isMissingRequiredFiles(dvo)) {
        return false;
    }
    return canIssueCommand(dvo, SubmitDatasetForReviewCommand.class);
}

/**
 * Shared guard for publish / submit-for-review: true when dvo is a dataset
 * whose latest version has no files and the owning collection's effective
 * requireFilesToPublishDataset setting is enabled.
 * NOTE(review): unlike the command-side check, this does not exempt
 * superusers — TODO confirm that is intentional for UI rendering.
 */
private boolean isMissingRequiredFiles(DvObject dvo) {
    if (!dvo.isInstanceofDataset()) {
        return false;
    }
    Dataset dataset = (Dataset) dvo;
    Dataverse owner = dataset.getOwner();
    if (owner == null) {
        return false;
    }
    // Check the latest version's file metadata, not the dataset's file list:
    // a published dataset may have files while its draft version has none.
    List<FileMetadata> metadataList = dataset.getLatestVersion().getFileMetadatas();
    return (metadataList == null || metadataList.isEmpty())
            && owner.getEffectiveRequiresFilesToPublishDataset();
}

// For the dataverse_header fragment (and therefore, most of the pages),
// we need to know if authenticated users can add dataverses and datasets to the
// root collection. For the "Add Data" menu further in the search include fragment
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -319,4 +319,14 @@ protected void registerExternalVocabValuesIfAny(CommandContext ctxt, DatasetVers
}
}
}

/**
 * Effective "files required" setting used to block publishing a dataset or
 * submitting it for review. Superusers are never blocked; for everyone else
 * the owning collection's (possibly inherited) requireFilesToPublishDataset
 * flag decides. Returns false when the dataset has no owner.
 */
protected boolean getEffectiveRequiresFilesToPublishDataset() {
    if (getUser().isSuperuser()) {
        // Publishing/submitting by a superuser is never blocked by this rule.
        return false;
    }
    Dataverse owningCollection = getDataset().getOwner();
    if (owningCollection == null) {
        return false;
    }
    return owningCollection.getEffectiveRequiresFilesToPublishDataset();
}
}
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLock;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
Expand All @@ -17,6 +15,7 @@

import jakarta.persistence.OptimisticLockException;

import java.util.List;
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;
Expand Down Expand Up @@ -233,19 +232,12 @@ private void verifyCommandArguments(CommandContext ctxt) throws IllegalCommandEx
throw new IllegalCommandException("Cannot release as minor version. Re-try as major release.", this);
}

if (getDataset().getFiles().isEmpty() && getEffectiveRequiresFilesToPublishDataset()) {
List<FileMetadata> files = getDataset().getLatestVersion().getFileMetadatas();
if ((files == null || files.isEmpty()) && getEffectiveRequiresFilesToPublishDataset()) {
throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.mayNotPublish.FilesRequired"), this);
}
}
}
private boolean getEffectiveRequiresFilesToPublishDataset() {
if (getUser().isSuperuser()) {
return false;
} else {
Dataverse dv = getDataset().getOwner();
return dv != null && dv.getEffectiveRequiresFilesToPublishDataset();
}
}

@Override
public boolean onSuccess(CommandContext ctxt, Object r) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,25 +1,17 @@
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLock;
import edu.harvard.iq.dataverse.DatasetVersionUser;
import edu.harvard.iq.dataverse.UserNotification;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Future;
import org.apache.solr.client.solrj.SolrServerException;

@RequiredPermissions(Permission.EditDataset)
public class SubmitDatasetForReviewCommand extends AbstractDatasetCommand<Dataset> {
Expand All @@ -41,6 +33,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.submit.failure.inReview"), this);
}

List<FileMetadata> files = getDataset().getLatestVersion().getFileMetadatas();
if ((files == null || files.isEmpty()) && getEffectiveRequiresFilesToPublishDataset()) {
throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.mayNotSubmitForReview.FilesRequired"), this);
}

//SEK 9-1 Add Lock before saving dataset
DatasetLock inReviewLock = new DatasetLock(DatasetLock.Reason.InReview, getRequest().getAuthenticatedUser());
ctxt.engine().submit(new AddLockCommand(getRequest(), getDataset(), inReviewLock));
Expand Down
1 change: 1 addition & 0 deletions src/main/java/propertyFiles/Bundle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -1637,6 +1637,7 @@ dataset.mayNotPublish.both= This dataset cannot be published until {0} is publis
dataset.mayNotPublish.twoGenerations= This dataset cannot be published until {0} and {1} are published.
dataset.mayNotBePublished.both.button=Yes, Publish Both
dataset.mayNotPublish.FilesRequired=Published datasets should contain at least one data file.
dataset.mayNotSubmitForReview.FilesRequired=The dataset must contain at least one data file in order to be submitted for review.
dataset.viewVersion.unpublished=View Unpublished Version
dataset.viewVersion.published=View Published Version
dataset.link.title=Link Dataset
Expand Down
10 changes: 6 additions & 4 deletions src/main/webapp/dataset.xhtml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@
<ui:param name="canUpdateDataset" value="#{DatasetPage.canUpdateDataset()}"/>
<ui:param name="canDownloadFiles" value="#{DatasetPage.canDownloadFiles()}"/>
<ui:param name="canIssuePublishDatasetCommand" value="#{permissionsWrapper.canIssuePublishDatasetCommand(DatasetPage.dataset)}"/>
<ui:param name="showPublishLink" value="#{version == DatasetPage.dataset.latestVersion
<ui:param name="canIssueSubmitDatasetForReviewCommand" value="#{permissionsWrapper.canIssueSubmitDatasetForReviewCommand(DatasetPage.dataset)}"/>
<ui:param name="showPublishLink" value="#{version == DatasetPage.dataset.latestVersion
and DatasetPage.dataset.latestVersion.versionState=='DRAFT'
and canIssuePublishDatasetCommand}"/>
<ui:param name="versionHasTabular" value="#{DatasetPage.versionHasTabular}"/>
Expand Down Expand Up @@ -56,7 +57,8 @@
and !DatasetPage.datasetLockedInWorkflow
and DatasetPage.dataset.latestVersion.versionState=='DRAFT'
and canUpdateDataset
and !canIssuePublishDatasetCommand}"/>
and !canIssuePublishDatasetCommand
and canIssueSubmitDatasetForReviewCommand}"/>
<ui:param name="showReturnToAuthorLink" value="#{DatasetPage.dataset.latestVersion.versionState=='DRAFT' and latestVersionInReview
and canIssuePublishDatasetCommand}"/>
<ui:param name="showAccessDatasetButtonGroup" value="#{(canDownloadFiles or versionHasGlobus)
Expand Down Expand Up @@ -358,7 +360,7 @@
<!-- END: DOWNLOAD/ACCESS DATASET -->

<!-- PUBLISH DATASET -->
<div class="btn-group btn-group-justified" jsf:rendered="#{showPublishLink or showSubmitForReviewLink}">
<div class="btn-group btn-group-justified" jsf:rendered="#{showSubmitForReviewLink or showReturnToAuthorLink or showPublishLink or latestVersionInReview}">
<div class="btn-group">
<!-- Publish BTN -->
<h:outputLink value="#" disabled="#{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess or !valid}">
Expand All @@ -372,7 +374,7 @@
<f:passThroughAttribute name="aria-haspopup" value="true"/>
<f:passThroughAttribute name="aria-expanded" value="false"/>
</c:if>
#{showPublishLink ? bundle['dataset.publishBtn'] : (latestVersionInReview ? bundle['dataset.disabledSubmittedBtn'] : bundle['dataset.submitBtn'])} <span jsf:rendered="#{(showSubmitForReviewLink or showReturnToAuthorLink) and showPublishLink}" class="caret"></span>
#{showPublishLink ? bundle['dataset.publishBtn'] : (latestVersionInReview ? bundle['dataset.disabledSubmittedBtn'] : bundle['dataset.submitBtn'])} <span jsf:rendered="#{(showSubmitForReviewLink or showReturnToAuthorLink or showPublishLink or latestVersionInReview)}" class="caret"></span>
</h:outputLink>
<!-- Publish BTN DROPDOWN-MENU OPTIONS -->
<ul class="dropdown-menu pull-right text-left">
Expand Down
12 changes: 12 additions & 0 deletions src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
Original file line number Diff line number Diff line change
Expand Up @@ -6868,10 +6868,22 @@ public void testRequireFilesToPublishDatasets() {
String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(id), pathToFile, apiToken);
uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
Integer fileId = UtilIT.getDataFileIdFromResponse(uploadResponse);

publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(String.valueOf(id), "major", apiToken);
publishDatasetResponse.prettyPrint();
publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());

// Remove the file and try to publish again. Dataset still has a file but the new version has none
Response deleteResponse = UtilIT.deleteFileInDataset(fileId, apiToken);
deleteResponse.prettyPrint();
deleteResponse.then().assertThat().statusCode(OK.getStatusCode());

publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(String.valueOf(id), "major", apiToken);
publishDatasetResponse.prettyPrint();
publishDatasetResponse.then().assertThat()
.statusCode(FORBIDDEN.getStatusCode())
.body("message", containsString( BundleUtil.getStringFromBundle("dataset.mayNotPublish.FilesRequired")));
}

@Test
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
package edu.harvard.iq.dataverse.api;

import edu.harvard.iq.dataverse.util.BundleUtil;
import io.restassured.RestAssured;
import io.restassured.path.json.JsonPath;
import io.restassured.path.xml.XmlPath;
import io.restassured.response.Response;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObjectBuilder;

import static edu.harvard.iq.dataverse.UserNotification.Type.*;
Expand Down Expand Up @@ -478,4 +478,60 @@ public void testCuratorSendsCommentsToAuthor() {

}

@Test
public void testRequireFilesToSubmitDatasetForReview() {
    // Create the superuser who will own the collection
    Response superUser = UtilIT.createRandomUser();
    superUser.prettyPrint();
    superUser.then().assertThat()
            .statusCode(OK.getStatusCode());
    String username = UtilIT.getUsernameFromResponse(superUser);
    String superUserApiToken = UtilIT.getApiTokenFromResponse(superUser);
    Response makeSuperUserResponse = UtilIT.setSuperuserStatus(username, true);
    makeSuperUserResponse.then().assertThat()
            .statusCode(OK.getStatusCode());

    // Create the (non-super) user who will author the dataset
    Response createAuthor = UtilIT.createRandomUser();
    createAuthor.prettyPrint();
    createAuthor.then().assertThat()
            .statusCode(OK.getStatusCode());
    String authorUsername = UtilIT.getUsernameFromResponse(createAuthor);
    String apiToken = UtilIT.getApiTokenFromResponse(createAuthor);

    // Create the dataverse and set it to require files to publish and submit for review
    Response createDataverseResponse = UtilIT.createRandomDataverse(superUserApiToken);
    createDataverseResponse.prettyPrint();
    createDataverseResponse.then().assertThat()
            .statusCode(CREATED.getStatusCode());

    String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);

    Response setDataverseAttributeResponse = UtilIT.setCollectionAttribute(dataverseAlias, "requireFilesToPublishDataset", "true", superUserApiToken);
    setDataverseAttributeResponse.prettyPrint();
    setDataverseAttributeResponse.then().assertThat()
            .statusCode(OK.getStatusCode());

    // Grant a role to the author so they can create a dataset in the dataverse
    Response grantAuthorAddDataset = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR.toString(), "@" + authorUsername, superUserApiToken);
    grantAuthorAddDataset.prettyPrint();
    grantAuthorAddDataset.then().assertThat()
            .body("data.assignee", equalTo("@" + authorUsername))
            .body("data._roleAlias", equalTo("dsContributor"))
            .statusCode(OK.getStatusCode());

    // Create a dataset with no files
    Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
    createDataset.prettyPrint();
    createDataset.then().assertThat()
            .statusCode(CREATED.getStatusCode());

    String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDataset);

    // Submitting for review with no data files must be rejected with 403
    Response submitForReview = UtilIT.submitDatasetForReview(datasetPersistentId, apiToken);
    submitForReview.prettyPrint();
    submitForReview.then().assertThat()
            .statusCode(FORBIDDEN.getStatusCode())
            .body("message", equalTo(BundleUtil.getStringFromBundle("dataset.mayNotSubmitForReview.FilesRequired")));
}
}
Loading