diff --git a/doc/release-notes/10541-root-alias-name2.md b/doc/release-notes/10541-root-alias-name2.md
new file mode 100644
index 00000000000..e699d40994b
--- /dev/null
+++ b/doc/release-notes/10541-root-alias-name2.md
@@ -0,0 +1 @@
+The [tutorial](https://dataverse-guide--11201.org.readthedocs.build/en/11201/container/running/demo.html#root-collection-customization-alias-name-etc) on running Dataverse in Docker has been updated to explain how to configure the root collection using a JSON file. See also #10541 and #11201.
diff --git a/doc/release-notes/11178-bug-fix-sort-by-newest-first.md b/doc/release-notes/11178-bug-fix-sort-by-newest-first.md
new file mode 100644
index 00000000000..8fb3f65b2f1
--- /dev/null
+++ b/doc/release-notes/11178-bug-fix-sort-by-newest-first.md
@@ -0,0 +1,8 @@
+### Bug fix: Sorting by "newest first"
+
+Fixed an issue where draft versions of datasets were sorted by the release timestamp of their most recent major version.
+As a result, when sorting by "newest first", newer drafts appeared next to their corresponding major version instead of at the top.
+Draft datasets are now sorted by their last update timestamp.
+The sorting behavior of published dataset versions (major and minor) is unchanged.
+
+**Upgrade instructions**: draft datasets must be reindexed for this fix to take effect.
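+
+For example, on a typical installation, a full reindex (which covers drafts) can be started with:
+
+```shell
+curl http://localhost:8080/api/admin/index
+```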
\ No newline at end of file
diff --git a/doc/release-notes/11256-news-on-support-for-external-vocabulary.md b/doc/release-notes/11256-news-on-support-for-external-vocabulary.md
new file mode 100644
index 00000000000..180f888e4df
--- /dev/null
+++ b/doc/release-notes/11256-news-on-support-for-external-vocabulary.md
@@ -0,0 +1,4 @@
+### News on Support for External Vocabulary Services
+
+It is now possible to populate the Dataverse keyword metadata field from an OntoPortal service.
+The supporting code has been shared in the [GDCC dataverse-external-vocab-support repository](https://github.com/gdcc/dataverse-external-vocab-support#scripts-in-production).
\ No newline at end of file
diff --git a/doc/release-notes/11264-slower-refresh-for-long-running-locks.md b/doc/release-notes/11264-slower-refresh-for-long-running-locks.md
new file mode 100644
index 00000000000..96e0cec0ccb
--- /dev/null
+++ b/doc/release-notes/11264-slower-refresh-for-long-running-locks.md
@@ -0,0 +1 @@
+When a dataset has a long-running lock, including when it is 'in review', Dataverse now slows the page refresh rate over time.
diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst
index 2483d3217a5..3cb0274c936 100644
--- a/doc/sphinx-guides/source/container/running/demo.rst
+++ b/doc/sphinx-guides/source/container/running/demo.rst
@@ -29,6 +29,8 @@ To stop the containers hit ``Ctrl-c`` (hold down the ``Ctrl`` key and then hit the ``c`` key).
To start the containers, run ``docker compose up``.
+.. _starting-over:
+
Deleting Data and Starting Over
-------------------------------
@@ -46,6 +48,8 @@ Starting Fresh
For this exercise, please start fresh by stopping all containers and removing the ``data`` directory.
+.. _demo-persona:
+
Creating and Running a Demo Persona
+++++++++++++++++++++++++++++++++++
@@ -137,6 +141,29 @@ In the example below of configuring :ref:`:FooterCopyright` we use the default u
Once you make this change, it should be visible in the copyright notice in the bottom left of every page.
+Root Collection Customization (Alias, Name, etc.)
++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Before running ``docker compose up`` for the first time, you can customize the root collection by placing a JSON file where the init script will find it, as described below.
+
+First, in the "demo" directory you created (see :ref:`demo-persona`), create a subdirectory called "config":
+
+``mkdir demo/config``
+
+Next, download :download:`dataverse-complete.json <../../_static/api/dataverse-complete.json>` and put it in the "config" directory you just created. The contents of your "demo" directory should look something like this:
+
+.. code-block:: bash
+
+ % find demo
+ demo
+ demo/config
+ demo/config/dataverse-complete.json
+ demo/init.sh
+
+Edit ``dataverse-complete.json`` to contain the values you want. Refer to :ref:`update-dataverse-api` in the API Guide to understand the format and to find the optional parameters it supports.
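+
+For example, to give the root collection a custom alias and name, your edited file might look something like this (the values shown are only illustrative):
+
+.. code-block:: json
+
+   {
+     "name": "My Demo Collection",
+     "alias": "demo",
+     "dataverseContacts": [
+       {
+         "contactEmail": "demo@example.edu"
+       }
+     ],
+     "affiliation": "Demo University",
+     "description": "The root collection of my demo installation.",
+     "dataverseType": "UNCATEGORIZED"
+   }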
+
+To test your JSON file, run ``docker compose up``. Again, this customization only takes effect the first time you run ``docker compose up``. (You can always start over. See :ref:`starting-over`.)
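+
+Once the containers are up, you can confirm that the customization took effect by visiting the root collection in a browser or by querying the native API for the alias you chose, e.g. ``curl http://localhost:8080/api/dataverses/demo`` if you used the example alias above.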
+
Multiple Languages
++++++++++++++++++
diff --git a/modules/container-configbaker/scripts/bootstrap/demo/init.sh b/modules/container-configbaker/scripts/bootstrap/demo/init.sh
index e8d1d07dd2d..f9718c83f65 100644
--- a/modules/container-configbaker/scripts/bootstrap/demo/init.sh
+++ b/modules/container-configbaker/scripts/bootstrap/demo/init.sh
@@ -19,6 +19,16 @@ echo ""
echo "Setting DOI provider to \"FAKE\"..."
curl -sS -X PUT -d FAKE "${DATAVERSE_URL}/api/admin/settings/:DoiProvider"
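+
+# Pull the superuser API token out of the output that setup-all.sh wrote earlier in the bootstrap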
+API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq -r ".data.apiToken")
+export API_TOKEN
+
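+# If a custom root collection definition was provided (see the demo tutorial in the Container Guide), apply it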
+ROOT_COLLECTION_JSON=/scripts/bootstrap/demo/config/dataverse-complete.json
+if [ -f "$ROOT_COLLECTION_JSON" ]; then
+ echo ""
+ echo "Updating root collection based on $ROOT_COLLECTION_JSON..."
+ curl -sS -X PUT -H "X-Dataverse-key:$API_TOKEN" "$DATAVERSE_URL/api/dataverses/:root" --upload-file "$ROOT_COLLECTION_JSON"
+fi
+
echo ""
echo "Revoke the key that allows for creation of builtin users..."
curl -sS -X DELETE "${DATAVERSE_URL}/api/admin/settings/BuiltinUsers.KEY"
diff --git a/pom.xml b/pom.xml
index cb16f16c229..95137dc94cc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -494,7 +494,7 @@
com.nimbusds
oauth2-oidc-sdk
- 10.13.2
+ 11.22.1
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
index f56fe608a52..c46599e83b5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
@@ -23,6 +23,7 @@
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.sql.Timestamp;
@@ -60,6 +61,7 @@ public class IngestMessageBean implements MessageListener {
@EJB IngestServiceBean ingestService;
@EJB UserNotificationServiceBean userNotificationService;
@EJB AuthenticationServiceBean authenticationServiceBean;
+ @EJB IndexServiceBean indexService;
public IngestMessageBean() {
@@ -111,6 +113,7 @@ public void onMessage(Message message) {
// and "mixed success and failure" emails. Now we never list successfully
// ingested files so this line is commented out.
// sbIngestedFiles.append(String.format("%s", datafile.getCurrentName()));
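+ // Re-index the dataset so the file's newly ingested tabular metadata is reflected in search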
+ indexService.asyncIndexDataset(datafile.getOwner(), true);
} else {
logger.warning("Error occurred during ingest job for file id " + datafile_id + "!");
sbIngestedFiles.append(String.format("%s", datafile.getCurrentName()));
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java
index 201a5f5f781..d3158a3c55a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java
@@ -69,8 +69,6 @@ public class XmlMetadataTemplate {
public static final String XML_SCHEMA_VERSION = "4.5";
private DoiMetadata doiMetadata;
- //QDR - used to get ROR name from ExternalVocabularyValue via pidProvider.get
- private PidProvider pidProvider = null;
public XmlMetadataTemplate() {
}
@@ -98,13 +96,6 @@ private void generateXML(DvObject dvObject, OutputStream outputStream) throws XM
String language = null; // machine locale? e.g. for Publisher which is global
String metadataLanguage = null; // when set, otherwise = language?
- //QDR - used to get ROR name from ExternalVocabularyValue via pidProvider.get
- GlobalId pid = null;
- pid = dvObject.getGlobalId();
- if ((pid == null) && (dvObject instanceof DataFile df)) {
- pid = df.getOwner().getGlobalId();
- }
- pidProvider = PidUtil.getPidProvider(pid.getProviderId());
XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream);
xmlw.writeStartElement("resource");
boolean deaccessioned=false;
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index 839dd4a7e08..a2149b44c41 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -947,32 +947,36 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long> datafilesInDraftVersion) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
index d5c80cde1aa..c3bfdb80e6e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
@@ -104,6 +104,180 @@ public void testRetrieveMyDataAsJsonString() {
assertEquals(200, deleteUserResponse.getStatusCode());
}
+ @Test
+ public void testRetrieveMyDataAsJsonStringSortOrder() {
+ // Create superuser
+ Response createSuperUserResponse = UtilIT.createRandomUser();
+ String superUserIdentifier = UtilIT.getUsernameFromResponse(createSuperUserResponse);
+ String superUserApiToken = UtilIT.getApiTokenFromResponse(createSuperUserResponse);
+ Response makeSuperUserResponse = UtilIT.setSuperuserStatus(superUserIdentifier, true);
+ assertEquals(OK.getStatusCode(), makeSuperUserResponse.getStatusCode());
+
+ // Create regular user
+ Response createUserResponse = UtilIT.createRandomUser();
+ String userApiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+ String userIdentifier = UtilIT.getUsernameFromResponse(createUserResponse);
+
+ // Call as regular user with no result
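+ // (Role id 6 corresponds to the contributor role, matching the "Contributor" error message below)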
+ Response myDataEmptyResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L)));
+ assertEquals(prettyPrintError("myDataFinder.error.result.role.empty", Arrays.asList("Contributor")), myDataEmptyResponse.prettyPrint());
+ assertEquals(OK.getStatusCode(), myDataEmptyResponse.getStatusCode());
+
+ // Create and publish a dataverse
+ Response createDataverseResponse = UtilIT.createRandomDataverse(superUserApiToken);
+ createDataverseResponse.prettyPrint();
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+ Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, superUserApiToken);
+ publishDataverse.then().assertThat().statusCode(OK.getStatusCode());
+
+ // Allow user to create datasets in dataverse
+ Response grantRole = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR, "@" + userIdentifier, superUserApiToken);
+ grantRole.prettyPrint();
+ assertEquals(OK.getStatusCode(), grantRole.getStatusCode());
+
+ // As user, create two datasets and submit them for review
+ Response createDatasetOneResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken);
+ createDatasetOneResponse.prettyPrint();
+ Integer datasetOneId = UtilIT.getDatasetIdFromResponse(createDatasetOneResponse);
+ String datasetOnePid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetOneResponse);
+ UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4);
+
+ Response createDatasetTwoResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken);
+ createDatasetTwoResponse.prettyPrint();
+ Integer datasetTwoId = UtilIT.getDatasetIdFromResponse(createDatasetTwoResponse);
+ String datasetTwoPid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetTwoResponse);
+ UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4);
+
+ // Request datasets belonging to user
+ Response twoDatasetsInReviewResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L)));
+ twoDatasetsInReviewResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), twoDatasetsInReviewResponse.getStatusCode());
+ JsonPath jsonPathTwoDatasetsInReview = twoDatasetsInReviewResponse.getBody().jsonPath();
+ assertEquals(2, jsonPathTwoDatasetsInReview.getInt("data.total_count"));
+ // Expect newest dataset (dataset 2) first
+ assertEquals(datasetTwoId, jsonPathTwoDatasetsInReview.getInt("data.items[0].entity_id"));
+ assertEquals("DRAFT", jsonPathTwoDatasetsInReview.getString("data.items[0].versionState"));
+ assertEquals(datasetOneId, jsonPathTwoDatasetsInReview.getInt("data.items[1].entity_id"));
+ assertEquals("DRAFT", jsonPathTwoDatasetsInReview.getString("data.items[1].versionState"));
+
+ // Publish dataset 1
+ Response publishDatasetOne = UtilIT.publishDatasetViaNativeApi(datasetOneId, "major", superUserApiToken);
+ publishDatasetOne.prettyPrint();
+ publishDatasetOne.then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4);
+
+ // Publish dataset 2
+ Response publishDatasetTwo = UtilIT.publishDatasetViaNativeApi(datasetTwoId, "major", superUserApiToken);
+ publishDatasetTwo.prettyPrint();
+ publishDatasetTwo.then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4);
+
+ // Request datasets belonging to user
+ Response twoPublishedDatasetsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L)));
+ twoPublishedDatasetsResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), twoPublishedDatasetsResponse.getStatusCode());
+ JsonPath jsonPathTwoPublishedDatasets = twoPublishedDatasetsResponse.getBody().jsonPath();
+ assertEquals(2, jsonPathTwoPublishedDatasets.getInt("data.total_count"));
+ // Expect newest dataset (dataset 2) first
+ assertEquals(datasetTwoId, jsonPathTwoPublishedDatasets.getInt("data.items[0].entity_id"));
+ assertEquals("RELEASED", jsonPathTwoPublishedDatasets.getString("data.items[0].versionState"));
+ assertEquals(datasetOneId, jsonPathTwoPublishedDatasets.getInt("data.items[1].entity_id"));
+ assertEquals("RELEASED", jsonPathTwoPublishedDatasets.getString("data.items[1].versionState"));
+
+ // Create new draft version of dataset 1 by updating metadata
+ String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json";
+ Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetOnePid, pathToJsonFilePostPub, userApiToken);
+ addDataToPublishedVersion.prettyPrint();
+ addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4);
+
+ // Request datasets belonging to user
+ Response twoPublishedDatasetsOneDraftResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L)));
+ twoPublishedDatasetsOneDraftResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), twoPublishedDatasetsOneDraftResponse.getStatusCode());
+ JsonPath jsonPathTwoPublishedDatasetsOneDraft = twoPublishedDatasetsOneDraftResponse.getBody().jsonPath();
+ assertEquals(3, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.total_count"));
+
+ // Expect newest dataset version (draft of dataset 1) first
+ assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[0].entity_id"));
+ assertEquals("DRAFT", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[0].versionState"));
+ // ...followed by dataset 2 (created after dataset 1)
+ assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[1].entity_id"));
+ assertEquals("RELEASED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[1].versionState"));
+ // ...followed by dataset 1 (oldest, created before dataset 2)
+ assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[2].entity_id"));
+ assertEquals("RELEASED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[2].versionState"));
+
+ // Create new draft version of dataset 2 by uploading a file
+ String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
+ Response uploadImage = UtilIT.uploadFileViaNative(datasetTwoId.toString(), pathToFile, userApiToken);
+ uploadImage.prettyPrint();
+ uploadImage.then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4);
+
+ // Request datasets belonging to user
+ Response twoPublishedDatasetsTwoDraftsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L)));
+ twoPublishedDatasetsTwoDraftsResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), twoPublishedDatasetsTwoDraftsResponse.getStatusCode());
+ JsonPath jsonPathTwoPublishedDatasetsTwoDrafts = twoPublishedDatasetsTwoDraftsResponse.getBody().jsonPath();
+ assertEquals(4, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.total_count"));
+
+ // Expect newest dataset version (draft of dataset 2) first
+ assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[0].entity_id"));
+ assertEquals("DRAFT", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[0].versionState"));
+ assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[1].entity_id"));
+ assertEquals("DRAFT", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[1].versionState"));
+ assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[2].entity_id"));
+ assertEquals("RELEASED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[2].versionState"));
+ assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[3].entity_id"));
+ assertEquals("RELEASED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[3].versionState"));
+
+ // Publish minor version of dataset 1
+ Response publishDatasetOneMinor = UtilIT.publishDatasetViaNativeApi(datasetOneId, "minor", superUserApiToken);
+ publishDatasetOneMinor.prettyPrint();
+ publishDatasetOneMinor.then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4);
+
+ // Request datasets belonging to user
+ Response oneMinorOneMajorOneDraftDatasetResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L)));
+ oneMinorOneMajorOneDraftDatasetResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), oneMinorOneMajorOneDraftDatasetResponse.getStatusCode());
+ JsonPath jsonPathOneMinorOneMajorOneDraftDataset = oneMinorOneMajorOneDraftDatasetResponse.getBody().jsonPath();
+ assertEquals(3, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.total_count"));
+
+ // Expect minor version of dataset 1 to be sorted last (based on release date of major version)
+ assertEquals(datasetTwoId, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[0].entity_id"));
+ assertEquals("DRAFT", jsonPathOneMinorOneMajorOneDraftDataset.getString("data.items[0].versionState"));
+
+ assertEquals(datasetTwoId, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[1].entity_id"));
+ assertEquals("RELEASED", jsonPathOneMinorOneMajorOneDraftDataset.getString("data.items[1].versionState"));
+
+ assertEquals(datasetOneId, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[2].entity_id"));
+ assertEquals("RELEASED", jsonPathOneMinorOneMajorOneDraftDataset.getString("data.items[2].versionState"));
+ assertEquals(1, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[2].majorVersion"));
+ assertEquals(1, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[2].minorVersion"));
+
+ // Clean up
+ Response deleteDatasetOneResponse = UtilIT.destroyDataset(datasetOneId, superUserApiToken);
+ deleteDatasetOneResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), deleteDatasetOneResponse.getStatusCode());
+ Response deleteDatasetTwoResponse = UtilIT.destroyDataset(datasetTwoId, superUserApiToken);
+ deleteDatasetTwoResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), deleteDatasetTwoResponse.getStatusCode());
+
+ Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, superUserApiToken);
+ deleteDataverseResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), deleteDataverseResponse.getStatusCode());
+
+ Response deleteUserResponse = UtilIT.deleteUser(userIdentifier);
+ deleteUserResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), deleteUserResponse.getStatusCode());
+
+ Response deleteSuperUserResponse = UtilIT.deleteUser(superUserIdentifier);
+ deleteSuperUserResponse.prettyPrint();
+ assertEquals(OK.getStatusCode(), deleteSuperUserResponse.getStatusCode());
+ }
+
private static String prettyPrintError(String resourceBundleKey, List<String> params) {
final String errorMessage;
if (params == null || params.isEmpty()) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index 504e5e707c9..6220ecda525 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -33,6 +33,10 @@
import static jakarta.ws.rs.core.Response.Status.*;
import static java.lang.Thread.sleep;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -1838,4 +1842,53 @@ public void testShowTypeCounts() throws InterruptedException {
.body("data.total_count_per_object_type.Datasets", CoreMatchers.is(0))
.body("data.total_count_per_object_type.Files", CoreMatchers.is(0));
}
+
+ @Test
+ public void testTabularFiles() throws IOException {
+ Response createUser = UtilIT.createRandomUser();
+ createUser.then().assertThat().statusCode(OK.getStatusCode());
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+ Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+ createDataverseResponse.prettyPrint();
+ createDataverseResponse.then().assertThat()
+ .statusCode(CREATED.getStatusCode());
+
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+ Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDataset.prettyPrint();
+ createDataset.then().assertThat()
+ .statusCode(CREATED.getStatusCode());
+
+ Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+ String datasetPid = UtilIT.getDatasetPersistentIdFromResponse(createDataset);
+
+ Path pathToDataFile = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "data.csv");
+ String contentOfCsv = ""
+ + "name,pounds,species,treats\n"
+ + "Midnight,15,dog,milkbones\n"
+ + "Tiger,17,cat,cat grass\n"
+ + "Panther,21,cat,cat nip\n";
+ java.nio.file.Files.write(pathToDataFile, contentOfCsv.getBytes());
+
+ Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToDataFile.toString(), apiToken);
+ uploadFile.prettyPrint();
+ uploadFile.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.files[0].label", equalTo("data.csv"));
+
+ assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToDataFile);
+
+ Long fileId = JsonPath.from(uploadFile.body().asString()).getLong("data.files[0].dataFile.id");
+
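+ // Ingest converts the uploaded CSV into a tabular file ("data.tab") and indexes its variable/observation counts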
+ Response search = UtilIT.search("entityId:" + fileId, apiToken);
+ search.prettyPrint();
+ search.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.items[0].name", is("data.tab"))
+ .body("data.items[0].variables", is(4))
+ .body("data.items[0].observations", is(3));
+ }
+
}