From aee4ae40b3ac7182cd41c16ed2987863c31ba841 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Wed, 22 Jan 2025 17:21:12 +0100 Subject: [PATCH 01/21] test: add failing test in DataRetrieverApiIT for expected sort order of datasets --- .../iq/dataverse/api/DataRetrieverApiIT.java | 139 ++++++++++++++++++ 1 file changed, 139 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index d5c80cde1aa..be748109dbd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -104,6 +104,145 @@ public void testRetrieveMyDataAsJsonString() { assertEquals(200, deleteUserResponse.getStatusCode()); } + @Test + public void testRetrieveMyDataAsJsonStringSortOrder() { + // Create superuser + Response createSuperUserResponse = UtilIT.createRandomUser(); + String superUserIdentifier = UtilIT.getUsernameFromResponse(createSuperUserResponse); + String superUserApiToken = UtilIT.getApiTokenFromResponse(createSuperUserResponse); + Response makeSuperUserResponse = UtilIT.setSuperuserStatus(superUserIdentifier, true); + assertEquals(OK.getStatusCode(), makeSuperUserResponse.getStatusCode()); + + // Create regular user + Response createUserResponse = UtilIT.createRandomUser(); + String userApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + String userIdentifier = UtilIT.getUsernameFromResponse(createUserResponse); + + // Create curator user +// Response createCuratorResponse = UtilIT.createRandomUser(); +// String curatorApiToken = UtilIT.getApiTokenFromResponse(createCuratorResponse); +// String curatorIdentifier = UtilIT.getUsernameFromResponse(createCuratorResponse); + + // Call as regular user with no result + Response myDataEmptyResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + 
assertEquals(prettyPrintError("myDataFinder.error.result.role.empty", Arrays.asList("Contributor")), myDataEmptyResponse.prettyPrint()); + assertEquals(OK.getStatusCode(), myDataEmptyResponse.getStatusCode()); + + // Create and publish a dataverse + Response createDataverseResponse = UtilIT.createRandomDataverse(superUserApiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, superUserApiToken); + publishDataverse.then().assertThat().statusCode(OK.getStatusCode()); + + // Allow user to create datasets in dataverse + Response grantRole = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR, "@" + userIdentifier, superUserApiToken); + grantRole.prettyPrint(); + assertEquals(OK.getStatusCode(), grantRole.getStatusCode()); + + // As user, create and publish two datasets + Response createDatasetOneResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken); + createDatasetOneResponse.prettyPrint(); + Integer datasetOneId = UtilIT.getDatasetIdFromResponse(createDatasetOneResponse); + String datasetOnePid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetOneResponse); + UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); + + Response createDatasetTwoResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken); + createDatasetTwoResponse.prettyPrint(); + Integer datasetTwoId = UtilIT.getDatasetIdFromResponse(createDatasetTwoResponse); + String datasetTwoPid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetTwoResponse); + UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4); + + Response publishDatasetOne = UtilIT.publishDatasetViaNativeApi(datasetOneId, "major", superUserApiToken); + publishDatasetOne.prettyPrint(); + publishDatasetOne.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetTwo = 
UtilIT.publishDatasetViaNativeApi(datasetTwoId, "major", superUserApiToken); + publishDatasetTwo.prettyPrint(); + publishDatasetTwo.then().assertThat().statusCode(OK.getStatusCode()); + + // Request datasets belonging to user + Response twoPublishedDatasetsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + twoPublishedDatasetsResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), twoPublishedDatasetsResponse.getStatusCode()); + JsonPath jsonPathTwoPublishedDatasets = twoPublishedDatasetsResponse.getBody().jsonPath(); + assertEquals(2, jsonPathTwoPublishedDatasets.getInt("data.total_count")); + // Expect newest dataset (dataset 2) first + assertEquals(datasetTwoId, jsonPathTwoPublishedDatasets.getInt("data.items[0].entity_id")); + assertEquals(datasetOneId, jsonPathTwoPublishedDatasets.getInt("data.items[1].entity_id")); + + // Create new draft version of dataset 1 by updating metadata + String pathToJsonFilePostPub= "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; + Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetOnePid, pathToJsonFilePostPub, userApiToken); + addDataToPublishedVersion.prettyPrint(); + addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); + Response submitDatasetOneForReview = UtilIT.submitDatasetForReview(datasetOnePid, userApiToken); + submitDatasetOneForReview.prettyPrint(); + submitDatasetOneForReview.then().assertThat().statusCode(OK.getStatusCode()); + + // Request datasets belonging to user + Response twoPublishedDatasetsOneDraftResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + twoPublishedDatasetsOneDraftResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), twoPublishedDatasetsOneDraftResponse.getStatusCode()); + JsonPath jsonPathTwoPublishedDatasetsOneDraft = 
twoPublishedDatasetsOneDraftResponse.getBody().jsonPath(); + assertEquals(3, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.total_count")); + + // Expect newest dataset version (draft of dataset 1) first + assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[0].entity_id")); + assertEquals("DRAFT", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[0].versionState")); + assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[1].entity_id")); + assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[1].versionState")); + assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[2].entity_id")); + assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[2].versionState")); + + // Create new draft version of dataset 2 by uploading a file + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadImage = UtilIT.uploadFileViaNative(datasetTwoId.toString(), pathToFile, userApiToken); + uploadImage.prettyPrint(); + uploadImage.then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4); + Response submitDatasetTwoForReview = UtilIT.submitDatasetForReview(datasetTwoPid, userApiToken); + submitDatasetTwoForReview.prettyPrint(); + submitDatasetTwoForReview.then().assertThat().statusCode(OK.getStatusCode()); + + // Request datasets belonging to user + Response twoPublishedDatasetsTwoDraftsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + twoPublishedDatasetsTwoDraftsResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), twoPublishedDatasetsTwoDraftsResponse.getStatusCode()); + JsonPath jsonPathTwoPublishedDatasetsTwoDrafts = twoPublishedDatasetsTwoDraftsResponse.getBody().jsonPath(); + assertEquals(4, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.total_count")); + + // 
Expect newest dataset version (draft of dataset 2) first + assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[0].entity_id")); + assertEquals("DRAFT", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[0].versionState")); + assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[1].entity_id")); + assertEquals("DRAFT", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[1].versionState")); + assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[2].entity_id")); + assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[2].versionState")); + assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[3].entity_id")); + assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[3].versionState")); + + // Clean up + Response deleteDatasetOneResponse = UtilIT.deleteDatasetViaNativeApi(datasetOneId, superUserApiToken); + deleteDatasetOneResponse.prettyPrint(); + assertEquals(200, deleteDatasetOneResponse.getStatusCode()); + Response deleteDatasetTwoResponse = UtilIT.deleteDatasetViaNativeApi(datasetTwoId, superUserApiToken); + deleteDatasetTwoResponse.prettyPrint(); + assertEquals(200, deleteDatasetTwoResponse.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, superUserApiToken); + deleteDataverseResponse.prettyPrint(); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(userIdentifier); + deleteUserResponse.prettyPrint(); + assertEquals(200, deleteUserResponse.getStatusCode()); + + Response deleteCuratorResponse = UtilIT.deleteUser(superUserIdentifier); + deleteCuratorResponse.prettyPrint(); + assertEquals(200, deleteCuratorResponse.getStatusCode()); + } + private static String prettyPrintError(String resourceBundleKey, List params) { final String errorMessage; if (params == null 
|| params.isEmpty()) { From 8e6b47fc21d605a7957e5ac34e0b911635b14b21 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Thu, 23 Jan 2025 11:24:37 +0100 Subject: [PATCH 02/21] fix: use last update time of dataset version during indexing (instead of release time of most recent major version) --- .../iq/dataverse/search/IndexServiceBean.java | 41 +++++++------------ 1 file changed, 14 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 3f60a9bd1a2..02587118c3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -953,33 +953,15 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Thu, 23 Jan 2025 11:25:35 +0100 Subject: [PATCH 03/21] test: extend test in DataRetrieverApiIT for expected sort order of datasets --- .../iq/dataverse/api/DataRetrieverApiIT.java | 86 +++++++++++-------- 1 file changed, 51 insertions(+), 35 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index be748109dbd..db7b0df922f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -118,11 +118,6 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { String userApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); String userIdentifier = UtilIT.getUsernameFromResponse(createUserResponse); - // Create curator user -// Response createCuratorResponse = UtilIT.createRandomUser(); -// String curatorApiToken = UtilIT.getApiTokenFromResponse(createCuratorResponse); -// String curatorIdentifier = UtilIT.getUsernameFromResponse(createCuratorResponse); - // Call as regular user with no result Response myDataEmptyResponse 
= UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); assertEquals(prettyPrintError("myDataFinder.error.result.role.empty", Arrays.asList("Contributor")), myDataEmptyResponse.prettyPrint()); @@ -140,45 +135,67 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { grantRole.prettyPrint(); assertEquals(OK.getStatusCode(), grantRole.getStatusCode()); - // As user, create and publish two datasets + // As user, create two datasets and submit them for review Response createDatasetOneResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken); createDatasetOneResponse.prettyPrint(); Integer datasetOneId = UtilIT.getDatasetIdFromResponse(createDatasetOneResponse); String datasetOnePid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetOneResponse); UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); + Response submitDatasetOneForReview = UtilIT.submitDatasetForReview(datasetOnePid, userApiToken); + submitDatasetOneForReview.prettyPrint(); + submitDatasetOneForReview.then().assertThat().statusCode(OK.getStatusCode()); + Response createDatasetTwoResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken); createDatasetTwoResponse.prettyPrint(); Integer datasetTwoId = UtilIT.getDatasetIdFromResponse(createDatasetTwoResponse); String datasetTwoPid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetTwoResponse); UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4); + Response submitDatasetTwoForReview = UtilIT.submitDatasetForReview(datasetTwoPid, userApiToken); + submitDatasetTwoForReview.prettyPrint(); + submitDatasetTwoForReview.then().assertThat().statusCode(OK.getStatusCode()); + + // Request datasets belonging to user + Response twoDatasetsInReviewResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + twoDatasetsInReviewResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), 
twoDatasetsInReviewResponse.getStatusCode()); + JsonPath jsonPathTwoDatasetsInReview = twoDatasetsInReviewResponse.getBody().jsonPath(); + assertEquals(2, jsonPathTwoDatasetsInReview.getInt("data.total_count")); + // Expect newest dataset (dataset 2) first + assertEquals(datasetTwoId, jsonPathTwoDatasetsInReview.getInt("data.items[0].entity_id")); + assertEquals("DRAFT", jsonPathTwoDatasetsInReview.getString("data.items[0].versionState")); + assertEquals(datasetOneId, jsonPathTwoDatasetsInReview.getInt("data.items[1].entity_id")); + assertEquals("DRAFT", jsonPathTwoDatasetsInReview.getString("data.items[1].versionState")); + + // Publish older dataset (dataset 1) Response publishDatasetOne = UtilIT.publishDatasetViaNativeApi(datasetOneId, "major", superUserApiToken); publishDatasetOne.prettyPrint(); publishDatasetOne.then().assertThat().statusCode(OK.getStatusCode()); + + // Request datasets belonging to user + Response afterPublishingOneDatasetResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + afterPublishingOneDatasetResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), afterPublishingOneDatasetResponse.getStatusCode()); + JsonPath jsonPathAfterPublishingOneDataset = afterPublishingOneDatasetResponse.getBody().jsonPath(); + assertEquals(2, jsonPathAfterPublishingOneDataset.getInt("data.total_count")); + // Expect that being published moved dataset1 to the top + assertEquals(datasetOneId, jsonPathAfterPublishingOneDataset.getInt("data.items[0].entity_id")); + assertEquals("RELEASED", jsonPathAfterPublishingOneDataset.getString("data.items[0].versionState")); + assertEquals(datasetTwoId, jsonPathAfterPublishingOneDataset.getInt("data.items[1].entity_id")); + assertEquals("DRAFT", jsonPathAfterPublishingOneDataset.getString("data.items[1].versionState")); + + // Publish dataset 2 Response publishDatasetTwo = UtilIT.publishDatasetViaNativeApi(datasetTwoId, "major", superUserApiToken); 
publishDatasetTwo.prettyPrint(); publishDatasetTwo.then().assertThat().statusCode(OK.getStatusCode()); - // Request datasets belonging to user - Response twoPublishedDatasetsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); - twoPublishedDatasetsResponse.prettyPrint(); - assertEquals(OK.getStatusCode(), twoPublishedDatasetsResponse.getStatusCode()); - JsonPath jsonPathTwoPublishedDatasets = twoPublishedDatasetsResponse.getBody().jsonPath(); - assertEquals(2, jsonPathTwoPublishedDatasets.getInt("data.total_count")); - // Expect newest dataset (dataset 2) first - assertEquals(datasetTwoId, jsonPathTwoPublishedDatasets.getInt("data.items[0].entity_id")); - assertEquals(datasetOneId, jsonPathTwoPublishedDatasets.getInt("data.items[1].entity_id")); - // Create new draft version of dataset 1 by updating metadata String pathToJsonFilePostPub= "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetOnePid, pathToJsonFilePostPub, userApiToken); addDataToPublishedVersion.prettyPrint(); addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); - Response submitDatasetOneForReview = UtilIT.submitDatasetForReview(datasetOnePid, userApiToken); - submitDatasetOneForReview.prettyPrint(); - submitDatasetOneForReview.then().assertThat().statusCode(OK.getStatusCode()); // Request datasets belonging to user Response twoPublishedDatasetsOneDraftResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); @@ -190,10 +207,12 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { // Expect newest dataset version (draft of dataset 1) first assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[0].entity_id")); assertEquals("DRAFT", 
jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[0].versionState")); + // ...followed by dataset 2 (most recently published) assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[1].entity_id")); - assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[1].versionState")); + assertEquals("RELEASED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[1].versionState")); + // ...followed by dataset 1 (least recently published) assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[2].entity_id")); - assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[2].versionState")); + assertEquals("RELEASED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[2].versionState")); // Create new draft version of dataset 2 by uploading a file String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; @@ -201,9 +220,6 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { uploadImage.prettyPrint(); uploadImage.then().assertThat().statusCode(OK.getStatusCode()); UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4); - Response submitDatasetTwoForReview = UtilIT.submitDatasetForReview(datasetTwoPid, userApiToken); - submitDatasetTwoForReview.prettyPrint(); - submitDatasetTwoForReview.then().assertThat().statusCode(OK.getStatusCode()); // Request datasets belonging to user Response twoPublishedDatasetsTwoDraftsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); @@ -218,29 +234,29 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[1].entity_id")); assertEquals("DRAFT", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[1].versionState")); assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[2].entity_id")); - assertEquals("PUBLISHED", 
jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[2].versionState")); + assertEquals("RELEASED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[2].versionState")); assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[3].entity_id")); - assertEquals("PUBLISHED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[3].versionState")); + assertEquals("RELEASED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[3].versionState")); // Clean up - Response deleteDatasetOneResponse = UtilIT.deleteDatasetViaNativeApi(datasetOneId, superUserApiToken); + Response deleteDatasetOneResponse = UtilIT.destroyDataset(datasetOneId, superUserApiToken); deleteDatasetOneResponse.prettyPrint(); - assertEquals(200, deleteDatasetOneResponse.getStatusCode()); - Response deleteDatasetTwoResponse = UtilIT.deleteDatasetViaNativeApi(datasetTwoId, superUserApiToken); + assertEquals(OK.getStatusCode(), deleteDatasetOneResponse.getStatusCode()); + Response deleteDatasetTwoResponse = UtilIT.destroyDataset(datasetTwoId, superUserApiToken); deleteDatasetTwoResponse.prettyPrint(); - assertEquals(200, deleteDatasetTwoResponse.getStatusCode()); + assertEquals(OK.getStatusCode(), deleteDatasetTwoResponse.getStatusCode()); Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, superUserApiToken); deleteDataverseResponse.prettyPrint(); - assertEquals(200, deleteDataverseResponse.getStatusCode()); + assertEquals(OK.getStatusCode(), deleteDataverseResponse.getStatusCode()); Response deleteUserResponse = UtilIT.deleteUser(userIdentifier); deleteUserResponse.prettyPrint(); - assertEquals(200, deleteUserResponse.getStatusCode()); + assertEquals(OK.getStatusCode(), deleteUserResponse.getStatusCode()); - Response deleteCuratorResponse = UtilIT.deleteUser(superUserIdentifier); - deleteCuratorResponse.prettyPrint(); - assertEquals(200, deleteCuratorResponse.getStatusCode()); + Response deleteSuperUserResponse = 
UtilIT.deleteUser(superUserIdentifier); + deleteSuperUserResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), deleteSuperUserResponse.getStatusCode()); } private static String prettyPrintError(String resourceBundleKey, List params) { From e4320244559986889c04bbc0ca86715e5ff7e38e Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Thu, 23 Jan 2025 11:33:54 +0100 Subject: [PATCH 04/21] docs: add release note for 11178-bug-fix-sort-by-newest-first --- doc/release-notes/11178-bug-fix-sort-by-newest-first.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 doc/release-notes/11178-bug-fix-sort-by-newest-first.md diff --git a/doc/release-notes/11178-bug-fix-sort-by-newest-first.md b/doc/release-notes/11178-bug-fix-sort-by-newest-first.md new file mode 100644 index 00000000000..473ce18aede --- /dev/null +++ b/doc/release-notes/11178-bug-fix-sort-by-newest-first.md @@ -0,0 +1,7 @@ +### Bug fix: Sorting by "newest first" + +Fixed an issue where draft and minor versions of datasets were sorted using the release timestamp of their most recent major version. +This caused newer drafts or minor versions to appear incorrectly alongside their corresponding major version, instead of at the top, when sorted by "newest first". +Sorting now consistently uses the last update timestamp for all dataset versions (draft, minor, and major). + +**Upgrade instructions**: all datasets must be reindexed for this fix to take effect. 
\ No newline at end of file From 3b0ac1648dae673b862368a51d8f49eb0f144093 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 30 Jan 2025 15:55:19 -0500 Subject: [PATCH 05/21] explain how to customize root collection in docker #10541 --- .../source/container/running/demo.rst | 15 +++++++++++++++ .../scripts/bootstrap/demo/init.sh | 7 +++++++ 2 files changed, 22 insertions(+) diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 2483d3217a5..dc0c12cf003 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -46,6 +46,8 @@ Starting Fresh For this exercise, please start fresh by stopping all containers and removing the ``data`` directory. +.. _demo-persona: + Creating and Running a Demo Persona +++++++++++++++++++++++++++++++++++ @@ -137,6 +139,19 @@ In the example below of configuring :ref:`:FooterCopyright` we use the default u One you make this change it should be visible in the copyright in the bottom left of every page. +Root Collection Customization (Alias, Name, etc.) ++++++++++++++++++++++++++++++++++++++++++++++++++ + +Before running ``docker compose up`` for the first time, you can customize the root collection by editing the ``init.sh`` script above. + +First, uncomment the section that says "Updating root collection". Note that it references the file ``/scripts/bootstrap/demo/dataverse-complete.json``. + +Next, download :download:`dataverse-complete.json <../../_static/api/dataverse-complete.json>` and put it in the "demo" directory you created (see :ref:`demo-persona`). That directory should now have two files: ``init.sh`` and ``dataverse-complete.json``. + +Edit ``dataverse-complete.json`` to have the values you want. You'll want to refer to :ref:`update-dataverse-api` in the API Guide to understand the format. In that documentation you can find optional parameters as well. 
+ +To test your JSON file, run ``docker compose up``. Again, this only works when you are running ``docker compose up`` for the first time. + Multiple Languages ++++++++++++++++++ diff --git a/modules/container-configbaker/scripts/bootstrap/demo/init.sh b/modules/container-configbaker/scripts/bootstrap/demo/init.sh index e8d1d07dd2d..8233a64dc4b 100644 --- a/modules/container-configbaker/scripts/bootstrap/demo/init.sh +++ b/modules/container-configbaker/scripts/bootstrap/demo/init.sh @@ -19,6 +19,13 @@ echo "" echo "Setting DOI provider to \"FAKE\"..." curl -sS -X PUT -d FAKE "${DATAVERSE_URL}/api/admin/settings/:DoiProvider" +API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq ".data.apiToken" | tr -d \") +export API_TOKEN + +#echo "" +#echo "Updating root collection..." +#curl -sS -X PUT -H "X-Dataverse-key:$API_TOKEN" "$DATAVERSE_URL/api/dataverses/:root" --upload-file /scripts/bootstrap/demo/dataverse-complete.json + echo "" echo "Revoke the key that allows for creation of builtin users..." curl -sS -X DELETE "${DATAVERSE_URL}/api/admin/settings/BuiltinUsers.KEY" From 7da6094126cc8045d55a4c190a6ddea98cfdf92c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 30 Jan 2025 16:03:42 -0500 Subject: [PATCH 06/21] add release note #10541 --- doc/release-notes/10541-root-alias-name2.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10541-root-alias-name2.md diff --git a/doc/release-notes/10541-root-alias-name2.md b/doc/release-notes/10541-root-alias-name2.md new file mode 100644 index 00000000000..e699d40994b --- /dev/null +++ b/doc/release-notes/10541-root-alias-name2.md @@ -0,0 +1 @@ +The [tutorial](https://dataverse-guide--11201.org.readthedocs.build/en/11201/container/running/demo.html#root-collection-customization-alias-name-etc) on running Dataverse in Docker has been updated to explain how to configure the root collection using a JSON file. See also #10541 and #11201. 
From 8baae03aeb1acdf8962932a1f936891f13b54f8f Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Mon, 3 Feb 2025 15:31:29 +0100 Subject: [PATCH 07/21] fix: revert sorting behaviour of published datasets --- .../iq/dataverse/search/IndexServiceBean.java | 34 +++++++++++++++---- .../iq/dataverse/api/DataRetrieverApiIT.java | 24 ++++++------- 2 files changed, 40 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 02587118c3c..0ba3111db7a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -955,13 +955,35 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set(Arrays.asList(6L))); afterPublishingOneDatasetResponse.prettyPrint(); assertEquals(OK.getStatusCode(), afterPublishingOneDatasetResponse.getStatusCode()); JsonPath jsonPathAfterPublishingOneDataset = afterPublishingOneDatasetResponse.getBody().jsonPath(); assertEquals(2, jsonPathAfterPublishingOneDataset.getInt("data.total_count")); - // Expect that being published moved dataset1 to the top - assertEquals(datasetOneId, jsonPathAfterPublishingOneDataset.getInt("data.items[0].entity_id")); + // Expect newest dataset (dataset 2) first + assertEquals(datasetTwoId, jsonPathAfterPublishingOneDataset.getInt("data.items[0].entity_id")); assertEquals("RELEASED", jsonPathAfterPublishingOneDataset.getString("data.items[0].versionState")); - assertEquals(datasetTwoId, jsonPathAfterPublishingOneDataset.getInt("data.items[1].entity_id")); - assertEquals("DRAFT", jsonPathAfterPublishingOneDataset.getString("data.items[1].versionState")); - - // Publish dataset 2 - Response publishDatasetTwo = UtilIT.publishDatasetViaNativeApi(datasetTwoId, "major", superUserApiToken); - publishDatasetTwo.prettyPrint(); - 
publishDatasetTwo.then().assertThat().statusCode(OK.getStatusCode()); + assertEquals(datasetOneId, jsonPathAfterPublishingOneDataset.getInt("data.items[1].entity_id")); + assertEquals("RELEASED", jsonPathAfterPublishingOneDataset.getString("data.items[1].versionState")); // Create new draft version of dataset 1 by updating metadata String pathToJsonFilePostPub= "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; @@ -207,10 +207,10 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { // Expect newest dataset version (draft of dataset 1) first assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[0].entity_id")); assertEquals("DRAFT", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[0].versionState")); - // ...followed by dataset 2 (most recently published) + // ...followed by dataset 2 (created after dataset 1) assertEquals(datasetTwoId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[1].entity_id")); assertEquals("RELEASED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[1].versionState")); - // ...followed by dataset 1 (least recently published) + // ...followed by dataset 1 (oldest, created before dataset 2) assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsOneDraft.getInt("data.items[2].entity_id")); assertEquals("RELEASED", jsonPathTwoPublishedDatasetsOneDraft.getString("data.items[2].versionState")); From e181110f31a503354f5ef1074303a41e1bf8c0f8 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Mon, 3 Feb 2025 15:47:49 +0100 Subject: [PATCH 08/21] test: add test for sorting behaviour of minor dataset versions --- .../iq/dataverse/api/DataRetrieverApiIT.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index 921c7e03c55..3bce9b02620 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -238,6 +238,30 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { assertEquals(datasetOneId, jsonPathTwoPublishedDatasetsTwoDrafts.getInt("data.items[3].entity_id")); assertEquals("RELEASED", jsonPathTwoPublishedDatasetsTwoDrafts.getString("data.items[3].versionState")); + // Publish minor version of dataset 1 + Response publishDatasetOneMinor = UtilIT.publishDatasetViaNativeApi(datasetOneId, "minor", superUserApiToken); + publishDatasetOneMinor.prettyPrint(); + publishDatasetOneMinor.then().assertThat().statusCode(OK.getStatusCode()); + + // Request datasets belonging to user + Response threePublishedDatasetsOneDraftResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + threePublishedDatasetsOneDraftResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), threePublishedDatasetsOneDraftResponse.getStatusCode()); + JsonPath jsonPathThreePublishedDatasetsOneDraft = threePublishedDatasetsOneDraftResponse.getBody().jsonPath(); + assertEquals(3, jsonPathThreePublishedDatasetsOneDraft.getInt("data.total_count")); + + // Expect minor version of dataset 1 to be sorted last (based on release date of major version) + assertEquals(datasetTwoId, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[0].entity_id")); + assertEquals("DRAFT", jsonPathThreePublishedDatasetsOneDraft.getString("data.items[0].versionState")); + + assertEquals(datasetTwoId, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[1].entity_id")); + assertEquals("RELEASED", jsonPathThreePublishedDatasetsOneDraft.getString("data.items[1].versionState")); + + assertEquals(datasetOneId, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[2].entity_id")); + assertEquals("RELEASED", jsonPathThreePublishedDatasetsOneDraft.getString("data.items[2].versionState")); + assertEquals(1, 
jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[2].majorVersion")); + assertEquals(1, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[2].minorVersion")); + // Clean up Response deleteDatasetOneResponse = UtilIT.destroyDataset(datasetOneId, superUserApiToken); deleteDatasetOneResponse.prettyPrint(); From bad4df7a94b6b120327b2717fbbdd8524e2bf2b1 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Mon, 3 Feb 2025 16:00:08 +0100 Subject: [PATCH 09/21] docs: update release note for 11178-bug-fix-sort-by-newest-first --- doc/release-notes/11178-bug-fix-sort-by-newest-first.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/11178-bug-fix-sort-by-newest-first.md b/doc/release-notes/11178-bug-fix-sort-by-newest-first.md index 473ce18aede..8fb3f65b2f1 100644 --- a/doc/release-notes/11178-bug-fix-sort-by-newest-first.md +++ b/doc/release-notes/11178-bug-fix-sort-by-newest-first.md @@ -1,7 +1,8 @@ ### Bug fix: Sorting by "newest first" -Fixed an issue where draft and minor versions of datasets were sorted using the release timestamp of their most recent major version. -This caused newer drafts or minor versions to appear incorrectly alongside their corresponding major version, instead of at the top, when sorted by "newest first". -Sorting now consistently uses the last update timestamp for all dataset versions (draft, minor, and major). +Fixed an issue where draft versions of datasets were sorted using the release timestamp of their most recent major version. +This caused newer drafts to appear incorrectly alongside their corresponding major version, instead of at the top, when sorted by "newest first". +Sorting now uses the last update timestamp when sorting draft datasets. +The sorting behavior of published dataset versions (major and minor) is unchanged. -**Upgrade instructions**: all datasets must be reindexed for this fix to take effect. 
\ No newline at end of file +**Upgrade instructions**: draft datasets must be reindexed for this fix to take effect. \ No newline at end of file From c68e0ba348a39727f8c58695bbbef46440fdaf20 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Wed, 5 Feb 2025 14:08:40 +0100 Subject: [PATCH 10/21] test: fix inaccurate variable names in DataRetrieverApiIT --- .../iq/dataverse/api/DataRetrieverApiIT.java | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index 3bce9b02620..f19db360628 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -179,16 +179,16 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { publishDatasetTwo.then().assertThat().statusCode(OK.getStatusCode()); // Request datasets belonging to user - Response afterPublishingOneDatasetResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); - afterPublishingOneDatasetResponse.prettyPrint(); - assertEquals(OK.getStatusCode(), afterPublishingOneDatasetResponse.getStatusCode()); - JsonPath jsonPathAfterPublishingOneDataset = afterPublishingOneDatasetResponse.getBody().jsonPath(); - assertEquals(2, jsonPathAfterPublishingOneDataset.getInt("data.total_count")); + Response twoPublishedDatasetsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + twoPublishedDatasetsResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), twoPublishedDatasetsResponse.getStatusCode()); + JsonPath jsonPathTwoPublishedDatasets = twoPublishedDatasetsResponse.getBody().jsonPath(); + assertEquals(2, jsonPathTwoPublishedDatasets.getInt("data.total_count")); // Expect newest dataset (dataset 2) first - assertEquals(datasetTwoId, 
jsonPathAfterPublishingOneDataset.getInt("data.items[0].entity_id")); - assertEquals("RELEASED", jsonPathAfterPublishingOneDataset.getString("data.items[0].versionState")); - assertEquals(datasetOneId, jsonPathAfterPublishingOneDataset.getInt("data.items[1].entity_id")); - assertEquals("RELEASED", jsonPathAfterPublishingOneDataset.getString("data.items[1].versionState")); + assertEquals(datasetTwoId, jsonPathTwoPublishedDatasets.getInt("data.items[0].entity_id")); + assertEquals("RELEASED", jsonPathTwoPublishedDatasets.getString("data.items[0].versionState")); + assertEquals(datasetOneId, jsonPathTwoPublishedDatasets.getInt("data.items[1].entity_id")); + assertEquals("RELEASED", jsonPathTwoPublishedDatasets.getString("data.items[1].versionState")); // Create new draft version of dataset 1 by updating metadata String pathToJsonFilePostPub= "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; @@ -244,23 +244,23 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { publishDatasetOneMinor.then().assertThat().statusCode(OK.getStatusCode()); // Request datasets belonging to user - Response threePublishedDatasetsOneDraftResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); - threePublishedDatasetsOneDraftResponse.prettyPrint(); - assertEquals(OK.getStatusCode(), threePublishedDatasetsOneDraftResponse.getStatusCode()); - JsonPath jsonPathThreePublishedDatasetsOneDraft = threePublishedDatasetsOneDraftResponse.getBody().jsonPath(); - assertEquals(3, jsonPathThreePublishedDatasetsOneDraft.getInt("data.total_count")); + Response oneMinorOneMajorOneDraftDatasetResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); + oneMinorOneMajorOneDraftDatasetResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), oneMinorOneMajorOneDraftDatasetResponse.getStatusCode()); + JsonPath jsonPathOneMinorOneMajorOneDraftDataset = 
oneMinorOneMajorOneDraftDatasetResponse.getBody().jsonPath(); + assertEquals(3, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.total_count")); // Expect minor version of dataset 1 to be sorted last (based on release date of major version) - assertEquals(datasetTwoId, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[0].entity_id")); - assertEquals("DRAFT", jsonPathThreePublishedDatasetsOneDraft.getString("data.items[0].versionState")); + assertEquals(datasetTwoId, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[0].entity_id")); + assertEquals("DRAFT", jsonPathOneMinorOneMajorOneDraftDataset.getString("data.items[0].versionState")); - assertEquals(datasetTwoId, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[1].entity_id")); - assertEquals("RELEASED", jsonPathThreePublishedDatasetsOneDraft.getString("data.items[1].versionState")); + assertEquals(datasetTwoId, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[1].entity_id")); + assertEquals("RELEASED", jsonPathOneMinorOneMajorOneDraftDataset.getString("data.items[1].versionState")); - assertEquals(datasetOneId, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[2].entity_id")); - assertEquals("RELEASED", jsonPathThreePublishedDatasetsOneDraft.getString("data.items[2].versionState")); - assertEquals(1, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[2].majorVersion")); - assertEquals(1, jsonPathThreePublishedDatasetsOneDraft.getInt("data.items[2].minorVersion")); + assertEquals(datasetOneId, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[2].entity_id")); + assertEquals("RELEASED", jsonPathOneMinorOneMajorOneDraftDataset.getString("data.items[2].versionState")); + assertEquals(1, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[2].majorVersion")); + assertEquals(1, jsonPathOneMinorOneMajorOneDraftDataset.getInt("data.items[2].minorVersion")); // Clean up Response deleteDatasetOneResponse = UtilIT.destroyDataset(datasetOneId, 
superUserApiToken); From 761b1af098727e8d1c3cab68542a7f622b8f56cb Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Wed, 5 Feb 2025 17:56:29 +0100 Subject: [PATCH 11/21] fix: add missing sleeps to try to fix failing test in DataRetrieverApiIT --- .../harvard/iq/dataverse/api/DataRetrieverApiIT.java | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index f19db360628..c3bfdb80e6e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -142,20 +142,12 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { String datasetOnePid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetOneResponse); UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); - Response submitDatasetOneForReview = UtilIT.submitDatasetForReview(datasetOnePid, userApiToken); - submitDatasetOneForReview.prettyPrint(); - submitDatasetOneForReview.then().assertThat().statusCode(OK.getStatusCode()); - Response createDatasetTwoResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, userApiToken); createDatasetTwoResponse.prettyPrint(); Integer datasetTwoId = UtilIT.getDatasetIdFromResponse(createDatasetTwoResponse); String datasetTwoPid = UtilIT.getDatasetPersistentIdFromResponse(createDatasetTwoResponse); UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4); - Response submitDatasetTwoForReview = UtilIT.submitDatasetForReview(datasetTwoPid, userApiToken); - submitDatasetTwoForReview.prettyPrint(); - submitDatasetTwoForReview.then().assertThat().statusCode(OK.getStatusCode()); - // Request datasets belonging to user Response twoDatasetsInReviewResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); twoDatasetsInReviewResponse.prettyPrint(); @@ -172,11 +164,13 @@ 
public void testRetrieveMyDataAsJsonStringSortOrder() { Response publishDatasetOne = UtilIT.publishDatasetViaNativeApi(datasetOneId, "major", superUserApiToken); publishDatasetOne.prettyPrint(); publishDatasetOne.then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); // Publish dataset 2 Response publishDatasetTwo = UtilIT.publishDatasetViaNativeApi(datasetTwoId, "major", superUserApiToken); publishDatasetTwo.prettyPrint(); publishDatasetTwo.then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.sleepForReindex(datasetTwoId.toString(), userApiToken, 4); // Request datasets belonging to user Response twoPublishedDatasetsResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); @@ -242,6 +236,7 @@ public void testRetrieveMyDataAsJsonStringSortOrder() { Response publishDatasetOneMinor = UtilIT.publishDatasetViaNativeApi(datasetOneId, "minor", superUserApiToken); publishDatasetOneMinor.prettyPrint(); publishDatasetOneMinor.then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.sleepForReindex(datasetOneId.toString(), userApiToken, 4); // Request datasets belonging to user Response oneMinorOneMajorOneDraftDatasetResponse = UtilIT.retrieveMyDataAsJsonString(userApiToken, "", new ArrayList<>(Arrays.asList(6L))); From 2ee746b83496913294181783a3775c3c2a268118 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 6 Feb 2025 18:44:08 -0500 Subject: [PATCH 12/21] obsolete code causing test issues --- .../dataverse/pidproviders/doi/XmlMetadataTemplate.java | 9 --------- 1 file changed, 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index 201a5f5f781..d3158a3c55a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -69,8 +69,6 @@ public class XmlMetadataTemplate { public static final String XML_SCHEMA_VERSION = "4.5"; private DoiMetadata doiMetadata; - //QDR - used to get ROR name from ExternalVocabularyValue via pidProvider.get - private PidProvider pidProvider = null; public XmlMetadataTemplate() { } @@ -98,13 +96,6 @@ private void generateXML(DvObject dvObject, OutputStream outputStream) throws XM String language = null; // machine locale? e.g. for Publisher which is global String metadataLanguage = null; // when set, otherwise = language? - //QDR - used to get ROR name from ExternalVocabularyValue via pidProvider.get - GlobalId pid = null; - pid = dvObject.getGlobalId(); - if ((pid == null) && (dvObject instanceof DataFile df)) { - pid = df.getOwner().getGlobalId(); - } - pidProvider = PidUtil.getPidProvider(pid.getProviderId()); XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream); xmlw.writeStartElement("resource"); boolean deaccessioned=false; From ec6637b5eebc64d1e136e665752885e665eed287 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Wed, 12 Feb 2025 09:00:18 +0100 Subject: [PATCH 13/21] fix: clean up logger calls related to dataset index timestamp choice --- .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 0ba3111db7a..b180c12b6f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -963,10 +963,10 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Wed, 12 Feb 2025 09:11:05 -0500 Subject: [PATCH 14/21] oauth update to allow building --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/pom.xml b/pom.xml index cb16f16c229..95137dc94cc 100644 --- a/pom.xml +++ b/pom.xml @@ -494,7 +494,7 @@ com.nimbusds oauth2-oidc-sdk - 10.13.2 + 11.22.1 From 4f8ea658cee5400a0e31baa9f1fc35705cf32f00 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 12 Feb 2025 14:43:51 -0500 Subject: [PATCH 15/21] look for JSON file in a directory called "config" #10541 --- .../source/container/running/demo.rst | 20 +++++++++++++++---- .../scripts/bootstrap/demo/init.sh | 9 ++++++--- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index dc0c12cf003..3cb0274c936 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -29,6 +29,8 @@ To stop the containers hit ``Ctrl-c`` (hold down the ``Ctrl`` key and then hit t To start the containers, run ``docker compose up``. +.. _starting-over: + Deleting Data and Starting Over ------------------------------- @@ -142,15 +144,25 @@ One you make this change it should be visible in the copyright in the bottom lef Root Collection Customization (Alias, Name, etc.) +++++++++++++++++++++++++++++++++++++++++++++++++ -Before running ``docker compose up`` for the first time, you can customize the root collection by editing the ``init.sh`` script above. +Before running ``docker compose up`` for the first time, you can customize the root collection by placing a JSON file in the right place. + +First, in the "demo" directory you created (see :ref:`demo-persona`), create a subdirectory called "config": + +``mkdir demo/config`` -First, uncomment the section that say "Updating root collection". Note that it references the file ``/scripts/bootstrap/demo/dataverse-complete.json``. +Next, download :download:`dataverse-complete.json <../../_static/api/dataverse-complete.json>` and put it in the "config" directory you just created. 
The contents of your "demo" directory should look something like this: + +.. code-block:: bash -Next, download :download:`dataverse-complete.json <../../_static/api/dataverse-complete.json>` and put it in the "demo" directory you created (see :ref:`demo-persona`). That directory should how have two files: ``init.sh`` and ``dataverse-complete.json``. + % find demo + demo + demo/config + demo/config/dataverse-complete.json + demo/init.sh Edit ``dataverse-complete.json`` to have the values you want. You'll want to refer to :ref:`update-dataverse-api` in the API Guide to understand the format. In that documentation you can find optional parameters as well. -To test your JSON file, run ``docker compose up``. Again, this only works when you are running ``docker compose up`` for the first time. +To test your JSON file, run ``docker compose up``. Again, this only works when you are running ``docker compose up`` for the first time. (You can always start over. See :ref:`starting-over`.) Multiple Languages ++++++++++++++++++ diff --git a/modules/container-configbaker/scripts/bootstrap/demo/init.sh b/modules/container-configbaker/scripts/bootstrap/demo/init.sh index 8233a64dc4b..f9718c83f65 100644 --- a/modules/container-configbaker/scripts/bootstrap/demo/init.sh +++ b/modules/container-configbaker/scripts/bootstrap/demo/init.sh @@ -22,9 +22,12 @@ curl -sS -X PUT -d FAKE "${DATAVERSE_URL}/api/admin/settings/:DoiProvider" API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq ".data.apiToken" | tr -d \") export API_TOKEN -#echo "" -#echo "Updating root collection..." -#curl -sS -X PUT -H "X-Dataverse-key:$API_TOKEN" "$DATAVERSE_URL/api/dataverses/:root" --upload-file /scripts/bootstrap/demo/dataverse-complete.json +ROOT_COLLECTION_JSON=/scripts/bootstrap/demo/config/dataverse-complete.json +if [ -f $ROOT_COLLECTION_JSON ]; then + echo "" + echo "Updating root collection based on $ROOT_COLLECTION_JSON..." 
+ curl -sS -X PUT -H "X-Dataverse-key:$API_TOKEN" "$DATAVERSE_URL/api/dataverses/:root" --upload-file $ROOT_COLLECTION_JSON +fi echo "" echo "Revoke the key that allows for creation of builtin users..." From 1ad4a7390be7513f16671942432726dee7e74435 Mon Sep 17 00:00:00 2001 From: Steven FEREY Date: Thu, 13 Feb 2025 17:36:53 +0100 Subject: [PATCH 16/21] Added release note --- .../11256-news-on-support-for-external-vocabulary.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 doc/release-notes/11256-news-on-support-for-external-vocabulary.md diff --git a/doc/release-notes/11256-news-on-support-for-external-vocabulary.md b/doc/release-notes/11256-news-on-support-for-external-vocabulary.md new file mode 100644 index 00000000000..f1421d3f6be --- /dev/null +++ b/doc/release-notes/11256-news-on-support-for-external-vocabulary.md @@ -0,0 +1,4 @@ +### News on Support for External Vocabulary Services + +One Dataverse installation had implememented the possibility to fill Dataverse keywords metadata using an OntoPortal service. +The code has been shared on [GDCC GitHub Repository](https://github.com/gdcc/dataverse-external-vocab-support#scripts-in-production). 
\ No newline at end of file From a163a4660b4c732e227454d39949e533433b8389 Mon Sep 17 00:00:00 2001 From: Steven FEREY Date: Thu, 13 Feb 2025 17:53:41 +0100 Subject: [PATCH 17/21] Adapt message --- .../11256-news-on-support-for-external-vocabulary.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/11256-news-on-support-for-external-vocabulary.md b/doc/release-notes/11256-news-on-support-for-external-vocabulary.md index f1421d3f6be..180f888e4df 100644 --- a/doc/release-notes/11256-news-on-support-for-external-vocabulary.md +++ b/doc/release-notes/11256-news-on-support-for-external-vocabulary.md @@ -1,4 +1,4 @@ ### News on Support for External Vocabulary Services -One Dataverse installation had implememented the possibility to fill Dataverse keywords metadata using an OntoPortal service. +It is now possible to fill Dataverse keywords metadata using an OntoPortal service. The code has been shared on [GDCC GitHub Repository](https://github.com/gdcc/dataverse-external-vocab-support#scripts-in-production). 
\ No newline at end of file From 52e9ca5e216280fa1803977f9e98b1f29ef94be1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 14 Feb 2025 14:04:22 -0500 Subject: [PATCH 18/21] index after file ingest #11182 --- .../dataverse/ingest/IngestMessageBean.java | 3 ++ .../harvard/iq/dataverse/api/SearchIT.java | 53 +++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java index f56fe608a52..b6c0b7b0341 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.sql.Timestamp; @@ -60,6 +61,7 @@ public class IngestMessageBean implements MessageListener { @EJB IngestServiceBean ingestService; @EJB UserNotificationServiceBean userNotificationService; @EJB AuthenticationServiceBean authenticationServiceBean; + @EJB IndexServiceBean indexService; public IngestMessageBean() { @@ -111,6 +113,7 @@ public void onMessage(Message message) { // and "mixed success and failure" emails. Now we never list successfully // ingested files so this line is commented out. // sbIngestedFiles.append(String.format("
  • %s
  • ", datafile.getCurrentName())); + indexService.indexDataset(datafile.getOwner(), true); } else { logger.warning("Error occurred during ingest job for file id " + datafile_id + "!"); sbIngestedFiles.append(String.format("
  • %s
  • ", datafile.getCurrentName())); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index 504e5e707c9..6220ecda525 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -33,6 +33,10 @@ import static jakarta.ws.rs.core.Response.Status.*; import static java.lang.Thread.sleep; +import java.nio.file.Path; +import java.nio.file.Paths; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -1838,4 +1842,53 @@ public void testShowTypeCounts() throws InterruptedException { .body("data.total_count_per_object_type.Datasets", CoreMatchers.is(0)) .body("data.total_count_per_object_type.Files", CoreMatchers.is(0)); } + + @Test + public void testTabularFiles() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = UtilIT.getDatasetPersistentIdFromResponse(createDataset); + + Path pathToDataFile = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + 
"data.csv"); + String contentOfCsv = "" + + "name,pounds,species,treats\n" + + "Midnight,15,dog,milkbones\n" + + "Tiger,17,cat,cat grass\n" + + "Panther,21,cat,cat nip\n"; + java.nio.file.Files.write(pathToDataFile, contentOfCsv.getBytes()); + + Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToDataFile.toString(), apiToken); + uploadFile.prettyPrint(); + uploadFile.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("data.csv")); + + assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToDataFile); + + Long fileId = JsonPath.from(uploadFile.body().asString()).getLong("data.files[0].dataFile.id"); + + Response search = UtilIT.search("entityId:" + fileId, apiToken); + search.prettyPrint(); + search.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.items[0].name", is("data.tab")) + .body("data.items[0].variables", is(4)) + .body("data.items[0].observations", is(3)); + } + } From b6f484cae7c08c94e4a36fb796df8ca2b1ab6d57 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 14 Feb 2025 14:47:41 -0500 Subject: [PATCH 19/21] switch to async index #11182 --- .../java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java index b6c0b7b0341..c46599e83b5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java @@ -113,7 +113,7 @@ public void onMessage(Message message) { // and "mixed success and failure" emails. Now we never list successfully // ingested files so this line is commented out. // sbIngestedFiles.append(String.format("
  • %s
  • ", datafile.getCurrentName())); - indexService.indexDataset(datafile.getOwner(), true); + indexService.asyncIndexDataset(datafile.getOwner(), true); } else { logger.warning("Error occurred during ingest job for file id " + datafile_id + "!"); sbIngestedFiles.append(String.format("
  • %s
  • ", datafile.getCurrentName())); From 1d6153dff40f9654ca7a0c95e15211094d4f5362 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 17 Feb 2025 09:41:38 -0500 Subject: [PATCH 20/21] add pseudo-exponential backoff --- src/main/webapp/dataset.xhtml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index b4454b75775..9ade65fc17d 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -858,6 +858,11 @@ $(this).ready(function () { refreshIfStillLocked(); }); + + var initialInterval = 5000; // 5 seconds + var maxInterval = 300000; // 5 minutes + var currentInterval = initialInterval; + var backoffFactor = 1.2; // Exponential factor function refreshIfStillLocked() { if ($('input[id$="datasetLockedForAnyReasonVariable"]').val() === 'true') { // if dataset is locked, instruct the page to @@ -882,18 +887,22 @@ $('button[id$="refreshButton"]').trigger('click'); //refreshAllCommand(); }, 1500); + } else { + // Reset the interval if the dataset is unlocked + currentInterval = initialInterval; } } } + function waitAndCheckLockAgain() { setTimeout(function () { // refresh the lock in the // backing bean; i.e., check, if the ingest has // already completed in the background: - //$('button[id$="refreshButton"]').trigger('click'); - //refreshLockCommand(); refreshAllLocksCommand(); - }, 10000); + // Increase the interval exponentially for the next check + currentInterval = Math.min((currentInterval * backoffFactor) + 2, maxInterval); + }, currentInterval); } //]]> From a81edf1397cab95098129b3f2f988280e343f258 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 17 Feb 2025 09:54:45 -0500 Subject: [PATCH 21/21] release note --- doc/release-notes/11264-slower refresh for long running locks.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/11264-slower refresh for long running locks.md diff --git a/doc/release-notes/11264-slower refresh 
for long running locks.md b/doc/release-notes/11264-slower refresh for long running locks.md new file mode 100644 index 00000000000..96e0cec0ccb --- /dev/null +++ b/doc/release-notes/11264-slower refresh for long running locks.md @@ -0,0 +1 @@ +When a dataset has a long running lock, including when it is 'in review', Dataverse will now slow the page refresh rate over time.