diff --git a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/custom-homepage-dynamic.html b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/custom-homepage-dynamic.html index f6230ccbc2b..4845eedea14 100644 --- a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/custom-homepage-dynamic.html +++ b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/custom-homepage-dynamic.html @@ -2,6 +2,9 @@
Deposit and share your data. Get academic credit.
-

Harvard Dataverse is a digital repository. Deposit data and code here.

+

Harvard Dataverse is a repository for research data. Deposit data and code here.

+ +

+ datasets +&nbsp;&nbsp;&nbsp;&nbsp;+ downloads +

+ - Add data to Harvard Dataverse + Add a dataset
-
Organize datasets and gather metrics in your own virtual archive.
-

A dataverse is a virtual archive. A dataverse can contain dataverses, datasets, files and metadata.

+
Organize datasets and gather metrics in your own repository.
+

A dataverse is a container for all your datasets, files, and metadata.

+ +

+ dataverses +

+ - Create my own dataverse + Add a dataverse
-
+
+
-
Find data across research fields, preview metadata, and download files.
+
Find data across research fields, preview metadata, and download files
- + @@ -74,47 +103,71 @@
-
Browse by subject
+

Browse by subject

-
+
-
-
Datasets from journal articles
+
+
Datasets from journal dataverses

-
Loading...
+

Loading...

- -
-
Datasets from research projects, groups and researchers
+ + +
+
Datasets from other dataverses

-
Loading...
- +

Loading...

+
+
+ +
+ + @@ -128,23 +181,69 @@
Activity
-
-
-

All Files datasets added file downloads

+ +
+ +
+
+
Datasets
+
All Activity
+
Past 30 Days
+
+
+
All
+
...
+
...
+
+
+
Deposited
+
...
+
...
+
+
+
Harvested
+
...
+
...
+
-
-

Past 30 Days datasets added file downloads

+ + + +
+
+
Files
+
All Activity
+
Past 30 Days
+
+
+
Downloaded
+
...
+
...
+
+
+
Deposited
+
...
+
...
+
+
-
+
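The script hunk below replaces the homepage's hard-coded counters with live calls to the Metrics API. For reference (not part of the patch), the endpoints it polls can be exercised directly with curl; this sketch assumes the demo server used elsewhere in these guides, and the ``dataLocation`` parameter separates locally deposited datasets from harvested ones::

    # headline and activity counters
    curl "https://demo.dataverse.org/api/info/metrics/datasets?dataLocation=all"
    curl "https://demo.dataverse.org/api/info/metrics/downloads"
    curl "https://demo.dataverse.org/api/info/metrics/dataverses"
    # per-subject counts combined by querySubjectDataverseDataset()
    curl "https://demo.dataverse.org/api/info/metrics/dataverses/bySubject"
    curl "https://demo.dataverse.org/api/info/metrics/datasets/bySubject?dataLocation=all"

Each endpoint returns JSON in the usual ``{"status":"OK","data":...}`` envelope; the simple counters expose a ``count`` field, which the script reads as ``jData.data.count``, while the bySubject variants return an array it iterates with ``jData.data.forEach``.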
@@ -154,7 +253,8 @@ //switch baseUrl to point to a different server than the local box var baseUrl = ""; - var thumbBaseURL = baseUrl + "/api/datasets/:persistentId/thumbnail" + "?persistentId="; + // NOTE: REMOVED THUMBNAILS + // var thumbBaseURL = baseUrl + "/api/datasets/:persistentId/thumbnail" + "?persistentId="; var metricBaseUrl = baseUrl + "/api/info/metrics/"; var simpleCatSearch = baseUrl + "/api/search?q=*&type=dataset&sort=dateSort&order=desc&fq=categoryOfDataverse:"; @@ -162,22 +262,33 @@ $.get(simpleCatSearch + fqString + "&per_page=" + resultCount, function(jData) { var resultHtml = ""; jData.data.items.forEach(function(item){ - var date = new Date(item.published_at); - resultHtml += "
"; - resultHtml += ""; - resultHtml += ""; - resultHtml += "
"; + var options = { year: 'numeric', month: 'short', day: 'numeric' }; + var date = new Date(item.published_at).toLocaleString('en-US', options); + resultHtml += "
"; + // NOTE: REMOVED THUMBNAILS + // resultHtml += ""; + // NOTE: REMOVED THUMBNAILS... in next line, change grid layout class to `col-xs-9 col-md-10` + resultHtml += ""; + resultHtml += "
"; }); document.getElementById(elm).innerHTML = resultHtml; }); } //For metrics that return simple json with a count + //Can take a single element or an array of elements function queryMetricSimple(metricRelPath, month, elm) { $.get(metricBaseUrl + metricRelPath + month, function(jData) { var resultCount = jData.data.count; - document.getElementById(elm).innerHTML = resultCount; + if(Array.isArray(elm)) { + elm.forEach(function(e) { + document.getElementById(e).innerHTML = resultCount.toLocaleString('en'); + }); + } else { + document.getElementById(elm).innerHTML = resultCount.toLocaleString('en'); + } }); } @@ -185,38 +296,62 @@ $.get(metricBaseUrl + metricRelPath + month, function(jData) { var resultCount = jData.data.count; var roundedDatasetCount = Math.floor(resultCount / 100) * 100; - var dynamicSearchPlaceholder = "Search over " + roundedDatasetCount + " datasets..."; + var dynamicSearchPlaceholder = "Search over " + roundedDatasetCount.toLocaleString('en') + " datasets..."; $("#inputDataverseSearch").attr({"placeholder" : dynamicSearchPlaceholder}); }); } - - function querySubject(elm) { + + function querySubjectDataverseDataset(elm) { + var dvArray = []; + var fullArray = []; $.get(metricBaseUrl + "dataverses/bySubject", function(jData) { - var subArray = []; - var resultHtml = ""; jData.data.forEach(function(item) { - if(item.subject !== "N/A") - subArray.push([item.subject, item.count]); + if(item.subject !== "N/A" && item.subject !== "Other") { + dvArray.push([item.subject, item.count]); + } }); - subArray.sort(); - subArray.forEach(function(subject){ - resultHtml += "

" + subject[0] + " " + subject[1] + "

"; + $.get(metricBaseUrl + "datasets/bySubject?dataLocation=all", function(jData) { + var resultHtml = ""; + jData.data.forEach(function(item) { + if(item.subject !== "N/A" && item.subject !== "Other") { + var dvCount = 0; + for (var dvi = 0; dvi < dvArray.length; dvi++) { + if(item.subject === dvArray[dvi][0]) { + dvCount = dvArray[dvi][1]; + break; + } + } + fullArray.push([item.subject, (item.count + dvCount -1).toLocaleString('en')]); //subtract 1 to remove root dv from counts + } + }); + fullArray.sort(); + fullArray.forEach(function(subject){ + // NOTE: The alias of the root dataverse will need to be configured in this URL + resultHtml += "

" + subject[0] + " " + subject[1] + "

"; + }); + document.getElementById(elm).innerHTML = resultHtml; }); - document.getElementById(elm).innerHTML = resultHtml; }); } + queryMetricSimple("datasets", "?dataLocation=all", ["headAllTimeAllDatasetsValue", "activityAllTimeAllDatasetsValue"]); + queryMetricSimple("downloads", "", ["headAllTimeAllDownloadsValue", "activityAllTimeAllDownloadsValue"]); + queryMetricSimple("dataverses", "", "headAllTimeAllDataversesValue"); + writeRecentDatasetsInDataverses("(Journal)" , 3, "journals"); - writeRecentDatasetsInDataverses("(\"Research+Project\"%20OR%20Researcher%20OR%20\"Research+Group\")" , 3, "researchers"); - - queryMetricSimple("datasets", "", "activityAllTimeDatasetsValue"); - queryMetricSimple("downloads", "", "activityAllTimeFilesValue"); + writeRecentDatasetsInDataverses("(\"Research+Project\"%20OR%20Researcher%20OR%20\"Research+Group\")" , 6, "researchers"); - queryMetricSimple("datasets/pastDays", "/30", "activity30DaysDatasetsValue"); - queryMetricSimple("downloads/pastDays", "/30", "activity30DaysFilesValue"); - - querySubject("dataversesBySubject"); - queryMetricSearch("datasets", ""); + queryMetricSimple("datasets/pastDays", "/30?dataLocation=all", "activity30DaysAllDatasetsValue"); + queryMetricSimple("datasets", "?dataLocation=local", "activityAllTimeDepositedDatasetsValue"); + queryMetricSimple("datasets/pastDays", "/30?dataLocation=local", "activity30DaysDepositedDatasetsValue"); + queryMetricSimple("datasets", "?dataLocation=remote", "activityAllTimeHarvestedDatasetsValue"); + queryMetricSimple("datasets/pastDays", "/30?dataLocation=remote", "activity30DaysHarvestedDatasetsValue"); + + queryMetricSimple("downloads/pastDays", "/30", "activity30DaysAllDownloadsValue"); + queryMetricSimple("files", "", "activityAllTimeDepositedFilesValue"); + queryMetricSimple("files/pastDays", "/30", "activity30DaysDepositedFilesValue"); + querySubjectDataverseDataset("dataversesBySubject"); + queryMetricSearch("datasets", "?dataLocation=all"); //]]> diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index 92bc659b960..e66761be3f1 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -72,3 +72,8 @@ Send Dataset metadata to PID provider Forces update to metadata provided to the PID provider of a published dataset. Only accessible to superusers. :: curl -H "X-Dataverse-key: $API_TOKEN" -X POST http://$SERVER/api/datasets/$dataset-id/modifyRegistrationMetadata + +Make Metadata Updates Without Changing Dataset Version +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +As a superuser, click "Update Current Version" when publishing. (This option is only available when a 'Minor' update would be allowed.) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 5e7c013c0d9..3a91dca5024 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -87,7 +87,7 @@ original "Saved Original", the proprietary (SPSS, Stata, R, etc.) file fr "All Formats" bundled download for Tabular Files. 
------------------------------------------------ +------------------------------------------------- ``/api/access/datafile/bundle/$id`` diff --git a/doc/sphinx-guides/source/api/metrics.rst b/doc/sphinx-guides/source/api/metrics.rst index 821b74b0a96..c86a9111e0b 100755 --- a/doc/sphinx-guides/source/api/metrics.rst +++ b/doc/sphinx-guides/source/api/metrics.rst @@ -24,7 +24,7 @@ Example: ``curl https://demo.dataverse.org/api/info/metrics/downloads`` To-Month -------- -Returns a count of various objects in dataverse up to a specified month ``$YYYY-DD`` in YYYY-MM format (i.e. ``2018-01``):: +Returns a count of various objects in dataverse up to a specified month ``$YYYY-DD`` in YYYY-MM format (e.g. ``2018-01``):: GET https://$SERVER/api/info/metrics/$type/toMonth/$YYYY-DD @@ -36,7 +36,7 @@ Example: ``curl https://demo.dataverse.org/api/info/metrics/dataverses/toMonth/2 Past Days --------- -Returns a count of various objects in dataverse for the past ``$days`` (i.e. ``30``):: +Returns a count of various objects in dataverse for the past ``$days`` (e.g. ``30``):: GET https://$SERVER/api/info/metrics/$type/pastDays/$days @@ -45,8 +45,8 @@ Returns a count of various objects in dataverse for the past ``$days`` (i.e. ``3 Example: ``curl https://demo.dataverse.org/api/info/metrics/datasets/pastDays/30`` -Dataverse Specific Commands ---------------------------- +Dataverse Specific Metrics +-------------------------- By Subject ~~~~~~~~~~~~~~~ @@ -64,18 +64,41 @@ Returns the number of dataverses by each category:: GET https://$SERVER/api/info/metrics/dataverses/byCategory -Dataset Specific Commands -------------------------- +Dataset Specific Metrics +------------------------ By Subject -~~~~~~~~~~~~~~~ +~~~~~~~~~~ Returns the number of datasets by each subject:: GET https://$SERVER/api/info/metrics/datasets/bySubject + +By Subject, and to Month +~~~~~~~~~~~~~~~~~~~~~~~~ + +Returns the number of datasets by each subject, and up to a specified month ``$YYYY-DD`` in YYYY-MM format (e.g. ``2018-01``):: + + GET https://$SERVER/api/info/metrics/datasets/bySubject/toMonth/$YYYY-DD + +Example: ``curl https://demo.dataverse.org/api/info/metrics/datasets/bySubject/toMonth/2018-01`` + .. |CORS| raw:: html CORS - \ No newline at end of file + + + +Metric Query Parameters +----------------------- + +To further tailor your metric, query parameters can be provided. + +dataLocation +~~~~~~~~~~~~ + +Specifies whether the metric should query ``local`` data, ``remote`` data (e.g. harvested), or ``all`` data when getting results. Only works for dataset metrics. + +Example: ``curl https://demo.dataverse.org/api/info/metrics/datasets/?dataLocation=remote`` diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index bc1b3439042..b149695681b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -372,7 +372,7 @@ For these deletes your JSON file must include an exact match of those dataset fi Publish a Dataset ~~~~~~~~~~~~~~~~~ -Publishes the dataset whose id is passed. If this is the first version of the dataset, its version number will be set to ``1.0``. Otherwise, the new dataset version number is determined by the most recent version number and the ``type`` parameter. Passing ``type=minor`` increases the minor version number (2.3 is updated to 2.4). Passing ``type=major`` increases the major version number (2.3 is updated to 3.0). :: +Publishes the dataset whose id is passed. 
If this is the first version of the dataset, its version number will be set to ``1.0``. Otherwise, the new dataset version number is determined by the most recent version number and the ``type`` parameter. Passing ``type=minor`` increases the minor version number (2.3 is updated to 2.4). Passing ``type=major`` increases the major version number (2.3 is updated to 3.0). Superusers can pass ``type=updatecurrent`` to update metadata without changing the version number:: POST http://$SERVER/api/datasets/$id/actions/:publish?type=$type&key=$apiKey diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst index b2b37e71425..8ea51b24ce4 100755 --- a/doc/sphinx-guides/source/api/search.rst +++ b/doc/sphinx-guides/source/api/search.rst @@ -85,7 +85,11 @@ https://demo.dataverse.org/api/search?q=trees "file_content_type":"image/png", "size_in_bytes":8361, "md5":"0386269a5acb2c57b4eade587ff4db64", - "dataset_citation":"Spruce, Sabrina, 2016, \"Spruce Goose\", http://dx.doi.org/10.5072/FK2/NFSEHG, Root Dataverse, V1" + "file_persistent_id": "doi:10.5072/FK2/XTT5BV/PCCHV7", + "dataset_name": "Dataset One", + "dataset_id": "32", + "dataset_persistent_id": "doi:10.5072/FK2/XTT5BV", + "dataset_citation":"Spruce, Sabrina, 2016, \"Spruce Goose\", http://dx.doi.org/10.5072/FK2/XTT5BV, Root Dataverse, V1" }, { "name":"Birds", diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 42ef04f3c8d..f3e954ca1f6 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -65,9 +65,9 @@ # built documents. # # The short X.Y version. -version = '4.10.1' +version = '4.11' # The full version, including alpha/beta/rc tags. -release = '4.10.1' +release = '4.11' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index 5346237ab39..b9090b86be3 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst @@ -8,7 +8,7 @@ Dataverse has a flexible workflow mechanism that can be used to trigger actions Introduction ---------- +------------ Dataverse can perform two sequences of actions when datasets are published: one prior to publishing (marked by a ``PrePublishDataset`` trigger), and one after the publication has succeeded (``PostPublishDataset``). The pre-publish workflow is useful for having an external system prepare a dataset for being publicly accessed (a possibly lengthy activity that requires moving files around, uploading videos to a streaming server, etc.), or to start an approval process. A post-publish workflow might be used for sending notifications about the newly published dataset. @@ -104,7 +104,7 @@ Available variables are: * ``releaseStatus`` archiver -+++++++ +++++++++ A step that sends an archival copy of a Dataset Version to a configured archiver, e.g. the DuraCloud interface of Chronopolis. See the `DuraCloud/Chronopolis Integration documentation `_ for further detail. 
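The ``type=updatecurrent`` option documented in the native API hunk above is the API counterpart of the superuser-only "Update Current Version" publish button described in the admin guide hunk earlier. A concrete invocation, following the header-based authentication used by the other superuser examples in this diff, would look like::

    curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
      "http://$SERVER/api/datasets/$id/actions/:publish?type=updatecurrent"

This republishes the current version in place, applying metadata changes only, instead of bumping the version to the next minor or major number.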
diff --git a/doc/sphinx-guides/source/installation/config.rst index 035d00b668d..94c16e2f81a 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -606,7 +606,7 @@ In the Chronopolis case, since the transfer from the DuraCloud front-end to arch **PostPublication Workflow** -To automate the submission of archival copies to an archive as part of publication, one can setup a Dataverse Workflow using the "archiver" workflow step - see the :doc:`developers/workflows` guide. +To automate the submission of archival copies to an archive as part of publication, one can set up a Dataverse Workflow using the "archiver" workflow step - see the :doc:`/developers/workflows` guide. The archiver step uses the configuration information discussed above including the :ArchiverClassName setting. The workflow step definition should include the set of properties defined in \:ArchiverSettings in the workflow definition. To activate this workflow, one must first install a workflow using the archiver step. A simple workflow that invokes the archiver step configured to submit to DuraCloud as its only action is included in dataverse at /scripts/api/data/workflows/internal-archiver-workflow.json. diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst index 2f2262c7514..4a77a536724 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -223,7 +223,7 @@ Solr will warn about needing to increase the number of file descriptors and max solr soft nofile 65000 solr hard nofile 65000 -On operating systems which use systemd such as RHEL or CentOS 7, you may then add a line like LimitNOFILE=65000 to the systemd unit file, or adjust the limits on a running process using the prlimit tool:: +On operating systems which use systemd such as RHEL or CentOS 7, you may then add a line like LimitNOFILE=65000 for the number of open file descriptors and a line with LimitNPROC=65000 for the maximum number of processes to the systemd unit file, or adjust the limits on a running process using the prlimit tool:: # sudo prlimit --pid pid --nofile=65000:65000 diff --git a/doc/sphinx-guides/source/versions.rst index 8b5a91936ff..6ee3396ab0a 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -6,8 +6,9 @@ Dataverse Guides Versions This list provides a way to refer to previous versions of the Dataverse guides, which we still host. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo.
-- 4.10.1 +- 4.11 +- `4.10.1 `__ - `4.10 `__ - `4.9.4 `__ - `4.9.3 `__ diff --git a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar b/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar deleted file mode 100644 index 5fb068a19d0..00000000000 Binary files a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar and /dev/null differ diff --git a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar.md5 b/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar.md5 deleted file mode 100644 index 60015568115..00000000000 --- a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar.md5 +++ /dev/null @@ -1 +0,0 @@ -f6099186cd4ef67ea91b4c3b724c1113 diff --git a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar.sha1 b/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar.sha1 deleted file mode 100644 index a9340ef0a62..00000000000 --- a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2232318434cab52dd755fba7003958204459f404 diff --git a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom.md5 b/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom.md5 deleted file mode 100644 index 3eab42071ef..00000000000 --- a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom.md5 +++ /dev/null @@ -1 +0,0 @@ -b1390a875687dad3cc6527b83f84e635 diff --git a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom.sha1 b/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom.sha1 deleted file mode 100644 index 9f1c0f74448..00000000000 --- a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6548a529e301aeebbed9ecc0034fb7b997bd47b diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar new file mode 100644 index 00000000000..1f8e1c3eb12 Binary files /dev/null and b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar differ diff --git a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom similarity index 78% rename from local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom rename to local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom index f588049e389..e3c09349172 100644 --- a/local_lib/net/handle/handle/2006-06-16-generated/handle-2006-06-16-generated.pom +++ b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom @@ -4,5 +4,6 @@ 4.0.0 net.handle handle - 2006-06-16-generated + 8.1.1 + POM was created from install:install-file diff --git a/pom.xml b/pom.xml index ca02aa4b5a0..ca0f91b5a82 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ --> edu.harvard.iq dataverse - 4.10.1 + 4.11 war dataverse @@ -328,7 +328,7 @@ net.handle handle - 2006-06-16-generated + 8.1.1 @@ -558,6 +558,10 @@ org.slf4j log4j-over-slf4j + + ch.qos.logback + logback-classic + @@ -573,6 +577,10 @@ com.amazonaws aws-java-sdk-sqs + + ch.qos.logback + logback-classic + diff --git a/scripts/database/create/create_v4.11.sql b/scripts/database/create/create_v4.11.sql new file mode 100644 index 00000000000..4ba735c5186 --- /dev/null +++ b/scripts/database/create/create_v4.11.sql @@ -0,0 +1,351 @@ +CREATE TABLE 
DATAVERSETHEME (ID SERIAL NOT NULL, BACKGROUNDCOLOR VARCHAR(255), LINKCOLOR VARCHAR(255), LINKURL VARCHAR(255), LOGO VARCHAR(255), LOGOALIGNMENT VARCHAR(255), LOGOBACKGROUNDCOLOR VARCHAR(255), LOGOFORMAT VARCHAR(255), TAGLINE VARCHAR(255), TEXTCOLOR VARCHAR(255), dataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSETHEME_dataverse_id ON DATAVERSETHEME (dataverse_id); +CREATE TABLE DATAFILECATEGORY (ID SERIAL NOT NULL, NAME VARCHAR(255) NOT NULL, DATASET_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAFILECATEGORY_dataset_id ON DATAFILECATEGORY (dataset_id); +CREATE TABLE ROLEASSIGNMENT (ID SERIAL NOT NULL, ASSIGNEEIDENTIFIER VARCHAR(255) NOT NULL, PRIVATEURLTOKEN VARCHAR(255), DEFINITIONPOINT_ID BIGINT NOT NULL, ROLE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_ROLEASSIGNMENT_assigneeidentifier ON ROLEASSIGNMENT (assigneeidentifier); +CREATE INDEX INDEX_ROLEASSIGNMENT_definitionpoint_id ON ROLEASSIGNMENT (definitionpoint_id); +CREATE INDEX INDEX_ROLEASSIGNMENT_role_id ON ROLEASSIGNMENT (role_id); +CREATE TABLE WORKFLOW (ID SERIAL NOT NULL, NAME VARCHAR(255), PRIMARY KEY (ID)); +CREATE TABLE OAISET (ID SERIAL NOT NULL, DEFINITION TEXT, DELETED BOOLEAN, DESCRIPTION TEXT, NAME TEXT, SPEC TEXT, UPDATEINPROGRESS BOOLEAN, VERSION BIGINT, PRIMARY KEY (ID)); +CREATE TABLE DATAVERSELINKINGDATAVERSE (ID SERIAL NOT NULL, LINKCREATETIME TIMESTAMP, DATAVERSE_ID BIGINT NOT NULL, LINKINGDATAVERSE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSELINKINGDATAVERSE_dataverse_id ON DATAVERSELINKINGDATAVERSE (dataverse_id); +CREATE INDEX INDEX_DATAVERSELINKINGDATAVERSE_linkingDataverse_id ON DATAVERSELINKINGDATAVERSE (linkingDataverse_id); +CREATE TABLE METADATABLOCK (ID SERIAL NOT NULL, DISPLAYNAME VARCHAR(255) NOT NULL, NAME VARCHAR(255) NOT NULL, namespaceuri TEXT, owner_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_METADATABLOCK_name ON METADATABLOCK (name); +CREATE INDEX INDEX_METADATABLOCK_owner_id ON METADATABLOCK (owner_id); +CREATE TABLE CONFIRMEMAILDATA (ID SERIAL NOT NULL, CREATED TIMESTAMP NOT NULL, EXPIRES TIMESTAMP NOT NULL, TOKEN VARCHAR(255), AUTHENTICATEDUSER_ID BIGINT NOT NULL UNIQUE, PRIMARY KEY (ID)); +CREATE INDEX INDEX_CONFIRMEMAILDATA_token ON CONFIRMEMAILDATA (token); +CREATE INDEX INDEX_CONFIRMEMAILDATA_authenticateduser_id ON CONFIRMEMAILDATA (authenticateduser_id); +CREATE TABLE OAUTH2TOKENDATA (ID SERIAL NOT NULL, ACCESSTOKEN TEXT, EXPIRYDATE TIMESTAMP, OAUTHPROVIDERID VARCHAR(255), RAWRESPONSE TEXT, REFRESHTOKEN VARCHAR(64), SCOPE VARCHAR(64), TOKENTYPE VARCHAR(32), USER_ID BIGINT, PRIMARY KEY (ID)); +CREATE TABLE DVOBJECT (ID SERIAL NOT NULL, DTYPE VARCHAR(31), AUTHORITY VARCHAR(255), CREATEDATE TIMESTAMP NOT NULL, GLOBALIDCREATETIME TIMESTAMP, IDENTIFIER VARCHAR(255), IDENTIFIERREGISTERED BOOLEAN, INDEXTIME TIMESTAMP, MODIFICATIONTIME TIMESTAMP NOT NULL, PERMISSIONINDEXTIME TIMESTAMP, PERMISSIONMODIFICATIONTIME TIMESTAMP, PREVIEWIMAGEAVAILABLE BOOLEAN, PROTOCOL VARCHAR(255), PUBLICATIONDATE TIMESTAMP, STORAGEIDENTIFIER VARCHAR(255), CREATOR_ID BIGINT, OWNER_ID BIGINT, RELEASEUSER_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DVOBJECT_dtype ON DVOBJECT (dtype); +CREATE INDEX INDEX_DVOBJECT_owner_id ON DVOBJECT (owner_id); +CREATE INDEX INDEX_DVOBJECT_creator_id ON DVOBJECT (creator_id); +CREATE INDEX INDEX_DVOBJECT_releaseuser_id ON DVOBJECT (releaseuser_id); +CREATE TABLE DATAVERSE (ID BIGINT NOT NULL, AFFILIATION VARCHAR(255), ALIAS VARCHAR(255) NOT NULL UNIQUE, DATAVERSETYPE VARCHAR(255) NOT NULL, 
description TEXT, FACETROOT BOOLEAN, GUESTBOOKROOT BOOLEAN, METADATABLOCKROOT BOOLEAN, NAME VARCHAR(255) NOT NULL, PERMISSIONROOT BOOLEAN, TEMPLATEROOT BOOLEAN, THEMEROOT BOOLEAN, DEFAULTCONTRIBUTORROLE_ID BIGINT, DEFAULTTEMPLATE_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSE_defaultcontributorrole_id ON DATAVERSE (defaultcontributorrole_id); +CREATE INDEX INDEX_DATAVERSE_defaulttemplate_id ON DATAVERSE (defaulttemplate_id); +CREATE INDEX INDEX_DATAVERSE_alias ON DATAVERSE (alias); +CREATE INDEX INDEX_DATAVERSE_affiliation ON DATAVERSE (affiliation); +CREATE INDEX INDEX_DATAVERSE_dataversetype ON DATAVERSE (dataversetype); +CREATE INDEX INDEX_DATAVERSE_facetroot ON DATAVERSE (facetroot); +CREATE INDEX INDEX_DATAVERSE_guestbookroot ON DATAVERSE (guestbookroot); +CREATE INDEX INDEX_DATAVERSE_metadatablockroot ON DATAVERSE (metadatablockroot); +CREATE INDEX INDEX_DATAVERSE_templateroot ON DATAVERSE (templateroot); +CREATE INDEX INDEX_DATAVERSE_permissionroot ON DATAVERSE (permissionroot); +CREATE INDEX INDEX_DATAVERSE_themeroot ON DATAVERSE (themeroot); +CREATE TABLE IPV6RANGE (ID BIGINT NOT NULL, BOTTOMA BIGINT, BOTTOMB BIGINT, BOTTOMC BIGINT, BOTTOMD BIGINT, TOPA BIGINT, TOPB BIGINT, TOPC BIGINT, TOPD BIGINT, OWNER_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_IPV6RANGE_owner_id ON IPV6RANGE (owner_id); +CREATE TABLE SAVEDSEARCHFILTERQUERY (ID SERIAL NOT NULL, FILTERQUERY TEXT, SAVEDSEARCH_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_SAVEDSEARCHFILTERQUERY_savedsearch_id ON SAVEDSEARCHFILTERQUERY (savedsearch_id); +CREATE TABLE STORAGESITE (ID SERIAL NOT NULL, hostname TEXT, name TEXT, PRIMARYSTORAGE BOOLEAN NOT NULL, transferProtocols TEXT, PRIMARY KEY (ID)); +CREATE TABLE DATAVERSEFACET (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, datasetfieldtype_id BIGINT, dataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSEFACET_dataverse_id ON DATAVERSEFACET (dataverse_id); +CREATE INDEX INDEX_DATAVERSEFACET_datasetfieldtype_id ON DATAVERSEFACET (datasetfieldtype_id); +CREATE INDEX INDEX_DATAVERSEFACET_displayorder ON DATAVERSEFACET (displayorder); +CREATE TABLE OAIRECORD (ID SERIAL NOT NULL, GLOBALID VARCHAR(255), LASTUPDATETIME TIMESTAMP, REMOVED BOOLEAN, SETNAME VARCHAR(255), PRIMARY KEY (ID)); +CREATE TABLE DATAVERSEFEATUREDDATAVERSE (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, dataverse_id BIGINT, featureddataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSEFEATUREDDATAVERSE_dataverse_id ON DATAVERSEFEATUREDDATAVERSE (dataverse_id); +CREATE INDEX INDEX_DATAVERSEFEATUREDDATAVERSE_featureddataverse_id ON DATAVERSEFEATUREDDATAVERSE (featureddataverse_id); +CREATE INDEX INDEX_DATAVERSEFEATUREDDATAVERSE_displayorder ON DATAVERSEFEATUREDDATAVERSE (displayorder); +CREATE TABLE HARVESTINGCLIENT (ID SERIAL NOT NULL, ARCHIVEDESCRIPTION TEXT, ARCHIVEURL VARCHAR(255), DELETED BOOLEAN, HARVESTSTYLE VARCHAR(255), HARVESTTYPE VARCHAR(255), HARVESTINGNOW BOOLEAN, HARVESTINGSET VARCHAR(255), HARVESTINGURL VARCHAR(255), METADATAPREFIX VARCHAR(255), NAME VARCHAR(255) NOT NULL UNIQUE, SCHEDULEDAYOFWEEK INTEGER, SCHEDULEHOUROFDAY INTEGER, SCHEDULEPERIOD VARCHAR(255), SCHEDULED BOOLEAN, dataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_HARVESTINGCLIENT_dataverse_id ON HARVESTINGCLIENT (dataverse_id); +CREATE INDEX INDEX_HARVESTINGCLIENT_harvesttype ON HARVESTINGCLIENT (harvesttype); +CREATE INDEX INDEX_HARVESTINGCLIENT_harveststyle ON HARVESTINGCLIENT (harveststyle); +CREATE INDEX INDEX_HARVESTINGCLIENT_harvestingurl ON HARVESTINGCLIENT 
(harvestingurl); +CREATE TABLE APITOKEN (ID SERIAL NOT NULL, CREATETIME TIMESTAMP NOT NULL, DISABLED BOOLEAN NOT NULL, EXPIRETIME TIMESTAMP NOT NULL, TOKENSTRING VARCHAR(255) NOT NULL UNIQUE, AUTHENTICATEDUSER_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_APITOKEN_authenticateduser_id ON APITOKEN (authenticateduser_id); +CREATE TABLE DATASETFIELDVALUE (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, value TEXT, DATASETFIELD_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETFIELDVALUE_datasetfield_id ON DATASETFIELDVALUE (datasetfield_id); +CREATE TABLE CUSTOMQUESTION (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, HIDDEN BOOLEAN, QUESTIONSTRING VARCHAR(255) NOT NULL, QUESTIONTYPE VARCHAR(255) NOT NULL, REQUIRED BOOLEAN, GUESTBOOK_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_CUSTOMQUESTION_guestbook_id ON CUSTOMQUESTION (guestbook_id); +CREATE TABLE VARIABLERANGEITEM (ID SERIAL NOT NULL, VALUE DECIMAL(38), DATAVARIABLE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_VARIABLERANGEITEM_datavariable_id ON VARIABLERANGEITEM (datavariable_id); +CREATE TABLE VARIABLERANGE (ID SERIAL NOT NULL, BEGINVALUE VARCHAR(255), BEGINVALUETYPE INTEGER, ENDVALUE VARCHAR(255), ENDVALUETYPE INTEGER, DATAVARIABLE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_VARIABLERANGE_datavariable_id ON VARIABLERANGE (datavariable_id); +CREATE TABLE SHIBGROUP (ID SERIAL NOT NULL, ATTRIBUTE VARCHAR(255) NOT NULL, NAME VARCHAR(255) NOT NULL, PATTERN VARCHAR(255) NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE WORKFLOWCOMMENT (ID SERIAL NOT NULL, CREATED TIMESTAMP NOT NULL, MESSAGE TEXT, TYPE VARCHAR(255) NOT NULL, AUTHENTICATEDUSER_ID BIGINT, DATASETVERSION_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE DATASETFIELD (ID SERIAL NOT NULL, DATASETFIELDTYPE_ID BIGINT NOT NULL, DATASETVERSION_ID BIGINT, PARENTDATASETFIELDCOMPOUNDVALUE_ID BIGINT, TEMPLATE_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETFIELD_datasetfieldtype_id ON DATASETFIELD (datasetfieldtype_id); +CREATE INDEX INDEX_DATASETFIELD_datasetversion_id ON DATASETFIELD (datasetversion_id); +CREATE INDEX INDEX_DATASETFIELD_parentdatasetfieldcompoundvalue_id ON DATASETFIELD (parentdatasetfieldcompoundvalue_id); +CREATE INDEX INDEX_DATASETFIELD_template_id ON DATASETFIELD (template_id); +CREATE TABLE PERSISTEDGLOBALGROUP (ID BIGINT NOT NULL, DTYPE VARCHAR(31), DESCRIPTION VARCHAR(255), DISPLAYNAME VARCHAR(255), PERSISTEDGROUPALIAS VARCHAR(255) UNIQUE, PRIMARY KEY (ID)); +CREATE INDEX INDEX_PERSISTEDGLOBALGROUP_dtype ON PERSISTEDGLOBALGROUP (dtype); +CREATE TABLE IPV4RANGE (ID BIGINT NOT NULL, BOTTOMASLONG BIGINT, TOPASLONG BIGINT, OWNER_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_IPV4RANGE_owner_id ON IPV4RANGE (owner_id); +CREATE TABLE DATASETVERSION (ID SERIAL NOT NULL, UNF VARCHAR(255), ARCHIVALCOPYLOCATION TEXT, ARCHIVENOTE VARCHAR(1000), ARCHIVETIME TIMESTAMP, CREATETIME TIMESTAMP NOT NULL, DEACCESSIONLINK VARCHAR(255), LASTUPDATETIME TIMESTAMP NOT NULL, MINORVERSIONNUMBER BIGINT, RELEASETIME TIMESTAMP, VERSION BIGINT, VERSIONNOTE VARCHAR(1000), VERSIONNUMBER BIGINT, VERSIONSTATE VARCHAR(255), DATASET_ID BIGINT, termsOfUseAndAccess_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETVERSION_dataset_id ON DATASETVERSION (dataset_id); +CREATE TABLE METRIC (ID SERIAL NOT NULL, DATALOCATION TEXT, DAYSTRING TEXT, LASTCALLEDDATE TIMESTAMP NOT NULL, NAME VARCHAR(255) NOT NULL, VALUEJSON TEXT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_METRIC_id ON METRIC (id); +CREATE TABLE USERNOTIFICATION (ID SERIAL 
NOT NULL, EMAILED BOOLEAN, OBJECTID BIGINT, READNOTIFICATION BOOLEAN, SENDDATE TIMESTAMP, TYPE INTEGER NOT NULL, REQUESTOR_ID BIGINT, USER_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_USERNOTIFICATION_user_id ON USERNOTIFICATION (user_id); +CREATE TABLE WORKFLOWSTEPDATA (ID SERIAL NOT NULL, PROVIDERID VARCHAR(255), STEPTYPE VARCHAR(255), PARENT_ID BIGINT, index INTEGER, PRIMARY KEY (ID)); +CREATE TABLE CUSTOMFIELDMAP (ID SERIAL NOT NULL, SOURCEDATASETFIELD VARCHAR(255), SOURCETEMPLATE VARCHAR(255), TARGETDATASETFIELD VARCHAR(255), PRIMARY KEY (ID)); +CREATE INDEX INDEX_CUSTOMFIELDMAP_sourcedatasetfield ON CUSTOMFIELDMAP (sourcedatasetfield); +CREATE INDEX INDEX_CUSTOMFIELDMAP_sourcetemplate ON CUSTOMFIELDMAP (sourcetemplate); +CREATE TABLE GUESTBOOK (ID SERIAL NOT NULL, CREATETIME TIMESTAMP NOT NULL, EMAILREQUIRED BOOLEAN, ENABLED BOOLEAN, INSTITUTIONREQUIRED BOOLEAN, NAME VARCHAR(255), NAMEREQUIRED BOOLEAN, POSITIONREQUIRED BOOLEAN, DATAVERSE_ID BIGINT, PRIMARY KEY (ID)); +CREATE TABLE ACTIONLOGRECORD (ID VARCHAR(36) NOT NULL, ACTIONRESULT VARCHAR(255), ACTIONSUBTYPE VARCHAR(255), ACTIONTYPE VARCHAR(255), ENDTIME TIMESTAMP, INFO TEXT, STARTTIME TIMESTAMP, USERIDENTIFIER VARCHAR(255), PRIMARY KEY (ID)); +CREATE INDEX INDEX_ACTIONLOGRECORD_useridentifier ON ACTIONLOGRECORD (useridentifier); +CREATE INDEX INDEX_ACTIONLOGRECORD_actiontype ON ACTIONLOGRECORD (actiontype); +CREATE INDEX INDEX_ACTIONLOGRECORD_starttime ON ACTIONLOGRECORD (starttime); +CREATE TABLE MAPLAYERMETADATA (ID SERIAL NOT NULL, EMBEDMAPLINK VARCHAR(255) NOT NULL, ISJOINLAYER BOOLEAN, JOINDESCRIPTION TEXT, LASTVERIFIEDSTATUS INTEGER, LASTVERIFIEDTIME TIMESTAMP, LAYERLINK VARCHAR(255) NOT NULL, LAYERNAME VARCHAR(255) NOT NULL, MAPIMAGELINK VARCHAR(255), MAPLAYERLINKS TEXT, WORLDMAPUSERNAME VARCHAR(255) NOT NULL, DATASET_ID BIGINT NOT NULL, DATAFILE_ID BIGINT NOT NULL UNIQUE, PRIMARY KEY (ID)); +CREATE INDEX INDEX_MAPLAYERMETADATA_dataset_id ON MAPLAYERMETADATA (dataset_id); +CREATE TABLE SAVEDSEARCH (ID SERIAL NOT NULL, QUERY TEXT, CREATOR_ID BIGINT NOT NULL, DEFINITIONPOINT_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_SAVEDSEARCH_definitionpoint_id ON SAVEDSEARCH (definitionpoint_id); +CREATE INDEX INDEX_SAVEDSEARCH_creator_id ON SAVEDSEARCH (creator_id); +CREATE TABLE EXPLICITGROUP (ID SERIAL NOT NULL, DESCRIPTION VARCHAR(1024), DISPLAYNAME VARCHAR(255), GROUPALIAS VARCHAR(255) UNIQUE, GROUPALIASINOWNER VARCHAR(255), OWNER_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_EXPLICITGROUP_owner_id ON EXPLICITGROUP (owner_id); +CREATE INDEX INDEX_EXPLICITGROUP_groupaliasinowner ON EXPLICITGROUP (groupaliasinowner); +CREATE TABLE FOREIGNMETADATAFORMATMAPPING (ID SERIAL NOT NULL, DISPLAYNAME VARCHAR(255) NOT NULL, NAME VARCHAR(255) NOT NULL, SCHEMALOCATION VARCHAR(255), STARTELEMENT VARCHAR(255), PRIMARY KEY (ID)); +CREATE INDEX INDEX_FOREIGNMETADATAFORMATMAPPING_name ON FOREIGNMETADATAFORMATMAPPING (name); +CREATE TABLE EXTERNALTOOL (ID SERIAL NOT NULL, CONTENTTYPE TEXT, DESCRIPTION TEXT, DISPLAYNAME VARCHAR(255) NOT NULL, TOOLPARAMETERS VARCHAR(255) NOT NULL, TOOLURL VARCHAR(255) NOT NULL, TYPE VARCHAR(255) NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE DATASETFIELDDEFAULTVALUE (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, STRVALUE TEXT, DATASETFIELD_ID BIGINT NOT NULL, DEFAULTVALUESET_ID BIGINT NOT NULL, PARENTDATASETFIELDDEFAULTVALUE_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETFIELDDEFAULTVALUE_datasetfield_id ON DATASETFIELDDEFAULTVALUE (datasetfield_id); +CREATE INDEX 
INDEX_DATASETFIELDDEFAULTVALUE_defaultvalueset_id ON DATASETFIELDDEFAULTVALUE (defaultvalueset_id); +CREATE INDEX INDEX_DATASETFIELDDEFAULTVALUE_parentdatasetfielddefaultvalue_id ON DATASETFIELDDEFAULTVALUE (parentdatasetfielddefaultvalue_id); +CREATE INDEX INDEX_DATASETFIELDDEFAULTVALUE_displayorder ON DATASETFIELDDEFAULTVALUE (displayorder); +CREATE TABLE PENDINGWORKFLOWINVOCATION (INVOCATIONID VARCHAR(255) NOT NULL, DATASETEXTERNALLYRELEASED BOOLEAN, IPADDRESS VARCHAR(255), NEXTMINORVERSIONNUMBER BIGINT, NEXTVERSIONNUMBER BIGINT, PENDINGSTEPIDX INTEGER, TYPEORDINAL INTEGER, USERID VARCHAR(255), WORKFLOW_ID BIGINT, DATASET_ID BIGINT, PRIMARY KEY (INVOCATIONID)); +CREATE TABLE DEFAULTVALUESET (ID SERIAL NOT NULL, NAME VARCHAR(255) NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE AUTHENTICATEDUSER (ID SERIAL NOT NULL, AFFILIATION VARCHAR(255), CREATEDTIME TIMESTAMP NOT NULL, EMAIL VARCHAR(255) NOT NULL UNIQUE, EMAILCONFIRMED TIMESTAMP, FIRSTNAME VARCHAR(255), LASTAPIUSETIME TIMESTAMP, LASTLOGINTIME TIMESTAMP, LASTNAME VARCHAR(255), POSITION VARCHAR(255), SUPERUSER BOOLEAN, USERIDENTIFIER VARCHAR(255) NOT NULL UNIQUE, PRIMARY KEY (ID)); +CREATE TABLE DATATABLE (ID SERIAL NOT NULL, CASEQUANTITY BIGINT, ORIGINALFILEFORMAT VARCHAR(255), ORIGINALFILESIZE BIGINT, ORIGINALFORMATVERSION VARCHAR(255), RECORDSPERCASE BIGINT, UNF VARCHAR(255) NOT NULL, VARQUANTITY BIGINT, DATAFILE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATATABLE_datafile_id ON DATATABLE (datafile_id); +CREATE TABLE INGESTREPORT (ID SERIAL NOT NULL, ENDTIME TIMESTAMP, REPORT TEXT, STARTTIME TIMESTAMP, STATUS INTEGER, TYPE INTEGER, DATAFILE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_INGESTREPORT_datafile_id ON INGESTREPORT (datafile_id); +CREATE TABLE AUTHENTICATIONPROVIDERROW (ID VARCHAR(255) NOT NULL, ENABLED BOOLEAN, FACTORYALIAS VARCHAR(255), FACTORYDATA TEXT, SUBTITLE VARCHAR(255), TITLE VARCHAR(255), PRIMARY KEY (ID)); +CREATE INDEX INDEX_AUTHENTICATIONPROVIDERROW_enabled ON AUTHENTICATIONPROVIDERROW (enabled); +CREATE TABLE FOREIGNMETADATAFIELDMAPPING (ID SERIAL NOT NULL, datasetfieldName TEXT, foreignFieldXPath TEXT, ISATTRIBUTE BOOLEAN, FOREIGNMETADATAFORMATMAPPING_ID BIGINT, PARENTFIELDMAPPING_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_FOREIGNMETADATAFIELDMAPPING_foreignmetadataformatmapping_id ON FOREIGNMETADATAFIELDMAPPING (foreignmetadataformatmapping_id); +CREATE INDEX INDEX_FOREIGNMETADATAFIELDMAPPING_foreignfieldxpath ON FOREIGNMETADATAFIELDMAPPING (foreignfieldxpath); +CREATE INDEX INDEX_FOREIGNMETADATAFIELDMAPPING_parentfieldmapping_id ON FOREIGNMETADATAFIELDMAPPING (parentfieldmapping_id); +CREATE TABLE FILEMETADATA (ID SERIAL NOT NULL, DESCRIPTION TEXT, DIRECTORYLABEL VARCHAR(255), LABEL VARCHAR(255) NOT NULL, prov_freeform TEXT, RESTRICTED BOOLEAN, VERSION BIGINT, DATAFILE_ID BIGINT NOT NULL, DATASETVERSION_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_FILEMETADATA_datafile_id ON FILEMETADATA (datafile_id); +CREATE INDEX INDEX_FILEMETADATA_datasetversion_id ON FILEMETADATA (datasetversion_id); +CREATE TABLE CUSTOMQUESTIONVALUE (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, VALUESTRING VARCHAR(255) NOT NULL, CUSTOMQUESTION_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE SUMMARYSTATISTIC (ID SERIAL NOT NULL, TYPE INTEGER, VALUE VARCHAR(255), DATAVARIABLE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_SUMMARYSTATISTIC_datavariable_id ON SUMMARYSTATISTIC (datavariable_id); +CREATE TABLE worldmapauth_token (ID SERIAL NOT NULL, CREATED TIMESTAMP NOT NULL, 
HASEXPIRED BOOLEAN NOT NULL, LASTREFRESHTIME TIMESTAMP NOT NULL, MODIFIED TIMESTAMP NOT NULL, TOKEN VARCHAR(255), APPLICATION_ID BIGINT NOT NULL, DATAFILE_ID BIGINT NOT NULL, DATAVERSEUSER_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE UNIQUE INDEX token_value ON worldmapauth_token (token); +CREATE INDEX INDEX_worldmapauth_token_application_id ON worldmapauth_token (application_id); +CREATE INDEX INDEX_worldmapauth_token_datafile_id ON worldmapauth_token (datafile_id); +CREATE INDEX INDEX_worldmapauth_token_dataverseuser_id ON worldmapauth_token (dataverseuser_id); +CREATE TABLE PASSWORDRESETDATA (ID SERIAL NOT NULL, CREATED TIMESTAMP NOT NULL, EXPIRES TIMESTAMP NOT NULL, REASON VARCHAR(255), TOKEN VARCHAR(255), BUILTINUSER_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_PASSWORDRESETDATA_token ON PASSWORDRESETDATA (token); +CREATE INDEX INDEX_PASSWORDRESETDATA_builtinuser_id ON PASSWORDRESETDATA (builtinuser_id); +CREATE TABLE CONTROLLEDVOCABULARYVALUE (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, IDENTIFIER VARCHAR(255), STRVALUE TEXT, DATASETFIELDTYPE_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_CONTROLLEDVOCABULARYVALUE_datasetfieldtype_id ON CONTROLLEDVOCABULARYVALUE (datasetfieldtype_id); +CREATE INDEX INDEX_CONTROLLEDVOCABULARYVALUE_displayorder ON CONTROLLEDVOCABULARYVALUE (displayorder); +CREATE TABLE DATASETLINKINGDATAVERSE (ID SERIAL NOT NULL, LINKCREATETIME TIMESTAMP NOT NULL, DATASET_ID BIGINT NOT NULL, LINKINGDATAVERSE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETLINKINGDATAVERSE_dataset_id ON DATASETLINKINGDATAVERSE (dataset_id); +CREATE INDEX INDEX_DATASETLINKINGDATAVERSE_linkingDataverse_id ON DATASETLINKINGDATAVERSE (linkingDataverse_id); +CREATE TABLE DATASET (ID BIGINT NOT NULL, FILEACCESSREQUEST BOOLEAN, HARVESTIDENTIFIER VARCHAR(255), LASTEXPORTTIME TIMESTAMP, USEGENERICTHUMBNAIL BOOLEAN, citationDateDatasetFieldType_id BIGINT, harvestingClient_id BIGINT, guestbook_id BIGINT, thumbnailfile_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASET_guestbook_id ON DATASET (guestbook_id); +CREATE INDEX INDEX_DATASET_thumbnailfile_id ON DATASET (thumbnailfile_id); +CREATE TABLE CLIENTHARVESTRUN (ID SERIAL NOT NULL, DELETEDDATASETCOUNT BIGINT, FAILEDDATASETCOUNT BIGINT, FINISHTIME TIMESTAMP, HARVESTRESULT INTEGER, HARVESTEDDATASETCOUNT BIGINT, STARTTIME TIMESTAMP, HARVESTINGCLIENT_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE worldmapauth_tokentype (ID SERIAL NOT NULL, CONTACTEMAIL VARCHAR(255), CREATED TIMESTAMP NOT NULL, HOSTNAME VARCHAR(255), IPADDRESS VARCHAR(255), MAPITLINK VARCHAR(255) NOT NULL, MD5 VARCHAR(255) NOT NULL, MODIFIED TIMESTAMP NOT NULL, NAME VARCHAR(255) NOT NULL, timeLimitMinutes int default 30, timeLimitSeconds bigint default 1800, PRIMARY KEY (ID)); +CREATE UNIQUE INDEX application_name ON worldmapauth_tokentype (name); +CREATE TABLE DATAFILETAG (ID SERIAL NOT NULL, TYPE INTEGER NOT NULL, DATAFILE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAFILETAG_datafile_id ON DATAFILETAG (datafile_id); +CREATE TABLE AUTHENTICATEDUSERLOOKUP (ID SERIAL NOT NULL, AUTHENTICATIONPROVIDERID VARCHAR(255), PERSISTENTUSERID VARCHAR(255), AUTHENTICATEDUSER_ID BIGINT NOT NULL UNIQUE, PRIMARY KEY (ID)); +CREATE TABLE INGESTREQUEST (ID SERIAL NOT NULL, CONTROLCARD VARCHAR(255), FORCETYPECHECK BOOLEAN, LABELSFILE VARCHAR(255), TEXTENCODING VARCHAR(255), datafile_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_INGESTREQUEST_datafile_id ON INGESTREQUEST (datafile_id); +CREATE TABLE SETTING (NAME VARCHAR(255) NOT NULL, 
CONTENT TEXT, PRIMARY KEY (NAME)); +CREATE TABLE DATAVERSECONTACT (ID SERIAL NOT NULL, CONTACTEMAIL VARCHAR(255) NOT NULL, DISPLAYORDER INTEGER, dataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSECONTACT_dataverse_id ON DATAVERSECONTACT (dataverse_id); +CREATE INDEX INDEX_DATAVERSECONTACT_contactemail ON DATAVERSECONTACT (contactemail); +CREATE INDEX INDEX_DATAVERSECONTACT_displayorder ON DATAVERSECONTACT (displayorder); +CREATE TABLE VARIABLECATEGORY (ID SERIAL NOT NULL, CATORDER INTEGER, FREQUENCY FLOAT, LABEL VARCHAR(255), MISSING BOOLEAN, VALUE VARCHAR(255), DATAVARIABLE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_VARIABLECATEGORY_datavariable_id ON VARIABLECATEGORY (datavariable_id); +CREATE TABLE DATAVARIABLE (ID SERIAL NOT NULL, FACTOR BOOLEAN, FILEENDPOSITION BIGINT, FILEORDER INTEGER, FILESTARTPOSITION BIGINT, FORMAT VARCHAR(255), FORMATCATEGORY VARCHAR(255), INTERVAL INTEGER, LABEL TEXT, NAME VARCHAR(255), NUMBEROFDECIMALPOINTS BIGINT, ORDEREDFACTOR BOOLEAN, RECORDSEGMENTNUMBER BIGINT, TYPE INTEGER, UNF VARCHAR(255), UNIVERSE VARCHAR(255), WEIGHTED BOOLEAN, DATATABLE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVARIABLE_datatable_id ON DATAVARIABLE (datatable_id); +CREATE TABLE CONTROLLEDVOCABALTERNATE (ID SERIAL NOT NULL, STRVALUE TEXT, CONTROLLEDVOCABULARYVALUE_ID BIGINT NOT NULL, DATASETFIELDTYPE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_CONTROLLEDVOCABALTERNATE_controlledvocabularyvalue_id ON CONTROLLEDVOCABALTERNATE (controlledvocabularyvalue_id); +CREATE INDEX INDEX_CONTROLLEDVOCABALTERNATE_datasetfieldtype_id ON CONTROLLEDVOCABALTERNATE (datasetfieldtype_id); +CREATE TABLE DATAFILE (ID BIGINT NOT NULL, CHECKSUMTYPE VARCHAR(255) NOT NULL, CHECKSUMVALUE VARCHAR(255) NOT NULL, CONTENTTYPE VARCHAR(255) NOT NULL, FILESIZE BIGINT, INGESTSTATUS CHAR(1), PREVIOUSDATAFILEID BIGINT, prov_entityname TEXT, RESTRICTED BOOLEAN, ROOTDATAFILEID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAFILE_ingeststatus ON DATAFILE (ingeststatus); +CREATE INDEX INDEX_DATAFILE_checksumvalue ON DATAFILE (checksumvalue); +CREATE INDEX INDEX_DATAFILE_contenttype ON DATAFILE (contenttype); +CREATE INDEX INDEX_DATAFILE_restricted ON DATAFILE (restricted); +CREATE TABLE DataverseFieldTypeInputLevel (ID SERIAL NOT NULL, INCLUDE BOOLEAN, REQUIRED BOOLEAN, datasetfieldtype_id BIGINT, dataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DataverseFieldTypeInputLevel_dataverse_id ON DataverseFieldTypeInputLevel (dataverse_id); +CREATE INDEX INDEX_DataverseFieldTypeInputLevel_datasetfieldtype_id ON DataverseFieldTypeInputLevel (datasetfieldtype_id); +CREATE INDEX INDEX_DataverseFieldTypeInputLevel_required ON DataverseFieldTypeInputLevel (required); +CREATE TABLE BUILTINUSER (ID SERIAL NOT NULL, ENCRYPTEDPASSWORD VARCHAR(255), PASSWORDENCRYPTIONVERSION INTEGER, USERNAME VARCHAR(255) NOT NULL UNIQUE, PRIMARY KEY (ID)); +CREATE INDEX INDEX_BUILTINUSER_userName ON BUILTINUSER (userName); +CREATE TABLE TERMSOFUSEANDACCESS (ID SERIAL NOT NULL, AVAILABILITYSTATUS TEXT, CITATIONREQUIREMENTS TEXT, CONDITIONS TEXT, CONFIDENTIALITYDECLARATION TEXT, CONTACTFORACCESS TEXT, DATAACCESSPLACE TEXT, DEPOSITORREQUIREMENTS TEXT, DISCLAIMER TEXT, FILEACCESSREQUEST BOOLEAN, LICENSE VARCHAR(255), ORIGINALARCHIVE TEXT, RESTRICTIONS TEXT, SIZEOFCOLLECTION TEXT, SPECIALPERMISSIONS TEXT, STUDYCOMPLETION TEXT, TERMSOFACCESS TEXT, TERMSOFUSE TEXT, PRIMARY KEY (ID)); +CREATE TABLE DOIDATACITEREGISTERCACHE (ID SERIAL NOT NULL, DOI VARCHAR(255) UNIQUE, 
STATUS VARCHAR(255), URL VARCHAR(255), XML TEXT, PRIMARY KEY (ID)); +CREATE TABLE HARVESTINGDATAVERSECONFIG (ID BIGINT NOT NULL, ARCHIVEDESCRIPTION TEXT, ARCHIVEURL VARCHAR(255), HARVESTSTYLE VARCHAR(255), HARVESTTYPE VARCHAR(255), HARVESTINGSET VARCHAR(255), HARVESTINGURL VARCHAR(255), dataverse_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_HARVESTINGDATAVERSECONFIG_dataverse_id ON HARVESTINGDATAVERSECONFIG (dataverse_id); +CREATE INDEX INDEX_HARVESTINGDATAVERSECONFIG_harvesttype ON HARVESTINGDATAVERSECONFIG (harvesttype); +CREATE INDEX INDEX_HARVESTINGDATAVERSECONFIG_harveststyle ON HARVESTINGDATAVERSECONFIG (harveststyle); +CREATE INDEX INDEX_HARVESTINGDATAVERSECONFIG_harvestingurl ON HARVESTINGDATAVERSECONFIG (harvestingurl); +CREATE TABLE ALTERNATIVEPERSISTENTIDENTIFIER (ID SERIAL NOT NULL, AUTHORITY VARCHAR(255), GLOBALIDCREATETIME TIMESTAMP, IDENTIFIER VARCHAR(255), IDENTIFIERREGISTERED BOOLEAN, PROTOCOL VARCHAR(255), STORAGELOCATIONDESIGNATOR BOOLEAN, DVOBJECT_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE TABLE DATASETFIELDCOMPOUNDVALUE (ID SERIAL NOT NULL, DISPLAYORDER INTEGER, PARENTDATASETFIELD_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETFIELDCOMPOUNDVALUE_parentdatasetfield_id ON DATASETFIELDCOMPOUNDVALUE (parentdatasetfield_id); +CREATE TABLE DATASETVERSIONUSER (ID SERIAL NOT NULL, LASTUPDATEDATE TIMESTAMP NOT NULL, authenticatedUser_id BIGINT, datasetversion_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETVERSIONUSER_authenticateduser_id ON DATASETVERSIONUSER (authenticateduser_id); +CREATE INDEX INDEX_DATASETVERSIONUSER_datasetversion_id ON DATASETVERSIONUSER (datasetversion_id); +CREATE TABLE GUESTBOOKRESPONSE (ID SERIAL NOT NULL, DOWNLOADTYPE VARCHAR(255), EMAIL VARCHAR(255), INSTITUTION VARCHAR(255), NAME VARCHAR(255), POSITION VARCHAR(255), RESPONSETIME TIMESTAMP, SESSIONID VARCHAR(255), AUTHENTICATEDUSER_ID BIGINT, DATAFILE_ID BIGINT NOT NULL, DATASET_ID BIGINT NOT NULL, DATASETVERSION_ID BIGINT, GUESTBOOK_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_GUESTBOOKRESPONSE_guestbook_id ON GUESTBOOKRESPONSE (guestbook_id); +CREATE INDEX INDEX_GUESTBOOKRESPONSE_datafile_id ON GUESTBOOKRESPONSE (datafile_id); +CREATE INDEX INDEX_GUESTBOOKRESPONSE_dataset_id ON GUESTBOOKRESPONSE (dataset_id); +CREATE TABLE CUSTOMQUESTIONRESPONSE (ID SERIAL NOT NULL, response TEXT, CUSTOMQUESTION_ID BIGINT NOT NULL, GUESTBOOKRESPONSE_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_CUSTOMQUESTIONRESPONSE_guestbookresponse_id ON CUSTOMQUESTIONRESPONSE (guestbookresponse_id); +CREATE TABLE TEMPLATE (ID SERIAL NOT NULL, CREATETIME TIMESTAMP NOT NULL, NAME VARCHAR(255) NOT NULL, USAGECOUNT BIGINT, DATAVERSE_ID BIGINT, termsOfUseAndAccess_id BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_TEMPLATE_dataverse_id ON TEMPLATE (dataverse_id); +CREATE TABLE DATASETLOCK (ID SERIAL NOT NULL, INFO VARCHAR(255), REASON VARCHAR(255) NOT NULL, STARTTIME TIMESTAMP, DATASET_ID BIGINT NOT NULL, USER_ID BIGINT NOT NULL, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETLOCK_user_id ON DATASETLOCK (user_id); +CREATE INDEX INDEX_DATASETLOCK_dataset_id ON DATASETLOCK (dataset_id); +CREATE TABLE DATAVERSEROLE (ID SERIAL NOT NULL, ALIAS VARCHAR(255) NOT NULL UNIQUE, DESCRIPTION VARCHAR(255), NAME VARCHAR(255) NOT NULL, PERMISSIONBITS BIGINT, OWNER_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATAVERSEROLE_owner_id ON DATAVERSEROLE (owner_id); +CREATE INDEX INDEX_DATAVERSEROLE_name ON DATAVERSEROLE (name); +CREATE INDEX INDEX_DATAVERSEROLE_alias ON DATAVERSEROLE (alias); 
+CREATE TABLE DATASETFIELDTYPE (ID SERIAL NOT NULL, ADVANCEDSEARCHFIELDTYPE BOOLEAN, ALLOWCONTROLLEDVOCABULARY BOOLEAN, ALLOWMULTIPLES BOOLEAN, description TEXT, DISPLAYFORMAT VARCHAR(255), DISPLAYONCREATE BOOLEAN, DISPLAYORDER INTEGER, FACETABLE BOOLEAN, FIELDTYPE VARCHAR(255) NOT NULL, name TEXT, REQUIRED BOOLEAN, title TEXT, uri TEXT, VALIDATIONFORMAT VARCHAR(255), WATERMARK VARCHAR(255), METADATABLOCK_ID BIGINT, PARENTDATASETFIELDTYPE_ID BIGINT, PRIMARY KEY (ID)); +CREATE INDEX INDEX_DATASETFIELDTYPE_metadatablock_id ON DATASETFIELDTYPE (metadatablock_id); +CREATE INDEX INDEX_DATASETFIELDTYPE_parentdatasetfieldtype_id ON DATASETFIELDTYPE (parentdatasetfieldtype_id); +CREATE TABLE FILEMETADATA_DATAFILECATEGORY (fileCategories_ID BIGINT NOT NULL, fileMetadatas_ID BIGINT NOT NULL, PRIMARY KEY (fileCategories_ID, fileMetadatas_ID)); +CREATE INDEX INDEX_FILEMETADATA_DATAFILECATEGORY_filecategories_id ON FILEMETADATA_DATAFILECATEGORY (filecategories_id); +CREATE INDEX INDEX_FILEMETADATA_DATAFILECATEGORY_filemetadatas_id ON FILEMETADATA_DATAFILECATEGORY (filemetadatas_id); +CREATE TABLE dataverse_citationDatasetFieldTypes (dataverse_id BIGINT NOT NULL, citationdatasetfieldtype_id BIGINT NOT NULL, PRIMARY KEY (dataverse_id, citationdatasetfieldtype_id)); +CREATE TABLE dataversesubjects (dataverse_id BIGINT NOT NULL, controlledvocabularyvalue_id BIGINT NOT NULL, PRIMARY KEY (dataverse_id, controlledvocabularyvalue_id)); +CREATE TABLE DATAVERSE_METADATABLOCK (Dataverse_ID BIGINT NOT NULL, metadataBlocks_ID BIGINT NOT NULL, PRIMARY KEY (Dataverse_ID, metadataBlocks_ID)); +CREATE TABLE DATASETFIELD_CONTROLLEDVOCABULARYVALUE (DatasetField_ID BIGINT NOT NULL, controlledVocabularyValues_ID BIGINT NOT NULL, PRIMARY KEY (DatasetField_ID, controlledVocabularyValues_ID)); +CREATE INDEX INDEX_DATASETFIELD_CONTROLLEDVOCABULARYVALUE_datasetfield_id ON DATASETFIELD_CONTROLLEDVOCABULARYVALUE (datasetfield_id); +CREATE INDEX INDEX_DATASETFIELD_CONTROLLEDVOCABULARYVALUE_controlledvocabularyvalues_id ON DATASETFIELD_CONTROLLEDVOCABULARYVALUE (controlledvocabularyvalues_id); +CREATE TABLE WorkflowStepData_STEPPARAMETERS (WorkflowStepData_ID BIGINT, STEPPARAMETERS VARCHAR(2048), STEPPARAMETERS_KEY VARCHAR(255)); +CREATE TABLE WorkflowStepData_STEPSETTINGS (WorkflowStepData_ID BIGINT, STEPSETTINGS VARCHAR(2048), STEPSETTINGS_KEY VARCHAR(255)); +CREATE TABLE ExplicitGroup_CONTAINEDROLEASSIGNEES (ExplicitGroup_ID BIGINT, CONTAINEDROLEASSIGNEES VARCHAR(255)); +CREATE TABLE EXPLICITGROUP_AUTHENTICATEDUSER (ExplicitGroup_ID BIGINT NOT NULL, containedAuthenticatedUsers_ID BIGINT NOT NULL, PRIMARY KEY (ExplicitGroup_ID, containedAuthenticatedUsers_ID)); +CREATE TABLE explicitgroup_explicitgroup (explicitgroup_id BIGINT NOT NULL, containedexplicitgroups_id BIGINT NOT NULL, PRIMARY KEY (explicitgroup_id, containedexplicitgroups_id)); +CREATE TABLE PendingWorkflowInvocation_LOCALDATA (PendingWorkflowInvocation_INVOCATIONID VARCHAR(255), LOCALDATA VARCHAR(255), LOCALDATA_KEY VARCHAR(255)); +CREATE TABLE fileaccessrequests (datafile_id BIGINT NOT NULL, authenticated_user_id BIGINT NOT NULL, PRIMARY KEY (datafile_id, authenticated_user_id)); +ALTER TABLE ROLEASSIGNMENT ADD CONSTRAINT UNQ_ROLEASSIGNMENT_0 UNIQUE (assigneeIdentifier, role_id, definitionPoint_id); +ALTER TABLE DVOBJECT ADD CONSTRAINT UNQ_DVOBJECT_0 UNIQUE (authority,protocol,identifier); +ALTER TABLE DATASETVERSION ADD CONSTRAINT UNQ_DATASETVERSION_0 UNIQUE (dataset_id,versionnumber,minorversionnumber); +ALTER TABLE FOREIGNMETADATAFIELDMAPPING ADD CONSTRAINT 
UNQ_FOREIGNMETADATAFIELDMAPPING_0 UNIQUE (foreignMetadataFormatMapping_id, foreignFieldXpath); +ALTER TABLE AUTHENTICATEDUSERLOOKUP ADD CONSTRAINT UNQ_AUTHENTICATEDUSERLOOKUP_0 UNIQUE (persistentuserid, authenticationproviderid); +ALTER TABLE DataverseFieldTypeInputLevel ADD CONSTRAINT UNQ_DataverseFieldTypeInputLevel_0 UNIQUE (dataverse_id, datasetfieldtype_id); +ALTER TABLE DATAVERSETHEME ADD CONSTRAINT FK_DATAVERSETHEME_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAFILECATEGORY ADD CONSTRAINT FK_DATAFILECATEGORY_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE ROLEASSIGNMENT ADD CONSTRAINT FK_ROLEASSIGNMENT_ROLE_ID FOREIGN KEY (ROLE_ID) REFERENCES DATAVERSEROLE (ID); +ALTER TABLE ROLEASSIGNMENT ADD CONSTRAINT FK_ROLEASSIGNMENT_DEFINITIONPOINT_ID FOREIGN KEY (DEFINITIONPOINT_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSELINKINGDATAVERSE ADD CONSTRAINT FK_DATAVERSELINKINGDATAVERSE_DATAVERSE_ID FOREIGN KEY (DATAVERSE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSELINKINGDATAVERSE ADD CONSTRAINT FK_DATAVERSELINKINGDATAVERSE_LINKINGDATAVERSE_ID FOREIGN KEY (LINKINGDATAVERSE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE METADATABLOCK ADD CONSTRAINT FK_METADATABLOCK_owner_id FOREIGN KEY (owner_id) REFERENCES DVOBJECT (ID); +ALTER TABLE CONFIRMEMAILDATA ADD CONSTRAINT FK_CONFIRMEMAILDATA_AUTHENTICATEDUSER_ID FOREIGN KEY (AUTHENTICATEDUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE OAUTH2TOKENDATA ADD CONSTRAINT FK_OAUTH2TOKENDATA_USER_ID FOREIGN KEY (USER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DVOBJECT ADD CONSTRAINT FK_DVOBJECT_CREATOR_ID FOREIGN KEY (CREATOR_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DVOBJECT ADD CONSTRAINT FK_DVOBJECT_RELEASEUSER_ID FOREIGN KEY (RELEASEUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DVOBJECT ADD CONSTRAINT FK_DVOBJECT_OWNER_ID FOREIGN KEY (OWNER_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSE ADD CONSTRAINT FK_DATAVERSE_DEFAULTTEMPLATE_ID FOREIGN KEY (DEFAULTTEMPLATE_ID) REFERENCES TEMPLATE (ID); +ALTER TABLE DATAVERSE ADD CONSTRAINT FK_DATAVERSE_ID FOREIGN KEY (ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSE ADD CONSTRAINT FK_DATAVERSE_DEFAULTCONTRIBUTORROLE_ID FOREIGN KEY (DEFAULTCONTRIBUTORROLE_ID) REFERENCES DATAVERSEROLE (ID); +ALTER TABLE IPV6RANGE ADD CONSTRAINT FK_IPV6RANGE_OWNER_ID FOREIGN KEY (OWNER_ID) REFERENCES PERSISTEDGLOBALGROUP (ID); +ALTER TABLE SAVEDSEARCHFILTERQUERY ADD CONSTRAINT FK_SAVEDSEARCHFILTERQUERY_SAVEDSEARCH_ID FOREIGN KEY (SAVEDSEARCH_ID) REFERENCES SAVEDSEARCH (ID); +ALTER TABLE DATAVERSEFACET ADD CONSTRAINT FK_DATAVERSEFACET_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE DATAVERSEFACET ADD CONSTRAINT FK_DATAVERSEFACET_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSEFEATUREDDATAVERSE ADD CONSTRAINT FK_DATAVERSEFEATUREDDATAVERSE_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSEFEATUREDDATAVERSE ADD CONSTRAINT FK_DATAVERSEFEATUREDDATAVERSE_featureddataverse_id FOREIGN KEY (featureddataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE HARVESTINGCLIENT ADD CONSTRAINT FK_HARVESTINGCLIENT_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE APITOKEN ADD CONSTRAINT FK_APITOKEN_AUTHENTICATEDUSER_ID FOREIGN KEY (AUTHENTICATEDUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DATASETFIELDVALUE ADD CONSTRAINT FK_DATASETFIELDVALUE_DATASETFIELD_ID FOREIGN KEY 
(DATASETFIELD_ID) REFERENCES DATASETFIELD (ID); +ALTER TABLE CUSTOMQUESTION ADD CONSTRAINT FK_CUSTOMQUESTION_GUESTBOOK_ID FOREIGN KEY (GUESTBOOK_ID) REFERENCES GUESTBOOK (ID); +ALTER TABLE VARIABLERANGEITEM ADD CONSTRAINT FK_VARIABLERANGEITEM_DATAVARIABLE_ID FOREIGN KEY (DATAVARIABLE_ID) REFERENCES DATAVARIABLE (ID); +ALTER TABLE VARIABLERANGE ADD CONSTRAINT FK_VARIABLERANGE_DATAVARIABLE_ID FOREIGN KEY (DATAVARIABLE_ID) REFERENCES DATAVARIABLE (ID); +ALTER TABLE WORKFLOWCOMMENT ADD CONSTRAINT FK_WORKFLOWCOMMENT_AUTHENTICATEDUSER_ID FOREIGN KEY (AUTHENTICATEDUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE WORKFLOWCOMMENT ADD CONSTRAINT FK_WORKFLOWCOMMENT_DATASETVERSION_ID FOREIGN KEY (DATASETVERSION_ID) REFERENCES DATASETVERSION (ID); +ALTER TABLE DATASETFIELD ADD CONSTRAINT FK_DATASETFIELD_DATASETFIELDTYPE_ID FOREIGN KEY (DATASETFIELDTYPE_ID) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE DATASETFIELD ADD CONSTRAINT FK_DATASETFIELD_TEMPLATE_ID FOREIGN KEY (TEMPLATE_ID) REFERENCES TEMPLATE (ID); +ALTER TABLE DATASETFIELD ADD CONSTRAINT FK_DATASETFIELD_DATASETVERSION_ID FOREIGN KEY (DATASETVERSION_ID) REFERENCES DATASETVERSION (ID); +ALTER TABLE DATASETFIELD ADD CONSTRAINT FK_DATASETFIELD_PARENTDATASETFIELDCOMPOUNDVALUE_ID FOREIGN KEY (PARENTDATASETFIELDCOMPOUNDVALUE_ID) REFERENCES DATASETFIELDCOMPOUNDVALUE (ID); +ALTER TABLE IPV4RANGE ADD CONSTRAINT FK_IPV4RANGE_OWNER_ID FOREIGN KEY (OWNER_ID) REFERENCES PERSISTEDGLOBALGROUP (ID); +ALTER TABLE DATASETVERSION ADD CONSTRAINT FK_DATASETVERSION_termsOfUseAndAccess_id FOREIGN KEY (termsOfUseAndAccess_id) REFERENCES TERMSOFUSEANDACCESS (ID); +ALTER TABLE DATASETVERSION ADD CONSTRAINT FK_DATASETVERSION_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE USERNOTIFICATION ADD CONSTRAINT FK_USERNOTIFICATION_USER_ID FOREIGN KEY (USER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE USERNOTIFICATION ADD CONSTRAINT FK_USERNOTIFICATION_REQUESTOR_ID FOREIGN KEY (REQUESTOR_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE WORKFLOWSTEPDATA ADD CONSTRAINT FK_WORKFLOWSTEPDATA_PARENT_ID FOREIGN KEY (PARENT_ID) REFERENCES WORKFLOW (ID); +ALTER TABLE GUESTBOOK ADD CONSTRAINT FK_GUESTBOOK_DATAVERSE_ID FOREIGN KEY (DATAVERSE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE MAPLAYERMETADATA ADD CONSTRAINT FK_MAPLAYERMETADATA_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE MAPLAYERMETADATA ADD CONSTRAINT FK_MAPLAYERMETADATA_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE SAVEDSEARCH ADD CONSTRAINT FK_SAVEDSEARCH_DEFINITIONPOINT_ID FOREIGN KEY (DEFINITIONPOINT_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE SAVEDSEARCH ADD CONSTRAINT FK_SAVEDSEARCH_CREATOR_ID FOREIGN KEY (CREATOR_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE EXPLICITGROUP ADD CONSTRAINT FK_EXPLICITGROUP_OWNER_ID FOREIGN KEY (OWNER_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETFIELDDEFAULTVALUE ADD CONSTRAINT FK_DATASETFIELDDEFAULTVALUE_DEFAULTVALUESET_ID FOREIGN KEY (DEFAULTVALUESET_ID) REFERENCES DEFAULTVALUESET (ID); +ALTER TABLE DATASETFIELDDEFAULTVALUE ADD CONSTRAINT FK_DATASETFIELDDEFAULTVALUE_DATASETFIELD_ID FOREIGN KEY (DATASETFIELD_ID) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE DATASETFIELDDEFAULTVALUE ADD CONSTRAINT FK_DATASETFIELDDEFAULTVALUE_PARENTDATASETFIELDDEFAULTVALUE_ID FOREIGN KEY (PARENTDATASETFIELDDEFAULTVALUE_ID) REFERENCES DATASETFIELDDEFAULTVALUE (ID); +ALTER TABLE PENDINGWORKFLOWINVOCATION ADD CONSTRAINT FK_PENDINGWORKFLOWINVOCATION_WORKFLOW_ID FOREIGN KEY (WORKFLOW_ID) REFERENCES 
WORKFLOW (ID); +ALTER TABLE PENDINGWORKFLOWINVOCATION ADD CONSTRAINT FK_PENDINGWORKFLOWINVOCATION_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATATABLE ADD CONSTRAINT FK_DATATABLE_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE INGESTREPORT ADD CONSTRAINT FK_INGESTREPORT_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE FOREIGNMETADATAFIELDMAPPING ADD CONSTRAINT FK_FOREIGNMETADATAFIELDMAPPING_FOREIGNMETADATAFORMATMAPPING_ID FOREIGN KEY (FOREIGNMETADATAFORMATMAPPING_ID) REFERENCES FOREIGNMETADATAFORMATMAPPING (ID); +ALTER TABLE FOREIGNMETADATAFIELDMAPPING ADD CONSTRAINT FK_FOREIGNMETADATAFIELDMAPPING_PARENTFIELDMAPPING_ID FOREIGN KEY (PARENTFIELDMAPPING_ID) REFERENCES FOREIGNMETADATAFIELDMAPPING (ID); +ALTER TABLE FILEMETADATA ADD CONSTRAINT FK_FILEMETADATA_DATASETVERSION_ID FOREIGN KEY (DATASETVERSION_ID) REFERENCES DATASETVERSION (ID); +ALTER TABLE FILEMETADATA ADD CONSTRAINT FK_FILEMETADATA_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE CUSTOMQUESTIONVALUE ADD CONSTRAINT FK_CUSTOMQUESTIONVALUE_CUSTOMQUESTION_ID FOREIGN KEY (CUSTOMQUESTION_ID) REFERENCES CUSTOMQUESTION (ID); +ALTER TABLE SUMMARYSTATISTIC ADD CONSTRAINT FK_SUMMARYSTATISTIC_DATAVARIABLE_ID FOREIGN KEY (DATAVARIABLE_ID) REFERENCES DATAVARIABLE (ID); +ALTER TABLE worldmapauth_token ADD CONSTRAINT FK_worldmapauth_token_DATAVERSEUSER_ID FOREIGN KEY (DATAVERSEUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE worldmapauth_token ADD CONSTRAINT FK_worldmapauth_token_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE worldmapauth_token ADD CONSTRAINT FK_worldmapauth_token_APPLICATION_ID FOREIGN KEY (APPLICATION_ID) REFERENCES worldmapauth_tokentype (ID); +ALTER TABLE PASSWORDRESETDATA ADD CONSTRAINT FK_PASSWORDRESETDATA_BUILTINUSER_ID FOREIGN KEY (BUILTINUSER_ID) REFERENCES BUILTINUSER (ID); +ALTER TABLE CONTROLLEDVOCABULARYVALUE ADD CONSTRAINT FK_CONTROLLEDVOCABULARYVALUE_DATASETFIELDTYPE_ID FOREIGN KEY (DATASETFIELDTYPE_ID) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE DATASETLINKINGDATAVERSE ADD CONSTRAINT FK_DATASETLINKINGDATAVERSE_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETLINKINGDATAVERSE ADD CONSTRAINT FK_DATASETLINKINGDATAVERSE_LINKINGDATAVERSE_ID FOREIGN KEY (LINKINGDATAVERSE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASET ADD CONSTRAINT FK_DATASET_ID FOREIGN KEY (ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASET ADD CONSTRAINT FK_DATASET_harvestingClient_id FOREIGN KEY (harvestingClient_id) REFERENCES HARVESTINGCLIENT (ID); +ALTER TABLE DATASET ADD CONSTRAINT FK_DATASET_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES GUESTBOOK (ID); +ALTER TABLE DATASET ADD CONSTRAINT FK_DATASET_thumbnailfile_id FOREIGN KEY (thumbnailfile_id) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASET ADD CONSTRAINT FK_DATASET_citationDateDatasetFieldType_id FOREIGN KEY (citationDateDatasetFieldType_id) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE CLIENTHARVESTRUN ADD CONSTRAINT FK_CLIENTHARVESTRUN_HARVESTINGCLIENT_ID FOREIGN KEY (HARVESTINGCLIENT_ID) REFERENCES HARVESTINGCLIENT (ID); +ALTER TABLE DATAFILETAG ADD CONSTRAINT FK_DATAFILETAG_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE AUTHENTICATEDUSERLOOKUP ADD CONSTRAINT FK_AUTHENTICATEDUSERLOOKUP_AUTHENTICATEDUSER_ID FOREIGN KEY (AUTHENTICATEDUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE INGESTREQUEST ADD CONSTRAINT FK_INGESTREQUEST_datafile_id FOREIGN KEY 
(datafile_id) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSECONTACT ADD CONSTRAINT FK_DATAVERSECONTACT_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE VARIABLECATEGORY ADD CONSTRAINT FK_VARIABLECATEGORY_DATAVARIABLE_ID FOREIGN KEY (DATAVARIABLE_ID) REFERENCES DATAVARIABLE (ID); +ALTER TABLE DATAVARIABLE ADD CONSTRAINT FK_DATAVARIABLE_DATATABLE_ID FOREIGN KEY (DATATABLE_ID) REFERENCES DATATABLE (ID); +ALTER TABLE CONTROLLEDVOCABALTERNATE ADD CONSTRAINT FK_CONTROLLEDVOCABALTERNATE_DATASETFIELDTYPE_ID FOREIGN KEY (DATASETFIELDTYPE_ID) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE CONTROLLEDVOCABALTERNATE ADD CONSTRAINT FK_CONTROLLEDVOCABALTERNATE_CONTROLLEDVOCABULARYVALUE_ID FOREIGN KEY (CONTROLLEDVOCABULARYVALUE_ID) REFERENCES CONTROLLEDVOCABULARYVALUE (ID); +ALTER TABLE DATAFILE ADD CONSTRAINT FK_DATAFILE_ID FOREIGN KEY (ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DataverseFieldTypeInputLevel ADD CONSTRAINT FK_DataverseFieldTypeInputLevel_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE DataverseFieldTypeInputLevel ADD CONSTRAINT FK_DataverseFieldTypeInputLevel_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE HARVESTINGDATAVERSECONFIG ADD CONSTRAINT FK_HARVESTINGDATAVERSECONFIG_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE ALTERNATIVEPERSISTENTIDENTIFIER ADD CONSTRAINT FK_ALTERNATIVEPERSISTENTIDENTIFIER_DVOBJECT_ID FOREIGN KEY (DVOBJECT_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETFIELDCOMPOUNDVALUE ADD CONSTRAINT FK_DATASETFIELDCOMPOUNDVALUE_PARENTDATASETFIELD_ID FOREIGN KEY (PARENTDATASETFIELD_ID) REFERENCES DATASETFIELD (ID); +ALTER TABLE DATASETVERSIONUSER ADD CONSTRAINT FK_DATASETVERSIONUSER_authenticatedUser_id FOREIGN KEY (authenticatedUser_id) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DATASETVERSIONUSER ADD CONSTRAINT FK_DATASETVERSIONUSER_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES DATASETVERSION (ID); +ALTER TABLE GUESTBOOKRESPONSE ADD CONSTRAINT FK_GUESTBOOKRESPONSE_DATAFILE_ID FOREIGN KEY (DATAFILE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE GUESTBOOKRESPONSE ADD CONSTRAINT FK_GUESTBOOKRESPONSE_DATASETVERSION_ID FOREIGN KEY (DATASETVERSION_ID) REFERENCES DATASETVERSION (ID); +ALTER TABLE GUESTBOOKRESPONSE ADD CONSTRAINT FK_GUESTBOOKRESPONSE_GUESTBOOK_ID FOREIGN KEY (GUESTBOOK_ID) REFERENCES GUESTBOOK (ID); +ALTER TABLE GUESTBOOKRESPONSE ADD CONSTRAINT FK_GUESTBOOKRESPONSE_AUTHENTICATEDUSER_ID FOREIGN KEY (AUTHENTICATEDUSER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE GUESTBOOKRESPONSE ADD CONSTRAINT FK_GUESTBOOKRESPONSE_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE CUSTOMQUESTIONRESPONSE ADD CONSTRAINT FK_CUSTOMQUESTIONRESPONSE_CUSTOMQUESTION_ID FOREIGN KEY (CUSTOMQUESTION_ID) REFERENCES CUSTOMQUESTION (ID); +ALTER TABLE CUSTOMQUESTIONRESPONSE ADD CONSTRAINT FK_CUSTOMQUESTIONRESPONSE_GUESTBOOKRESPONSE_ID FOREIGN KEY (GUESTBOOKRESPONSE_ID) REFERENCES GUESTBOOKRESPONSE (ID); +ALTER TABLE TEMPLATE ADD CONSTRAINT FK_TEMPLATE_DATAVERSE_ID FOREIGN KEY (DATAVERSE_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE TEMPLATE ADD CONSTRAINT FK_TEMPLATE_termsOfUseAndAccess_id FOREIGN KEY (termsOfUseAndAccess_id) REFERENCES TERMSOFUSEANDACCESS (ID); +ALTER TABLE DATASETLOCK ADD CONSTRAINT FK_DATASETLOCK_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETLOCK ADD CONSTRAINT FK_DATASETLOCK_USER_ID FOREIGN KEY (USER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER 
TABLE DATAVERSEROLE ADD CONSTRAINT FK_DATAVERSEROLE_OWNER_ID FOREIGN KEY (OWNER_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETFIELDTYPE ADD CONSTRAINT FK_DATASETFIELDTYPE_PARENTDATASETFIELDTYPE_ID FOREIGN KEY (PARENTDATASETFIELDTYPE_ID) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE DATASETFIELDTYPE ADD CONSTRAINT FK_DATASETFIELDTYPE_METADATABLOCK_ID FOREIGN KEY (METADATABLOCK_ID) REFERENCES METADATABLOCK (ID); +ALTER TABLE FILEMETADATA_DATAFILECATEGORY ADD CONSTRAINT FK_FILEMETADATA_DATAFILECATEGORY_fileMetadatas_ID FOREIGN KEY (fileMetadatas_ID) REFERENCES FILEMETADATA (ID); +ALTER TABLE FILEMETADATA_DATAFILECATEGORY ADD CONSTRAINT FK_FILEMETADATA_DATAFILECATEGORY_fileCategories_ID FOREIGN KEY (fileCategories_ID) REFERENCES DATAFILECATEGORY (ID); +ALTER TABLE dataverse_citationDatasetFieldTypes ADD CONSTRAINT FK_dataverse_citationDatasetFieldTypes_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE dataverse_citationDatasetFieldTypes ADD CONSTRAINT dataverse_citationDatasetFieldTypes_citationdatasetfieldtype_id FOREIGN KEY (citationdatasetfieldtype_id) REFERENCES DATASETFIELDTYPE (ID); +ALTER TABLE dataversesubjects ADD CONSTRAINT FK_dataversesubjects_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES DVOBJECT (ID); +ALTER TABLE dataversesubjects ADD CONSTRAINT FK_dataversesubjects_controlledvocabularyvalue_id FOREIGN KEY (controlledvocabularyvalue_id) REFERENCES CONTROLLEDVOCABULARYVALUE (ID); +ALTER TABLE DATAVERSE_METADATABLOCK ADD CONSTRAINT FK_DATAVERSE_METADATABLOCK_Dataverse_ID FOREIGN KEY (Dataverse_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATAVERSE_METADATABLOCK ADD CONSTRAINT FK_DATAVERSE_METADATABLOCK_metadataBlocks_ID FOREIGN KEY (metadataBlocks_ID) REFERENCES METADATABLOCK (ID); +ALTER TABLE DATASETFIELD_CONTROLLEDVOCABULARYVALUE ADD CONSTRAINT FK_DATASETFIELD_CONTROLLEDVOCABULARYVALUE_DatasetField_ID FOREIGN KEY (DatasetField_ID) REFERENCES DATASETFIELD (ID); +ALTER TABLE DATASETFIELD_CONTROLLEDVOCABULARYVALUE ADD CONSTRAINT DTASETFIELDCONTROLLEDVOCABULARYVALUEcntrolledVocabularyValuesID FOREIGN KEY (controlledVocabularyValues_ID) REFERENCES CONTROLLEDVOCABULARYVALUE (ID); +ALTER TABLE WorkflowStepData_STEPPARAMETERS ADD CONSTRAINT FK_WorkflowStepData_STEPPARAMETERS_WorkflowStepData_ID FOREIGN KEY (WorkflowStepData_ID) REFERENCES WORKFLOWSTEPDATA (ID); +ALTER TABLE WorkflowStepData_STEPSETTINGS ADD CONSTRAINT FK_WorkflowStepData_STEPSETTINGS_WorkflowStepData_ID FOREIGN KEY (WorkflowStepData_ID) REFERENCES WORKFLOWSTEPDATA (ID); +ALTER TABLE ExplicitGroup_CONTAINEDROLEASSIGNEES ADD CONSTRAINT FK_ExplicitGroup_CONTAINEDROLEASSIGNEES_ExplicitGroup_ID FOREIGN KEY (ExplicitGroup_ID) REFERENCES EXPLICITGROUP (ID); +ALTER TABLE EXPLICITGROUP_AUTHENTICATEDUSER ADD CONSTRAINT FK_EXPLICITGROUP_AUTHENTICATEDUSER_ExplicitGroup_ID FOREIGN KEY (ExplicitGroup_ID) REFERENCES EXPLICITGROUP (ID); +ALTER TABLE EXPLICITGROUP_AUTHENTICATEDUSER ADD CONSTRAINT EXPLICITGROUP_AUTHENTICATEDUSER_containedAuthenticatedUsers_ID FOREIGN KEY (containedAuthenticatedUsers_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE explicitgroup_explicitgroup ADD CONSTRAINT FK_explicitgroup_explicitgroup_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES EXPLICITGROUP (ID); +ALTER TABLE explicitgroup_explicitgroup ADD CONSTRAINT FK_explicitgroup_explicitgroup_containedexplicitgroups_id FOREIGN KEY (containedexplicitgroups_id) REFERENCES EXPLICITGROUP (ID); +ALTER TABLE PendingWorkflowInvocation_LOCALDATA ADD CONSTRAINT 
PndngWrkflwInvocationLOCALDATAPndngWrkflwInvocationINVOCATIONID FOREIGN KEY (PendingWorkflowInvocation_INVOCATIONID) REFERENCES PENDINGWORKFLOWINVOCATION (INVOCATIONID); +ALTER TABLE fileaccessrequests ADD CONSTRAINT FK_fileaccessrequests_datafile_id FOREIGN KEY (datafile_id) REFERENCES DVOBJECT (ID); +ALTER TABLE fileaccessrequests ADD CONSTRAINT FK_fileaccessrequests_authenticated_user_id FOREIGN KEY (authenticated_user_id) REFERENCES AUTHENTICATEDUSER (ID); +CREATE TABLE SEQUENCE (SEQ_NAME VARCHAR(50) NOT NULL, SEQ_COUNT DECIMAL(38), PRIMARY KEY (SEQ_NAME)); +INSERT INTO SEQUENCE(SEQ_NAME, SEQ_COUNT) values ('SEQ_GEN', 0); diff --git a/scripts/database/releases.txt b/scripts/database/releases.txt index fd4bda9c2c2..074d1f7275a 100644 --- a/scripts/database/releases.txt +++ b/scripts/database/releases.txt @@ -30,3 +30,4 @@ v4.9.3 v4.9.4 v4.10 v4.10.1 +v4.11 diff --git a/scripts/database/upgrades/upgrade_v4.10.1_to_4.11.sql b/scripts/database/upgrades/upgrade_v4.10.1_to_4.11.sql deleted file mode 100644 index 7d55b4518f1..00000000000 --- a/scripts/database/upgrades/upgrade_v4.10.1_to_4.11.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE datasetversion ADD COLUMN archivalcopylocation text; -ALTER TABLE externaltool ADD COLUMN contenttype text NOT NULL default 'text/tab-separated-values'; diff --git a/scripts/database/upgrades/upgrade_v4.10.1_to_v4.11.sql b/scripts/database/upgrades/upgrade_v4.10.1_to_v4.11.sql new file mode 100644 index 00000000000..e5cd1809263 --- /dev/null +++ b/scripts/database/upgrades/upgrade_v4.10.1_to_v4.11.sql @@ -0,0 +1,8 @@ +ALTER TABLE datasetversion ADD COLUMN archivalcopylocation text; +ALTER TABLE externaltool ADD COLUMN contenttype text NOT NULL default 'text/tab-separated-values'; +TRUNCATE metric; +ALTER TABLE metric ADD COLUMN dayString text; +ALTER TABLE metric ADD COLUMN dataLocation text; +ALTER TABLE metric DROP CONSTRAINT "metric_metricname_key"; +ALTER TABLE metric RENAME COLUMN metricValue TO valueJson; +ALTER TABLE metric RENAME COLUMN metricName TO name; diff --git a/scripts/database/upgrades/upgrade_v4.8.6_to_v4.9.sql b/scripts/database/upgrades/upgrade_v4.8.6_to_v4.9.sql index 1158b2942a8..5335c9ae270 100644 --- a/scripts/database/upgrades/upgrade_v4.8.6_to_v4.9.sql +++ b/scripts/database/upgrades/upgrade_v4.8.6_to_v4.9.sql @@ -62,12 +62,11 @@ ALTER TABLE dvobject DROP COLUMN doiseparator; --Add new setting into content for shoulder INSERT INTO setting(name, content) -VALUES (':Shoulder', (SELECT substring(content, strpos(content,'/')+1) || '/' from setting where name = ':Authority')); +SELECT ':Shoulder', substring(content, strpos(content,'/')+1) || '/' from setting where name = ':Authority' and strpos(content,'/')>0; - --strip shoulder from authority setting - UPDATE setting - SET content=(SELECT substring(content from 0 for strpos(content,'/')) - FROM setting - WHERE name=':Authority' and strpos(content,'/')>0) where name=':Authority'; +--strip shoulder from authority setting if the shoulder exists +UPDATE setting +SET content= case when (strpos(content,'/')>0) then substring(content from 0 for strpos(content,'/')) +else content end where name=':Authority'; update datasetfieldtype set displayformat = '#VALUE' where name in ('alternativeURL', 'keywordVocabularyURI', 'topicClassVocabURI', 'publicationURL', 'producerURL', 'distributorURL'); diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index dcfcfa4a133..d140703f9f9 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -71,6 +71,7 
@@ defaultBody=Default Body filter=Filter # dataverse_header.xhtml +header.noscript=Please enable JavaScript in your browser. It is required to use most of the features of Dataverse. header.status.header=Status header.search.title=Search all dataverses... header.about=About @@ -1204,6 +1205,7 @@ dataset.publishBoth.tip=Once you publish this dataset it must remain published. dataset.unregistered.tip= This dataset is unregistered. We will attempt to register it before publishing. dataset.republish.tip=Are you sure you want to republish this dataset? dataset.selectVersionNumber=Select if this is a minor or major version update. +dataset.updateRelease=Update Current Version (will permanently overwrite the latest published version) dataset.majorRelease=Major Release dataset.minorRelease=Minor Release dataset.majorRelease.tip=Due to the nature of changes to the current draft this will be a major release ({0}) @@ -1573,9 +1575,9 @@ file.dataFilesTab.versions.description.deaccessionedReason=Deaccessioned Reason: file.dataFilesTab.versions.description.beAccessedAt=The dataset can now be accessed at: file.dataFilesTab.versions.viewDetails.btn=View Details file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}. -file.deleteDialog.tip=Are you sure you want to delete this dataset? You cannot undelete this dataset. +file.deleteDialog.tip=Are you sure you want to delete this dataset and all of its files? You cannot undelete this dataset. file.deleteDialog.header=Delete Dataset -file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? You cannot undelete this draft. +file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? Files will be reverted to the most recently published version. You cannot undelete this draft. file.deleteDraftDialog.header=Delete Draft Version file.deleteFileDialog.tip=The file(s) will be deleted after you click on the Save Changes button on the bottom of this page. file.deleteFileDialog.immediate=The file will be deleted after you click on the Delete button. @@ -2021,6 +2023,10 @@ dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy +datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. +datasetversion.update.archive.failure=Dataset Version Update succeeded, but the attempt to update the archival copy failed. +datasetversion.update.success=The published version of your Dataset has been updated. +datasetversion.update.archive.success=The published version of your Dataset, and its archival copy, have been updated. #ThemeWidgetFragment.java theme.validateTagline=Tagline must be at most 140 characters. @@ -2149,6 +2155,7 @@ access.api.revokeAccess.success.for.single.file=File Downloader access has been access.api.requestList.fileNotFound=Could not find datafile with id {0}. access.api.requestList.noKey=You must provide a key to get list of access requests for a file. access.api.requestList.noRequestsFound=There are no access requests for this file {0}. +access.api.exception.metadata.not.available.for.nontabular.file=This type of metadata is only available for tabular files. 
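Each of the keys added above is fetched at runtime with BundleUtil.getStringFromBundle(...), as the Java changes later in this patch show. As a quick sanity check that a new key is wired up, a plain ResourceBundle lookup behaves the same way; this is a sketch only, assuming the bundle's base name "Bundle" (matching src/main/java/Bundle.properties) is on the classpath:

    import java.util.ResourceBundle;

    public class BundleKeyCheck {
        public static void main(String[] args) {
            // Plain lookup; BundleUtil adds locale handling on top of this.
            ResourceBundle bundle = ResourceBundle.getBundle("Bundle");
            System.out.println(bundle.getString("header.noscript"));
            System.out.println(bundle.getString("datasetversion.update.success"));
            System.out.println(bundle.getString("access.api.exception.metadata.not.available.for.nontabular.file"));
        }
    }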
#permission permission.AddDataverse.label=AddDataverse diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java index 50f92f81fb5..4b1211590e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java @@ -23,6 +23,7 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; +import org.apache.commons.lang.StringEscapeUtils; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; @@ -149,7 +150,9 @@ public static String getMetadataFromDvObject(String identifier, Map getFunders() { } if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.contributorType)) { contributorType = subField.getDisplayValue(); - // TODO: Consider how this will work in French, Chinese, etc. - String funderString = "Funder"; - if (funderString.equals(contributorType)) { - addFunder = true; - } } } - if (addFunder) { + //SEK 02/12/2019 moved outside the loop to prevent the contributor type from carrying over to the next contributor + // TODO: Consider how this will work in French, Chinese, etc. + if ("Funder".equals(contributorType)) { retList.add(contributorName); } } @@ -1806,7 +1803,7 @@ public String getJsonLd() { if (!funderNames.isEmpty()) { JsonArrayBuilder funderArray = Json.createArrayBuilder(); for (String funderName : funderNames) { - JsonObjectBuilder funder = Json.createObjectBuilder(); + JsonObjectBuilder funder = NullSafeJsonBuilder.jsonObjectBuilder(); funder.add("@type", "Organization"); funder.add("name", funderName); funderArray.add(funder); diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index 9c06e3f2a43..bf447f120b7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -325,10 +325,10 @@ public void downloadCitationXML(FileMetadata fileMetadata, Dataset dataset, bool String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: - fileNameString = "attachment;filename=" + getFileNameDOI(citation.getPersistentId()) + ".xml"; + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".xml"; } else { // Datafile-level citation: - fileNameString = "attachment;filename=" + getFileNameDOI(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.ENDNOTE); + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.ENDNOTE); } response.setHeader("Content-Disposition", fileNameString); try { @@ -370,10 +370,10 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: - fileNameString = "attachment;filename=" + getFileNameDOI(citation.getPersistentId()) + ".ris"; + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".ris"; } else { // Datafile-level citation: - fileNameString = "attachment;filename=" + getFileNameDOI(citation.getPersistentId()) + "-" + 
FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.RIS); + fileNameString = "attachment;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.RIS); } response.setHeader("Content-Disposition", fileNameString); @@ -387,8 +387,8 @@ public void downloadCitationRIS(FileMetadata fileMetadata, Dataset dataset, bool } } - private String getFileNameDOI(GlobalId id) { - return "DOI:" + id.getAuthority() + "_" + id.getIdentifier(); + private String getFileNameFromPid(GlobalId id) { + return id.asString(); } public void downloadDatasetCitationBibtex(Dataset dataset) { @@ -421,10 +421,10 @@ public void downloadCitationBibtex(FileMetadata fileMetadata, Dataset dataset, b String fileNameString; if (fileMetadata == null || fileMetadata.getLabel() == null) { // Dataset-level citation: - fileNameString = "inline;filename=" + getFileNameDOI(citation.getPersistentId()) + ".bib"; + fileNameString = "inline;filename=" + getFileNameFromPid(citation.getPersistentId()) + ".bib"; } else { // Datafile-level citation: - fileNameString = "inline;filename=" + getFileNameDOI(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.BIBTEX); + fileNameString = "inline;filename=" + getFileNameFromPid(citation.getPersistentId()) + "-" + FileUtil.getCiteDataFileFilename(citation.getFileTitle(), FileUtil.FileCitationExtension.BIBTEX); } response.setHeader("Content-Disposition", fileNameString); diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 4a7b3ff68b5..52ff08c7aa8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -188,12 +188,18 @@ public String init() { this.guestbookResponse = this.guestbookResponseService.initGuestbookResponseForFragment(fileMetadata, session); - // this.getFileDownloadHelper().setGuestbookResponse(guestbookResponse); - + // Find external tools based on their type, the file content type, and whether + // ingest has created a derived file for that type + // Currently, tabular data files are the only type of derived file created, so + // isTabularData() works - true for tabular types where a .tab file has been + // created and false for other mimetypes + String contentType = file.getContentType(); + //For tabular data, indicate successful ingest by returning a contentType for the derived .tab file if (file.isTabularData()) { - configureTools = externalToolService.findByType(ExternalTool.Type.CONFIGURE); - exploreTools = externalToolService.findByType(ExternalTool.Type.EXPLORE); + contentType=DataFileServiceBean.MIME_TYPE_TSV_ALT; } + configureTools = externalToolService.findByType(ExternalTool.Type.CONFIGURE, contentType); + exploreTools = externalToolService.findByType(ExternalTool.Type.EXPLORE, contentType); } else { @@ -837,7 +843,7 @@ public String getPublicDownloadUrl() { e.printStackTrace(); } - return FileUtil.getPublicDownloadUrl(systemConfig.getDataverseSiteUrl(), persistentId); + return FileUtil.getPublicDownloadUrl(systemConfig.getDataverseSiteUrl(), persistentId, fileId); } public List getConfigureTools() { diff --git a/src/main/java/edu/harvard/iq/dataverse/Metric.java b/src/main/java/edu/harvard/iq/dataverse/Metric.java index ebcde546002..b1b1a276ceb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Metric.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/Metric.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import java.io.IOException; import java.io.Serializable; import java.sql.Timestamp; import java.util.Date; @@ -17,7 +18,6 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; -import javax.persistence.Transient; /** * @@ -34,34 +34,36 @@ public class Metric implements Serializable { @Column(nullable = false) private int id; - @Column(nullable = false, unique = true) - private String metricName; + @Column(nullable = false) + private String name; @Column(columnDefinition = "TEXT", nullable = false) - private String metricValue; + private String valueJson; + + @Column(columnDefinition = "TEXT", nullable = true) + private String dataLocation; + + @Column(columnDefinition = "TEXT", nullable = true) + private String dayString; @Temporal(value = TemporalType.TIMESTAMP) @Column(nullable = false) private Date lastCalledDate; - @Transient - private static final String separator = "_"; - @Deprecated public Metric() { } //For monthly and day metrics - public Metric(String metricTitle, String dayString, String metricValue) { - this.metricName = generateMetricName(metricTitle, dayString); - this.metricValue = metricValue; - this.lastCalledDate = new Timestamp(new Date().getTime()); - } - - //For all-time metrics - public Metric(String metricName, String metricValue) { - this.metricName = metricName; - this.metricValue = metricValue; + + public Metric(String name, String dayString, String dataLocation, String value) throws IOException { + if(null == name || null == value) { + throw new IOException("A created metric must have a metricName and metricValue"); + } + this.name = name; + this.valueJson = value; + this.dataLocation = dataLocation; + this.dayString = dayString; this.lastCalledDate = new Timestamp(new Date().getTime()); } @@ -79,30 +81,30 @@ public void setId(int id) { this.id = id; } - public String getMetricDateString() { - return metricName.substring(metricName.indexOf(separator) + 1); + public String getDateString() { + return dayString; } - public String getMetricTitle() { - int monthSeperatorIndex = metricName.indexOf(separator); - if (monthSeperatorIndex >= 0) { - return metricName.substring(0, monthSeperatorIndex); - } - return metricName; + public String getDataLocation() { + return dataLocation; + } + + public String getName() { + return name; } /** - * @return the metricValue + * @return the valueJson */ - public String getMetricValue() { - return metricValue; + public String getValueJson() { + return valueJson; } /** - * @param metricValue the metricValue to set + * @param metricValue the valueJson to set */ - public void setMetricValue(String metricValue) { - this.metricValue = metricValue; + public void setValueJson(String metricValue) { + this.valueJson = metricValue; } /** @@ -119,14 +121,4 @@ public void setLastCalledDate(Date calledDate) { this.lastCalledDate = calledDate; } - public static String generateMetricName(String title, String dateString) { - if (title.contains(separator) || dateString.contains(separator)) { - throw new IllegalArgumentException("Metric title or date contains character reserved for seperator"); - } - if (separator.contains("-")) { - throw new IllegalArgumentException("Metric seperator cannot be '-', value reserved for dates"); - } - return title + separator + dateString; - } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 332f59dcea9..492e0ffb1fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -389,12 +389,14 @@ public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId, @Qu DataFile dataFile = null; - //httpHeaders.add("Content-disposition", "attachment; filename=\"dataverse_files.zip\""); - //httpHeaders.add("Content-Type", "application/zip; name=\"dataverse_files.zip\""); - response.setHeader("Content-disposition", "attachment; filename=\"dataverse_files.zip\""); - dataFile = findDataFileOrDieWrapper(fileId); + if (!dataFile.isTabularData()) { + throw new BadRequestException("tabular data required"); + } + + response.setHeader("Content-disposition", "attachment; filename=\"dataverse_files.zip\""); + String fileName = dataFile.getFileMetadata().getLabel().replaceAll("\\.tab$", "-ddi.xml"); response.setHeader("Content-disposition", "attachment; filename=\""+fileName+"\""); response.setHeader("Content-Type", "application/xml; name=\""+fileName+"\""); @@ -477,7 +479,7 @@ public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") if (df.isTabularData()) { dInfo.addServiceAvailable(new OptionalAccessService("preprocessed", "application/json", "format=prep", "Preprocessed data in JSON")); } else { - throw new ServiceUnavailableException("Preprocessed Content Metadata requested on a non-tabular data file."); + throw new BadRequestException("tabular data required"); } DownloadInstance downloadInstance = new DownloadInstance(dInfo); if (downloadInstance.checkIfServiceSupportedAndSetConverter("format", "prep")) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 4f868d90ae7..7a314d4c6dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -38,10 +38,12 @@ import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; +import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand; @@ -76,6 +78,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.ArchiverUtil; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -167,6 +170,9 @@ public class Datasets extends AbstractApiBean { @EJB S3PackageImporter s3PackageImporter; + @EJB + SettingsServiceBean settingsService; + /** * Used to consolidate the way we 
parse and handle dataset versions. * @param @@ -775,11 +781,12 @@ public Response publishDataseUsingGetDeprecated( @PathParam("id") String id, @Qu public Response publishDataset(@PathParam("id") String id, @QueryParam("type") String type) { try { if (type == null) { - return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major' or 'minor')."); + return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major', 'minor', or 'updatecurrent')."); } - + boolean updateCurrent=false; + AuthenticatedUser user = findAuthenticatedUserOrDie(); type = type.toLowerCase(); - boolean isMinor; + boolean isMinor=false; switch (type) { case "minor": isMinor = true; @@ -787,16 +794,80 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S case "major": isMinor = false; break; + case "updatecurrent": + if(user.isSuperuser()) { + updateCurrent=true; + } else { + return error(Response.Status.FORBIDDEN, "Only superusers can update the current version"); + } + break; default: - return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major' or 'minor'."); + return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'."); } Dataset ds = findDatasetOrDie(id); + if (updateCurrent) { + /* + * Note: The code here mirrors that in the + * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes + * to the core logic (i.e. beyond updating the messaging about results) should + * be applied to the code there as well. + */ + String errorMsg = null; + String successMsg = null; + try { + CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user)); + ds = commandEngine.submit(cmd); + successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success"); + + // If configured, update archive copy as well + String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString()); + DatasetVersion updateVersion = ds.getLatestVersion(); + AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion); + if (archiveCommand != null) { + // Delete the record of any existing copy since it is now out of date/incorrect + updateVersion.setArchivalCopyLocation(null); + /* + * Then try to generate and submit an archival copy. Note that running this + * command within the CuratePublishedDatasetVersionCommand was causing an error: + * "The attribute [id] of class + * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary + * key column in the database. Updates are not allowed." To avoid that, and to + * simplify reporting back to the GUI whether this optional step succeeded, I've + * pulled this out as a separate submit(). 
+ */ + try { + updateVersion = commandEngine.submit(archiveCommand); + if (updateVersion.getArchivalCopyLocation() != null) { + successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success"); + } else { + successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure"); + } + } catch (CommandException ex) { + successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString(); + logger.severe(ex.getMessage()); + } + } + } catch (CommandException ex) { + errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString(); + logger.severe(ex.getMessage()); + } + if (errorMsg != null) { + return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg); + } else { + return Response.ok(Json.createObjectBuilder() + .add("status", STATUS_OK) + .add("status_details", successMsg) + .add("data", json(ds)).build()) + .type(MediaType.APPLICATION_JSON) + .build(); + } + } else { PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, - createDataverseRequest(findAuthenticatedUserOrDie()), + createDataverseRequest(user), isMinor)); return res.isCompleted() ? ok(json(res.getDataset())) : accepted(json(res.getDataset())); - + } } catch (WrappedResponse ex) { return ex.getResponse(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java index 6b77f7fa32c..2694252091f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java @@ -2,13 +2,17 @@ import edu.harvard.iq.dataverse.Metric; import edu.harvard.iq.dataverse.metrics.MetricsUtil; +import java.util.Arrays; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import javax.ws.rs.core.UriInfo; /** * API endpoints for various metrics. 
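Before the endpoint-by-endpoint rework below, it helps to see the new surface from the client side: dataset metrics gain an optional dataLocation query parameter, and every endpoint now rejects query parameters it does not recognize. A sketch (the host is hypothetical, and the accepted dataLocation values are whatever MetricsUtil.validateDataLocationStringType permits, which this diff does not show):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class MetricsApiSketch {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            String base = "https://demo.dataverse.org/api/info/metrics"; // hypothetical host
            String[] urls = {
                base + "/datasets/toMonth/2019-02?dataLocation=remote", // new optional parameter
                base + "/datasets/bySubject/toMonth/2019-02",           // new monthly bySubject path
                base + "/files?foo=bar"                                 // unrecognized parameter, expect 400
            };
            for (String url : urls) {
                HttpResponse<String> resp = client.send(
                        HttpRequest.newBuilder(URI.create(url)).build(),
                        HttpResponse.BodyHandlers.ofString());
                System.out.println(resp.statusCode() + " " + resp.body());
            }
        }
    }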
@@ -22,60 +26,75 @@ */ @Path("info/metrics") public class Metrics extends AbstractApiBean { - /** Dataverses */ + /** Dataverses */ + @GET @Path("dataverses") - public Response getDataversesAllTime() { - return getDataversesToMonth(MetricsUtil.getCurrentMonth()); + public Response getDataversesAllTime(@Context UriInfo uriInfo) { + return getDataversesToMonth(uriInfo, MetricsUtil.getCurrentMonth()); } @Deprecated //for better path @GET @Path("dataverses/toMonth") - public Response getDataversesToMonthCurrent() { - return getDataversesToMonth(MetricsUtil.getCurrentMonth()); + public Response getDataversesToMonthCurrent(@Context UriInfo uriInfo) { + return getDataversesToMonth(uriInfo, MetricsUtil.getCurrentMonth()); } @GET @Path("dataverses/toMonth/{yyyymm}") - public Response getDataversesToMonth(@PathParam("yyyymm") String yyyymm) { + public Response getDataversesToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") String yyyymm) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "dataversesToMonth"; try { String sanitizedyyyymm = MetricsUtil.sanitizeYearMonthUserInput(yyyymm); - String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm); + String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm, null); if (null == jsonString) { //run query and save Long count = metricsSvc.dataversesToMonth(sanitizedyyyymm); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, sanitizedyyyymm, jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, sanitizedyyyymm, null, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + //TODO: Eventually the catch in each endpoint should be more specific + // and more general errors should be logged. 
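// The cache-or-compute shape above repeats in every endpoint of this class:
// consult returnUnexpiredCache...(name, scope, dataLocation) first, run the
// real query only on a miss, persist the result as a new
// Metric(name, dayString, dataLocation, json) row, and serve the stored JSON
// either way. dataLocation is null for metrics that are not broken out by
// data location.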
} catch (Exception ex) { return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); } } - + @GET @Path("dataverses/pastDays/{days}") - public Response getDataversesPastDays(@PathParam("days") int days) { + public Response getDataversesPastDays(@Context UriInfo uriInfo, @PathParam("days") int days) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "dataversesPastDays"; if(days < 1) { return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); } try { - String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days)); + String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), null); if (null == jsonString) { //run query and save Long count = metricsSvc.dataversesPastDays(days); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, String.valueOf(days), jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, String.valueOf(days), null, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); @@ -87,16 +106,22 @@ public Response getDataversesPastDays(@PathParam("days") int days) { @GET @Path("dataverses/byCategory") - public Response getDataversesByCategory() { + public Response getDataversesByCategory(@Context UriInfo uriInfo) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "dataversesByCategory"; try { - String jsonArrayString = metricsSvc.returnUnexpiredCacheAllTime(metricName); + String jsonArrayString = metricsSvc.returnUnexpiredCacheAllTime(metricName, null); if (null == jsonArrayString) { //run query and save JsonArrayBuilder jsonArrayBuilder = MetricsUtil.dataversesByCategoryToJson(metricsSvc.dataversesByCategory()); jsonArrayString = jsonArrayBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, jsonArrayString), false); + metricsSvc.save(new Metric(metricName, null, null, jsonArrayString)); } return allowCors(ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString))); @@ -107,16 +132,22 @@ public Response getDataversesByCategory() { @GET @Path("dataverses/bySubject") - public Response getDataversesBySubject() { + public Response getDataversesBySubject(@Context UriInfo uriInfo) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "dataversesBySubject"; try { - String jsonArrayString = metricsSvc.returnUnexpiredCacheAllTime(metricName); + String jsonArrayString = metricsSvc.returnUnexpiredCacheAllTime(metricName, null); if (null == jsonArrayString) { //run query and save JsonArrayBuilder jsonArrayBuilder = MetricsUtil.dataversesBySubjectToJson(metricsSvc.dataversesBySubject()); jsonArrayString = jsonArrayBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, jsonArrayString), false); + metricsSvc.save(new Metric(metricName, null, null, jsonArrayString)); } return allowCors(ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString))); @@ -129,31 +160,38 @@ public Response getDataversesBySubject() { @GET @Path("datasets") - public 
Response getDatasetsAllTime() { - return getDatasetsToMonth(MetricsUtil.getCurrentMonth()); + public Response getDatasetsAllTime(@Context UriInfo uriInfo, @QueryParam("dataLocation") String dataLocation) { + return getDatasetsToMonth(uriInfo, MetricsUtil.getCurrentMonth(), dataLocation); } @Deprecated //for better path @GET @Path("datasets/toMonth") - public Response getDatasetsToMonthCurrent() { - return getDatasetsToMonth(MetricsUtil.getCurrentMonth()); + public Response getDatasetsToMonthCurrent(@Context UriInfo uriInfo, @QueryParam("dataLocation") String dataLocation) { + return getDatasetsToMonth(uriInfo, MetricsUtil.getCurrentMonth(), dataLocation); } @GET @Path("datasets/toMonth/{yyyymm}") - public Response getDatasetsToMonth(@PathParam("yyyymm") String yyyymm) { + public Response getDatasetsToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") String yyyymm, @QueryParam("dataLocation") String dataLocation) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{"dataLocation"}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "datasetsToMonth"; try { String sanitizedyyyymm = MetricsUtil.sanitizeYearMonthUserInput(yyyymm); - String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm); + String validDataLocation = MetricsUtil.validateDataLocationStringType(dataLocation); + String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm, validDataLocation); if (null == jsonString) { //run query and save - Long count = metricsSvc.datasetsToMonth(sanitizedyyyymm); + Long count = metricsSvc.datasetsToMonth(sanitizedyyyymm, validDataLocation); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, sanitizedyyyymm, jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, sanitizedyyyymm, validDataLocation, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); @@ -165,20 +203,27 @@ public Response getDatasetsToMonth(@PathParam("yyyymm") String yyyymm) { @GET @Path("datasets/pastDays/{days}") - public Response getDatasetsPastDays(@PathParam("days") int days) { + public Response getDatasetsPastDays(@Context UriInfo uriInfo, @PathParam("days") int days, @QueryParam("dataLocation") String dataLocation) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{"dataLocation"}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "datasetsPastDays"; if(days < 1) { return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); } try { - String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days)); + String validDataLocation = MetricsUtil.validateDataLocationStringType(dataLocation); + String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), validDataLocation); if (null == jsonString) { //run query and save - Long count = metricsSvc.datasetsPastDays(days); + Long count = metricsSvc.datasetsPastDays(days, validDataLocation); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, String.valueOf(days), jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, 
String.valueOf(days), validDataLocation, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); @@ -190,16 +235,30 @@ public Response getDatasetsPastDays(@PathParam("days") int days) { @GET @Path("datasets/bySubject") - public Response getDatasetsBySubject() { - String metricName = "datasetsBySubject"; + public Response getDatasetsBySubject(@Context UriInfo uriInfo, @QueryParam("dataLocation") String dataLocation) { + return getDatasetsBySubjectToMonth(uriInfo, MetricsUtil.getCurrentMonth(), dataLocation); + } + + @GET + @Path("datasets/bySubject/toMonth/{yyyymm}") + public Response getDatasetsBySubjectToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") String yyyymm, @QueryParam("dataLocation") String dataLocation) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{"dataLocation"}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + + String metricName = "datasetsBySubjectToMonth"; try { - String jsonArrayString = metricsSvc.returnUnexpiredCacheAllTime(metricName); - + String sanitizedyyyymm = MetricsUtil.sanitizeYearMonthUserInput(yyyymm); + String validDataLocation = MetricsUtil.validateDataLocationStringType(dataLocation); + String jsonArrayString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm, validDataLocation); + if (null == jsonArrayString) { //run query and save - JsonArrayBuilder jsonArrayBuilder = MetricsUtil.datasetsBySubjectToJson(metricsSvc.datasetsBySubject()); + JsonArrayBuilder jsonArrayBuilder = MetricsUtil.datasetsBySubjectToJson(metricsSvc.datasetsBySubjectToMonth(sanitizedyyyymm, validDataLocation)); jsonArrayString = jsonArrayBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, jsonArrayString), false); + metricsSvc.save(new Metric(metricName, sanitizedyyyymm, validDataLocation, jsonArrayString)); } return allowCors(ok(MetricsUtil.stringToJsonArrayBuilder(jsonArrayString))); @@ -207,35 +266,41 @@ public Response getDatasetsBySubject() { return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); } } - + /** Files */ @GET @Path("files") - public Response getFilesAllTime() { - return getFilesToMonth(MetricsUtil.getCurrentMonth()); + public Response getFilesAllTime(@Context UriInfo uriInfo) { + return getFilesToMonth(uriInfo, MetricsUtil.getCurrentMonth()); } @Deprecated //for better path @GET @Path("files/toMonth") - public Response getFilesToMonthCurrent() { - return getFilesToMonth(MetricsUtil.getCurrentMonth()); + public Response getFilesToMonthCurrent(@Context UriInfo uriInfo) { + return getFilesToMonth(uriInfo, MetricsUtil.getCurrentMonth()); } @GET @Path("files/toMonth/{yyyymm}") - public Response getFilesToMonth(@PathParam("yyyymm") String yyyymm) { + public Response getFilesToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") String yyyymm) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "filesToMonth"; try { String sanitizedyyyymm = MetricsUtil.sanitizeYearMonthUserInput(yyyymm); - String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm); + String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm, null); if (null == jsonString) { //run query and save Long count = metricsSvc.filesToMonth(sanitizedyyyymm); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = 
jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, sanitizedyyyymm, jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, sanitizedyyyymm, null, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); @@ -246,20 +311,26 @@ public Response getFilesToMonth(@PathParam("yyyymm") String yyyymm) { @GET @Path("files/pastDays/{days}") - public Response getFilesPastDays(@PathParam("days") int days) { + public Response getFilesPastDays(@Context UriInfo uriInfo, @PathParam("days") int days) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "filesPastDays"; if(days < 1) { return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); } try { - String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days)); + String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), null); if (null == jsonString) { //run query and save Long count = metricsSvc.filesPastDays(days); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, String.valueOf(days), jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, String.valueOf(days), null, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); @@ -273,34 +344,43 @@ public Response getFilesPastDays(@PathParam("days") int days) { @GET @Path("downloads") - public Response getDownloadsAllTime() { - return getDownloadsToMonth(MetricsUtil.getCurrentMonth()); + public Response getDownloadsAllTime(@Context UriInfo uriInfo) { + return getDownloadsToMonth(uriInfo, MetricsUtil.getCurrentMonth()); } @Deprecated //for better path @GET @Path("downloads/toMonth") - public Response getDownloadsToMonthCurrent() { - return getDownloadsToMonth(MetricsUtil.getCurrentMonth()); + public Response getDownloadsToMonthCurrent(@Context UriInfo uriInfo) { + return getDownloadsToMonth(uriInfo, MetricsUtil.getCurrentMonth()); } @GET @Path("downloads/toMonth/{yyyymm}") - public Response getDownloadsToMonth(@PathParam("yyyymm") String yyyymm) { + public Response getDownloadsToMonth(@Context UriInfo uriInfo, @PathParam("yyyymm") String yyyymm) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "downloadsToMonth"; - + try { + String sanitizedyyyymm = MetricsUtil.sanitizeYearMonthUserInput(yyyymm); - String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm); + String jsonString = metricsSvc.returnUnexpiredCacheMonthly(metricName, sanitizedyyyymm, null); if (null == jsonString) { //run query and save Long count = metricsSvc.downloadsToMonth(sanitizedyyyymm); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, sanitizedyyyymm, jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, sanitizedyyyymm, null, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, 
ia.getLocalizedMessage())); } catch (Exception ex) { return allowCors(error(BAD_REQUEST, ex.getLocalizedMessage())); } @@ -308,20 +388,26 @@ public Response getDownloadsToMonth(@PathParam("yyyymm") String yyyymm) { @GET @Path("downloads/pastDays/{days}") - public Response getDownloadsPastDays(@PathParam("days") int days) { + public Response getDownloadsPastDays(@Context UriInfo uriInfo, @PathParam("days") int days) { + try { + errorIfUnrecongizedQueryParamPassed(uriInfo, new String[]{""}); + } catch (IllegalArgumentException ia) { + return allowCors(error(BAD_REQUEST, ia.getLocalizedMessage())); + } + String metricName = "downloadsPastDays"; if(days < 1) { return allowCors(error(BAD_REQUEST, "Invalid parameter for number of days.")); } try { - String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days)); + String jsonString = metricsSvc.returnUnexpiredCacheDayBased(metricName, String.valueOf(days), null); if (null == jsonString) { //run query and save Long count = metricsSvc.downloadsPastDays(days); JsonObjectBuilder jsonObjBuilder = MetricsUtil.countToJson(count); jsonString = jsonObjBuilder.build().toString(); - metricsSvc.save(new Metric(metricName, String.valueOf(days), jsonString), true); //if not using cache save new + metricsSvc.save(new Metric(metricName, String.valueOf(days), null, jsonString)); } return allowCors(ok(MetricsUtil.stringToJsonObjectBuilder(jsonString))); @@ -331,4 +417,13 @@ public Response getDownloadsPastDays(@PathParam("days") int days) { } } + private void errorIfUnrecongizedQueryParamPassed(UriInfo uriDetails, String[] allowedQueryParams) throws IllegalArgumentException { + for(String theKey : uriDetails.getQueryParameters().keySet()) { + if(!Arrays.stream(allowedQueryParams).anyMatch(theKey::equals)) { + throw new IllegalArgumentException("queryParameter " + theKey + " not supported for this endpoint"); + } + } + + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/BadRequestExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/BadRequestExceptionHandler.java new file mode 100644 index 00000000000..f6412a871ac --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/BadRequestExceptionHandler.java @@ -0,0 +1,49 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.api.errorhandlers; + +import edu.harvard.iq.dataverse.util.BundleUtil; +import javax.json.Json; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.BadRequestException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; + +/** + * + * @author skraffmi + */ +@Provider +public class BadRequestExceptionHandler implements ExceptionMapper<BadRequestException> { + + @Context + HttpServletRequest request; + + @Override + public Response toResponse(BadRequestException ex) { + System.out.print( ex.getMessage()); + String uri = request.getRequestURI(); + String exMessage = ex.getMessage(); + String outputMessage; + if (exMessage != null && exMessage.toLowerCase().startsWith("tabular data required")) { + outputMessage = BundleUtil.getStringFromBundle("access.api.exception.metadata.not.available.for.nontabular.file"); + } else { + outputMessage = "Bad Request. The API request cannot be completed with the parameters supplied. 
Please check your code for typos, or consult our API guide at http://guides.dataverse.org."; + } + return Response.status(400) + .entity( Json.createObjectBuilder() + .add("status", "ERROR") + .add("code", 400) + .add("message", "'" + uri + "' " + outputMessage) + .build()) + .type("application/json").build(); + + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java new file mode 100644 index 00000000000..55fc2334bd1 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -0,0 +1,184 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.export.ExportException; +import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.ArchiverUtil; +import edu.harvard.iq.dataverse.workflows.WorkflowComment; + +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * + * @author qqmyers + * + * Adapted from UpdateDatasetVersionCommand + */ +@RequiredPermissions(Permission.EditDataset) +public class CuratePublishedDatasetVersionCommand extends AbstractDatasetCommand<Dataset> { + + private static final Logger logger = Logger.getLogger(CuratePublishedDatasetVersionCommand.class.getCanonicalName()); + final private boolean validateLenient = true; + + public CuratePublishedDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) { + super(aRequest, theDataset); + } + + public boolean isValidateLenient() { + return validateLenient; + } + + @Override + public Dataset execute(CommandContext ctxt) throws CommandException { + if (!getUser().isSuperuser()) { + throw new IllegalCommandException("Only superusers can curate published dataset versions", this); + } + + ctxt.permissions().checkEditDatasetLock(getDataset(), getRequest(), this); + // Invariant: Dataset has no locks preventing the update + DatasetVersion updateVersion = getDataset().getLatestVersionForCopy(); + + // Copy metadata from draft version to latest published version + updateVersion.setDatasetFields(getDataset().getEditVersion().initDatasetFields()); + + validateOrDie(updateVersion, isValidateLenient()); + + // final DatasetVersion editVersion = getDataset().getEditVersion(); + tidyUpFields(updateVersion); + + // Merge the new version into our JPA context + ctxt.em().merge(updateVersion); + + + TermsOfUseAndAccess oldTerms = updateVersion.getTermsOfUseAndAccess(); + TermsOfUseAndAccess newTerms = getDataset().getEditVersion().getTermsOfUseAndAccess(); + newTerms.setDatasetVersion(updateVersion); + updateVersion.setTermsOfUseAndAccess(newTerms); + //Put old terms on version that will be deleted.... 
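+        // (the published version keeps the curated terms; the old terms ride out on the
+        // draft, which is deleted at the end of this command)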
+ getDataset().getEditVersion().setTermsOfUseAndAccess(oldTerms); + + List newComments = getDataset().getEditVersion().getWorkflowComments(); + if (newComments!=null && newComments.size() >0) { + for(WorkflowComment wfc: newComments) { + wfc.setDatasetVersion(updateVersion); + } + updateVersion.getWorkflowComments().addAll(newComments); + } + + + // we have to merge to update the database but not flush because + // we don't want to create two draft versions! + Dataset tempDataset = ctxt.em().merge(getDataset()); + + // Look for file metadata changes and update published metadata if needed + for (DataFile dataFile : tempDataset.getFiles()) { + List fmdList = dataFile.getFileMetadatas(); + FileMetadata draftFmd = dataFile.getLatestFileMetadata(); + FileMetadata publishedFmd = null; + for (FileMetadata fmd : fmdList) { + if (fmd.getDatasetVersion().equals(updateVersion)) { + publishedFmd = fmd; + break; + } + } + boolean metadataUpdated = false; + if (draftFmd != null && publishedFmd != null) { + if (!draftFmd.getLabel().equals(publishedFmd.getLabel())) { + publishedFmd.setLabel(draftFmd.getLabel()); + metadataUpdated = true; + } + String draftDesc = draftFmd.getDescription(); + String pubDesc = publishedFmd.getDescription(); + if ((draftDesc!=null && (!draftDesc.equals(pubDesc))) || (draftDesc==null && pubDesc!=null)) { + publishedFmd.setDescription(draftDesc); + metadataUpdated = true; + } + if (!draftFmd.getCategories().equals(publishedFmd.getCategories())) { + publishedFmd.setCategories(draftFmd.getCategories()); + metadataUpdated = true; + } + if (!draftFmd.isRestricted() == publishedFmd.isRestricted()) { + publishedFmd.setRestricted(draftFmd.isRestricted()); + metadataUpdated = true; + } + String draftProv = draftFmd.getProvFreeForm(); + String pubProv = publishedFmd.getProvFreeForm(); + if ((draftProv != null && (!draftProv.equals(pubProv)))||(draftProv==null && pubProv!=null)) { + publishedFmd.setProvFreeForm(draftProv); + metadataUpdated = true; + } + + } else { + throw new IllegalCommandException("Cannot change files in the dataset", this); + } + if (metadataUpdated) { + dataFile.setModificationTime(getTimestamp()); + } + // Now delete filemetadata from draft version before deleting the version itself + FileMetadata mergedFmd = ctxt.em().merge(draftFmd); + ctxt.em().remove(mergedFmd); + // including removing metadata from the list on the datafile + draftFmd.getDataFile().getFileMetadatas().remove(draftFmd); + tempDataset.getEditVersion().getFileMetadatas().remove(draftFmd); + // And any references in the list held by categories + for (DataFileCategory cat : tempDataset.getCategories()) { + cat.getFileMetadatas().remove(draftFmd); + } + } + + // Update modification time on the published version and the dataset + updateVersion.setLastUpdateTime(getTimestamp()); + tempDataset.setModificationTime(getTimestamp()); + + Dataset savedDataset = ctxt.em().merge(tempDataset); + + // Flush before calling DeleteDatasetVersion which calls + // PrivateUrlServiceBean.getPrivateUrlFromDatasetId() that will query the DB and + // fail if our changes aren't there + ctxt.em().flush(); + + // Now delete draft version + DeleteDatasetVersionCommand cmd; + + cmd = new DeleteDatasetVersionCommand(getRequest(), savedDataset); + ctxt.engine().submit(cmd); + // Running the command above reindexes the dataset, so we don't need to do it + // again in here. 
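+        // What remains is bookkeeping against the published version: refresh the PID
+        // metadata, regenerate the exported metadata files, and record the dataset user.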
+ + // And update metadata at PID provider + ctxt.engine().submit( + new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest())); + + //And the exported metadata files + try { + ExportService instance = ExportService.getInstance(ctxt.settings()); + instance.exportAllFormats(getDataset()); + } catch (ExportException ex) { + // Just like with indexing, a failure to export is not a fatal condition. + logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage()); + } + + + // Update so that getDataset() in updateDatasetUser will get the up-to-date copy + // (with no draft version) + setDataset(savedDataset); + updateDatasetUser(ctxt); + + + + + return savedDataset; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java index c4d53466f82..d7cbd12eb25 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java @@ -27,7 +27,7 @@ public class DeleteDatasetVersionCommand extends AbstractVoidCommand { private static final Logger logger = Logger.getLogger(DeleteDatasetVersionCommand.class.getCanonicalName()); - private final Dataset doomed; + private Dataset doomed; public DeleteDatasetVersionCommand(DataverseRequest aRequest, Dataset dataset) { super(aRequest, dataset); @@ -37,7 +37,7 @@ public DeleteDatasetVersionCommand(DataverseRequest aRequest, Dataset dataset) { @Override protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.permissions().checkEditDatasetLock(doomed, getRequest(), this); - + doomed = ctxt.em().find(Dataset.class, doomed.getId()); // if you are deleting a dataset that only has 1 draft, we are actually destroying the dataset if (doomed.getVersions().size() == 1) { ctxt.engine().submit(new DestroyDatasetCommand(doomed, getRequest())); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java index e36fe06b863..7e37241563c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.sql.Timestamp; import java.util.Collections; @@ -52,12 +53,29 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { target.setGlobalIdCreateTime(new Timestamp(new Date().getTime())); ctxt.em().merge(target); ctxt.em().flush(); + // When updating, we want to traverse through files even if the dataset itself + // didn't need updating. 
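+        // Decision table for the file loop below (illustrative; assumes file PIDs are enabled):
+        //   file already has an identifier                   -> publicize it
+        //   no identifier, :Protocol matches the dataset's   -> publicize (a DEPENDENT PID can be minted)
+        //   no identifier, DataFilePIDFormat is INDEPENDENT  -> publicize
+        //   otherwise                                        -> skip this file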
+ String currentGlobalIdProtocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, ""); + String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); + boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabled(); + // We will skip trying to update the global identifiers for datafiles if they + // aren't being used. + // If they are, we need to assure that there's an existing PID or, as when + // creating PIDs, that the protocol matches that of the dataset DOI if + // we're going to create a DEPENDENT file PID. + String protocol = target.getProtocol(); for (DataFile df : target.getFiles()) { - doiRetString = idServiceBean.publicizeIdentifier(df); - if (doiRetString) { - df.setGlobalIdCreateTime(new Timestamp(new Date().getTime())); - ctxt.em().merge(df); - ctxt.em().flush(); + if (isFilePIDsEnabled && // using file PIDs and + (!(df.getIdentifier() == null || df.getIdentifier().isEmpty()) || // identifier exists, or + currentGlobalIdProtocol.equals(protocol) || // right protocol to create dependent DOIs, or + dataFilePIDFormat.equals("INDEPENDENT"))// or independent. TODO(pm) - check authority too + ) { + doiRetString = idServiceBean.publicizeIdentifier(df); + if (doiRetString) { + df.setGlobalIdCreateTime(new Timestamp(new Date().getTime())); + ctxt.em().merge(df); + ctxt.em().flush(); + } } } } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java index eecb2126d4f..05c79731d40 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java @@ -48,12 +48,25 @@ public List findAll() { * @return A list of tools or an empty list. */ public List findByType(Type type) { + return findByType(type, null); + } + + /** + * @param type + * @param contentType - mimetype + * @return A list of tools or an empty list. + */ + public List findByType(Type type, String contentType) { List externalTools = new ArrayList<>(); //If contentType==null, get all tools of the given ExternalTool.Type - TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ExternalTool AS o WHERE o.type = :type", ExternalTool.class); + TypedQuery typedQuery = contentType != null ? 
em.createQuery("SELECT OBJECT(o) FROM ExternalTool AS o WHERE o.type = :type AND o.contentType = :contentType", ExternalTool.class): + em.createQuery("SELECT OBJECT(o) FROM ExternalTool AS o WHERE o.type = :type", ExternalTool.class); typedQuery.setParameter("type", type); + if(contentType!=null) { + typedQuery.setParameter("contentType", contentType); + } List toolsFromQuery = typedQuery.getResultList(); if (toolsFromQuery != null) { externalTools = toolsFromQuery; @@ -165,4 +178,6 @@ private static String getOptionalTopLevelField(JsonObject jsonObject, String key } + + } diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 10f9f7440f2..2b1caa22fe9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -1,18 +1,22 @@ package edu.harvard.iq.dataverse.metrics; import edu.harvard.iq.dataverse.Metric; +import static edu.harvard.iq.dataverse.metrics.MetricsUtil.*; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.Serializable; +import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; import java.util.Date; import java.util.List; +import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; import javax.ejb.Stateless; import javax.persistence.EntityManager; +import javax.persistence.NoResultException; import javax.persistence.NonUniqueResultException; import javax.persistence.PersistenceContext; import javax.persistence.Query; @@ -29,12 +33,13 @@ public class MetricsServiceBean implements Serializable { @EJB SystemConfig systemConfig; + /** Dataverses */ /** * @param yyyymm Month in YYYY-MM format. 
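+     *               e.g. "2018-04" counts every dataverse published through the end of April 2018 (per the query below).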
*/ - public long dataversesToMonth(String yyyymm) throws Exception { + public long dataversesToMonth(String yyyymm) throws Exception { Query query = em.createNativeQuery("" + "select count(dvobject.id)\n" + "from dataverse\n" @@ -42,8 +47,8 @@ public long dataversesToMonth(String yyyymm) throws Exception { + "where dvobject.publicationdate is not null\n" + "and date_trunc('month', publicationdate) <= to_date('" + yyyymm + "','YYYY-MM');" ); - logger.fine("query: " + query); - + logger.log(Level.FINE, "Metric query: {0}", query); + return (long) query.getSingleResult(); } @@ -55,9 +60,8 @@ public long dataversesPastDays(int days) throws Exception { + "where dvobject.publicationdate is not null\n" + "and publicationdate > current_date - interval '"+days+"' day;\n" ); - - logger.fine("query: " + query); - + logger.log(Level.FINE, "Metric query: {0}", query); + return (long) query.getSingleResult(); } @@ -70,27 +74,106 @@ public List dataversesByCategory() throws Exception { + "group by dataversetype\n" + "order by count desc;" ); - - logger.fine("query: " + query); + logger.log(Level.FINE, "Metric query: {0}", query); + return query.getResultList(); } public List dataversesBySubject() { Query query = em.createNativeQuery("" + "select cvv.strvalue, count(dataverse_id) from dataversesubjects\n" - + "join controlledvocabularyvalue cvv ON cvv.id = controlledvocabularyvalue_id\n" + + "join controlledvocabularyvalue cvv ON cvv.id = controlledvocabularyvalue_id \n" + //+ "where dataverse_id != ( select id from dvobject where owner_id is null) \n" //removes root, we decided to do this in the homepage js instead + "group by cvv.strvalue\n" + "order by count desc;" - ); - logger.info("query: " + query); - + logger.log(Level.FINE, "Metric query: {0}", query); + return query.getResultList(); } /** Datasets */ - public List datasetsBySubject() { + + /** + * @param yyyymm Month in YYYY-MM format. + */ + public long datasetsToMonth(String yyyymm, String dataLocation) throws Exception { + String dataLocationLine = "(date_trunc('month', releasetime) <= to_date('" + yyyymm +"','YYYY-MM') and dataset.harvestingclient_id IS NULL)\n"; + + if(!DATA_LOCATION_LOCAL.equals(dataLocation)) { //Default api state is DATA_LOCATION_LOCAL + //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated + String harvestBaseLine = "(date_trunc('month', createtime) <= to_date('" + yyyymm +"','YYYY-MM') and dataset.harvestingclient_id IS NOT NULL)\n"; + if (DATA_LOCATION_REMOTE.equals(dataLocation)) { + dataLocationLine = harvestBaseLine; //replace + } else if(DATA_LOCATION_ALL.equals(dataLocation)) { + dataLocationLine = "(" + dataLocationLine + " OR " + harvestBaseLine + ")\n"; //append + } + } + + // Note that this SQL line in the code below: + // datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) + // behaves somewhat counter-intuitively if the versionnumber and/or + // minorversionnumber is/are NULL - it results in an empty string + // (NOT the string "{dataset_id}:", in other words). Some harvested + // versions do not have version numbers (only the ones harvested from + // other Dataverses!) It works fine + // for our purposes below, because we are simply counting the selected + // lines - i.e. we don't care if some of these lines are empty. + // But do not use this notation if you need the values returned to + // meaningfully identify the datasets! 
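+        // Worked example (made-up values): for dataset id 42 with a NULL versionnumber,
+        // 42 || ':' || max(NULL + (.1 * NULL)) evaluates to NULL (returned as an empty
+        // string), not "42:" - but the count(*) below still counts the row.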
+ + Query query = em.createNativeQuery( + "select count(*)\n" + +"from (\n" + + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + + "from datasetversion\n" + + "join dataset on dataset.id = datasetversion.dataset_id\n" + + "where versionstate='RELEASED' \n" + + "and \n" + + dataLocationLine //be careful about adding more and statements after this line. + + "group by dataset_id \n" + +") sub_temp" + ); + logger.log(Level.FINE, "Metric query: {0}", query); + + return (long) query.getSingleResult(); + } + + public List datasetsBySubjectToMonth(String yyyymm, String dataLocation) { + // The SQL code below selects the local, non-harvested dataset versions: + // A published local datasets may have more than one released version! + // So that's why we have to jump through some extra hoops below + // in order to select the latest one: + String originClause = "(datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in\n" + + "(\n" + + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + + " from datasetversion\n" + + " join dataset on dataset.id = datasetversion.dataset_id\n" + + " where versionstate='RELEASED'\n" + + " and dataset.harvestingclient_id is null\n" + + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + " group by dataset_id\n" + + "))\n"; + + if(!DATA_LOCATION_LOCAL.equals(dataLocation)) { //Default api state is DATA_LOCATION_LOCAL + //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated + // But we can operate on the assumption that all the harvested datasets + // are published, and there is always only one version per dataset - + // so the query is simpler: + String harvestOriginClause = "(\n" + + " datasetversion.dataset_id = dataset.id\n" + + " AND dataset.harvestingclient_id IS NOT null \n" + + " AND date_trunc('month', datasetversion.createtime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + ")\n"; + + if (DATA_LOCATION_REMOTE.equals(dataLocation)) { + originClause = harvestOriginClause; //replace + } else if(DATA_LOCATION_ALL.equals(dataLocation)) { + originClause = "(" + originClause + " OR " + harvestOriginClause + ")\n"; //append + } + } + Query query = em.createNativeQuery("" + "SELECT strvalue, count(dataset.id)\n" + "FROM datasetfield_controlledvocabularyvalue \n" @@ -98,67 +181,44 @@ public List datasetsBySubject() { + "JOIN datasetfield ON datasetfield.id = datasetfield_controlledvocabularyvalue.datasetfield_id\n" + "JOIN datasetfieldtype ON datasetfieldtype.id = controlledvocabularyvalue.datasetfieldtype_id\n" + "JOIN datasetversion ON datasetversion.id = datasetfield.datasetversion_id\n" - + "JOIN dvobject ON dvobject.id = datasetversion.dataset_id\n" + "JOIN dataset ON dataset.id = datasetversion.dataset_id\n" + "WHERE\n" - + "datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in \n" - + "(\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" - + "from datasetversion\n" - + "join dataset on dataset.id = datasetversion.dataset_id\n" - + "where versionstate='RELEASED'\n" - + "and dataset.harvestingclient_id is null\n" - + "group by dataset_id \n" - + ")\n" + + originClause + "AND datasetfieldtype.name = 'subject'\n" + "GROUP BY strvalue\n" + "ORDER BY 
count(dataset.id) desc;" ); - logger.info("query: " + query); + logger.log(Level.FINE, "Metric query: {0}", query); return query.getResultList(); } - /** - * @param yyyymm Month in YYYY-MM format. - */ - public long datasetsToMonth(String yyyymm) throws Exception { - Query query = em.createNativeQuery("" - + "select count(*)\n" - + "from datasetversion\n" - + "where datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in \n" - + "(\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" - + "from datasetversion\n" - + "join dataset on dataset.id = datasetversion.dataset_id\n" - + "where versionstate='RELEASED' \n" - + "and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" - + "and dataset.harvestingclient_id is null\n" - + "group by dataset_id \n" - + ");" - ); - logger.fine("query: " + query); - - return (long) query.getSingleResult(); - } - - public long datasetsPastDays(int days) throws Exception { + public long datasetsPastDays(int days, String dataLocation) throws Exception { + String dataLocationLine = "(releasetime > current_date - interval '"+days+"' day and dataset.harvestingclient_id IS NULL)\n"; + + if(!DATA_LOCATION_LOCAL.equals(dataLocation)) { //Default api state is DATA_LOCATION_LOCAL + //we have to use createtime for harvest as post dvn3 harvests do not have releasetime populated + String harvestBaseLine = "(createtime > current_date - interval '"+days+"' day and dataset.harvestingclient_id IS NOT NULL)\n"; + if (DATA_LOCATION_REMOTE.equals(dataLocation)) { + dataLocationLine = harvestBaseLine; //replace + } else if(DATA_LOCATION_ALL.equals(dataLocation)) { + dataLocationLine += " or " +harvestBaseLine; //append + } + } Query query = em.createNativeQuery( - "select count(*)\n" + - "from datasetversion\n" + - "where datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in \n" + - "(\n" + - " select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + - " from datasetversion\n" + - " join dataset on dataset.id = datasetversion.dataset_id\n" + - " where versionstate='RELEASED' \n" + - " and releasetime > current_date - interval '"+days+"' day\n" + - " and dataset.harvestingclient_id is null\n" + - " group by dataset_id \n" + - ");" + "select count(*)\n" + +"from (\n" + + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max\n" + + "from datasetversion\n" + + "join dataset on dataset.id = datasetversion.dataset_id\n" + + "where versionstate='RELEASED' \n" + + "and \n" + + dataLocationLine //be careful about adding more and statements after this line. 
+ + "group by dataset_id \n" + +") sub_temp" ); - logger.fine("query: " + query); + logger.log(Level.FINE, "Metric query: {0}", query); return (long) query.getSingleResult(); } @@ -185,7 +245,8 @@ public long filesToMonth(String yyyymm) throws Exception { + "group by dataset_id \n" + ");" ); - logger.fine("query: " + query); + logger.log(Level.FINE, "Metric query: {0}", query); + return (long) query.getSingleResult(); } @@ -205,25 +266,50 @@ public long filesPastDays(int days) throws Exception { + "group by dataset_id \n" + ");" ); - - logger.fine("query: " + query); + logger.log(Level.FINE, "Metric query: {0}", query); return (long) query.getSingleResult(); } /** Downloads */ - /** + /* + * This includes getting historic download without a timestamp if query + * is earlier than earliest timestamped record + * * @param yyyymm Month in YYYY-MM format. */ public long downloadsToMonth(String yyyymm) throws Exception { - Query query = em.createNativeQuery("" - + "select count(id)\n" - + "from guestbookresponse\n" - + "where date_trunc('month', responsetime) <= to_date('" + yyyymm + "','YYYY-MM');" + Query earlyDateQuery = em.createNativeQuery("" + + "select responsetime from guestbookresponse\n" + + "ORDER BY responsetime LIMIT 1;" ); - logger.fine("query: " + query); - return (long) query.getSingleResult(); + + try { + Timestamp earlyDateTimestamp = (Timestamp) earlyDateQuery.getSingleResult(); + Date earliestDate = new Date(earlyDateTimestamp.getTime()); + SimpleDateFormat formatter2 = new SimpleDateFormat("yyyy-MM"); + Date dateQueried = formatter2.parse(yyyymm); + + if(!dateQueried.before(earliestDate)) { + Query query = em.createNativeQuery("" + + "select count(id)\n" + + "from guestbookresponse\n" + + "where date_trunc('month', responsetime) <= to_date('" + yyyymm + "','YYYY-MM')" + + "or responsetime is NULL;" //includes historic guestbook records without date + ); + logger.log(Level.FINE, "Metric query: {0}", query); + return (long) query.getSingleResult(); + } + else { + //When we query before the earliest dated record, return 0; + return 0L; + } + } catch(NoResultException e) { + //If earlyDateQuery.getSingleResult is null, then there are no guestbooks and we can return 0 + return 0L; + } + } public long downloadsPastDays(int days) throws Exception { @@ -232,37 +318,36 @@ public long downloadsPastDays(int days) throws Exception { + "from guestbookresponse\n" + "where responsetime > current_date - interval '"+days+"' day;\n" ); - - logger.fine("query: " + query); + logger.log(Level.FINE, "Metric query: {0}", query); return (long) query.getSingleResult(); } /** Helper functions for metric caching */ - public String returnUnexpiredCacheDayBased(String metricName, String days) throws Exception { - Metric queriedMetric = getMetric(metricName, days); + public String returnUnexpiredCacheDayBased(String metricName, String days, String dataLocation) throws Exception { + Metric queriedMetric = getMetric(metricName, dataLocation, days); if (!doWeQueryAgainDayBased(queriedMetric)) { - return queriedMetric.getMetricValue(); + return queriedMetric.getValueJson(); } return null; } - public String returnUnexpiredCacheMonthly(String metricName, String yyyymm) throws Exception { - Metric queriedMetric = getMetric(metricName, yyyymm); + public String returnUnexpiredCacheMonthly(String metricName, String yyyymm, String dataLocation) throws Exception { + Metric queriedMetric = getMetric(metricName, dataLocation, yyyymm); if (!doWeQueryAgainMonthly(queriedMetric)) { - return 
queriedMetric.getMetricValue(); + return queriedMetric.getValueJson(); } return null; } - public String returnUnexpiredCacheAllTime(String metricName) throws Exception { - Metric queriedMetric = getMetric(metricName); + public String returnUnexpiredCacheAllTime(String metricName, String dataLocation) throws Exception { + Metric queriedMetric = getMetric(metricName, dataLocation, null); //MAD: not passing a date if (!doWeQueryAgainAllTime(queriedMetric)) { - return queriedMetric.getMetricValue(); + return queriedMetric.getValueJson(); } return null; } @@ -291,7 +376,7 @@ public boolean doWeQueryAgainMonthly(Metric queriedMetric) { return true; } - String yyyymm = queriedMetric.getMetricDateString(); + String yyyymm = queriedMetric.getDateString(); String thisMonthYYYYMM = MetricsUtil.getCurrentMonth(); Date lastCalled = queriedMetric.getLastCalledDate(); @@ -331,13 +416,9 @@ public boolean doWeQueryAgainAllTime(Metric queriedMetric) { return (todayMinus.after(lastCalled)); } - public Metric save(Metric newMetric, boolean monthly) throws Exception { - Metric oldMetric; - if (monthly) { - oldMetric = getMetric(newMetric.getMetricTitle(), newMetric.getMetricDateString()); - } else { - oldMetric = getMetric(newMetric.getMetricTitle()); - } + public Metric save(Metric newMetric) throws Exception { + Metric oldMetric = getMetric(newMetric.getName(), newMetric.getDataLocation(), newMetric.getDateString()); + if (oldMetric != null) { em.remove(oldMetric); em.flush(); @@ -347,22 +428,33 @@ public Metric save(Metric newMetric, boolean monthly) throws Exception { } //This works for date and day based metrics - public Metric getMetric(String metricTitle, String dayString) throws Exception { - String searchMetricName = Metric.generateMetricName(metricTitle, dayString); - - return getMetric(searchMetricName); - } - - public Metric getMetric(String searchMetricName) throws Exception { - Query query = em.createQuery("select object(o) from Metric as o where o.metricName = :metricName", Metric.class); - query.setParameter("metricName", searchMetricName); + //It is ok to pass null for dataLocation and dayString + public Metric getMetric(String name, String dataLocation, String dayString) throws Exception { + Query query = em.createQuery("select object(o) from Metric as o" + + " where o.name = :name" + + " and o.dataLocation" + (dataLocation == null ? " is null" : " = :dataLocation") + + " and o.dayString" + (dayString == null ? " is null" : " = :dayString") + , Metric.class); + query.setParameter("name", name); + if(dataLocation != null){ query.setParameter("dataLocation", dataLocation);} + if(dayString != null) {query.setParameter("dayString", dayString);} + + logger.log(Level.FINE, "getMetric query: {0}", query); + Metric metric = null; try { metric = (Metric) query.getSingleResult(); } catch (javax.persistence.NoResultException nr) { //do nothing } catch (NonUniqueResultException nur) { - throw new Exception("Multiple cached results found for this query. 
Contact your system administrator."); + //duplicates can happen when a new/requeried metric is called twice and saved twice before one can use the cache + //this remove all but the 0th index one in that case + for(int i = 1; i < query.getResultList().size(); i++) { + Metric extraMetric = (Metric) query.getResultList().get(i); + em.remove(extraMetric); + em.flush(); + } + metric = (Metric) query.getResultList().get(0); } return metric; } diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java index 96a9ef53974..a3e4c68c848 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java @@ -1,10 +1,8 @@ package edu.harvard.iq.dataverse.metrics; import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.Metric; import java.io.StringReader; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; @@ -30,6 +28,10 @@ public class MetricsUtil { private final static String CATEGORY = "category"; private final static String SUBJECT = "subject"; public static String YEAR_AND_MONTH_PATTERN = "yyyy-MM"; + + public static final String DATA_LOCATION_LOCAL = "local"; + public static final String DATA_LOCATION_REMOTE = "remote"; + public static final String DATA_LOCATION_ALL = "all"; public static JsonObjectBuilder countToJson(long count) { JsonObjectBuilder job = Json.createObjectBuilder(); @@ -112,6 +114,17 @@ public static String sanitizeYearMonthUserInput(String userInput) throws Excepti return sanitized; } + public static String validateDataLocationStringType(String dataLocation) throws Exception { + if( null == dataLocation || "".equals(dataLocation)) { + dataLocation = DATA_LOCATION_LOCAL; + } + if(!(DATA_LOCATION_LOCAL.equals(dataLocation) || DATA_LOCATION_REMOTE.equals(dataLocation) || DATA_LOCATION_ALL.equals(dataLocation))) { + throw new Exception("The inputted data location is not valid"); + } + + return dataLocation; + } + public static String getCurrentMonth() { return LocalDate.now().format(DateTimeFormatter.ofPattern(MetricsUtil.YEAR_AND_MONTH_PATTERN)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index da7a40eecc1..f75671515f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -1233,7 +1233,7 @@ public void setDisplayCardValues() { if (dataverse.getId().equals(result.getParentIdAsLong())) { // definitely NOT linked: result.setIsInTree(true); - } else if (result.getParentIdAsLong() == 1L) { + } else if (result.getParentIdAsLong() == dataverseService.findRootDataverse().getId()) { // the object's parent is the root Dv; and the current // Dv is NOT root... 
definitely linked: result.setIsInTree(false); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 45fa189787a..77a5e3ef563 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -576,6 +576,10 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ArchiverUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ArchiverUtil.java --- a/src/main/java/edu/harvard/iq/dataverse/util/ArchiverUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ArchiverUtil.java - try { - Class clazz = Class.forName(className); - if (AbstractSubmitToArchiveCommand.class.isAssignableFrom(clazz)) { - Constructor ctor; - ctor = clazz.getConstructor(DataverseRequest.class, DatasetVersion.class); - return (AbstractSubmitToArchiveCommand) ctor.newInstance(new Object[] { dvr, version }); + if (className != null) { + try { + Class clazz = Class.forName(className); + if (AbstractSubmitToArchiveCommand.class.isAssignableFrom(clazz)) { + Constructor ctor; + ctor = clazz.getConstructor(DataverseRequest.class, DatasetVersion.class); + return (AbstractSubmitToArchiveCommand) ctor.newInstance(new Object[] { dvr, version }); + } + } catch (Exception e) { + logger.warning("Unable to instantiate an Archiver of class: " + className); + e.printStackTrace(); } - } catch (Exception e) { - logger.warning("Unable to instantiate an Archiver of class: " + className); - e.printStackTrace(); } - return null; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 18523f80eea..64d959773d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1347,11 +1347,18 @@ public static boolean isPubliclyDownloadable(FileMetadata fileMetadata) { * This is what the UI displays for "Download URL" on the file landing page * (DOIs rather than file IDs.) */ - public static String getPublicDownloadUrl(String dataverseSiteUrl, String persistentId) { - String path = "/api/access/datafile/:persistentId?persistentId=" + persistentId; - return dataverseSiteUrl + path; + public static String getPublicDownloadUrl(String dataverseSiteUrl, String persistentId, Long fileId) { + String path = null; + if(persistentId != null) { + path = dataverseSiteUrl + "/api/access/datafile/:persistentId?persistentId=" + persistentId; + } else if( fileId != null) { + path = dataverseSiteUrl + "/api/access/datafile/" + fileId; + } else { + logger.info("In getPublicDownloadUrl but persistentId & fileId are both null!"); + } + return path; } - + /** * The FileDownloadServiceBean operates on file IDs, not DOIs. */ diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java index b028e5b3915..c4588176870 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java @@ -115,4 +115,8 @@ public Timestamp getCreated() { return created; } + public void setDatasetVersion(DatasetVersion dv) { + datasetVersion=dv; + } + } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 7c9f3e9aac1..4a850b3ff11 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -39,6 +39,18 @@ + + + + + + + + + + + + @@ -1401,6 +1413,9 @@ + + +

@@ -1559,9 +1574,6 @@ function updateTemplate() { $('button[id$="updateTemplate"]').trigger('click'); } - function registerDataset() { - $('button[id$="registerDataset"]').trigger('click'); - } function checkNewlyRestricted() { if ($('input[id$="showAccessPopup"]').val() === 'true') { PF('accessPopup').show(); diff --git a/src/main/webapp/dataverse_header.xhtml b/src/main/webapp/dataverse_header.xhtml index f3112a97678..23aa98ab3fa 100644 --- a/src/main/webapp/dataverse_header.xhtml +++ b/src/main/webapp/dataverse_header.xhtml @@ -181,6 +181,14 @@

+ +
diff --git a/src/main/webapp/dataverse_homepage.xhtml b/src/main/webapp/dataverse_homepage.xhtml index 48b56a8a410..bbf03ea22ad 100644 --- a/src/main/webapp/dataverse_homepage.xhtml +++ b/src/main/webapp/dataverse_homepage.xhtml @@ -12,7 +12,7 @@ - + diff --git a/src/main/webapp/dataverse_template.xhtml b/src/main/webapp/dataverse_template.xhtml index e54980e7d9c..dd0b89d2b17 100644 --- a/src/main/webapp/dataverse_template.xhtml +++ b/src/main/webapp/dataverse_template.xhtml @@ -19,6 +19,7 @@ + diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 64d109570bc..d1939a0431d 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -428,7 +428,7 @@
-
+
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 1762f887881..ada8774b55a 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -130,25 +130,26 @@
-
+

+   - + +  

-
-
+
diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css index 3d95f5c15b6..7a0f8b05400 100644 --- a/src/main/webapp/resources/css/structure.css +++ b/src/main/webapp/resources/css/structure.css @@ -1,4 +1,13 @@ /*--------- BODY --------- */ +html { + /* Sticky footer */ + position:relative; + min-height:100%; +} +body { + /* Sticky footer Margin bottom by footer height */ + margin-bottom:160px; +} body.widget-view {margin-bottom:54px;} body .ui-widget {font-size: inherit;} .ui-widget-content a {color: #337AB7;} @@ -40,8 +49,8 @@ body .ui-widget {font-size: inherit;} #status-alert {margin-top:0; margin-bottom:0;} #status-alert div.alert {border:0; box-shadow:none;} -#footer {margin-top:3em; padding-bottom:4em; color:#808080;} -#footer.widget-view {position:fixed; left:0px; bottom:0px; margin:0; padding:4px 0 0 0; min-height:44px; width:100%; background:#fff;} +#footer {position:absolute; bottom:0; margin-top:3em; padding-bottom:100px; width:100%; height:60px; color:#808080;} +#footer.widget-view {position:fixed; left:0; bottom:0; margin:0; padding:4px 0 0 0; min-height:44px; background:#fff;} #footer .poweredbylogo {text-align:right;} #footer .poweredbylogo span {font-size:.85em;margin-right:.3em;} #footer .version {vertical-align:bottom;white-space:nowrap;} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java index f0ae408b761..f6478bf379e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java @@ -2,6 +2,8 @@ import com.jayway.restassured.RestAssured; import com.jayway.restassured.response.Response; +import edu.harvard.iq.dataverse.metrics.MetricsUtil; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import static javax.ws.rs.core.Response.Status.OK; import org.junit.AfterClass; import static org.junit.Assert.assertEquals; @@ -29,72 +31,93 @@ public static void cleanUpClass() { public void testGetDataversesToMonth() { String yyyymm = "2018-04"; // yyyymm = null; - Response response = UtilIT.metricsDataversesToMonth(yyyymm); + Response response = UtilIT.metricsDataversesToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDataversesToMonth(yyyymm); + response = UtilIT.metricsDataversesToMonth(yyyymm, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); - + assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsDataversesToMonth(yyyymm, "dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); + } @Test public void testGetDatasetsToMonth() { String yyyymm = "2018-04"; // yyyymm = null; - Response response = UtilIT.metricsDatasetsToMonth(yyyymm); + Response response = UtilIT.metricsDatasetsToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDatasetsToMonth(yyyymm); + response = UtilIT.metricsDatasetsToMonth(yyyymm, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test ok when passing extra query params + 
response = UtilIT.metricsDatasetsToMonth(yyyymm, "dataLocation=local"); + response.then().assertThat() + .statusCode(OK.getStatusCode()); } @Test public void testGetFilesToMonth() { String yyyymm = "2018-04"; // yyyymm = null; - Response response = UtilIT.metricsFilesToMonth(yyyymm); + Response response = UtilIT.metricsFilesToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsFilesToMonth(yyyymm); + response = UtilIT.metricsFilesToMonth(yyyymm, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsFilesToMonth(yyyymm, "dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @Test public void testGetDownloadsToMonth() { String yyyymm = "2018-04"; // yyyymm = null; - Response response = UtilIT.metricsDownloadsToMonth(yyyymm); + Response response = UtilIT.metricsDownloadsToMonth(yyyymm, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDownloadsToMonth(yyyymm); + response = UtilIT.metricsDownloadsToMonth(yyyymm, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsDownloadsToMonth(yyyymm, "dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @@ -102,36 +125,46 @@ public void testGetDownloadsToMonth() { public void testGetDataversesPastDays() { String days = "30"; - Response response = UtilIT.metricsDataversesPastDays(days); + Response response = UtilIT.metricsDataversesPastDays(days, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDataversesPastDays(days); + response = UtilIT.metricsDataversesPastDays(days, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsDataversesPastDays(days, "dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @Test public void testGetDatasetsPastDays() { String days = "30"; - Response response = UtilIT.metricsDatasetsPastDays(days); + Response response = UtilIT.metricsDatasetsPastDays(days, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDatasetsPastDays(days); + response = UtilIT.metricsDatasetsPastDays(days, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test ok when passing extra query params + response = UtilIT.metricsDatasetsPastDays(days, "dataLocation=local"); + response.then().assertThat() + .statusCode(OK.getStatusCode()); } @@ -139,85 +172,133 @@ public void testGetDatasetsPastDays() { public void testGetFilesPastDays() { 
String days = "30"; - Response response = UtilIT.metricsFilesPastDays(days); + Response response = UtilIT.metricsFilesPastDays(days, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsFilesPastDays(days); + response = UtilIT.metricsFilesPastDays(days, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsFilesPastDays(days, "dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @Test public void testGetDownloadsPastDays() { String days = "30"; - Response response = UtilIT.metricsDownloadsPastDays(days); + Response response = UtilIT.metricsDownloadsPastDays(days, null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDownloadsPastDays(days); + response = UtilIT.metricsDownloadsPastDays(days, null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsDownloadsPastDays(days, "dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @Test public void testGetDataverseByCategory() { - Response response = UtilIT.metricsDataversesByCategory(); + Response response = UtilIT.metricsDataversesByCategory(null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDataversesByCategory(); + response = UtilIT.metricsDataversesByCategory(null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsDataversesByCategory("dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @Test public void testGetDataverseBySubject() { - Response response = UtilIT.metricsDataversesBySubject(); + Response response = UtilIT.metricsDataversesBySubject(null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDataversesBySubject(); + response = UtilIT.metricsDataversesBySubject(null); String postcache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test error when passing extra query params + response = UtilIT.metricsDataversesBySubject("dataLocation=local"); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); } @Test public void testGetDatasetsBySubject() { - Response response = UtilIT.metricsDatasetsBySubject(); + Response response = UtilIT.metricsDatasetsBySubject(null); String precache = response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); //Run each query twice and compare results to tests caching - response = UtilIT.metricsDatasetsBySubject(); + response = UtilIT.metricsDatasetsBySubject(null); String postcache = 
response.prettyPrint(); response.then().assertThat() .statusCode(OK.getStatusCode()); assertEquals(precache, postcache); + + //Test ok when passing extra query params + response = UtilIT.metricsDatasetsBySubject("dataLocation=local"); + response.then().assertThat() + .statusCode(OK.getStatusCode()); } + @Test + public void testGetDatasetsBySubjectToMonth() { + String thismonth = MetricsUtil.getCurrentMonth(); + Response response = UtilIT.metricsDatasetsBySubjectToMonth(thismonth, null); + String precache = response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()); + + //Run each query twice and compare results to tests caching + // See the "TODO" at the beginning of the class; + // ideally, we'll want to have more comprehensive tests. + response = UtilIT.metricsDatasetsBySubjectToMonth(thismonth, null); + String postcache = response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()); + + assertEquals(precache, postcache); + + //Test ok passing extra query params + response = UtilIT.metricsDatasetsBySubjectToMonth(thismonth, "dataLocation=local"); + response.then().assertThat() + .statusCode(OK.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 15dd5538da5..33da7e6cb71 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1699,75 +1699,128 @@ static Response deleteStorageSite(long storageSiteId) { .delete("/api/admin/storageSites/" + storageSiteId); } - static Response metricsDataversesToMonth(String yyyymm) { + static Response metricsDataversesToMonth(String yyyymm, String queryParams) { String optionalYyyyMm = ""; if (yyyymm != null) { optionalYyyyMm = "/" + yyyymm; } + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/dataverses/toMonth" + optionalYyyyMm); + return requestSpecification.get("/api/info/metrics/dataverses/toMonth" + optionalYyyyMm + optionalQueryParams); } - static Response metricsDatasetsToMonth(String yyyymm) { + static Response metricsDatasetsToMonth(String yyyymm, String queryParams) { String optionalYyyyMm = ""; if (yyyymm != null) { optionalYyyyMm = "/" + yyyymm; } + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/datasets/toMonth" + optionalYyyyMm); + return requestSpecification.get("/api/info/metrics/datasets/toMonth" + optionalYyyyMm + optionalQueryParams); } - static Response metricsFilesToMonth(String yyyymm) { + static Response metricsFilesToMonth(String yyyymm, String queryParams) { String optionalYyyyMm = ""; if (yyyymm != null) { optionalYyyyMm = "/" + yyyymm; } + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" 
+ queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/files/toMonth" + optionalYyyyMm); + return requestSpecification.get("/api/info/metrics/files/toMonth" + optionalYyyyMm + optionalQueryParams); } - static Response metricsDownloadsToMonth(String yyyymm) { + static Response metricsDownloadsToMonth(String yyyymm, String queryParams) { String optionalYyyyMm = ""; if (yyyymm != null) { optionalYyyyMm = "/" + yyyymm; } + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/downloads/toMonth" + optionalYyyyMm); + return requestSpecification.get("/api/info/metrics/downloads/toMonth" + optionalYyyyMm + optionalQueryParams); } - static Response metricsDataversesPastDays(String days) { + static Response metricsDataversesPastDays(String days, String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/dataverses/pastDays/" + days); + return requestSpecification.get("/api/info/metrics/dataverses/pastDays/" + days + optionalQueryParams); } - static Response metricsDatasetsPastDays(String days) { + static Response metricsDatasetsPastDays(String days, String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/datasets/pastDays/" + days); + return requestSpecification.get("/api/info/metrics/datasets/pastDays/" + days + optionalQueryParams); } - static Response metricsFilesPastDays(String days) { + static Response metricsFilesPastDays(String days, String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/files/pastDays/" + days); + return requestSpecification.get("/api/info/metrics/files/pastDays/" + days + optionalQueryParams); } - static Response metricsDownloadsPastDays(String days) { + static Response metricsDownloadsPastDays(String days, String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/downloads/pastDays/" + days); + return requestSpecification.get("/api/info/metrics/downloads/pastDays/" + days + optionalQueryParams); } - static Response metricsDataversesByCategory() { + static Response metricsDataversesByCategory(String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/dataverses/byCategory"); + return requestSpecification.get("/api/info/metrics/dataverses/byCategory" + optionalQueryParams); } - static Response metricsDataversesBySubject() { + static Response metricsDataversesBySubject(String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" 
+ queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/dataverses/bySubject"); + return requestSpecification.get("/api/info/metrics/dataverses/bySubject" + optionalQueryParams); } - static Response metricsDatasetsBySubject() { + static Response metricsDatasetsBySubject(String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } + RequestSpecification requestSpecification = given(); + return requestSpecification.get("/api/info/metrics/datasets/bySubject" + optionalQueryParams); + } + + static Response metricsDatasetsBySubjectToMonth(String month, String queryParams) { + String optionalQueryParams = ""; + if (queryParams != null) { + optionalQueryParams = "?" + queryParams; + } RequestSpecification requestSpecification = given(); - return requestSpecification.get("/api/info/metrics/datasets/bySubject"); + return requestSpecification.get("/api/info/metrics/datasets/bySubject/toMonth/" + month + optionalQueryParams); } static Response clearMetricCache() { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java index fcfded0dd2b..f3db16082b5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java @@ -159,8 +159,10 @@ public void testgetFileDownloadUrl() { @Test public void testGetPublicDownloadUrl() { - assertEquals("null/api/access/datafile/:persistentId?persistentId=null", FileUtil.getPublicDownloadUrl(null, null)); - assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP")); + assertEquals(null, FileUtil.getPublicDownloadUrl(null, null, null)); + assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP", 33L)); //pid before fileId + assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP", null)); + assertEquals("https://demo.dataverse.org/api/access/datafile/33", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", null, 33L)); //pid before fileId } @Test diff --git a/src/test/resources/json/dataset-finch2.json b/src/test/resources/json/dataset-finch2.json index b3c01eb3d82..d37438cc881 100644 --- a/src/test/resources/json/dataset-finch2.json +++ b/src/test/resources/json/dataset-finch2.json @@ -202,6 +202,20 @@ "typeClass": "primitive", "value": "National Science Foundation" } + }, + { + "contributorType": { + "typeName": "contributorType", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "Data Collector" + }, + "contributorName": { + "typeName": "contributorName", + "multiple": false, + "typeClass": "primitive", + "value": "Watson, John" + } } ] },