From 5b3406551b2385abe4efa1b6320243d64de54030 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 29 Sep 2022 11:47:27 +0200
Subject: [PATCH 0001/1092] added api-direct-upload option for storage configurations

---
 doc/release-notes/4.20-release-notes.md              | 11 +++++++++--
 doc/sphinx-guides/source/installation/config.rst     |  1 +
 .../harvard/iq/dataverse/dataaccess/StorageIO.java   |  3 ++-
 .../java/edu/harvard/iq/dataverse/util/FileUtil.java |  3 ++-
 .../edu/harvard/iq/dataverse/util/SystemConfig.java  |  3 ++-
 5 files changed, 16 insertions(+), 5 deletions(-)

diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md
index e29953db101..8044047134f 100644
--- a/doc/release-notes/4.20-release-notes.md
+++ b/doc/release-notes/4.20-release-notes.md
@@ -90,10 +90,16 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid
 
 ### Direct S3 Upload Changes
 
-Direct upload to S3 is enabled per store by one new jvm option:
+Direct upload to S3 in UI and API is enabled per store by one new jvm option:
 
   ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true"
 
-
+
+That option makes direct upload by default in UI, in the API you can use either: direct or through Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option:
+
+  ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true"
+
+That option leaves through Dataverse upload by default in UI, but makes both: through Dataverse and direct uploads possible via API.
+
 The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload.
 
 Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option.
@@ -127,6 +133,7 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo
 - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above.
 - The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!)
+- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset to any storage. (Through API only and when the uploading tool has direct access to the storage used; i.e., uplad the file first and register it through API!)
 - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store.
 - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store.
 
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index f2de9d5702f..2b605ae8945 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -546,6 +546,7 @@ List of S3 Storage Options
 
   dataverse.files..bucket-name             The bucket name. See above.                                          (none)
  dataverse.files..download-redirect       ``true``/``false`` Enable direct download or proxy through Dataverse.   ``false``
  dataverse.files..upload-redirect         ``true``/``false`` Enable direct upload of files added to a dataset to the S3 store.   ``false``
+ dataverse.files..api-direct-upload       ``true``/``false`` Enable direct upload of files added to a dataset through API only.   ``false``
  dataverse.files..ingestsizelimit         Maximum size of directupload files that should be ingested           (none)
  dataverse.files..url-expiration-minutes  If direct uploads/downloads: time until links expire. Optional.      60
  dataverse.files..min-part-size           Multipart direct uploads will occur for files larger than this. Optional.   ``1024**3``

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
index 90e4a54dbe8..8e2dd9fa961 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
@@ -605,7 +605,8 @@ public static String getDriverPrefix(String driverId) {
     }
 
     public static boolean isDirectUploadEnabled(String driverId) {
-        return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"));
+        return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) ||
+                Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload"));
     }
 
     //Check that storageIdentifier is consistent with store's config
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 339de904f9e..0a41da4f7dd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -1748,7 +1748,8 @@ public static boolean isPackageFile(DataFile dataFile) {
 
     public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) {
         String driverId = dataset.getEffectiveStorageDriverId();
-        boolean directEnabled = Boolean.getBoolean("dataverse.files." + driverId + ".upload-redirect");
+        boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) ||
+                Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload"));
         //Should only be requested when it is allowed, but we'll log a warning otherwise
         if(!directEnabled) {
             logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId());
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index 7abd0d02065..4553a71a1d2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -1112,7 +1112,8 @@ public boolean isDatafileValidationOnPublishEnabled() {
     }
 
     public boolean directUploadEnabled(DvObjectContainer container) {
-        return Boolean.getBoolean("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect");
+        // this method is used in the UI only; therefore, "dataverse.files." + driverId + ".api-direct-upload" is not used here
+        return Boolean.parseBoolean(System.getProperty("dataverse.files."
+                + container.getEffectiveStorageDriverId() + ".upload-redirect"));
     }
 
     public String getDataCiteRestApiUrlString() {

From 5db560e999454a0b974215c6d9bc8373d4595fc0 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 29 Sep 2022 13:43:33 +0200
Subject: [PATCH 0002/1092] improvements in the documentation

---
 doc/release-notes/4.20-release-notes.md          | 8 ++++----
 doc/sphinx-guides/source/installation/config.rst | 4 ++--
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md
index 8044047134f..79037d8cd8c 100644
--- a/doc/release-notes/4.20-release-notes.md
+++ b/doc/release-notes/4.20-release-notes.md
@@ -94,11 +94,11 @@ Direct upload to S3 in UI and API is enabled per store by one new jvm option:
 
   ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true"
 
-That option makes direct upload by default in UI, in the API you can use either: direct or through Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option:
+This option makes direct upload the default in the UI. Via the API, you can use either direct upload or upload through Dataverse. Direct upload to S3 in the API only is enabled per store by this new jvm option:
 
   ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true"
 
-That option leaves through Dataverse upload by default in UI, but makes both: through Dataverse and direct uploads possible via API.
+That option keeps upload through Dataverse as the default in the UI, but makes both upload through Dataverse and direct upload possible via the API.
 
 The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload.
 
@@ -132,8 +132,8 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo
 ## New JVM Options for file storage drivers
 
 - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above.
-- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!)
-- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset to any storage. (Through API only and when the uploading tool has direct access to the storage used; i.e., uplad the file first and register it through API!)
+- The JVM option dataverse.files..upload-redirect enables direct upload to the S3 bucket for files added to a dataset. (S3 stores only!)
+- The JVM option dataverse.files..api-direct-upload enables direct upload to any storage for files added to a dataset. (Via API only, and only when the uploading tool has direct access to the storage in use; i.e., upload the file first, then register it via the API.)
 - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store.
 - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store.
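To make the combined behavior of the two options concrete, the following stand-alone sketch mirrors the check introduced by the StorageIO and FileUtil hunks above. The property names and the OR logic are taken directly from the diffs; the class name, the main method, and the sample driver IDs are illustrative scaffolding only, not part of the patch.

    public class DirectUploadCheckSketch {

        // Mirrors StorageIO.isDirectUploadEnabled after patch 0001: direct upload
        // counts as enabled when either per-store JVM option parses to true.
        static boolean isDirectUploadEnabled(String driverId) {
            return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))
                    || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload"));
        }

        public static void main(String[] args) {
            // Simulate a store named "s3" configured for API-only direct upload.
            System.setProperty("dataverse.files.s3.api-direct-upload", "true");
            System.out.println(isDirectUploadEnabled("s3"));   // true: API direct upload is on
            System.out.println(isDirectUploadEnabled("file")); // false: neither option is set
        }
    }

Note that the patch also replaces Boolean.getBoolean(name) with Boolean.parseBoolean(System.getProperty(name)); the two behave the same for plain system properties, but the latter keeps the two option checks symmetrical.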
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 2b605ae8945..3245aeccfaf 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -545,8 +545,8 @@ List of S3 Storage Options
 
  dataverse.files..label                   **Required** label to be shown in the UI for this storage            (none)
  dataverse.files..bucket-name             The bucket name. See above.                                          (none)
  dataverse.files..download-redirect       ``true``/``false`` Enable direct download or proxy through Dataverse.   ``false``
- dataverse.files..upload-redirect         ``true``/``false`` Enable direct upload of files added to a dataset to the S3 store.   ``false``
- dataverse.files..api-direct-upload       ``true``/``false`` Enable direct upload of files added to a dataset through API only.   ``false``
+ dataverse.files..upload-redirect         ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store.   ``false``
+ dataverse.files..api-direct-upload       ``true``/``false`` Enable direct upload of files added to a dataset via API only.   ``false``
  dataverse.files..ingestsizelimit         Maximum size of directupload files that should be ingested           (none)
  dataverse.files..url-expiration-minutes  If direct uploads/downloads: time until links expire. Optional.      60
  dataverse.files..min-part-size           Multipart direct uploads will occur for files larger than this. Optional.   ``1024**3``

From c7cfb533b261b7e36afd78b2061d48d2f55c4736 Mon Sep 17 00:00:00 2001
From: bencomp
Date: Mon, 3 Oct 2022 01:17:42 +0200
Subject: [PATCH 0003/1092] Remove unused GPL-licensed code

For unknown reasons, in 2009 several files from the JDK were copied into
the Dataverse codebase instead of being referenced. It appears that these
classes weren't really used.
---
 .../spi/FileMetadataExtractorSpi.java         |  18 -
 .../ingest/plugin/spi/DigraphNode.java        | 188 ----
 .../plugin/spi/IngestServiceProvider.java     |  25 +-
 .../plugin/spi/PartiallyOrderedSet.java       | 241 -----
 .../plugin/spi/RegisterableService.java       |  90 --
 .../ingest/plugin/spi/ServiceRegistry.java    | 861 ------------------
 6 files changed, 1 insertion(+), 1422 deletions(-)
 delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java
 delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java
 delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java
 delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java
index ab8f610cb06..a30dfafe67f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java
@@ -10,9 +10,7 @@
 import java.util.logging.*;
 import java.io.*;
 
-import edu.harvard.iq.dataverse.ingest.plugin.spi.RegisterableService;
 import edu.harvard.iq.dataverse.ingest.plugin.spi.IngestServiceProvider;
-import edu.harvard.iq.dataverse.ingest.plugin.spi.ServiceRegistry;
 
 import java.nio.MappedByteBuffer;
 import java.util.Locale;
@@ -44,22 +42,6 @@ public FileMetadataExtractorSpi(String vendorName, String version) {
         this.version = version;
     }
 
-    public void onRegistration(ServiceRegistry registry,
-                               Class category) {}
-
-
-    public void 
onDeregistration(ServiceRegistry registry, - Class category) {} - - public String getVersion() { - return version; - } - - public String getVendorName() { - return vendorName; - } - - public abstract String getDescription(Locale locale); protected String[] names = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java deleted file mode 100644 index 4db48b5c06a..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ - -package edu.harvard.iq.dataverse.ingest.plugin.spi; - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000 Sun Microsystems, Inc. All Rights Reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - -//package javax.imageio.spi; - -import java.io.Serializable; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - -/** - * A node in a directed graph. In addition to an arbitrary - * Object containing user data associated with the node, - * each node maintains a Sets of nodes which are pointed - * to by the current node (available from getOutNodes). - * The in-degree of the node (that is, number of nodes that point to - * the current node) may be queried. - * - */ -class DigraphNode implements Cloneable, Serializable { - - /** The data associated with this node. 
*/ - protected Object data; - - /** - * A Set of neighboring nodes pointed to by this - * node. - */ - protected Set outNodes = new HashSet(); - - /** The in-degree of the node. */ - protected int inDegree = 0; - - /** - * A Set of neighboring nodes that point to this - * node. - */ - private Set inNodes = new HashSet(); - - public DigraphNode(Object data) { - this.data = data; - } - - /** Returns the Object referenced by this node. */ - public Object getData() { - return data; - } - - /** - * Returns an Iterator containing the nodes pointed - * to by this node. - */ - public Iterator getOutNodes() { - return outNodes.iterator(); - } - - /** - * Adds a directed edge to the graph. The outNodes list of this - * node is updated and the in-degree of the other node is incremented. - * - * @param node a DigraphNode. - * - * @return true if the node was not previously the - * target of an edge. - */ - public boolean addEdge(DigraphNode node) { - if (outNodes.contains(node)) { - return false; - } - - outNodes.add(node); - node.inNodes.add(this); - node.incrementInDegree(); - return true; - } - - /** - * Returns true if an edge exists between this node - * and the given node. - * - * @param node a DigraphNode. - * - * @return true if the node is the target of an edge. - */ - public boolean hasEdge(DigraphNode node) { - return outNodes.contains(node); - } - - /** - * Removes a directed edge from the graph. The outNodes list of this - * node is updated and the in-degree of the other node is decremented. - * - * @return true if the node was previously the target - * of an edge. - */ - public boolean removeEdge(DigraphNode node) { - if (!outNodes.contains(node)) { - return false; - } - - outNodes.remove(node); - node.inNodes.remove(this); - node.decrementInDegree(); - return true; - } - - /** - * Removes this node from the graph, updating neighboring nodes - * appropriately. - */ - public void dispose() { - Object[] inNodesArray = inNodes.toArray(); - for(int i=0; iServiceRegistry. - * - * @param registry the ServiceRegistry instance. - * @param category a Class object that indicatges - * its registry category under which this object has been registered. - * category. - */ - public void onRegistration(ServiceRegistry registry, - Class category) {} - - /** - * A callback whenever this Spi class is deregistered from - * a ServiceRegistry. - * - * @param registry the ServiceRegistry instance. - * @param category a Class object that indicatges - * its registry category from which this object is being de-registered. - */ - public void onDeregistration(ServiceRegistry registry, - Class category) {} /** * Gets the value of the version field. * diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java deleted file mode 100644 index 87f4f57cdb6..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ - -package edu.harvard.iq.dataverse.ingest.plugin.spi; - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000 Sun Microsystems, Inc. All Rights Reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - -//package javax.imageio.spi; - -import java.util.AbstractSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.Map; -import java.util.Set; - -/** - * A set of Objects with pairwise orderings between them. - * The iterator method provides the elements in - * topologically sorted order. Elements participating in a cycle - * are not returned. - * - * Unlike the SortedSet and SortedMap - * interfaces, which require their elements to implement the - * Comparable interface, this class receives ordering - * information via its setOrdering and - * unsetPreference methods. This difference is due to - * the fact that the relevant ordering between elements is unlikely to - * be inherent in the elements themselves; rather, it is set - * dynamically accoring to application policy. For example, in a - * service provider registry situation, an application might allow the - * user to set a preference order for service provider objects - * supplied by a trusted vendor over those supplied by another. - * - */ -class PartiallyOrderedSet extends AbstractSet { - - // The topological sort (roughly) follows the algorithm described in - // Horowitz and Sahni, _Fundamentals of Data Structures_ (1976), - // p. 315. - - // Maps Objects to DigraphNodes that contain them - private Map poNodes = new HashMap(); - - // The set of Objects - private Set nodes = poNodes.keySet(); - - /** - * Constructs a PartiallyOrderedSet. 
- */ - public PartiallyOrderedSet() {} - - public int size() { - return nodes.size(); - } - - public boolean contains(Object o) { - return nodes.contains(o); - } - - /** - * Returns an iterator over the elements contained in this - * collection, with an ordering that respects the orderings set - * by the setOrdering method. - */ - public Iterator iterator() { - return new PartialOrderIterator(poNodes.values().iterator()); - } - - /** - * Adds an Object to this - * PartiallyOrderedSet. - */ - public boolean add(Object o) { - if (nodes.contains(o)) { - return false; - } - - DigraphNode node = new DigraphNode(o); - poNodes.put(o, node); - return true; - } - - /** - * Removes an Object from this - * PartiallyOrderedSet. - */ - public boolean remove(Object o) { - DigraphNode node = (DigraphNode)poNodes.get(o); - if (node == null) { - return false; - } - - poNodes.remove(o); - node.dispose(); - return true; - } - - public void clear() { - poNodes.clear(); - } - - /** - * Sets an ordering between two nodes. When an iterator is - * requested, the first node will appear earlier in the - * sequence than the second node. If a prior ordering existed - * between the nodes in the opposite order, it is removed. - * - * @return true if no prior ordering existed - * between the nodes, falseotherwise. - */ - public boolean setOrdering(Object first, Object second) { - DigraphNode firstPONode = - (DigraphNode)poNodes.get(first); - DigraphNode secondPONode = - (DigraphNode)poNodes.get(second); - - secondPONode.removeEdge(firstPONode); - return firstPONode.addEdge(secondPONode); - } - - /** - * Removes any ordering between two nodes. - * - * @return true if a prior prefence existed between the nodes. - */ - public boolean unsetOrdering(Object first, Object second) { - DigraphNode firstPONode = - (DigraphNode)poNodes.get(first); - DigraphNode secondPONode = - (DigraphNode)poNodes.get(second); - - return firstPONode.removeEdge(secondPONode) || - secondPONode.removeEdge(firstPONode); - } - - /** - * Returns true if an ordering exists between two - * nodes. 
- */ - public boolean hasOrdering(Object preferred, Object other) { - DigraphNode preferredPONode = - (DigraphNode)poNodes.get(preferred); - DigraphNode otherPONode = - (DigraphNode)poNodes.get(other); - - return preferredPONode.hasEdge(otherPONode); - } -} - -class PartialOrderIterator implements Iterator { - - LinkedList zeroList = new LinkedList(); - Map inDegrees = new HashMap(); // DigraphNode -> Integer - - public PartialOrderIterator(Iterator iter) { - // Initialize scratch in-degree values, zero list - while (iter.hasNext()) { - DigraphNode node = (DigraphNode)iter.next(); - int inDegree = node.getInDegree(); - inDegrees.put(node, new Integer(inDegree)); - - // Add nodes with zero in-degree to the zero list - if (inDegree == 0) { - zeroList.add(node); - } - } - } - - public boolean hasNext() { - return !zeroList.isEmpty(); - } - - public Object next() { - DigraphNode first = (DigraphNode)zeroList.removeFirst(); - - // For each out node of the output node, decrement its in-degree - Iterator outNodes = first.getOutNodes(); - while (outNodes.hasNext()) { - DigraphNode node = (DigraphNode)outNodes.next(); - int inDegree = ((Integer)inDegrees.get(node)).intValue() - 1; - inDegrees.put(node, new Integer(inDegree)); - - // If the in-degree has fallen to 0, place the node on the list - if (inDegree == 0) { - zeroList.add(node); - } - } - - return first.getData(); - } - - public void remove() { - throw new UnsupportedOperationException(); - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java deleted file mode 100644 index d3609b1e4b9..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ - -package edu.harvard.iq.dataverse.ingest.plugin.spi; - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000-2004 Sun Microsystems, Inc. All Rights Reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. 
- * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - - -/** - * An optional interface that may be provided by service provider - * objects that will be registered with a - * ServiceRegistry. If this interface is present, - * notification of registration and deregistration will be performed. - * - * @see ServiceRegistry - * - */ -public interface RegisterableService { - - /** - * Called when an object implementing this interface is added to - * the given category of the given - * registry. The object may already be registered - * under another category or categories. - * - * @param registry a ServiceRegistry where this - * object has been registered. - * @param category a Class object indicating the - * registry category under which this object has been registered. - */ - void onRegistration(ServiceRegistry registry, Class category); - - /** - * Called when an object implementing this interface is removed - * from the given category of the given - * registry. The object may still be registered - * under another category or categories. - * - * @param registry a ServiceRegistry from which this - * object is being (wholly or partially) deregistered. - * @param category a Class object indicating the - * registry category from which this object is being deregistered. - */ - void onDeregistration(ServiceRegistry registry, Class category); -} diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java deleted file mode 100644 index 1794adb5de2..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java +++ /dev/null @@ -1,861 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ -package edu.harvard.iq.dataverse.ingest.plugin.spi; - - - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000-2007 Sun Microsystems, Inc. All Rights Reserved. 
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - -//package javax.imageio.spi; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.ServiceLoader; - -/** - * A registry for service provider instances. - * - *

A service is a well-known set of interfaces and (usually - * abstract) classes. A service provider is a specific - * implementation of a service. The classes in a provider typically - * implement the interface or subclass the class defined by the - * service itself. - * - *

Service providers are stored in one or more categories, - * each of which is defined by a class of interface (described by a - * Class object) that all of its members must implement. - * The set of categories may be changed dynamically. - * - *

Only a single instance of a given leaf class (that is, the - * actual class returned by getClass(), as opposed to any - * inherited classes or interfaces) may be registered. That is, - * suppose that the - * com.mycompany.mypkg.GreenServiceProvider class - * implements the com.mycompany.mypkg.MyService - * interface. If a GreenServiceProvider instance is - * registered, it will be stored in the category defined by the - * MyService class. If a new instance of - * GreenServiceProvider is registered, it will replace - * the previous instance. In practice, service provider objects are - * usually singletons so this behavior is appropriate. - * - *

To declare a service provider, a services - * subdirectory is placed within the META-INF directory - * that is present in every JAR file. This directory contains a file - * for each service provider interface that has one or more - * implementation classes present in the JAR file. For example, if - * the JAR file contained a class named - * com.mycompany.mypkg.MyServiceImpl which implements the - * javax.someapi.SomeService interface, the JAR file - * would contain a file named:

- * META-INF/services/javax.someapi.SomeService 
- * - * containing the line: - * - *
- * com.mycompany.mypkg.MyService
- * 
- * - *

The service provider classes should be to be lightweight and - * quick to load. Implementations of these interfaces should avoid - * complex dependencies on other classes and on native code. The usual - * pattern for more complex services is to register a lightweight - * proxy for the heavyweight service. - * - *

An application may customize the contents of a registry as it - * sees fit, so long as it has the appropriate runtime permission. - * - *

For more details on declaring service providers, and the JAR - * format in general, see the - * JAR File Specification. - * - * @see RegisterableService - * - */ -public class ServiceRegistry { - - // Class -> Registry - private Map, SubRegistry> categoryMap = new HashMap<>(); - - /** - * Constructs a ServiceRegistry instance with a - * set of categories taken from the categories - * argument. - * - * @param categories an Iterator containing - * Class objects to be used to define categories. - * - * @exception IllegalArgumentException if - * categories is null. - */ - public ServiceRegistry(Iterator> categories) { - if (categories == null) { - throw new IllegalArgumentException("categories == null!"); - } - while (categories.hasNext()) { - Class category = categories.next(); - SubRegistry reg = new SubRegistry(this, category); - categoryMap.put(category, reg); - } - } - - // The following two methods expose functionality from - // sun.misc.Service. If that class is made public, they may be - // removed. - // - // The sun.misc.ServiceConfigurationError class may also be - // exposed, in which case the references to 'an - // Error' below should be changed to 'a - // ServiceConfigurationError'. - - /** - * Searches for implementations of a particular service class - * using the given class loader. - * - *

This method transforms the name of the given service class - * into a provider-configuration filename as described in the - * class comment and then uses the getResources - * method of the given class loader to find all available files - * with that name. These files are then read and parsed to - * produce a list of provider-class names. The iterator that is - * returned uses the given class loader to look up and then - * instantiate each element of the list. - * - *

Because it is possible for extensions to be installed into - * a running Java virtual machine, this method may return - * different results each time it is invoked. - * - * @param providerClass a Classobject indicating the - * class or interface of the service providers being detected. - * - * @param loader the class loader to be used to load - * provider-configuration files and instantiate provider classes, - * or null if the system class loader (or, failing that - * the bootstrap class loader) is to be used. - * - * @return An Iterator that yields provider objects - * for the given service, in some arbitrary order. The iterator - * will throw an Error if a provider-configuration - * file violates the specified format or if a provider class - * cannot be found and instantiated. - * - * @exception IllegalArgumentException if - * providerClass is null. - */ - public static Iterator lookupProviders(Class providerClass, - ClassLoader loader) - { - if (providerClass == null) { - throw new IllegalArgumentException("providerClass == null!"); - } - return ServiceLoader.load(providerClass, loader).iterator(); - } - - /** - * Locates and incrementally instantiates the available providers - * of a given service using the context class loader. This - * convenience method is equivalent to: - * - *

-     *   ClassLoader cl = Thread.currentThread().getContextClassLoader();
-     *   return Service.providers(service, cl);
-     * 
- * - * @param providerClass a Classobject indicating the - * class or interface of the service providers being detected. - * - * @return An Iterator that yields provider objects - * for the given service, in some arbitrary order. The iterator - * will throw an Error if a provider-configuration - * file violates the specified format or if a provider class - * cannot be found and instantiated. - * - * @exception IllegalArgumentException if - * providerClass is null. - */ - public static Iterator lookupProviders(Class providerClass) { - if (providerClass == null) { - throw new IllegalArgumentException("providerClass == null!"); - } - return ServiceLoader.load(providerClass).iterator(); - } - - /** - * Returns an Iterator of Class objects - * indicating the current set of categories. The iterator will be - * empty if no categories exist. - * - * @return an Iterator containing - * Classobjects. - */ - public Iterator> getCategories() { - Set> keySet = categoryMap.keySet(); - return keySet.iterator(); - } - - /** - * Returns an Iterator containing the subregistries to which the - * provider belongs. - */ - private Iterator getSubRegistries(Object provider) { - List l = new ArrayList<>(); - Iterator> iter = categoryMap.keySet().iterator(); - while (iter.hasNext()) { - Class c = iter.next(); - if (c.isAssignableFrom(provider.getClass())) { - l.add(categoryMap.get(c)); - } - } - return l.iterator(); - } - - /** - * Adds a service provider object to the registry. The provider - * is associated with the given category. - * - *

If provider implements the - * RegisterableService interface, its - * onRegistration method will be called. Its - * onDeregistration method will be called each time - * it is deregistered from a category, for example if a - * category is removed or the registry is garbage collected. - * - * @param provider the service provide object to be registered. - * @param category the category under which to register the - * provider. - * - * @return true if no provider of the same class was previously - * registered in the same category category. - * - * @exception IllegalArgumentException if provider is - * null. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - * @exception ClassCastException if provider does not implement - * the Class defined by category. - */ - public boolean registerServiceProvider(T provider, - Class category) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (!category.isAssignableFrom(provider.getClass())) { - throw new ClassCastException(); - } - - return reg.registerServiceProvider(provider); - } - - /** - * Adds a service provider object to the registry. The provider - * is associated within each category present in the registry - * whose Class it implements. - * - *

If provider implements the - * RegisterableService interface, its - * onRegistration method will be called once for each - * category it is registered under. Its - * onDeregistration method will be called each time - * it is deregistered from a category or when the registry is - * finalized. - * - * @param provider the service provider object to be registered. - * - * @exception IllegalArgumentException if - * provider is null. - */ - public void registerServiceProvider(Object provider) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - Iterator regs = getSubRegistries(provider); - while (regs.hasNext()) { - SubRegistry reg = regs.next(); - reg.registerServiceProvider(provider); - } - } - - /** - * Adds a set of service provider objects, taken from an - * Iterator to the registry. Each provider is - * associated within each category present in the registry whose - * Class it implements. - * - *

For each entry of providers that implements - * the RegisterableService interface, its - * onRegistration method will be called once for each - * category it is registered under. Its - * onDeregistration method will be called each time - * it is deregistered from a category or when the registry is - * finalized. - * - * @param providers an Iterator containing service provider - * objects to be registered. - * - * @exception IllegalArgumentException if providers - * is null or contains a null entry. - */ - public void registerServiceProviders(Iterator providers) { - if (providers == null) { - throw new IllegalArgumentException("provider == null!"); - } - while (providers.hasNext()) { - registerServiceProvider(providers.next()); - } - } - - /** - * Removes a service provider object from the given category. If - * the provider was not previously registered, nothing happens and - * false is returned. Otherwise, true - * is returned. If an object of the same class as - * provider but not equal (using ==) to - * provider is registered, it will not be - * deregistered. - * - *

If provider implements the - * RegisterableService interface, its - * onDeregistration method will be called. - * - * @param provider the service provider object to be deregistered. - * @param category the category from which to deregister the - * provider. - * - * @return true if the provider was previously - * registered in the same category category, - * false otherwise. - * - * @exception IllegalArgumentException if provider is - * null. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - * @exception ClassCastException if provider does not implement - * the class defined by category. - */ - public boolean deregisterServiceProvider(T provider, - Class category) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (!category.isAssignableFrom(provider.getClass())) { - throw new ClassCastException(); - } - return reg.deregisterServiceProvider(provider); - } - - /** - * Removes a service provider object from all categories that - * contain it. - * - * @param provider the service provider object to be deregistered. - * - * @exception IllegalArgumentException if provider is - * null. - */ - public void deregisterServiceProvider(Object provider) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - Iterator regs = getSubRegistries(provider); - while (regs.hasNext()) { - SubRegistry reg = regs.next(); - reg.deregisterServiceProvider(provider); - } - } - - /** - * Returns true if provider is currently - * registered. - * - * @param provider the service provider object to be queried. - * - * @return true if the given provider has been - * registered. - * - * @exception IllegalArgumentException if provider is - * null. - */ - public boolean contains(Object provider) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - Iterator regs = getSubRegistries(provider); - while (regs.hasNext()) { - SubRegistry reg = regs.next(); - if (reg.contains(provider)) { - return true; - } - } - - return false; - } - - /** - * Returns an Iterator containing all registered - * service providers in the given category. If - * useOrdering is false, the iterator - * will return all of the server provider objects in an arbitrary - * order. Otherwise, the ordering will respect any pairwise - * orderings that have been set. If the graph of pairwise - * orderings contains cycles, any providers that belong to a cycle - * will not be returned. - * - * @param category the category to be retrieved from. - * @param useOrdering true if pairwise orderings - * should be taken account in ordering the returned objects. - * - * @return an Iterator containing service provider - * objects from the given category, possibly in order. - * - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public Iterator getServiceProviders(Class category, - boolean useOrdering) { - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - return reg.getServiceProviders(useOrdering); - } - - /** - * A simple filter interface used by - * ServiceRegistry.getServiceProviders to select - * providers matching an arbitrary criterion. 
Classes that - * implement this interface should be defined in order to make use - * of the getServiceProviders method of - * ServiceRegistry that takes a Filter. - * - * @see ServiceRegistry#getServiceProviders(Class, ServiceRegistry.Filter, boolean) - */ - public interface Filter { - - /** - * Returns true if the given - * provider object matches the criterion defined - * by this Filter. - * - * @param provider a service provider Object. - * - * @return true if the provider matches the criterion. - */ - boolean filter(Object provider); - } - - /** - * Returns an Iterator containing service provider - * objects within a given category that satisfy a criterion - * imposed by the supplied ServiceRegistry.Filter - * object's filter method. - * - *

The useOrdering argument controls the - * ordering of the results using the same rules as - * getServiceProviders(Class, boolean). - * - * @param category the category to be retrieved from. - * @param filter an instance of ServiceRegistry.Filter - * whose filter method will be invoked. - * @param useOrdering true if pairwise orderings - * should be taken account in ordering the returned objects. - * - * @return an Iterator containing service provider - * objects from the given category, possibly in order. - * - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public Iterator getServiceProviders(Class category, - Filter filter, - boolean useOrdering) { - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - Iterator iter = getServiceProviders(category, useOrdering); - return new FilterIterator(iter, filter); - } - - /** - * Returns the currently registered service provider object that - * is of the given class type. At most one object of a given - * class is allowed to be registered at any given time. If no - * registered object has the desired class type, null - * is returned. - * - * @param providerClass the Class of the desired - * service provider object. - * - * @return a currently registered service provider object with the - * desired Classtype, or null is none is - * present. - * - * @exception IllegalArgumentException if providerClass is - * null. - */ - public T getServiceProviderByClass(Class providerClass) { - if (providerClass == null) { - throw new IllegalArgumentException("providerClass == null!"); - } - for (Class c : categoryMap.keySet()) { - if (c.isAssignableFrom(providerClass)) { - SubRegistry reg = (SubRegistry)categoryMap.get(c); - T provider = reg.getServiceProviderByClass(providerClass); - if (provider != null) { - return provider; - } - } - } - return null; - } - - /** - * Sets a pairwise ordering between two service provider objects - * within a given category. If one or both objects are not - * currently registered within the given category, or if the - * desired ordering is already set, nothing happens and - * false is returned. If the providers previously - * were ordered in the reverse direction, that ordering is - * removed. - * - *

The ordering will be used by the - * getServiceProviders methods when their - * useOrdering argument is true. - * - * @param category a Class object indicating the - * category under which the preference is to be established. - * @param firstProvider the preferred provider. - * @param secondProvider the provider to which - * firstProvider is preferred. - * - * @return true if a previously unset ordering - * was established. - * - * @exception IllegalArgumentException if either provider is - * null or they are the same object. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public boolean setOrdering(Class category, - T firstProvider, - T secondProvider) { - if (firstProvider == null || secondProvider == null) { - throw new IllegalArgumentException("provider is null!"); - } - if (firstProvider == secondProvider) { - throw new IllegalArgumentException("providers are the same!"); - } - SubRegistry reg = (SubRegistry)categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (reg.contains(firstProvider) && - reg.contains(secondProvider)) { - return reg.setOrdering(firstProvider, secondProvider); - } - return false; - } - - /** - * Sets a pairwise ordering between two service provider objects - * within a given category. If one or both objects are not - * currently registered within the given category, or if no - * ordering is currently set between them, nothing happens - * and false is returned. - * - *

The ordering will be used by the - * getServiceProviders methods when their - * useOrdering argument is true. - * - * @param category a Class object indicating the - * category under which the preference is to be disestablished. - * @param firstProvider the formerly preferred provider. - * @param secondProvider the provider to which - * firstProvider was formerly preferred. - * - * @return true if a previously set ordering was - * disestablished. - * - * @exception IllegalArgumentException if either provider is - * null or they are the same object. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public boolean unsetOrdering(Class category, - T firstProvider, - T secondProvider) { - if (firstProvider == null || secondProvider == null) { - throw new IllegalArgumentException("provider is null!"); - } - if (firstProvider == secondProvider) { - throw new IllegalArgumentException("providers are the same!"); - } - SubRegistry reg = (SubRegistry)categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (reg.contains(firstProvider) && - reg.contains(secondProvider)) { - return reg.unsetOrdering(firstProvider, secondProvider); - } - return false; - } - - /** - * Deregisters all service provider object currently registered - * under the given category. - * - * @param category the category to be emptied. - * - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public void deregisterAll(Class category) { - SubRegistry reg = (SubRegistry)categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - reg.clear(); - } - - /** - * Deregisters all currently registered service providers from all - * categories. - */ - public void deregisterAll() { - Iterator iter = categoryMap.values().iterator(); - while (iter.hasNext()) { - SubRegistry reg = (SubRegistry)iter.next(); - reg.clear(); - } - } - - /** - * Finalizes this object prior to garbage collection. The - * deregisterAll method is called to deregister all - * currently registered service providers. This method should not - * be called from application code. - * - * @exception Throwable if an error occurs during superclass - * finalization. - */ - public void finalize() throws Throwable { - deregisterAll(); - super.finalize(); - } -} - - -/** - * A portion of a registry dealing with a single superclass or - * interface. - */ -class SubRegistry { - - ServiceRegistry registry; - - Class category; - - // Provider Objects organized by partial oridering - PartiallyOrderedSet poset = new PartiallyOrderedSet(); - - // Class -> Provider Object of that class - Map,Object> map = new HashMap<>(); - - public SubRegistry(ServiceRegistry registry, Class category) { - this.registry = registry; - this.category = category; - } - - public boolean registerServiceProvider(Object provider) { - Object oprovider = map.get(provider.getClass()); - boolean present = oprovider != null; - - if (present) { - deregisterServiceProvider(oprovider); - } - map.put(provider.getClass(), provider); - poset.add(provider); - if (provider instanceof RegisterableService) { - RegisterableService rs = (RegisterableService)provider; - rs.onRegistration(registry, category); - } - - return !present; - } - - /** - * If the provider was not previously registered, do nothing. - * - * @return true if the provider was previously registered. 
- */ - public boolean deregisterServiceProvider(Object provider) { - Object oprovider = map.get(provider.getClass()); - - if (provider == oprovider) { - map.remove(provider.getClass()); - poset.remove(provider); - if (provider instanceof RegisterableService) { - RegisterableService rs = (RegisterableService)provider; - rs.onDeregistration(registry, category); - } - - return true; - } - return false; - } - - public boolean contains(Object provider) { - Object oprovider = map.get(provider.getClass()); - return oprovider == provider; - } - - public boolean setOrdering(Object firstProvider, - Object secondProvider) { - return poset.setOrdering(firstProvider, secondProvider); - } - - public boolean unsetOrdering(Object firstProvider, - Object secondProvider) { - return poset.unsetOrdering(firstProvider, secondProvider); - } - - public Iterator getServiceProviders(boolean useOrdering) { - if (useOrdering) { - return poset.iterator(); - } else { - return map.values().iterator(); - } - } - - public T getServiceProviderByClass(Class providerClass) { - return (T)map.get(providerClass); - } - - public void clear() { - Iterator iter = map.values().iterator(); - while (iter.hasNext()) { - Object provider = iter.next(); - iter.remove(); - - if (provider instanceof RegisterableService) { - RegisterableService rs = (RegisterableService)provider; - rs.onDeregistration(registry, category); - } - } - poset.clear(); - } - - public void finalize() { - clear(); - } -} - - -/** - * A class for wrapping Iterators with a filter function. - * This provides an iterator for a subset without duplication. - */ -class FilterIterator implements Iterator { - - private Iterator iter; - private ServiceRegistry.Filter filter; - - private T next = null; - - public FilterIterator(Iterator iter, - ServiceRegistry.Filter filter) { - this.iter = iter; - this.filter = filter; - advance(); - } - - private void advance() { - while (iter.hasNext()) { - T elt = iter.next(); - if (filter.filter(elt)) { - next = elt; - return; - } - } - - next = null; - } - - public boolean hasNext() { - return next != null; - } - - public T next() { - if (next == null) { - throw new NoSuchElementException(); - } - T o = next; - advance(); - return o; - } - - public void remove() { - throw new UnsupportedOperationException(); - } -} From 6cfe538cac9ed16873e277d117eacfd1e4fd7fd9 Mon Sep 17 00:00:00 2001 From: bencomp Date: Tue, 18 Oct 2022 00:03:48 +0200 Subject: [PATCH 0004/1092] Update HTTP URL to HTTPS URL --- .../source/_static/navbarscroll.js | 2 +- .../source/_templates/navbar.html | 32 +++++++++---------- doc/sphinx-guides/source/admin/monitoring.rst | 2 +- .../source/api/client-libraries.rst | 6 ++-- .../source/api/external-tools.rst | 4 +-- .../source/api/getting-started.rst | 2 +- doc/sphinx-guides/source/api/intro.rst | 2 +- doc/sphinx-guides/source/api/native-api.rst | 12 +++---- doc/sphinx-guides/source/api/sword.rst | 12 +++---- doc/sphinx-guides/source/conf.py | 2 +- .../source/developers/dev-environment.rst | 8 ++--- .../source/developers/documentation.rst | 6 ++-- doc/sphinx-guides/source/developers/intro.rst | 6 ++-- .../source/developers/testing.rst | 8 ++--- doc/sphinx-guides/source/developers/tools.rst | 8 ++--- .../source/developers/unf/index.rst | 2 +- .../source/developers/unf/unf-v3.rst | 6 ++-- .../source/developers/unf/unf-v6.rst | 2 +- .../source/developers/version-control.rst | 2 +- doc/sphinx-guides/source/index.rst | 4 +-- .../source/installation/config.rst | 26 +++++++-------- .../source/installation/installation-main.rst 
 .../source/installation/intro.rst            |  4 +--
 .../source/installation/oauth2.rst           |  2 +-
 .../source/installation/oidc.rst             |  2 +-
 .../source/installation/prerequisites.rst    |  4 +--
 .../source/installation/shibboleth.rst       |  6 ++--
 .../source/style/foundations.rst             | 16 +++++-----
 doc/sphinx-guides/source/style/patterns.rst  | 28 ++++++++--------
 doc/sphinx-guides/source/user/account.rst    |  2 +-
 doc/sphinx-guides/source/user/appendix.rst   | 10 +++---
 .../source/user/dataset-management.rst       |  6 ++--
 .../user/tabulardataingest/ingestprocess.rst |  4 +--
 33 files changed, 122 insertions(+), 122 deletions(-)

diff --git a/doc/sphinx-guides/source/_static/navbarscroll.js b/doc/sphinx-guides/source/_static/navbarscroll.js
index 66c9d4d7995..735f80870cd 100644
--- a/doc/sphinx-guides/source/_static/navbarscroll.js
+++ b/doc/sphinx-guides/source/_static/navbarscroll.js
@@ -1,6 +1,6 @@
 /*
     Use to fix hidden section headers behind the navbar when using links with targets
-    See: http://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header
+    See: https://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header
 */
 $jqTheme(document).ready(function() {
     $jqTheme('a[href*="#"]:not([href="#"])').on('click', function() {
diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html
index 538cccf74d7..c7b81dcb937 100644
--- a/doc/sphinx-guides/source/_templates/navbar.html
+++ b/doc/sphinx-guides/source/_templates/navbar.html
[The body of this diff did not survive extraction: the HTML markup of navbar.html was stripped. The surviving hunk headers (@@ -15,7 +15,7 @@, @@ -24,15 +24,15 @@, @@ -49,18 +49,18 @@) and link texts ("Dataverse Project", "Community", "Contact") indicate that the hunks switch the navbar branding and menu links from http:// to https://.]
diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst
index a4affda1302..e902d5fdcc9 100644
--- a/doc/sphinx-guides/source/admin/monitoring.rst
+++ b/doc/sphinx-guides/source/admin/monitoring.rst
@@ -14,7 +14,7 @@ In production you'll want to monitor the usual suspects such as CPU, memory, fre
 Munin
 +++++
 
-http://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps.
+https://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps.
 
 Enable the EPEL yum repo (if you haven't already):
 
diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst
index 634f03a8125..388a9d641ed 100755
--- a/doc/sphinx-guides/source/api/client-libraries.rst
+++ b/doc/sphinx-guides/source/api/client-libraries.rst
@@ -13,7 +13,7 @@ Python
 
 There are two Python modules for interacting with Dataverse Software APIs.
 
-`pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_.
+`pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_.
 
 `dataverse-client-python `_ had its initial release in 2015. `Robert Liebowitz `_ created this library while at the `Center for Open Science (COS) `_ and the COS uses it to integrate the `Open Science Framework (OSF) `_ with a Dataverse installation via an add-on which itself is open source and listed on the :doc:`/api/apps` page.
@@ -30,14 +30,14 @@ R
 
 https://github.com/IQSS/dataverse-client-r is the official R package for Dataverse Software APIs. The latest release can be installed from `CRAN `_. The R client can search and download datasets. It is useful when automatically (instead of manually) downloading data files as part of a script. For bulk edit and upload operations, we currently recommend pyDataverse.
 
-The package is currently maintained by `Shiro Kuriwaki `_. It was originally created by `Thomas Leeper `_ and was previously maintained by `Will Beasley `_.
+The package is currently maintained by `Shiro Kuriwaki `_. It was originally created by `Thomas Leeper `_ and was previously maintained by `Will Beasley `_.
 
 Java
 ----
 
 https://github.com/IQSS/dataverse-client-java is the official Java library for Dataverse Software APIs.
 
-`Richard Adams `_ from `ResearchSpace `_ created and maintains this library.
+`Richard Adams `_ from `ResearchSpace `_ created and maintains this library.
 
 Ruby
 ----
diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst
index d72a6f62004..8c6c9fa8d46 100644
--- a/doc/sphinx-guides/source/api/external-tools.rst
+++ b/doc/sphinx-guides/source/api/external-tools.rst
@@ -11,7 +11,7 @@ Introduction
 
 External tools are additional applications the user can access or open from your Dataverse installation to preview, explore, and manipulate data files and datasets.
The term "external" is used to indicate that the tool is not part of the main Dataverse Software. -Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org +Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org In short, you will be creating a manifest in JSON format that describes not only how to construct URLs for your tool, but also what types of files your tool operates on, where it should appear in the Dataverse installation web interfaces, etc. @@ -94,7 +94,7 @@ Terminology toolParameters **Query parameters** are supported and described below. - queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org``. + queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org``. query parameter keys An **arbitrary string** to associate with a value that is populated with a reserved word (described below). As the author of the tool, you have control over what "key" you would like to be passed to your tool. For example, if you want to have your tool receive and operate on the query parameter "dataverseFileId=42" instead of just "fileId=42", that's fine. diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index c465b726421..fd7c561cdf0 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -9,7 +9,7 @@ If you are a researcher or curator who wants to automate parts of your workflow, Servers You Can Test With ------------------------- -Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. +Rather than using a production Dataverse installation, API users are welcome to use https://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. 
diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst index 933932cd7b9..6c61bb8c20d 100755 --- a/doc/sphinx-guides/source/api/intro.rst +++ b/doc/sphinx-guides/source/api/intro.rst @@ -237,7 +237,7 @@ Dataverse Software API questions are on topic in all the usual places: - The dataverse-community Google Group: https://groups.google.com/forum/#!forum/dataverse-community - The Dataverse Project community calls: https://dataverse.org/community-calls -- The Dataverse Project chat room: http://chat.dataverse.org +- The Dataverse Project chat room: https://chat.dataverse.org - The Dataverse Project ticketing system: support@dataverse.org After your question has been answered, you are welcome to help improve the :doc:`faq` section of this guide. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 93e1c36f179..578b35011ff 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -9,7 +9,7 @@ The Dataverse Software exposes most of its GUI functionality via a REST-based AP .. _CORS: https://www.w3.org/TR/cors/ -.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``http://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded. +.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``https://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded. .. contents:: |toctitle| :local: @@ -508,7 +508,7 @@ The fully expanded example above (without environment variables) looks like this curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot -.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. +.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST https://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. .. _create-dataset-command: @@ -720,7 +720,7 @@ Getting its draft version: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB - curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" https://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER The fully expanded example above (without environment variables) looks like this: @@ -2226,7 +2226,7 @@ The fully expanded example above (without environment variables) looks like this Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). -- JHOVE: http://jhove.openpreservation.org +- JHOVE: https://jhove.openpreservation.org - The file extension (e.g. 
".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. @@ -2413,7 +2413,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ - http://demo.dataverse.org/api/files/24/metadata + https://demo.dataverse.org/api/files/24/metadata A curl example using a ``PERSISTENT_ID`` @@ -2614,7 +2614,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/prov-freeform?persistentId=doi:10.5072/FK2/AAA000" -H "Content-type:application/json" --upload-file provenance.json -See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from http://openprovenance.org (c.f. Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). +See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from https://openprovenance.org (c.f. Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). Delete Provenance JSON for an uploaded file ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst index 11b43e98774..c9ac83bc204 100755 --- a/doc/sphinx-guides/source/api/sword.rst +++ b/doc/sphinx-guides/source/api/sword.rst @@ -9,19 +9,19 @@ SWORD_ stands for "Simple Web-service Offering Repository Deposit" and is a "pro About ----- -Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`. +Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`. -The Dataverse Software implements most of SWORDv2_, which is specified at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. +The Dataverse Software implements most of SWORDv2_, which is specified at https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . 
Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse Software 4.0 datasets can also be created via JSON using the "native" API. SWORD is limited to the dozen or so fields listed below in the crosswalk, but the native API allows you to populate all metadata fields available in a Dataverse installation. -.. _SWORD: http://en.wikipedia.org/wiki/SWORD_%28protocol%29 +.. _SWORD: https://en.wikipedia.org/wiki/SWORD_%28protocol%29 .. _SWORDv2: http://swordapp.org/sword-v2/sword-v2-specifications/ .. _RFC 5023: https://tools.ietf.org/html/rfc5023 -.. _SWORDv2 specification: http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html +.. _SWORDv2 specification: https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html .. _sword-auth: @@ -86,7 +86,7 @@ New features as of v1.1 - "Contact E-mail" is automatically populated from dataset owner's email. -- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References `_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A". +- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References `_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A". - Zero-length files are now allowed (but not necessarily encouraged). @@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:creator | authorName (LastName, FirstName) | Y | Author(s) for the Dataset. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ -|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:description | dsDescriptionValue | Y | Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract. 
| +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 880ed561720..5ff538c3c46 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -432,7 +432,7 @@ # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'http://docs.python.org/': None} +intersphinx_mapping = {'https://docs.python.org/': None} # Suppress "WARNING: unknown mimetype for ..." https://github.com/IQSS/dataverse/issues/3391 suppress_warnings = ['epub.unknown_project_files'] rst_prolog = """ diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index e44a70a405f..2139b85c64a 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -34,7 +34,7 @@ On Linux, you are welcome to use the OpenJDK available from package managers. Install Netbeans or Maven ~~~~~~~~~~~~~~~~~~~~~~~~~ -NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. +NetBeans IDE is recommended, and can be downloaded from https://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section. @@ -74,7 +74,7 @@ On Mac, run this command: ``brew install jq`` -On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/ +On Linux, install ``jq`` from your package manager or download a binary from https://stedolan.github.io/jq/ Install Payara ~~~~~~~~~~~~~~ @@ -117,7 +117,7 @@ On Linux, you should just install PostgreSQL using your favorite package manager Install Solr ~~~~~~~~~~~~ -`Solr `_ 8.11.1 is required. +`Solr `_ 8.11.1 is required. To install Solr, execute the following commands: @@ -127,7 +127,7 @@ To install Solr, execute the following commands: ``cd /usr/local/solr`` -``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` +``curl -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` ``tar xvfz solr-8.11.1.tgz`` diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index b20fd112533..46fc268461b 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -34,7 +34,7 @@ If you would like to read more about the Dataverse Project's use of GitHub, plea Building the Guides with Sphinx ------------------------------- -The Dataverse guides are written using Sphinx (http://sphinx-doc.org). We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes. +The Dataverse guides are written using Sphinx (https://sphinx-doc.org). 
We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes. Installing Sphinx ~~~~~~~~~~~~~~~~~ @@ -58,7 +58,7 @@ In some parts of the documentation, graphs are rendered as images using the Sphi Building the guides requires the ``dot`` executable from GraphViz. -This requires having `GraphViz `_ installed and either having ``dot`` on the path or +This requires having `GraphViz `_ installed and either having ``dot`` on the path or `adding options to the make call `_. Editing and Building the Guides @@ -67,7 +67,7 @@ Editing and Building the Guides To edit the existing documentation: - Create a branch (see :ref:`how-to-make-a-pull-request`). -- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to http://guides.dataverse.org. +- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to https://guides.dataverse.org. - Using your preferred text editor, open and edit the necessary files, or create new ones. Once you are done, open a terminal, change directories to ``doc/sphinx-guides``, activate (or reactivate) your Python virtual environment, and build the guides. diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index 7f4e8c1ba34..6469a43b5ab 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Welcome! `The Dataverse Project `_ is an `open source `_ project that loves `contributors `_! +Welcome! `The Dataverse Project `_ is an `open source `_ project that loves `contributors `_! .. contents:: |toctitle| :local: @@ -19,7 +19,7 @@ To get started, you'll want to set up your :doc:`dev-environment` and make sure Getting Help ------------ -If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as http://chat.dataverse.org, the `dataverse-dev `_ mailing list, `community calls `_, or support@dataverse.org. +If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as https://chat.dataverse.org, the `dataverse-dev `_ mailing list, `community calls `_, or support@dataverse.org. .. _core-technologies: @@ -52,7 +52,7 @@ Related Guides If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. If you have front-end UI questions, please see the :doc:`/style/index`. -If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). +If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). 
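[Editor's note: looping back to the guide-building workflow described earlier in this diff, here is one possible session. It assumes the guides use the standard Sphinx ``Makefile`` layout and ship a ``requirements.txt`` in ``doc/sphinx-guides``; these are assumptions for illustration, not documented commands:]

    # From a checkout of the dataverse repository:
    cd doc/sphinx-guides
    python3 -m venv venv && source venv/bin/activate  # create and activate a virtual environment
    pip install -r requirements.txt                   # assumed to pull in Sphinx and its extensions
    make html                                         # standard Sphinx Makefile target; output lands in a build directory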
 Related Projects
 ----------------
diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst
index 4b3d5fd0a55..132120291c2 100755
--- a/doc/sphinx-guides/source/developers/testing.rst
+++ b/doc/sphinx-guides/source/developers/testing.rst
@@ -46,7 +46,7 @@ The main takeaway should be that we care about unit testing enough to measure th
 Writing Unit Tests with JUnit
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true.
+We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true.
 
 We support both (legacy) JUnit 4.x tests (forming the majority of our tests) and newer JUnit 5 based testing.
 
@@ -238,11 +238,11 @@ Remember, it’s only a test (and it's not graded)! Some guidelines to bear in m
 - Map out which logical functions you want to test
 - Understand what’s being tested and ensure it’s repeatable
 - Assert the conditions of success / return values for each operation
-  * A useful resource would be `HTTP status codes `_
+  * A useful resource would be `HTTP status codes `_
 - Let the code do the labor; automate everything that happens when you run your test file.
 - Just as with any development, if you’re stuck: ask for help!
 
-To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven `_. You should have Maven installed as per the `Development Environment `_ guide, but if not it’s easily done via Homebrew: ``brew install maven``.
+To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven `_. You should have Maven installed as per the `Development Environment `_ guide, but if not it’s easily done via Homebrew: ``brew install maven``.
 
 Once installed, you may run commands with ``mvn [options] [<goal(s)>] [<phase(s)>]``.
 
@@ -487,7 +487,7 @@ Future Work on Integration Tests
 - Automate testing of dataverse-client-python: https://github.com/IQSS/dataverse-client-python/issues/10
 - Work with @leeper on testing the R client: https://github.com/IQSS/dataverse-client-r
 - Review and attempt to implement "API Test Checklist" from @kcondon at https://docs.google.com/document/d/199Oq1YwQ4pYCguaeW48bIN28QAitSk63NbPYxJHCCAE/edit?usp=sharing
-- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and http://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/
+- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and https://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/
 - Consistent logging of API Tests. Show test name at the beginning and end and status codes returned.
 - expected passing and known/expected failing integration tests: https://github.com/IQSS/dataverse/issues/4438
 
diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst
index cbd27d6e8d2..17673ae499e 100755
--- a/doc/sphinx-guides/source/developers/tools.rst
+++ b/doc/sphinx-guides/source/developers/tools.rst
@@ -43,20 +43,20 @@ On Windows if you see an error like ``/usr/bin/perl^M: bad interpreter`` you mig
 PlantUML
 ++++++++
 
-PlantUML is used to create diagrams in the guides and other places.
Download it from http://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``.
+PlantUML is used to create diagrams in the guides and other places. Download it from https://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``.
 
 Eclipse Memory Analyzer Tool (MAT)
 ++++++++++++++++++++++++++++++++++
 
 The Memory Analyzer Tool (MAT) from Eclipse can help you analyze heap dumps, showing you "leak suspects" such as seen at https://github.com/payara/Payara/issues/350#issuecomment-115262625
 
-It can be downloaded from http://www.eclipse.org/mat
+It can be downloaded from https://www.eclipse.org/mat
 
 If the heap dump provided to you was created with ``gcore`` (such as with ``gcore -o /tmp/app.core $app_pid``) rather than ``jmap``, you will need to convert the file before you can open it in MAT. Using ``app.core.13849`` as example of the original 33 GB file, here is how you could convert it into a 26 GB ``app.core.13849.hprof`` file. Please note that this operation took almost 90 minutes:
 
 ``/usr/java7/bin/jmap -dump:format=b,file=app.core.13849.hprof /usr/java7/bin/java app.core.13849``
 
-A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also http://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer
+A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also https://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer
 
 PageKite
 ++++++++
 
@@ -73,7 +73,7 @@ The first time you run ``./pagekite.py`` a file at ``~/.pagekite.rc`` will be
 created. You can edit this file to configure PageKite to serve up port 8080
 (the default app server HTTP port) or the port of your choosing.
 
-According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software that meets http://opensource.org/docs/definition.php such as the Dataverse Project.
+According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software that meets https://opensource.org/docs/definition.php such as the Dataverse Project.
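[Editor's note: for orientation, a sketch of the usual PageKite invocation once an account exists; ``yourname.pagekite.me`` is a placeholder kite name, not a real one:]

    # Expose the app server on local port 8080 under a public kite name.
    ./pagekite.py 8080 yourname.pagekite.me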
 
 MSV
 +++
 
diff --git a/doc/sphinx-guides/source/developers/unf/index.rst b/doc/sphinx-guides/source/developers/unf/index.rst
index 2423877348f..856de209e82 100644
--- a/doc/sphinx-guides/source/developers/unf/index.rst
+++ b/doc/sphinx-guides/source/developers/unf/index.rst
@@ -27,7 +27,7 @@ with Dataverse Software 2.0 and throughout the 3.* lifecycle, UNF v.5
 UNF v.6. Two parallel implementations, in R and Java, will be available, for
 cross-validation.
 
-Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at http://j.mp/2ovSzoT
+Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at https://j.mp/2ovSzoT
 
 **Contents:**
 
diff --git a/doc/sphinx-guides/source/developers/unf/unf-v3.rst b/doc/sphinx-guides/source/developers/unf/unf-v3.rst
index 3f0018d7fa5..98c07b398e0 100644
--- a/doc/sphinx-guides/source/developers/unf/unf-v3.rst
+++ b/doc/sphinx-guides/source/developers/unf/unf-v3.rst
@@ -34,11 +34,11 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n
 
 1. Terminate character strings representing nonmissing values with a POSIX end-of-line character.
 
-2. Encode each character string with `Unicode bit encoding `_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8.
+2. Encode each character string with `Unicode bit encoding `_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8.
 
 3. Combine the vector of character strings into a single sequence, with each character string separated by a POSIX end-of-line character and a null byte.
 
-4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 `_ for Version 4. The resulting hash is `base64 `_ encoded to support readability.
+4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 `_ for Version 4. The resulting hash is `base64 `_ encoded to support readability.
 
 5. Calculate the UNF for each lower-level data object, using a consistent UNF version and level of precision across the individual UNFs being combined.
 
@@ -49,4 +49,4 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n
 8. Combine UNFs from multiple variables to form a single UNF for an entire data frame, and then combine UNFs for a set of data frames to form a single UNF that represents an entire research study.
 
 Learn more:
-Software for computing UNFs is available in an R Module, which includes a Windows standalone tool and code for Stata and SAS languages. Also see the following for more details: Micah Altman and Gary King. 2007. "A Proposed Standard for the Scholarly Citation of Quantitative Data," D-Lib Magazine, Vol. 13, No. 3/4 (March).
(Abstract: `HTML `_ | Article: `PDF `_) diff --git a/doc/sphinx-guides/source/developers/unf/unf-v6.rst b/doc/sphinx-guides/source/developers/unf/unf-v6.rst index 9648bae47c8..b2495ff3dd9 100644 --- a/doc/sphinx-guides/source/developers/unf/unf-v6.rst +++ b/doc/sphinx-guides/source/developers/unf/unf-v6.rst @@ -156,7 +156,7 @@ For example, to specify a non-default precision the parameter it is specified us | Allowed values are {``128`` , ``192`` , ``196`` , ``256``} with ``128`` being the default. | ``R1`` - **truncate** numeric values to ``N`` digits, **instead of rounding**, as previously described. -`Dr. Micah Altman's classic UNF v5 paper `_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project). +`Dr. Micah Altman's classic UNF v5 paper `_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project). Note: we do not recommend truncating character strings at fewer bytes than the default ``128`` (the ``X`` parameter). At the very least this number **must** be high enough so that the printable UNFs of individual variables or files are not truncated, when calculating combined UNFs of files or datasets, respectively. diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index aacc245af5a..31fc0a4e602 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -24,7 +24,7 @@ The goals of the Dataverse Software branching strategy are: - allow for concurrent development - only ship stable code -We follow a simplified "git flow" model described at http://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix". +We follow a simplified "git flow" model described at https://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix". Branches ~~~~~~~~ diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst index f7e81756e5b..37bb2353ff7 100755 --- a/doc/sphinx-guides/source/index.rst +++ b/doc/sphinx-guides/source/index.rst @@ -42,7 +42,7 @@ Other Resources Additional information about the Dataverse Project itself including presentations, information about upcoming releases, data management and citation, and announcements can be found at -`http://dataverse.org/ `__ +`https://dataverse.org/ `__ **User Group** @@ -65,7 +65,7 @@ The support email address is `support@dataverse.org `__ -or use `GitHub pull requests `__, +or use `GitHub pull requests `__, if you have some code, scripts or documentation that you'd like to share. If you have a **security issue** to report, please email `security@dataverse.org `__. 
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index f2de9d5702f..0edb09784e1 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -112,7 +112,7 @@ The need to redirect port HTTP (port 80) to HTTPS (port 443) for security has al
 
 Your decision to proxy or not should primarily be driven by which features of the Dataverse Software you'd like to use. If you'd like to use Shibboleth, the decision is easy because proxying or "fronting" Payara with Apache is required. The details are covered in the :doc:`shibboleth` section.
 
-Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simplify the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including some `notes used by the Dataverse Project team `_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips `_ on the web.
+Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simplify the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including some `notes used by the Dataverse Project team `_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips `_ on the web.
 
 Still not convinced you should put Payara behind another web server? Even if you manage to get your SSL certificate into Payara, how are you going to run Payara on low ports such as 80 and 443? Are you going to run Payara as root? Bad idea. This is a security risk. Under "Additional Recommendations" under "Securing Your Installation" above you are advised to configure Payara to run as a user other than root.
 
@@ -124,7 +124,7 @@ If you really don't want to front Payara with any proxy (not recommended), you c
 
 ``./asadmin set server-config.network-config.network-listeners.network-listener.http-listener-2.port=443``
 
-What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader. Answers here may be helpful: http://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to
+What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader.
Answers here may be helpful: https://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to If you are running an installation with Apache and Payara on the same server, and would like to restrict Payara from responding to any requests to port 8080 from external hosts (in other words, not through Apache), you can restrict the AJP listener to localhost only with: @@ -157,7 +157,7 @@ and restart Payara. The prefix can be configured via the API (where it is referr Once this is done, you will be able to publish datasets and files, but the persistent identifiers will not be citable, and they will only resolve from the DataCite test environment (and then only if the Dataverse installation from which you published them is accessible - DOIs minted from your laptop will not resolve). Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. -To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (http://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. +To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (https://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. Once you have your DOI or Handle account credentials and a namespace, configure your Dataverse installation to use them using the JVM options and database settings below. @@ -205,7 +205,7 @@ Here are the configuration options for handles: - :ref:`:IndependentHandleService <:IndependentHandleService>` (optional) - :ref:`:HandleAuthHandle <:HandleAuthHandle>` (optional) -Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. +Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. .. _auth-modes: @@ -288,7 +288,7 @@ Multiple file stores should specify different directories (which would nominally Swift Storage +++++++++++++ -Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage `_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container. +Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage `_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container. 
 **In order to configure a Swift installation,** you need to complete these steps to properly modify the JVM options:
 
 First, run all the following create commands with your Swift endpoint information and credentials:
 
   ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..username.endpoint1=your-username"
   ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..endpoint.endpoint1=your-swift-endpoint"
 
-``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will be assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``http://rdgw.swift.example.org/swift/v1``.
+``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will be assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``https://rdgw.swift.example.org/swift/v1``.
 
 Then create a password alias by running (without changes):
 
@@ -400,7 +400,7 @@ You'll need an AWS account with an associated S3 bucket for your installation to
 
 **Make note** of the **bucket's name** and the **region** its data is hosted in. To **create a user** with full S3 access and nothing more for security reasons, we recommend using IAM
-(Identity and Access Management). See `IAM User Guide `_
+(Identity and Access Management). See `IAM User Guide `_
 for more info on this process.
 
 **Generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user.
 
@@ -410,7 +410,7 @@ for more info on this process.
 
    If you are hosting your Dataverse installation on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the ``~/.aws/config`` file to specify the region. For more information on this option, see
-   http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
+   https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
 
 Preparation When Using Custom S3-Compatible Service
 ###################################################
 
@@ -471,7 +471,7 @@ Additional profiles can be added to these files by appending the relevant inform
    aws_access_key_id = 
    aws_secret_access_key = 
 
-Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation `_:
+Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation `_:
 "In order to separate credentials from less sensitive options, region and output format are stored in a separate file
 named config in the same folder")
 
@@ -598,7 +598,7 @@ You may provide the values for these via any of the
 
 Reported Working S3-Compatible Storage
 ######################################
 
-`Minio v2018-09-12 `_
+`Minio v2018-09-12 `_
   Set ``dataverse.files..path-style-access=true``, as Minio works path-based. Works pretty smoothly and is easy to set up.
**Can be used for quick testing, too:** just use the example values above. Uses the public (read: unsecure and possibly slow) https://play.minio.io:9000 service. @@ -2063,7 +2063,7 @@ Note: by default, the URL is composed from the settings ``:GuidesBaseUrl`` and ` :GuidesBaseUrl ++++++++++++++ -Set ``:GuidesBaseUrl`` to override the default value "http://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful. +Set ``:GuidesBaseUrl`` to override the default value "https://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful. ``curl -X PUT -d http://dataverse.example.edu http://localhost:8080/api/admin/settings/:GuidesBaseUrl`` @@ -2084,14 +2084,14 @@ Set ``:NavbarSupportUrl`` to a fully-qualified URL which will be used for the "S Note that this will override the default behaviour for the "Support" menu option, which is to display the Dataverse collection 'feedback' dialog. -``curl -X PUT -d http://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl`` +``curl -X PUT -d https://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl`` :MetricsUrl +++++++++++ Make the metrics component on the root Dataverse collection a clickable link to a website where you present metrics on your Dataverse installation, perhaps one of the community-supported tools mentioned in the :doc:`/admin/reporting-tools-and-queries` section of the Admin Guide. -``curl -X PUT -d http://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl`` +``curl -X PUT -d https://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl`` .. _:MaxFileUploadSizeInBytes: diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 4b000f1ef9e..5cb6e7153d4 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -98,7 +98,7 @@ The supplied site URL will be saved under the JVM option :ref:`dataverse.siteUrl The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara5/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara. -.. _JHOVE: http://jhove.openpreservation.org +.. _JHOVE: https://jhove.openpreservation.org Logging In ---------- @@ -118,7 +118,7 @@ Use the following credentials to log in: - username: dataverseAdmin - password: admin -Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg `_ asking to be added to the map at http://dataverse.org :) +Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg `_ asking to be added to the map at https://dataverse.org :) Trouble? See if you find an answer in the troubleshooting section below. 
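[Editor's note: before digging into the troubleshooting section, a quick smoke test; the unauthenticated info endpoint should answer as soon as the application is deployed, assuming the default app server port of 8080:]

    # Expect a small JSON document reporting the deployed Dataverse Software version.
    curl http://localhost:8080/api/info/version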
@@ -197,7 +197,7 @@ Be sure you save the changes made here and then restart your Payara server to te UnknownHostException While Deploying ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also http://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873 +If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also https://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873 .. _fresh-reinstall: diff --git a/doc/sphinx-guides/source/installation/intro.rst b/doc/sphinx-guides/source/installation/intro.rst index 2251af7b81b..e5b10883d4b 100644 --- a/doc/sphinx-guides/source/installation/intro.rst +++ b/doc/sphinx-guides/source/installation/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Welcome! Thanks for installing `The Dataverse Project `_! +Welcome! Thanks for installing `The Dataverse Project `_! .. contents:: |toctitle| :local: @@ -36,7 +36,7 @@ Getting Help To get help installing or configuring a Dataverse installation, please try one or more of: - posting to the `dataverse-community `_ Google Group. -- asking at http://chat.dataverse.org +- asking at https://chat.dataverse.org - emailing support@dataverse.org to open a private ticket at https://help.hmdc.harvard.edu Information to Send to Support When Installation Fails diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst index 0dfdb0393e0..cd765c91b7f 100644 --- a/doc/sphinx-guides/source/installation/oauth2.rst +++ b/doc/sphinx-guides/source/installation/oauth2.rst @@ -11,7 +11,7 @@ As explained under "Auth Modes" in the :doc:`config` section, OAuth2 is one of t `OAuth2 `_ is an authentication protocol that allows systems to share user data, while letting the users control what data is being shared. When you see buttons stating "login with Google" or "login through Facebook", OAuth2 is probably involved. For the purposes of this section, we will shorten "OAuth2" to just "OAuth." OAuth can be compared and contrasted with :doc:`shibboleth`. -The Dataverse Software supports four OAuth providers: `ORCID `_, `Microsoft Azure Active Directory (AD) `_, `GitHub `_, and `Google `_. +The Dataverse Software supports four OAuth providers: `ORCID `_, `Microsoft Azure Active Directory (AD) `_, `GitHub `_, and `Google `_. In addition :doc:`oidc` are supported, using a standard based on OAuth2. diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index a40ef758dc7..ee154ca9b9c 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -51,7 +51,7 @@ Just like with :doc:`oauth2` you need to obtain a *Client ID* and a *Client Secr You need to apply for credentials out-of-band. The Dataverse installation will discover all necessary metadata for a given provider on its own (this is `part of the standard -`_). +`_). To enable this, you need to specify an *Issuer URL* when creating the configuration for your provider (see below). 
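[Editor's note: because OIDC discovery is standardized, you can sanity-check a candidate Issuer URL by hand; a sketch, with ``provider.example.edu`` standing in for your provider:]

    # The discovery document lives at a path fixed by the OpenID Connect spec.
    curl https://provider.example.edu/.well-known/openid-configuration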
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index 3cf876a2251..7d458bbc37b 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -26,7 +26,7 @@ Installing Java
 
 The Dataverse Software should run fine with only the Java Runtime Environment (JRE) installed, but installing the Java Development Kit (JDK) is recommended so that useful tools for troubleshooting production environments are available. We recommend using Oracle JDK or OpenJDK.
 
-The Oracle JDK can be downloaded from http://www.oracle.com/technetwork/java/javase/downloads/index.html
+The Oracle JDK can be downloaded from https://www.oracle.com/technetwork/java/javase/downloads/index.html
 
 On a RHEL/derivative, install OpenJDK (devel version) using yum::
 
@@ -261,7 +261,7 @@ Installing jq
 or you may install it manually::
 
         # cd /usr/bin
-        # wget http://stedolan.github.io/jq/download/linux64/jq
+        # wget https://stedolan.github.io/jq/download/linux64/jq
         # chmod +x jq
         # jq --version
 
diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst
index cd0fbda77a6..3a2e1b99c70 100644
--- a/doc/sphinx-guides/source/installation/shibboleth.rst
+++ b/doc/sphinx-guides/source/installation/shibboleth.rst
@@ -76,7 +76,7 @@ A ``jk-connector`` network listener should have already been set up when you ran
 
 You can verify this with ``./asadmin list-network-listeners``.
 
-This enables the `AJP protocol `_ used in Apache configuration files below.
+This enables the `AJP protocol `_ used in Apache configuration files below.
 
 SSLEngine Warning Workaround
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -93,7 +93,7 @@ Configure Apache
 Enforce HTTPS
 ~~~~~~~~~~~~~
 
-To prevent attacks such as `FireSheep `_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place.
+To prevent attacks such as `FireSheep `_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place.
 
 Below is an example of how "rewrite rule" lines look within a ``VirtualHost`` block. Download a :download:`sample file <../_static/installation/files/etc/httpd/conf.d/dataverse.example.edu.conf>` , edit it to substitute your own hostname under ``ServerName``, and place it at ``/etc/httpd/conf.d/dataverse.example.edu.conf`` or a filename that matches your hostname. The file must be in ``/etc/httpd/conf.d`` and must end in ".conf" to be included in Apache's configuration.
 
@@ -235,7 +235,7 @@ Run semodule
 
 Silence is golden. No output is expected. This will place a file in ``/etc/selinux/targeted/modules/active/modules/shibboleth.pp`` and include "shibboleth" in the output of ``semodule -l``. See the ``semodule`` man page if you ever want to remove or disable the module you just added.
 
-Congrats! You've made the creator of http://stopdisablingselinux.com proud. :)
+Congrats!
You've made the creator of https://stopdisablingselinux.com proud. :) Restart Apache and Shibboleth ----------------------------- diff --git a/doc/sphinx-guides/source/style/foundations.rst b/doc/sphinx-guides/source/style/foundations.rst index 31e0c314a05..cc193666868 100755 --- a/doc/sphinx-guides/source/style/foundations.rst +++ b/doc/sphinx-guides/source/style/foundations.rst @@ -9,7 +9,7 @@ Foundation elements are the very basic building blocks to create a page in Datav Grid Layout =========== -`Bootstrap `__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts. +`Bootstrap `__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts. We use the fixed-width ``.container`` class which provides responsive widths (i.e. auto, 750px, 970px or 1170px) based on media queries for the page layout, with a series of rows and columns for the content. @@ -42,7 +42,7 @@ The grid layout uses ``.col-sm-*`` classes for horizontal groups of columns, ins Typography ========== -The typeface, text size, and line-height are set in the `Bootstrap CSS `__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ```` and all paragraphs. +The typeface, text size, and line-height are set in the `Bootstrap CSS `__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ```` and all paragraphs. .. code-block:: css @@ -57,7 +57,7 @@ The typeface, text size, and line-height are set in the `Bootstrap CSS `__. It provides the background, border, text and link colors used across the application. +The default color palette is set in the `Bootstrap CSS `__. It provides the background, border, text and link colors used across the application. Brand Colors @@ -138,7 +138,7 @@ We use our brand color, a custom burnt orange ``{color:#C55B28;}``, which is set Text Colors ----------- -Text color is the default setting from `Bootstrap CSS `__. +Text color is the default setting from `Bootstrap CSS `__. .. code-block:: css @@ -163,7 +163,7 @@ Text color is the default setting from `Bootstrap CSS `__. The hover state color is set to 15% darker. +Link color is the default setting from `Bootstrap CSS `__. The hover state color is set to 15% darker. **Please note**, there is a CSS override issue with the link color due to the use of both a Bootstrap stylesheet and a PrimeFaces stylesheet in the UI. We've added CSS such as ``.ui-widget-content a {color: #428BCA;}`` to our stylesheet to keep the link color consistent. @@ -204,7 +204,7 @@ Link color is the default setting from `Bootstrap CSS `__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes. +Contextual classes from `Bootstrap CSS `__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes. .. raw:: html @@ -259,7 +259,7 @@ We use various icons across the application, which we get from Bootstrap, FontCu Bootstrap Glyphicons -------------------- -There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap `__. We utilize these mainly as icons inside of buttons and in message blocks. 
+There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap `__. We utilize these mainly as icons inside of buttons and in message blocks. .. raw:: html @@ -305,7 +305,7 @@ The :doc:`/developers/fontcustom` section of the Developer Guide explains how to Socicon Icon Font ----------------- -We use `Socicon `__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels. +We use `Socicon `__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels. .. raw:: html diff --git a/doc/sphinx-guides/source/style/patterns.rst b/doc/sphinx-guides/source/style/patterns.rst index e96f17dc2ec..c6602ffa26e 100644 --- a/doc/sphinx-guides/source/style/patterns.rst +++ b/doc/sphinx-guides/source/style/patterns.rst @@ -1,7 +1,7 @@ Patterns ++++++++ -Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap `__ like tooltips and dropdowns, and AJAX components from `PrimeFaces `__ like datatables and commandlinks. +Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap `__ like tooltips and dropdowns, and AJAX components from `PrimeFaces `__ like datatables and commandlinks. .. contents:: |toctitle| :local: @@ -9,7 +9,7 @@ Patterns are what emerge when using the foundation elements together with basic Navbar ====== -The `Navbar component `__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right. +The `Navbar component `__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right. When logged in, the account name is a dropdown menu, linking the user to account-specific content and the log out link. @@ -74,7 +74,7 @@ When logged in, the account name is a dropdown menu, linking the user to account Breadcrumbs =========== -The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component `_ to iterate through the breadcrumbs. +The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component `_ to iterate through the breadcrumbs. .. raw:: html @@ -108,7 +108,7 @@ The breadcrumbs are displayed under the header, and provide a trail of links for Tables ====== -Most tables use the `DataTable components `__ from PrimeFaces and are styled using the `Tables component `__ from Bootstrap. +Most tables use the `DataTable components `__ from PrimeFaces and are styled using the `Tables component `__ from Bootstrap. .. raw:: html @@ -187,7 +187,7 @@ Most tables use the `DataTable components `__ from Bootstrap. Form elements like the `InputText component `__ from PrimeFaces are kept looking clean and consistent across each page. +Forms fulfill various functions across the site, but we try to style them consistently. 
We use the ``.form-horizontal`` layout, which uses ``.form-group`` to create a grid of rows for the labels and inputs. The consistent style of forms is maintained using the `Forms component `__ from Bootstrap. Form elements like the `InputText component `__ from PrimeFaces are kept looking clean and consistent across each page. .. raw:: html @@ -289,7 +289,7 @@ Here are additional form elements that are common across many pages, including r Buttons ======= -There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component `__ and `CommandLink component `__ from PrimeFaces, as well as the basic JSF `Link component `__ and `OutputLink component `__. Those are styled using the `Buttons component `__, `Button Groups component `__ and `Buttons Dropdowns component `__ from Bootstrap. +There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component `__ and `CommandLink component `__ from PrimeFaces, as well as the basic JSF `Link component `__ and `OutputLink component `__. Those are styled using the `Buttons component `__, `Button Groups component `__ and `Buttons Dropdowns component `__ from Bootstrap. Action Buttons -------------- @@ -668,7 +668,7 @@ Another variation of icon-only buttons uses the ``.btn-link`` style class from B Pagination ========== -We use the `Pagination component `__ from Bootstrap for paging through search results. +We use the `Pagination component `__ from Bootstrap for paging through search results. .. raw:: html @@ -738,7 +738,7 @@ We use the `Pagination component `__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial). +The `Labels component `__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial). .. raw:: html @@ -768,7 +768,7 @@ The `Labels component `__ from Boots Alerts ====== -For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component `__ from Bootstrap. +For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component `__ from Bootstrap. .. raw:: html @@ -859,9 +859,9 @@ Style classes can be added to ``p``, ``div``, ``span`` and other elements to add Images ====== -For images, we use the `GraphicImage component `__ from PrimeFaces, or the basic JSF `GraphicImage component `__. +For images, we use the `GraphicImage component `__ from PrimeFaces, or the basic JSF `GraphicImage component `__. -To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class `__ from Bootstrap. +To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class `__ from Bootstrap. .. raw:: html @@ -879,7 +879,7 @@ To display images in a responsive way, they are styled with ``.img-responsive``, Panels ====== -The most common of our containers, the `Panels component `__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the `Collapse plugin `__ from Bootstrap. 
+The most common of our containers, the `Panels component `__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the `Collapse plugin `__ from Bootstrap. .. raw:: html @@ -943,7 +943,7 @@ Tabs Tabs are used to provide content panes on a page that allow the user to view different sections of content without navigating to a different page. -We use the `TabView component `__ from PrimeFaces, which is styled using the `Tab component `__ from Bootstrap. +We use the `TabView component `__ from PrimeFaces, which is styled using the `Tab component `__ from Bootstrap. .. raw:: html @@ -989,7 +989,7 @@ Modals are dialog prompts that act as popup overlays, but don't create a new bro Buttons usually provide the UI prompt. A user clicks the button, which then opens a `Dialog component `__ or `Confirm Dialog component `__ from PrimeFaces that displays the modal with the necessary information and actions to take. -The modal is styled using the `Modal component `__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options. +The modal is styled using the `Modal component `__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options. .. raw:: html diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index 12cc54c7fde..792fad730cf 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -109,7 +109,7 @@ If you are leaving your institution and need to convert your Dataverse installat ORCID Log In ~~~~~~~~~~~~~ -You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org `_. +You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org `_. Create a Dataverse installation account using ORCID ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index b05459b6aaf..ae0ec37aff3 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -22,13 +22,13 @@ Supported Metadata Detailed below are what metadata schemas we support for Citation and Domain Specific Metadata in the Dataverse Project: -- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 3.1 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. -- `Geospatial Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. 
Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. +- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 3.1 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. +- `Geospatial Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. - `Social Science & Humanities Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, and Dublin Core. - `Astronomy and Astrophysics Metadata `__ (`see .tsv version `__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) - `VOResource Schema format `__ and is based on - `Virtual Observatory (VO) Discovery and Provenance Metadata `__. -- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. + `VOResource Schema format `__ and is based on + `Virtual Observatory (VO) Discovery and Provenance Metadata `__. +- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. - `Journal Metadata `__ (`see .tsv version `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__. Experimental Metadata diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 77a760ef838..a3637154050 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -192,7 +192,7 @@ Additional download options available for tabular data (found in the same drop-d - As tab-delimited data (with the variable names in the first row); - The original file uploaded by the user; - Saved as R data (if the original file was not in R format); -- Variable Metadata (as a `DDI Codebook `_ XML file); +- Variable Metadata (as a `DDI Codebook `_ XML file); - Data File Citation (currently in either RIS, EndNote XML, or BibTeX format); - All of the above, as a zipped bundle. @@ -297,7 +297,7 @@ You can also search for files within datasets that have been tagged as "Workflow Astronomy (FITS) ---------------- -Metadata found in the header section of `Flexible Image Transport System (FITS) files `_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level. +Metadata found in the header section of `Flexible Image Transport System (FITS) files `_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level. Compressed Files ---------------- @@ -388,7 +388,7 @@ Choosing a License ------------------ Each Dataverse installation provides a set of license(s) data can be released under, and whether users can specify custom terms instead (see below). 
-One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. +One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. If you want to apply one of the other available licenses to your dataset, you can change it on the Terms tab of your Dataset page. License Selection and Professional Norms diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index f1d5611ede9..33ae9b555e6 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -27,7 +27,7 @@ separately, in a relational database, so that it can be accessed efficiently by the application. For the purposes of archival preservation it can be exported, in plain text XML files, using a standardized, open `DDI Codebook -`_ +`_ format. (more info below) @@ -53,6 +53,6 @@ Tabular Metadata in the Dataverse Software The structure of the metadata defining tabular data variables used in the Dataverse Software was originally based on the `DDI Codebook -`_ format. +`_ format. You can see an example of DDI output under the :ref:`data-variable-metadata-access` section of the :doc:`/api/dataaccess` section of the API Guide. From 044ed40c17e1fa5fdbb7c8745a4671add25414c0 Mon Sep 17 00:00:00 2001 From: bencomp Date: Tue, 18 Oct 2022 00:32:55 +0200 Subject: [PATCH 0005/1092] Align table boundary in SWORD doc --- doc/sphinx-guides/source/api/sword.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst index c9ac83bc204..51391784bde 100755 --- a/doc/sphinx-guides/source/api/sword.rst +++ b/doc/sphinx-guides/source/api/sword.rst @@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:creator | authorName (LastName, FirstName) | Y | Author(s) for the Dataset. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ -|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:description | dsDescriptionValue | Y | Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract. 
| +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ From cbc42d5052f8a9afc30121082a44c128387e2023 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Tue, 8 Nov 2022 14:07:32 +0100 Subject: [PATCH 0006/1092] renamed and moved the direct upload JVM option in the documentation --- doc/release-notes/4.20-release-notes.md | 7 +------ doc/sphinx-guides/source/installation/config.rst | 7 ++++++- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index 79037d8cd8c..ec52b638274 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -90,14 +90,10 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid ### Direct S3 Upload Changes -Direct upload to S3 in UI and API is enabled per store by one new jvm option: +Direct upload to S3 is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" -This option makes direct upload the default in the UI. In the API, you can use either: direct upload or upload via Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: - - ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" - That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. @@ -133,7 +129,6 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. - The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) -- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset in any storage. (Via API only and when the uploading tool has direct access to the relevant storage used; i.e., upload the file first and register it via API!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2e68bfaa1ab..4f15ad81190 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,6 +249,12 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. 
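A rough shell sketch of the out-of-band workflow that the option introduced below enables; the store id ``s3``, the DOI, the storage identifier, and the jsonData fields are illustrative placeholders, and the authoritative field list lives in the API guide:

    # enable out-of-band upload for the store with id "s3" (restart Payara afterwards)
    ./asadmin create-jvm-options "-Ddataverse.files.s3.allow-out-of-band-upload=true"

    # push the file to the storage yourself (e.g. with the AWS CLI), then
    # register it with a dataset through the native API
    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
        "http://localhost:8080/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/EXAMPLE" \
        -F 'jsonData={"storageIdentifier": "s3://demo-bucket:18xyz-a1b2c3", "fileName": "data.csv", "mimeType": "text/csv", "checksum": {"@type": "SHA-1", "@value": "cf23df2207d99a74fbe169e3eba035e633b65d94"}}'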
+When using integration tools, a Dataverse installation can be configured to allow out-of-band upload by setting the ``dataverse.files.\.allow-out-of-band-upload`` JVM option to ``true``. +Files can then be uploaded by an integration tool with the ``datasets/{id}/add`` API call, or uploaded directly to the storage and registered in a dataset afterwards using the ``datasets/{id}/addFiles`` API call. +Note that using S3 storage with the ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes ``allow-out-of-band-upload`` and will enable direct upload even when ``allow-out-of-band-upload`` is not set (or is set to false). +In other words, the ``dataverse.files.\.allow-out-of-band-upload`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` API endpoints without redirecting uploads in the UI. +Enabling the ``upload-redirect`` option then enables direct upload automatically, without the need to enable ``allow-out-of-band-upload`` (setting it to ``false`` has no effect in that case). + The following sections describe how to set up various types of stores and how to configure for multiple stores. Multi-store Basics @@ -546,7 +552,6 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. ``false`` - dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset via API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 8e2dd9fa961..a2ff546ef0a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { public static boolean isDirectUploadEnabled(String driverId) { return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 0a41da4f7dd..db82df72b8a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1749,7 +1749,7 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files."
+ driverId + ".api-direct-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 4585d99a01f..b040f557895 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1112,7 +1112,7 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".api-direct-upload" is not used here + // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } From 4abac1ac15d77f2f059977254971cf4be0f3f1f1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 09:40:34 +0100 Subject: [PATCH 0007/1092] revert by accident editted old release notes --- doc/release-notes/4.20-release-notes.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index ec52b638274..e29953db101 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -93,9 +93,7 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid Direct upload to S3 is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" - -That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. - + The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option. @@ -128,7 +126,7 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo ## New JVM Options for file storage drivers - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. -- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. 
- The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. From 578c7af84e7cd1eac52901643d9bb49bc878cfa3 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:05:20 +0100 Subject: [PATCH 0008/1092] indentation fixes --- .../iq/dataverse/util/SystemConfig.java | 407 +++++++++--------- 1 file changed, 205 insertions(+), 202 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b040f557895..f3d8e46b004 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -59,8 +59,8 @@ public class SystemConfig { @EJB AuthenticationServiceBean authenticationService; - - public static final String DATAVERSE_PATH = "/dataverse/"; + + public static final String DATAVERSE_PATH = "/dataverse/"; /** * A JVM option for the advertised fully qualified domain name (hostname) of @@ -70,11 +70,11 @@ public class SystemConfig { * The equivalent in DVN 3.x was "dvn.inetAddress". */ public static final String FQDN = "dataverse.fqdn"; - + /** * A JVM option for specifying the "official" URL of the site. - * Unlike the FQDN option above, this would be a complete URL, - * with the protocol, port number etc. + * Unlike the FQDN option above, this would be a complete URL, + * with the protocol, port number etc. */ public static final String SITE_URL = "dataverse.siteUrl"; @@ -102,41 +102,41 @@ public class SystemConfig { private String saneDefaultForSolrHostColonPort = "localhost:8983"; /** - * The default number of datafiles that we allow to be created through + * The default number of datafiles that we allow to be created through * zip file upload. */ - private static final int defaultZipUploadFilesLimit = 1000; + private static final int defaultZipUploadFilesLimit = 1000; public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours - private static String appVersionString = null; - private static String buildNumberString = null; - + private static String appVersionString = null; + private static String buildNumberString = null; + private static final String JVM_TIMER_SERVER_OPTION = "dataverse.timerServer"; - - private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; + + private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - + public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; public final static String CURATIONLABELSDISABLED = "DISABLED"; - + public String getVersion() { return getVersion(false); } - + // The return value is a "prviate static String", that should be initialized - // once, on the first call (see the code below)... But this is a @Stateless - // bean... so that would mean "once per thread"? - this would be a prime + // once, on the first call (see the code below)... But this is a @Stateless + // bean... so that would mean "once per thread"? - this would be a prime // candidate for being moved into some kind of an application-scoped caching // service... some CachingService @Singleton - ? (L.A. 
5.8) public String getVersion(boolean withBuildNumber) { - + if (appVersionString == null) { // The Version Number is no longer supplied in a .properties file - so - // we can't just do + // we can't just do // return BundleUtil.getStringFromBundle("version.number", null, ResourceBundle.getBundle("VersionNumber", Locale.US)); // // Instead, we'll rely on Maven placing the version number into the @@ -144,31 +144,31 @@ public String getVersion(boolean withBuildNumber) { // (this is considered a better practice, and will also allow us // to maintain this number in only one place - the pom.xml file) // -- L.A. 4.0.2 - - // One would assume, that once the version is in the MANIFEST.MF, - // as Implementation-Version:, it would be possible to obtain - // said version simply as + + // One would assume, that once the version is in the MANIFEST.MF, + // as Implementation-Version:, it would be possible to obtain + // said version simply as // appVersionString = getClass().getPackage().getImplementationVersion(); - // alas - that's not working, for whatever reason. (perhaps that's + // alas - that's not working, for whatever reason. (perhaps that's // only how it works with jar-ed packages; not with .war files). - // People on the interwebs suggest that one should instead - // open the Manifest as a resource, then extract its attributes. - // There were some complications with that too. Plus, relying solely - // on the MANIFEST.MF would NOT work for those of the developers who - // are using "in place deployment" (i.e., where - // Netbeans runs their builds directly from the local target - // directory, bypassing the war file deployment; and the Manifest - // is only available in the .war file). For that reason, I am - // going to rely on the pom.properties file, and use java.util.Properties + // People on the interwebs suggest that one should instead + // open the Manifest as a resource, then extract its attributes. + // There were some complications with that too. Plus, relying solely + // on the MANIFEST.MF would NOT work for those of the developers who + // are using "in place deployment" (i.e., where + // Netbeans runs their builds directly from the local target + // directory, bypassing the war file deployment; and the Manifest + // is only available in the .war file). For that reason, I am + // going to rely on the pom.properties file, and use java.util.Properties // to read it. We have to look for this file in 2 different places - // depending on whether this is a .war file deployment, or a + // depending on whether this is a .war file deployment, or a // developers build. (the app-level META-INF is only populated when - // a .war file is built; the "maven-archiver" directory, on the other + // a .war file is built; the "maven-archiver" directory, on the other // hand, is only available when it's a local build deployment). - // So, long story short, I'm resorting to the convoluted steps below. - // It may look hacky, but it should actually be pretty solid and - // reliable. - + // So, long story short, I'm resorting to the convoluted steps below. + // It may look hacky, but it should actually be pretty solid and + // reliable. 
+ // First, find the absolute path url of the application persistence file // always supplied with the Dataverse app: @@ -180,46 +180,46 @@ public String getVersion(boolean withBuildNumber) { filePath = fileUrl.getFile(); if (filePath != null) { InputStream mavenPropertiesInputStream = null; - String mavenPropertiesFilePath; + String mavenPropertiesFilePath; Properties mavenProperties = new Properties(); filePath = filePath.replaceFirst("/[^/]*$", "/"); - // Using a relative path, find the location of the maven pom.properties file. - // First, try to look for it in the app-level META-INF. This will only be - // available if it's a war file deployment: + // Using a relative path, find the location of the maven pom.properties file. + // First, try to look for it in the app-level META-INF. This will only be + // available if it's a war file deployment: mavenPropertiesFilePath = filePath.concat("../../../META-INF/maven/edu.harvard.iq/dataverse/pom.properties"); - + try { mavenPropertiesInputStream = new FileInputStream(mavenPropertiesFilePath); } catch (IOException ioex) { - // OK, let's hope this is a local dev. build. - // In that case the properties file should be available in - // the maven-archiver directory: - + // OK, let's hope this is a local dev. build. + // In that case the properties file should be available in + // the maven-archiver directory: + mavenPropertiesFilePath = filePath.concat("../../../../maven-archiver/pom.properties"); - - // try again: - + + // try again: + try { mavenPropertiesInputStream = new FileInputStream(mavenPropertiesFilePath); } catch (IOException ioex2) { logger.warning("Failed to find and/or open for reading the pom.properties file."); - mavenPropertiesInputStream = null; + mavenPropertiesInputStream = null; } } - + if (mavenPropertiesInputStream != null) { try { mavenProperties.load(mavenPropertiesInputStream); - appVersionString = mavenProperties.getProperty("version"); + appVersionString = mavenProperties.getProperty("version"); } catch (IOException ioex) { logger.warning("caught IOException trying to read and parse the pom properties file."); } finally { IOUtils.closeQuietly(mavenPropertiesInputStream); } } - + } else { logger.warning("Null file path representation of the location of persistence.xml in the webapp root directory!"); } @@ -229,53 +229,54 @@ public String getVersion(boolean withBuildNumber) { if (appVersionString == null) { - // still null? - defaulting to 4.0: + // still null? 
- defaulting to 4.0: appVersionString = "4.0"; } } - + if (withBuildNumber) { if (buildNumberString == null) { - // (build number is still in a .properties file in the source tree; it only - // contains a real build number if this war file was built by - // Jenkins) - + // (build number is still in a .properties file in the source tree; it only + // contains a real build number if this war file was built by + // Jenkins) + try { buildNumberString = ResourceBundle.getBundle("BuildNumber").getString("build.number"); } catch (MissingResourceException ex) { - buildNumberString = null; + buildNumberString = null; } } - + if (buildNumberString != null && !buildNumberString.equals("")) { - return appVersionString + " build " + buildNumberString; - } - } - - return appVersionString; + return appVersionString + " build " + buildNumberString; + } + } + + return appVersionString; } public String getSolrHostColonPort() { String SolrHost; if ( System.getenv("SOLR_SERVICE_HOST") != null && System.getenv("SOLR_SERVICE_HOST") != ""){ SolrHost = System.getenv("SOLR_SERVICE_HOST"); + } else { + SolrHost = saneDefaultForSolrHostColonPort; } - else SolrHost = saneDefaultForSolrHostColonPort; String solrHostColonPort = settingsService.getValueForKey(SettingsServiceBean.Key.SolrHostColonPort, SolrHost); return solrHostColonPort; } public boolean isProvCollectionEnabled() { String provCollectionEnabled = settingsService.getValueForKey(SettingsServiceBean.Key.ProvCollectionEnabled, null); - if("true".equalsIgnoreCase(provCollectionEnabled)){ + if ("true".equalsIgnoreCase(provCollectionEnabled)) { return true; } return false; } - + public int getMetricsCacheTimeoutMinutes() { - int defaultValue = 10080; //one week in minutes + int defaultValue = 10080; // one week in minutes SettingsServiceBean.Key key = SettingsServiceBean.Key.MetricsCacheTimeoutMinutes; String metricsCacheTimeString = settingsService.getValueForKey(key); if (metricsCacheTimeString != null) { @@ -293,7 +294,7 @@ public int getMetricsCacheTimeoutMinutes() { } return defaultValue; } - + public int getMinutesUntilConfirmEmailTokenExpires() { final int minutesInOneDay = 1440; final int reasonableDefault = minutesInOneDay; @@ -338,10 +339,10 @@ public static int getMinutesUntilPasswordResetTokenExpires() { } return reasonableDefault; } - + /** * The "official", designated URL of the site; - * can be defined as a complete URL; or derived from the + * can be defined as a complete URL; or derived from the * "official" hostname. If none of these options is set, * defaults to the InetAddress.getLocalHOst() and https; * These are legacy JVM options. 
Will be eventualy replaced @@ -350,7 +351,7 @@ public static int getMinutesUntilPasswordResetTokenExpires() { public String getDataverseSiteUrl() { return getDataverseSiteUrlStatic(); } - + public static String getDataverseSiteUrlStatic() { String hostUrl = System.getProperty(SITE_URL); if (hostUrl != null && !"".equals(hostUrl)) { @@ -367,19 +368,20 @@ public static String getDataverseSiteUrlStatic() { hostUrl = "https://" + hostName; return hostUrl; } - + /** - * URL Tracking: + * URL Tracking: */ public String getPageURLWithQueryString() { - return PrettyContext.getCurrentInstance().getRequestURL().toURL() + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); + return PrettyContext.getCurrentInstance().getRequestURL().toURL() + + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); } /** - * The "official" server's fully-qualified domain name: + * The "official" server's fully-qualified domain name: */ public String getDataverseServer() { - // still reliese on a JVM option: + // still reliese on a JVM option: String fqdn = System.getProperty(FQDN); if (fqdn == null) { try { @@ -447,44 +449,44 @@ public static int getIntLimitFromStringOrDefault(String limitSetting, Integer de /** * Download-as-zip size limit. - * returns defaultZipDownloadLimit if not specified; - * set to -1 to disable zip downloads. + * returns defaultZipDownloadLimit if not specified; + * set to -1 to disable zip downloads. */ public long getZipDownloadLimit() { String zipLimitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipDownloadLimit); return getLongLimitFromStringOrDefault(zipLimitOption, defaultZipDownloadLimit); } - + public int getZipUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultZipUploadFilesLimit); } - + /** - * Session timeout, in minutes. + * Session timeout, in minutes. 
* (default value provided) */ public int getLoginSessionTimeout() { return getIntLimitFromStringOrDefault( - settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), - defaultLoginSessionTimeout); + settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), + defaultLoginSessionTimeout); } - + /* ` the number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog - */ + */ public int getMultipleUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.MultipleUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultMultipleUploadFilesLimit); } - + public long getGuestbookResponsesPageDisplayLimit() { String limitSetting = settingsService.getValueForKey(SettingsServiceBean.Key.GuestbookResponsesPageDisplayLimit); return getLongLimitFromStringOrDefault(limitSetting, DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT); } - - public long getUploadLogoSizeLimit(){ + + public long getUploadLogoSizeLimit() { return 500000; } @@ -497,10 +499,10 @@ public long getThumbnailSizeLimitPDF() { } public static long getThumbnailSizeLimit(String type) { - String option = null; - - //get options via jvm options - + String option = null; + + // get options via jvm options + if ("Image".equals(type)) { option = System.getProperty("dataverse.dataAccess.thumbnail.image.limit"); return getLongLimitFromStringOrDefault(option, DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE); @@ -512,19 +514,19 @@ public static long getThumbnailSizeLimit(String type) { // Zero (0) means no limit. return getLongLimitFromStringOrDefault(option, 0L); } - + public boolean isThumbnailGenerationDisabledForType(String type) { return getThumbnailSizeLimit(type) == -1l; } - + public boolean isThumbnailGenerationDisabledForImages() { return isThumbnailGenerationDisabledForType("Image"); } - + public boolean isThumbnailGenerationDisabledForPDF() { return isThumbnailGenerationDisabledForType("PDF"); } - + public String getApplicationTermsOfUse() { String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); @@ -532,9 +534,9 @@ public String getApplicationTermsOfUse() { // value, or as a better default than the saneDefaultForAppTermsOfUse if there // is no language-specific value String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse); - //Now get the language-specific value if it exists + // Now get the language-specific value if it exists if (language != null && !language.equalsIgnoreCase(BundleUtil.getDefaultLocale().getLanguage())) { - appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); } return appTermsOfUse; } @@ -545,7 +547,7 @@ public String getApiTermsOfUse() { return apiTermsOfUse; } - // TODO: + // TODO: // remove this method! // pages should be using settingsWrapper.get(":ApplicationPrivacyPolicyUrl") instead. 
-- 4.2.1 public String getApplicationPrivacyPolicyUrl() { @@ -564,10 +566,10 @@ public boolean isFilesOnDatasetPageFromSolr() { return settingsService.isTrueForKey(SettingsServiceBean.Key.FilesOnDatasetPageFromSolr, safeDefaultIfKeyNotFound); } - public Long getMaxFileUploadSizeForStore(String driverId){ - return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); - } - + public Long getMaxFileUploadSizeForStore(String driverId) { + return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); + } + public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { @@ -581,12 +583,12 @@ public Integer getSearchHighlightFragmentSize() { } public long getTabularIngestSizeLimit() { - // This method will return the blanket ingestable size limit, if - // set on the system. I.e., the universal limit that applies to all - // tabular ingests, regardless of fromat: - - String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); - + // This method will return the blanket ingestable size limit, if + // set on the system. I.e., the universal limit that applies to all + // tabular ingests, regardless of fromat: + + String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); @@ -595,48 +597,48 @@ public long getTabularIngestSizeLimit() { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); } } - // -1 means no limit is set; - // 0 on the other hand would mean that ingest is fully disabled for - // tabular data. - return -1; + // -1 means no limit is set; + // 0 on the other hand would mean that ingest is fully disabled for + // tabular data. + return -1; } - + public long getTabularIngestSizeLimit(String formatName) { // This method returns the size limit set specifically for this format name, - // if available, otherwise - the blanket limit that applies to all tabular - // ingests regardless of a format. - + // if available, otherwise - the blanket limit that applies to all tabular + // ingests regardless of a format. + if (formatName == null || formatName.equals("")) { - return getTabularIngestSizeLimit(); + return getTabularIngestSizeLimit(); } - + String limitEntry = settingsService.get(SettingsServiceBean.Key.TabularIngestSizeLimit.toString() + ":" + formatName); - + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { - logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry ); + logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? 
- " + limitEntry); } } - - return getTabularIngestSizeLimit(); + + return getTabularIngestSizeLimit(); } public boolean isOAIServerEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.OAIServerEnabled, defaultResponse); } - + public void enableOAIServer() { settingsService.setValueForKey(SettingsServiceBean.Key.OAIServerEnabled, "true"); } - + public void disableOAIServer() { settingsService.deleteValueForKey(SettingsServiceBean.Key.OAIServerEnabled); - } - + } + public boolean isTimerServer() { String optionValue = System.getProperty(JVM_TIMER_SERVER_OPTION); if ("true".equalsIgnoreCase(optionValue)) { @@ -704,11 +706,12 @@ public String getOAuth2CallbackUrl() { } return saneDefault; } - + public boolean isShibPassiveLoginEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibPassiveLoginEnabled, defaultResponse); } + public boolean isShibAttributeCharacterSetConversionEnabled() { boolean defaultResponse = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibAttributeCharacterSetConversionEnabled, defaultResponse); @@ -734,7 +737,7 @@ public String getPVDictionaries() { public int getPVGoodStrength() { // FIXME: Change this to 21 to match Harvard's requirements or implement a way to disable the rule (0 or -1) and have the default be disabled. int goodStrengthLength = 20; - //String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); + // String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); String _goodStrengthLength = settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString()); try { goodStrengthLength = Integer.parseInt(_goodStrengthLength); @@ -862,9 +865,7 @@ public enum FileUploadMethods { * Upload through Globus of large files */ - GLOBUS("globus") - ; - + GLOBUS("globus"); private final String text; @@ -887,8 +888,7 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - - + } /** @@ -904,8 +904,8 @@ public enum FileDownloadMethods { */ RSYNC("rsal/rsync"), NATIVE("native/http"), - GLOBUS("globus") - ; + GLOBUS("globus"); + private final String text; private FileDownloadMethods(final String text) { @@ -927,27 +927,28 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + } - + public enum DataFilePIDFormat { DEPENDENT("DEPENDENT"), INDEPENDENT("INDEPENDENT"); + private final String text; public String getText() { return text; } - - private DataFilePIDFormat(final String text){ + + private DataFilePIDFormat(final String text) { this.text = text; } - + @Override public String toString() { return text; } - + } /** @@ -987,44 +988,44 @@ public String toString() { } - public boolean isPublicInstall(){ + public boolean isPublicInstall() { boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); } - - public boolean isRsyncUpload(){ + + public boolean isRsyncUpload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), true); } - public boolean isGlobusUpload(){ + public boolean isGlobusUpload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), true); } // Controls if HTTP upload is enabled for both GUI and API. 
- public boolean isHTTPUpload(){ + public boolean isHTTPUpload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), true); } - - public boolean isRsyncOnly(){ + + public boolean isRsyncOnly() { String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - if(downloadMethods == null){ + if (downloadMethods == null) { return false; } - if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())){ + if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())) { return false; } String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods==null){ + if (uploadMethods == null) { return false; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); } } - + public boolean isRsyncDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), false); } - + public boolean isHTTPDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), false); } @@ -1032,14 +1033,14 @@ public boolean isHTTPDownload() { public boolean isGlobusDownload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false); } - + public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } public List getGlobusStoresList() { - String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); - return Arrays.asList(globusStores.split("\\s*,\\s*")); + String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); + return Arrays.asList(globusStores.split("\\s*,\\s*")); } private Boolean getMethodAvailable(String method, boolean upload) { @@ -1051,31 +1052,32 @@ private Boolean getMethodAvailable(String method, boolean upload) { return Arrays.asList(methods.toLowerCase().split("\\s*,\\s*")).contains(method); } } - - public Integer getUploadMethodCount(){ - String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods==null){ + + public Integer getUploadMethodCount() { + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods == null) { return 0; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); - } + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); + } } - public boolean isDataFilePIDSequentialDependent(){ + + public boolean isDataFilePIDSequentialDependent() { String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")){ + if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")) { return true; } return false; } - + public int getPIDAsynchRegFileCount() { String fileCount = settingsService.getValueForKey(SettingsServiceBean.Key.PIDAsynchRegFileCount, "10"); 
int retVal = 10; try { retVal = Integer.parseInt(fileCount); - } catch (NumberFormatException e) { - //if no number in the setting we'll return 10 + } catch (NumberFormatException e) { + // if no number in the setting we'll return 10 } return retVal; } @@ -1089,13 +1091,13 @@ public boolean isFilePIDsEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound); } - + public boolean isIndependentHandleService() { boolean safeDefaultIfKeyNotFound = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.IndependentHandleService, safeDefaultIfKeyNotFound); - + } - + public String getHandleAuthHandle() { String handleAuthHandle = settingsService.getValueForKey(SettingsServiceBean.Key.HandleAuthHandle, null); return handleAuthHandle; @@ -1105,61 +1107,61 @@ public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); return mDCLogPath; } - + public boolean isDatafileValidationOnPublishEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FileValidationOnPublishEnabled, safeDefaultIfKeyNotFound); } - public boolean directUploadEnabled(DvObjectContainer container) { + public boolean directUploadEnabled(DvObjectContainer container) { // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); - } - - public String getDataCiteRestApiUrlString() { - //As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. + } + + public String getDataCiteRestApiUrlString() { + // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); - } - + } + public boolean isExternalDataverseValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? } - + public boolean isExternalDatasetValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? 
} - + public String getDataverseValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript); } - + public String getDatasetValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript); } - + public String getDataverseValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataPublishValidationFailureMsg, defaultMessage); } - + public String getDataverseUpdateValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be updated because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataUpdateValidationFailureMsg, defaultMessage); } - + public String getDatasetValidationFailureMsg() { String defaultMessage = "This dataset cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidationFailureMsg, defaultMessage); } - + public boolean isExternalValidationAdminOverrideEnabled() { return "true".equalsIgnoreCase(settingsService.getValueForKey(SettingsServiceBean.Key.ExternalValidationAdminOverride)); } - + public long getDatasetValidationSizeLimit() { String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetChecksumValidationSizeLimit); @@ -1189,6 +1191,7 @@ public long getFileValidationSizeLimit() { // -1 means no limit is set; return -1; } + public Map getCurationLabels() { Map labelMap = new HashMap(); String setting = settingsService.getValueForKey(SettingsServiceBean.Key.AllowedCurationLabels, ""); @@ -1229,15 +1232,15 @@ public Map getCurationLabels() { } return labelMap; } - + public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { - Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); - - // we default to false - i.e., "not disabled" if the setting is not present: + Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); + + // we default to false - i.e., "not disabled" if the setting is not present: if (ret == null) { - return false; + return false; } - - return !ret; + + return !ret; } } From 8578de173b63dbde3bb5440147422783621fbee9 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:10:57 +0100 Subject: [PATCH 0009/1092] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index f3d8e46b004..1edf5a0fb6e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1105,7 +1105,7 @@ public String getHandleAuthHandle() { public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); - return mDCLogPath; + return mDCLogPath;this method is used } public boolean isDatafileValidationOnPublishEnabled() { @@ -1115,7 +1115,7 @@ public boolean isDatafileValidationOnPublishEnabled() { public boolean 
directUploadEnabled(DvObjectContainer container) { // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here - return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); + return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } public String getDataCiteRestApiUrlString() { From f2e75db13bcff1f5a5bc7d5cfc958db04be745c0 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:15:03 +0100 Subject: [PATCH 0010/1092] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 1edf5a0fb6e..0ab99c0de6a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1105,7 +1105,7 @@ public String getHandleAuthHandle() { public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); - return mDCLogPath;this method is used + return mDCLogPath; } public boolean isDatafileValidationOnPublishEnabled() { From bff889d3864ca10f7dc4f7ae84595e40a2b70d34 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:25:39 +0100 Subject: [PATCH 0011/1092] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 0ab99c0de6a..7d7006e708e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1119,7 +1119,7 @@ public boolean directUploadEnabled(DvObjectContainer container) { } public String getDataCiteRestApiUrlString() { - // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. + // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. 
return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); } From ad4bb5107fcb14b8c4ebb7f7fd57186511577548 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 11:51:31 +0100 Subject: [PATCH 0012/1092] renamed jvm option: allow-out-of-band-upload -> upload-out-of-band --- doc/sphinx-guides/source/installation/config.rst | 9 +++++---- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4f15ad81190..62cc984bc56 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,11 +249,11 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. -When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.allow-out-of-band-upload`` JVM option to ``true``. +When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. -Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``allow-out-of-band-upload`` and will enable direct upload even with ``allow-out-of-band-upload`` not set (or set to false). -In other words, ``dataverse.files.\.allow-out-of-band-upload`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. -Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``allow-out-of-band-upload`` (setting it to ``false`` does not have any effect in that case). +Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). +In other words, ``dataverse.files.\.upload-out-of-band`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. +Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``upload-out-of-band`` (setting it to ``false`` does not have any effect in that case). The following sections describe how to set up various types of stores and how to configure for multiple stores. @@ -552,6 +552,7 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. 
``false`` + dataverse.files..upload-out-of-band ``true``/``false`` Enable direct upload of files added to a dataset via API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index a2ff546ef0a..85ca97d5f15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { public static boolean isDirectUploadEnabled(String driverId) { return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index db82df72b8a..9b549901d55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1749,7 +1749,7 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 7d7006e708e..b45ad50ab1d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1114,7 +1114,7 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here + // this method is used in UI only, therefore "dataverse.files." + driverId + ".upload-out-of-band" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files."
+ container.getEffectiveStorageDriverId() + ".upload-redirect")); } From 49102ada3380863d115f5167343eb97446b35872 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 12:49:36 +0100 Subject: [PATCH 0013/1092] linking to api documentation --- doc/sphinx-guides/source/api/native-api.rst | 1 + doc/sphinx-guides/source/installation/config.rst | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6d68d648cb3..0341b6e07d1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2348,6 +2348,7 @@ The fully expanded example above (without environment variables) looks like this Note: The ``id`` returned in the json response is the id of the file metadata version. +.. _add-file-metadata-api: Adding File Metadata ~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 62cc984bc56..b074a180c8f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,10 +250,9 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. +Files can be then uploaded by an integration tool with :ref:`add-file-api` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the :ref:`add-file-metadata-api` api call. Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). -In other words, ``dataverse.files.\.upload-out-of-band`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. -Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``upload-out-of-band`` (setting it to ``false`` does not have any effect in that case). +In other words, ``dataverse.files.\.upload-out-of-band`` option opens the :ref:`add-file-api` and :ref:`add-file-metadata-api` api endpoints without redirecting uploads in the UI. The following sections describe how to set up various types of stores and how to configure for multiple stores. 
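For illustration: assuming a store with id ``<id>`` (a placeholder, not a store defined in these patches), the out-of-band upload option renamed above would typically be enabled on a Payara-based installation with a command of the following shape:

    ./asadmin create-jvm-options "-Ddataverse.files.<id>.upload-out-of-band=true"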
From e9d6df0bb6f23f4f4a8e7fe53213c91596980332 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 14:11:19 +0100 Subject: [PATCH 0014/1092] some improvements in the documentation --- doc/sphinx-guides/source/api/native-api.rst | 4 ++++ doc/sphinx-guides/source/installation/config.rst | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 0341b6e07d1..f075acf40f6 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1481,6 +1481,8 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.json() print r.status_code +This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. + .. _add-remote-file-api: Add a Remote File to a Dataset @@ -2391,6 +2393,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/:persistentId/addFiles?persistentId=doi:10.5072/FK2/7U7YBV -F jsonData='[{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}, {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' +This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index b074a180c8f..d3a22453453 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,10 +249,10 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. -When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -Files can be then uploaded by an integration tool with :ref:`add-file-api` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the :ref:`add-file-metadata-api` api call. 
-Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). -In other words, ``dataverse.files.\.upload-out-of-band`` option opens the :ref:`add-file-api` and :ref:`add-file-metadata-api` api endpoints without redirecting uploads in the UI. +A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. +This option allows adding files with the :ref:`add-file-api` call. It also allows registering the metadata of a file with the :ref:`add-file-metadata-api` call for a file uploaded directly to the storage. +Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` option and will enable direct upload even with ``upload-out-of-band`` option not set (or set to false). +When neither of the two option is enabled, adding files with API will not be possible and will result with the "Dataset store configuration does not allow provided storageIdentifier" error. The following sections describe how to set up various types of stores and how to configure for multiple stores. From dc64aa23c3d4c364f46ad6e695e38ed3311455eb Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 17:47:13 +0100 Subject: [PATCH 0015/1092] documentation improvements by Dieuwertje --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- doc/sphinx-guides/source/installation/config.rst | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f075acf40f6..54e47a29b9d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1481,7 +1481,7 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.json() print r.status_code -This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. +This API call might result in the following error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. This error can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. .. 
_add-remote-file-api: @@ -2393,7 +2393,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/:persistentId/addFiles?persistentId=doi:10.5072/FK2/7U7YBV -F jsonData='[{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}, {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' -This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. +This API call might result in the following error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. This error can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d3a22453453..4eadcc8ed9d 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,9 +250,9 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -This option allows adding files with the :ref:`add-file-api` call. It also allows registering the metadata of a file with the :ref:`add-file-metadata-api` call for a file uploaded directly to the storage. -Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` option and will enable direct upload even with ``upload-out-of-band`` option not set (or set to false). -When neither of the two option is enabled, adding files with API will not be possible and will result with the "Dataset store configuration does not allow provided storageIdentifier" error. +This option allows API users to add files with the :ref:`add-file-api` call. It also allows API users to register the metadata of a file with the :ref:`add-file-metadata-api` call for a file that was uploaded directly to the storage. +Note that if a Dataverse installation uses S3-storage while the ``dataverse.files.\.upload-redirect`` JVM option is enabled, the ``upload-out-of-band`` setting is overruled. This results in direct upload being enabled even with the ``upload-out-of-band`` option not set (or set to false). 
+When the ``upload-out-of-band`` option is not set to ``true`` and it isn't being overruled by the previously mentioned combination, adding files using the API will not be possible and will return the "Dataset store configuration does not allow provided storageIdentifier" error. The following sections describe how to set up various types of stores and how to configure for multiple stores. From 085fb8f44503d69354b5cb8f5793d8144dbde0e1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 21 Nov 2022 09:53:01 +0100 Subject: [PATCH 0016/1092] improvements in the documentation --- doc/sphinx-guides/source/installation/config.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4eadcc8ed9d..467872bfdd4 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -251,6 +251,9 @@ A Dataverse installation may also be configured to reference some files (e.g. la A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. This option allows API users to add files with the :ref:`add-file-api` call. It also allows API users to register the metadata of a file with the :ref:`add-file-metadata-api` call for a file that was uploaded directly to the storage. + +The option is useful in cases in which an S3 storage is not used, or is not made public as required by the ``dataverse.files.\.upload-redirect`` option. An example would be building a tool for synchronizing datasets with files from a third-party repository. In such a case, the tool would upload files directly to the storage, and then use :ref:`add-file-metadata-api` to link them to a dataset. + Note that if a Dataverse installation uses S3-storage while the ``dataverse.files.\.upload-redirect`` JVM option is enabled, the ``upload-out-of-band`` setting is overruled. This results in direct upload being enabled even with the ``upload-out-of-band`` option not set (or set to false). From d870e202dccac268cc3f099277559d3e473b7944 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 9 Jan 2023 13:18:06 +0100 Subject: [PATCH 0017/1092] chore(deps): upgrade Nimbus OIDC SDK to latest 10.4 release #9268 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8b6f98c5896..a5d52fd7545 100644 --- a/pom.xml +++ b/pom.xml @@ -381,7 +381,7 @@ com.nimbusds oauth2-oidc-sdk - 9.41.1 + 10.4 From 2ee66618ed77d55878300a7baaa4fa4a94ac7162 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 9 Jan 2023 15:52:14 +0100 Subject: [PATCH 0018/1092] style(oidc): make class fields final in OIDCAuthProvider These values should not be changed once the provider has been initialized.
--- .../oauth2/oidc/OIDCAuthProvider.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index a9c44010950..4b6c575cfaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -54,15 +54,15 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { protected String title = "Open ID Connect"; protected List scope = Arrays.asList("openid", "email", "profile"); - Issuer issuer; - ClientAuthentication clientAuth; - OIDCProviderMetadata idpMetadata; + final Issuer issuer; + final ClientAuthentication clientAuth; + final OIDCProviderMetadata idpMetadata; public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException { this.clientSecret = aClientSecret; // nedded for state creation this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret)); this.issuer = new Issuer(issuerEndpointURL); - getMetadata(); + this.idpMetadata = getMetadata(); } /** @@ -74,7 +74,9 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd * @return false */ @Override - public boolean isDisplayIdentifier() { return false; } + public boolean isDisplayIdentifier() { + return false; + } /** * Setup metadata from OIDC provider during creation of the provider representation @@ -82,9 +84,14 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd * @throws IOException when sth. goes wrong with the retrieval * @throws ParseException when the metadata is not parsable */ - void getMetadata() throws AuthorizationSetupException { + OIDCProviderMetadata getMetadata() throws AuthorizationSetupException { try { - this.idpMetadata = getMetadata(this.issuer); + var metadata = getMetadata(this.issuer); + // Assert that the provider supports the code flow + if (metadata.getResponseTypes().stream().noneMatch(ResponseType::impliesCodeFlow)) { + throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling."); + } + return metadata; } catch (IOException ex) { logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not retrievable: "+ex.getMessage()); throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not retrievable."); @@ -92,11 +99,6 @@ void getMetadata() throws AuthorizationSetupException { logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not parsable: "+ex.getMessage()); throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not parsable."); } - - // Assert that the provider supports the code flow - if (! this.idpMetadata.getResponseTypes().stream().filter(idp -> idp.impliesCodeFlow()).findAny().isPresent()) { - throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling."); - } } /** From 0c7db6614669ecc40e96ffb029be4f21ed04f4db Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:27:55 +0100 Subject: [PATCH 0019/1092] chore(deps): update Testcontainers to latest version Also updating Postgres Server version in "tc" Maven profile. 
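As a usage sketch (assuming the profile wiring stays as before; this note is illustrative, not part of the change): the Testcontainers-backed tests guarded by this profile can be run by activating it explicitly, e.g.

    mvn test -Ptc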
--- modules/dataverse-parent/pom.xml | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 3911e9d5bbb..e316a5508ce 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -167,7 +167,7 @@ 5.0.0-RC2 - 1.15.0 + 1.17.6 2.10.1 4.13.1 diff --git a/pom.xml b/pom.xml index a5d52fd7545..56871c7fd56 100644 --- a/pom.xml +++ b/pom.xml @@ -757,7 +757,7 @@ tc true - 9.6 + 13.0 From 5681d24520ac017eb925bc058ecaef877eedd14b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:28:52 +0100 Subject: [PATCH 0020/1092] chore(deps): add Keycloak Testcontainer module for tests --- pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml index 56871c7fd56..63c362ba904 100644 --- a/pom.xml +++ b/pom.xml @@ -570,6 +570,12 @@ postgresql test + + com.github.dasniko + testcontainers-keycloak + 2.4.0 + test + org.mockito mockito-core From 9f534c4e4a59d7b33b9a0e4a5a876819e9278c47 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:32:12 +0100 Subject: [PATCH 0021/1092] feat(tests): extend JvmSetting helper for test class method references Instead of only allowing to supply static String values for a setting, also allow referencing a static method in the test class to retrieve dynamic data. This is inspired by the JUnit5 MethodSource example. --- .../iq/dataverse/util/testing/JvmSetting.java | 6 ++++- .../util/testing/JvmSettingExtension.java | 25 ++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java index f54cadaf253..85b10489f15 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java @@ -39,6 +39,8 @@ @ResourceLock(value = Resources.SYSTEM_PROPERTIES, mode = ResourceAccessMode.READ_WRITE) public @interface JvmSetting { + static final String PLACEHOLDER = "NULL"; + /** * The key of the system property to be set. */ @@ -47,10 +49,12 @@ /** * The value of the system property to be set. */ - String value(); + String value() default PLACEHOLDER; String[] varArgs() default {}; + String method() default PLACEHOLDER; + /** * Containing annotation of repeatable {@code @SetSystemProperty}. 
*/ diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java index 56e87589139..17728e75ffc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java @@ -5,6 +5,11 @@ import org.junit.jupiter.api.extension.BeforeTestExecutionCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER; + public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback { private ExtensionContext.Store getStore(ExtensionContext context) { @@ -28,7 +33,25 @@ public void beforeTestExecution(ExtensionContext extensionContext) throws Except } // set to new value - System.setProperty(settingName, setting.value()); + if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { + throw new IllegalArgumentException("You must either provide a value or a method reference " + + "for key JvmSettings." + setting.key()); + } + + // retrieve value from static test class method if no setting given + if (setting.value().equals(PLACEHOLDER)) { + extensionContext.getTestClass().ifPresent(klass -> { + try { + Method valueMethod = klass.getDeclaredMethod(setting.method()); + valueMethod.setAccessible(true); + System.setProperty(settingName, (String)valueMethod.invoke(null)); + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + throw new RuntimeException(e); + } + }); + } else { + System.setProperty(settingName, setting.value()); + } } }); } From 5cd9f2eb8bd01b88cde28e41c8b27c52656c62b9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:40:13 +0100 Subject: [PATCH 0022/1092] doc(dev): add description for method references in @JvmSetting helper --- doc/sphinx-guides/source/developers/testing.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4b3d5fd0a55..2d1948449a9 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -89,8 +89,12 @@ For unit tests, the most interesting part is to set a JVM setting just for the c Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or a test class to set and clear the property automatically. -To set arbitrary system properties for the current test, a similar extension -``@SystemProperty(key = "", value = "")`` has been added. +Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")`` +to reference a method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow +retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!) + +To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")`` +has been added. (Note: it does not support method references.) Both extensions will ensure the global state of system properties is non-interfering for test executions. Tests using these extensions will be executed in serial. 
From ebd8eede980fa1b3cce3e2f30538c9a79c180eb2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:45:04 +0100 Subject: [PATCH 0023/1092] feat(settings): add authentication settings for OIDC to JvmSettings #9268 --- .../harvard/iq/dataverse/settings/JvmSettings.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index bc5a73cd958..46b79b06466 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -64,6 +64,17 @@ public enum JvmSettings { SCOPE_API(PREFIX, "api"), API_SIGNING_SECRET(SCOPE_API, "signing-secret"), + // AUTH SETTINGS + SCOPE_AUTH(PREFIX, "auth"), + // AUTH: OIDC SETTINGS + SCOPE_OIDC(SCOPE_AUTH, "oidc"), + OIDC_ENABLED(SCOPE_OIDC, "enabled"), + OIDC_TITLE(SCOPE_OIDC, "title"), + OIDC_SUBTITLE(SCOPE_OIDC, "subtitle"), + OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"), + OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"), + OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"), + ; private static final String SCOPE_SEPARATOR = "."; From 1bff1be736a1362dd779be66415919961fb44599 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:48:18 +0100 Subject: [PATCH 0024/1092] feat(auth): add OIDC provider provisioning via MPCONFIG #9268 Only one provider can be configured via MPCONFIG for now. The provider is configured with an appropriate ID to distinguish it from other providers configured via the API. It can be configured in addition to other OIDC providers when desired. --- ...ationProvidersRegistrationServiceBean.java | 10 ++++++++++ .../OIDCAuthenticationProviderFactory.java | 20 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java index 6289865baf0..79dabe1d390 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import java.util.HashMap; import java.util.Map; @@ -121,6 +122,15 @@ public void startup() { logger.log(Level.SEVERE, "Exception setting up the authentication provider '" + row.getId() + "': " + ex.getMessage(), ex); } }); + + // Add providers registered via MPCONFIG + if (JvmSettings.OIDC_ENABLED.lookupOptional(Boolean.class).orElse(false)) { + try { + registerProvider(OIDCAuthenticationProviderFactory.buildFromSettings()); + } catch (AuthorizationSetupException e) { + logger.log(Level.SEVERE, "Exception setting up an OIDC auth provider via MicroProfile Config", e); + } + } } private void registerProviderFactory(AuthenticationProviderFactory aFactory) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java 
b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index c6d1a28e19d..f4d631adea3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; +import edu.harvard.iq.dataverse.settings.JvmSettings; import java.util.Map; @@ -44,4 +45,23 @@ public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) th return oidc; } + + /** + * Build an OIDC provider from MicroProfile Config provisioned details + * @return The configured auth provider + * @throws AuthorizationSetupException + */ + public static AuthenticationProvider buildFromSettings() throws AuthorizationSetupException { + OIDCAuthProvider oidc = new OIDCAuthProvider( + JvmSettings.OIDC_CLIENT_ID.lookup(), + JvmSettings.OIDC_CLIENT_SECRET.lookup(), + JvmSettings.OIDC_AUTH_SERVER_URL.lookup() + ); + + oidc.setId("oidc-mpconfig"); + oidc.setTitle(JvmSettings.OIDC_TITLE.lookupOptional().orElse("OpenID Connect")); + oidc.setSubTitle(JvmSettings.OIDC_SUBTITLE.lookupOptional().orElse("OpenID Connect")); + + return oidc; + } } From fb11096562269d3704dd74504b6e665a6a6a843e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:48:48 +0100 Subject: [PATCH 0025/1092] style(auth): slight reformat of OIDC provider factory #9268 --- .../oauth2/oidc/OIDCAuthenticationProviderFactory.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index f4d631adea3..89cf1cb986d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -38,7 +38,12 @@ public String getInfo() { public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) throws AuthorizationSetupException { Map factoryData = OAuth2AuthenticationProviderFactory.parseFactoryData(aRow.getFactoryData()); - OIDCAuthProvider oidc = new OIDCAuthProvider(factoryData.get("clientId"), factoryData.get("clientSecret"), factoryData.get("issuer")); + OIDCAuthProvider oidc = new OIDCAuthProvider( + factoryData.get("clientId"), + factoryData.get("clientSecret"), + factoryData.get("issuer") + ); + oidc.setId(aRow.getId()); oidc.setTitle(aRow.getTitle()); oidc.setSubTitle(aRow.getSubtitle()); From 1fb0f588262a92010c5f0afa52d336a707358a6b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:51:24 +0100 Subject: [PATCH 0026/1092] test(auth): add integration test for OIDC provisioning via MPCONFIG #9268 Using Testcontainers to start a Keycloak instance with our default development realm, the provider is created using MPCONFIG settings. 
--- .../OIDCAuthenticationProviderFactoryIT.java | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java new file mode 100644 index 00000000000..53cfcca2742 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -0,0 +1,37 @@ +package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; + +import dasniko.testcontainers.keycloak.KeycloakContainer; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +@Tag("testcontainers") +@Testcontainers +class OIDCAuthenticationProviderFactoryIT { + + static final String clientId = "oidc-client"; + static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E"; + static final String realm = "oidc-realm"; + + @Container + static KeycloakContainer keycloakContainer = new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json"); + + // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations + private static String getAuthUrl() { + return keycloakContainer.getAuthServerUrl() + "realms/" + realm; + } + + @Test + @JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId) + @JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret) + @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") + void testCreateProvider() throws Exception { + OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings(); + assertTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString().startsWith(keycloakContainer.getAuthServerUrl())); + } +} \ No newline at end of file From e31dba3da3dc267e963c537da4d0076ed11eee44 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:54:39 +0100 Subject: [PATCH 0027/1092] build(auth): make resources in /conf avail to tests #9268 To use data in /conf for tests, adding the folder in Maven to copy them to the test classpath as resources helps to use them in tests very easily. All dirs under /conf will be copied to the /target/test-classes directory recursively. This also works when running tests in IDEs like IntelliJ. --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index 63c362ba904..a26071d253b 100644 --- a/pom.xml +++ b/pom.xml @@ -596,6 +596,11 @@ + + + ${project.basedir}/conf + + - + src/main/java From 4d7df9c4abfdebe4b8d19382fd836ff9827f5053 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 11:13:17 -0500 Subject: [PATCH 0030/1092] (draft/work in progress) framework for a new file creation command and storage quota enforcement #9361. 
--- .../iq/dataverse/DataFileServiceBean.java | 57 ++ .../iq/dataverse/DatasetServiceBean.java | 2 +- .../iq/dataverse/EditDatafilesPage.java | 30 +- .../impl/CreateNewDataFilesCommand.java | 555 ++++++++++++++++++ .../settings/SettingsServiceBean.java | 10 +- .../harvard/iq/dataverse/util/FileUtil.java | 36 +- .../iq/dataverse/util/SystemConfig.java | 16 +- .../util/bagit/data/FileUtilWrapper.java | 7 +- .../FileExceedsStorageQuotaException.java | 22 + src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/editFilesFragment.xhtml | 1 + 11 files changed, 718 insertions(+), 19 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 7da06f36be4..328f2aa59c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -67,6 +67,8 @@ public class DataFileServiceBean implements java.io.Serializable { @EJB EmbargoServiceBean embargoService; + @EJB SystemConfig systemConfig; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -140,6 +142,36 @@ public class DataFileServiceBean implements java.io.Serializable { */ public static final String MIME_TYPE_PACKAGE_FILE = "application/vnd.dataverse.file-package"; + public class UserStorageQuota { + private Long totalAllocatedInBytes = 0L; + private Long totalUsageInBytes = 0L; + + public UserStorageQuota(Long allocated, Long used) { + this.totalAllocatedInBytes = allocated; + this.totalUsageInBytes = used; + } + + public Long getTotalAllocatedInBytes() { + return totalAllocatedInBytes; + } + + public void setTotalAllocatedInBytes(Long totalAllocatedInBytes) { + this.totalAllocatedInBytes = totalAllocatedInBytes; + } + + public Long getTotalUsageInBytes() { + return totalUsageInBytes; + } + + public void setTotalUsageInBytes(Long totalUsageInBytes) { + this.totalUsageInBytes = totalUsageInBytes; + } + + public Long getRemainingQuotaInBytes() { + return totalAllocatedInBytes - totalUsageInBytes; + } + } + public DataFile find(Object pk) { return em.find(DataFile.class, pk); } @@ -1657,4 +1689,29 @@ public Embargo findEmbargo(Long id) { DataFile d = find(id); return d.getEmbargo(); } + + public Long getStorageUsageByCreator(AuthenticatedUser user) { + Query query = em.createQuery("SELECT SUM(o.filesize) FROM DataFile o WHERE o.creator.id=:creatorId"); + + try { + Long totalSize = (Long)query.setParameter("creatorId", user.getId()).getSingleResult(); + logger.info("total size for user: "+totalSize); + return totalSize == null ? 0L : totalSize; + } catch (NoResultException nre) { // ? 
+ logger.info("NoResultException, returning 0L"); + return 0L; + } + } + + public UserStorageQuota getUserStorageQuota(AuthenticatedUser user, Dataset dataset) { + // this is for testing only - one pre-set, installation-wide quota limit + // for everybody: + Long totalAllocated = systemConfig.getTestStorageQuotaLimit(); + // again, this is for testing only - we are only counting the total size + // of all the files created by this user; it will likely be a much more + // complex calculation in real life applications: + Long totalUsed = getStorageUsageByCreator(user); + + return new UserStorageQuota(totalAllocated, totalUsed); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 91ec050fe5c..4e522bbd441 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1166,5 +1166,5 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo hdLogger.warning("Failed to destroy the dataset"); } } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 1c033b37872..74c4e782d56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.ingest.IngestUtil; @@ -187,7 +188,13 @@ public enum Referrer { // Used to store results of permissions checks private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean } + // Size limit of an individual file: (set for the storage volume used) private Long maxFileUploadSizeInBytes = null; + // Total amount of data that the user should be allowed to upload. + // Will be calculated in real time based on various level quotas - + // for this user and/or this collection/dataset, etc. We should + // assume that it may change during the user session. + private Long maxTotalUploadSizeInBytes = null; private Long maxIngestSizeInBytes = null; // CSV: 4.8 MB, DTA: 976.6 KB, XLSX: 5.7 MB, etc. 
private String humanPerFormatTabularLimits = null; @@ -336,6 +343,14 @@ public Long getMaxFileUploadSizeInBytes() { public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } + + public Long getMaxTotalUploadSizeInBytes() { + return maxTotalUploadSizeInBytes; + } + + public String getHumanMaxTotalUploadSizeInBytes() { + return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes); + } public boolean isUnlimitedUploadFileSize() { @@ -563,7 +578,6 @@ public String init() { this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); hasValidTermsOfAccess = isHasValidTermsOfAccess(); if (!hasValidTermsOfAccess) { @@ -2024,7 +2038,13 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // Note: A single uploaded file may produce multiple datafiles - // for example, multiple files can be extracted from an uncompressed // zip file. - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); + ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); + + Command cmd; + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + + dFileList = createDataFilesResult.getDataFiles(); String createDataFilesError = editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult); if(createDataFilesError != null) { @@ -2033,8 +2053,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { } } catch (IOException ioex) { + // shouldn't we try and communicate to the user what happened? logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage()); return; + } catch (CommandException cex) { + // shouldn't we try and communicate to the user what happened? 
+ errorMessages.add(cex.getMessage()); + uploadComponentId = event.getComponent().getClientId(); + return; } /*catch (FileExceedsMaxSizeException ex) { logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ex.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java new file mode 100644 index 00000000000..9f281f9446d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -0,0 +1,555 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; +import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; +import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; +import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; +import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; +import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; +import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; +import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; +import edu.harvard.iq.dataverse.util.ShapefileHandler; +import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.file.BagItFileHandler; +import edu.harvard.iq.dataverse.util.file.BagItFileHandlerFactory; +import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.logging.Logger; +import java.util.zip.GZIPInputStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; +import javax.enterprise.inject.spi.CDI; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; + +/** + * + * @author landreev + */ +@RequiredPermissions( Permission.EditDataset ) +public class CreateNewDataFilesCommand extends AbstractCommand { + private static final Logger logger = 
Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); + + private final DatasetVersion version; + private final InputStream inputStream; + private final String fileName; + private final String suppliedContentType; + private final String newStorageIdentifier; + private final String newCheckSum; + private DataFile.ChecksumType newCheckSumType; + + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, null); + } + + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, DataFile.ChecksumType newCheckSumType) { + super(aRequest, version.getDataset()); + + this.version = version; + this.inputStream = inputStream; + this.fileName = fileName; + this.suppliedContentType = suppliedContentType; + this.newStorageIdentifier = newStorageIdentifier; + this.newCheckSum = newCheckSum; + this.newCheckSumType = newCheckSumType; + } + + @Override + public CreateDataFileResult execute(CommandContext ctxt) throws CommandException { + List datafiles = new ArrayList<>(); + + //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default + if(newCheckSumType == null) { + newCheckSumType = ctxt.systemConfig().getFileFixityChecksumAlgorithm(); + } + + String warningMessage = null; + + // save the file, in the temporary location for now: + Path tempFile = null; + + Long fileSizeLimit = ctxt.systemConfig().getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId()); + Long storageQuotaLimit = null; + + if (ctxt.systemConfig().isStorageQuotasEnforced()) { + //storageQuotaLimit = ctxt.files().getClass()...; + UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); + if (quota != null) { + storageQuotaLimit = quota.getRemainingQuotaInBytes(); + } + } + String finalType = null; + + if (newStorageIdentifier == null) { + if (getFilesTempDirectory() != null) { + try { + tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); + // "temporary" location is the key here; this is why we are not using + // the DataStore framework for this - the assumption is that + // temp files will always be stored on the local filesystem. + // -- L.A. Jul. 
2014 + logger.fine("Will attempt to save the file as: " + tempFile.toString()); + Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to save the upload as a temp file (temp disk space?)", ioex, this); + } + + // A file size check, before we do anything else: + // (note that "no size limit set" = "unlimited") + // (also note, that if this is a zip file, we'll be checking + // the size limit for each of the individual unpacked files) + Long fileSize = tempFile.toFile().length(); + if (fileSizeLimit != null && fileSize > fileSizeLimit) { + try { + tempFile.toFile().delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)), this); + } + + } else { + throw new CommandExecutionException("Temp directory is not configured.", this); + } + + logger.fine("mime type supplied: " + suppliedContentType); + + // Let's try our own utilities (Jhove, etc.) to determine the file type + // of the uploaded file. (We may already have a mime type supplied for this + // file - maybe the type that the browser recognized on upload; or, if + // it's a harvest, maybe the remote server has already given us the type + // for this file... with our own type utility we may or may not do better + // than the type supplied: + // -- L.A. + String recognizedType = null; + + try { + recognizedType = determineFileType(tempFile.toFile(), fileName); + logger.fine("File utility recognized the file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + if (useRecognizedType(suppliedContentType, recognizedType)) { + finalType = recognizedType; + } + } + + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); + } + + if (finalType == null) { + finalType = (suppliedContentType == null || suppliedContentType.equals("")) + ? MIME_TYPE_UNDETERMINED_DEFAULT + : suppliedContentType; + } + + // A few special cases: + // if this is a gzipped FITS file, we'll uncompress it, and ingest it as + // a regular FITS file: + if (finalType.equals("application/fits-gzipped")) { + + InputStream uncompressedIn = null; + String finalFileName = fileName; + // if the file name had the ".gz" extension, remove it, + // since we are going to uncompress it: + if (fileName != null && fileName.matches(".*\\.gz$")) { + finalFileName = fileName.replaceAll("\\.gz$", ""); + } + + DataFile datafile = null; + try { + uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); + File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit); + datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); + } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { + // it looks like we simply skip the file silently, if its uncompressed size + // exceeds the limit. we should probably report this in detail instead. 
+ datafile = null; + } finally { + if (uncompressedIn != null) { + try { + uncompressedIn.close(); + } catch (IOException e) { + } + } + } + + // If we were able to produce an uncompressed file, we'll use it + // to create and return a final DataFile; if not, we're not going + // to do anything - and then a new DataFile will be created further + // down, from the original, uncompressed file. + if (datafile != null) { + // remove the compressed temp file: + try { + tempFile.toFile().delete(); + } catch (SecurityException ex) { + // (this is very non-fatal) + logger.warning("Failed to delete temporary file " + tempFile.toString()); + } + + datafiles.add(datafile); + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + // If it's a ZIP file, we are going to unpack it and create multiple + // DataFile objects from its contents: + } else if (finalType.equals("application/zip")) { + + ZipInputStream unZippedIn = null; + ZipEntry zipEntry = null; + + int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); + + try { + Charset charset = null; + /* + TODO: (?) + We may want to investigate somehow letting the user specify + the charset for the filenames in the zip file... + - otherwise, ZipInputStream bails out if it encounters a file + name that's not valid in the current charset (i.e., UTF-8, in + our case). It would be a bit trickier than what we're doing for + SPSS tabular ingests - with the lang. encoding pulldown menu - + because this encoding needs to be specified *before* we upload and + attempt to unzip the file. + -- L.A. 4.0 beta12 + logger.info("default charset is "+Charset.defaultCharset().name()); + if (Charset.isSupported("US-ASCII")) { + logger.info("charset US-ASCII is supported."); + charset = Charset.forName("US-ASCII"); + if (charset != null) { + logger.info("was able to obtain charset for US-ASCII"); + } + + } + */ + + if (charset != null) { + unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); + } else { + unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); + } + + Long storageQuotaLimitForUnzippedFiles = storageQuotaLimit; + while (true) { + try { + zipEntry = unZippedIn.getNextEntry(); + } catch (IllegalArgumentException iaex) { + // Note: + // ZipInputStream documentation doesn't even mention that + // getNextEntry() throws an IllegalArgumentException! + // but that's what happens if the file name of the next + // entry is not valid in the current CharSet. + // -- L.A. + warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?)
Saving the file as is."; + logger.warning(warningMessage); + throw new IOException(); + } + + if (zipEntry == null) { + break; + } + // Note that some zip entries may be directories - we + // simply skip them: + + if (!zipEntry.isDirectory()) { + if (datafiles.size() > fileNumberLimit) { + logger.warning("Zip upload - too many files."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + + String fileEntryName = zipEntry.getName(); + logger.fine("ZipEntry, file: " + fileEntryName); + + if (fileEntryName != null && !fileEntryName.equals("")) { + + String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); + + // Check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { + // OK, this seems like an OK file entry - we'll try + // to read it and create a DataFile with it: + + File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit, storageQuotaLimitForUnzippedFiles); + DataFile datafile = FileUtil.createSingleDataFile(version, + unZippedTempFile, + null, + shortName, + MIME_TYPE_UNDETERMINED_DEFAULT, + ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); + + storageQuotaLimitForUnzippedFiles = storageQuotaLimitForUnzippedFiles - datafile.getFilesize(); + + if (!fileEntryName.equals(shortName)) { + // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), + // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all + // the leading, trailing and duplicate slashes; then replace all the characters that + // don't pass our validation rules. + String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); + directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); + // if (!"".equals(directoryName)) { + if (!StringUtil.isEmpty(directoryName)) { + logger.fine("setting the directory label to " + directoryName); + datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } + + if (datafile != null) { + // We have created this datafile with the mime type "unknown"; + // Now that we have it saved in a temporary location, + // let's try and determine its real type: + + String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); + + try { + recognizedType = determineFileType(new File(tempFileName), shortName); + logger.fine("File utility recognized unzipped file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + datafile.setContentType(recognizedType); + } + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); + } + + datafiles.add(datafile); + } + } + } + } + unZippedIn.closeEntry(); + + } + + } catch (IOException ioex) { + // just clear the datafiles list and let + // ingest default to creating a single DataFile out + // of the unzipped file. 
+ logger.warning("Unzipping failed; rolling back to saving the file as is."); + if (warningMessage == null) { + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed"); + } + + datafiles.clear(); + } catch (FileExceedsMaxSizeException femsx) { + logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); + datafiles.clear(); + } catch (FileExceedsStorageQuotaException fesqx) { + logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); + datafiles.clear(); + } finally { + if (unZippedIn != null) { + try { + unZippedIn.close(); + } catch (Exception zEx) { + } + } + } + if (datafiles.size() > 0) { + // remove the uploaded zip file: + try { + Files.delete(tempFile); + } catch (IOException ioex) { + // do nothing - it's just a temp file. + logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); + } + // and return: + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { + // Shape files may have to be split into multiple files, + // one zip archive per each complete set of shape files: + + // File rezipFolder = new File(this.getFilesTempDirectory()); + File rezipFolder = FileUtil.getShapefileUnzipTempDirectory(); + + IngestServiceShapefileHelper shpIngestHelper; + shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); + + boolean didProcessWork = shpIngestHelper.processFile(); + if (!(didProcessWork)) { + logger.severe("Processing of zipped shapefile failed."); + return CreateDataFileResult.error(fileName, finalType); + } + + try { + Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; + + for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { + FileInputStream finalFileInputStream = new FileInputStream(finalFile); + finalType = FileUtil.determineContentType(finalFile); + if (finalType == null) { + logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); + continue; + } + + File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimitForRezippedFiles); + DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); + + String directoryName = null; + String absolutePathName = finalFile.getParent(); + if (absolutePathName != null) { + if (absolutePathName.length() > rezipFolder.toString().length()) { + // This file lives in a subfolder - we want to + // preserve it in the FileMetadata: + directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1); + + if (!StringUtil.isEmpty(directoryName)) { + new_datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } + } + if (new_datafile != null) { + datafiles.add(new_datafile); + // todo: can this new_datafile be null? 
+ storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); + } else { + logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); + } + try { + finalFileInputStream.close(); + } catch (IOException ioex) { + // this one can be ignored + } + + } + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { + logger.severe("One of the unzipped shape files exceeded the size limit, or the storage quota; giving up. " + femsx.getMessage()); + datafiles.clear(); + // (or should we throw an exception, instead of skipping it quietly?) + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to process one of the components of the unpacked shape file", ioex, this); + // todo? - maybe try to provide a more detailed explanation, of which repackaged component, etc.? + } + + // Delete the temp directory used for unzipping + // The try-catch is due to errors encountered when using NFS for storing files, + // cf. https://github.com/IQSS/dataverse/issues/5909 + try { + FileUtils.deleteDirectory(rezipFolder); + } catch (IOException ioex) { + // do nothing - it's a temp folder. + logger.warning("Could not remove temp folder, error message: " + ioex.getMessage()); + } + + if (datafiles.size() > 0) { + // remove the uploaded zip file: + try { + Files.delete(tempFile); + } catch (IOException ioex) { + // ignore - it's just a temp file - but let's log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); + } catch (SecurityException se) { + // same + logger.warning("Unable to delete: " + tempFile.toString() + " due to Security Exception: " + + se.getMessage()); + } + return CreateDataFileResult.success(fileName, finalType, datafiles); + } else { + logger.severe("No files added from directory of rezipped shapefiles"); + } + return CreateDataFileResult.error(fileName, finalType); + + } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) { + + try { + Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); + if (bagItFileHandler.isPresent()) { + CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(ctxt.systemConfig(), version, fileName, tempFile.toFile()); + return result; + } + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to process uploaded BagIt file", ioex, this); + } + } + } else { + // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied + finalType = StringUtils.isBlank(suppliedContentType) ?
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String type = determineFileTypeByNameAndExtension(fileName); + if (!StringUtils.isBlank(type)) { + //Use rules for deciding when to trust browser supplied type + if (useRecognizedType(finalType, type)) { + finalType = type; + } + logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } + } + // Finally, if none of the special cases above were applicable (or + // if we were unable to unpack an uploaded file, etc.), we'll just + // create and return a single DataFile: + File newFile = null; + if (tempFile != null) { + newFile = tempFile.toFile(); + } + + // We have already checked that this file does not exceed the individual size limit; + // but if we are processing it as is, as a single file, we need to check that + // its size does not go beyond the allocated storage quota (if specified): + + long fileSize = newFile.length(); + + if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { + try { + tempFile.toFile().delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)), this); + } + + DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); + File f = null; + if (tempFile != null) { + f = tempFile.toFile(); + } + if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { + + if (warningMessage != null) { + createIngestFailureReport(datafile, warningMessage); + datafile.SetIngestProblem(); + } + datafiles.add(datafile); + + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + return CreateDataFileResult.error(fileName, finalType); + } // end createDataFiles +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index d84e18d5931..7f44b4c6a0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -576,7 +576,15 @@ Whether Harvesting (OAI) service is enabled /** * The URL for the DvWebLoader tool (see github.com/gdcc/dvwebloader for details) */ - WebloaderUrl + WebloaderUrl, + /** + * Enforce storage quotas: + */ + UseStorageQuotas, + /** + * Placeholder storage quota (defines the same quota setting for every user; used to test the concept of a quota).
+ */ + StorageQuotaSizeInBytes ; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index c600abfd409..ba24472b314 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -105,6 +105,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import java.util.Arrays; import org.apache.commons.io.IOUtils; @@ -411,7 +412,7 @@ public static String getUserFriendlyOriginalType(DataFile dataFile) { * Returns a content type string for a FileObject * */ - private static String determineContentType(File fileObject) { + public static String determineContentType(File fileObject) { if (fileObject==null){ return null; } @@ -902,7 +903,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit); datafile = createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm()); - } catch (IOException | FileExceedsMaxSizeException ioex) { + } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { datafile = null; } finally { if (uncompressedIn != null) { @@ -1068,7 +1069,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input } datafiles.clear(); - } catch (FileExceedsMaxSizeException femsx) { + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); datafiles.clear(); @@ -1154,7 +1155,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input finalFileInputStream.close(); } - } catch (FileExceedsMaxSizeException femsx) { + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.severe("One of the unzipped shape files exceeded the size limit; giving up. 
" + femsx.getMessage()); datafiles.clear(); } @@ -1271,7 +1272,12 @@ public static boolean useRecognizedType(String suppliedContentType, String recog } public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) - throws IOException, FileExceedsMaxSizeException { + throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException { + return saveInputStreamInTempFile(inputStream, fileSizeLimit, null); + } + + public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit, Long storageQuotaLimit) + throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException { Path tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); if (inputStream != null && tempFile != null) { @@ -1285,6 +1291,11 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS throw new FileExceedsMaxSizeException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); } + if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { + try {tempFile.toFile().delete();} catch (Exception ex) {} + throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_quota"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + } + return tempFile.toFile(); } throw new IOException("Failed to save uploaded file."); @@ -1325,7 +1336,6 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); FileMetadata fmd = new FileMetadata(); - // TODO: add directoryLabel? 
fmd.setLabel(fileName); if (addToDataset) { @@ -1341,13 +1351,13 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil fmd.setDatasetVersion(version); version.getDataset().getFiles().add(datafile); } - if(storageIdentifier==null) { - generateStorageIdentifier(datafile); - if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { - return null; - } + if (storageIdentifier == null) { + generateStorageIdentifier(datafile); + if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { + return null; + } } else { - datafile.setStorageIdentifier(storageIdentifier); + datafile.setStorageIdentifier(storageIdentifier); } if ((checksum !=null)&&(!checksum.isEmpty())) { @@ -1372,7 +1382,7 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil Naming convention: getFilesTempDirectory() + "shp_" + "yyyy-MM-dd-hh-mm-ss-SSS" */ - private static File getShapefileUnzipTempDirectory(){ + public static File getShapefileUnzipTempDirectory(){ String tempDirectory = getFilesTempDirectory(); if (tempDirectory == null){ diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index c989add6e3d..ac4a3970379 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -892,7 +892,7 @@ public String toString() { } } - + public boolean isPublicInstall(){ boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); @@ -1149,4 +1149,18 @@ public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { return !ret; } + + public boolean isStorageQuotasEnforced() { + return settingsService.isTrueForKey(SettingsServiceBean.Key.UseStorageQuotas, false); + } + + /** + * This method should only be used temporarily, for testing the new + * storage quota mechanism.
(it uses the same value as the quota for + * *everybody* regardless of the circumstances, defined as a database + * setting) + */ + public Long getTestStorageQuotaLimit() { + return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.StorageQuotaSizeInBytes); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java index 2bcac04076a..ecb34bdcfb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.FileUtil; import java.io.File; @@ -43,7 +44,11 @@ public void deleteFile(Path filePath) { } public File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) throws IOException, FileExceedsMaxSizeException { - return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit); + try { + return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit); + } catch (FileExceedsStorageQuotaException fesqx) { + return null; + } } public String determineFileType(File file, String fileName) throws IOException { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java new file mode 100644 index 00000000000..29eeca254f7 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java @@ -0,0 +1,22 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.util.file; + +/** + * + * @author landreev + */ +public class FileExceedsStorageQuotaException extends Exception { + + public FileExceedsStorageQuotaException(String message) { + super(message); + } + + public FileExceedsStorageQuotaException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 45807dc7cde..c1fd4ebaf10 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2145,6 +2145,7 @@ file.message.replaceSuccess=The file has been replaced. file.addreplace.file_size_ok=File size is in range. file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. +file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. 
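Both new keys are read as ordinary database settings (via isTrueForKey and getValueForKeyAsLong above), so, assuming they follow the standard colon-prefixed admin settings endpoint convention, enabling the experimental enforcement on a test installation would presumably look like the commands below; the 1073741824 (1 GB) value is an arbitrary example:

    curl -X PUT -d true http://localhost:8080/api/admin/settings/:UseStorageQuotas
    curl -X PUT -d 1073741824 http://localhost:8080/api/admin/settings/:StorageQuotaSizeInBytes

With both set, the getUserStorageQuota() method introduced earlier compares each user's total usage against this single installation-wide allocation.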
file.addreplace.error.parsing=Error in parsing provided json diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a4e635b8c14..99db5abd2dc 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -155,6 +155,7 @@ fileLimit="#{EditDatafilesPage.getMaxNumberOfFiles()}" invalidSizeMessage="#{bundle['file.edit.error.file_exceeds_limit']}" sequential="true" + previewWidth="-1" widgetVar="fileUploadWidget"> From d5fd5e3e690f0b39d630b4774ec2807b2ec08750 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 18:35:56 -0500 Subject: [PATCH 0031/1092] switched to the new Create Files command in the remaining places where the utility was used. #9361 --- .../iq/dataverse/EditDatafilesPage.java | 18 +++--- .../datadeposit/MediaResourceManagerImpl.java | 58 ++++++++++--------- .../datasetutility/AddReplaceFileHelper.java | 12 ++-- .../harvard/iq/dataverse/util/FileUtil.java | 6 +- 4 files changed, 54 insertions(+), 40 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 74c4e782d56..928bf635ffa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1508,14 +1508,16 @@ public void handleDropBoxUpload(ActionEvent event) { // for example, multiple files can be extracted from an uncompressed // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); - } catch (IOException ex) { + } catch (CommandException ex) { this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}", new Object[]{fileName, fileLink}); continue; - }/*catch (FileExceedsMaxSizeException ex){ + } /*catch (FileExceedsMaxSizeException ex){ this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}: {2}", new Object[]{fileName, fileLink, ex.getMessage()}); continue; }*/ finally { @@ -2040,8 +2042,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // zip file. 
///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); - Command cmd; - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); @@ -2165,10 +2166,13 @@ public void handleExternalUpload() { // for example, multiple files can be extracted from an uncompressed // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); - } catch (IOException ex) { + } catch (CommandException ex) { logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName}); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 5491024c73c..f21a65bdf1e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -6,14 +6,17 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -69,6 +72,8 @@ public class MediaResourceManagerImpl implements MediaResourceManager { SwordAuth swordAuth; @Inject UrlManager urlManager; + @Inject + DataverseRequestServiceBean dvRequestService; 
private HttpServletRequest httpRequest; @@ -301,37 +306,38 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au */ String guessContentTypeForMe = null; List dataFiles = new ArrayList<>(); + try { - try { - CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - dataFiles = createDataFilesResponse.getDataFiles(); - } catch (EJBException ex) { - Throwable cause = ex.getCause(); - if (cause != null) { - if (cause instanceof IllegalArgumentException) { - /** - * @todo should be safe to remove this catch of - * EJBException and IllegalArgumentException once - * this ticket is resolved: - * - * IllegalArgumentException: MALFORMED when - * uploading certain zip files - * https://github.com/IQSS/dataverse/issues/1021 - */ - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. Problem with zip file, perhaps: " + cause); - } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + cause); - } + //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + dataFiles = createDataFilesResult.getDataFiles(); + } catch (CommandException ex) { + Throwable cause = ex.getCause(); + if (cause != null) { + if (cause instanceof IllegalArgumentException) { + /** + * @todo should be safe to remove this catch of + * EJBException and IllegalArgumentException once this + * ticket is resolved: + * + * IllegalArgumentException: MALFORMED when uploading + * certain zip files + * https://github.com/IQSS/dataverse/issues/1021 + */ + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset. Problem with zip file, perhaps: " + cause); } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. No cause: " + ex.getMessage()); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + cause); } - } /*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) { + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); + } + } + /*TODO: L.A. 4.6! 
catch (FileExceedsMaxSizeException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + ex.getMessage()); //Logger.getLogger(MediaResourceManagerImpl.class.getName()).log(Level.SEVERE, null, ex); - }*/ - } catch (IOException ex) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); - } + }*/ + if (!dataFiles.isEmpty()) { Set constraintViolations = editVersion.validate(); if (constraintViolations.size() > 0) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 1d0ec0f19d9..e31f86093ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -63,6 +63,7 @@ import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_ERROR; import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_OK; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; /** @@ -1205,17 +1206,20 @@ private boolean step_030_createNewFilesViaIngest(){ clone = workingVersion.cloneDatasetVersion(); } try { - CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, + /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, this.newFileInputStream, this.newFileName, this.newFileContentType, this.newStorageIdentifier, this.newCheckSum, this.newCheckSumType, - this.systemConfig); - initialFileList = result.getDataFiles(); + this.systemConfig);*/ + + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, newCheckSum, newCheckSumType); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + initialFileList = createDataFilesResult.getDataFiles(); - } catch (IOException ex) { + } catch (CommandException ex) { if (!Strings.isNullOrEmpty(ex.getMessage())) { this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage()); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index ba24472b314..0c099242849 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -805,7 +805,7 @@ public static String generateOriginalExtension(String fileType) { return ""; } - public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, + /*public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, SystemConfig systemConfig) throws IOException { ChecksumType checkSumType = DataFile.ChecksumType.MD5; @@ -813,7 +813,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input checkSumType = systemConfig.getFileFixityChecksumAlgorithm(); } return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig); - } + }*/ public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, 
ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException { List datafiles = new ArrayList<>(); @@ -1293,7 +1293,7 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_quota"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); } return tempFile.toFile(); From 6210c3435ff7df308a6491c5b9a0b0b23d758774 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 18:43:23 -0500 Subject: [PATCH 0032/1092] removed the static utility methods that have been turned into a command. #9361 --- .../harvard/iq/dataverse/util/FileUtil.java | 430 ------------------ 1 file changed, 430 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 0c099242849..014f44c5c33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -804,436 +804,6 @@ public static String generateOriginalExtension(String fileType) { } return ""; } - - /*public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, - String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, - SystemConfig systemConfig) throws IOException { - ChecksumType checkSumType = DataFile.ChecksumType.MD5; - if (newStorageIdentifier == null) { - checkSumType = systemConfig.getFileFixityChecksumAlgorithm(); - } - return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig); - }*/ - - public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException { - List datafiles = new ArrayList<>(); - - //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default - if(newCheckSumType == null) { - newCheckSumType = systemConfig.getFileFixityChecksumAlgorithm(); - } - - String warningMessage = null; - - // save the file, in the temporary location for now: - Path tempFile = null; - - Long fileSizeLimit = systemConfig.getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId()); - String finalType = null; - if (newStorageIdentifier == null) { - if (getFilesTempDirectory() != null) { - tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); - // "temporary" location is the key here; this is why we are not using - // the DataStore framework for this - the assumption is that - // temp files will always be stored on the local filesystem. - // -- L.A. Jul. 
2014 - logger.fine("Will attempt to save the file as: " + tempFile.toString()); - Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); - - // A file size check, before we do anything else: - // (note that "no size limit set" = "unlimited") - // (also note, that if this is a zip file, we'll be checking - // the size limit for each of the individual unpacked files) - Long fileSize = tempFile.toFile().length(); - if (fileSizeLimit != null && fileSize > fileSizeLimit) { - try { - tempFile.toFile().delete(); - } catch (Exception ex) { - } - throw new IOException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); - } - - } else { - throw new IOException("Temp directory is not configured."); - } - logger.fine("mime type supplied: " + suppliedContentType); - // Let's try our own utilities (Jhove, etc.) to determine the file type - // of the uploaded file. (We may already have a mime type supplied for this - // file - maybe the type that the browser recognized on upload; or, if - // it's a harvest, maybe the remote server has already given us the type - // for this file... with our own type utility we may or may not do better - // than the type supplied: - // -- L.A. - String recognizedType = null; - - try { - recognizedType = determineFileType(tempFile.toFile(), fileName); - logger.fine("File utility recognized the file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - if (useRecognizedType(suppliedContentType, recognizedType)) { - finalType = recognizedType; - } - } - - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - if (finalType == null) { - finalType = (suppliedContentType == null || suppliedContentType.equals("")) - ? MIME_TYPE_UNDETERMINED_DEFAULT - : suppliedContentType; - } - - // A few special cases: - // if this is a gzipped FITS file, we'll uncompress it, and ingest it as - // a regular FITS file: - if (finalType.equals("application/fits-gzipped")) { - - InputStream uncompressedIn = null; - String finalFileName = fileName; - // if the file name had the ".gz" extension, remove it, - // since we are going to uncompress it: - if (fileName != null && fileName.matches(".*\\.gz$")) { - finalFileName = fileName.replaceAll("\\.gz$", ""); - } - - DataFile datafile = null; - try { - uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); - File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit); - datafile = createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm()); - } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { - datafile = null; - } finally { - if (uncompressedIn != null) { - try { - uncompressedIn.close(); - } catch (IOException e) { - } - } - } - - // If we were able to produce an uncompressed file, we'll use it - // to create and return a final DataFile; if not, we're not going - // to do anything - and then a new DataFile will be created further - // down, from the original, uncompressed file. 
- if (datafile != null) { - // remove the compressed temp file: - try { - tempFile.toFile().delete(); - } catch (SecurityException ex) { - // (this is very non-fatal) - logger.warning("Failed to delete temporary file " + tempFile.toString()); - } - - datafiles.add(datafile); - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - // If it's a ZIP file, we are going to unpack it and create multiple - // DataFile objects from its contents: - } else if (finalType.equals("application/zip")) { - - ZipInputStream unZippedIn = null; - ZipEntry zipEntry = null; - - int fileNumberLimit = systemConfig.getZipUploadFilesLimit(); - - try { - Charset charset = null; - /* - TODO: (?) - We may want to investigate somehow letting the user specify - the charset for the filenames in the zip file... - - otherwise, ZipInputStream bails out if it encounteres a file - name that's not valid in the current charest (i.e., UTF-8, in - our case). It would be a bit trickier than what we're doing for - SPSS tabular ingests - with the lang. encoding pulldown menu - - because this encoding needs to be specified *before* we upload and - attempt to unzip the file. - -- L.A. 4.0 beta12 - logger.info("default charset is "+Charset.defaultCharset().name()); - if (Charset.isSupported("US-ASCII")) { - logger.info("charset US-ASCII is supported."); - charset = Charset.forName("US-ASCII"); - if (charset != null) { - logger.info("was able to obtain charset for US-ASCII"); - } - - } - */ - - if (charset != null) { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); - } else { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); - } - - while (true) { - try { - zipEntry = unZippedIn.getNextEntry(); - } catch (IllegalArgumentException iaex) { - // Note: - // ZipInputStream documentation doesn't even mention that - // getNextEntry() throws an IllegalArgumentException! - // but that's what happens if the file name of the next - // entry is not valid in the current CharSet. - // -- L.A. - warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) 
Saving the file as is."; - logger.warning(warningMessage); - throw new IOException(); - } - - if (zipEntry == null) { - break; - } - // Note that some zip entries may be directories - we - // simply skip them: - - if (!zipEntry.isDirectory()) { - if (datafiles.size() > fileNumberLimit) { - logger.warning("Zip upload - too many files."); - warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit - + "); please upload a zip archive with fewer files, if you want them to be ingested " - + "as individual DataFiles."; - throw new IOException(); - } - - String fileEntryName = zipEntry.getName(); - logger.fine("ZipEntry, file: " + fileEntryName); - - if (fileEntryName != null && !fileEntryName.equals("")) { - - String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); - - // Check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - // OK, this seems like an OK file entry - we'll try - // to read it and create a DataFile with it: - - File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit); - DataFile datafile = createSingleDataFile(version, unZippedTempFile, null, shortName, - MIME_TYPE_UNDETERMINED_DEFAULT, - systemConfig.getFileFixityChecksumAlgorithm(), null, false); - - if (!fileEntryName.equals(shortName)) { - // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), - // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all - // the leading, trailing and duplicate slashes; then replace all the characters that - // don't pass our validation rules. - String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); - directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); - // if (!"".equals(directoryName)) { - if (!StringUtil.isEmpty(directoryName)) { - logger.fine("setting the directory label to " + directoryName); - datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } - - if (datafile != null) { - // We have created this datafile with the mime type "unknown"; - // Now that we have it saved in a temporary location, - // let's try and determine its real type: - - String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); - - try { - recognizedType = determineFileType(new File(tempFileName), shortName); - logger.fine("File utility recognized unzipped file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - datafile.setContentType(recognizedType); - } - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - datafiles.add(datafile); - } - } - } - } - unZippedIn.closeEntry(); - - } - - } catch (IOException ioex) { - // just clear the datafiles list and let - // ingest default to creating a single DataFile out - // of the unzipped file. - logger.warning("Unzipping failed; rolling back to saving the file as is."); - if (warningMessage == null) { - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed"); - } - - datafiles.clear(); - } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { - logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. 
" + femsx.getMessage()); - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); - datafiles.clear(); - } finally { - if (unZippedIn != null) { - try { - unZippedIn.close(); - } catch (Exception zEx) { - } - } - } - if (datafiles.size() > 0) { - // link the data files to the dataset/version: - // (except we no longer want to do this! -- 4.6) - /*Iterator itf = datafiles.iterator(); - while (itf.hasNext()) { - DataFile datafile = itf.next(); - datafile.setOwner(version.getDataset()); - if (version.getFileMetadatas() == null) { - version.setFileMetadatas(new ArrayList()); - } - version.getFileMetadatas().add(datafile.getFileMetadata()); - datafile.getFileMetadata().setDatasetVersion(version); - - version.getDataset().getFiles().add(datafile); - } */ - // remove the uploaded zip file: - try { - Files.delete(tempFile); - } catch (IOException ioex) { - // do nothing - it's just a temp file. - logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); - } - // and return: - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { - // Shape files may have to be split into multiple files, - // one zip archive per each complete set of shape files: - - // File rezipFolder = new File(this.getFilesTempDirectory()); - File rezipFolder = getShapefileUnzipTempDirectory(); - - IngestServiceShapefileHelper shpIngestHelper; - shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); - - boolean didProcessWork = shpIngestHelper.processFile(); - if (!(didProcessWork)) { - logger.severe("Processing of zipped shapefile failed."); - return CreateDataFileResult.error(fileName, finalType); - } - - try { - for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { - FileInputStream finalFileInputStream = new FileInputStream(finalFile); - finalType = determineContentType(finalFile); - if (finalType == null) { - logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); - continue; - } - - File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit); - DataFile new_datafile = createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, systemConfig.getFileFixityChecksumAlgorithm()); - String directoryName = null; - String absolutePathName = finalFile.getParent(); - if (absolutePathName != null) { - if (absolutePathName.length() > rezipFolder.toString().length()) { - // This file lives in a subfolder - we want to - // preserve it in the FileMetadata: - directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1); - - if (!StringUtil.isEmpty(directoryName)) { - new_datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } - } - if (new_datafile != null) { - datafiles.add(new_datafile); - } else { - logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); - } - finalFileInputStream.close(); - - } - } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { - logger.severe("One of the unzipped shape files exceeded the size limit; giving up. " + femsx.getMessage()); - datafiles.clear(); - } - - // Delete the temp directory used for unzipping - // The try-catch is due to error encountered in using NFS for stocking file, - // cf. 
https://github.com/IQSS/dataverse/issues/5909 - try { - FileUtils.deleteDirectory(rezipFolder); - } catch (IOException ioex) { - // do nothing - it's a tempo folder. - logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); - } - - if (datafiles.size() > 0) { - // remove the uploaded zip file: - try { - Files.delete(tempFile); - } catch (IOException ioex) { - // do nothing - it's just a temp file. - logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); - } catch (SecurityException se) { - logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: " - + se.getMessage()); - } - return CreateDataFileResult.success(fileName, finalType, datafiles); - } else { - logger.severe("No files added from directory of rezipped shapefiles"); - } - return CreateDataFileResult.error(fileName, finalType); - - } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) { - Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); - if (bagItFileHandler.isPresent()) { - CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(systemConfig, version, fileName, tempFile.toFile()); - return result; - } - } - } else { - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied - finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = determineFileTypeByNameAndExtension(fileName); - if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - if (useRecognizedType(finalType, type)) { - finalType = type; - } - logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } - } - // Finally, if none of the special cases above were applicable (or - // if we were unable to unpack an uploaded file, etc.), we'll just - // create and return a single DataFile: - File newFile = null; - if (tempFile != null) { - newFile = tempFile.toFile(); - } - - - DataFile datafile = createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); - File f = null; - if (tempFile != null) { - f = tempFile.toFile(); - } - if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { - - if (warningMessage != null) { - createIngestFailureReport(datafile, warningMessage); - datafile.SetIngestProblem(); - } - datafiles.add(datafile); - - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - return CreateDataFileResult.error(fileName, finalType); - } // end createDataFiles - public static boolean useRecognizedType(String suppliedContentType, String recognizedType) { // is it any better than the type that was supplied to us, From 1a22b11c65353f7c2bd0677b2f4bb2e134aebcb5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 24 Feb 2023 17:02:39 -0500 Subject: [PATCH 0033/1092] Added info messages about the remaining storage quota, if enforced, for the user on the upload page. 
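For context, the diff below derives the new message from two existing beans: when systemConfig.isStorageQuotasEnforced() is true, maxTotalUploadSizeInBytes is set from datafileService.getUserStorageQuota(user, dataset).getRemainingQuotaInBytes(); otherwise it stays null, and null means "no quota, hide the hint". A minimal standalone Java sketch of that convention follows (not code from this patch; the class name and byte values are invented for illustration):

    public class QuotaMessageSketch {
        // null stands for "no quota enforced"; the upload page then hides the hint
        static Long remainingQuota(boolean quotasEnforced, long remainingBytes) {
            return quotasEnforced ? Long.valueOf(remainingBytes) : null;
        }

        public static void main(String[] args) {
            Long max = remainingQuota(true, 10L * 1024 * 1024); // pretend 10 MB remain
            if (max != null) {
                // corresponds to the new bundle key file.selectToAdd.tipQuotaRemaining
                System.out.println("Storage quota: " + max + " bytes remaining.");
            }
        }
    }
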
#9361 --- .../iq/dataverse/EditDatafilesPage.java | 20 +++++++++++++++---- src/main/java/propertyFiles/Bundle.properties | 3 ++- src/main/webapp/editFilesFragment.xhtml | 5 +++++ 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 928bf635ffa..420642f2fa5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -343,6 +343,11 @@ public Long getMaxFileUploadSizeInBytes() { public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } + + public boolean isUnlimitedUploadFileSize() { + + return this.maxFileUploadSizeInBytes == null; + } public Long getMaxTotalUploadSizeInBytes() { return maxTotalUploadSizeInBytes; @@ -351,10 +356,9 @@ public Long getMaxTotalUploadSizeInBytes() { public String getHumanMaxTotalUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes); } - - public boolean isUnlimitedUploadFileSize() { - - return this.maxFileUploadSizeInBytes == null; + + public boolean isStorageQuotaEnforced() { + return maxTotalUploadSizeInBytes != null; } public Long getMaxIngestSizeInBytes() { @@ -524,6 +528,11 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo selectedFiles = selectedFileMetadatasList; this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + if (systemConfig.isStorageQuotasEnforced()) { + this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + } else { + this.maxTotalUploadSizeInBytes = null; + } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); @@ -575,6 +584,9 @@ public String init() { clone = workingVersion.cloneDatasetVersion(); this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + if (systemConfig.isStorageQuotasEnforced()) { + this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index c1fd4ebaf10..dd9b398b709 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1659,7 +1659,8 @@ file.select.tooltip=Select Files file.selectAllFiles=Select all {0} files in this dataset. file.dynamicCounter.filesPerPage=Files Per Page file.selectToAddBtn=Select Files to Add -file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipQuotaRemaining=Storage quota: {0} remaining. file.selectToAdd.tipMaxNumFiles=Maximum of {0} {0, choice, 0#files|1#file|2#files} per upload. file.selectToAdd.tipTabularLimit=Tabular file ingest is limited to {2}. 
file.selectToAdd.tipPerFileTabularLimit=Ingest is limited to the following file sizes based on their format: {0}. diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 99db5abd2dc..77f7aab2f76 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -80,6 +80,11 @@ rendered="#{!EditDatafilesPage.isUnlimitedUploadFileSize()}"> + + + + From 9bdcba600915d44117db9ae8ca046c1e32d07c3d Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 13 Mar 2023 11:54:39 -0400 Subject: [PATCH 0034/1092] Alternative Title --- conf/solr/8.11.1/schema.xml | 2 +- scripts/api/data/metadatablocks/citation.tsv | 2 +- .../dataverse/export/ddi/DdiExportUtil.java | 8 +++-- .../export/openaire/OpenAireExportUtil.java | 32 +++++++++++++++++-- .../dataverse/export/dataset-all-defaults.txt | 4 +-- .../dataset-create-new-all-ddi-fields.json | 4 +-- 6 files changed, 42 insertions(+), 10 deletions(-) diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/8.11.1/schema.xml index f11938621fc..5fa9c7f0616 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/8.11.1/schema.xml @@ -250,7 +250,7 @@ - + diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index be32bb7134e..6b0f231c7b7 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -3,7 +3,7 @@ #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation - alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE FALSE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative + alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution otherId Other Identifier Another unique identifier for the Dataset (e.g. 
producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation
 otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation
 otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index eb7632dd03c..eb53473d4d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -202,8 +202,12 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto)
 writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage());
 writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle));
- writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle));
-
+
+ FieldDTO altField = dto2FieldDTO( version, DatasetFieldConstant.alternativeTitle, "citation" );
+ if (altField != null) {
+ writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage());
+ }
+
 xmlw.writeStartElement("IDNo");
 writeAttribute(xmlw, "agency", persistentAgency);
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
index bea3858a60e..34cb7a4e138 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
@@ -7,6 +7,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
+import java.util.List;
 import javax.json.JsonObject;
 import javax.xml.stream.XMLOutputFactory;
@@ -368,8 +369,8 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da
 String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle);
 title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language);
- String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle);
- title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language);
+ title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, "citation", title_check, language);
+
 writeEndTag(xmlw, title_check);
 }
@@ -404,6 +405,33 @@ private static boolean writeTitleElement(XMLStreamWriter xmlw, String titleType,
 }
 return title_check;
 }
+
+ private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException {
+ MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName);
+ if (block != null) {
+ logger.info("Block is not empty");
+ List<FieldDTO> fieldsBlock = block.getFields();
+ if (fieldsBlock != null) {
+ for (FieldDTO fieldDTO : fieldsBlock) {
+ logger.info(titleType + " " + fieldDTO.getTypeName());
+ if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) {
+ logger.info("Found Alt title");
+ List<String> fields = fieldDTO.getMultiplePrimitive();
+ for (String value : fields) {
+ title_check = writeTitleElement(xmlw, titleType, value, title_check, language);
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ return title_check;
+ }
+
+
+
 /**
 * 5, PublicationYear (M)
diff --git
a/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt b/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt index a3f0dffc767..62f2cd37447 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt +++ b/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt @@ -42,9 +42,9 @@ }, { "typeName": "alternativeTitle", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "Alternative Title" + "value": ["Alternative Title"] }, { "typeName": "alternativeURL", diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json index 1b327c15496..96f058b1b02 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json @@ -51,9 +51,9 @@ }, { "typeName": "alternativeTitle", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "Alternative Title" + "value": ["Alternative Title"] }, { "typeName": "otherId", From 7b8281c7a0f98350533047dac1790603557e463b Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 13 Mar 2023 13:08:24 -0400 Subject: [PATCH 0035/1092] citation --- scripts/api/data/metadatablocks/citation.tsv | 509 +++++++++---------- 1 file changed, 254 insertions(+), 255 deletions(-) diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index 6b0f231c7b7..20d858ddb4b 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -3,8 +3,8 @@ #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation - alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative - alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution + alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative + alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution otherId Other Identifier Another unique identifier for the Dataset (e.g. 
producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation @@ -24,26 +24,26 @@ keyword Keyword A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation - keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 "#VALUE" FALSE FALSE FALSE FALSE TRUE FALSE keyword citation topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation - topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation publication Related Publication The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier - publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. 
a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerLogoURL Logo URL The URL of the producer's logo https:// url 40
    FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL The URL of the producer's website https:// url 39 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL The URL of the producer's logo https:// url 40 "
    " FALSE FALSE FALSE FALSE FALSE FALSE producer citation productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation - productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation + productionPlace Production Location The location where the data and any related materials were produced or collected text 42 FALSE FALSE FALSE FALSE FALSE FALSE citation contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation @@ -54,8 +54,8 @@ distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54
    FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL The URL of the distributor's webpage https:// url 53 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54 "
    " FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted @@ -79,248 +79,247 @@ originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation -#controlledVocabulary DatasetField Value identifier displayOrder - subject Agricultural Sciences D01 0 - subject Arts and Humanities D0 1 - subject Astronomy and Astrophysics D1 2 - subject Business and Management D2 3 - subject Chemistry D3 4 - subject Computer and Information Science D7 5 - subject Earth and Environmental Sciences D4 6 - subject Engineering D5 7 - subject Law D8 8 - subject Mathematical Sciences D9 9 - subject Medicine, Health and Life Sciences D6 10 - subject Physics D10 11 - subject Social Sciences D11 12 - subject Other D12 13 - publicationIDType ark 0 - publicationIDType arXiv 1 - publicationIDType bibcode 2 - publicationIDType cstr 3 - publicationIDType doi 4 - publicationIDType ean13 5 - publicationIDType eissn 6 - publicationIDType handle 7 - publicationIDType isbn 8 - publicationIDType issn 9 - publicationIDType istc 10 - publicationIDType lissn 11 - publicationIDType lsid 12 - publicationIDType pmid 13 - publicationIDType purl 14 - publicationIDType upc 15 - publicationIDType url 16 - publicationIDType urn 17 - publicationIDType DASH-NRS 18 - contributorType Data Collector 0 - contributorType Data Curator 1 - contributorType Data Manager 2 - contributorType Editor 3 - contributorType Funder 4 - contributorType Hosting Institution 5 - contributorType Project Leader 6 - contributorType Project Manager 7 - contributorType Project Member 8 - contributorType Related Person 9 - contributorType Researcher 10 - contributorType Research Group 11 - contributorType Rights Holder 12 - contributorType Sponsor 13 - contributorType Supervisor 14 - contributorType Work Package Leader 15 - contributorType Other 16 - authorIdentifierScheme ORCID 0 - authorIdentifierScheme ISNI 1 - authorIdentifierScheme LCNA 2 - authorIdentifierScheme VIAF 3 - authorIdentifierScheme GND 4 - authorIdentifierScheme DAI 5 - authorIdentifierScheme ResearcherID 6 - authorIdentifierScheme ScopusID 7 - language Abkhaz 0 - language Afar 1 aar aa - language Afrikaans 2 afr af - language Akan 3 aka ak - language Albanian 4 sqi alb sq - language Amharic 5 amh am - language Arabic 6 ara ar - language Aragonese 7 arg an - language Armenian 8 hye arm hy - language Assamese 9 asm as - language Avaric 10 ava av - language Avestan 11 ave ae - language Aymara 12 aym ay - language Azerbaijani 13 aze az - language Bambara 14 bam bm - 
language Bashkir 15 bak ba - language Basque 16 eus baq eu - language Belarusian 17 bel be - language Bengali, Bangla 18 ben bn - language Bihari 19 bih bh - language Bislama 20 bis bi - language Bosnian 21 bos bs - language Breton 22 bre br - language Bulgarian 23 bul bg - language Burmese 24 mya bur my - language Catalan,Valencian 25 cat ca - language Chamorro 26 cha ch - language Chechen 27 che ce - language Chichewa, Chewa, Nyanja 28 nya ny - language Chinese 29 zho chi zh - language Chuvash 30 chv cv - language Cornish 31 cor kw - language Corsican 32 cos co - language Cree 33 cre cr - language Croatian 34 hrv src hr - language Czech 35 ces cze cs - language Danish 36 dan da - language Divehi, Dhivehi, Maldivian 37 div dv - language Dutch 38 nld dut nl - language Dzongkha 39 dzo dz - language English 40 eng en - language Esperanto 41 epo eo - language Estonian 42 est et - language Ewe 43 ewe ee - language Faroese 44 fao fo - language Fijian 45 fij fj - language Finnish 46 fin fi - language French 47 fra fre fr - language Fula, Fulah, Pulaar, Pular 48 ful ff - language Galician 49 glg gl - language Georgian 50 kat geo ka - language German 51 deu ger de - language Greek (modern) 52 gre ell el - language Guaraní 53 grn gn - language Gujarati 54 guj gu - language Haitian, Haitian Creole 55 hat ht - language Hausa 56 hau ha - language Hebrew (modern) 57 heb he - language Herero 58 her hz - language Hindi 59 hin hi - language Hiri Motu 60 hmo ho - language Hungarian 61 hun hu - language Interlingua 62 ina ia - language Indonesian 63 ind id - language Interlingue 64 ile ie - language Irish 65 gle ga - language Igbo 66 ibo ig - language Inupiaq 67 ipk ik - language Ido 68 ido io - language Icelandic 69 isl ice is - language Italian 70 ita it - language Inuktitut 71 iku iu - language Japanese 72 jpn ja - language Javanese 73 jav jv - language Kalaallisut, Greenlandic 74 kal kl - language Kannada 75 kan kn - language Kanuri 76 kau kr - language Kashmiri 77 kas ks - language Kazakh 78 kaz kk - language Khmer 79 khm km - language Kikuyu, Gikuyu 80 kik ki - language Kinyarwanda 81 kin rw - language Kyrgyz 82 - language Komi 83 kom kv - language Kongo 84 kon kg - language Korean 85 kor ko - language Kurdish 86 kur ku - language Kwanyama, Kuanyama 87 kua kj - language Latin 88 lat la - language Luxembourgish, Letzeburgesch 89 ltz lb - language Ganda 90 lug lg - language Limburgish, Limburgan, Limburger 91 lim li - language Lingala 92 lin ln - language Lao 93 lao lo - language Lithuanian 94 lit lt - language Luba-Katanga 95 lub lu - language Latvian 96 lav lv - language Manx 97 glv gv - language Macedonian 98 mkd mac mk - language Malagasy 99 mlg mg - language Malay 100 may msa ms - language Malayalam 101 mal ml - language Maltese 102 mlt mt - language MÄori 103 mao mri mi - language Marathi (MarÄá¹­hÄ«) 104 mar mr - language Marshallese 105 mah mh - language Mixtepec Mixtec 106 mix - language Mongolian 107 mon mn - language Nauru 108 nau na - language Navajo, Navaho 109 nav nv - language Northern Ndebele 110 nde nd - language Nepali 111 nep ne - language Ndonga 112 ndo ng - language Norwegian BokmÃ¥l 113 nob nb - language Norwegian Nynorsk 114 nno nn - language Norwegian 115 nor no - language Nuosu 116 - language Southern Ndebele 117 nbl nr - language Occitan 118 oci oc - language Ojibwe, Ojibwa 119 oji oj - language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu - language Oromo 121 orm om - language Oriya 122 ori or - language Ossetian, Ossetic 123 oss os - language Panjabi, Punjabi 
124 pan pa - language PÄli 125 pli pi - language Persian (Farsi) 126 per fas fa - language Polish 127 pol pl - language Pashto, Pushto 128 pus ps - language Portuguese 129 por pt - language Quechua 130 que qu - language Romansh 131 roh rm - language Kirundi 132 run rn - language Romanian 133 ron rum ro - language Russian 134 rus ru - language Sanskrit (Saá¹ská¹›ta) 135 san sa - language Sardinian 136 srd sc - language Sindhi 137 snd sd - language Northern Sami 138 sme se - language Samoan 139 smo sm - language Sango 140 sag sg - language Serbian 141 srp scc sr - language Scottish Gaelic, Gaelic 142 gla gd - language Shona 143 sna sn - language Sinhala, Sinhalese 144 sin si - language Slovak 145 slk slo sk - language Slovene 146 slv sl - language Somali 147 som so - language Southern Sotho 148 sot st - language Spanish, Castilian 149 spa es - language Sundanese 150 sun su - language Swahili 151 swa sw - language Swati 152 ssw ss - language Swedish 153 swe sv - language Tamil 154 tam ta - language Telugu 155 tel te - language Tajik 156 tgk tg - language Thai 157 tha th - language Tigrinya 158 tir ti - language Tibetan Standard, Tibetan, Central 159 tib bod bo - language Turkmen 160 tuk tk - language Tagalog 161 tgl tl - language Tswana 162 tsn tn - language Tonga (Tonga Islands) 163 ton to - language Turkish 164 tur tr - language Tsonga 165 tso ts - language Tatar 166 tat tt - language Twi 167 twi tw - language Tahitian 168 tah ty - language Uyghur, Uighur 169 uig ug - language Ukrainian 170 ukr uk - language Urdu 171 urd ur - language Uzbek 172 uzb uz - language Venda 173 ven ve - language Vietnamese 174 vie vi - language Volapük 175 vol vo - language Walloon 176 wln wa - language Welsh 177 cym wel cy - language Wolof 178 wol wo - language Western Frisian 179 fry fy - language Xhosa 180 xho xh - language Yiddish 181 yid yi - language Yoruba 182 yor yo - language Zhuang, Chuang 183 zha za - language Zulu 184 zul zu - language Not applicable 185 +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 + publicationIDType bibcode 2 + publicationIDType doi 3 + publicationIDType ean13 4 + publicationIDType eissn 5 + publicationIDType handle 6 + publicationIDType isbn 7 + publicationIDType issn 8 + publicationIDType istc 9 + publicationIDType lissn 10 + publicationIDType lsid 11 + publicationIDType pmid 12 + publicationIDType purl 13 + publicationIDType upc 14 + publicationIDType url 15 + publicationIDType urn 16 + publicationIDType DASH-NRS 17 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType 
Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + authorIdentifierScheme VIAF 3 + authorIdentifierScheme GND 4 + authorIdentifierScheme DAI 5 + authorIdentifierScheme ResearcherID 6 + authorIdentifierScheme ScopusID 7 + language Abkhaz 0 + language Afar 1 aar aa + language Afrikaans 2 afr af + language Akan 3 aka ak + language Albanian 4 sqi alb sq + language Amharic 5 amh am + language Arabic 6 ara ar + language Aragonese 7 arg an + language Armenian 8 hye arm hy + language Assamese 9 asm as + language Avaric 10 ava av + language Avestan 11 ave ae + language Aymara 12 aym ay + language Azerbaijani 13 aze az + language Bambara 14 bam bm + language Bashkir 15 bak ba + language Basque 16 eus baq eu + language Belarusian 17 bel be + language Bengali, Bangla 18 ben bn + language Bihari 19 bih bh + language Bislama 20 bis bi + language Bosnian 21 bos bs + language Breton 22 bre br + language Bulgarian 23 bul bg + language Burmese 24 mya bur my + language Catalan,Valencian 25 cat ca + language Chamorro 26 cha ch + language Chechen 27 che ce + language Chichewa, Chewa, Nyanja 28 nya ny + language Chinese 29 zho chi zh + language Chuvash 30 chv cv + language Cornish 31 cor kw + language Corsican 32 cos co + language Cree 33 cre cr + language Croatian 34 hrv src hr + language Czech 35 ces cze cs + language Danish 36 dan da + language Divehi, Dhivehi, Maldivian 37 div dv + language Dutch 38 nld dut nl + language Dzongkha 39 dzo dz + language English 40 eng en + language Esperanto 41 epo eo + language Estonian 42 est et + language Ewe 43 ewe ee + language Faroese 44 fao fo + language Fijian 45 fij fj + language Finnish 46 fin fi + language French 47 fra fre fr + language Fula, Fulah, Pulaar, Pular 48 ful ff + language Galician 49 glg gl + language Georgian 50 kat geo ka + language German 51 deu ger de + language Greek (modern) 52 gre ell el + language Guaraní 53 grn gn + language Gujarati 54 guj gu + language Haitian, Haitian Creole 55 hat ht + language Hausa 56 hau ha + language Hebrew (modern) 57 heb he + language Herero 58 her hz + language Hindi 59 hin hi + language Hiri Motu 60 hmo ho + language Hungarian 61 hun hu + language Interlingua 62 ina ia + language Indonesian 63 ind id + language Interlingue 64 ile ie + language Irish 65 gle ga + language Igbo 66 ibo ig + language Inupiaq 67 ipk ik + language Ido 68 ido io + language Icelandic 69 isl ice is + language Italian 70 ita it + language Inuktitut 71 iku iu + language Japanese 72 jpn ja + language Javanese 73 jav jv + language Kalaallisut, Greenlandic 74 kal kl + language Kannada 75 kan kn + language Kanuri 76 kau kr + language Kashmiri 77 kas ks + language Kazakh 78 kaz kk + language Khmer 79 khm km + language Kikuyu, Gikuyu 80 kik ki + language Kinyarwanda 81 kin rw + language Kyrgyz 82 + language Komi 83 kom kv + language Kongo 84 kon kg + language Korean 85 kor ko + language Kurdish 86 kur ku + language Kwanyama, Kuanyama 87 kua kj + language Latin 88 lat la + language Luxembourgish, Letzeburgesch 89 ltz lb + language Ganda 90 lug lg + language Limburgish, Limburgan, Limburger 91 lim li + language Lingala 92 lin ln + language Lao 93 lao lo + language Lithuanian 94 lit lt + language Luba-Katanga 95 lub lu + language Latvian 96 lav lv + language Manx 97 glv gv + language Macedonian 98 mkd mac mk + language Malagasy 99 mlg mg + language Malay 100 may msa ms + language Malayalam 101 mal ml + language Maltese 102 mlt mt + language MÄori 103 mao 
mri mi
+ language Marathi (Marāṭhī) 104 mar mr
+ language Marshallese 105 mah mh
+ language Mixtepec Mixtec 106 mix
+ language Mongolian 107 mon mn
+ language Nauru 108 nau na
+ language Navajo, Navaho 109 nav nv
+ language Northern Ndebele 110 nde nd
+ language Nepali 111 nep ne
+ language Ndonga 112 ndo ng
+ language Norwegian Bokmål 113 nob nb
+ language Norwegian Nynorsk 114 nno nn
+ language Norwegian 115 nor no
+ language Nuosu 116
+ language Southern Ndebele 117 nbl nr
+ language Occitan 118 oci oc
+ language Ojibwe, Ojibwa 119 oji oj
+ language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu
+ language Oromo 121 orm om
+ language Oriya 122 ori or
+ language Ossetian, Ossetic 123 oss os
+ language Panjabi, Punjabi 124 pan pa
+ language Pāli 125 pli pi
+ language Persian (Farsi) 126 per fas fa
+ language Polish 127 pol pl
+ language Pashto, Pushto 128 pus ps
+ language Portuguese 129 por pt
+ language Quechua 130 que qu
+ language Romansh 131 roh rm
+ language Kirundi 132 run rn
+ language Romanian 133 ron rum ro
+ language Russian 134 rus ru
+ language Sanskrit (Saṃskṛta) 135 san sa
+ language Sardinian 136 srd sc
+ language Sindhi 137 snd sd
+ language Northern Sami 138 sme se
+ language Samoan 139 smo sm
+ language Sango 140 sag sg
+ language Serbian 141 srp scc sr
+ language Scottish Gaelic, Gaelic 142 gla gd
+ language Shona 143 sna sn
+ language Sinhala, Sinhalese 144 sin si
+ language Slovak 145 slk slo sk
+ language Slovene 146 slv sl
+ language Somali 147 som so
+ language Southern Sotho 148 sot st
+ language Spanish, Castilian 149 spa es
+ language Sundanese 150 sun su
+ language Swahili 151 swa sw
+ language Swati 152 ssw ss
+ language Swedish 153 swe sv
+ language Tamil 154 tam ta
+ language Telugu 155 tel te
+ language Tajik 156 tgk tg
+ language Thai 157 tha th
+ language Tigrinya 158 tir ti
+ language Tibetan Standard, Tibetan, Central 159 tib bod bo
+ language Turkmen 160 tuk tk
+ language Tagalog 161 tgl tl
+ language Tswana 162 tsn tn
+ language Tonga (Tonga Islands) 163 ton to
+ language Turkish 164 tur tr
+ language Tsonga 165 tso ts
+ language Tatar 166 tat tt
+ language Twi 167 twi tw
+ language Tahitian 168 tah ty
+ language Uyghur, Uighur 169 uig ug
+ language Ukrainian 170 ukr uk
+ language Urdu 171 urd ur
+ language Uzbek 172 uzb uz
+ language Venda 173 ven ve
+ language Vietnamese 174 vie vi
+ language Volapük 175 vol vo
+ language Walloon 176 wln wa
+ language Welsh 177 cym wel cy
+ language Wolof 178 wol wo
+ language Western Frisian 179 fry fy
+ language Xhosa 180 xho xh
+ language Yiddish 181 yid yi
+ language Yoruba 182 yor yo
+ language Zhuang, Chuang 183 zha za
+ language Zulu 184 zul zu
+ language Not applicable 185

From e5a356a5132cb7296ca1878f0120f0a52faebbdb Mon Sep 17 00:00:00 2001
From: lubitchv
Date: Mon, 13 Mar 2023 13:35:42 -0400
Subject: [PATCH 0036/1092] release notes

---
 doc/release-notes/9428-alternative-title.md | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 doc/release-notes/9428-alternative-title.md

diff --git a/doc/release-notes/9428-alternative-title.md b/doc/release-notes/9428-alternative-title.md
new file mode 100644
index 00000000000..d6eaa680612
--- /dev/null
+++ b/doc/release-notes/9428-alternative-title.md
@@ -0,0 +1,6 @@
+Alternative Title is now repeatable.
+- One will need to update the database with the updated citation block:
+`curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv`
+- One will also need to update the Solr schema:
+Set multiValued="true" on the "alternativeTitle" field in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml`
+Then reload the Solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"`

From 5d81fc7042e81b152cc89773a3e67afd47c17e5a Mon Sep 17 00:00:00 2001
From: lubitchv
Date: Tue, 14 Mar 2023 16:15:48 -0400
Subject: [PATCH 0037/1092] import

---
 .../iq/dataverse/api/imports/ImportDDIServiceBean.java | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
index d9433832309..f7f0e30ea6e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
@@ -1396,6 +1396,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th
 private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
 MetadataBlockDTO citation = datasetDTO.getDatasetVersion().getMetadataBlocks().get("citation");
 List<HashSet<FieldDTO>> otherIds = new ArrayList<>();
+ List<String> altTitles = new ArrayList<>();
 for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
 if (event == XMLStreamConstants.START_ELEMENT) {
@@ -1406,8 +1407,7 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws
 FieldDTO field = FieldDTO.createPrimitiveFieldDTO("subtitle", parseText(xmlr));
 citation.getFields().add(field);
 } else if (xmlr.getLocalName().equals("altTitl")) {
- FieldDTO field = FieldDTO.createPrimitiveFieldDTO("alternativeTitle", parseText(xmlr));
- citation.getFields().add(field);
+ altTitles.add(parseText(xmlr));
 } else if (xmlr.getLocalName().equals("IDNo")) {
 if ( AGENCY_HANDLE.equals( xmlr.getAttributeValue(null, "agency") ) || AGENCY_DOI.equals( xmlr.getAttributeValue(null, "agency") ) ) {
 importGenericService.reassignIdentifierAsGlobalId(parseText(xmlr), datasetDTO);
@@ -1435,6 +1435,9 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws
 if (otherIds.size()>0) {
 citation.addField(FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIds));
 }
+ if (altTitles.size()>0) {
+ citation.addField(FieldDTO.createMultiplePrimitiveFieldDTO("alternativeTitle", altTitles));
+ }
 return;
 }
 }

From 991c5f9faf5a378c0c9e21848788d24cb918fbbc Mon Sep 17 00:00:00 2001
From: lubitchv
Date: Wed, 15 Mar 2023 17:20:01 -0400
Subject: [PATCH 0038/1092] prodPlac

---
 scripts/api/data/metadatablocks/citation.tsv | 507 +++++++++----------
 .../api/imports/ImportDDIServiceBean.java | 6 +-
 .../dataverse/export/ddi/DdiExportUtil.java | 9 +-
 .../edu/harvard/iq/dataverse/api/AdminIT.java | 2 +-
 .../dataset-create-new-all-ddi-fields.json | 4 +-
 5 files changed, 268 insertions(+), 260 deletions(-)

diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv
index 20d858ddb4b..3fbc38438b8 100644
--- a/scripts/api/data/metadatablocks/citation.tsv
+++ b/scripts/api/data/metadatablocks/citation.tsv
@@ -4,7 +4,7 @@
 title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title
 subtitle
Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative - alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution + alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution otherId Other Identifier Another unique identifier for the Dataset (e.g. producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation @@ -24,26 +24,26 @@ keyword Keyword A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation - keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 "#VALUE" FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation - topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation publication Related Publication The article or report that uses the data in the Dataset. 
The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier - publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerURL URL The URL of the producer's website https:// url 39 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerLogoURL Logo URL The URL of the producer's logo https:// url 40 "
    " FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL The URL of the producer's logo https:// url 40
    FALSE FALSE FALSE FALSE FALSE FALSE producer citation productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation - productionPlace Production Location The location where the data and any related materials were produced or collected text 42 FALSE FALSE FALSE FALSE FALSE FALSE citation + productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation @@ -54,8 +54,8 @@ distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorURL URL The URL of the distributor's webpage https:// url 53 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54 "
    " FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54
    FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted @@ -79,247 +79,248 @@ originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation -#controlledVocabulary DatasetField Value identifier displayOrder - subject Agricultural Sciences D01 0 - subject Arts and Humanities D0 1 - subject Astronomy and Astrophysics D1 2 - subject Business and Management D2 3 - subject Chemistry D3 4 - subject Computer and Information Science D7 5 - subject Earth and Environmental Sciences D4 6 - subject Engineering D5 7 - subject Law D8 8 - subject Mathematical Sciences D9 9 - subject Medicine, Health and Life Sciences D6 10 - subject Physics D10 11 - subject Social Sciences D11 12 - subject Other D12 13 - publicationIDType ark 0 - publicationIDType arXiv 1 - publicationIDType bibcode 2 - publicationIDType doi 3 - publicationIDType ean13 4 - publicationIDType eissn 5 - publicationIDType handle 6 - publicationIDType isbn 7 - publicationIDType issn 8 - publicationIDType istc 9 - publicationIDType lissn 10 - publicationIDType lsid 11 - publicationIDType pmid 12 - publicationIDType purl 13 - publicationIDType upc 14 - publicationIDType url 15 - publicationIDType urn 16 - publicationIDType DASH-NRS 17 - contributorType Data Collector 0 - contributorType Data Curator 1 - contributorType Data Manager 2 - contributorType Editor 3 - contributorType Funder 4 - contributorType Hosting Institution 5 - contributorType Project Leader 6 - contributorType Project Manager 7 - contributorType Project Member 8 - contributorType Related Person 9 - contributorType Researcher 10 - contributorType Research Group 11 - contributorType Rights Holder 12 - contributorType Sponsor 13 - contributorType Supervisor 14 - contributorType Work Package Leader 15 - contributorType Other 16 - authorIdentifierScheme ORCID 0 - authorIdentifierScheme ISNI 1 - authorIdentifierScheme LCNA 2 - authorIdentifierScheme VIAF 3 - authorIdentifierScheme GND 4 - authorIdentifierScheme DAI 5 - authorIdentifierScheme ResearcherID 6 - authorIdentifierScheme ScopusID 7 - language Abkhaz 0 - language Afar 1 aar aa - language Afrikaans 2 afr af - language Akan 3 aka ak - language Albanian 4 sqi alb sq - language Amharic 5 amh am - language Arabic 6 ara ar - language Aragonese 7 arg an - language Armenian 8 hye arm hy - language Assamese 9 asm as - language Avaric 10 ava av - language Avestan 11 ave ae - language Aymara 12 aym ay - language Azerbaijani 13 aze az - language Bambara 14 bam bm - language Bashkir 15 bak ba - 
language Basque 16 eus baq eu - language Belarusian 17 bel be - language Bengali, Bangla 18 ben bn - language Bihari 19 bih bh - language Bislama 20 bis bi - language Bosnian 21 bos bs - language Breton 22 bre br - language Bulgarian 23 bul bg - language Burmese 24 mya bur my - language Catalan,Valencian 25 cat ca - language Chamorro 26 cha ch - language Chechen 27 che ce - language Chichewa, Chewa, Nyanja 28 nya ny - language Chinese 29 zho chi zh - language Chuvash 30 chv cv - language Cornish 31 cor kw - language Corsican 32 cos co - language Cree 33 cre cr - language Croatian 34 hrv src hr - language Czech 35 ces cze cs - language Danish 36 dan da - language Divehi, Dhivehi, Maldivian 37 div dv - language Dutch 38 nld dut nl - language Dzongkha 39 dzo dz - language English 40 eng en - language Esperanto 41 epo eo - language Estonian 42 est et - language Ewe 43 ewe ee - language Faroese 44 fao fo - language Fijian 45 fij fj - language Finnish 46 fin fi - language French 47 fra fre fr - language Fula, Fulah, Pulaar, Pular 48 ful ff - language Galician 49 glg gl - language Georgian 50 kat geo ka - language German 51 deu ger de - language Greek (modern) 52 gre ell el - language Guaraní 53 grn gn - language Gujarati 54 guj gu - language Haitian, Haitian Creole 55 hat ht - language Hausa 56 hau ha - language Hebrew (modern) 57 heb he - language Herero 58 her hz - language Hindi 59 hin hi - language Hiri Motu 60 hmo ho - language Hungarian 61 hun hu - language Interlingua 62 ina ia - language Indonesian 63 ind id - language Interlingue 64 ile ie - language Irish 65 gle ga - language Igbo 66 ibo ig - language Inupiaq 67 ipk ik - language Ido 68 ido io - language Icelandic 69 isl ice is - language Italian 70 ita it - language Inuktitut 71 iku iu - language Japanese 72 jpn ja - language Javanese 73 jav jv - language Kalaallisut, Greenlandic 74 kal kl - language Kannada 75 kan kn - language Kanuri 76 kau kr - language Kashmiri 77 kas ks - language Kazakh 78 kaz kk - language Khmer 79 khm km - language Kikuyu, Gikuyu 80 kik ki - language Kinyarwanda 81 kin rw - language Kyrgyz 82 - language Komi 83 kom kv - language Kongo 84 kon kg - language Korean 85 kor ko - language Kurdish 86 kur ku - language Kwanyama, Kuanyama 87 kua kj - language Latin 88 lat la - language Luxembourgish, Letzeburgesch 89 ltz lb - language Ganda 90 lug lg - language Limburgish, Limburgan, Limburger 91 lim li - language Lingala 92 lin ln - language Lao 93 lao lo - language Lithuanian 94 lit lt - language Luba-Katanga 95 lub lu - language Latvian 96 lav lv - language Manx 97 glv gv - language Macedonian 98 mkd mac mk - language Malagasy 99 mlg mg - language Malay 100 may msa ms - language Malayalam 101 mal ml - language Maltese 102 mlt mt - language MÄori 103 mao mri mi - language Marathi (MarÄá¹­hÄ«) 104 mar mr - language Marshallese 105 mah mh - language Mixtepec Mixtec 106 mix - language Mongolian 107 mon mn - language Nauru 108 nau na - language Navajo, Navaho 109 nav nv - language Northern Ndebele 110 nde nd - language Nepali 111 nep ne - language Ndonga 112 ndo ng - language Norwegian BokmÃ¥l 113 nob nb - language Norwegian Nynorsk 114 nno nn - language Norwegian 115 nor no - language Nuosu 116 - language Southern Ndebele 117 nbl nr - language Occitan 118 oci oc - language Ojibwe, Ojibwa 119 oji oj - language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu - language Oromo 121 orm om - language Oriya 122 ori or - language Ossetian, Ossetic 123 oss os - language Panjabi, Punjabi 124 pan pa - language PÄli 
125 pli pi - language Persian (Farsi) 126 per fas fa - language Polish 127 pol pl - language Pashto, Pushto 128 pus ps - language Portuguese 129 por pt - language Quechua 130 que qu - language Romansh 131 roh rm - language Kirundi 132 run rn - language Romanian 133 ron rum ro - language Russian 134 rus ru - language Sanskrit (Saá¹ská¹›ta) 135 san sa - language Sardinian 136 srd sc - language Sindhi 137 snd sd - language Northern Sami 138 sme se - language Samoan 139 smo sm - language Sango 140 sag sg - language Serbian 141 srp scc sr - language Scottish Gaelic, Gaelic 142 gla gd - language Shona 143 sna sn - language Sinhala, Sinhalese 144 sin si - language Slovak 145 slk slo sk - language Slovene 146 slv sl - language Somali 147 som so - language Southern Sotho 148 sot st - language Spanish, Castilian 149 spa es - language Sundanese 150 sun su - language Swahili 151 swa sw - language Swati 152 ssw ss - language Swedish 153 swe sv - language Tamil 154 tam ta - language Telugu 155 tel te - language Tajik 156 tgk tg - language Thai 157 tha th - language Tigrinya 158 tir ti - language Tibetan Standard, Tibetan, Central 159 tib bod bo - language Turkmen 160 tuk tk - language Tagalog 161 tgl tl - language Tswana 162 tsn tn - language Tonga (Tonga Islands) 163 ton to - language Turkish 164 tur tr - language Tsonga 165 tso ts - language Tatar 166 tat tt - language Twi 167 twi tw - language Tahitian 168 tah ty - language Uyghur, Uighur 169 uig ug - language Ukrainian 170 ukr uk - language Urdu 171 urd ur - language Uzbek 172 uzb uz - language Venda 173 ven ve - language Vietnamese 174 vie vi - language Volapük 175 vol vo - language Walloon 176 wln wa - language Welsh 177 cym wel cy - language Wolof 178 wol wo - language Western Frisian 179 fry fy - language Xhosa 180 xho xh - language Yiddish 181 yid yi - language Yoruba 182 yor yo - language Zhuang, Chuang 183 zha za - language Zulu 184 zul zu - language Not applicable 185 +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 + publicationIDType bibcode 2 + publicationIDType cstr 3 + publicationIDType doi 4 + publicationIDType ean13 5 + publicationIDType eissn 6 + publicationIDType handle 7 + publicationIDType isbn 8 + publicationIDType issn 9 + publicationIDType istc 10 + publicationIDType lissn 11 + publicationIDType lsid 12 + publicationIDType pmid 13 + publicationIDType purl 14 + publicationIDType upc 15 + publicationIDType url 16 + publicationIDType urn 17 + publicationIDType DASH-NRS 18 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType 
Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + authorIdentifierScheme VIAF 3 + authorIdentifierScheme GND 4 + authorIdentifierScheme DAI 5 + authorIdentifierScheme ResearcherID 6 + authorIdentifierScheme ScopusID 7 + language Abkhaz 0 + language Afar 1 aar aa + language Afrikaans 2 afr af + language Akan 3 aka ak + language Albanian 4 sqi alb sq + language Amharic 5 amh am + language Arabic 6 ara ar + language Aragonese 7 arg an + language Armenian 8 hye arm hy + language Assamese 9 asm as + language Avaric 10 ava av + language Avestan 11 ave ae + language Aymara 12 aym ay + language Azerbaijani 13 aze az + language Bambara 14 bam bm + language Bashkir 15 bak ba + language Basque 16 eus baq eu + language Belarusian 17 bel be + language Bengali, Bangla 18 ben bn + language Bihari 19 bih bh + language Bislama 20 bis bi + language Bosnian 21 bos bs + language Breton 22 bre br + language Bulgarian 23 bul bg + language Burmese 24 mya bur my + language Catalan,Valencian 25 cat ca + language Chamorro 26 cha ch + language Chechen 27 che ce + language Chichewa, Chewa, Nyanja 28 nya ny + language Chinese 29 zho chi zh + language Chuvash 30 chv cv + language Cornish 31 cor kw + language Corsican 32 cos co + language Cree 33 cre cr + language Croatian 34 hrv src hr + language Czech 35 ces cze cs + language Danish 36 dan da + language Divehi, Dhivehi, Maldivian 37 div dv + language Dutch 38 nld dut nl + language Dzongkha 39 dzo dz + language English 40 eng en + language Esperanto 41 epo eo + language Estonian 42 est et + language Ewe 43 ewe ee + language Faroese 44 fao fo + language Fijian 45 fij fj + language Finnish 46 fin fi + language French 47 fra fre fr + language Fula, Fulah, Pulaar, Pular 48 ful ff + language Galician 49 glg gl + language Georgian 50 kat geo ka + language German 51 deu ger de + language Greek (modern) 52 gre ell el + language Guaraní 53 grn gn + language Gujarati 54 guj gu + language Haitian, Haitian Creole 55 hat ht + language Hausa 56 hau ha + language Hebrew (modern) 57 heb he + language Herero 58 her hz + language Hindi 59 hin hi + language Hiri Motu 60 hmo ho + language Hungarian 61 hun hu + language Interlingua 62 ina ia + language Indonesian 63 ind id + language Interlingue 64 ile ie + language Irish 65 gle ga + language Igbo 66 ibo ig + language Inupiaq 67 ipk ik + language Ido 68 ido io + language Icelandic 69 isl ice is + language Italian 70 ita it + language Inuktitut 71 iku iu + language Japanese 72 jpn ja + language Javanese 73 jav jv + language Kalaallisut, Greenlandic 74 kal kl + language Kannada 75 kan kn + language Kanuri 76 kau kr + language Kashmiri 77 kas ks + language Kazakh 78 kaz kk + language Khmer 79 khm km + language Kikuyu, Gikuyu 80 kik ki + language Kinyarwanda 81 kin rw + language Kyrgyz 82 + language Komi 83 kom kv + language Kongo 84 kon kg + language Korean 85 kor ko + language Kurdish 86 kur ku + language Kwanyama, Kuanyama 87 kua kj + language Latin 88 lat la + language Luxembourgish, Letzeburgesch 89 ltz lb + language Ganda 90 lug lg + language Limburgish, Limburgan, Limburger 91 lim li + language Lingala 92 lin ln + language Lao 93 lao lo + language Lithuanian 94 lit lt + language Luba-Katanga 95 lub lu + language Latvian 96 lav lv + language Manx 97 glv gv + language Macedonian 98 mkd mac mk + language Malagasy 99 mlg mg + language Malay 100 may msa ms + language Malayalam 101 mal ml + language Maltese 102 mlt mt + language MÄori 103 mao 
mri mi + language Marathi (MarÄá¹­hÄ«) 104 mar mr + language Marshallese 105 mah mh + language Mixtepec Mixtec 106 mix + language Mongolian 107 mon mn + language Nauru 108 nau na + language Navajo, Navaho 109 nav nv + language Northern Ndebele 110 nde nd + language Nepali 111 nep ne + language Ndonga 112 ndo ng + language Norwegian BokmÃ¥l 113 nob nb + language Norwegian Nynorsk 114 nno nn + language Norwegian 115 nor no + language Nuosu 116 + language Southern Ndebele 117 nbl nr + language Occitan 118 oci oc + language Ojibwe, Ojibwa 119 oji oj + language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu + language Oromo 121 orm om + language Oriya 122 ori or + language Ossetian, Ossetic 123 oss os + language Panjabi, Punjabi 124 pan pa + language PÄli 125 pli pi + language Persian (Farsi) 126 per fas fa + language Polish 127 pol pl + language Pashto, Pushto 128 pus ps + language Portuguese 129 por pt + language Quechua 130 que qu + language Romansh 131 roh rm + language Kirundi 132 run rn + language Romanian 133 ron rum ro + language Russian 134 rus ru + language Sanskrit (Saá¹ská¹›ta) 135 san sa + language Sardinian 136 srd sc + language Sindhi 137 snd sd + language Northern Sami 138 sme se + language Samoan 139 smo sm + language Sango 140 sag sg + language Serbian 141 srp scc sr + language Scottish Gaelic, Gaelic 142 gla gd + language Shona 143 sna sn + language Sinhala, Sinhalese 144 sin si + language Slovak 145 slk slo sk + language Slovene 146 slv sl + language Somali 147 som so + language Southern Sotho 148 sot st + language Spanish, Castilian 149 spa es + language Sundanese 150 sun su + language Swahili 151 swa sw + language Swati 152 ssw ss + language Swedish 153 swe sv + language Tamil 154 tam ta + language Telugu 155 tel te + language Tajik 156 tgk tg + language Thai 157 tha th + language Tigrinya 158 tir ti + language Tibetan Standard, Tibetan, Central 159 tib bod bo + language Turkmen 160 tuk tk + language Tagalog 161 tgl tl + language Tswana 162 tsn tn + language Tonga (Tonga Islands) 163 ton to + language Turkish 164 tur tr + language Tsonga 165 tso ts + language Tatar 166 tat tt + language Twi 167 twi tw + language Tahitian 168 tah ty + language Uyghur, Uighur 169 uig ug + language Ukrainian 170 ukr uk + language Urdu 171 urd ur + language Uzbek 172 uzb uz + language Venda 173 ven ve + language Vietnamese 174 vie vi + language Volapük 175 vol vo + language Walloon 176 wln wa + language Welsh 177 cym wel cy + language Wolof 178 wol wo + language Western Frisian 179 fry fy + language Xhosa 180 xho xh + language Yiddish 181 yid yi + language Yoruba 182 yor yo + language Zhuang, Chuang 183 zha za + language Zulu 184 zul zu + language Not applicable 185 diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index f7f0e30ea6e..458803e0c92 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1337,6 +1337,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th List> producers = new ArrayList<>(); List> grants = new ArrayList<>(); List> software = new ArrayList<>(); + List prodPlac = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1352,9 +1353,7 @@ private void 
processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("prodDate")) { citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate"))); } else if (xmlr.getLocalName().equals("prodPlac")) { - List prodPlac = new ArrayList<>(); prodPlac.add(parseText(xmlr, "prodPlac")); - citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); } else if (xmlr.getLocalName().equals("software")) { HashSet set = new HashSet<>(); addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version")); @@ -1387,6 +1386,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th if (producers.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers)); } + if (prodPlac.size() > 0) { + citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); + } return; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index eb53473d4d9..4ea90ea6199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -895,8 +895,13 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT } } } - writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate)); - writeFullElement(xmlw, "prodPlac", dto2Primitive(version, DatasetFieldConstant.productionPlace)); + writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate)); + + FieldDTO prodPlac = dto2FieldDTO( version, DatasetFieldConstant.productionPlace, "citation" ); + if (prodPlac != null) { + writeMultipleElement(xmlw, "prodPlac", prodPlac, null); + } + writeSoftwareElement(xmlw, version); writeGrantElement(xmlw, version); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 2ba06314ddb..14185b97e9e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -758,7 +758,7 @@ public void testLoadMetadataBlock_NoErrorPath() { assertEquals(1, data.size()); List> addedElements = data.get("added"); //Note -test depends on the number of elements in the production citation block, so any changes to the # of elements there can break this test - assertEquals(323, addedElements.size()); + assertEquals(322, addedElements.size()); Map statistics = new HashMap<>(); for (Map unit : addedElements) { diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json index 96f058b1b02..822623f721a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json @@ -409,9 +409,9 @@ }, { "typeName": "productionPlace", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "ProductionPlace" + "value": ["ProductionPlace"] }, { "typeName": "contributor", From 89dbc7a87407a095670fa66dda87616b8500959d Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 16 Mar 2023 13:19:33 -0400 Subject: [PATCH 0039/1092] restore back 
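For readers tracing the prodPlac change in the diffs above: the importer previously created a new single-value productionPlace field every time a <prodPlac> element was encountered, so only one value survived; the fix declares one list before the parse loop, accumulates every occurrence into it, and emits a single multi-valued field once the production statement has been fully read. A minimal, self-contained sketch of that accumulate-then-emit pattern follows; the place names and the emitField helper are made-up stand-ins, and the StAX plumbing and FieldDTO types are deliberately elided.

import java.util.ArrayList;
import java.util.List;

public class AccumulateThenEmit {

    // Hypothetical stand-in for FieldDTO.createMultiplePrimitiveFieldDTO(...).
    static void emitField(String typeName, List<String> values) {
        System.out.println(typeName + " (multiple) = " + values);
    }

    public static void main(String[] args) {
        // Stand-ins for repeated <prodPlac> elements met while scanning <prodStmt>:
        String[] parsedProdPlacElements = {"Cambridge, MA", "Toronto, ON"};

        List<String> prodPlac = new ArrayList<>(); // declared once, before the loop
        for (String value : parsedProdPlacElements) {
            prodPlac.add(value); // accumulate only; nothing is emitted per element
        }
        if (prodPlac.size() > 0) {
            emitField("productionPlace", prodPlac); // one field carrying every value
        }
    }
}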
--- src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 14185b97e9e..2ba06314ddb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -758,7 +758,7 @@ public void testLoadMetadataBlock_NoErrorPath() { assertEquals(1, data.size()); List> addedElements = data.get("added"); //Note -test depends on the number of elements in the production citation block, so any changes to the # of elements there can break this test - assertEquals(322, addedElements.size()); + assertEquals(323, addedElements.size()); Map statistics = new HashMap<>(); for (Map unit : addedElements) { From b5fe4c75944deea540b807021e0f88401878118f Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 17 Mar 2023 16:22:48 +0100 Subject: [PATCH 0040/1092] Fix placement of allowedApiCalls in example manifests allowedApiCalls should be at the top level, not inside toolParameters. --- .../external-tools/dynamicDatasetTool.json | 20 +++++++++---------- .../root/external-tools/fabulousFileTool.json | 18 ++++++++--------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json index 47413c8a625..22dd6477cb4 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json @@ -14,14 +14,14 @@ { "locale":"{localeCode}" } - ], - "allowedApiCalls": [ - { - "name":"retrieveDatasetJson", - "httpMethod":"GET", - "urlTemplate":"/api/v1/datasets/{datasetId}", - "timeOut":10 - } - ] - } + ] + }, + "allowedApiCalls": [ + { + "name":"retrieveDatasetJson", + "httpMethod":"GET", + "urlTemplate":"/api/v1/datasets/{datasetId}", + "timeOut":10 + } + ] } diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json index 1c132576099..2b6a0b8e092 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json @@ -21,14 +21,14 @@ { "locale":"{localeCode}" } - ], - "allowedApiCalls": [ - { - "name":"retrieveDataFile", - "httpMethod":"GET", - "urlTemplate":"/api/v1/access/datafile/{fileId}", - "timeOut":270 - } ] - } + }, + "allowedApiCalls": [ + { + "name":"retrieveDataFile", + "httpMethod":"GET", + "urlTemplate":"/api/v1/access/datafile/{fileId}", + "timeOut":270 + } + ] } From d76092c1ec57a835920b8fd10e6883299f8b6d3a Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 17 Mar 2023 16:24:41 +0100 Subject: [PATCH 0041/1092] Add missing break to DATASET case Without this it also evaluates the FILE case causing NPE when dataFile is accessed. 
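The fall-through this message describes is easy to reproduce outside Dataverse. Below is a self-contained illustration; the enum, the DataFile record and the URL strings are simplified stand-ins rather than the real ExternalToolHandler types. With the break in place, the DATASET branch returns its callback without ever touching dataFile; remove it and execution continues into the FILE branch, where dataFile.id() throws a NullPointerException for dataset-level tools, since no DataFile exists there.

public class SwitchFallThrough {

    enum Scope { DATASET, FILE }

    record DataFile(long id) {}

    static String callback(Scope scope, long datasetId, DataFile dataFile) {
        String callback = null;
        switch (scope) {
            case DATASET:
                callback = "/api/v1/datasets/" + datasetId + "/versions/:latest/toolparams/1";
                break; // the statement the patch adds; deleting it lets control fall into FILE
            case FILE:
                // dataFile is null for a dataset-scoped tool, so this line
                // throws the NPE if reached via fall-through
                callback = "/api/v1/files/" + dataFile.id() + "/metadata/1/toolparams/1";
                break;
        }
        return callback;
    }

    public static void main(String[] args) {
        // Prints the dataset callback; with the break removed it would throw instead.
        System.out.println(callback(Scope.DATASET, 42L, null));
    }
}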
--- .../harvard/iq/dataverse/externaltools/ExternalToolHandler.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index 88a51017b75..dac046373ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -111,6 +111,7 @@ public String handleRequest(boolean preview) { case DATASET: callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + break; case FILE: callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" + dataFile.getId() + "/metadata/" + fileMetadata.getId() + "/toolparams/" From 10a475317771227b23263170ad3c97232764d14d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 20 Mar 2023 10:48:20 -0400 Subject: [PATCH 0042/1092] An extra check, to disable the upload component right away, if the quota is already full/exceeded. #9361 --- src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 4 ++++ src/main/webapp/editFilesFragment.xhtml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 420642f2fa5..c39e6f62ce2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -542,6 +542,10 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo saveEnabled = true; return null; } + + public boolean isQuotaExceeded() { + return systemConfig.isStorageQuotasEnforced() && datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes() == 0; + } public String init() { // default mode should be EDIT diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 77f7aab2f76..834ca597892 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -148,7 +148,7 @@ dragDropSupport="true" auto="#{!(systemConfig.directUploadEnabled(EditDatafilesPage.dataset))}" multiple="#{datasetPage || EditDatafilesPage.allowMultipleFileUpload()}" - disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) }" + disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) || EditDatafilesPage.isQuotaExceeded()}" listener="#{EditDatafilesPage.handleFileUpload}" process="filesTable" update=":datasetForm:filesTable, @([id$=filesButtons])" From 098de49c8ac14478ad01d9aaa2f820e3d9ab744d Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 21 Apr 2023 16:10:41 +0200 Subject: [PATCH 0043/1092] reverted SystemConfig.java changes --- .../iq/dataverse/util/SystemConfig.java | 299 ++++++++++-------- 1 file changed, 160 insertions(+), 139 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 1764abf4478..c989add6e3d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -73,7 +73,7 @@ public class SystemConfig { * The default number of datafiles that we allow to be created through * zip file 
upload. */ - private static final int defaultZipUploadFilesLimit = 1000; + private static final int defaultZipUploadFilesLimit = 1000; public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours @@ -81,21 +81,21 @@ public class SystemConfig { private String buildNumber = null; private static final String JVM_TIMER_SERVER_OPTION = "dataverse.timerServer"; - - private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; + + private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - + public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; public final static String CURATIONLABELSDISABLED = "DISABLED"; - + public String getVersion() { return getVersion(false); } - + // The return value is a "prviate static String", that should be initialized - // once, on the first call (see the code below)... But this is a @Stateless - // bean... so that would mean "once per thread"? - this would be a prime + // once, on the first call (see the code below)... But this is a @Stateless + // bean... so that would mean "once per thread"? - this would be a prime // candidate for being moved into some kind of an application-scoped caching // service... some CachingService @Singleton - ? (L.A. 5.8) public String getVersion(boolean withBuildNumber) { @@ -157,15 +157,15 @@ public String getSolrHostColonPort() { public boolean isProvCollectionEnabled() { String provCollectionEnabled = settingsService.getValueForKey(SettingsServiceBean.Key.ProvCollectionEnabled, null); - if ("true".equalsIgnoreCase(provCollectionEnabled)) { + if("true".equalsIgnoreCase(provCollectionEnabled)){ return true; } return false; } - + public int getMetricsCacheTimeoutMinutes() { - int defaultValue = 10080; // one week in minutes + int defaultValue = 10080; //one week in minutes SettingsServiceBean.Key key = SettingsServiceBean.Key.MetricsCacheTimeoutMinutes; String metricsCacheTimeString = settingsService.getValueForKey(key); if (metricsCacheTimeString != null) { @@ -183,7 +183,7 @@ public int getMetricsCacheTimeoutMinutes() { } return defaultValue; } - + public int getMinutesUntilConfirmEmailTokenExpires() { final int minutesInOneDay = 1440; final int reasonableDefault = minutesInOneDay; @@ -201,9 +201,34 @@ public int getMinutesUntilConfirmEmailTokenExpires() { logger.info("Returning " + reasonableDefault + " for " + key + " because value must be an integer greater than zero, not \"" + valueFromDatabase + "\"."); } } + logger.fine("Returning " + reasonableDefault + " for " + key); return reasonableDefault; } + /** + * The number of minutes for which a password reset token is valid. Can be + * overridden by {@link #PASSWORD_RESET_TIMEOUT_IN_MINUTES}. 
+ */ + public static int getMinutesUntilPasswordResetTokenExpires() { + final int reasonableDefault = 60; + String configuredValueAsString = System.getProperty(PASSWORD_RESET_TIMEOUT_IN_MINUTES); + if (configuredValueAsString != null) { + int configuredValueAsInteger = 0; + try { + configuredValueAsInteger = Integer.parseInt(configuredValueAsString); + if (configuredValueAsInteger > 0) { + return configuredValueAsInteger; + } else { + logger.info(PASSWORD_RESET_TIMEOUT_IN_MINUTES + " is configured as a negative number \"" + configuredValueAsInteger + "\". Using default value instead: " + reasonableDefault); + return reasonableDefault; + } + } catch (NumberFormatException ex) { + logger.info("Unable to convert " + PASSWORD_RESET_TIMEOUT_IN_MINUTES + " from \"" + configuredValueAsString + "\" into an integer value: " + ex + ". Using default value " + reasonableDefault); + } + } + return reasonableDefault; + } + /** * Lookup (or construct) the designated URL of this instance from configuration. * @@ -226,6 +251,7 @@ public String getDataverseSiteUrl() { * * Can be defined as a complete URL via dataverse.siteUrl; or derived from the hostname * dataverse.fqdn and HTTPS. If none of these options is set, defaults to the + * {@link InetAddress#getLocalHost} and HTTPS. * * NOTE: This method does not provide any validation. * TODO: The behaviour of this method is subject to a later change, see @@ -257,13 +283,12 @@ public static String getDataverseSiteUrlStatic() { return null; } } - + /** - * URL Tracking: + * URL Tracking: */ public String getPageURLWithQueryString() { - return PrettyContext.getCurrentInstance().getRequestURL().toURL() - + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); + return PrettyContext.getCurrentInstance().getRequestURL().toURL() + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); } public String getGuidesBaseUrl() { @@ -322,44 +347,44 @@ public static int getIntLimitFromStringOrDefault(String limitSetting, Integer de /** * Download-as-zip size limit. - * returns defaultZipDownloadLimit if not specified; - * set to -1 to disable zip downloads. + * returns defaultZipDownloadLimit if not specified; + * set to -1 to disable zip downloads. */ public long getZipDownloadLimit() { String zipLimitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipDownloadLimit); return getLongLimitFromStringOrDefault(zipLimitOption, defaultZipDownloadLimit); } - + public int getZipUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultZipUploadFilesLimit); } - + /** - * Session timeout, in minutes. + * Session timeout, in minutes. 
* (default value provided) */ public int getLoginSessionTimeout() { return getIntLimitFromStringOrDefault( - settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), - defaultLoginSessionTimeout); + settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), + defaultLoginSessionTimeout); } - + /* ` the number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog - */ + */ public int getMultipleUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.MultipleUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultMultipleUploadFilesLimit); } - + public long getGuestbookResponsesPageDisplayLimit() { String limitSetting = settingsService.getValueForKey(SettingsServiceBean.Key.GuestbookResponsesPageDisplayLimit); return getLongLimitFromStringOrDefault(limitSetting, DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT); } - - public long getUploadLogoSizeLimit() { + + public long getUploadLogoSizeLimit(){ return 500000; } @@ -372,10 +397,10 @@ public long getThumbnailSizeLimitPDF() { } public static long getThumbnailSizeLimit(String type) { - String option = null; - - // get options via jvm options - + String option = null; + + //get options via jvm options + if ("Image".equals(type)) { option = System.getProperty("dataverse.dataAccess.thumbnail.image.limit"); return getLongLimitFromStringOrDefault(option, DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE); @@ -387,19 +412,19 @@ public static long getThumbnailSizeLimit(String type) { // Zero (0) means no limit. return getLongLimitFromStringOrDefault(option, 0L); } - + public boolean isThumbnailGenerationDisabledForType(String type) { return getThumbnailSizeLimit(type) == -1l; } - + public boolean isThumbnailGenerationDisabledForImages() { return isThumbnailGenerationDisabledForType("Image"); } - + public boolean isThumbnailGenerationDisabledForPDF() { return isThumbnailGenerationDisabledForType("PDF"); } - + public String getApplicationTermsOfUse() { String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); @@ -407,9 +432,9 @@ public String getApplicationTermsOfUse() { // value, or as a better default than the saneDefaultForAppTermsOfUse if there // is no language-specific value String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse); - // Now get the language-specific value if it exists + //Now get the language-specific value if it exists if (language != null && !language.equalsIgnoreCase(BundleUtil.getDefaultLocale().getLanguage())) { - appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); } return appTermsOfUse; } @@ -420,7 +445,7 @@ public String getApiTermsOfUse() { return apiTermsOfUse; } - // TODO: + // TODO: // remove this method! // pages should be using settingsWrapper.get(":ApplicationPrivacyPolicyUrl") instead. 
-- 4.2.1 public String getApplicationPrivacyPolicyUrl() { @@ -439,10 +464,10 @@ public boolean isFilesOnDatasetPageFromSolr() { return settingsService.isTrueForKey(SettingsServiceBean.Key.FilesOnDatasetPageFromSolr, safeDefaultIfKeyNotFound); } - public Long getMaxFileUploadSizeForStore(String driverId) { - return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); - } - + public Long getMaxFileUploadSizeForStore(String driverId){ + return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); + } + public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { @@ -456,12 +481,12 @@ public Integer getSearchHighlightFragmentSize() { } public long getTabularIngestSizeLimit() { - // This method will return the blanket ingestable size limit, if - // set on the system. I.e., the universal limit that applies to all - // tabular ingests, regardless of fromat: - - String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); - + // This method will return the blanket ingestable size limit, if + // set on the system. I.e., the universal limit that applies to all + // tabular ingests, regardless of fromat: + + String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); @@ -470,48 +495,48 @@ public long getTabularIngestSizeLimit() { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); } } - // -1 means no limit is set; - // 0 on the other hand would mean that ingest is fully disabled for - // tabular data. - return -1; + // -1 means no limit is set; + // 0 on the other hand would mean that ingest is fully disabled for + // tabular data. + return -1; } - + public long getTabularIngestSizeLimit(String formatName) { // This method returns the size limit set specifically for this format name, - // if available, otherwise - the blanket limit that applies to all tabular - // ingests regardless of a format. - + // if available, otherwise - the blanket limit that applies to all tabular + // ingests regardless of a format. + if (formatName == null || formatName.equals("")) { - return getTabularIngestSizeLimit(); + return getTabularIngestSizeLimit(); } - + String limitEntry = settingsService.get(SettingsServiceBean.Key.TabularIngestSizeLimit.toString() + ":" + formatName); - + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { - logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry); + logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? 
- " + limitEntry ); } } - - return getTabularIngestSizeLimit(); + + return getTabularIngestSizeLimit(); } public boolean isOAIServerEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.OAIServerEnabled, defaultResponse); } - + public void enableOAIServer() { settingsService.setValueForKey(SettingsServiceBean.Key.OAIServerEnabled, "true"); } - + public void disableOAIServer() { settingsService.deleteValueForKey(SettingsServiceBean.Key.OAIServerEnabled); - } - + } + public boolean isTimerServer() { String optionValue = System.getProperty(JVM_TIMER_SERVER_OPTION); if ("true".equalsIgnoreCase(optionValue)) { @@ -579,12 +604,11 @@ public String getOAuth2CallbackUrl() { } return saneDefault; } - + public boolean isShibPassiveLoginEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibPassiveLoginEnabled, defaultResponse); } - public boolean isShibAttributeCharacterSetConversionEnabled() { boolean defaultResponse = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibAttributeCharacterSetConversionEnabled, defaultResponse); @@ -610,7 +634,7 @@ public String getPVDictionaries() { public int getPVGoodStrength() { // FIXME: Change this to 21 to match Harvard's requirements or implement a way to disable the rule (0 or -1) and have the default be disabled. int goodStrengthLength = 20; - // String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); + //String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); String _goodStrengthLength = settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString()); try { goodStrengthLength = Integer.parseInt(_goodStrengthLength); @@ -769,7 +793,8 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + + } /** @@ -785,8 +810,8 @@ public enum FileDownloadMethods { */ RSYNC("rsal/rsync"), NATIVE("native/http"), - GLOBUS("globus"); - + GLOBUS("globus") + ; private final String text; private FileDownloadMethods(final String text) { @@ -808,28 +833,27 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + } - + public enum DataFilePIDFormat { DEPENDENT("DEPENDENT"), INDEPENDENT("INDEPENDENT"); - private final String text; public String getText() { return text; } - - private DataFilePIDFormat(final String text) { + + private DataFilePIDFormat(final String text){ this.text = text; } - + @Override public String toString() { return text; } - + } /** @@ -869,16 +893,16 @@ public String toString() { } - public boolean isPublicInstall() { + public boolean isPublicInstall(){ boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); } - - public boolean isRsyncUpload() { + + public boolean isRsyncUpload(){ return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), true); } - public boolean isGlobusUpload() { + public boolean isGlobusUpload(){ return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), true); } @@ -887,30 +911,30 @@ public boolean isWebloaderUpload(){ } // Controls if HTTP upload is enabled for both GUI and API. 
- public boolean isHTTPUpload() { + public boolean isHTTPUpload(){ return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), true); } - - public boolean isRsyncOnly() { + + public boolean isRsyncOnly(){ String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - if (downloadMethods == null) { + if(downloadMethods == null){ return false; } - if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())) { + if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())){ return false; } String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods == null) { + if (uploadMethods==null){ return false; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); } } - + public boolean isRsyncDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), false); } - + public boolean isHTTPDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), false); } @@ -918,14 +942,14 @@ public boolean isHTTPDownload() { public boolean isGlobusDownload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false); } - + public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } public List getGlobusStoresList() { - String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); - return Arrays.asList(globusStores.split("\\s*,\\s*")); + String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); + return Arrays.asList(globusStores.split("\\s*,\\s*")); } private Boolean getMethodAvailable(String method, boolean upload) { @@ -937,32 +961,31 @@ private Boolean getMethodAvailable(String method, boolean upload) { return Arrays.asList(methods.toLowerCase().split("\\s*,\\s*")).contains(method); } } - - public Integer getUploadMethodCount() { - String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods == null) { + + public Integer getUploadMethodCount(){ + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods==null){ return 0; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); - } + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); + } } - - public boolean isDataFilePIDSequentialDependent() { + public boolean isDataFilePIDSequentialDependent(){ String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")) { + if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")){ return true; } return false; } - + public int getPIDAsynchRegFileCount() { String fileCount = settingsService.getValueForKey(SettingsServiceBean.Key.PIDAsynchRegFileCount, "10"); int 
retVal = 10; try { retVal = Integer.parseInt(fileCount); - } catch (NumberFormatException e) { - // if no number in the setting we'll return 10 + } catch (NumberFormatException e) { + //if no number in the setting we'll return 10 } return retVal; } @@ -976,13 +999,13 @@ public boolean isFilePIDsEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound); } - + public boolean isIndependentHandleService() { boolean safeDefaultIfKeyNotFound = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.IndependentHandleService, safeDefaultIfKeyNotFound); - + } - + public String getHandleAuthHandle() { String handleAuthHandle = settingsService.getValueForKey(SettingsServiceBean.Key.HandleAuthHandle, null); return handleAuthHandle; @@ -992,61 +1015,60 @@ public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); return mDCLogPath; } - + public boolean isDatafileValidationOnPublishEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FileValidationOnPublishEnabled, safeDefaultIfKeyNotFound); } - public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".upload-out-of-band" is not used here - return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); - } - - public String getDataCiteRestApiUrlString() { - // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. + public boolean directUploadEnabled(DvObjectContainer container) { + return Boolean.getBoolean("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect"); + } + + public String getDataCiteRestApiUrlString() { + //As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); - } - + } + public boolean isExternalDataverseValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? } - + public boolean isExternalDatasetValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? 
} - + public String getDataverseValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript); } - + public String getDatasetValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript); } - + public String getDataverseValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataPublishValidationFailureMsg, defaultMessage); } - + public String getDataverseUpdateValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be updated because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataUpdateValidationFailureMsg, defaultMessage); } - + public String getDatasetValidationFailureMsg() { String defaultMessage = "This dataset cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidationFailureMsg, defaultMessage); } - + public boolean isExternalValidationAdminOverrideEnabled() { return "true".equalsIgnoreCase(settingsService.getValueForKey(SettingsServiceBean.Key.ExternalValidationAdminOverride)); } - + public long getDatasetValidationSizeLimit() { String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetChecksumValidationSizeLimit); @@ -1076,7 +1098,6 @@ public long getFileValidationSizeLimit() { // -1 means no limit is set; return -1; } - public Map getCurationLabels() { Map labelMap = new HashMap(); String setting = settingsService.getValueForKey(SettingsServiceBean.Key.AllowedCurationLabels, ""); @@ -1117,15 +1138,15 @@ public Map getCurationLabels() { } return labelMap; } - + public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { - Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); - - // we default to false - i.e., "not disabled" if the setting is not present: + Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); + + // we default to false - i.e., "not disabled" if the setting is not present: if (ret == null) { - return false; + return false; } - - return !ret; + + return !ret; } } From 364858af43fcce6a1cfb4decefa8215621d4f648 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 25 Apr 2023 17:27:51 -0400 Subject: [PATCH 0044/1092] Further reconciliation of the quotas branch with the zip optimization pr. 
(#9361) --- .../impl/CreateNewDataFilesCommand.java | 111 +++++++++++++++--- .../harvard/iq/dataverse/util/FileUtil.java | 10 +- 2 files changed, 99 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 9f281f9446d..15304502232 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -41,10 +41,12 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.Enumeration; import java.util.List; import java.util.Optional; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; +import java.util.zip.ZipFile; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import javax.enterprise.inject.spi.CDI; @@ -224,6 +226,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // DataFile objects from its contents: } else if (finalType.equals("application/zip")) { + ZipFile zipFile = null; ZipInputStream unZippedIn = null; ZipEntry zipEntry = null; @@ -253,13 +256,88 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } */ + /** + * Perform a quick check for how many individual files are + * inside this zip archive. If it's above the limit, we can + * give up right away, without doing any unpacking. + * This should be a fairly inexpensive operation, we just need + * to read the directory at the end of the file. + */ + + if (charset != null) { + zipFile = new ZipFile(tempFile.toFile(), charset); + } else { + zipFile = new ZipFile(tempFile.toFile()); + } + /** + * The ZipFile constructors above will throw ZipException - + * a type of IOException - if there's something wrong + * with this file as a zip. There's no need to intercept it + * here, it will be caught further below, with other IOExceptions, + * at which point we'll give up on trying to unpack it and + * then attempt to save it as is. + */ + + int numberOfUnpackableFiles = 0; + Long combinedUnzippedFileSize = 0L; + + /** + * Note that we can't just use zipFile.size(), + * unfortunately, since that's the total number of entries, + * some of which can be directories. So we need to go + * through all the individual zipEntries and count the ones + * that are files. + */ + + for (Enumeration entries = zipFile.entries(); entries.hasMoreElements();) { + ZipEntry entry = entries.nextElement(); + logger.fine("inside first zip pass; this entry: "+entry.getName()); + if (!entry.isDirectory()) { + String shortName = entry.getName().replaceFirst("^.*[\\/]", ""); + // ... 
and, finally, check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { + numberOfUnpackableFiles++; + if (numberOfUnpackableFiles > fileNumberLimit) { + logger.warning("Zip upload - too many files in the zip to process individually."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + // In addition to counting the files, we can + // also check the file size while we're here, + // provided the size limit is defined; if a single + // file is above the individual size limit, unzipped, + // we give up on unpacking this zip archive as well: + if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) { + throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit))); + } + // Similarly, we want to check if saving all these unpacked + // files is going to push the disk usage over the + // quota: + if (storageQuotaLimit != null) { + combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); + if (combinedUnzippedFileSize > storageQuotaLimit) { + throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + } + } + } + } + } + + // OK we're still here - that means we can proceed unzipping. + + // Close the ZipFile, re-open as ZipInputStream: + zipFile.close(); + if (charset != null) { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); } else { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); } - Long storageQuotaLimitForUnzippedFiles = storageQuotaLimit; while (true) { try { zipEntry = unZippedIn.getNextEntry(); @@ -304,16 +382,16 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // OK, this seems like an OK file entry - we'll try // to read it and create a DataFile with it: - File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit, storageQuotaLimitForUnzippedFiles); - DataFile datafile = FileUtil.createSingleDataFile(version, - unZippedTempFile, - null, - shortName, + String storageIdentifier = FileUtil.generateStorageIdentifier(); + File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier); + Files.copy(unZippedIn, unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + // No need to check the size of this unpacked file against the size limit, + // since we've already checked for that in the first pass. 
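+ // (For context: the first pass above used java.util.zip.ZipFile to read the
+ // archive's central directory, so the per-file size limit and the combined
+ // storage quota were already enforced for every entry extracted here.)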
+ + DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); - storageQuotaLimitForUnzippedFiles = storageQuotaLimitForUnzippedFiles - datafile.getFilesize(); - if (!fileEntryName.equals(shortName)) { // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all @@ -336,7 +414,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); try { - recognizedType = determineFileType(new File(tempFileName), shortName); + recognizedType = determineFileType(unzippedFile, shortName); + // null the File explicitly, to release any open FDs: + unzippedFile = null; logger.fine("File utility recognized unzipped file as " + recognizedType); if (recognizedType != null && !recognizedType.equals("")) { datafile.setContentType(recognizedType); @@ -373,14 +453,18 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); datafiles.clear(); } finally { + if (zipFile != null) { + try { + zipFile.close(); + } catch (Exception zEx) {} + } if (unZippedIn != null) { try { unZippedIn.close(); - } catch (Exception zEx) { - } + } catch (Exception zEx) {} } } - if (datafiles.size() > 0) { + if (!datafiles.isEmpty()) { // remove the uploaded zip file: try { Files.delete(tempFile); @@ -447,7 +531,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } catch (IOException ioex) { // this one can be ignored } - } } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.severe("One of the unzipped shape files exceeded the size limit, or the storage quota; giving up. 
" + femsx.getMessage()); @@ -468,7 +551,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); } - if (datafiles.size() > 0) { + if (!datafiles.isEmpty()) { // remove the uploaded zip file: try { Files.delete(tempFile); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6b1dfe9c163..73d06ad8bee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -96,20 +96,14 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import org.apache.commons.io.FileUtils; import java.util.zip.GZIPInputStream; -import java.util.zip.ZipFile; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; import org.apache.commons.io.FilenameUtils; import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; -import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import java.util.Arrays; -import java.util.Enumeration; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import ucar.nc2.NetcdfFile; @@ -848,12 +842,12 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS Long fileSize = tempFile.toFile().length(); if (fileSizeLimit != null && fileSize > fileSizeLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsMaxSizeException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); + throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); } if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); } return tempFile.toFile(); From f6ea0fd06262a878a4e41048d046bce00d1e9784 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 25 Apr 2023 17:55:58 -0400 Subject: [PATCH 0045/1092] remaining quota in bytes should never be shown as negative, is exceeded. 
#9361 --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 5732b9df7ed..2e30e6b76f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -168,6 +168,9 @@ public void setTotalUsageInBytes(Long totalUsageInBytes) { } public Long getRemainingQuotaInBytes() { + if (totalUsageInBytes > totalAllocatedInBytes) { + return 0L; + } return totalAllocatedInBytes - totalUsageInBytes; } } From 30ea450e6c2f32ad3c545249375b510733e8ab13 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 26 Apr 2023 13:14:26 -0400 Subject: [PATCH 0046/1092] this should resolve the Sword upload errors (wasn't calling the new command with the right auth) #9361 --- .../iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index bd7067efb7b..c71eeb3d375 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -309,7 +309,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au try { //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); dataFiles = createDataFilesResult.getDataFiles(); } catch (CommandException ex) { From 4955f810437788fef1a345e356308f8ea32c66cb Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 26 Apr 2023 14:38:48 -0400 Subject: [PATCH 0047/1092] And this should fix the remaining shape file test. #9361 --- .../engine/command/impl/CreateNewDataFilesCommand.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 15304502232..365a260cfd2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -522,7 +522,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (new_datafile != null) { datafiles.add(new_datafile); // todo: can this new_datafile be null? - storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); + if (storageQuotaLimitForRezippedFiles != null) { + storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); + } } else { logger.severe("Could not add part of rezipped shapefile. 
new_datafile was null: " + finalFile.getName()); } From ecac37fbd64c83bfc8d045ae3204ab86dc7bc29d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 2 May 2023 10:52:13 -0400 Subject: [PATCH 0048/1092] initial Globus Store class with some quick test code --- pom.xml | 7 +- .../dataaccess/GlobusOverlayAccessIO.java | 655 ++++++++++++++++++ .../dataaccess/RemoteOverlayAccessIO.java | 34 +- .../iq/dataverse/settings/JvmSettings.java | 2 + 4 files changed, 680 insertions(+), 18 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java diff --git a/pom.xml b/pom.xml index 5f514819947..e5b191f0ed7 100644 --- a/pom.xml +++ b/pom.xml @@ -167,8 +167,13 @@ org.eclipse.microprofile.config microprofile-config-api - provided + + + org.apache.geronimo.config + geronimo-config-impl + 1.0 + jakarta.platform jakarta.jakartaee-api diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java new file mode 100644 index 00000000000..fe62e25ad6f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -0,0 +1,655 @@ +package edu.harvard.iq.dataverse.dataaccess; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.channels.Channel; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.Path; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.List; +import java.util.function.Predicate; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.apache.commons.lang3.NotImplementedException; +import org.apache.http.client.config.CookieSpecs; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustAllStrategy; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContextBuilder; +import org.apache.http.util.EntityUtils; + +import javax.net.ssl.SSLContext; + +/** + * @author qqmyers + * @param what it stores + */ +/* + * Globus Overlay Driver + * + * StorageIdentifier format: + * :///// + */ +public class GlobusOverlayAccessIO extends StorageIO { + + private static final 
Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO"); + + private StorageIO baseStore = null; + private String path = null; + private String endpointWithBasePath = null; + + private static HttpClientContext localContext = HttpClientContext.create(); + private PoolingHttpClientConnectionManager cm = null; + CloseableHttpClient httpclient = null; + private int timeout = 1200; + private RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000) + .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000) + .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build(); + private static boolean trustCerts = false; + private int httpConcurrency = 4; + + public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { + super(dvObject, req, driverId); + this.setIsLocalFile(false); + configureStores(req, driverId, null); + logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); + path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); + validatePath(path); + + logger.fine("Relative path: " + path); + } + + public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { + super(null, null, driverId); + this.setIsLocalFile(false); + configureStores(null, driverId, storageLocation); + + path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); + validatePath(path); + logger.fine("Relative path: " + path); + } + + private void validatePath(String relPath) throws IOException { + try { + URI absoluteURI = new URI(endpointWithBasePath + "/" + relPath); + if(!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { + throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s endpoint/basePath"); + } + } catch(URISyntaxException use) { + throw new IOException("Could not interpret storageidentifier in remote store " + this.driverId); + } + } + + + @Override + public void open(DataAccessOption... options) throws IOException { + + baseStore.open(options); + + DataAccessRequest req = this.getRequest(); + + if (isWriteAccessRequested(options)) { + isWriteAccess = true; + isReadAccess = false; + } else { + isWriteAccess = false; + isReadAccess = true; + } + + if (dvObject instanceof DataFile) { + String storageIdentifier = dvObject.getStorageIdentifier(); + + DataFile dataFile = this.getDataFile(); + + if (req != null && req.getParameter("noVarHeader") != null) { + baseStore.setNoVarHeader(true); + } + + if (storageIdentifier == null || "".equals(storageIdentifier)) { + throw new FileNotFoundException("Data Access: No local storage identifier defined for this datafile."); + } + + // Fix new DataFiles: DataFiles that have not yet been saved may use this method + // when they don't have their storageidentifier in the final form + // So we fix it up here. ToDo: refactor so that storageidentifier is generated + // by the appropriate StorageIO class and is final from the start. 
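+ // For illustration (the identifier value is hypothetical): a stored identifier
+ // such as "globus://1234//hdc1/image001.mrc" was split at its last "//" by the
+ // constructor, leaving the relative path "hdc1/image001.mrc" to be resolved
+ // against endpointWithBasePath and checked by validatePath().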
+ logger.fine("StorageIdentifier is: " + storageIdentifier); + + if (isReadAccess) { + if (dataFile.getFilesize() >= 0) { + this.setSize(dataFile.getFilesize()); + } else { + logger.fine("Setting size"); + this.setSize(getSizeFromGlobus()); + } + if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") + && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { + + List datavariables = dataFile.getDataTable().getDataVariables(); + String varHeaderLine = generateVariableHeader(datavariables); + this.setVarHeader(varHeaderLine); + } + + } + + this.setMimeType(dataFile.getContentType()); + + try { + this.setFileName(dataFile.getFileMetadata().getLabel()); + } catch (Exception ex) { + this.setFileName("unknown"); + } + } else if (dvObject instanceof Dataset) { + throw new IOException( + "Data Access: RemoteOverlay Storage driver does not support dvObject type Dataverse yet"); + } else if (dvObject instanceof Dataverse) { + throw new IOException( + "Data Access: RemoteOverlay Storage driver does not support dvObject type Dataverse yet"); + } else { + this.setSize(getSizeFromGlobus()); + } + } + + private long getSizeFromGlobus() { + throw new NotImplementedException(); + /* + long size = -1; + HttpHead head = new HttpHead(endpointWithBasePath + "/" + path); + try { + CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); + + try { + int code = response.getStatusLine().getStatusCode(); + logger.fine("Response for HEAD: " + code); + switch (code) { + case 200: + Header[] headers = response.getHeaders(HTTP.CONTENT_LEN); + logger.fine("Num headers: " + headers.length); + String sizeString = response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); + logger.fine("Content-Length: " + sizeString); + size = Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); + logger.fine("Found file size: " + size); + break; + default: + logger.warning("Response from " + head.getURI().toString() + " was " + code); + } + } finally { + EntityUtils.consume(response.getEntity()); + } + } catch (IOException e) { + logger.warning(e.getMessage()); + } + return size; + */ + } + + @Override + public InputStream getInputStream() throws IOException { + if (super.getInputStream() == null) { + try { + HttpGet get = new HttpGet(generateTemporaryDownloadUrl(null, null, null)); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); + + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 200: + setInputStream(response.getEntity().getContent()); + break; + default: + logger.warning("Response from " + get.getURI().toString() + " was " + code); + throw new IOException("Cannot retrieve: " + endpointWithBasePath + "/" + path + " code: " + code); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + e.printStackTrace(); + throw new IOException("Error retrieving: " + endpointWithBasePath + "/" + path + " " + e.getMessage()); + + } + setChannel(Channels.newChannel(super.getInputStream())); + } + return super.getInputStream(); + } + + @Override + public Channel getChannel() throws IOException { + if (super.getChannel() == null) { + getInputStream(); + } + return channel; + } + + @Override + public ReadableByteChannel getReadChannel() throws IOException { + // Make sure StorageIO.channel variable exists + getChannel(); + return super.getReadChannel(); + } + + @Override + public void delete() throws IOException { + // Delete is best-effort - we tell the remote 
server and it may or may not + // implement this call + if (!isDirectAccess()) { + throw new IOException("Direct Access IO must be used to permanently delete stored file objects"); + } + try { + HttpDelete del = new HttpDelete(endpointWithBasePath + "/" + path); + CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext); + try { + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 200: + logger.fine("Sent DELETE for " + endpointWithBasePath + "/" + path); + default: + logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code); + } + } finally { + EntityUtils.consume(response.getEntity()); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + throw new IOException("Error deleting: " + endpointWithBasePath + "/" + path); + + } + + // Delete all the cached aux files as well: + deleteAllAuxObjects(); + + } + + @Override + public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException { + return baseStore.openAuxChannel(auxItemTag, options); + } + + @Override + public boolean isAuxObjectCached(String auxItemTag) throws IOException { + return baseStore.isAuxObjectCached(auxItemTag); + } + + @Override + public long getAuxObjectSize(String auxItemTag) throws IOException { + return baseStore.getAuxObjectSize(auxItemTag); + } + + @Override + public Path getAuxObjectAsPath(String auxItemTag) throws IOException { + return baseStore.getAuxObjectAsPath(auxItemTag); + } + + @Override + public void backupAsAux(String auxItemTag) throws IOException { + baseStore.backupAsAux(auxItemTag); + } + + @Override + public void revertBackupAsAux(String auxItemTag) throws IOException { + baseStore.revertBackupAsAux(auxItemTag); + } + + @Override + // this method copies a local filesystem Path into this DataAccess Auxiliary + // location: + public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException { + baseStore.savePathAsAux(fileSystemPath, auxItemTag); + } + + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize); + } + + /** + * @param inputStream InputStream we want to save + * @param auxItemTag String representing this Auxiliary type ("extension") + * @throws IOException if anything goes wrong. 
+ */ + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag); + } + + @Override + public List listAuxObjects() throws IOException { + return baseStore.listAuxObjects(); + } + + @Override + public void deleteAuxObject(String auxItemTag) throws IOException { + baseStore.deleteAuxObject(auxItemTag); + } + + @Override + public void deleteAllAuxObjects() throws IOException { + baseStore.deleteAllAuxObjects(); + } + + @Override + public String getStorageLocation() throws IOException { + String fullStorageLocation = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStorageLocation); + int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); + if(driverIndex >=0) { + fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + } + if (this.getDvObject() instanceof Dataset) { + throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); + } else if (this.getDvObject() instanceof DataFile) { + fullStorageLocation = StorageIO.getDriverPrefix(this.driverId) + fullStorageLocation; + } else if (dvObject instanceof Dataverse) { + throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); + } + logger.fine("fullStorageLocation: " + fullStorageLocation); + return fullStorageLocation; + } + + @Override + public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: this is a remote DataAccess IO object, it has no local filesystem path associated with it."); + } + + @Override + public boolean exists() { + logger.fine("Exists called"); + return (getSizeFromGlobus() != -1); + } + + @Override + public WritableByteChannel getWriteChannel() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: there are no write Channels associated with S3 objects."); + } + + @Override + public OutputStream getOutputStream() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: there are no output Streams associated with S3 objects."); + } + + @Override + public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { + return baseStore.getAuxFileAsInputStream(auxItemTag); + } + + @Override + public boolean downloadRedirectEnabled() { + String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect"); + if ("true".equalsIgnoreCase(optionValue)) { + return true; + } + return false; + } + + public boolean downloadRedirectEnabled(String auxObjectTag) { + return baseStore.downloadRedirectEnabled(auxObjectTag); + } + + @Override + public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName) + throws IOException { + + // ToDo - support remote auxiliary Files + if (auxiliaryTag == null) { + String secretKey = System.getProperty("dataverse.files." 
+ this.driverId + ".secret-key"); + if (secretKey == null) { + return endpointWithBasePath + "/" + path; + } else { + return UrlSignerUtil.signUrl(endpointWithBasePath + "/" + path, getUrlExpirationMinutes(), null, "GET", + secretKey); + } + } else { + return baseStore.generateTemporaryDownloadUrl(auxiliaryTag, auxiliaryType, auxiliaryFileName); + } + } + + int getUrlExpirationMinutes() { + String optionValue = System.getProperty("dataverse.files." + this.driverId + ".url-expiration-minutes"); + if (optionValue != null) { + Integer num; + try { + num = Integer.parseInt(optionValue); + } catch (NumberFormatException ex) { + num = null; + } + if (num != null) { + return num; + } + } + return 60; + } + + private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + endpointWithBasePath = JvmSettings.BASE_URI.lookup(this.driverId); + logger.info("base-uri is " + endpointWithBasePath); + if (endpointWithBasePath == null) { + throw new IOException("dataverse.files." + this.driverId + ".base-uri is required"); + } else { + try { + new URI(endpointWithBasePath); + } catch (Exception e) { + logger.warning( + "Trouble interpreting base-url for store: " + this.driverId + " : " + e.getLocalizedMessage()); + throw new IOException("Can't interpret base-url as a URI"); + } + + } + + if (baseStore == null) { + String baseDriverId = getBaseStoreIdFor(driverId); + String fullStorageLocation = null; + String baseDriverType = System.getProperty("dataverse.files." + baseDriverId + ".type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + + if(dvObject instanceof Dataset) { + baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); + } else { + if (this.getDvObject() != null) { + fullStorageLocation = getStoragePath(); + + // S3 expects :/// + switch (baseDriverType) { + case DataAccess.S3: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" + + fullStorageLocation; + break; + case DataAccess.FILE: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" + + fullStorageLocation; + break; + default: + logger.warning("Not Implemented: RemoteOverlay store with base store type: " + + System.getProperty("dataverse.files." + baseDriverId + ".type")); + throw new IOException("Not implemented"); + } + + } else if (storageLocation != null) { + // ://// + //remoteDriverId:// is removed if coming through directStorageIO + int index = storageLocation.indexOf(DataAccess.SEPARATOR); + if(index > 0) { + storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); + } + //THe base store needs the baseStoreIdentifier and not the relative URL + fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); + + switch (baseDriverType) { + case DataAccess.S3: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" + + fullStorageLocation; + break; + case DataAccess.FILE: + fullStorageLocation = baseDriverId + DataAccess.SEPARATOR + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" + + fullStorageLocation; + break; + default: + logger.warning("Not Implemented: RemoteOverlay store with base store type: " + + System.getProperty("dataverse.files." 
+ baseDriverId + ".type")); + throw new IOException("Not implemented"); + } + } + baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); + } + if (baseDriverType.contentEquals(DataAccess.S3)) { + ((S3AccessIO) baseStore).setMainDriver(false); + } + } + remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name"); + try { + remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); + } catch(MalformedURLException mfue) { + logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); + } + } + + //Convenience method to assemble the path, starting with the DOI authority/identifier/, that is needed to create a base store via DataAccess.getDirectStorageIO - the caller has to add the store type specific prefix required. + private String getStoragePath() throws IOException { + String fullStoragePath = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStoragePath); + int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); + if(driverIndex >=0) { + fullStoragePath = fullStoragePath.substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + } + int suffixIndex = fullStoragePath.indexOf("//"); + if(suffixIndex >=0) { + fullStoragePath = fullStoragePath.substring(0, suffixIndex); + } + if (this.getDvObject() instanceof Dataset) { + fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/" + + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (this.getDvObject() instanceof DataFile) { + fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" + + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; + }else if (dvObject instanceof Dataverse) { + throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); + } + logger.fine("fullStoragePath: " + fullStoragePath); + return fullStoragePath; + } + + public CloseableHttpClient getSharedHttpClient() { + if (httpclient == null) { + try { + initHttpPool(); + httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build(); + + } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) { + logger.warning(ex.getMessage()); + } + } + return httpclient; + } + + private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException { + if (trustCerts) { + // use the TrustSelfSignedStrategy to allow Self Signed Certificates + SSLContext sslContext; + SSLConnectionSocketFactory connectionFactory; + + sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build(); + // create an SSL Socket Factory to use the SSLContext with the trust self signed + // certificate strategy + // and allow all hosts verifier. + connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE); + + Registry registry = RegistryBuilder.create() + .register("https", connectionFactory).build(); + cm = new PoolingHttpClientConnectionManager(registry); + } else { + cm = new PoolingHttpClientConnectionManager(); + } + cm.setDefaultMaxPerRoute(httpConcurrency); + cm.setMaxTotal(httpConcurrency > 20 ? 
httpConcurrency : 20); + } + + @Override + public void savePath(Path fileSystemPath) throws IOException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: savePath() not implemented in this storage driver."); + + } + + @Override + public void saveInputStream(InputStream inputStream) throws IOException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: saveInputStream() not implemented in this storage driver."); + + } + + @Override + public void saveInputStream(InputStream inputStream, Long filesize) throws IOException { + throw new UnsupportedDataAccessOperationException( + "RemoteOverlayAccessIO: saveInputStream(InputStream, Long) not implemented in this storage driver."); + + } + + protected static boolean isValidIdentifier(String driverId, String storageId) { + String urlPath = storageId.substring(storageId.lastIndexOf("//") + 2); + String baseUri = System.getProperty("dataverse.files." + driverId + ".base-uri"); + try { + URI absoluteURI = new URI(baseUri + "/" + urlPath); + if(!absoluteURI.normalize().toString().startsWith(baseUri)) { + logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId); + return false; + } + } catch(URISyntaxException use) { + logger.warning("Could not interpret storageidentifier in remote store " + driverId + " : " + storageId); + logger.warning(use.getLocalizedMessage()); + return false; + } + return true; + } + + public static String getBaseStoreIdFor(String driverId) { + return System.getProperty("dataverse.files." + driverId + ".base-store"); + } + + @Override + public List cleanUp(Predicate filter, boolean dryRun) throws IOException { + return baseStore.cleanUp(filter, dryRun); + } + + public static void main(String[] args) { + System.out.println("Running the main method"); + if (args.length > 0) { + System.out.printf("List of arguments: {}", Arrays.toString(args)); + } + System.setProperty("dataverse.files.globus.base-uri", "12345/top"); + System.out.println("Valid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + logger.info(JvmSettings.BASE_URI.lookup("globus")); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index 66c6a4cc2ee..ee2b6779cba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -65,7 +65,7 @@ public class RemoteOverlayAccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); private StorageIO baseStore = null; - private String urlPath = null; + private String path = null; private String baseUrl = null; private static HttpClientContext localContext = HttpClientContext.create(); @@ -83,10 +83,10 @@ public RemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) this.setIsLocalFile(false); configureStores(req, driverId, null); logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); - urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); - validatePath(urlPath); + path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); + validatePath(path); - logger.fine("Base URL: " + urlPath); + logger.fine("Base URL: " + path); } public 
RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOException { @@ -94,14 +94,14 @@ public RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOE this.setIsLocalFile(false); configureStores(null, driverId, storageLocation); - urlPath = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); - validatePath(urlPath); - logger.fine("Base URL: " + urlPath); + path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); + validatePath(path); + logger.fine("Base URL: " + path); } - private void validatePath(String path) throws IOException { + private void validatePath(String relPath) throws IOException { try { - URI absoluteURI = new URI(baseUrl + "/" + urlPath); + URI absoluteURI = new URI(baseUrl + "/" + relPath); if(!absoluteURI.normalize().toString().startsWith(baseUrl)) { throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s base-url"); } @@ -182,7 +182,7 @@ public void open(DataAccessOption... options) throws IOException { private long getSizeFromHttpHeader() { long size = -1; - HttpHead head = new HttpHead(baseUrl + "/" + urlPath); + HttpHead head = new HttpHead(baseUrl + "/" + path); try { CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); @@ -224,12 +224,12 @@ public InputStream getInputStream() throws IOException { break; default: logger.warning("Response from " + get.getURI().toString() + " was " + code); - throw new IOException("Cannot retrieve: " + baseUrl + "/" + urlPath + " code: " + code); + throw new IOException("Cannot retrieve: " + baseUrl + "/" + path + " code: " + code); } } catch (Exception e) { logger.warning(e.getMessage()); e.printStackTrace(); - throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath + " " + e.getMessage()); + throw new IOException("Error retrieving: " + baseUrl + "/" + path + " " + e.getMessage()); } setChannel(Channels.newChannel(super.getInputStream())); @@ -260,13 +260,13 @@ public void delete() throws IOException { throw new IOException("Direct Access IO must be used to permanently delete stored file objects"); } try { - HttpDelete del = new HttpDelete(baseUrl + "/" + urlPath); + HttpDelete del = new HttpDelete(baseUrl + "/" + path); CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext); try { int code = response.getStatusLine().getStatusCode(); switch (code) { case 200: - logger.fine("Sent DELETE for " + baseUrl + "/" + urlPath); + logger.fine("Sent DELETE for " + baseUrl + "/" + path); default: logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code); } @@ -275,7 +275,7 @@ public void delete() throws IOException { } } catch (Exception e) { logger.warning(e.getMessage()); - throw new IOException("Error deleting: " + baseUrl + "/" + urlPath); + throw new IOException("Error deleting: " + baseUrl + "/" + path); } @@ -420,9 +420,9 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary if (auxiliaryTag == null) { String secretKey = System.getProperty("dataverse.files." 
+ this.driverId + ".secret-key"); if (secretKey == null) { - return baseUrl + "/" + urlPath; + return baseUrl + "/" + path; } else { - return UrlSignerUtil.signUrl(baseUrl + "/" + urlPath, getUrlExpirationMinutes(), null, "GET", + return UrlSignerUtil.signUrl(baseUrl + "/" + path, getUrlExpirationMinutes(), null, "GET", secretKey); } } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 86130f5146e..4fb895f5adc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -47,6 +47,8 @@ public enum JvmSettings { // FILES SETTINGS SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), + FILES(SCOPE_FILES), + BASE_URI(FILES, "base-uri"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), From 2c4c927cc8f20d53ee1aaaf1979b793ee53f9b3f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 5 May 2023 14:13:02 -0400 Subject: [PATCH 0049/1092] add token --- .../dataaccess/GlobusOverlayAccessIO.java | 171 +++++++++++------- .../iq/dataverse/settings/JvmSettings.java | 1 + 2 files changed, 111 insertions(+), 61 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index fe62e25ad6f..050b9ddc176 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.io.FileNotFoundException; import java.io.IOException; @@ -31,6 +32,7 @@ import java.util.logging.Logger; import org.apache.commons.lang3.NotImplementedException; +import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; @@ -49,6 +51,7 @@ import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; +import javax.json.JsonObject; import javax.net.ssl.SSLContext; /** @@ -58,8 +61,8 @@ /* * Globus Overlay Driver * - * StorageIdentifier format: - * :///// + * StorageIdentifier format: :///// */ public class GlobusOverlayAccessIO extends StorageIO { @@ -68,6 +71,7 @@ public class GlobusOverlayAccessIO extends StorageIO { private StorageIO baseStore = null; private String path = null; private String endpointWithBasePath = null; + private String globusToken = null; private static HttpClientContext localContext = HttpClientContext.create(); private PoolingHttpClientConnectionManager cm = null; @@ -86,7 +90,7 @@ public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); validatePath(path); - + logger.fine("Relative path: " + path); } @@ -99,18 +103,17 @@ public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOE validatePath(path); logger.fine("Relative path: " + path); } - + private void validatePath(String relPath) throws IOException { try { URI absoluteURI = new 
URI(endpointWithBasePath + "/" + relPath); - if(!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { + if (!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s endpoint/basePath"); } - } catch(URISyntaxException use) { + } catch (URISyntaxException use) { throw new IOException("Could not interpret storageidentifier in remote store " + this.driverId); } - } - + } @Override public void open(DataAccessOption... options) throws IOException { @@ -181,37 +184,64 @@ public void open(DataAccessOption... options) throws IOException { } } + // Call the Globus API to get the file size private long getSizeFromGlobus() { - throw new NotImplementedException(); - /* - long size = -1; - HttpHead head = new HttpHead(endpointWithBasePath + "/" + path); + // Construct Globus URL + URI absoluteURI = null; try { - CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); - - try { - int code = response.getStatusLine().getStatusCode(); - logger.fine("Response for HEAD: " + code); - switch (code) { - case 200: - Header[] headers = response.getHeaders(HTTP.CONTENT_LEN); - logger.fine("Num headers: " + headers.length); - String sizeString = response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); - logger.fine("Content-Length: " + sizeString); - size = Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); - logger.fine("Found file size: " + size); - break; - default: - logger.warning("Response from " + head.getURI().toString() + " was " + code); - } - } finally { - EntityUtils.consume(response.getEntity()); + int filenameStart = path.lastIndexOf("/") + 1; + int pathStart = endpointWithBasePath.indexOf("/") + 1; + + String directoryPath = (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + + path.substring(0, filenameStart); + String filename = path.substring(filenameStart); + String endpoint = pathStart > 0 ? 
endpointWithBasePath.substring(0, pathStart - 1) : endpointWithBasePath; + + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + path + "&filter=name:" + filename); + HttpGet get = new HttpGet(absoluteURI); + String token = JvmSettings.GLOBUS_TOKEN.lookup(driverId); + logger.info("Token is " + token); + get.addHeader("Authorization", "Bearer " + token); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); + if (response.getStatusLine().getStatusCode() == 200) { + //Get reponse as string + String responseString = EntityUtils.toString(response.getEntity()); + logger.fine("Response from " + get.getURI().toString() + " is: " + responseString); + JsonObject responseJson = JsonUtil.getJsonObject(responseString); + return (long) responseJson.getInt("size"); + } else { + logger.warning("Response from " + get.getURI().toString() + " was " + response.getStatusLine().getStatusCode()); + logger.info(EntityUtils.toString(response.getEntity())); } + } catch (URISyntaxException e) { + // Should have been caught in validatePath + e.printStackTrace(); + } catch (ClientProtocolException e) { + // TODO Auto-generated catch block + e.printStackTrace(); } catch (IOException e) { - logger.warning(e.getMessage()); + // TODO Auto-generated catch block + e.printStackTrace(); } - return size; - */ + return -1; + + /* + * long size = -1; HttpHead head = new HttpHead(endpointWithBasePath + "/" + + * path); try { CloseableHttpResponse response = + * getSharedHttpClient().execute(head, localContext); + * + * try { int code = response.getStatusLine().getStatusCode(); + * logger.fine("Response for HEAD: " + code); switch (code) { case 200: Header[] + * headers = response.getHeaders(HTTP.CONTENT_LEN); logger.fine("Num headers: " + * + headers.length); String sizeString = + * response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); + * logger.fine("Content-Length: " + sizeString); size = + * Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); + * logger.fine("Found file size: " + size); break; default: + * logger.warning("Response from " + head.getURI().toString() + " was " + code); + * } } finally { EntityUtils.consume(response.getEntity()); } } catch + * (IOException e) { logger.warning(e.getMessage()); } return size; + */ } @Override @@ -360,8 +390,9 @@ public String getStorageLocation() throws IOException { String fullStorageLocation = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStorageLocation); int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); - if(driverIndex >=0) { - fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + if (driverIndex >= 0) { + fullStorageLocation = fullStorageLocation + .substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); } if (this.getDvObject() instanceof Dataset) { throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); @@ -411,7 +442,7 @@ public boolean downloadRedirectEnabled() { } return false; } - + public boolean downloadRedirectEnabled(String auxObjectTag) { return baseStore.downloadRedirectEnabled(auxObjectTag); } @@ -469,9 +500,10 @@ private void configureStores(DataAccessRequest req, String driverId, String stor if (baseStore == null) { String baseDriverId = getBaseStoreIdFor(driverId); String fullStorageLocation = null; - String baseDriverType = 
System.getProperty("dataverse.files." + baseDriverId + ".type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - - if(dvObject instanceof Dataset) { + String baseDriverType = System.getProperty("dataverse.files." + baseDriverId + ".type", + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + + if (dvObject instanceof Dataset) { baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); } else { if (this.getDvObject() != null) { @@ -486,8 +518,8 @@ private void configureStores(DataAccessRequest req, String driverId, String stor break; case DataAccess.FILE: fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" - + fullStorageLocation; + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + + "/" + fullStorageLocation; break; default: logger.warning("Not Implemented: RemoteOverlay store with base store type: " @@ -497,12 +529,12 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } else if (storageLocation != null) { // ://// - //remoteDriverId:// is removed if coming through directStorageIO + // remoteDriverId:// is removed if coming through directStorageIO int index = storageLocation.indexOf(DataAccess.SEPARATOR); - if(index > 0) { + if (index > 0) { storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); } - //THe base store needs the baseStoreIdentifier and not the relative URL + // THe base store needs the baseStoreIdentifier and not the relative URL fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); switch (baseDriverType) { @@ -513,8 +545,8 @@ private void configureStores(DataAccessRequest req, String driverId, String stor break; case DataAccess.FILE: fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" - + fullStorageLocation; + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + + "/" + fullStorageLocation; break; default: logger.warning("Not Implemented: RemoteOverlay store with base store type: " @@ -530,37 +562,41 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name"); try { - remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); - } catch(MalformedURLException mfue) { + remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); + } catch (MalformedURLException mfue) { logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); } } - //Convenience method to assemble the path, starting with the DOI authority/identifier/, that is needed to create a base store via DataAccess.getDirectStorageIO - the caller has to add the store type specific prefix required. + // Convenience method to assemble the path, starting with the DOI + // authority/identifier/, that is needed to create a base store via + // DataAccess.getDirectStorageIO - the caller has to add the store type specific + // prefix required. 
private String getStoragePath() throws IOException { String fullStoragePath = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStoragePath); int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); - if(driverIndex >=0) { - fullStoragePath = fullStoragePath.substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + if (driverIndex >= 0) { + fullStoragePath = fullStoragePath + .substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); } int suffixIndex = fullStoragePath.indexOf("//"); - if(suffixIndex >=0) { - fullStoragePath = fullStoragePath.substring(0, suffixIndex); + if (suffixIndex >= 0) { + fullStoragePath = fullStoragePath.substring(0, suffixIndex); } if (this.getDvObject() instanceof Dataset) { fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/" + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; } else if (this.getDvObject() instanceof DataFile) { fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" - + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; - }else if (dvObject instanceof Dataverse) { + + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (dvObject instanceof Dataverse) { throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); } logger.fine("fullStoragePath: " + fullStoragePath); return fullStoragePath; } - + public CloseableHttpClient getSharedHttpClient() { if (httpclient == null) { try { @@ -622,11 +658,11 @@ protected static boolean isValidIdentifier(String driverId, String storageId) { String baseUri = System.getProperty("dataverse.files." 
+ driverId + ".base-uri"); try { URI absoluteURI = new URI(baseUri + "/" + urlPath); - if(!absoluteURI.normalize().toString().startsWith(baseUri)) { + if (!absoluteURI.normalize().toString().startsWith(baseUri)) { logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId); return false; } - } catch(URISyntaxException use) { + } catch (URISyntaxException use) { logger.warning("Could not interpret storageidentifier in remote store " + driverId + " : " + storageId); logger.warning(use.getLocalizedMessage()); return false; @@ -642,14 +678,27 @@ public static String getBaseStoreIdFor(String driverId) { public List cleanUp(Predicate filter, boolean dryRun) throws IOException { return baseStore.cleanUp(filter, dryRun); } - + public static void main(String[] args) { System.out.println("Running the main method"); if (args.length > 0) { System.out.printf("List of arguments: {}", Arrays.toString(args)); } - System.setProperty("dataverse.files.globus.base-uri", "12345/top"); + System.setProperty("dataverse.files.globus.base-uri", "2791b83e-b989-47c5-a7fa-ce65fd949522"); System.out.println("Valid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + System.setProperty("dataverse.files.globus.globus-token","Mjc5MWI4M2UtYjk4OS00N2M1LWE3ZmEtY2U2NWZkOTQ5NTIyOlprRmxGejNTWDlkTVpUNk92ZmVJaFQyTWY0SDd4cXBoTDNSS29vUmRGVlE9"); + System.setProperty("dataverse.files.globus.base-store","file"); + System.setProperty("dataverse.files.file.type", + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + System.setProperty("dataverse.files.file.directory", "/tmp/files"); logger.info(JvmSettings.BASE_URI.lookup("globus")); + try { + GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234//2791b83e-b989-47c5-a7fa-ce65fd949522/hdc1/image001.mrc", "globus"); + logger.info("Size is " + gsio.getSizeFromGlobus()); + + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 4fb895f5adc..eac8411c939 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -49,6 +49,7 @@ public enum JvmSettings { FILES_DIRECTORY(SCOPE_FILES, "directory"), FILES(SCOPE_FILES), BASE_URI(FILES, "base-uri"), + GLOBUS_TOKEN(FILES, "globus-token"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), From cef601ee6af94fcfa56d9d02efb807d2c6bb20d2 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 13:33:17 -0400 Subject: [PATCH 0050/1092] A new, experimental version of the Add Files command with the dynamic permissions lookup that allows it to be checked on either the Dataset or the parent Collection (when files are being added in the context of creating a new dataset via the Add Dataset page).
#9361 --- .../impl/CreateNewDataFilesCommand.java | 49 ++++++++++++++++++- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 365a260cfd2..fb4f7a7db86 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -9,11 +9,12 @@ import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; @@ -42,8 +43,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; import java.util.zip.ZipFile; @@ -57,7 +62,10 @@ * * @author landreev */ -@RequiredPermissions( Permission.EditDataset ) +// Note the commented out @RequiredPermissions. We need to use dynamic +// permissions instead, to accommodate both adding files to an existing +// dataset and files being uploaded on create of a new dataset. 
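+// The dynamic lookup is implemented in the getRequiredPermissions() override
+// near the bottom of this command: constructed with a Dataverse (upload on
+// create of a new dataset), it returns a map requiring Permission.AddDataset
+// on that collection; constructed for an existing dataset, it requires
+// Permission.EditDataset instead. Illustrative shape of the returned map:
+// {"" -> {AddDataset}} on create, {"" -> {EditDataset}} otherwise.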
+//@RequiredPermissions( Permission.EditDataset ) public class CreateNewDataFilesCommand extends AbstractCommand { private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); @@ -68,6 +76,7 @@ public class CreateNewDataFilesCommand extends AbstractCommand> getRequiredPermissions() { + Map> ret = new HashMap<>(); + + ret.put("", new HashSet<>()); + + if (dataverse != null) { + // The command is called in the context of uploading files on + // create of a new dataset + ret.get("").add(Permission.AddDataset); + } else { + // An existing dataset + ret.get("").add(Permission.EditDataset); + } + + return ret; + } } From 3c3378f5a3bf39eff13a582d0dc52a2a5549af8f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 9 May 2023 14:53:25 -0400 Subject: [PATCH 0051/1092] start refactoring Globus bean --- .../dataaccess/GlobusOverlayAccessIO.java | 28 +++++++++----- .../iq/dataverse/globus/AccessToken.java | 2 +- .../dataverse/globus/GlobusServiceBean.java | 37 +++++++++++-------- 3 files changed, 41 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 050b9ddc176..0d7c5458e14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -5,6 +5,8 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.globus.AccessToken; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; @@ -28,10 +30,8 @@ import java.util.Arrays; import java.util.List; import java.util.function.Predicate; -import java.util.logging.Level; import java.util.logging.Logger; -import org.apache.commons.lang3.NotImplementedException; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; @@ -83,6 +83,8 @@ public class GlobusOverlayAccessIO extends StorageIO { private static boolean trustCerts = false; private int httpConcurrency = 4; + private String globusAccessToken = null; + public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); this.setIsLocalFile(false); @@ -190,18 +192,19 @@ private long getSizeFromGlobus() { URI absoluteURI = null; try { int filenameStart = path.lastIndexOf("/") + 1; - int pathStart = endpointWithBasePath.indexOf("/") + 1; - - String directoryPath = (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + int pathStart = endpointWithBasePath.indexOf("/"); +logger.info("endpointWithBasePath: " + endpointWithBasePath); + String directoryPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + path.substring(0, filenameStart); + logger.info("directoryPath: " + directoryPath); String filename = path.substring(filenameStart); String endpoint = pathStart > 0 ? 
endpointWithBasePath.substring(0, pathStart - 1) : endpointWithBasePath; - absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + path + "&filter=name:" + filename); + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + directoryPath + "&filter=name:" + filename); HttpGet get = new HttpGet(absoluteURI); - String token = JvmSettings.GLOBUS_TOKEN.lookup(driverId); - logger.info("Token is " + token); - get.addHeader("Authorization", "Bearer " + token); + + logger.info("Token is " + globusAccessToken); + get.addHeader("Authorization", "Bearer " + globusAccessToken); CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); if (response.getStatusLine().getStatusCode() == 200) { //Get reponse as string @@ -482,6 +485,8 @@ int getUrlExpirationMinutes() { } private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + AccessToken accessToken = GlobusServiceBean.getClientToken(JvmSettings.GLOBUS_TOKEN.lookup(driverId)); + globusAccessToken = accessToken.getOtherTokens().get(0).getAccessToken(); endpointWithBasePath = JvmSettings.BASE_URI.lookup(this.driverId); logger.info("base-uri is " + endpointWithBasePath); if (endpointWithBasePath == null) { @@ -692,8 +697,11 @@ public static void main(String[] args) { DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); System.setProperty("dataverse.files.file.directory", "/tmp/files"); logger.info(JvmSettings.BASE_URI.lookup("globus")); + + + try { - GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234//2791b83e-b989-47c5-a7fa-ce65fd949522/hdc1/image001.mrc", "globus"); + GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234///hdc1/image001.mrc", "globus"); logger.info("Size is " + gsio.getSizeFromGlobus()); } catch (IOException e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java index 877fc68e4a1..c93e2c6aa94 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java @@ -46,7 +46,7 @@ String getRefreshToken() { return refreshToken; } - ArrayList getOtherTokens() { + public ArrayList getOtherTokens() { return otherTokens; } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 9d80c5cc280..c2137dd1f47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -167,7 +167,8 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException { if (ruleId.length() > 0) { - AccessToken clientTokenUser = getClientToken(); + AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); + globusLogger.info("Start deleting permissions."); String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); @@ -264,15 +265,21 @@ public GlobusTask getTask(AccessToken clientTokenUser, String taskId, Logger glo return task; } - public AccessToken getClientToken() throws MalformedURLException { - String globusBasicToken = 
settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""); - URL url = new URL( - "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); - - MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null); + public static AccessToken getClientToken(String globusBasicToken) { + URL url; AccessToken clientTokenUser = null; - if (result.status == 200) { - clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); + + try { + url = new URL( + "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); + + MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null); + if (result.status == 200) { + clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); + } + } catch (MalformedURLException e) { + // On a statically defined URL... + e.printStackTrace(); } return clientTokenUser; } @@ -306,7 +313,7 @@ public AccessToken getAccessToken(HttpServletRequest origRequest, String globusB } - public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, + public static MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, String jsonString) { String str = null; HttpURLConnection connection = null; @@ -359,7 +366,7 @@ public MakeRequestResponse makeRequest(URL url, String authType, String authCode } - private StringBuilder readResultJson(InputStream in) { + private static StringBuilder readResultJson(InputStream in) { StringBuilder sb = null; try { @@ -378,7 +385,7 @@ private StringBuilder readResultJson(InputStream in) { return sb; } - private T parseJson(String sb, Class jsonParserClass, boolean namingPolicy) { + private static T parseJson(String sb, Class jsonParserClass, boolean namingPolicy) { if (sb != null) { Gson gson = null; if (namingPolicy) { @@ -420,7 +427,7 @@ public String getDirectory(String datasetId) { } - class MakeRequestResponse { + static class MakeRequestResponse { public String jsonResponse; public int status; @@ -451,7 +458,7 @@ public boolean giveGlobusPublicPermissions(String datasetId) if (globusEndpoint.equals("") || globusBasicToken.equals("")) { return false; } - AccessToken clientTokenUser = getClientToken(); + AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); if (clientTokenUser == null) { logger.severe("Cannot get client token "); return false; @@ -908,7 +915,7 @@ private GlobusTask globusStatusCheck(String taskId, Logger globusLogger) throws try { globusLogger.info("checking globus transfer task " + taskId); Thread.sleep(pollingInterval * 1000); - AccessToken clientTokenUser = getClientToken(); + AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); // success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); task = getTask(clientTokenUser, taskId, globusLogger); if (task != null) { From f14b75454a524fd8816d6f5367b0e15fbd0ded92 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 9 May 2023 14:53:56 -0400 Subject: [PATCH 0052/1092] enable globus store main() to run - will revert --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index e5b191f0ed7..4926f59f8a0 100644 --- a/pom.xml +++ b/pom.xml @@ -184,7 +184,7 @@ org.glassfish 
jakarta.json - provided + From 51cfa409ddaa7d9d111bff71d6e69beff0a3454c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 15:53:31 -0400 Subject: [PATCH 0053/1092] Changes to the edit files page to accommodate "upload on create" #9361 --- .../edu/harvard/iq/dataverse/EditDatafilesPage.java | 11 ++++++++++- .../command/impl/CreateNewDataFilesCommand.java | 3 ++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index c39e6f62ce2..b892acdb527 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2058,7 +2058,16 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // zip file. ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); - Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + Command<CreateDataFileResult> cmd; + if (mode == FileEditMode.CREATE) { + // This is a file upload in the context of creating a brand new + // dataset that does not yet exist in the database. We must + // use the version of the Create New Files constructor that takes + // the parent Dataverse as the extra argument: + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, null, workingVersion.getDataset().getOwner()); + } else { + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + } CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index fb4f7a7db86..e2d7f834c4a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -64,7 +64,8 @@ */ // Note the commented out @RequiredPermissions. We need to use dynamic // permissions instead, to accommodate both adding files to an existing -// dataset and files being uploaded on create of a new dataset. +// dataset and files being uploaded in the context of creating a new dataset +// via the Add Dataset page. //@RequiredPermissions( Permission.EditDataset ) public class CreateNewDataFilesCommand extends AbstractCommand<CreateDataFileResult> { private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); From 8560ba4e16f763d48e6ec20f1ddaecd9b5728603 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 17:16:56 -0400 Subject: [PATCH 0054/1092] Makes the create file command refuse a zip file if its uncompressed content exceeds the remaining quota; as opposed to accepting it zipped, even if the compressed size is under the quota.
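The gist of the new check, as a consolidated sketch (names such as storageQuotaLimit, zipFile and bytesToHumanReadable are taken from the diff below; this is an outline, not the verbatim patch): walk the zip directory up front and compare the running total of the declared uncompressed entry sizes against the remaining quota, rejecting the upload as soon as that total exceeds it.

    long combinedUnzippedFileSize = 0L;
    Enumeration<? extends ZipEntry> entries = zipFile.entries();
    while (entries.hasMoreElements()) {
        ZipEntry entry = entries.nextElement();
        if (storageQuotaLimit != null) {
            // ZipEntry.getSize() reports the declared *uncompressed* size
            combinedUnzippedFileSize += entry.getSize();
            if (combinedUnzippedFileSize > storageQuotaLimit) {
                // reject outright, rather than falling back to saving the
                // (smaller) zip file itself, as the code used to do
                throw new CommandExecutionException(MessageFormat.format(
                        BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"),
                        bytesToHumanReadable(storageQuotaLimit)), this);
            }
        }
    }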
#9361 --- .../command/impl/CreateNewDataFilesCommand.java | 17 +++++++++++------ src/main/java/propertyFiles/Bundle.properties | 1 + 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index e2d7f834c4a..2608069dcb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -348,7 +348,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (storageQuotaLimit != null) { combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); if (combinedUnzippedFileSize > storageQuotaLimit) { - throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + // change of plans: if the unzipped content inside exceeds the remaining quota, + // we reject the upload outright, rather than accepting the zip + // file as is. + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this); } } } @@ -476,11 +480,12 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); datafiles.clear(); - } catch (FileExceedsStorageQuotaException fesqx) { - logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); - datafiles.clear(); - } finally { + } /*catch (FileExceedsStorageQuotaException fesqx) { + //logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); + //warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); + //datafiles.clear(); + throw new CommandExecutionException(fesqx.getMessage(), fesqx, this); + }*/ finally { if (zipFile != null) { try { zipFile.close(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 55679a01a07..b12e0df7c08 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2155,6 +2155,7 @@ file.addreplace.file_size_ok=File size is in range. file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. 
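# (For orientation: the {0}, {1} placeholders in these messages are filled via
# java.text.MessageFormat - the new key added below, for instance, is rendered in
# CreateNewDataFilesCommand as MessageFormat.format(BundleUtil.getStringFromBundle(
# "file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)).)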
file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}. +file.addreplace.error.unzipped.quota_exceeded=Unzipped files exceed the remaining storage quota of {0}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. file.addreplace.error.parsing=Error in parsing provided json From 2ac403d35220f6ec61242f7f7f1c12b00f36a93c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 May 2023 11:04:00 -0400 Subject: [PATCH 0055/1092] changes needed to accommodate the quota changing dynamically, before the uploaded files are saved (when uploading via the page) #9361 --- .../iq/dataverse/EditDatafilesPage.java | 25 +++++++----- .../datadeposit/MediaResourceManagerImpl.java | 10 +++-- .../datasetutility/AddReplaceFileHelper.java | 6 ++- .../impl/CreateNewDataFilesCommand.java | 39 +++++++++++++++---- 4 files changed, 57 insertions(+), 23 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index b892acdb527..57729f321b5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean; import edu.harvard.iq.dataverse.DataFile.ChecksumType; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; @@ -206,6 +207,7 @@ public enum Referrer { private final int NUMBER_OF_SCROLL_ROWS = 25; private DataFile singleFile = null; + private UserStorageQuota userStorageQuota = null; public DataFile getSingleFile() { return singleFile; @@ -358,7 +360,7 @@ public String getHumanMaxTotalUploadSizeInBytes() { } public boolean isStorageQuotaEnforced() { - return maxTotalUploadSizeInBytes != null; + return userStorageQuota != null; } public Long getMaxIngestSizeInBytes() { @@ -529,22 +531,24 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); if (systemConfig.isStorageQuotasEnforced()) { - this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + this.userStorageQuota = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset); + this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes(); } else { this.maxTotalUploadSizeInBytes = null; } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - + logger.fine("done"); saveEnabled = true; + return null; } public boolean isQuotaExceeded() { - return systemConfig.isStorageQuotasEnforced() && datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes() == 0; + return systemConfig.isStorageQuotasEnforced() && userStorageQuota != null && userStorageQuota.getRemainingQuotaInBytes() == 0; } public String init() { @@ -589,7 +593,8 @@ public String init() { clone = workingVersion.cloneDatasetVersion(); 
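// (The pattern this commit introduces, in brief: the page now caches the
// UserStorageQuota object once, at init time, and passes that same instance to
// every CreateNewDataFilesCommand it submits; the command updates the quota's
// total usage in memory as files are created, so the next upload in the same
// editing session sees the reduced remaining quota without another database
// lookup. The cached fields are set in the changed lines that follow:
//     this.userStorageQuota = datafileService.getUserStorageQuota(user, dataset);
//     this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes();
// where user and dataset stand in for the longer expressions used in the diff.)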
this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); if (systemConfig.isStorageQuotasEnforced()) { - this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + this.userStorageQuota = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset); + this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes(); } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); @@ -687,7 +692,7 @@ public String init() { if (isHasPublicStore()){ JH.addMessage(FacesMessage.SEVERITY_WARN, getBundleString("dataset.message.label.fileAccess"), getBundleString("dataset.message.publicInstall")); } - + return null; } @@ -1525,7 +1530,7 @@ public void handleDropBoxUpload(ActionEvent event) { // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, userStorageQuota, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); @@ -2064,9 +2069,9 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // dataset that does not yet exist in the database. 
We must // use the version of the Create New Files constructor that takes // the parent Dataverse as the extra argument: - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, null, workingVersion.getDataset().getOwner()); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, workingVersion.getDataset().getOwner()); } else { - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null); } CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); @@ -2193,7 +2198,7 @@ public void handleExternalUpload() { //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index c71eeb3d375..0f11b858238 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -10,9 +10,8 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -22,7 +21,6 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; -import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -309,7 +307,11 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit 
deposit, AuthCredentials au try { //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + UserStorageQuota quota = null; + if (systemConfig.isStorageQuotasEnforced()) { + quota = dataFileService.getUserStorageQuota(user, dataset); + } + Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, quota, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); dataFiles = createDataFilesResult.getDataFiles(); } catch (CommandException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 3914c4dc4cc..38a31dba82f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1212,7 +1212,11 @@ private boolean step_030_createNewFilesViaIngest(){ this.newCheckSumType, this.systemConfig);*/ - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, newCheckSum, newCheckSumType); + DataFileServiceBean.UserStorageQuota quota = null; + if (systemConfig.isStorageQuotasEnforced()) { + quota = fileService.getUserStorageQuota(dvRequest.getAuthenticatedUser(), dataset); + } + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 2608069dcb2..b7ceddcd8bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -78,12 +78,13 @@ public class CreateNewDataFilesCommand extends AbstractCommand datafiles = new ArrayList<>(); //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default @@ -132,7 +136,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (ctxt.systemConfig().isStorageQuotasEnforced()) { //storageQuotaLimit = ctxt.files().getClass()...; - UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); + //UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); if (quota != null) { storageQuotaLimit = quota.getRemainingQuotaInBytes(); } @@ -247,6 +251,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + // Update quota if present + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + } return CreateDataFileResult.success(fileName, finalType, 
datafiles); } @@ -259,6 +267,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException ZipEntry zipEntry = null; int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); + Long combinedUnzippedFileSize = 0L; try { Charset charset = null; @@ -307,8 +316,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException */ int numberOfUnpackableFiles = 0; - Long combinedUnzippedFileSize = 0L; - + /** * Note that we can't just use zipFile.size(), * unfortunately, since that's the total number of entries, @@ -363,6 +371,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // Close the ZipFile, re-open as ZipInputStream: zipFile.close(); + // reset: + combinedUnzippedFileSize = 0L; if (charset != null) { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); @@ -458,6 +468,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + combinedUnzippedFileSize += datafile.getFilesize(); } } } @@ -505,6 +516,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // do nothing - it's just a temp file. logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); } + // update the quota object: + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedUnzippedFileSize); + } // and return: return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -524,9 +539,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.severe("Processing of zipped shapefile failed."); return CreateDataFileResult.error(fileName, finalType); } + Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; try { - Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { FileInputStream finalFileInputStream = new FileInputStream(finalFile); @@ -598,6 +613,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: " + se.getMessage()); } + // update the quota object: + if (quota != null) { + quota.setTotalUsageInBytes(storageQuotaLimitForRezippedFiles); + } return CreateDataFileResult.success(fileName, finalType, datafiles); } else { logger.severe("No files added from directory of rezipped shapefiles"); @@ -665,6 +684,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + // Update quota (may not be necessary in the context of direct upload - ?) + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + } return CreateDataFileResult.success(fileName, finalType, datafiles); } From 87c7383c646de0756503cc6116e13f3d47f0ad31 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 May 2023 13:03:07 -0400 Subject: [PATCH 0056/1092] More fixes for the command. 
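The main correction, in a consolidated sketch (quota is the UserStorageQuota instance handed to the command; getRemainingQuotaInBytes() is, as the surrounding code suggests, derived from the total usage): usage must grow by the number of bytes actually saved, whereas the previous commit in places subtracted the file size or overwrote the usage with a limit value.

    long fileSize = unZippedTempFile.length(); // bytes actually written
    // ... once the DataFile has been created successfully:
    if (quota != null) {
        quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize);
    }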
#9361 --- .../impl/CreateNewDataFilesCommand.java | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index b7ceddcd8bf..6f582a4c015 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -118,7 +118,6 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi @Override public CreateDataFileResult execute(CommandContext ctxt) throws CommandException { - logger.info("entering command.execute();"); List datafiles = new ArrayList<>(); //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default @@ -135,8 +134,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException Long storageQuotaLimit = null; if (ctxt.systemConfig().isStorageQuotasEnforced()) { - //storageQuotaLimit = ctxt.files().getClass()...; - //UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); if (quota != null) { storageQuotaLimit = quota.getRemainingQuotaInBytes(); } @@ -220,9 +217,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } DataFile datafile = null; + long fileSize = 0L; try { uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit); + fileSize = unZippedTempFile.length(); datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { // it looks like we simply skip the file silently, if its uncompressed size @@ -253,7 +252,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException datafiles.add(datafile); // Update quota if present if (quota != null) { - quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -539,7 +538,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.severe("Processing of zipped shapefile failed."); return CreateDataFileResult.error(fileName, finalType); } - Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; + long combinedRezippedFileSize = 0L; try { @@ -551,7 +550,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException continue; } - File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimitForRezippedFiles); + File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimit != null ? 
storageQuotaLimit - combinedRezippedFileSize : null); DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); String directoryName = null; @@ -569,10 +568,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } if (new_datafile != null) { datafiles.add(new_datafile); + combinedRezippedFileSize += unZippedShapeTempFile.length(); // todo: can this new_datafile be null? - if (storageQuotaLimitForRezippedFiles != null) { - storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); - } } else { logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); } @@ -615,7 +612,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } // update the quota object: if (quota != null) { - quota.setTotalUsageInBytes(storageQuotaLimitForRezippedFiles); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedRezippedFileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } else { @@ -686,7 +683,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // Update quota (may not be necessary in the context of direct upload - ?) if (quota != null) { - quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } From dbd97ff1c9c7c19ec56270fdfcef7e11056a7778 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 11 May 2023 16:38:49 -0400 Subject: [PATCH 0057/1092] develop + title --- .../api/imports/ImportDDIServiceBean.java | 33 +- .../dataverse/export/ddi/DdiExportUtil.java | 481 +++++++++++------- .../export/openaire/OpenAireExportUtil.java | 243 +++------ 3 files changed, 393 insertions(+), 364 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 458803e0c92..bafd7267acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1266,24 +1266,26 @@ private void parseVersionNumber(DatasetVersionDTO dvDTO, String versionNumber) { } - private void processSerStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { - FieldDTO seriesName=null; - FieldDTO seriesInformation=null; - for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { + private void processSerStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { + FieldDTO seriesInformation = null; + FieldDTO seriesName = null; + for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { + if (xmlr.getLocalName().equals("serInfo")) { + seriesInformation = FieldDTO.createPrimitiveFieldDTO("seriesInformation", parseText(xmlr)); + } if (xmlr.getLocalName().equals("serName")) { - seriesName = FieldDTO.createPrimitiveFieldDTO("seriesName", parseText(xmlr)); - - } else if (xmlr.getLocalName().equals("serInfo")) { - seriesInformation=FieldDTO.createPrimitiveFieldDTO("seriesInformation", parseText(xmlr) ); + seriesName = FieldDTO.createPrimitiveFieldDTO("seriesName", 
parseText(xmlr)); } } else if (event == XMLStreamConstants.END_ELEMENT) { if (xmlr.getLocalName().equals("serStmt")) { - citation.getFields().add(FieldDTO.createCompoundFieldDTO("series",seriesName,seriesInformation )); + if (seriesInformation != null || seriesName != null) { + citation.addField(FieldDTO.createMultipleCompoundFieldDTO("series", seriesName, seriesInformation )); + } return; } } - } + } } private void processDistStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { @@ -1337,7 +1339,6 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th List> producers = new ArrayList<>(); List> grants = new ArrayList<>(); List> software = new ArrayList<>(); - List prodPlac = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1353,7 +1354,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("prodDate")) { citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate"))); } else if (xmlr.getLocalName().equals("prodPlac")) { + List prodPlac = new ArrayList<>(); prodPlac.add(parseText(xmlr, "prodPlac")); + citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); } else if (xmlr.getLocalName().equals("software")) { HashSet set = new HashSet<>(); addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version")); @@ -1386,9 +1389,6 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th if (producers.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers)); } - if (prodPlac.size() > 0) { - citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); - } return; } } @@ -1437,8 +1437,9 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws if (otherIds.size()>0) { citation.addField(FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIds)); } - if (altTitles.size()>0) { - citation.addField(FieldDTO.createMultiplePrimitiveFieldDTO("alternativeTitle", altTitles)); + if (!altTitles.isEmpty()) { + FieldDTO field = FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.alternativeTitle, altTitles); + citation.getFields().add(field); } return; } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 4ea90ea6199..a647c2a6f2a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -30,6 +30,7 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; import edu.harvard.iq.dataverse.export.DDIExporter; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -93,7 +94,6 @@ public class DdiExportUtil { public static final String CITATION_BLOCK_NAME = "citation"; public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) { - logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson)); Gson gson = new Gson(); DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson, DatasetDTO.class); try { @@ -181,7 +181,7 @@ private static void 
createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) String pidUri = pid; //Some tests don't send real PIDs - don't try to get their URL form if(!pidUri.equals("null:null/null")) { - pidUri= new GlobalId(persistentProtocol + ":" + persistentAuthority + "/" + persistentId).toURL().toString(); + pidUri= PidUtil.parseAsGlobalID(persistentProtocol, persistentAuthority, persistentId).asURL(); } // The "persistentAgency" tag is used for the "agency" attribute of the // ddi section; back in the DVN3 days we used "handle" and "DOI" @@ -202,12 +202,12 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage()); writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle)); - + //writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle)); FieldDTO altField = dto2FieldDTO( version, DatasetFieldConstant.alternativeTitle, "citation" ); if (altField != null) { writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage()); } - + xmlw.writeStartElement("IDNo"); writeAttribute(xmlw, "agency", persistentAgency); @@ -239,9 +239,11 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) } writeDistributorsElement(xmlw, version, datasetDto.getMetadataLanguage()); writeContactsElement(xmlw, version); - writeFullElement(xmlw, "distDate", dto2Primitive(version, DatasetFieldConstant.distributionDate)); + /* per SCHEMA, depositr comes before depDate! - L.A. */ writeFullElement(xmlw, "depositr", dto2Primitive(version, DatasetFieldConstant.depositor)); + /* ... and depDate comes before distDate - L.A. */ writeFullElement(xmlw, "depDate", dto2Primitive(version, DatasetFieldConstant.dateOfDeposit)); + writeFullElement(xmlw, "distDate", dto2Primitive(version, DatasetFieldConstant.distributionDate)); xmlw.writeEndElement(); // diststmt @@ -294,23 +296,16 @@ private static void writeOtherStudyMaterial(XMLStreamWriter xmlw , DatasetVersio xmlw.writeEndElement(); //othrStdyMat } + /* + + + + + + */ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO version) throws XMLStreamException { xmlw.writeStartElement("dataAccs"); - if (version.getTermsOfUse() != null && !version.getTermsOfUse().trim().equals("")) { - xmlw.writeStartElement("notes"); - writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_USE); - writeAttribute(xmlw, "level", LEVEL_DV); - xmlw.writeCharacters(version.getTermsOfUse()); - xmlw.writeEndElement(); //notes - } - if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) { - xmlw.writeStartElement("notes"); - writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_ACCESS); - writeAttribute(xmlw, "level", LEVEL_DV); - xmlw.writeCharacters(version.getTermsOfAccess()); - xmlw.writeEndElement(); //notes - } - + xmlw.writeStartElement("setAvail"); writeFullElement(xmlw, "accsPlac", version.getDataAccessPlace()); writeFullElement(xmlw, "origArch", version.getOriginalArchive()); @@ -318,6 +313,7 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver writeFullElement(xmlw, "collSize", version.getSizeOfCollection()); writeFullElement(xmlw, "complete", version.getStudyCompletion()); xmlw.writeEndElement(); //setAvail + xmlw.writeStartElement("useStmt"); writeFullElement(xmlw, "confDec", version.getConfidentialityDeclaration()); writeFullElement(xmlw, "specPerm", 
version.getSpecialPermissions()); @@ -328,6 +324,15 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver writeFullElement(xmlw, "conditions", version.getConditions()); writeFullElement(xmlw, "disclaimer", version.getDisclaimer()); xmlw.writeEndElement(); //useStmt + + /* any s: */ + if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) { + xmlw.writeStartElement("notes"); + writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_ACCESS); + writeAttribute(xmlw, "level", LEVEL_DV); + xmlw.writeCharacters(version.getTermsOfAccess()); + xmlw.writeEndElement(); //notes + } xmlw.writeEndElement(); //dataAccs } @@ -388,141 +393,222 @@ private static void writeVersionStatement(XMLStreamWriter xmlw, DatasetVersionDT xmlw.writeEndElement(); // verStmt } + /* From the DDI 2.5 schema: + + + + + + + + + + + + + */ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String lang) throws XMLStreamException { xmlw.writeStartElement("sumDscr"); + FieldDTO timePeriodCoveredDTO = null; + FieldDTO dateOfCollectionDTO = null; + FieldDTO geographicCoverageDTO = null; + FieldDTO geographicBoundingBoxDTO = null; + FieldDTO unitOfAnalysisDTO = null; + FieldDTO universeDTO = null; + FieldDTO kindOfDataDTO = null; + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); + if ("citation".equals(key)) { - Integer per = 0; - Integer coll = 0; for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.timePeriodCovered.equals(fieldDTO.getTypeName())) { - String dateValStart = ""; - String dateValEnd = ""; - for (HashSet foo : fieldDTO.getMultipleCompound()) { - per++; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) { - dateValStart = next.getSinglePrimitive(); - } - if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) { - dateValEnd = next.getSinglePrimitive(); - } - } - if (!dateValStart.isEmpty()) { - writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "start", dateValStart ); - } - if (!dateValEnd.isEmpty()) { - writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "end", dateValEnd ); - } - } + timePeriodCoveredDTO = fieldDTO; } + if (DatasetFieldConstant.dateOfCollection.equals(fieldDTO.getTypeName())) { - String dateValStart = ""; - String dateValEnd = ""; - for (HashSet foo : fieldDTO.getMultipleCompound()) { - coll++; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) { - dateValStart = next.getSinglePrimitive(); - } - if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) { - dateValEnd = next.getSinglePrimitive(); - } - } - if (!dateValStart.isEmpty()) { - writeDateElement(xmlw, "collDate", "P"+ coll.toString(), "start", dateValStart ); - } - if (!dateValEnd.isEmpty()) { - writeDateElement(xmlw, "collDate", "P"+ coll.toString(), "end", dateValEnd ); - } - } + dateOfCollectionDTO = fieldDTO; } + if (DatasetFieldConstant.kindOfData.equals(fieldDTO.getTypeName())) { - writeMultipleElement(xmlw, "dataKind", fieldDTO, lang); + kindOfDataDTO = fieldDTO; } } } - - if("geospatial".equals(key)){ + + if ("geospatial".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if 
(DatasetFieldConstant.geographicCoverage.equals(fieldDTO.getTypeName())) { - - for (HashSet foo : fieldDTO.getMultipleCompound()) { - HashMap geoMap = new HashMap<>(); - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.country.equals(next.getTypeName())) { - geoMap.put("country", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.city.equals(next.getTypeName())) { - geoMap.put("city", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.state.equals(next.getTypeName())) { - geoMap.put("state", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) { - geoMap.put("otherGeographicCoverage", next.getSinglePrimitive()); - } - } - - if (geoMap.get("country") != null) { - writeFullElement(xmlw, "nation", geoMap.get("country")); - } - if (geoMap.get("city") != null) { - writeFullElement(xmlw, "geogCover", geoMap.get("city")); - } - if (geoMap.get("state") != null) { - writeFullElement(xmlw, "geogCover", geoMap.get("state")); - } - if (geoMap.get("otherGeographicCoverage") != null) { - writeFullElement(xmlw, "geogCover", geoMap.get("otherGeographicCoverage")); - } - - } + geographicCoverageDTO = fieldDTO; } if (DatasetFieldConstant.geographicBoundingBox.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { - xmlw.writeStartElement("geoBndBox"); - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "westBL", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "eastBL", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "northBL", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "southBL", next.getSinglePrimitive()); - } - - } - xmlw.writeEndElement(); - } + geographicBoundingBoxDTO = fieldDTO; } } - writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit)); } - if("socialscience".equals(key)){ + if ("socialscience".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.universe.equals(fieldDTO.getTypeName())) { - writeMultipleElement(xmlw, "universe", fieldDTO, lang); + universeDTO = fieldDTO; } if (DatasetFieldConstant.unitOfAnalysis.equals(fieldDTO.getTypeName())) { - writeI18NElementList(xmlw, "anlyUnit", fieldDTO.getMultipleVocab(), "unitOfAnalysis", fieldDTO.getTypeClass(), "socialscience", lang); + unitOfAnalysisDTO = fieldDTO; + } + } + } + } + /* Finally, we can write the fields we have collected, in the correct order: -L.A.*/ + + if (timePeriodCoveredDTO != null) { + String dateValStart = ""; + String dateValEnd = ""; + Integer per = 0; + for (HashSet foo : timePeriodCoveredDTO.getMultipleCompound()) { + per++; + for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) { + dateValStart = next.getSinglePrimitive(); + } + if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) { + dateValEnd = next.getSinglePrimitive(); + } + } + if (!dateValStart.isEmpty()) { + writeDateElement(xmlw, "timePrd", "P" + per.toString(), 
"start", dateValStart); + } + if (!dateValEnd.isEmpty()) { + writeDateElement(xmlw, "timePrd", "P" + per.toString(), "end", dateValEnd); + } + } + } + + if (dateOfCollectionDTO != null) { + String dateValStart = ""; + String dateValEnd = ""; + Integer coll = 0; + for (HashSet foo : dateOfCollectionDTO.getMultipleCompound()) { + coll++; + for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) { + dateValStart = next.getSinglePrimitive(); + } + if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) { + dateValEnd = next.getSinglePrimitive(); } } + if (!dateValStart.isEmpty()) { + writeDateElement(xmlw, "collDate", "P" + coll.toString(), "start", dateValStart); + } + if (!dateValEnd.isEmpty()) { + writeDateElement(xmlw, "collDate", "P" + coll.toString(), "end", dateValEnd); + } } } + + /* and come next, in that order. -L.A. */ + if (geographicCoverageDTO != null) { + + List nationList = new ArrayList<>(); + List geogCoverList = new ArrayList<>(); + + for (HashSet foo : geographicCoverageDTO.getMultipleCompound()) { + for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + /* our "country" field maps 1:1 to the DDI "": */ + if (DatasetFieldConstant.country.equals(next.getTypeName())) { + nationList.add(next.getSinglePrimitive()); + } + /* city, state and otherGeographicCoverage all exported as "": */ + if (DatasetFieldConstant.city.equals(next.getTypeName()) + || DatasetFieldConstant.state.equals(next.getTypeName()) + || DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) { + geogCoverList.add(next.getSinglePrimitive()); + } + } + } + + /** + * And now we can write all the fields encountered, first the + * "" entries, then all the "" ones: + */ + for (String nationEntry : nationList) { + writeFullElement(xmlw, "nation", nationEntry); + } + for (String geogCoverEntry : geogCoverList) { + writeFullElement(xmlw, "geogCover", geogCoverEntry); + } + } + + writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit)); + + /* Only 1 geoBndBox is allowed in the DDI. + So, I'm just going to arbitrarily use the first one, and ignore the rest! -L.A. */ + if (geographicBoundingBoxDTO != null) { + HashSet bndBoxSet = geographicBoundingBoxDTO.getMultipleCompound().get(0); + xmlw.writeStartElement("geoBndBox"); + HashMap geoBndBoxMap = new HashMap<>(); + for (FieldDTO next : bndBoxSet) { + if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) { + geoBndBoxMap.put("westBL", next.getSinglePrimitive()); + } + if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) { + geoBndBoxMap.put("eastBL", next.getSinglePrimitive()); + } + if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) { + geoBndBoxMap.put("northBL", next.getSinglePrimitive()); + } + if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) { + geoBndBoxMap.put("southBL", next.getSinglePrimitive()); + } + } + + /* Once again, order is important! 
*/ + /* (the <geoBndBox> children, in schema order: <westBL>, <eastBL>, <southBL>, <northBL>) */ + if (geoBndBoxMap.get("westBL") != null) { + writeFullElement(xmlw, "westBL", geoBndBoxMap.get("westBL")); + } + if (geoBndBoxMap.get("eastBL") != null) { + writeFullElement(xmlw, "eastBL", geoBndBoxMap.get("eastBL")); + } + if (geoBndBoxMap.get("southBL") != null) { + writeFullElement(xmlw, "southBL", geoBndBoxMap.get("southBL")); + } + if (geoBndBoxMap.get("northBL") != null) { + writeFullElement(xmlw, "northBL", geoBndBoxMap.get("northBL")); + } + + xmlw.writeEndElement(); + } + + /* analyUnit: */ + if (unitOfAnalysisDTO != null) { + writeI18NElementList(xmlw, "anlyUnit", unitOfAnalysisDTO.getMultipleVocab(), "unitOfAnalysis", unitOfAnalysisDTO.getTypeClass(), "socialscience", lang); + + } + + /* universe: */ + if (universeDTO != null) { + writeMultipleElement(xmlw, "universe", universeDTO, lang); + } + + /* finally, any "kind of data" entries: */ + if (kindOfDataDTO != null) { + writeMultipleElement(xmlw, "dataKind", kindOfDataDTO, lang); + } + xmlw.writeEndElement(); //sumDscr } @@ -544,6 +630,29 @@ private static void writeDateElement(XMLStreamWriter xmlw, String element, Strin } + /** + * Again, <dataColl> is an xs:sequence - order is important and must follow + * the schema. -L.A. + * (schema order of the <dataColl> children, as written below: <timeMeth>, + * <dataCollector>, <collectorTraining>, <frequenc>, <sampProc>, <deviat>, + * <collMode>, <resInstru>, <sources>, <collSitu>, <actMin>, <ConOps>, + * <weight>, <cleanOps>) + */ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO version, String lang) throws XMLStreamException{ xmlw.writeStartElement("method"); xmlw.writeStartElement("dataColl"); @@ -557,13 +666,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO writeI18NElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang); - xmlw.writeStartElement("sources"); - writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); - writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); - writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang); - writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang); - xmlw.writeEndElement(); //sources - + /* <collMode> comes before <sources>: */ FieldDTO collModeFieldDTO = dto2FieldDTO(version, DatasetFieldConstant.collectionMode, "socialscience"); if (collModeFieldDTO != null) { // This field was made multiple as of 5.10 if (collModeFieldDTO.getMultiple()) { writeMultipleElement(xmlw, "collMode", collModeFieldDTO, lang); } else { writeI18NElement(xmlw, "collMode", version, DatasetFieldConstant.collectionMode, lang); } } + /* and so does <resInstru>: */ writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang); + xmlw.writeStartElement("sources"); + writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); + writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); + writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang); + writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang); + xmlw.writeEndElement(); //sources + + writeI18NElement(xmlw, "collSitu", version, DatasetFieldConstant.dataCollectionSituation, lang); writeI18NElement(xmlw, "actMin", version, DatasetFieldConstant.actionsToMinimizeLoss, lang); - writeI18NElement(xmlw, "conOps", version, DatasetFieldConstant.controlOperations, lang); + /* "<ConOps>" has the uppercase C: */ + writeI18NElement(xmlw, "ConOps", version, DatasetFieldConstant.controlOperations, lang); writeI18NElement(xmlw,
"weight", version, DatasetFieldConstant.weighting, lang); writeI18NElement(xmlw, "cleanOps", version, DatasetFieldConstant.cleaningOperations, lang); xmlw.writeEndElement(); //dataColl + /* before : */ + writeNotesElement(xmlw, version); + xmlw.writeStartElement("anlyInfo"); //writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes)); writeI18NElement(xmlw, "respRate", version, DatasetFieldConstant.responseRate, lang); writeI18NElement(xmlw, "EstSmpErr", version, DatasetFieldConstant.samplingErrorEstimates, lang); writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang); xmlw.writeEndElement(); //anlyInfo - writeNotesElement(xmlw, version); xmlw.writeEndElement();//method } @@ -852,7 +967,6 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT String producerAffiliation = ""; String producerAbbreviation = ""; String producerLogo = ""; - String producerURL = ""; for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.producerName.equals(next.getTypeName())) { @@ -867,10 +981,6 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT if (DatasetFieldConstant.producerLogo.equals(next.getTypeName())) { producerLogo = next.getSinglePrimitive(); } - if (DatasetFieldConstant.producerURL.equals(next.getTypeName())) { - producerURL = next.getSinglePrimitive(); - - } } if (!producerName.isEmpty()) { xmlw.writeStartElement("producer"); @@ -880,12 +990,9 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT if (!producerAbbreviation.isEmpty()) { writeAttribute(xmlw, "abbr", producerAbbreviation); } - if (!producerLogo.isEmpty()) { + /*if (!producerLogo.isEmpty()) { writeAttribute(xmlw, "role", producerLogo); - } - if (!producerURL.isEmpty()) { - writeAttribute(xmlw, "URI", producerURL); - } + }*/ xmlw.writeCharacters(producerName); xmlw.writeEndElement(); //AuthEnty } @@ -896,12 +1003,10 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT } } writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate)); - - FieldDTO prodPlac = dto2FieldDTO( version, DatasetFieldConstant.productionPlace, "citation" ); - if (prodPlac != null) { - writeMultipleElement(xmlw, "prodPlac", prodPlac, null); - } - + // productionPlace was made multiple as of 5.14: + // (a quick backward compatibility check was added to dto2PrimitiveList(), + // see the method for details) + writeFullElementList(xmlw, "prodPlac", dto2PrimitiveList(version, DatasetFieldConstant.productionPlace)); writeSoftwareElement(xmlw, version); writeGrantElement(xmlw, version); @@ -921,7 +1026,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio String distributorAffiliation = ""; String distributorAbbreviation = ""; String distributorURL = ""; - String distributorLogoURL = ""; for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.distributorName.equals(next.getTypeName())) { @@ -936,9 +1040,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio if (DatasetFieldConstant.distributorURL.equals(next.getTypeName())) { distributorURL = next.getSinglePrimitive(); } - if (DatasetFieldConstant.distributorLogo.equals(next.getTypeName())) { - distributorLogoURL = next.getSinglePrimitive(); - } } if (!distributorName.isEmpty()) { 
xmlw.writeStartElement("distrbtr"); @@ -954,9 +1055,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio if (!distributorURL.isEmpty()) { writeAttribute(xmlw, "URI", distributorURL); } - if (!distributorLogoURL.isEmpty()) { - writeAttribute(xmlw, "role", distributorLogoURL); - } xmlw.writeCharacters(distributorName); xmlw.writeEndElement(); //AuthEnty } @@ -1000,16 +1098,33 @@ private static void writeRelPublElement(XMLStreamWriter xmlw, DatasetVersionDTO if (citation != null && !citation.trim().equals("")) { xmlw.writeStartElement("relPubl"); xmlw.writeStartElement("citation"); + /* + + + + + + + + + + + + (In other words - titlStmt is mandatory! -L.A.) + */ + xmlw.writeStartElement("titlStmt"); + writeFullElement(xmlw, "titl", citation); if (IDNo != null && !IDNo.trim().equals("")) { - xmlw.writeStartElement("titlStmt"); + xmlw.writeStartElement("IDNo"); if (IDType != null && !IDType.trim().equals("")) { - xmlw.writeAttribute("agency", IDType ); + xmlw.writeAttribute("agency", IDType); } xmlw.writeCharacters(IDNo); xmlw.writeEndElement(); //IDNo - xmlw.writeEndElement(); // titlStmt } + xmlw.writeEndElement(); // titlStmt + writeFullElement(xmlw,"biblCit",citation); xmlw.writeEndElement(); //citation @@ -1181,33 +1296,34 @@ private static void writeSeriesElement(XMLStreamWriter xmlw, DatasetVersionDTO d for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); - if ("citation".equals(key)) { + if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) { - xmlw.writeStartElement("serStmt"); String seriesName = ""; String seriesInformation = ""; - Set foo = fieldDTO.getSingleCompound(); + for (HashSet foo : fieldDTO.getMultipleCompound()) { + xmlw.writeStartElement("serStmt"); for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) { - seriesName = next.getSinglePrimitive(); + seriesName = next.getSinglePrimitive(); } if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) { - seriesInformation = next.getSinglePrimitive(); + seriesInformation = next.getSinglePrimitive(); } } - if (!seriesName.isEmpty()){ - xmlw.writeStartElement("serName"); + if (!seriesName.isEmpty()) { + xmlw.writeStartElement("serName"); xmlw.writeCharacters(seriesName); - xmlw.writeEndElement(); //grantno + xmlw.writeEndElement(); //serName } - if (!seriesInformation.isEmpty()){ - xmlw.writeStartElement("serInfo"); + if (!seriesInformation.isEmpty()) { + xmlw.writeStartElement("serInfo"); xmlw.writeCharacters(seriesInformation); - xmlw.writeEndElement(); //grantno + xmlw.writeEndElement(); //serInfo } - xmlw.writeEndElement(); //serStmt + xmlw.writeEndElement(); //serStmt + } } } } @@ -1234,17 +1350,18 @@ private static void writeTargetSampleElement(XMLStreamWriter xmlw, DatasetVersio actualSize = next.getSinglePrimitive(); } } - - if (!sizeFormula.isEmpty()) { - xmlw.writeStartElement("sampleSizeFormula"); - xmlw.writeCharacters(sizeFormula); - xmlw.writeEndElement(); //sampleSizeFormula - } + /* must come before ! -L.A. 
*/ if (!actualSize.isEmpty()) { xmlw.writeStartElement("sampleSize"); xmlw.writeCharacters(actualSize); xmlw.writeEndElement(); //sampleSize } + if (!sizeFormula.isEmpty()) { + xmlw.writeStartElement("sampleSizeFormula"); + xmlw.writeCharacters(sizeFormula); + xmlw.writeEndElement(); //sampleSizeFormula + } + xmlw.writeEndElement(); // targetSampleSize } } @@ -1356,8 +1473,8 @@ private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, List< writeAttribute(xmlw, "ID", "f" + fileMetadata.getDataFile().getId()); String dfIdentifier = fileMetadata.getDataFile().getIdentifier(); if (dfIdentifier != null && !dfIdentifier.isEmpty()){ - GlobalId globalId = new GlobalId(fileMetadata.getDataFile()); - writeAttribute(xmlw, "URI", globalId.toURL().toString()); + GlobalId globalId = fileMetadata.getDataFile().getGlobalId(); + writeAttribute(xmlw, "URI", globalId.asURL()); } else { writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileMetadata.getDataFile().getId()); } @@ -1432,7 +1549,15 @@ private static List dto2PrimitiveList(DatasetVersionDTO datasetVersionDT MetadataBlockDTO value = entry.getValue(); for (FieldDTO fieldDTO : value.getFields()) { if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) { - return fieldDTO.getMultiplePrimitive(); + // This hack is here to make sure the export does not blow + // up on an instance that upgraded to a Dataverse version + // where a certain primitive has been made multiple, but has + // not yet updated the block. + if (fieldDTO.getMultiple() != null && fieldDTO.getMultiple()) { + return fieldDTO.getMultiplePrimitive(); + } else { + return Arrays.asList(fieldDTO.getSinglePrimitive()); + } } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 34cb7a4e138..334b18f4601 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -1,13 +1,8 @@ package edu.harvard.iq.dataverse.export.openaire; import java.io.OutputStream; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.logging.Logger; -import java.util.List; import javax.json.JsonObject; import javax.xml.stream.XMLOutputFactory; @@ -18,13 +13,17 @@ import com.google.gson.Gson; +import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.HandlenetServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; +import edu.harvard.iq.dataverse.util.PersonOrOrgUtil; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -71,7 +70,7 @@ private static void createOpenAire(XMLStreamWriter xmlw, DatasetDTO datasetDto) String persistentAgency = datasetDto.getProtocol(); String persistentAuthority = datasetDto.getAuthority(); String persistentId = datasetDto.getIdentifier(); - GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId); + GlobalId
globalId = PidUtil.parseAsGlobalID(persistentAgency, persistentAuthority, persistentId); // The sequence is revied using sample: // https://schema.datacite.org/meta/kernel-4.0/example/datacite-example-full-v4.0.xml @@ -83,7 +82,7 @@ private static void createOpenAire(XMLStreamWriter xmlw, DatasetDTO datasetDto) String language = null; // 1, Identifier (with mandatory type sub-property) (M) - writeIdentifierElement(xmlw, globalId.toURL().toString(), language); + writeIdentifierElement(xmlw, globalId.asURL(), language); // 2, Creator (with optional given name, family name, // name identifier and affiliation sub-properties) (M) @@ -191,10 +190,10 @@ public static void writeIdentifierElement(XMLStreamWriter xmlw, String identifie if (StringUtils.isNotBlank(identifier)) { Map identifier_map = new HashMap(); - if (StringUtils.containsIgnoreCase(identifier, GlobalId.DOI_RESOLVER_URL)) { + if (StringUtils.containsIgnoreCase(identifier, DOIServiceBean.DOI_RESOLVER_URL)) { identifier_map.put("identifierType", "DOI"); identifier = StringUtils.substring(identifier, identifier.indexOf("10.")); - } else if (StringUtils.containsIgnoreCase(identifier, GlobalId.HDL_RESOLVER_URL)) { + } else if (StringUtils.containsIgnoreCase(identifier, HandlenetServiceBean.HDL_RESOLVER_URL)) { identifier_map.put("identifierType", "Handle"); if (StringUtils.contains(identifier, "http")) { identifier = identifier.replace(identifier.substring(0, identifier.indexOf("/") + 2), ""); @@ -250,72 +249,26 @@ public static void writeCreatorsElement(XMLStreamWriter xmlw, DatasetVersionDTO if (StringUtils.isNotBlank(creatorName)) { creator_check = writeOpenTag(xmlw, "creators", creator_check); xmlw.writeStartElement("creator"); // - - boolean nameType_check = false; + Map creator_map = new HashMap(); - if ((StringUtils.containsIgnoreCase(nameIdentifierScheme, "orcid"))) { + JsonObject creatorObj = PersonOrOrgUtil.getPersonOrOrganization(creatorName, false, + StringUtils.containsIgnoreCase(nameIdentifierScheme, "orcid")); + + // creatorName=, + if (creatorObj.getBoolean("isPerson")) { creator_map.put("nameType", "Personal"); - nameType_check = true; - } - // ToDo - the algorithm to determine if this is a Person or Organization here - // has been abstracted into a separate - // edu.harvard.iq.dataverse.util.PersonOrOrgUtil class that could be used here - // to avoid duplication/variants of the algorithm - creatorName = Cleanup.normalize(creatorName); - // Datacite algorithm, https://github.com/IQSS/dataverse/issues/2243#issuecomment-358615313 - if (creatorName.contains(",")) { - String givenName = FirstNames.getInstance().getFirstName(creatorName); - boolean isOrganization = Organizations.getInstance().isOrganization(creatorName); - - // creatorName=, - if (givenName != null && !isOrganization) { - // givenName ok - creator_map.put("nameType", "Personal"); - nameType_check = true; - } else if (isOrganization) { - creator_map.put("nameType", "Organizational"); - nameType_check = false; - } - writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language); - - if ((nameType_check) && (!creatorName.replaceFirst(",", "").contains(","))) { - // creatorName=, - String[] fullName = creatorName.split(", "); - if (fullName.length == 2) { - givenName = fullName[1]; - String familyName = fullName[0]; - - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); - } else { - // It's possible to get here if "Smith," is entered as an author name. 
- logger.info("Unable to write givenName and familyName based on creatorName '" + creatorName + "'."); - } - } } else { - String givenName = FirstNames.getInstance().getFirstName(creatorName); - boolean isOrganization = Organizations.getInstance().isOrganization(creatorName); - - if (givenName != null && !isOrganization) { - // givenName ok, creatorName= - creator_map.put("nameType", "Personal"); - nameType_check = true; - writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language); - - String familyName = ""; - if (givenName.length() + 1 < creatorName.length()) { - familyName = creatorName.substring(givenName.length() + 1); - } - - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); - } else { - // default - if (isOrganization) { - creator_map.put("nameType", "Organizational"); - } - writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language); - } + creator_map.put("nameType", "Organizational"); + } + writeFullElement(xmlw, null, "creatorName", creator_map, + creatorObj.getString("fullName"), language); + if (creatorObj.containsKey("givenName")) { + writeFullElement(xmlw, null, "givenName", null, creatorObj.getString("givenName"), + language); + } + if (creatorObj.containsKey("familyName")) { + writeFullElement(xmlw, null, "familyName", null, creatorObj.getString("familyName"), + language); } if (StringUtils.isNotBlank(nameIdentifier)) { @@ -369,10 +322,34 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle); title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language); + //String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle); + //title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language); title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, "citation", title_check, language); + writeEndTag(xmlw, title_check); + } + private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { + MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); + if (block != null) { + logger.info("Block is not empty"); + List fieldsBlock = block.getFields(); + if (fieldsBlock != null) { + for (FieldDTO fieldDTO : fieldsBlock) { + logger.info(titleType + " " + fieldDTO.getTypeName()); + if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { + logger.info("Found Alt title"); + List fields = fieldDTO.getMultiplePrimitive(); + for (String value : fields) { + if (!writeTitleElement(xmlw, titleType, value, title_check, language)) + title_check = false; + } + break; + } + } + } + } - writeEndTag(xmlw, title_check); + return title_check; } /** @@ -405,33 +382,6 @@ private static boolean writeTitleElement(XMLStreamWriter xmlw, String titleType, } return title_check; } - - private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { - MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); - if (block != null) { - logger.info("Block is not empty"); - List 
fieldsBlock = block.getFields(); - if (fieldsBlock != null) { - for (FieldDTO fieldDTO : fieldsBlock) { - logger.info(titleType + " " + fieldDTO.getTypeName()); - if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { - logger.info("Found Alt title"); - List fields = fieldDTO.getMultiplePrimitive(); - for (String value : fields) { - if (!writeTitleElement(xmlw, titleType, value, title_check, language)) - title_check = false; - } - break; - } - } - } - } - - return title_check; - } - - - /** * 5, PublicationYear (M) @@ -737,61 +687,23 @@ public static void writeContributorElement(XMLStreamWriter xmlw, String contribu boolean nameType_check = false; Map contributor_map = new HashMap(); - // ToDo - the algorithm to determine if this is a Person or Organization here - // has been abstracted into a separate - // edu.harvard.iq.dataverse.util.PersonOrOrgUtil class that could be used here - // to avoid duplication/variants of the algorithm + JsonObject contributorObj = PersonOrOrgUtil.getPersonOrOrganization(contributorName, + ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName)), false); - contributorName = Cleanup.normalize(contributorName); - // Datacite algorithm, https://github.com/IQSS/dataverse/issues/2243#issuecomment-358615313 - if (contributorName.contains(",")) { - String givenName = FirstNames.getInstance().getFirstName(contributorName); - boolean isOrganization = Organizations.getInstance().isOrganization(contributorName); - - // contributorName=, - if (givenName != null && !isOrganization) { - // givenName ok + if (contributorObj.getBoolean("isPerson")) { + if(contributorObj.containsKey("givenName")) { contributor_map.put("nameType", "Personal"); - nameType_check = true; - // re: the above toDo - the ("ContactPerson".equals(contributorType) && - // !isValidEmailAddress(contributorName)) clause in the next line could/should - // be sent as the OrgIfTied boolean parameter - } else if (isOrganization || ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName))) { - contributor_map.put("nameType", "Organizational"); - } - writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - - if ((nameType_check) && (!contributorName.replaceFirst(",", "").contains(","))) { - // contributorName=, - String[] fullName = contributorName.split(", "); - givenName = fullName[1]; - String familyName = fullName[0]; - - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); } } else { - String givenName = FirstNames.getInstance().getFirstName(contributorName); - boolean isOrganization = Organizations.getInstance().isOrganization(contributorName); - - if (givenName != null && !isOrganization) { - contributor_map.put("nameType", "Personal"); - writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - - String familyName = ""; - if (givenName.length() + 1 < contributorName.length()) { - familyName = contributorName.substring(givenName.length() + 1); - } + contributor_map.put("nameType", "Organizational"); + } + writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); - } else { - // default - if (isOrganization || ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName))) { 
- contributor_map.put("nameType", "Organizational"); - } - writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - } + if (contributorObj.containsKey("givenName")) { + writeFullElement(xmlw, null, "givenName", null, contributorObj.getString("givenName"), language); + } + if (contributorObj.containsKey("familyName")) { + writeFullElement(xmlw, null, "familyName", null, contributorObj.getString("familyName"), language); } if (StringUtils.isNotBlank(contributorAffiliation)) { @@ -1291,26 +1203,17 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) { // String seriesName = null; String seriesInformation = null; - - Set fieldDTOs = fieldDTO.getSingleCompound(); - for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - /*if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) { - seriesName = next.getSinglePrimitive(); - }*/ - if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) { - seriesInformation = next.getSinglePrimitive(); + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) { + seriesInformation = next.getSinglePrimitive(); + } + } + if (StringUtils.isNotBlank(seriesInformation)) { + description_check = writeOpenTag(xmlw, "descriptions", description_check); + writeDescriptionElement(xmlw, "SeriesInformation", seriesInformation, language); } - } - - /*if (StringUtils.isNotBlank(seriesName)){ - contributor_check = writeOpenTag(xmlw, "descriptions", description_check); - - writeDescriptionElement(xmlw, "SeriesInformation", seriesName); - }*/ - if (StringUtils.isNotBlank(seriesInformation)) { - description_check = writeOpenTag(xmlw, "descriptions", description_check); - writeDescriptionElement(xmlw, "SeriesInformation", seriesInformation, language); } } } From 3bb7cbfae2072f7bb4f5b11567cd0b02c0b4bb02 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 11 May 2023 17:27:05 -0400 Subject: [PATCH 0058/1092] add prodPlac --- .../iq/dataverse/api/imports/ImportDDIServiceBean.java | 8 +++++--- .../harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 6 +++++- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index bafd7267acb..ae98e7e76ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1339,6 +1339,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th List> producers = new ArrayList<>(); List> grants = new ArrayList<>(); List> software = new ArrayList<>(); + List prodPlac = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1354,9 +1355,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("prodDate")) { citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate"))); } else if (xmlr.getLocalName().equals("prodPlac")) { - List prodPlac = new ArrayList<>(); - 
prodPlac.add(parseText(xmlr, "prodPlac")); - citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); + prodPlac.add(parseText(xmlr)); } else if (xmlr.getLocalName().equals("software")) { HashSet set = new HashSet<>(); addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version")); @@ -1389,6 +1388,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th if (producers.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers)); } + if (prodPlac.size() > 0) { + citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); + } return; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index a647c2a6f2a..cd9311ec518 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1006,7 +1006,11 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT // productionPlace was made multiple as of 5.14: // (a quick backward compatibility check was added to dto2PrimitiveList(), // see the method for details) - writeFullElementList(xmlw, "prodPlac", dto2PrimitiveList(version, DatasetFieldConstant.productionPlace)); + + FieldDTO prodPlac = dto2FieldDTO( version, DatasetFieldConstant.productionPlace, "citation" ); + if (prodPlac != null) { + writeMultipleElement(xmlw, "prodPlac", prodPlac, null); + } writeSoftwareElement(xmlw, version); writeGrantElement(xmlw, version); From cd47f9389e881866a45792b5d9cf9b286c5d7fa7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 09:30:23 +0200 Subject: [PATCH 0059/1092] chore(deps): update Nimbus SDK to 10.9.1 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 49443f62453..8764e4f493d 100644 --- a/pom.xml +++ b/pom.xml @@ -385,7 +385,7 @@ com.nimbusds oauth2-oidc-sdk - 10.7.1 + 10.9.1 From 94598bd66de2ee5a535cca6aab63b020ad95f65b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:04:39 +0200 Subject: [PATCH 0060/1092] feat(auth): pass OAuth2/OIDC state parameter to provider When the client is returning from the provider to us, carrying along the authorization code we need to retrieve user details, we also receive again the state. The state was generated and sent by us, and will not be altered by the provider, which makes it perfect to identify the original request we built before sending the client to the provider. Passing this state to the provider enables the provider to reuse this information. This is crucial to enable PKCE support, as we need to remember which secret code we sent to the provider - otherwise we will not be able to verify the authz code. Tests have been adapted. 
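For illustration, a minimal sketch of the state round trip this enables, assuming the verifierCache map introduced in the diff below (buildRequest and exchangeCode are hypothetical stand-ins for the actual Nimbus SDK calls):

    // Outbound: generate the PKCE verifier and remember it under the state,
    // which is unique per authentication attempt.
    public String buildAuthzUrl(String state, String callbackUrl) {
        CodeVerifier pkceVerifier = new CodeVerifier(); // random secret
        verifierCache.put(state, pkceVerifier);
        return buildRequest(state, pkceVerifier, callbackUrl).toURI().toString();
    }

    // Inbound: the provider echoes the state back unchanged, so it is the key
    // that recovers (and invalidates) the verifier for this exact request.
    public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) {
        CodeVerifier pkceVerifier = verifierCache.remove(state);
        return exchangeCode(code, pkceVerifier, redirectUrl);
    }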
--- .../oauth2/AbstractOAuth2AuthenticationProvider.java | 3 ++- .../providers/oauth2/OAuth2LoginBackingBean.java | 2 +- .../providers/oauth2/OAuth2LoginBackingBeanTest.java | 11 ++++++++--- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java index 01139cd2e27..373a295487d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java @@ -139,6 +139,7 @@ public OAuth20Service getService(String callbackUrl) { * Receive user data from OAuth2 provider after authn/z has been successfull. (Callback view uses this) * Request a token and access the resource, parse output and return user details. * @param code The authz code sent from the provider + * @param state The state which was communicated between us and the provider, identifying the exact request * @param redirectUrl The redirect URL (some providers require this when fetching the access token, e. g. Google) * @return A user record containing all user details accessible for us * @throws IOException Thrown when communication with the provider fails @@ -146,7 +147,7 @@ public OAuth20Service getService(String callbackUrl) { * @throws InterruptedException Thrown when the requests thread is failing * @throws ExecutionException Thrown when the requests thread is failing */ - public OAuth2UserRecord getUserRecord(String code, String redirectUrl) + public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception, InterruptedException, ExecutionException { OAuth20Service service = getService(redirectUrl); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index c5be41a014a..7b52f2e9b16 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -100,7 +100,7 @@ public void exchangeCodeForToken() throws IOException { if (oIdp.isPresent() && code.isPresent()) { AbstractOAuth2AuthenticationProvider idp = oIdp.get(); - oauthUser = idp.getUserRecord(code.get(), systemConfig.getOAuth2CallbackUrl()); + oauthUser = idp.getUserRecord(code.get(), req.getParameter("state"), systemConfig.getOAuth2CallbackUrl()); // Throw an error if this authentication method is disabled: // (it's not clear if it's possible at all, for somebody to get here with diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java index 80249cc89e8..0c54c050d79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java @@ -105,6 +105,7 @@ class ecft { @Mock DataverseSession session; @Mock OAuth2TokenDataServiceBean oauth2Tokens; Optional redirect = Optional.of("/hellotest"); + String state; 
@BeforeEach void setUp() throws IOException { @@ -118,7 +119,11 @@ void setUp() throws IOException { when(externalContextMock.getRequest()).thenReturn(requestMock); lenient().when(externalContextMock.getFlash()).thenReturn(flashMock); lenient().when(requestMock.getReader()).thenReturn(reader); - doReturn(loginBackingBean.createState(testIdp, this.redirect)).when(requestMock).getParameter("state"); + + // Save the state as we need it for injection (necessary because of PKCE support) + state = loginBackingBean.createState(testIdp, this.redirect); + doReturn(state).when(requestMock).getParameter("state"); + // travel in time at least 10 milliseconds (remote calls & redirects are much likely longer) // (if not doing this tests become flaky on fast machinas) loginBackingBean.clock = Clock.offset(constantClock, Duration.ofMillis(10)); @@ -140,7 +145,7 @@ void newUser() throws Exception { // fake the code received from the provider when(requestMock.getParameter("code")).thenReturn(code); // let's deep-fake the result of getUserRecord() - doReturn(userRecord).when(testIdp).getUserRecord(code, null); + doReturn(userRecord).when(testIdp).getUserRecord(code, state, null); // WHEN (& then) // capture the redirect target from the faces context @@ -168,7 +173,7 @@ void existingUser() throws Exception { // fake the code received from the provider when(requestMock.getParameter("code")).thenReturn(code); // let's deep-fake the result of getUserRecord() - doReturn(userRecord).when(testIdp).getUserRecord(code, null); + doReturn(userRecord).when(testIdp).getUserRecord(code, state, null); doReturn(tokenData).when(userRecord).getTokenData(); // also fake the result of the lookup in the auth service doReturn(userIdentifier).when(userRecord).getUserRecordIdentifier(); From 5fbee2e067722e7ff649dd30ae3e8afa90851958 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:05:03 +0200 Subject: [PATCH 0061/1092] feat(auth): add OIDC PKCE settings to JvmSettings --- .../java/edu/harvard/iq/dataverse/settings/JvmSettings.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index dc9267805e6..ba90d895ae9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -121,6 +121,9 @@ public enum JvmSettings { OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"), OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"), OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"), + SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"), + OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"), + OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"), ; From c0d21cc9f935b3dc43653baa3d132681ff94c1c0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:07:35 +0200 Subject: [PATCH 0062/1092] feat(auth): enable PKCE usage in OIDC provider - Adding PKCE parameters to constructor - Adding a hashmap to cache the code verifiers mapped by the unique state we generate - Enabling the actual workflow of PKCE --- .../oauth2/oidc/OIDCAuthProvider.java | 38 ++++++++++++++++--- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 4b6c575cfaf..91d552618ef 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -18,6 +18,8 @@ import com.nimbusds.oauth2.sdk.id.ClientID; import com.nimbusds.oauth2.sdk.id.Issuer; import com.nimbusds.oauth2.sdk.id.State; +import com.nimbusds.oauth2.sdk.pkce.CodeChallengeMethod; +import com.nimbusds.oauth2.sdk.pkce.CodeVerifier; import com.nimbusds.oauth2.sdk.token.BearerAccessToken; import com.nimbusds.openid.connect.sdk.AuthenticationRequest; import com.nimbusds.openid.connect.sdk.Nonce; @@ -39,7 +41,9 @@ import java.net.URI; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.logging.Logger; @@ -57,12 +61,25 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { final Issuer issuer; final ClientAuthentication clientAuth; final OIDCProviderMetadata idpMetadata; + final boolean pkceEnabled; + final CodeChallengeMethod pkceMethod; - public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException { + /** + * Using PKCE, we create and send a special {@link CodeVerifier}. This contains a secret + * we need again when verifying the response by the provider, thus the cache. + */ + private final Map verifierCache = new ConcurrentHashMap<>(); + + public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL, + boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException { this.clientSecret = aClientSecret; // nedded for state creation this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret)); this.issuer = new Issuer(issuerEndpointURL); + this.idpMetadata = getMetadata(); + + this.pkceEnabled = pkceEnabled; + this.pkceMethod = CodeChallengeMethod.parse(pkceMethod); } /** @@ -147,6 +164,7 @@ public String buildAuthzUrl(String state, String callbackUrl) { State stateObject = new State(state); URI callback = URI.create(callbackUrl); Nonce nonce = new Nonce(); + CodeVerifier pkceVerifier = pkceEnabled ? new CodeVerifier() : null; AuthenticationRequest req = new AuthenticationRequest.Builder(new ResponseType("code"), Scope.parse(this.scope), @@ -154,9 +172,15 @@ public String buildAuthzUrl(String state, String callbackUrl) { callback) .endpointURI(idpMetadata.getAuthorizationEndpointURI()) .state(stateObject) + // Called method is nullsafe - will disable sending a PKCE challenge in case the verifier is not present + .codeChallenge(pkceVerifier, pkceMethod) .nonce(nonce) .build(); + // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us + // the auth code! We use the state to cache the verifier, as the state is unique per authentication event. 
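+ // Note: getUserRecord() below removes this entry again via verifierCache.remove(state), so each verifier is used at most once.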
+ this.verifierCache.put(state, pkceVerifier); + return req.toURI().toString(); } @@ -172,10 +196,14 @@ public String buildAuthzUrl(String state, String callbackUrl) { * @throws ExecutionException Thrown when the requests thread is failing */ @Override - public OAuth2UserRecord getUserRecord(String code, String redirectUrl) - throws IOException, OAuth2Exception, InterruptedException, ExecutionException { - // Create grant object - AuthorizationGrant codeGrant = new AuthorizationCodeGrant(new AuthorizationCode(code), URI.create(redirectUrl)); + public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception { + // Retrieve the verifier from the cache and clear from the cache. If not found, will be null. + // Will be sent to token endpoint for verification, so if required but missing, will lead to exception. + CodeVerifier verifier = verifierCache.remove(state); + + // Create grant object - again, this is null-safe for the verifier + AuthorizationGrant codeGrant = new AuthorizationCodeGrant( + new AuthorizationCode(code), URI.create(redirectUrl), verifier); // Get Access Token first Optional accessToken = getAccessToken(codeGrant); From ef4d192156bd7efd6e83226c57dd7deea545a6e2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:08:37 +0200 Subject: [PATCH 0063/1092] feat(auth): extend OIDC provider factory to understand PKCE parameters To enable backward compatibility, default to disabled and method S256. --- .../oauth2/oidc/OIDCAuthenticationProviderFactory.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index 89cf1cb986d..3f8c18d0567 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -41,7 +41,9 @@ public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) th OIDCAuthProvider oidc = new OIDCAuthProvider( factoryData.get("clientId"), factoryData.get("clientSecret"), - factoryData.get("issuer") + factoryData.get("issuer"), + Boolean.parseBoolean(factoryData.getOrDefault("pkceEnabled", "false")), + factoryData.getOrDefault("pkceMethod", "S256") ); oidc.setId(aRow.getId()); @@ -60,7 +62,9 @@ public static AuthenticationProvider buildFromSettings() throws AuthorizationSet OIDCAuthProvider oidc = new OIDCAuthProvider( JvmSettings.OIDC_CLIENT_ID.lookup(), JvmSettings.OIDC_CLIENT_SECRET.lookup(), - JvmSettings.OIDC_AUTH_SERVER_URL.lookup() + JvmSettings.OIDC_AUTH_SERVER_URL.lookup(), + JvmSettings.OIDC_PKCE_ENABLED.lookupOptional(Boolean.class).orElse(false), + JvmSettings.OIDC_PKCE_METHOD.lookupOptional().orElse("S256") ); oidc.setId("oidc-mpconfig"); From 37bcc3a69930879810c7a7eb87f465219a00a24d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:28:32 +0200 Subject: [PATCH 0064/1092] doc(auth): add OIDC PKCE configuration --- .../source/installation/oidc.rst | 34 ++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index fbcbd3eb4ad..9848d73b189 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ 
b/doc/sphinx-guides/source/installation/oidc.rst @@ -69,9 +69,32 @@ After adding a provider, the Log In page will by default show the "builtin" prov In contrast to our :doc:`oauth2`, you can use multiple providers by creating distinct configurations enabled by the same technology and without modifying the Dataverse Software code base (standards for the win!). + +.. _oidc-pkce: + +Enabling PKCE Security +^^^^^^^^^^^^^^^^^^^^^^ + +Many providers these days support or even require the usage of `PKCE `_ to safeguard against +some attacks and enable public clients that cannot have a secure secret to still use OpenID Connect (or OAuth2). + +The Dataverse built OIDC client can be enabled to use PKCE and which method to use when creating the code challenge. +See also `this explanation of the flow `_ +for details on how this works. + +As we are using the `Nimbus SDK `_ as our client +library, we support the standard ``PLAIN`` and ``S256`` code challenge methods. "SHA-256 method" is the default +as recommend in `RFC7636 `_. If your provider needs some +other method (unlikely), please open an issue. + +The provisioning sections below contain in the example the parameters you may use to configure PKCE. + Provision via REST API ^^^^^^^^^^^^^^^^^^^^^^ +Note: you may omit the PKCE related settings from ``factoryData`` below if you don't plan on using PKCE - default is +disabled. + Please create a ``my-oidc-provider.json`` file like this, replacing every ``<...>`` with your values: .. code-block:: json @@ -81,7 +104,7 @@ Please create a ``my-oidc-provider.json`` file like this, replacing every ``<... "factoryAlias":"oidc", "title":"", "subtitle":"", - "factoryData":"type: oidc | issuer: | clientId: | clientSecret: ", + "factoryData":"type: oidc | issuer: | clientId: | clientSecret: | pkceEnabled: | pkceMethod: ", "enabled":true } @@ -105,6 +128,7 @@ The following options are available: .. list-table:: :widths: 25 55 10 10 :header-rows: 1 + :align: left * - Option - Description @@ -126,6 +150,14 @@ The following options are available: - The base URL of the OpenID Connect (OIDC) server as explained above. - Y - \- + * - ``dataverse.auth.oidc.pkce.enabled`` + - Set to ``true`` to enable :ref:`PKCE ` in auth flow. + - N + - ``false`` + * - ``dataverse.auth.oidc.pkce.method`` + - Set code challenge method. Default equals best practice. + - N + - ``S256`` * - ``dataverse.auth.oidc.title`` - The UI visible name for this provider in login options. 
- N From 5ed66e6bb481bd94c81965b4032e642a058943e4 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 12:20:20 +0200 Subject: [PATCH 0065/1092] chore(deps): update to Testcontainers 1.18.1 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 7f8f73e049a..060fc22b4d2 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -167,7 +167,7 @@ 5.0.0 - 1.17.6 + 1.18.1 2.10.1 4.13.1 From b9720c868b89e9db191b1425fb31574771bc1cee Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:35:42 -0400 Subject: [PATCH 0066/1092] Update src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java Co-authored-by: Philip Durbin --- .../java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index cd9311ec518..819a14c6c68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -202,7 +202,6 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage()); writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle)); - //writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle)); FieldDTO altField = dto2FieldDTO( version, DatasetFieldConstant.alternativeTitle, "citation" ); if (altField != null) { writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage()); From 5684140dff737f092f195ffeefddbf5074a409e5 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:37:18 -0400 Subject: [PATCH 0067/1092] Update src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java Co-authored-by: Philip Durbin --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 334b18f4601..e858dee6d2b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -331,7 +331,7 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); if (block != null) { - logger.info("Block is not empty"); + logger.fine("Block is not empty"); List fieldsBlock = block.getFields(); if (fieldsBlock != null) { for (FieldDTO fieldDTO : fieldsBlock) { From 64f4f1f0af83384b8157f2b13d29d941cb2aac77 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:37:31 -0400 Subject: [PATCH 0068/1092] Update 
src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java Co-authored-by: Philip Durbin --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index e858dee6d2b..146c442526a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -335,7 +335,7 @@ private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String ti List fieldsBlock = block.getFields(); if (fieldsBlock != null) { for (FieldDTO fieldDTO : fieldsBlock) { - logger.info(titleType + " " + fieldDTO.getTypeName()); + logger.fine(titleType + " " + fieldDTO.getTypeName()); if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { logger.info("Found Alt title"); List fields = fieldDTO.getMultiplePrimitive(); From 37a372a23c92d4eb2c946a50bf00b91319e141de Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:37:44 -0400 Subject: [PATCH 0069/1092] Update src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java Co-authored-by: Philip Durbin --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 146c442526a..037428d0ea1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -337,7 +337,7 @@ private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String ti for (FieldDTO fieldDTO : fieldsBlock) { logger.fine(titleType + " " + fieldDTO.getTypeName()); if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { - logger.info("Found Alt title"); + logger.fine("Found Alt title"); List fields = fieldDTO.getMultiplePrimitive(); for (String value : fields) { if (!writeTitleElement(xmlw, titleType, value, title_check, language)) From 2ef0e5f0231367b90e962e73a638aab4c84a9ada Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 15 May 2023 12:39:32 -0400 Subject: [PATCH 0070/1092] test --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 -- .../edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json | 6 ++++++ .../edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml | 2 ++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 334b18f4601..6dca1ac348a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -322,8 +322,6 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle); title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language); - //String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle); - 
//title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language); title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, "citation", title_check, language); writeEndTag(xmlw, title_check); } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json index 7845f77d33f..9bdc7e45349 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json @@ -29,6 +29,12 @@ "typeClass": "primitive", "value": "Darwin's Finches" }, + { + "typeName": "alternativeTitle", + "multiple": true, + "typeClass": "primitive", + "value": ["Darwin's Finches Alternative Title1", "Darwin's Finches Alternative Title2"] + }, { "typeName": "author", "multiple": true, diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml index 5bbfdae09ac..6730c44603a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml @@ -17,6 +17,8 @@ Darwin's Finches + Darwin's Finches Alternative Title1 + Darwin's Finches Alternative Title2 doi:10.5072/FK2/PCA2E3 From 4b4c9155048f0ee074f6ba9d01a12e02ea4abd00 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 15 May 2023 12:54:37 -0400 Subject: [PATCH 0071/1092] docs --- doc/release-notes/9428-alternative-title.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9428-alternative-title.md b/doc/release-notes/9428-alternative-title.md index d6eaa680612..3bc74f218b5 100644 --- a/doc/release-notes/9428-alternative-title.md +++ b/doc/release-notes/9428-alternative-title.md @@ -3,4 +3,7 @@ Alternative Title is made repeatable. `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` - One will also need to update solr schema: Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml` -Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` +Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` + +Since Alternative Title is repeatable now, old json apis would not be compatable with a new version since value of alternative title has changed from simple string to an array. 
+For example, instead of "value": "Alternative Title", the value can be "value": ["Alternative Title1", "Alternative Title2"] From 07c70d8933d550eb01f3bd2cfb3626b9a2d94f8c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:01:01 -0500 Subject: [PATCH 0072/1092] flyway update script --- .../resources/db/migration/V5.13.0.3__guestbook-on-request.sql | 1 + 1 file changed, 1 insertion(+) create mode 100644 src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql diff --git a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql new file mode 100644 index 00000000000..1ffc87dfa32 --- /dev/null +++ b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql @@ -0,0 +1 @@ +ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS request_state VARCHAR(64); From c8726958e45d6e25bc4ca773c55ce58afc1bf431 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:56:33 -0500 Subject: [PATCH 0073/1092] fix duplicate action --- src/main/webapp/filesFragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 6122b86b274..7395998042f 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -483,7 +483,8 @@ styleClass="btn btn-default btn-request" action="#{DatasetPage.requestAccessMultipleFiles()}" update="@form, @([id$=messagePanel])" - disabled="#{DatasetPage.locked}"> + disabled="#{DatasetPage.locked or !DatasetPage.fileAccessRequestMultiButtonEnabled}"> + #{bundle['file.requestAccess']} Date: Tue, 6 Dec 2022 13:57:13 -0500 Subject: [PATCH 0074/1092] add comment to check possible unused method --- .../edu/harvard/iq/dataverse/DatasetPage.java | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 9294620d790..33b598083da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3209,6 +3209,68 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa } + /*helper function to filter the selected files into <selectedDownloadableFiles>, + <selectedNonDownloadableFiles> and <selectedRestrictedFiles> and <selectedUnrestrictedFiles> for reuse*/ + + private boolean filterSelectedFiles(){ + setSelectedDownloadableFiles(new ArrayList<>()); + setSelectedNonDownloadableFiles(new ArrayList<>()); + setSelectedRestrictedFiles(new ArrayList<>()); + setSelectedUnrestrictedFiles(new ArrayList<>()); + + boolean someFiles = false; + for (FileMetadata fmd : this.selectedFiles){ + if(this.fileDownloadHelper.canDownloadFile(fmd)){ + getSelectedDownloadableFiles().add(fmd); + someFiles=true; + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(fmd.isRestricted()){ + getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not + someFiles=true; + } else { + getSelectedUnrestrictedFiles().add(fmd); + someFiles=true; + } + + } + return someFiles; + } +//QDRADA - still needed? + public void validateFilesForRequestAccess(){ + this.filterSelectedFiles(); + + if(!dataset.isFileAccessRequest()){ //is this needed?
wouldn't be able to click Request Access if this !isFileAccessRequest() + return; + } + + if(!this.selectedRestrictedFiles.isEmpty()){ + ArrayList nonDownloadableRestrictedFiles = new ArrayList<>(); + + List userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles(); + + for(FileMetadata fmd : this.selectedRestrictedFiles){ + if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){ + nonDownloadableRestrictedFiles.add(fmd); + } + } + + if(!nonDownloadableRestrictedFiles.isEmpty()){ + guestbookResponse.setDataFile(null); + guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles)); + + if(this.isGuestbookAndTermsPopupRequired()){ //need to pop up the guestbook and terms dialog + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } else { + this.requestAccessMultipleFiles(); + } + } else { + //popup select data files + } + } + } + private boolean selectAllFiles; public boolean isSelectAllFiles() { From 76b3b181d449336a9659bf5dc54637be90ff8679 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:57:30 -0500 Subject: [PATCH 0075/1092] debug logging --- .../iq/dataverse/engine/command/impl/RequestAccessCommand.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index b87b9a73aa5..df7c7367f2d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -52,6 +52,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { } //if user already has permission to download file or the file is public throw command exception + logger.info("User: " + this.getRequest().getAuthenticatedUser().getName()); + logger.info("File: " + file.getId()); if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this); } From bd603ec7a803542460eea3a66600572bdf85f57a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 14:17:45 -0500 Subject: [PATCH 0076/1092] more debug --- .../command/impl/RequestAccessCommand.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index df7c7367f2d..d710ed66551 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -44,6 +44,19 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean s this.sendNotification = sendNotification; } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { + this(dvRequest, file, gbr, false); + } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr, Boolean sendNotification) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) 
dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = sendNotification; + } @Override public DataFile execute(CommandContext ctxt) throws CommandException { @@ -53,7 +66,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { //if user already has permission to download file or the file is public throw command exception logger.info("User: " + this.getRequest().getAuthenticatedUser().getName()); - logger.info("File: " + file.getId()); + logger.info("File: " + file.getId() + " : restricted?: " + file.isRestricted()); + logger.info("permission?: " + ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)); if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this); } From 5e29a0600b6d4fe59d89191897bc61cd45d78494 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 10:20:39 -0500 Subject: [PATCH 0077/1092] more debug --- .../edu/harvard/iq/dataverse/DataFile.java | 11 + .../iq/dataverse/DataFileServiceBean.java | 53 ++++ .../edu/harvard/iq/dataverse/Dataset.java | 6 + .../edu/harvard/iq/dataverse/DatasetPage.java | 134 +++++++-- .../iq/dataverse/FileAccessRequest.java | 262 ++++++++++++++---- .../FileAccessRequestServiceBean.java | 89 ++++++ .../iq/dataverse/FileDownloadHelper.java | 30 +- .../iq/dataverse/FileDownloadServiceBean.java | 102 ++++++- .../edu/harvard/iq/dataverse/FilePage.java | 6 + .../iq/dataverse/GuestbookResponse.java | 13 +- .../iq/dataverse/ManagePermissionsPage.java | 47 ++++ .../iq/dataverse/UserNotification.java | 2 +- .../UserNotificationServiceBean.java | 22 ++ .../users/AuthenticatedUser.java | 25 ++ .../command/impl/RequestAccessCommand.java | 33 ++- .../harvard/iq/dataverse/util/FileUtil.java | 66 +++++ .../harvard/iq/dataverse/util/MailUtil.java | 2 + src/main/webapp/dataset.xhtml | 24 +- .../file-download-button-fragment.xhtml | 74 ++--- src/main/webapp/file.xhtml | 40 ++- src/main/webapp/filesFragment.xhtml | 9 +- ...l => guestbook-terms-popup-fragment.xhtml} | 17 ++ .../iq/dataverse/util/FileUtilTest.java | 2 +- 23 files changed, 896 insertions(+), 173 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java rename src/main/webapp/{file-request-access-popup-fragment.xhtml => guestbook-terms-popup-fragment.xhtml} (65%) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 28d814d9844..c43800c57ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -200,6 +200,17 @@ public String toString() { @OneToMany(mappedBy="dataFile", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List guestbookResponses; + @OneToMany(mappedBy="dataFile",fetch = FetchType.LAZY,cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}) + private List fileAccessRequests; + + public List getFileAccessRequests(){ + return fileAccessRequests; + } + + public void setFileAccessRequests(List fARs){ + this.fileAccessRequests = fARs; + } + public List getGuestbookResponses() { return guestbookResponses; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java 
index 196f84b6877..449e8d351c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -154,6 +154,27 @@ public DataFile find(Object pk) { }*/ + public List<DataFile> findAll(List<Long> fileIds){ + List<DataFile> dataFiles = new ArrayList<>(); + + for (Long fileId : fileIds){ + dataFiles.add(find(fileId)); + } + + return dataFiles; + } + + public List<DataFile> findAll(String fileIdsAsString){ + ArrayList<Long> dataFileIds = new ArrayList<>(); + + String[] fileIds = fileIdsAsString.split(","); + for (String fId : fileIds){ + dataFileIds.add(Long.parseLong(fId)); + } + + return findAll(dataFileIds); + } + public DataFile findByGlobalId(String globalId) { return (DataFile) dvObjectService.findByGlobalId(globalId, DvObject.DType.DataFile); } @@ -350,6 +371,18 @@ public FileMetadata findMostRecentVersionFileIsIn(DataFile file) { return fileMetadatas.get(0); } } + + public List<DataFile> findAllCheapAndEasy(String fileIdsAsString){ + //assumption is that the fileIds are separated by ',' + ArrayList<DataFile> dataFilesFound = new ArrayList<>(); + String[] fileIds = fileIdsAsString.split(","); + for (String fId : fileIds){ //look up each id in the list, not just the first one + DataFile df = this.findCheapAndEasy(Long.parseLong(fId)); + if(df != null){ + dataFilesFound.add(df); + } + } + + return dataFilesFound; + } public DataFile findCheapAndEasy(Long id) { DataFile dataFile; @@ -802,6 +835,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion dataFile.addFileAccessRequester(au); } + dataFile.setFileAccessRequesters(retrieveFileAccessRequesters(dataFile)); dataFiles.add(dataFile); filesMap.put(dataFile.getId(), i++); } @@ -821,6 +855,25 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion owner.setFiles(dataFiles); } + private List<AuthenticatedUser> retrieveFileAccessRequesters(DataFile fileIn) { + List<AuthenticatedUser> retList = new ArrayList<>(); + + // List requesters = em.createNativeQuery("select authenticated_user_id + // from fileaccessrequests where datafile_id = + // "+fileIn.getId()).getResultList(); + List requesters = em.createNativeQuery("select authenticated_user_id from fileaccessrequests where datafile_id = " + fileIn.getId() + " and request_state='CREATED'").getResultList(); + + for (Object userIdObj : requesters) { + Long userId = (Long) userIdObj; + AuthenticatedUser user = userService.find(userId); + if (user != null) { + retList.add(user); + } + } + + return retList; + } + private List retrieveFileMetadataForVersion(Dataset dataset, DatasetVersion version, List dataFiles, Map filesMap, Map categoryMap) { List retList = new ArrayList<>(); Map> categoryMetaMap = new HashMap<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 683b6687c8b..305e9a404e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -858,6 +858,12 @@ public String getHarvestingDescription() { return null; } + public boolean hasEnabledGuestbook(){ + Guestbook gb = this.getGuestbook(); + + return ( gb != null && gb.isEnabled()); + } + @Override public boolean equals(Object object) { // TODO: Warning - this method won't work in the case the id fields are not set diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 33b598083da..8754c26a3b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -366,6 +366,19 @@ public void
setShowIngestSuccess(boolean showIngestSuccess) { this.showIngestSuccess = showIngestSuccess; } + private String termsGuestbookPopupAction = ""; + + public void setTermsGuestbookPopupAction(String popupAction){ + if(popupAction != null && popupAction.length() > 0){ + this.termsGuestbookPopupAction = popupAction; + } + + } + + public String getTermsGuestbookPopupAction(){ + return termsGuestbookPopupAction; + } + // TODO: Consider renaming "configureTools" to "fileConfigureTools". List<ExternalTool> configureTools = new ArrayList<>(); // TODO: Consider renaming "exploreTools" to "fileExploreTools". @@ -3137,9 +3150,14 @@ public void setValidateFilesOutcome(String validateFilesOutcome) { this.validateFilesOutcome = validateFilesOutcome; } - public boolean validateFilesForDownload(boolean guestbookRequired, boolean downloadOriginal) { - setSelectedDownloadableFiles(new ArrayList<>()); - setSelectedNonDownloadableFiles(new ArrayList<>()); + public boolean validateFilesForDownload(boolean downloadOriginal){ + if (this.selectedFiles.isEmpty()) { + PrimeFaces.current().executeScript("PF('selectFilesForDownload').show()"); + return false; + } else { + this.filterSelectedFiles(); + } + //assume Pass unless something bad happens setValidateFilesOutcome("Pass"); Long bytes = (long) 0; @@ -3170,6 +3188,13 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return false; } + +//QDRADA handle new state from + /*if (isTermsPopupRequired() || isGuestbookPopupRequiredAtDownload()){ + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } + */ + // If some of the files were restricted and we had to drop them off the // list, and NONE of the files are left on the downloadable list // - we show them a "you're out of luck" popup: @@ -3183,10 +3208,11 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return true; } - if (guestbookRequired) { + //QDRADA - still needed? +/* if (guestbookRequired) { setValidateFilesOutcome("GuestbookRequired"); } - +*/ return true; } @@ -3208,6 +3234,67 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa guestbookResponse.setDownloadtype("Download"); } + /*helper function to filter the selected files into selectedDownloadableFiles and selectedNonDownloadableFiles, + and selectedRestrictedFiles and selectedUnrestrictedFiles, for reuse*/ + + private boolean filterSelectedFiles(){ + setSelectedDownloadableFiles(new ArrayList<>()); + setSelectedNonDownloadableFiles(new ArrayList<>()); + setSelectedRestrictedFiles(new ArrayList<>()); + setSelectedUnrestrictedFiles(new ArrayList<>()); + + boolean someFiles = false; + for (FileMetadata fmd : this.selectedFiles){ + if(this.fileDownloadHelper.canDownloadFile(fmd)){ + getSelectedDownloadableFiles().add(fmd); + someFiles=true; + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(fmd.isRestricted()){ + getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not + someFiles=true; + } else { + getSelectedUnrestrictedFiles().add(fmd); + someFiles=true; + } + + } + return someFiles; + } + + public void validateFilesForRequestAccess(){ + this.filterSelectedFiles(); + + if(!dataset.isFileAccessRequest()){ //is this needed?
wouldn't be able to click Request Access if this !isFileAccessRequest() + return; + } + + if(!this.selectedRestrictedFiles.isEmpty()){ + ArrayList<FileMetadata> nonDownloadableRestrictedFiles = new ArrayList<>(); + + List<DataFile> userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles(); + + for(FileMetadata fmd : this.selectedRestrictedFiles){ + if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){ + nonDownloadableRestrictedFiles.add(fmd); + } + } + + if(!nonDownloadableRestrictedFiles.isEmpty()){ + guestbookResponse.setDataFile(null); + guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles)); + + if(this.isGuestbookAndTermsPopupRequired()){ //need to pop up the guestbook and terms dialog + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } else { + this.requestAccessMultipleFiles(); + } + } else { + //popup select data files + } + } + } /*helper function to filter the selected files into selectedDownloadableFiles and selectedNonDownloadableFiles, and selectedRestrictedFiles and selectedUnrestrictedFiles, for reuse*/ @@ -3295,26 +3382,23 @@ public void toggleAllSelected(){ // helper Method public String getSelectedFilesIdsString() { - String downloadIdString = ""; - for (FileMetadata fmd : this.selectedFiles){ - if (!StringUtil.isEmpty(downloadIdString)) { - downloadIdString += ","; - } - downloadIdString += fmd.getDataFile().getId(); - } - return downloadIdString; + return this.getFilesIdsString(this.selectedFiles); } - + // helper Method public String getSelectedDownloadableFilesIdsString() { - String downloadIdString = ""; - for (FileMetadata fmd : this.selectedDownloadableFiles){ - if (!StringUtil.isEmpty(downloadIdString)) { - downloadIdString += ","; + return this.getFilesIdsString(this.selectedDownloadableFiles); + } + + public String getFilesIdsString(List<FileMetadata> fileMetadatas){ //for reuse + String idString = ""; + for (FileMetadata fmd : fileMetadatas){ + if (!StringUtil.isEmpty(idString)) { + idString += ","; } - downloadIdString += fmd.getDataFile().getId(); + idString += fmd.getDataFile().getId(); } - return downloadIdString; + return idString; } @@ -5221,6 +5305,10 @@ public boolean isDownloadPopupRequired() { public boolean isRequestAccessPopupRequired() { return FileUtil.isRequestAccessPopupRequired(workingVersion); } + + public boolean isGuestbookAndTermsPopupRequired() { + return FileUtil.isGuestbookAndTermsPopupRequired(workingVersion); + } public String requestAccessMultipleFiles() { @@ -5236,11 +5324,11 @@ public String requestAccessMultipleFiles() { for (FileMetadata fmd : selectedFiles){ fileDownloadHelper.addMultipleFilesForRequestAccess(fmd.getDataFile()); } - if (isRequestAccessPopupRequired()) { + if (isGuestbookAndTermsPopupRequired()) { //RequestContext requestContext = RequestContext.getCurrentInstance(); - PrimeFaces.current().executeScript("PF('requestAccessPopup').show()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show()"); //the popup will call writeGuestbookAndRequestAccess(); return ""; - } else { + }else { //No popup required fileDownloadHelper.requestAccessIndirect(); return ""; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java index 76c5df4409a..723a54c8587 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java @@ -1,91 +1,237 @@ package edu.harvard.iq.dataverse; -import
edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; - -import javax.persistence.Column; -import javax.persistence.Embeddable; -import javax.persistence.EmbeddedId; +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.GeneratedValue; +import javax.persistence.UniqueConstraint; import javax.persistence.Entity; +import javax.persistence.Table; +import javax.persistence.Index; +import javax.persistence.Id; import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; import javax.persistence.ManyToOne; -import javax.persistence.MapsId; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import java.io.Serializable; -import java.util.Date; +import javax.persistence.OneToOne; +import javax.persistence.EnumType; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import javax.persistence.Column; +import javax.persistence.Enumerated; +import javax.persistence.GenerationType; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; + +/** + * + * @author Marina + */ @Entity -@Table(name = "fileaccessrequests") -public class FileAccessRequest { - @EmbeddedId - private FileAccessRequestKey id; +@Table(name = "fileaccessrequests", //having added the guestbookresponse_id column to fileaccessrequests + uniqueConstraints=@UniqueConstraint(columnNames={"datafile_id", "authenticated_user_id","request_state"}) //this may not make sense at some future point +) + +@NamedQueries({ + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserId", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId"), + @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseId", + query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId"), + @NamedQuery(name = "FileAccessRequest.findByDataFileId", + query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId"), + @NamedQuery(name = "FileAccessRequest.findByRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByDataFileIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.dataFile.id=:dataFileId and far.requestState=:requestState") +}) + +public class FileAccessRequest implements Serializable{ + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + @ManyToOne - @MapsId("dataFile") - @JoinColumn(name = "datafile_id") + @JoinColumn(nullable=false) private DataFile dataFile; + @ManyToOne - @MapsId("authenticatedUser") - @JoinColumn(name = "authenticated_user_id") - private AuthenticatedUser 
authenticatedUser; - - @Temporal(value = TemporalType.TIMESTAMP) - @Column(name = "creation_time") - private Date creationTime; - - public FileAccessRequestKey getId() { + @JoinColumn(name="authenticated_user_id",nullable=false) + private AuthenticatedUser user; + + @OneToOne + @JoinColumn(nullable=true) + private GuestbookResponse guestbookResponse; + + public enum RequestState {CREATED,EDITED,GRANTED,REJECTED,RESUBMIT,INVALIDATED,CLOSED}; + //private RequestState state; + @Enumerated(EnumType.STRING) + @Column(name="request_state", nullable=false ) + private RequestState requestState; + + public FileAccessRequest(){ + + } + + public FileAccessRequest(DataFile df, AuthenticatedUser au){ + setDataFile(df); + setRequester(au); + setState(RequestState.CREATED); + } + + public FileAccessRequest(DataFile df, AuthenticatedUser au, GuestbookResponse gbr){ + setDataFile(df); + setRequester(au); + setGuestbookResponse(gbr); + setState(RequestState.CREATED); + } + + public Long getId() { return id; } - public void setId(FileAccessRequestKey id) { + public void setId(Long id) { this.id = id; } - - public DataFile getDataFile() { + + public DataFile getDataFile(){ return dataFile; } - - public void setDataFile(DataFile dataFile) { - this.dataFile = dataFile; + + public final void setDataFile(DataFile df){ + this.dataFile = df; + } + + public AuthenticatedUser getRequester(){ + return user; + } + + public final void setRequester(AuthenticatedUser au){ + this.user = au; + } + + public GuestbookResponse getGuestbookResponse(){ + return guestbookResponse; + } + + public final void setGuestbookResponse(GuestbookResponse gbr){ + this.guestbookResponse = gbr; + } + + public RequestState getState() { + return this.requestState; + } + + public void setState(RequestState requestState) { + this.requestState = requestState; + } + + public String getStateLabel() { + if(isStateCreated()){ + return "created"; + } + if(isStateEdited()) { + return "edited"; + } + if(isStateGranted()) { + return "granted"; + } + if(isStateRejected()) { + return "rejected"; + } + if(isStateResubmit()) { + return "resubmit"; + } + if(isStateInvalidated()) { + return "invalidated"; + } + if(isStateClosed()) { + return "closed"; + } + return null; + } + + public void setStateCreated() { + this.requestState = RequestState.CREATED; + } + + public void setStateEdited() { + this.requestState = RequestState.EDITED; + } + + public void setStateGranted() { + this.requestState = RequestState.GRANTED; } - public AuthenticatedUser getAuthenticatedUser() { - return authenticatedUser; + public void setStateRejected() { + this.requestState = RequestState.REJECTED; } - public void setAuthenticatedUser(AuthenticatedUser authenticatedUser) { - this.authenticatedUser = authenticatedUser; + public void setStateResubmit() { + this.requestState = RequestState.RESUBMIT; + } + + public void setStateInvalidated() { + this.requestState = RequestState.INVALIDATED; } - public Date getCreationTime() { - return creationTime; + public void setStateClosed() { + this.requestState = RequestState.CLOSED; } - public void setCreationTime(Date creationTime) { - this.creationTime = creationTime; + + public boolean isStateCreated() { + return this.requestState == RequestState.CREATED; + } + + public boolean isStateEdited() { + return this.requestState == RequestState.EDITED; + } + + public boolean isStateGranted() { + return this.requestState == RequestState.GRANTED; } - @Embeddable - public static class FileAccessRequestKey implements Serializable { - @Column(name = 
"datafile_id") - private Long dataFile; - @Column(name = "authenticated_user_id") - private Long authenticatedUser; + public boolean isStateRejected() { + return this.requestState == RequestState.REJECTED; + } - public Long getDataFile() { - return dataFile; - } + public boolean isStateResubmit() { + return this.requestState == RequestState.RESUBMIT; + } + + public boolean isStateInvalidated() { + return this.requestState == RequestState.INVALIDATED; + } - public void setDataFile(Long dataFile) { - this.dataFile = dataFile; - } + public boolean isStateClosed() { + return this.requestState == RequestState.CLOSED; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } - public Long getAuthenticatedUser() { - return authenticatedUser; + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof FileAccessRequest)) { + return false; } - - public void setAuthenticatedUser(Long authenticatedUser) { - this.authenticatedUser = authenticatedUser; + FileAccessRequest other = (FileAccessRequest) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; } + return true; } -} + + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java new file mode 100644 index 00000000000..215e4695a75 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java @@ -0,0 +1,89 @@ +package edu.harvard.iq.dataverse; + +import java.util.List; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; + +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; + +/** + * + * @author Marina + */ +@Stateless +@Named +public class FileAccessRequestServiceBean { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + public FileAccessRequest find(Object pk) { + return em.find(FileAccessRequest.class, pk); + } + + public List findAll() { + return em.createQuery("select object(o) from FileAccessRequest as o order by o.id", FileAccessRequest.class).getResultList(); + } + + public List findAll(Long authenticatedUserId, Long fileId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", FileAccessRequest.class) + .setParameter("authenticatedUserId",authenticatedUserId) + .setParameter("dataFileId",fileId) + .setParameter("requestState",requestState) + .getResultList(); + } + + public List findAllByAuthenticedUserId(Long authenticatedUserId){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserId", FileAccessRequest.class) + .setParameter("authenticatedUserId", authenticatedUserId) + .getResultList(); + } + + public List findAllByGuestbookResponseId(Long guestbookResponseId){ + return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseId", FileAccessRequest.class) + .setParameter("guestbookResponseId", guestbookResponseId) + .getResultList(); + + } + + public List findAllByDataFileId(Long dataFileId){ + return em.createNamedQuery("FileAccessRequest.findByDataFileId", FileAccessRequest.class) + .setParameter("dataFileId", dataFileId) + .getResultList(); + } + + public List 
findAllByAuthenticatedUserIdAndRequestState(Long authenticatedUserId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndRequestState", FileAccessRequest.class) + .setParameter("authenticatedUserId", authenticatedUserId) + .setParameter("requestState",requestState) + .getResultList(); + } + + public List findAllByGuestbookResponseIdAndRequestState(Long guestbookResponseId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseIdAndRequestState", FileAccessRequest.class) + .setParameter("guestbookResponseId", guestbookResponseId) + .setParameter("requestState",requestState) + .getResultList(); + } + + public List findAllByDataFileIdAndRequestState(Long dataFileId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByDataFileIdAndRequestState", FileAccessRequest.class) + .setParameter("dataFileId", dataFileId) + .setParameter("requestState",requestState) + .getResultList(); + } + + + public FileAccessRequest save(FileAccessRequest far) { + if (far.getId() == null) { + em.persist(far); + return far; + } else { + return em.merge(far); + } + } + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 850efc2f1ae..e44aeafcc4d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -71,7 +71,7 @@ private boolean testResponseLength(String value) { // file downloads and multiple (batch) downloads - since both use the same // terms/etc. popup. public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); guestbookResponse.setDownloadtype("Download"); // Note that this method is only ever called from the file-download-popup - // meaning we know for the fact that we DO want to save this @@ -91,7 +91,7 @@ public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) public void writeGuestbookAndOpenSubset(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadDataSubsetPopup').show()"); guestbookResponse.setDownloadtype("Subset"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); @@ -132,17 +132,23 @@ public void writeGuestbookAndLaunchExploreTool(GuestbookResponse guestbookRespon fileDownloadService.explore(guestbookResponse, fmd, externalTool); //requestContext.execute("PF('downloadPopup').hide()"); - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); } public void writeGuestbookAndLaunchPackagePopup(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadPackagePopup').show()"); PrimeFaces.current().executeScript("handleResizeDialog('downloadPackagePopup')"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse
guestbookResponse) { + //requestContext.execute("PF('guestbookAndTermsPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); + fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse); + } + /** * Writes a guestbook entry for either popup scenario: guestbook or terms. */ @@ -307,13 +313,13 @@ public void requestAccessMultiple(List files) { } } if (notificationFile != null && succeeded) { - fileDownloadService.sendRequestFileAccessNotification(notificationFile, (AuthenticatedUser) session.getUser()); + fileDownloadService.sendRequestFileAccessNotification(notificationFile.getOwner(), notificationFile.getId(), (AuthenticatedUser) session.getUser()); } } public void requestAccessIndirect() { //Called when there are multiple files and no popup - // or there's a popup with sigular or multiple files + // or there's a popup with singular or multiple files // The list of files for Request Access is set in the Dataset Page when // user clicks the request access button in the files fragment // (and has selected one or more files) @@ -329,8 +335,16 @@ private boolean processRequestAccess(DataFile file, Boolean sendNotification) { // create notification if necessary if (sendNotification) { - fileDownloadService.sendRequestFileAccessNotification(file, user); - } + fileDownloadService.sendRequestFileAccessNotification(file.getOwner(), file.getId(), (AuthenticatedUser) session.getUser()); + } + //ToDO QDRADA - where to write the response? + /* + //write the guestbookResponse if there is an enabled guestbook + GuestbookResponse gbr = this.getGuestbookResponse(); //can we be sure this is the correct guestbookResponse?? - can it get out of sync?? + if( gbr != null && gbr.getGuestbook().isEnabled() ){ + fileDownloadService.writeGuestbookResponseRecordForRequestAccess(gbr); + } + */ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success")); return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a90489be29a..f7612300eaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -76,6 +76,8 @@ public class FileDownloadServiceBean implements java.io.Serializable { PrivateUrlServiceBean privateUrlService; @EJB SettingsServiceBean settingsService; + @EJB + MailServiceBean mailService; @Inject DataverseSession session; @@ -192,6 +194,38 @@ public void writeGuestbookAndStartFileDownload(GuestbookResponse guestbookRespon redirectToDownloadAPI(guestbookResponse.getFileFormat(), guestbookResponse.getDataFile().getId()); logger.fine("issued file download redirect for datafile "+guestbookResponse.getDataFile().getId()); } + + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse){ + if (guestbookResponse == null || ( guestbookResponse.getDataFile() == null && guestbookResponse.getSelectedFileIds() == null) ) { + return; + } + + List selectedDataFiles = new ArrayList<>(); //always make sure it's at least an empty List + + if(guestbookResponse.getDataFile() != null ){ //one file 'selected' by 'Request Access' button click + selectedDataFiles.add(datafileService.find(guestbookResponse.getDataFile().getId())); //don't want the findCheapAndEasy + } + + if(guestbookResponse.getSelectedFileIds() != null && !guestbookResponse.getSelectedFileIds().isEmpty()) { //multiple selected through 
multi-select Request Access button + selectedDataFiles = datafileService.findAll(guestbookResponse.getSelectedFileIds()); + } + + int countRequestAccessSuccess = 0; + + for(DataFile dataFile : selectedDataFiles){ + guestbookResponse.setDataFile(dataFile); + writeGuestbookResponseRecordForRequestAccess(guestbookResponse); + if(requestAccess(dataFile,guestbookResponse)){ + countRequestAccessSuccess++; + } + } + + if(countRequestAccessSuccess > 0){ + DataFile firstDataFile = selectedDataFiles.get(0); + sendRequestFileAccessNotification(firstDataFile.getOwner(), firstDataFile.getId(), (AuthenticatedUser) session.getUser()); + } + + } public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse, FileMetadata fileMetadata, String format) { if(!fileMetadata.getDatasetVersion().isDraft()){ @@ -221,6 +255,18 @@ public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse) { } } + public void writeGuestbookResponseRecordForRequestAccess(GuestbookResponse guestbookResponse) { + try { + CreateGuestbookResponseCommand cmd = new CreateGuestbookResponseCommand(dvRequestService.getDataverseRequest(), guestbookResponse, guestbookResponse.getDataset()); + commandEngine.submit(cmd); + + } catch (CommandException e) { + //if an error occurs here then the download won't happen, so there's no need for response records... + logger.info("Failed to writeGuestbookResponseRecord for RequestAccess"); + } + + } + // The "guestBookRecord(s)AlreadyWritten" parameter in the 2 methods // below (redirectToBatchDownloadAPI() and redirectToDownloadAPI(), for the // multiple- and single-file downloads respectively) are passed to the @@ -499,15 +545,63 @@ public boolean requestAccess(Long fileId) { } } return false; - } + } + + public boolean requestAccess(DataFile dataFile, GuestbookResponse gbr){ + boolean accessRequested = false; + if (dvRequestService.getDataverseRequest().getAuthenticatedUser() == null){ + return accessRequested; + } + + List<AuthenticatedUser> fARs = dataFile.getFileAccessRequesters(); + + if(fARs.isEmpty() || (!fARs.isEmpty() && !fARs.contains((AuthenticatedUser)session.getUser()))){ + try { + commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), dataFile, gbr)); + accessRequested = true; + } catch (CommandException ex) { + logger.info("Unable to request access for file id " + dataFile.getId() + ".
Exception: " + ex); + } + } + + return accessRequested; + } - public void sendRequestFileAccessNotification(DataFile datafile, AuthenticatedUser requestor) { - permissionService.getUsersWithPermissionOn(Permission.ManageFilePermissions, datafile).stream().forEach((au) -> { - userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, datafile.getId(), null, requestor, false); + public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, AuthenticatedUser requestor) { + permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset).stream().forEach((au) -> { + userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, fileId, null, requestor, false); }); } + public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, GuestbookResponse gb){ + Timestamp ts = new Timestamp(new Date().getTime()); + UserNotification un = null; + + //String appendMsgText = (gb == null)?("") : this.getGuestbookAppendEmailDetails(gb); + String appendMsgText = ""; + + //first send all the Users that have ManageDatasetPermissions a notification that a user has requested access + List<AuthenticatedUser> mngDsPermUsers = permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset); + + for (AuthenticatedUser au : mngDsPermUsers){ + un = userNotificationService.sendUserNotification(au, ts, UserNotification.Type.REQUESTFILEACCESS, fileId); + + if(un != null){ + + boolean mailed = mailService.sendNotificationEmail(un, appendMsgText, (AuthenticatedUser)session.getUser(),false); + if(mailed){ + un.setEmailed(true); + userNotificationService.save(un); + } + } + } + + //send the user that requested access a notification that they requested the access + userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), ts, UserNotification.Type.REQUESTEDFILEACCESS, fileId); + } + + public String generateServiceKey() { UUID uid = UUID.randomUUID(); // last 8 bytes, of the random UUID, 16 hex digits: diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java index e6d5cc75ca3..5845d65889e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -318,6 +318,12 @@ public boolean isRequestAccessPopupRequired() { return FileUtil.isRequestAccessPopupRequired(fileMetadata.getDatasetVersion()); } + public boolean isGuestbookAndTermsPopupRequired() { + if(fileMetadata.getId() == null || fileMetadata.getDatasetVersion().getId() == null ){ + return false; + } + return FileUtil.isGuestbookAndTermsPopupRequired(fileMetadata.getDatasetVersion()); + } public void setFileMetadata(FileMetadata fileMetadata) { this.fileMetadata = fileMetadata; diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 69404482fce..5c39d1039d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -65,8 +65,12 @@ public class GuestbookResponse implements Serializable { @JoinColumn(nullable=true) private AuthenticatedUser authenticatedUser; - @OneToOne(cascade=CascadeType.ALL,mappedBy="guestbookResponse",fetch = FetchType.LAZY, optional = false) + @OneToOne(cascade={CascadeType.REMOVE, CascadeType.MERGE,
CascadeType.PERSIST},mappedBy="guestbookResponse",fetch = FetchType.LAZY) private FileDownload fileDownload; + + @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},fetch = FetchType.LAZY) + //private FileAccessRequest fileAccessRequest; + private List fileAccessRequests; @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},orphanRemoval=true) @OrderBy ("id") @@ -253,6 +257,13 @@ public void setFileDownload(FileDownload fDownload){ this.fileDownload = fDownload; } + public List getFileAccessRequests(){ + return fileAccessRequests; + } + + public void setFileAccessRequest(List fARs){ + this.fileAccessRequests = fARs; + } public Dataset getDataset() { return dataset; diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index e71e04bc42f..173af4c241f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -56,6 +56,8 @@ public class ManagePermissionsPage implements java.io.Serializable { @EJB DvObjectServiceBean dvObjectService; @EJB + FileAccessRequestServiceBean fileAccessRequestService; + @EJB DataverseRoleServiceBean roleService; @EJB RoleAssigneeServiceBean roleAssigneeService; @@ -400,6 +402,51 @@ public List completeRoleAssignee( String query ) { return roleAssigneeService.filterRoleAssignees(query, dvObject, roleAssignSelectedRoleAssignees); } + public void grantAccess(ActionEvent evt) { + //QDRADA + logger.info("grantAccess Called"); + try { + throw new Exception("grantAccessCalled"); + } catch (Exception e) { + e.printStackTrace(); + } + /* + // Find the built in file downloader role (currently by alias) + DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER); + for (RoleAssignee roleAssignee : selectedRoleAssignees) { + boolean sendNotification = false; + for (DataFile file : selectedFiles) { + if (assignRole(roleAssignee, file, fileDownloaderRole)) { + if (file.isReleased()) { + sendNotification = true; + } + // remove request, if it exist + for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { + if (file.getFileAccessRequesters().remove(au)) { + List fileAccessRequests = fileAccessRequestService.findAllByAuthenticatedUserIdAndRequestState(au.getId(), FileAccessRequest.RequestState.CREATED); + for(FileAccessRequest far : fileAccessRequests){ + far.setStateGranted(); + fileAccessRequestService.save(far); + } + file.setFileAccessRequests(fileAccessRequests); + datafileService.save(file); + } + } + } + + } + + if (sendNotification) { + for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { + userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId()); + } + } + } + + initMaps(); + */ + } + public List getAvailableRoles() { List roles = new LinkedList<>(); if (dvObject != null && dvObject.getId() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index b68a1b9d13e..c91f7630caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -39,7 +39,7 @@ public enum Type { CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, 
INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, STATUSUPDATED, DATASETCREATED, DATASETMENTIONED, GLOBUSUPLOADCOMPLETED, GLOBUSUPLOADCOMPLETEDWITHERRORS, - GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS; + GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS, REQUESTEDFILEACCESS; public String getDescription() { return BundleUtil.getStringFromBundle("notification.typeDescription." + this.name()); diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java index 947ee3ce989..972f26f6830 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java @@ -131,6 +131,28 @@ public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate save(userNotification); } } + + /** + * Returns a UserNotification that was sent to a dataverseUser. + * Sends ONLY the UserNotification (no email is sent via this method). + * All parameters are assumed to be valid, non-null objects. + * + * @param dataverseUser - the AuthenticatedUser to whom the notification is to be sent + * @param sendDate - the time and date the notification was sent. + * @param type - the type of notification to be sent (see UserNotification for the different types) + * @param objectId - the ID of the Dataverse object (Dataverse, Dataset, etc.) that the notification pertains to + * @return The UserNotification that was sent to the dataverseUser + */ + + public UserNotification sendUserNotification(AuthenticatedUser dataverseUser, Timestamp sendDate, Type type, Long objectId) { + UserNotification userNotification = new UserNotification(); + userNotification.setUser(dataverseUser); + userNotification.setSendDate(sendDate); + userNotification.setType(type); + userNotification.setObjectId(objectId); + this.save(userNotification); + return userNotification; + } public boolean isEmailMuted(UserNotification userNotification) { final Type type = userNotification.getType(); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 9fdfce2f1a7..bb688fb8acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -1,7 +1,9 @@ package edu.harvard.iq.dataverse.authorization.users; import edu.harvard.iq.dataverse.Cart; +import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetLock; +import edu.harvard.iq.dataverse.FileAccessRequest; import edu.harvard.iq.dataverse.UserNotification.Type; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.validation.ValidateEmail; @@ -17,6 +19,7 @@ import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.Serializable; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; @@ -28,6 +31,7 @@ import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; @@ -202,6 +206,27 @@ public void setDatasetLocks(List datasetLocks) { @OneToMany(mappedBy = "user", 
cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List oAuth2TokenDatas; + /*for many to many fileAccessRequests*/ + @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}, fetch = FetchType.LAZY) + private List fileAccessRequests; + + public List getFileAccessRequests() { + return fileAccessRequests; + } + + public void setFileAccessRequests(List fARs) { + this.fileAccessRequests = fARs; + } + + public List getRequestedDataFiles(){ + List requestedDataFiles = new ArrayList<>(); + + for(FileAccessRequest far : getFileAccessRequests()){ + requestedDataFiles.add(far.getDataFile()); + } + return requestedDataFiles; + } + @Override public AuthenticatedUserDisplayInfo getDisplayInfo() { return new AuthenticatedUserDisplayInfo(firstName, lastName, email, affiliation, position); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index d710ed66551..f6a3b287778 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -5,7 +5,11 @@ */ package edu.harvard.iq.dataverse.engine.command.impl; +import java.util.logging.Logger; + import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.FileAccessRequest; +import edu.harvard.iq.dataverse.GuestbookResponse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; @@ -23,8 +27,11 @@ @RequiredPermissions({}) public class RequestAccessCommand extends AbstractCommand { + private static final Logger logger = Logger.getLogger(RequestAccessCommand.class.getName()); + private final DataFile file; private final AuthenticatedUser requester; + private final FileAccessRequest fileAccessRequest; private final Boolean sendNotification; @@ -33,6 +40,7 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file) { super(dvRequest, file); this.file = file; this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester); this.sendNotification = false; } @@ -41,8 +49,27 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean s super(dvRequest, file); this.file = file; this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester); this.sendNotification = sendNotification; } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = false; + } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr, Boolean sendNotification) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = sendNotification; + } public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, 
GuestbookResponse gbr) { @@ -75,9 +102,13 @@ public DataFile execute(CommandContext ctxt) throws CommandException { if(FileUtil.isActivelyEmbargoed(file)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.embargoed"), this); } + file.getFileAccessRequests().add(fileAccessRequest); file.addFileAccessRequester(requester); + requester.getFileAccessRequests().add(fileAccessRequest); if (sendNotification) { - ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); + //QDRADA + logger.info("ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester);"); + //ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); } return ctxt.files().save(file); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..a5fb98f7c49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Embargo; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; @@ -1639,6 +1640,71 @@ private static Boolean popupDueToStateOrTerms(DatasetVersion datasetVersion) { return null; } + /** + * isGuestbookAndTermsPopupRequired + * meant to replace both isDownloadPopupRequired() and isRequestAccessDownloadPopupRequired() when the guestbook-terms-popup-fragment.xhtml + * replaced file-download-popup-fragment.xhtml and file-request-access-popup-fragment.xhtml + * @param datasetVersion + * @return boolean + */ + + public static boolean isGuestbookAndTermsPopupRequired(DatasetVersion datasetVersion) { + return isGuestbookPopupRequired(datasetVersion) || isTermsPopupRequired(datasetVersion); + } + + public static boolean isGuestbookPopupRequired(DatasetVersion datasetVersion) { + + if (datasetVersion == null) { + logger.fine("GuestbookPopup not required because datasetVersion is null."); + return false; + } + //0. if version is draft then Popup "not required" + if (!datasetVersion.isReleased()) { + logger.fine("GuestbookPopup not required because datasetVersion has not been released."); + return false; + } + + // 3. Guest Book: + if (datasetVersion.getDataset() != null && datasetVersion.getDataset().getGuestbook() != null && datasetVersion.getDataset().getGuestbook().isEnabled() && datasetVersion.getDataset().getGuestbook().getDataverse() != null) { + logger.fine("GuestbookPopup required because an enabled guestbook exists."); + return true; + } + + logger.fine("GuestbookPopup is not required."); + return false; + } + + public static boolean isTermsPopupRequired(DatasetVersion datasetVersion) { + + if (datasetVersion == null) { + logger.fine("TermsPopup not required because datasetVersion is null."); + return false; + } + //0. if version is draft then Popup "not required" + if (!datasetVersion.isReleased()) { + logger.fine("TermsPopup not required because datasetVersion has not been released."); + return false; + } + // 1. 
License and Terms of Use: + if (datasetVersion.getTermsOfUseAndAccess() != null) { + if (!License.CC0.equals(datasetVersion.getTermsOfUseAndAccess().getLicense()) + && !(datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null + || datasetVersion.getTermsOfUseAndAccess().getTermsOfUse().equals(""))) { + logger.fine("TermsPopup required because of license or terms of use."); + return true; + } + + // 2. Terms of Access: + if (!(datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess() == null) && !datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess().equals("")) { + logger.fine("TermsPopup required because of terms of access."); + return true; + } + } + + logger.fine("TermsPopup is not required."); + return false; + } + /** * Provide download URL if no Terms of Use, no guestbook, and not * restricted. diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index 72980c3451a..d166cc753cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -51,6 +51,8 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti return BundleUtil.getStringFromBundle("notification.email.create.dataverse.subject", rootDvNameAsList); case REQUESTFILEACCESS: return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", rootDvNameAsList); + case REQUESTEDFILEACCESS: + return BundleUtil.getStringFromBundle("notification.email.requested.file.access.subject", rootDvNameAsList); case GRANTFILEACCESS: return BundleUtil.getStringFromBundle("notification.email.grant.file.access.subject", rootDvNameAsList); case REJECTFILEACCESS: diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 4ba6ad0e7e1..a79e421fa58 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1048,11 +1048,11 @@

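(Orientation note: the XHTML diffs below lost most of their tag markup in extraction; only EL expressions and attribute fragments survive. What they wire up is the single consolidated popup. A minimal Java sketch of that decision, using only methods added earlier in this patch; the if/else harness itself is illustrative, not part of the diff:)

    // Sketch only: both bean methods and both script calls appear in this patch;
    // the surrounding conditional just illustrates how they fit together.
    if (isGuestbookAndTermsPopupRequired()) {
        // one popup now covers both the guestbook case and the terms case
        PrimeFaces.current().executeScript(
                "PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');");
    } else {
        fileDownloadHelper.requestAccessIndirect(); // no popup needed, request directly
    }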
    #{bundle['dataset.downloadUnrestricted']}

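(Similarly, a sketch of the request-access round trip the popup triggers; the file ids are hypothetical and error handling is omitted:)

    // The popup stores the selection on the GuestbookResponse, then the service
    // writes one guestbook record per file and submits a RequestAccessCommand,
    // which persists a FileAccessRequest in the CREATED state (see
    // FileDownloadServiceBean.writeGuestbookResponseAndRequestAccess above).
    guestbookResponse.setDataFile(null);
    guestbookResponse.setSelectedFileIds("42,43"); // hypothetical ids
    fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse);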
    + rendered="#{DatasetPage.guestbookAndTermsPopupRequired and !settingsWrapper.rsyncDownload}" + oncomplete="PF('guestbookAndTermsPopup').show();" /> @@ -1494,12 +1494,12 @@
    [changed XHTML element markup lost in extraction] @@ -1530,19 +1530,11 @@ [replaced and removed XHTML element markup lost in extraction]
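(And a sketch of how a stored request would later be granted, along the lines of the commented-out ManagePermissionsPage.grantAccess() above; the service and entity calls are from this patch, while the loop and the in-scope file variable are illustrative:)

    // Fetch the open (CREATED) requests for one file and mark them granted.
    List<FileAccessRequest> open = fileAccessRequestService
            .findAllByDataFileIdAndRequestState(file.getId(), FileAccessRequest.RequestState.CREATED);
    for (FileAccessRequest far : open) {
        far.setStateGranted();              // CREATED -> GRANTED
        fileAccessRequestService.save(far); // merge the state change
    }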
    @@ -1883,7 +1875,7 @@ PF('downloadInvalid').show(); } if (outcome ==='GuestbookRequired'){ - PF('downloadPopup').show(); + PF('guestbookAndTermsPopup').show(); } } diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 597d9a12786..4b075eb3377 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -60,7 +60,7 @@
  • - #{bundle['file.globus.of']} #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - + update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + GT: #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -85,7 +86,7 @@
  • - #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - + update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - + update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -134,23 +136,24 @@
  • - #{bundle['file.downloadBtn.format.all']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.all']}
  • - @@ -158,12 +161,13 @@ - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + @@ -171,35 +175,37 @@
  • - #{bundle['file.downloadBtn.format.tab']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.tab']}
  • - #{bundle['file.downloadBtn.format.rdata']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.rdata']} @@ -215,18 +221,19 @@
  • - #{bundle['file.downloadBtn.format.var']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.var']}
  • @@ -303,20 +310,21 @@
  • - #{tool.getDisplayNameLang()} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{tool.getDisplayNameLang()}
  • diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index ae8729fdf89..d27536cb892 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -196,7 +196,7 @@ or FilePage.fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload}"> - + @@ -343,11 +343,11 @@ - + - + @@ -356,7 +356,7 @@ - + From bc42df0946371c1af41560b79b5324ed5b565b99 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 13:12:26 -0400 Subject: [PATCH 0093/1092] typo from merge --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index c0b4fc104ad..a3160a6e48b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3127,7 +3127,7 @@ public void startDownloadSelectedOriginal() { private void startDownload(boolean downloadOriginal){ boolean guestbookRequired = isDownloadPopupRequired(); - boolean validate = validateFilesForDownload(guestbookRequired); + boolean validate = validateFilesForDownload(downloadOriginal); if (validate) { updateGuestbookResponse(guestbookRequired, downloadOriginal); if(!guestbookRequired && !getValidateFilesOutcome().equals("Mixed")){ From 57e984b0e468a55f578b3b21e4787c1a63e4dca9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 13:27:52 -0400 Subject: [PATCH 0094/1092] fix for #9601 --- .../edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 7cdfda8d082..5a353453fe8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -332,9 +332,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } catch (IOException e) { logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage()); } - if (unattached) { - dataFile.setOwner(null); - } + if (savedSuccess && belowLimit) { // These are all brand new files, so they should all have // one filemetadata total. -- L.A. @@ -388,6 +386,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, dataFile.setContentType(FileUtil.MIME_TYPE_TSV); } } + if (unattached) { + dataFile.setOwner(null); + } // ... 
and let's delete the main temp file if it exists: if(tempLocationPath!=null) { try { From 0c76f7b02b4214efa37b5ac7a5d23f308afae5d5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 15:53:09 -0400 Subject: [PATCH 0095/1092] remove QDR updates --- src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java index 34176f7fb26..1d481f18cf5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java @@ -138,7 +138,7 @@ public void testIsDownloadPopupRequiredLicenseCC0() { DatasetVersion dsv1 = new DatasetVersion(); dsv1.setVersionState(DatasetVersion.VersionState.RELEASED); TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess(); - License license = new License("CC0 1.0", "Creative Commons CC0 1.0 Universal Public Domain Dedication.", URI.create("https://creativecommons.org/publicdomain/zero/1.0"), URI.create("https://licensebuttons.net/p/zero/1.0/88x31.png"), true,1L); + License license = new License("CC0", "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.", URI.create("http://creativecommons.org/publicdomain/zero/1.0"), URI.create("/resources/images/cc0.png"), true, 1l); license.setDefault(true); termsOfUseAndAccess.setLicense(license); dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess); From 45d9042a226cc53cb8f8a09902a849889786cc0b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 19 May 2023 23:42:32 +0200 Subject: [PATCH 0096/1092] style(api): fix typos and style issues in BearerTokenAuthMechanism --- .../api/auth/BearerTokenAuthMechanism.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index c4b03728179..856670523b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -23,6 +23,8 @@ public class BearerTokenAuthMechanism implements AuthMechanism { private static final String BEARER_AUTH_SCHEME = "Bearer"; + private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); + public static final String UNAUTHORIZED_BEARER_TOKEN = "Unauthorized bearer token"; public static final String INVALID_BEARER_TOKEN = "Could not parse bearer token"; public static final String BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED = "Bearer token detected, no OIDC provider configured"; @@ -31,18 +33,19 @@ public class BearerTokenAuthMechanism implements AuthMechanism { protected AuthenticationServiceBean authSvc; @Inject protected UserServiceBean userSvc; - private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); + @Override public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse { if (FeatureFlags.API_BEARER_AUTH.enabled()) { Optional bearerToken = getRequestApiKey(containerRequestContext); // No Bearer Token present, hence no user can be authenticated - if (!bearerToken.isPresent()) { + if (bearerToken.isEmpty()) { return null; } + // Validate and verify provided Bearer Token, and 
retrieve UserRecordIdentifier // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. Tokens in the cache should be removed after some (configurable) time. - UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIndentifier(bearerToken.get()); + UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIdentifier(bearerToken.get()); // retrieve Authenticated User from AuthService AuthenticatedUser authUser = authSvc.lookupUser(userInfo); @@ -67,7 +70,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext) * @param token The string containing the encoded JWT * @return */ - private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIndentifier(String token) throws WrappedAuthErrorResponse { + private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String token) throws WrappedAuthErrorResponse { try { BearerAccessToken accessToken = BearerAccessToken.parse(token); // Get list of all authentication providers using Open ID Connect @@ -108,7 +111,7 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIndentifier(String t * Retrieve the raw, encoded token value from the Authorization Bearer HTTP header as defined in RFC 6750 * @return An {@link Optional} either empty if not present or the raw token from the header */ - private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { + private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { String headerParamApiKey = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION); if (headerParamApiKey != null && headerParamApiKey.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) { return Optional.of(headerParamApiKey); From bb49ea52f482b1b6466e124ca926453670699b09 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 19 May 2023 23:47:31 +0200 Subject: [PATCH 0097/1092] refactor(api,auth): improve OIDCAuthProvider.getUserIdentifier - Reuse more existing code paths to avoid duplication - Make actual use of returning an empty optional - Remove no longer checked exception OAuth2Exception - Improve Javadocs of method - Don't just retrieve and bail out on fail but provide smaller analysis steps with logs - Rename method to be more concise in name selection - Change BearerTokenAuthMechanism accordingly --- .../api/auth/BearerTokenAuthMechanism.java | 5 +- .../oauth2/oidc/OIDCAuthProvider.java | 47 +++++++++++++++---- 2 files changed, 39 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index 856670523b1..eeabcba9f06 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -5,7 +5,6 @@ import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -88,12 +87,12 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String to for (OIDCAuthProvider provider : providers) { 
try { // The OIDCAuthProvider need to verify a Bearer Token and equip the client means to identify the corresponding AuthenticatedUser. - Optional userInfo = provider.getUserIdentifierForValidToken(accessToken); + Optional userInfo = provider.getUserIdentifier(accessToken); if(userInfo.isPresent()) { logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided identifier", provider.getId()); return userInfo.get(); } - } catch ( IOException| OAuth2Exception e) { + } catch (IOException e) { logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 4cf2eeb626a..52362f7abeb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -46,6 +46,7 @@ import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; import java.util.logging.Logger; /** @@ -305,16 +306,42 @@ Optional getUserInfo(BearerAccessToken accessToken) throws IOException } /** - * Returns the UserRecordIdentifier corresponding to the given accessToken if valid. - * UserRecordIdentifier (same used as in OAuth2UserRecord), i.e. can be used to find a local UserAccount. - * @param accessToken - * @return Returns the UserRecordIdentifier corresponding to the given accessToken if valid. - * @throws IOException - * @throws OAuth2Exception + * Trades an access token for an {@link UserRecordIdentifier} (if valid). + * + * @apiNote The resulting {@link UserRecordIdentifier} may be used with + * {@link edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean#lookupUser(UserRecordIdentifier)} + * to look up an {@link edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser} from the database. + * @see edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism + * + * @param accessToken The token to use when requesting user information from the provider + * @return Returns an {@link UserRecordIdentifier} for a valid access token or an empty {@link Optional}. + * @throws IOException In case communication with the endpoint fails to succeed for an I/O reason */ - public Optional getUserIdentifierForValidToken(BearerAccessToken accessToken) throws IOException, OAuth2Exception{ - // Request the UserInfoEndpoint to obtain UserInfo, since this endpoint also validate the Token we can reuse the existing code path. - // As an alternative we could use the Introspect Endpoint or assume the Token as some encoded information (i.e. JWT). - return Optional.of(new UserRecordIdentifier( this.getId(), getUserInfo(accessToken).get().getSubject().getValue())); + public Optional getUserIdentifier(BearerAccessToken accessToken) throws IOException { + OAuth2UserRecord userRecord; + try { + // Try to retrieve with given token (throws if invalid token) + Optional userInfo = getUserInfo(accessToken); + + if (userInfo.isPresent()) { + // Take this detour to avoid code duplication and potentially hard to track conversion errors. + userRecord = getUserRecord(userInfo.get()); + } else { + // This should not happen - an error at the provider side will lead to an exception. 
+ logger.log(Level.WARNING, + "User info retrieval from {0} returned empty optional but expected exception for token {1}.", + List.of(getId(), accessToken).toArray() + ); + return Optional.empty(); + } + } catch (OAuth2Exception e) { + logger.log(Level.FINE, + "Could not retrieve user info with token {0} at provider {1}: {2}", + List.of(accessToken, getId(), e.getMessage()).toArray()); + logger.log(Level.FINER, "Retrieval failed, details as follows: ", e); + return Optional.empty(); + } + + return Optional.of(userRecord.getUserRecordIdentifier()); } } From cecb034ad7e2be7c47496e0197a7c591e2be503e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 20 May 2023 00:01:38 +0200 Subject: [PATCH 0098/1092] refactor(api): add TODO in BearerTokenAuthMechanism not to neglect IO exceptions --- .../iq/dataverse/api/auth/BearerTokenAuthMechanism.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index eeabcba9f06..e26717e97b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -93,6 +93,9 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String to return userInfo.get(); } } catch (IOException e) { + // TODO: Just logging this is not sufficient - if there is an IO error with the one provider + // which would have validated successfully, this is not the users fault. We need to + // take note and refer to that later when occurred. logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); } } From 647315cb52fbdbb45fb33492bdcbd9c829c73a16 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 22:49:47 +0200 Subject: [PATCH 0099/1092] feat(test): extend JUnit5 test helper JvmSettings - Adding a @JvmSetting to a testclass now applies it before all test methods (will be reset if necessary after all tests ran) - Enable deleting existing settings (which allows to override a class wide setting for example) by setting value to "null" - Introduce settings broker with a first simple local implementation as extension point to set these settings out somewhere (TM). - Broker requires annotation which one to use within a class at class level --- .../util/testing/JvmSettingBroker.java | 43 +++++ .../util/testing/JvmSettingExtension.java | 164 ++++++++++++------ .../util/testing/LocalJvmSettings.java | 39 +++++ 3 files changed, 191 insertions(+), 55 deletions(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java new file mode 100644 index 00000000000..1235df89b3e --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java @@ -0,0 +1,43 @@ +package edu.harvard.iq.dataverse.util.testing; + +import java.io.IOException; + +/** + * Provide an interface to access and manipulate {@link edu.harvard.iq.dataverse.settings.JvmSettings} + * at some place (local, remote, different ways to access, etc.). 
+ * Part of the {@link JvmSettingExtension} extension to allow JUnit5 tests to manipulate these + * settings, enabling to test different code paths and so on. + * @implNote Keep in mind to use methods that do not require restarts or similar to set or delete a setting. + * This must be changeable on the fly, otherwise it will be useless for testing. + * Yes, non-hot-reloadable settings may be a problem. The code should be refactored in these cases. + */ +public interface JvmSettingBroker { + + /** + * Receive the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} given as its {@link String} + * representation. The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @return The value of the setting if present or null. + * @throws IOException When communication goes sideways. + */ + String getJvmSetting(String key) throws IOException; + + /** + * Set the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String} + * representation). The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @param value The JVM setting's value we want to have it set to. + * @throws IOException When communication goes sideways. + */ + void setJvmSetting(String key, String value) throws IOException; + + /** + * Remove the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String} + * representation). For some tests, one might want to clear a certain setting again and potentially have it set + * back afterward. The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @throws IOException When communication goes sideways. 
+ */ + String deleteJvmSetting(String key) throws IOException; + +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java index 17728e75ffc..2065d7b3ae6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java @@ -1,81 +1,124 @@ package edu.harvard.iq.dataverse.util.testing; import edu.harvard.iq.dataverse.settings.JvmSettings; +import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.AfterTestExecutionCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.BeforeTestExecutionCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.platform.commons.support.AnnotationSupport; +import org.junit.platform.commons.support.ReflectionSupport; -import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.List; +import java.util.Optional; import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER; -public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback { +public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback, BeforeAllCallback, AfterAllCallback { - private ExtensionContext.Store getStore(ExtensionContext context) { - return context.getStore(ExtensionContext.Namespace.create(getClass(), context.getRequiredTestClass(), context.getRequiredTestMethod())); + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass())); + + setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store); + } + + @Override + public void afterAll(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass())); + + resetSetting(settings, getBroker(extensionContext), store); } @Override public void beforeTestExecution(ExtensionContext extensionContext) throws Exception { - extensionContext.getTestMethod().ifPresent(method -> { - JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class); - for (JvmSetting setting : settings) { - // get the setting name (might need var args substitution) - String settingName = getSettingName(setting); - - // get the setting ... 
- String oldSetting = System.getProperty(settingName); + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create( + getClass(), + extensionContext.getRequiredTestClass(), + extensionContext.getRequiredTestMethod() + )); + + setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store); + } - // if present - store in context to restore later - if (oldSetting != null) { - getStore(extensionContext).put(settingName, oldSetting); - } - - // set to new value - if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { - throw new IllegalArgumentException("You must either provide a value or a method reference " + - "for key JvmSettings." + setting.key()); - } - - // retrieve value from static test class method if no setting given - if (setting.value().equals(PLACEHOLDER)) { - extensionContext.getTestClass().ifPresent(klass -> { - try { - Method valueMethod = klass.getDeclaredMethod(setting.method()); - valueMethod.setAccessible(true); - System.setProperty(settingName, (String)valueMethod.invoke(null)); - } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new RuntimeException(e); - } - }); - } else { - System.setProperty(settingName, setting.value()); + @Override + public void afterTestExecution(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create( + getClass(), + extensionContext.getRequiredTestClass(), + extensionContext.getRequiredTestMethod() + )); + + resetSetting(settings, getBroker(extensionContext), store); + } + + private void setSetting(Class testClass, List settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception { + for (JvmSetting setting : settings) { + // get the setting name (might need var args substitution) + String settingName = getSettingName(setting); + + // get the setting value ... + String oldSetting = broker.getJvmSetting(settingName); + + // if present - store in context to restore later + if (oldSetting != null) { + store.put(settingName, oldSetting); + } + + // set to new value + if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { + throw new IllegalArgumentException("You must either provide a value or a method reference " + + "for key JvmSettings" + setting.key()); + } + + String value; + // Retrieve value from static (!) test class method if no direct setting given + if (setting.value().equals(PLACEHOLDER)) { + Optional valueMethod = ReflectionSupport.findMethod(testClass, setting.method()); + if (valueMethod.isEmpty() || ! Modifier.isStatic(valueMethod.get().getModifiers())) { + throw new IllegalStateException("Could not find a static method '" + setting.method() + "' in test class"); } + value = (String) ReflectionSupport.invokeMethod(valueMethod.get(), null); + // Set to new value by using the directly given value + } else { + value = setting.value(); } - }); + + // If the retrieved value is null, delete the setting (will be reset after the test), otherwise set. 
+ if (value != null) { + broker.setJvmSetting(settingName, value); + } else if (oldSetting != null) { + broker.deleteJvmSetting(settingName); + } + } } - @Override - public void afterTestExecution(ExtensionContext extensionContext) throws Exception { - extensionContext.getTestMethod().ifPresent(method -> { - JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class); - for (JvmSetting setting : settings) { - // get the setting name (might need var args substitution) - String settingName = getSettingName(setting); - - // get a stored setting from context - String oldSetting = getStore(extensionContext).remove(settingName, String.class); - - // if present before, restore - if (oldSetting != null) { - System.setProperty(settingName, oldSetting); + private void resetSetting(List settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception { + for (JvmSetting setting : settings) { + // get the setting name (might need var args substitution) + String settingName = getSettingName(setting); + + // get a stored setting from context + String oldSetting = store.remove(settingName, String.class); + + // if present before, restore + if (oldSetting != null) { + broker.setJvmSetting(settingName, oldSetting); // if NOT present before, delete - } else { - System.clearProperty(settingName); - } + } else { + broker.deleteJvmSetting(settingName); } - }); + } } private String getSettingName(JvmSetting setting) { @@ -95,4 +138,15 @@ private String getSettingName(JvmSetting setting) { return target.getScopedKey(); } + + private JvmSettingBroker getBroker(ExtensionContext extensionContext) throws Exception { + // Is this test class using local system properties, then get a broker for these + if (AnnotationSupport.isAnnotated(extensionContext.getTestClass(), LocalJvmSettings.class)) { + return LocalJvmSettings.localBroker; + // NOTE: this might be extended later with other annotations to support other means of handling the settings + } else { + throw new IllegalStateException("You must provide the @LocalJvmSettings annotation to the test class"); + } + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java new file mode 100644 index 00000000000..372fa91f6f6 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java @@ -0,0 +1,39 @@ +package edu.harvard.iq.dataverse.util.testing; + +import org.junit.jupiter.api.extension.ExtendWith; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation expresses that a test class wants to manipulate local + * settings (because the tests run within the same JVM as the code itself). + * This is mostly true for unit tests. 
+ */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@ExtendWith(JvmSettingExtension.class) +@Inherited +public @interface LocalJvmSettings { + + JvmSettingBroker localBroker = new JvmSettingBroker() { + @Override + public String getJvmSetting(String key) { + return System.getProperty(key); + } + + @Override + public void setJvmSetting(String key, String value) { + System.setProperty(key, value); + } + + @Override + public String deleteJvmSetting(String key) { + return System.clearProperty(key); + } + }; + +} \ No newline at end of file From 645770f0c08c042f934707d88c187de34ebab95b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 22:59:20 +0200 Subject: [PATCH 0100/1092] refactor(test): make existing test using JvmSetting annotated with @LocalJvmSettings --- .../dataverse/api/auth/BearerTokenAuthMechanismTest.java | 4 +++- .../api/auth/SessionCookieAuthMechanismTest.java | 8 +++++--- .../iq/dataverse/export/SchemaDotOrgExporterTest.java | 2 ++ .../dataverse/externaltools/ExternalToolHandlerTest.java | 2 ++ .../harvard/iq/dataverse/search/IndexServiceBeanTest.java | 2 ++ .../iq/dataverse/search/SolrClientServiceTest.java | 2 ++ .../harvard/iq/dataverse/settings/FeatureFlagsTest.java | 2 ++ .../harvard/iq/dataverse/settings/JvmSettingsTest.java | 2 ++ .../edu/harvard/iq/dataverse/util/SystemConfigTest.java | 2 ++ .../edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java | 2 ++ 10 files changed, 24 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 0370daa5ea2..b38300df660 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; @@ -25,7 +26,8 @@ import static edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism.*; import static org.junit.jupiter.api.Assertions.*; -public class BearerTokenAuthMechanismTest { +@LocalJvmSettings +class BearerTokenAuthMechanismTest { private static final String TEST_API_KEY = "test-api-key"; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java index 477f8ee377a..74a7d239c05 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; @@ -13,7 +14,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; -public class SessionCookieAuthMechanismTest { +@LocalJvmSettings +class SessionCookieAuthMechanismTest { private 
SessionCookieAuthMechanism sut; @@ -24,7 +26,7 @@ public void setUp() { @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "false", varArgs = "api-session-auth") - public void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse { sut.session = Mockito.mock(DataverseSession.class); User actual = sut.findUserFromRequest(new ContainerRequestTestFake()); @@ -34,7 +36,7 @@ public void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErro @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-session-auth") - public void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse { DataverseSession dataverseSessionStub = Mockito.mock(DataverseSession.class); User testAuthenticatedUser = new AuthenticatedUser(); Mockito.when(dataverseSessionStub.getUser()).thenReturn(testAuthenticatedUser); diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index e660cf78da2..722b74406d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -33,6 +33,7 @@ import javax.json.JsonObject; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; @@ -44,6 +45,7 @@ /** * For docs see {@link SchemaDotOrgExporter}. */ +@LocalJvmSettings public class SchemaDotOrgExporterTest { private static final Logger logger = Logger.getLogger(SchemaDotOrgExporterTest.class.getCanonicalName()); diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index ab3a0263d66..c77d59123e4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import javax.json.Json; @@ -21,6 +22,7 @@ import java.util.ArrayList; import java.util.List; +@LocalJvmSettings public class ExternalToolHandlerTest { // TODO: It would probably be better to split these into individual tests. 
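
For orientation while reading this batch of hunks: the pattern being rolled out is a class-level @LocalJvmSettings opt-in combined with per-method @JvmSetting overrides. A minimal hypothetical sketch, not part of the patch; the test class name and asserted value are invented, and JvmSettings.VERSION with its lookup() accessor is used in the same way as in the JvmSettingsTest hunk below:

    import edu.harvard.iq.dataverse.settings.JvmSettings;
    import edu.harvard.iq.dataverse.util.testing.JvmSetting;
    import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    @LocalJvmSettings // binds this test class to the local System.properties broker
    class ExampleSettingTest {

        @Test
        @JvmSetting(key = JvmSettings.VERSION, value = "1.0-test") // set before this test, restored afterwards
        void readsOverriddenSetting() {
            assertEquals("1.0-test", JvmSettings.VERSION.lookup());
        }
    }
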
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index dd3dc2c6c95..ce6005a3d11 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.jupiter.api.BeforeEach; @@ -37,6 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) public class IndexServiceBeanTest { private static final Logger logger = Logger.getLogger(IndexServiceBeanTest.class.getCanonicalName()); diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java index a3b3c8a2080..72eafcd763c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -14,6 +15,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) class SolrClientServiceTest { diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java index 1a9fdeaa3da..26f2186695d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java @@ -1,10 +1,12 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; +@LocalJvmSettings class FeatureFlagsTest { @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java index 559d00fe0b7..6b03f20fc41 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java @@ -1,11 +1,13 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import edu.harvard.iq.dataverse.util.testing.SystemProperty; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings class JvmSettingsTest { @Test @JvmSetting(key = JvmSettings.VERSION, value = "foobar") diff --git a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java 
b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java index 2806aa3aa9b..82b89bca678 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; @@ -15,6 +16,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) class SystemConfigTest { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java index 8310af8885c..d70a108e7c6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -16,6 +17,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings class UrlTokenUtilTest { @Test From dd80162cb7f1b6ad5268057cdb7da547c9bf4b62 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:01:15 +0200 Subject: [PATCH 0101/1092] style(test,api,auth): make BearerTokenAuthMechanismTest simpler --- .../auth/BearerTokenAuthMechanismTest.java | 55 +++++-------------- 1 file changed, 15 insertions(+), 40 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index b38300df660..8a57ee4c41c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -27,6 +27,7 @@ import static org.junit.jupiter.api.Assertions.*; @LocalJvmSettings +@JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") class BearerTokenAuthMechanismTest { private static final String TEST_API_KEY = "test-api-key"; @@ -36,14 +37,12 @@ class BearerTokenAuthMechanismTest { @BeforeEach public void setUp() { sut = new BearerTokenAuthMechanism(); - } - - @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); sut.userSvc = Mockito.mock(UserServiceBean.class); + } + @Test + void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(null); User actual = sut.findUserFromRequest(testContainerRequest); @@ -51,11 +50,9 @@ public void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_invalid_token() 
throws WrappedAuthErrorResponse { - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - sut.userSvc = Mockito.mock(UserServiceBean.class); + void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); @@ -63,11 +60,9 @@ public void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorRespo assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - sut.userSvc = Mockito.mock(UserServiceBean.class); + void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); @@ -76,12 +71,7 @@ public void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorRes } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -91,7 +81,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedA // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.empty()); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.empty()); // when ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); @@ -102,12 +92,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedA } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider 
= Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -127,12 +112,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedA assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -143,7 +123,7 @@ public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthE // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.of(userinfo)); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser(); @@ -160,12 +140,7 @@ public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthE } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -176,7 +151,7 @@ public void testFindUserFromRequest_oneProvider_validToken_noAccount() throws Wr // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.of(userinfo)); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(null); From 544a502e3f57b52a2dbaae562e43083e7a015315 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:03:33 +0200 Subject: [PATCH 0102/1092] refactor(auth,api,test): adapt exception checks in BearerTokenAuthMechanismTest - Now that we no longer need to catch OAuth2Exception, adapt accordingly - This fixed the failing 
tests --- .../api/auth/BearerTokenAuthMechanismTest.java | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 8a57ee4c41c..281f1d21d45 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -50,7 +49,7 @@ void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_invalid_token() { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); @@ -60,7 +59,7 @@ void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_no_OidcProvider() { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); @@ -71,7 +70,7 @@ void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_invalidToken_1() throws ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -92,7 +91,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErro } @Test - void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_invalidToken_2() throws ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -102,7 +101,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErro // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenThrow(OAuth2Exception.class); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenThrow(IOException.class); // when 
ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); @@ -112,7 +111,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErro assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -140,7 +139,7 @@ void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorRes } @Test - void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); From c859ef64adc6cb59065da4f0cf6aa0097a022701 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:16:19 +0200 Subject: [PATCH 0103/1092] doc(test): add changes to JvmSetting helper in testing guide --- doc/sphinx-guides/source/developers/testing.rst | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index c734bed8b70..5814d9d4e7b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -85,20 +85,26 @@ JUnit 5 Test Helper Extensions Our codebase provides little helpers to ease dealing with state during tests. Some tests might need to change something which should be restored after the test ran. -For unit tests, the most interesting part is to set a JVM setting just for the current test. -Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or -a test class to set and clear the property automatically. +For unit tests, the most interesting part is to set a JVM setting just for the current test or a whole test class. +(Which might be an inner class, too!). Please make use of the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` +annotation and also make sure to annotate the test class with ``@LocalJvmSettings``. Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")`` -to reference a method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow +to reference a static method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!) +If you want to delete a setting, simply provide a ``null`` value. This can be used to override a class-wide setting +or some other default that is present for some reason. + To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")`` has been added. (Note: it does not support method references.) 
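
To make the documented semantics concrete, here is a minimal hypothetical sketch combining the three mechanisms described above: a class-wide setting, a static method reference, and deletion via a null value. JvmSettings.XXX mirrors the placeholder style of this guide, and all other names are invented:

    @LocalJvmSettings
    @JvmSetting(key = JvmSettings.XXX, value = "class-wide") // applied once before all tests in this class
    class CombinedSettingsTest {

        private static String dynamicValue() { // must be static, referenced by name below
            return "computed-at-runtime";
        }

        private static String noValue() { // returning null deletes the setting for that test
            return null;
        }

        @Test
        @JvmSetting(key = JvmSettings.XXX, method = "dynamicValue") // overrides the class-wide value
        void usesDynamicValue() { /* ... */ }

        @Test
        @JvmSetting(key = JvmSettings.XXX, method = "noValue") // setting removed for this test, restored afterwards
        void runsWithoutTheSetting() { /* ... */ }
    }
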
Both extensions will ensure the global state of system properties is non-interfering for test executions. Tests using these extensions will be executed in serial. +This settings helper may be extended at a later time to manipulate settings in a remote instance during integration +or end-to-end testing. Stay tuned! + Observing Changes to Code Coverage ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From bda75c18d5b1799e81f17f6711b9323441f559ff Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:46:52 +0200 Subject: [PATCH 0104/1092] feat(model): make UserRecordIdentifier comparable Adding equals and hashCode methods to enable testing for equality in tests, etc. --- .../authorization/UserRecordIdentifier.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java index 963ee592bbf..dfbb43fae46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java @@ -2,6 +2,8 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import java.util.Objects; + /** * Identifies a user using two strings: *
      @@ -38,4 +40,16 @@ public AuthenticatedUserLookup createAuthenticatedUserLookup( AuthenticatedUser return new AuthenticatedUserLookup(userIdInRepo, repoId, u); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof UserRecordIdentifier)) return false; + UserRecordIdentifier that = (UserRecordIdentifier) o; + return Objects.equals(repoId, that.repoId) && Objects.equals(getUserIdInRepo(), that.getUserIdInRepo()); + } + + @Override + public int hashCode() { + return Objects.hash(repoId, getUserIdInRepo()); + } } From 7004191eeec6f2deb5a01d52222fa1da9cea725b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:52:38 +0200 Subject: [PATCH 0105/1092] feat(test,api,auth): create actual integration tests in OIDCAuthenticationProviderFactoryIT - First test makes a roundtrip to receive the user info for the kcuser - Second test simulates an API request with a bearer token --- .../OIDCAuthenticationProviderFactoryIT.java | 123 +++++++++++++++++- 1 file changed, 116 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 53cfcca2742..a5aa29cc083 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -1,37 +1,146 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; +import com.nimbusds.oauth2.sdk.token.BearerAccessToken; +import com.nimbusds.openid.connect.sdk.claims.UserInfo; import dasniko.testcontainers.keycloak.KeycloakContainer; +import edu.harvard.iq.dataverse.UserServiceBean; +import edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism; +import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.admin.client.Keycloak; +import org.keycloak.admin.client.KeycloakBuilder; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Optional; +import java.util.Set; + +import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId; +import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static 
---
 .../OIDCAuthenticationProviderFactoryIT.java | 123 +++++++++++++++++-
 1 file changed, 116 insertions(+), 7 deletions(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
index 53cfcca2742..a5aa29cc083 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
@@ -1,37 +1,146 @@
 package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc;

+import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
+import com.nimbusds.openid.connect.sdk.claims.UserInfo;
 import dasniko.testcontainers.keycloak.KeycloakContainer;
+import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism;
+import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.keycloak.admin.client.Keycloak;
+import org.keycloak.admin.client.KeycloakBuilder;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;

+import java.util.Optional;
+import java.util.Set;
+
+import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId;
+import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.mockito.Mockito.when;

 @Tag("testcontainers")
 @Testcontainers
+@ExtendWith(MockitoExtension.class)
+// NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl()
+@LocalJvmSettings
+@JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId)
+@JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret)
+@JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl")
 class OIDCAuthenticationProviderFactoryIT {

+    // NOTE: the following values are taken from the realm import file!
     static final String clientId = "oidc-client";
     static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E";
     static final String realm = "oidc-realm";
+    static final String adminUser = "kcuser";
+    static final String adminPassword = "kcpassword";
+    static final String clientIdAdminCli = "admin-cli";

+    // The realm JSON resides in conf/keycloak/oidc-realm.json and is made available here via the test resources configured in pom.xml
     @Container
-    static KeycloakContainer keycloakContainer = new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json");
+    static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:19.0")
+        .withRealmImportFile("keycloak/oidc-realm.json")
+        .withAdminUsername(adminUser)
+        .withAdminPassword(adminPassword);

-    // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations
+    // simple method to retrieve the issuer URL, referenced by the @JvmSetting annotations above (do not delete)
     private static String getAuthUrl() {
         return keycloakContainer.getAuthServerUrl() + "realms/" + realm;
     }

+    OIDCAuthProvider getProvider() throws Exception {
+        OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings();
+
+        assumeTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString()
+            .startsWith(keycloakContainer.getAuthServerUrl()));
+
+        return oidcAuthProvider;
+    }
+
+    Keycloak getAdminClient() {
+        return KeycloakBuilder.builder()
+            .serverUrl(keycloakContainer.getAuthServerUrl())
+            .realm(realm)
+            .clientId(clientIdAdminCli)
+            .username(keycloakContainer.getAdminUsername())
+            .password(keycloakContainer.getAdminPassword())
+            .build();
+    }
+
+    String getBearerToken() throws Exception {
+        Keycloak keycloak = getAdminClient();
+        return keycloak.tokenManager().getAccessTokenString();
+    }
+
     @Test
-    @JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId)
-    @JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret)
-    @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl")
     void testCreateProvider() throws Exception {
-        OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings();
-        assertTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString().startsWith(keycloakContainer.getAuthServerUrl()));
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        String token = getBearerToken();
+        assumeFalse(token == null);
+
+        Optional<UserInfo> info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token));
+
+        assertTrue(info.isPresent());
+        assertEquals(adminUser, info.get().getPreferredUsername());
+    }
+
+    @Mock
+    UserServiceBean userService;
+    @Mock
+    AuthenticationServiceBean authService;
+
+    @InjectMocks
+    BearerTokenAuthMechanism bearerTokenAuthMechanism;
+
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true")
+    void testApiBearerAuth() throws Exception {
+        assumeFalse(userService == null);
+        assumeFalse(authService == null);
+        assumeFalse(bearerTokenAuthMechanism == null);
+
+        // given
+        // Get the access token from the remote Keycloak in the container
+        String accessToken = getBearerToken();
+        assumeFalse(accessToken == null);
+
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        // This will also receive the details from the remote Keycloak in the container
+        UserRecordIdentifier identifier = oidcAuthProvider.getUserIdentifier(new BearerAccessToken(accessToken)).get();
+        String token = "Bearer " + accessToken;
+        BearerTokenKeyContainerRequestTestFake request = new BearerTokenKeyContainerRequestTestFake(token);
+        AuthenticatedUser user = new MockAuthenticatedUser();
+
+        // setup mocks (we don't want or need a database here)
+        when(authService.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Set.of(oidcAuthProvider.getId()));
+        when(authService.getAuthenticationProvider(oidcAuthProvider.getId())).thenReturn(oidcAuthProvider);
+        when(authService.lookupUser(identifier)).thenReturn(user);
+        when(userService.updateLastApiUseTime(user)).thenReturn(user);
+
+        // when (let's do this again, but now with the actual subject under test!)
+        User lookedUpUser = bearerTokenAuthMechanism.findUserFromRequest(request);
+
+        // then
+        assertNotNull(lookedUpUser);
+        assertEquals(user, lookedUpUser);
     }
 }
\ No newline at end of file

From 564d6a73bcc0e101299c7f370bdf5fc6d42f8287 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 01:53:34 +0200
Subject: [PATCH 0106/1092] feat(build): make integration test using Testcontainers executable via Maven

Simply call mvn verify to execute, as it is meant to be!

---
 pom.xml | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/pom.xml b/pom.xml
index 8764e4f493d..adda4bb31f5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -729,6 +729,22 @@ ${skipUnitTests}
             </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <groups>testcontainers</groups>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>integration-test</goal>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-checkstyle-plugin</artifactId>

From c207b3baa95a01c9d67cefd1cacaf5784bd914ff Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 01:53:57 +0200
Subject: [PATCH 0107/1092] chore(build): update Keycloak Testcontainers version

---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index adda4bb31f5..313f33b94b8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -577,7 +577,7 @@
         <dependency>
             <groupId>com.github.dasniko</groupId>
             <artifactId>testcontainers-keycloak</artifactId>
-            <version>2.4.0</version>
+            <version>2.5.0</version>
             <scope>test</scope>
         </dependency>

From 7f8225f93c2353deb3f13e515ef1f43e9b0630c4 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 01:56:13 +0200
Subject: [PATCH 0108/1092] chore(build,test): add temporary servlet dependency in test scope

This is necessary because the Jakarta EE 8.0.0 API package did not properly
ship the bundle files necessary for servlet execution. Not including this
testing dependency leads to very cryptic errors, such as a
MissingResourceException for a bundle with language en-US.

This should be removed once we migrate to Jakarta EE 10.
---
 pom.xml | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/pom.xml b/pom.xml
index 313f33b94b8..e9a9b9dd611 100644
--- a/pom.xml
+++ b/pom.xml
@@ -580,6 +580,18 @@
             <version>2.5.0</version>
             <scope>test</scope>
         </dependency>
+
+        <!--
+            Temporary dependency in test scope: the Jakarta EE 8.0.0 API package did not
+            properly ship the bundle files necessary for servlet execution.
+            TODO: remove once we migrate to Jakarta EE 10.
+        -->
+        <dependency>
+            <groupId>jakarta.servlet</groupId>
+            <artifactId>jakarta.servlet-api</artifactId>
+            <version>4.0.4</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.mockito</groupId>
             <artifactId>mockito-core</artifactId>

From ac6354645c2bc8557e4cab19d2d28304605e634b Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 16:09:29 +0200
Subject: [PATCH 0109/1092] fix(test,oidc): replace Keycloak Demo/Dev realm

The realm used before (created with Keycloak 16) was for some reason not
compatible with Keycloak 20+. A new "Test" realm with more users was created,
working with Keycloak 20 and 21. All files used to run Keycloak have been
updated to the new version and realm import file.

---
 conf/keycloak/docker-compose.yml              |   12 +-
 .../keycloak/oidc-keycloak-auth-provider.json |    2 +-
 conf/keycloak/oidc-realm.json                 | 2108 -----------------
 conf/keycloak/run-keycloak.sh                 |    4 +-
 conf/keycloak/test-realm.json                 | 1939 +++++++++++++++
 docker-compose-dev.yml                        |    6 +-
 6 files changed, 1952 insertions(+), 2119 deletions(-)
 delete mode 100644 conf/keycloak/oidc-realm.json
 create mode 100644 conf/keycloak/test-realm.json

diff --git a/conf/keycloak/docker-compose.yml b/conf/keycloak/docker-compose.yml
index 2776f6572df..12b2382bd3d 100644
--- a/conf/keycloak/docker-compose.yml
+++ b/conf/keycloak/docker-compose.yml
@@ -3,13 +3,15 @@ version: "3.9"

 services:
   keycloak:
-    image: 'jboss/keycloak:16.1.1'
+    image: 'quay.io/keycloak/keycloak:21.0'
+    command:
+      - "start-dev"
+      - "--import-realm"
     environment:
-      - KEYCLOAK_USER=kcadmin
-      - KEYCLOAK_PASSWORD=kcpassword
-      - KEYCLOAK_IMPORT=/tmp/oidc-realm.json
+      - KEYCLOAK_ADMIN=kcadmin
+      - KEYCLOAK_ADMIN_PASSWORD=kcpassword
       - KEYCLOAK_LOGLEVEL=DEBUG
     ports:
       - "8090:8080"
     volumes:
-      - './oidc-realm.json:/tmp/oidc-realm.json'
+      - './test-realm.json:/opt/keycloak/data/import/test-realm.json'

diff --git a/conf/keycloak/oidc-keycloak-auth-provider.json b/conf/keycloak/oidc-keycloak-auth-provider.json
index 7d09fe5f36e..7e01bd4c325 100644
--- a/conf/keycloak/oidc-keycloak-auth-provider.json
+++ b/conf/keycloak/oidc-keycloak-auth-provider.json
@@ -3,6 +3,6 @@
   "factoryAlias": "oidc",
   "title": "OIDC-Keycloak",
   "subtitle": "OIDC-Keycloak",
-  "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/oidc-realm | clientId: oidc-client | clientSecret: ss6gE8mODCDfqesQaSG3gwUwZqZt547E",
+  "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/test | clientId: test | clientSecret: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8",
   "enabled": true
 }

diff --git a/conf/keycloak/oidc-realm.json b/conf/keycloak/oidc-realm.json
deleted file mode 100644
index 1b77f2b4384..00000000000
--- a/conf/keycloak/oidc-realm.json
+++ /dev/null
@@ -1,2108 +0,0 @@
-{ - "id": "oidc-realm", - "realm": "oidc-realm", - "notBefore": 0, - "defaultSignatureAlgorithm": "RS256", - "revokeRefreshToken": false, - "refreshTokenMaxReuse": 0, - "accessTokenLifespan": 300, - "accessTokenLifespanForImplicitFlow": 900, - "ssoSessionIdleTimeout": 1800, - "ssoSessionMaxLifespan": 36000, - "ssoSessionIdleTimeoutRememberMe": 0, - "ssoSessionMaxLifespanRememberMe": 0, - "offlineSessionIdleTimeout": 2592000, - "offlineSessionMaxLifespanEnabled": false, - "offlineSessionMaxLifespan": 5184000, - "clientSessionIdleTimeout": 0, - "clientSessionMaxLifespan": 0, - "clientOfflineSessionIdleTimeout": 0, - "clientOfflineSessionMaxLifespan": 0, - "accessCodeLifespan": 60, - "accessCodeLifespanUserAction": 300, - "accessCodeLifespanLogin": 1800, -
"actionTokenGeneratedByAdminLifespan": 43200, - "actionTokenGeneratedByUserLifespan": 300, - "oauth2DeviceCodeLifespan": 600, - "oauth2DevicePollingInterval": 5, - "enabled": true, - "sslRequired": "external", - "registrationAllowed": false, - "registrationEmailAsUsername": false, - "rememberMe": false, - "verifyEmail": false, - "loginWithEmailAllowed": true, - "duplicateEmailsAllowed": false, - "resetPasswordAllowed": false, - "editUsernameAllowed": false, - "bruteForceProtected": false, - "permanentLockout": false, - "maxFailureWaitSeconds": 900, - "minimumQuickLoginWaitSeconds": 60, - "waitIncrementSeconds": 60, - "quickLoginCheckMilliSeconds": 1000, - "maxDeltaTimeSeconds": 43200, - "failureFactor": 30, - "roles": { - "realm": [ - { - "id": "13d76240-fcf8-4361-9dbf-de268717cfb2", - "name": "uma_authorization", - "description": "${role_uma_authorization}", - "composite": false, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - }, - { - "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5", - "name": "default-roles-oidc-realm", - "description": "${role_default-roles}", - "composite": true, - "composites": { - "realm": [ - "offline_access", - "uma_authorization" - ] - }, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - }, - { - "id": "b907fd4e-0e54-461c-9411-3f736eef7d2f", - "name": "offline_access", - "description": "${role_offline-access}", - "composite": false, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - } - ], - "client": { - "realm-management": [ - { - "id": "39342ea9-0b4e-4841-8996-433759e9297f", - "name": "create-client", - "description": "${role_create-client}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "f8680034-617d-45d3-9801-7bf0d704c549", - "name": "manage-users", - "description": "${role_manage-users}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "b08e4cc3-71e2-4395-b66b-fb1277b48b88", - "name": "manage-realm", - "description": "${role_manage-realm}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "c15dc407-d012-43af-9a21-a2923e1d7b74", - "name": "manage-events", - "description": "${role_manage-events}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "66c07cb7-42cd-4155-8485-6cc7bd37cba9", - "name": "view-realm", - "description": "${role_view-realm}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "0419515f-4ab8-43ca-ac69-e842195813c0", - "name": "view-events", - "description": "${role_view-events}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "aa553d5a-b2dc-4f81-979a-2af0a019fee0", - "name": "impersonation", - "description": "${role_impersonation}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "9567e1e9-b755-43a8-93ed-d5929391316f", - "name": "manage-clients", - "description": "${role_manage-clients}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": 
"e3dab69f-7323-4aad-bf98-8b7697f36d57", - "name": "query-users", - "description": "${role_query-users}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "ee8a4855-d0d5-4261-bdba-b419d304a824", - "name": "query-groups", - "description": "${role_query-groups}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "4f251212-e922-4ac0-9cce-3ada607648d2", - "name": "view-identity-providers", - "description": "${role_view-identity-providers}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "34e1dc59-a975-424f-887b-52465e184a4b", - "name": "realm-admin", - "description": "${role_realm-admin}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "create-client", - "manage-users", - "manage-realm", - "manage-events", - "view-realm", - "view-events", - "impersonation", - "manage-clients", - "query-users", - "view-identity-providers", - "query-groups", - "view-clients", - "view-users", - "manage-authorization", - "manage-identity-providers", - "query-realms", - "query-clients", - "view-authorization" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "d35aca04-0182-40d3-96b8-1ce5cc118729", - "name": "view-clients", - "description": "${role_view-clients}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "query-clients" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "7d3b28d5-471a-4b2b-bc80-56d4ff80fd28", - "name": "view-users", - "description": "${role_view-users}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "query-users", - "query-groups" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "651059eb-fc1a-4f8d-9ced-ed28b0a2f965", - "name": "manage-authorization", - "description": "${role_manage-authorization}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "73f447e9-def8-4214-8516-56571f2c6f65", - "name": "manage-identity-providers", - "description": "${role_manage-identity-providers}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "1b5f7c39-885e-4246-8cf5-25769544fc3d", - "name": "query-realms", - "description": "${role_query-realms}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "350da4c1-69d4-4557-a9a8-8ba760db0225", - "name": "query-clients", - "description": "${role_query-clients}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "43d51082-6922-4765-8022-529d91a4603f", - "name": "view-authorization", - "description": "${role_view-authorization}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - } - ], - "security-admin-console": [], - "admin-cli": [], - "account-console": [], - "broker": [], - "oidc-client": [], - "account": [ - { - "id": "a163535c-71de-4b2d-9530-26b25eeb1c1e", - "name": 
"delete-account", - "description": "${role_delete-account}", - "composite": false, - "clientRole": true, - "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "attributes": {} - }, - { - "id": "851c6a9f-bce7-4c70-be82-084c25d61b25", - "name": "manage-account", - "composite": false, - "clientRole": true, - "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "attributes": {} - } - ] - } - }, - "groups": [], - "defaultRole": { - "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5", - "name": "default-roles-oidc-realm", - "description": "${role_default-roles}", - "composite": true, - "clientRole": false, - "containerId": "oidc-realm" - }, - "requiredCredentials": [ - "password" - ], - "otpPolicyType": "totp", - "otpPolicyAlgorithm": "HmacSHA1", - "otpPolicyInitialCounter": 0, - "otpPolicyDigits": 6, - "otpPolicyLookAheadWindow": 1, - "otpPolicyPeriod": 30, - "otpSupportedApplications": [ - "FreeOTP", - "Google Authenticator" - ], - "webAuthnPolicyRpEntityName": "keycloak", - "webAuthnPolicySignatureAlgorithms": [ - "ES256" - ], - "webAuthnPolicyRpId": "", - "webAuthnPolicyAttestationConveyancePreference": "not specified", - "webAuthnPolicyAuthenticatorAttachment": "not specified", - "webAuthnPolicyRequireResidentKey": "not specified", - "webAuthnPolicyUserVerificationRequirement": "not specified", - "webAuthnPolicyCreateTimeout": 0, - "webAuthnPolicyAvoidSameAuthenticatorRegister": false, - "webAuthnPolicyAcceptableAaguids": [], - "webAuthnPolicyPasswordlessRpEntityName": "keycloak", - "webAuthnPolicyPasswordlessSignatureAlgorithms": [ - "ES256" - ], - "webAuthnPolicyPasswordlessRpId": "", - "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", - "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", - "webAuthnPolicyPasswordlessRequireResidentKey": "not specified", - "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified", - "webAuthnPolicyPasswordlessCreateTimeout": 0, - "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, - "webAuthnPolicyPasswordlessAcceptableAaguids": [], - "users": [ - { - "username": "kcuser", - "enabled": true, - "totp": false, - "emailVerified": true, - "firstName": "Test", - "lastName": "Test", - "email": "test@test.com", - "credentials": [ - { - "type": "password", - "value": "kcpassword" - } - ] - } - ], - "scopeMappings": [ - { - "clientScope": "offline_access", - "roles": [ - "offline_access" - ] - } - ], - "clientScopeMappings": { - "account": [ - { - "client": "account-console", - "roles": [ - "manage-account" - ] - } - ] - }, - "clients": [ - { - "id": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "clientId": "account", - "name": "${client_account}", - "rootUrl": "${authBaseUrl}", - "baseUrl": "/realms/oidc-realm/account/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/realms/oidc-realm/account/*" - ], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - 
"offline_access", - "microprofile-jwt" - ] - }, - { - "id": "1e821c0e-f6b9-4324-9b23-e82b5431fb72", - "clientId": "account-console", - "name": "${client_account-console}", - "rootUrl": "${authBaseUrl}", - "baseUrl": "/realms/oidc-realm/account/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/realms/oidc-realm/account/*" - ], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "pkce.code.challenge.method": "S256" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "protocolMappers": [ - { - "id": "397616ab-4124-4a13-92b6-317423e818a3", - "name": "audience resolve", - "protocol": "openid-connect", - "protocolMapper": "oidc-audience-resolve-mapper", - "consentRequired": false, - "config": {} - } - ], - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "dddcc3e0-d742-422b-8b5f-84a292ea9d66", - "clientId": "admin-cli", - "name": "${client_admin-cli}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": false, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": true, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "df6f6cd0-a046-492f-84ac-b4fe31909be4", - "clientId": "broker", - "name": "${client_broker}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": true, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "c0af31b9-21aa-4e70-baf3-8d68850c4081", - "clientId": "oidc-client", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "secret": "ss6gE8mODCDfqesQaSG3gwUwZqZt547E", - "redirectUris": [ - "*" - ], - "webOrigins": [ - "+" - ], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - 
"implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "saml.force.post.binding": "false", - "saml.multivalued.roles": "false", - "oauth2.device.authorization.grant.enabled": "false", - "use.jwks.url": "true", - "backchannel.logout.revoke.offline.tokens": "false", - "saml.server.signature.keyinfo.ext": "false", - "use.refresh.tokens": "true", - "jwt.credential.certificate": "MIICpTCCAY0CBgGE8V6o6TANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtvaWRjLWNsaWVudDAeFw0yMjEyMDgxMDUyMDNaFw0zMjEyMDgxMDUzNDNaMBYxFDASBgNVBAMMC29pZGMtY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArUffTl+jXWzyY3T4VVtkiGyNnY+RgyAXUzz+dxT7wUQaYSiNPvmaxnio555pWjR403SRUjVxM8eJYgHK9s43qQWdheXBIHyLKaQfjVsTtSmHgFtPmjk+kweQs6fxUi5CNvtx4RTCaOK5wV8q5q1X7mb8cZ5+gLSx1f/pHtayFXMT75nV04aZKWgPztPz8w+QXUx9cuFY4OIiTdRbdyfr1oOiDtMbxxA22tggB/HSMVkSckT3LSPj7fJKJMPFYi/g1AXxGipX/q8XkmOBrvNePCpH0F/IZbC1vXEsDC6urfoijOdiZgPMobuADmWHPiw2zgCN8qa6QuLFaI+JduXT9QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCEOYRHkH8DnBucb+uN5c9U/fZY+mpglxzZvby7dGBXfVwLN+eP1kGcQPaFi+nshk7FgF4mR5/cmuAPZt+YBbgP0z37D49nB7S6sniwzfhCAAplOT4vmm+MjperTDsWFUGhQZJvN/jxqP2Xccw7N//ReYi7yOlmWhwGyqQyTi0ySbE3BY5eFvUKepekybYi/15XlyF8lwS2jH1MvnJAxAMNVpVUcP4wTnq/dOw5ybrVWF0mPnA8KVzTPuPE5nzZvZ3rkXQeEJTffIToR+T/DH/KTLXcNUtx4nG0ajJ0gM6iVAXGnKlI9Viq/M5Ese+52I6rQmxTsFMn57LNzKgMpWcE", - "oidc.ciba.grant.enabled": "false", - "use.jwks.string": "false", - "backchannel.logout.session.required": "false", - "client_credentials.use_refresh_token": "false", - "require.pushed.authorization.requests": "false", - "saml.client.signature": "false", - "id.token.as.detached.signature": "false", - "saml.assertion.signature": "false", - "saml.encrypt": "false", - "saml.server.signature": "false", - "exclude.session.state.from.auth.response": "false", - "saml.artifact.binding": "false", - "saml_force_name_id_format": "false", - "tls.client.certificate.bound.access.tokens": "false", - "saml.authnstatement": "false", - "display.on.consent.screen": "false", - "saml.onetimeuse.condition": "false" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": true, - "nodeReRegistrationTimeout": -1, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "clientId": "realm-management", - "name": "${client_realm-management}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": true, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "3747f98f-efbb-49ef-8238-a349bf5ab409", - "clientId": "security-admin-console", - "name": "${client_security-admin-console}", - "rootUrl": "${authAdminUrl}", - "baseUrl": 
"/admin/oidc-realm/console/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/admin/oidc-realm/console/*" - ], - "webOrigins": [ - "+" - ], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "pkce.code.challenge.method": "S256" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "protocolMappers": [ - { - "id": "2fbdf6c9-ee69-4edc-b780-ec62aecfc519", - "name": "locale", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "locale", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "locale", - "jsonType.label": "String" - } - } - ], - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - } - ], - "clientScopes": [ - { - "id": "f76f507d-7d1c-495b-9504-47830b3834f1", - "name": "phone", - "description": "OpenID Connect built-in scope: phone", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${phoneScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "be849ec8-1747-4efb-bc00-beeaf44f11c8", - "name": "phone number verified", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "phoneNumberVerified", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "phone_number_verified", - "jsonType.label": "boolean" - } - }, - { - "id": "8e8600ec-4290-435d-b109-9f0547cb4a1d", - "name": "phone number", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "phoneNumber", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "phone_number", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "54b87197-5309-4b2c-8ad9-f561a0fc178a", - "name": "role_list", - "description": "SAML role list", - "protocol": "saml", - "attributes": { - "consent.screen.text": "${samlRoleListScopeConsentText}", - "display.on.consent.screen": "true" - }, - "protocolMappers": [ - { - "id": "5fd831af-19a5-4a9c-b44f-2a806fae011c", - "name": "role list", - "protocol": "saml", - "protocolMapper": "saml-role-list-mapper", - "consentRequired": false, - "config": { - "single": "false", - "attribute.nameformat": "Basic", - "attribute.name": "Role" - } - } - ] - }, - { - "id": "2f85470d-8cb7-4f07-8602-47342d68af86", - "name": "web-origins", - "description": "OpenID Connect scope for add allowed web origins to the access token", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "false", - "display.on.consent.screen": "false", - "consent.screen.text": "" - }, - "protocolMappers": [ - { - "id": "c5d2aafc-f72d-4d7b-9d88-cd759f0e045e", - "name": "allowed web origins", - "protocol": "openid-connect", - 
"protocolMapper": "oidc-allowed-origins-mapper", - "consentRequired": false, - "config": {} - } - ] - }, - { - "id": "528face9-229a-4adf-98d8-68b1a22e880d", - "name": "microprofile-jwt", - "description": "Microprofile - JWT built-in scope", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "false" - }, - "protocolMappers": [ - { - "id": "89240a7c-10f3-4e09-9d6b-41955b86c58d", - "name": "groups", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-realm-role-mapper", - "consentRequired": false, - "config": { - "multivalued": "true", - "userinfo.token.claim": "true", - "user.attribute": "foo", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "groups", - "jsonType.label": "String" - } - }, - { - "id": "15b6db72-4870-480e-a675-87f87df5f8a5", - "name": "upn", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "username", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "upn", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "cdd11477-b02b-4886-bc6d-cf4b728ebc0e", - "name": "email", - "description": "OpenID Connect built-in scope: email", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${emailScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "627b9f4f-23d6-4480-adf4-264faf58de33", - "name": "email verified", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "emailVerified", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "email_verified", - "jsonType.label": "boolean" - } - }, - { - "id": "6a2adf2e-db2d-4ebe-8d48-f658f9b4a5ca", - "name": "email", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "email", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "email", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "8f830142-b3f1-40f0-82e2-ceed68857a40", - "name": "roles", - "description": "OpenID Connect scope for add user roles to the access token", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "false", - "display.on.consent.screen": "true", - "consent.screen.text": "${rolesScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "28a96dc6-c4dc-4aae-b316-28b56dccd077", - "name": "audience resolve", - "protocol": "openid-connect", - "protocolMapper": "oidc-audience-resolve-mapper", - "consentRequired": false, - "config": {} - }, - { - "id": "3e81050f-540e-4f3d-9abf-86406e484f76", - "name": "realm roles", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-realm-role-mapper", - "consentRequired": false, - "config": { - "user.attribute": "foo", - "access.token.claim": "true", - "claim.name": "realm_access.roles", - "jsonType.label": "String", - "multivalued": "true" - } - }, - { - "id": "13afa1f4-3fac-4c90-a9b4-e84e682f46e9", - "name": "client roles", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-client-role-mapper", - "consentRequired": false, - "config": { - "user.attribute": "foo", - "access.token.claim": "true", - 
"claim.name": "resource_access.${client_id}.roles", - "jsonType.label": "String", - "multivalued": "true" - } - } - ] - }, - { - "id": "3beac2fc-e947-408f-8422-ca9a1e66a258", - "name": "address", - "description": "OpenID Connect built-in scope: address", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${addressScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "12911891-db5c-4a35-80fa-555c5eda7e68", - "name": "address", - "protocol": "openid-connect", - "protocolMapper": "oidc-address-mapper", - "consentRequired": false, - "config": { - "user.attribute.formatted": "formatted", - "user.attribute.country": "country", - "user.attribute.postal_code": "postal_code", - "userinfo.token.claim": "true", - "user.attribute.street": "street", - "id.token.claim": "true", - "user.attribute.region": "region", - "access.token.claim": "true", - "user.attribute.locality": "locality" - } - } - ] - }, - { - "id": "8a29297a-e6f6-41ae-b25d-8a14236de535", - "name": "offline_access", - "description": "OpenID Connect built-in scope: offline_access", - "protocol": "openid-connect", - "attributes": { - "consent.screen.text": "${offlineAccessScopeConsentText}", - "display.on.consent.screen": "true" - } - }, - { - "id": "ce1622c5-701f-4e3e-9d2d-8dae0f07a295", - "name": "profile", - "description": "OpenID Connect built-in scope: profile", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${profileScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "98cc62b8-250a-4087-92da-bb0f0931e675", - "name": "full name", - "protocol": "openid-connect", - "protocolMapper": "oidc-full-name-mapper", - "consentRequired": false, - "config": { - "id.token.claim": "true", - "access.token.claim": "true", - "userinfo.token.claim": "true" - } - }, - { - "id": "b99c8c44-4cc9-4c87-a5a1-c14e64d472ae", - "name": "given name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "firstName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "given_name", - "jsonType.label": "String" - } - }, - { - "id": "903d5932-bdec-42bc-a53c-3cce93deaa1c", - "name": "zoneinfo", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "zoneinfo", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "zoneinfo", - "jsonType.label": "String" - } - }, - { - "id": "ccbdc095-28f7-4769-8261-2e32c7b6fab0", - "name": "picture", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "picture", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "picture", - "jsonType.label": "String" - } - }, - { - "id": "22a4a38c-f755-44f3-b847-803c7fb3cef5", - "name": "birthdate", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "birthdate", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "birthdate", - "jsonType.label": "String" - } - }, - { - "id": 
"78726920-b4e2-4ed2-b9e0-df38a7f82376", - "name": "updated at", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "updatedAt", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "updated_at", - "jsonType.label": "String" - } - }, - { - "id": "c64c6eb8-5cbe-4092-bf2c-dd02b8c0e0e8", - "name": "family name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "lastName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "family_name", - "jsonType.label": "String" - } - }, - { - "id": "306784d8-8da1-48d8-92a3-dccfff83bcaf", - "name": "middle name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "middleName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "middle_name", - "jsonType.label": "String" - } - }, - { - "id": "0ff127fa-774e-43a8-a1fc-47ea3f307aa1", - "name": "website", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "website", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "website", - "jsonType.label": "String" - } - }, - { - "id": "8989c6f8-25c5-4d02-aa06-25b3b77fc227", - "name": "profile", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "profile", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "profile", - "jsonType.label": "String" - } - }, - { - "id": "3b67000c-9cbf-43ee-9e05-26f560871897", - "name": "gender", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "gender", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "gender", - "jsonType.label": "String" - } - }, - { - "id": "c28b04de-2770-423e-9b9a-b3321d7300e2", - "name": "nickname", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "nickname", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "nickname", - "jsonType.label": "String" - } - }, - { - "id": "fd791ed4-d4ab-4df9-81b4-c69a3134bcab", - "name": "username", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "username", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "preferred_username", - "jsonType.label": "String" - } - }, - { - "id": "c7378ce5-3673-47b2-9ebc-92c772bebf9f", - "name": "locale", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "locale", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "locale", - "jsonType.label": "String" - } - } - ] 
- } - ], - "defaultDefaultClientScopes": [ - "web-origins", - "role_list", - "roles", - "email", - "profile" - ], - "defaultOptionalClientScopes": [ - "address", - "microprofile-jwt", - "offline_access", - "phone" - ], - "browserSecurityHeaders": { - "contentSecurityPolicyReportOnly": "", - "xContentTypeOptions": "nosniff", - "xRobotsTag": "none", - "xFrameOptions": "SAMEORIGIN", - "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", - "xXSSProtection": "1; mode=block", - "strictTransportSecurity": "max-age=31536000; includeSubDomains" - }, - "smtpServer": {}, - "eventsEnabled": false, - "eventsListeners": [ - "jboss-logging" - ], - "enabledEventTypes": [], - "adminEventsEnabled": false, - "adminEventsDetailsEnabled": false, - "identityProviders": [], - "identityProviderMappers": [], - "components": { - "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ - { - "id": "8e2d0c22-0627-4115-9f14-4225244333d9", - "name": "Trusted Hosts", - "providerId": "trusted-hosts", - "subType": "anonymous", - "subComponents": {}, - "config": { - "host-sending-registration-request-must-match": [ - "true" - ], - "client-uris-must-match": [ - "true" - ] - } - }, - { - "id": "45bdde87-a364-4d66-a12e-1a4fd42c85fb", - "name": "Full Scope Disabled", - "providerId": "scope", - "subType": "anonymous", - "subComponents": {}, - "config": {} - }, - { - "id": "7b7d3215-68d2-41db-bc0f-db0a45934a84", - "name": "Allowed Client Scopes", - "providerId": "allowed-client-templates", - "subType": "anonymous", - "subComponents": {}, - "config": { - "allow-default-scopes": [ - "true" - ] - } - }, - { - "id": "e067781a-6058-4f2b-9408-3390e9854cf8", - "name": "Consent Required", - "providerId": "consent-required", - "subType": "anonymous", - "subComponents": {}, - "config": {} - }, - { - "id": "296be954-8084-45c8-b6f3-94d53f7341f6", - "name": "Allowed Protocol Mapper Types", - "providerId": "allowed-protocol-mappers", - "subType": "anonymous", - "subComponents": {}, - "config": { - "allowed-protocol-mapper-types": [ - "saml-role-list-mapper", - "saml-user-property-mapper", - "oidc-usermodel-attribute-mapper", - "oidc-address-mapper", - "oidc-sha256-pairwise-sub-mapper", - "saml-user-attribute-mapper", - "oidc-usermodel-property-mapper", - "oidc-full-name-mapper" - ] - } - }, - { - "id": "b9a2a484-aee1-4633-aa37-a9ab2b74a239", - "name": "Allowed Client Scopes", - "providerId": "allowed-client-templates", - "subType": "authenticated", - "subComponents": {}, - "config": { - "allow-default-scopes": [ - "true" - ] - } - }, - { - "id": "016e4914-a32c-40fa-8aab-3eb25a411df5", - "name": "Max Clients Limit", - "providerId": "max-clients", - "subType": "anonymous", - "subComponents": {}, - "config": { - "max-clients": [ - "200" - ] - } - }, - { - "id": "a4fb2fa3-93b8-4497-8047-424f70f298c7", - "name": "Allowed Protocol Mapper Types", - "providerId": "allowed-protocol-mappers", - "subType": "authenticated", - "subComponents": {}, - "config": { - "allowed-protocol-mapper-types": [ - "oidc-sha256-pairwise-sub-mapper", - "oidc-full-name-mapper", - "saml-user-property-mapper", - "saml-role-list-mapper", - "oidc-usermodel-attribute-mapper", - "oidc-address-mapper", - "oidc-usermodel-property-mapper", - "saml-user-attribute-mapper" - ] - } - } - ], - "org.keycloak.keys.KeyProvider": [ - { - "id": "31b693fa-2b95-47a6-96a1-dfff868ca1df", - "name": "rsa-enc-generated", - "providerId": "rsa-enc-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ], - "algorithm": [ - 
"RSA-OAEP" - ] - } - }, - { - "id": "f1e63d09-45a0-4382-8346-0408ee906649", - "name": "hmac-generated", - "providerId": "hmac-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ], - "algorithm": [ - "HS256" - ] - } - }, - { - "id": "99084d92-06f5-4787-b932-a40b5377f3cb", - "name": "rsa-generated", - "providerId": "rsa-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ] - } - }, - { - "id": "9887f1bf-b4f7-4646-9919-a9dbde13ce74", - "name": "aes-generated", - "providerId": "aes-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ] - } - } - ] - }, - "internationalizationEnabled": false, - "supportedLocales": [], - "authenticationFlows": [ - { - "id": "a7f91199-178d-4399-8319-5063ffcc37b0", - "alias": "Account verification options", - "description": "Method with which to verity the existing account", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-email-verification", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 20, - "flowAlias": "Verify Existing Account by Re-authentication", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "602533e3-f7a1-4e25-9a12-f3080eeccec3", - "alias": "Authentication Options", - "description": "Authentication options.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "basic-auth", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "basic-auth-otp", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-spnego", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "ba7bcdfd-05c6-4da6-827b-24e3513bddbe", - "alias": "Browser - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-otp-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "d0f62327-ef2f-4561-8b5a-1f61faecdac0", - "alias": "Direct Grant - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "direct-grant-validate-otp", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "f10b85d0-26ee-4648-b81b-80213b066d76", - 
"alias": "First broker login - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-otp-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "d6af4ac0-f6bc-4197-bf01-6e2c321ecaad", - "alias": "Handle Existing Account", - "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-confirm-link", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "Account verification options", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "501ab743-2e2f-427d-820f-14deed111b08", - "alias": "Reset - Conditional OTP", - "description": "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-otp", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "e02c3a63-a09d-4dde-9f6c-22c95eef8534", - "alias": "User creation or linking", - "description": "Flow for the existing/non-existing user alternatives", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticatorConfig": "create unique user config", - "authenticator": "idp-create-user-if-unique", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 20, - "flowAlias": "Handle Existing Account", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "c348906d-6266-4e68-937e-8f3d15c66524", - "alias": "Verify Existing Account by Re-authentication", - "description": "Reauthentication of existing account", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-username-password-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 20, - "flowAlias": "First broker login - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "cf6ba166-43d5-4687-95c4-0a184ca08885", - "alias": "browser", - "description": "browser based authentication", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - 
"authenticationExecutions": [ - { - "authenticator": "auth-cookie", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-spnego", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "identity-provider-redirector", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 25, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 30, - "flowAlias": "forms", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "87cb4f25-9275-4617-9e95-63adf1ce3ece", - "alias": "clients", - "description": "Base authentication for clients", - "providerId": "client-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "client-secret", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-jwt", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-secret-jwt", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-x509", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 40, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "e75b99c5-c566-4009-b0ba-c73716bed254", - "alias": "direct grant", - "description": "OpenID Connect Resource Owner Grant", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "direct-grant-validate-username", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "direct-grant-validate-password", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 30, - "flowAlias": "Direct Grant - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "8a97380c-0f70-45cb-a7b0-780eb70453ba", - "alias": "docker auth", - "description": "Used by Docker clients to authenticate against the IDP", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "docker-http-basic-authenticator", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "131e0aad-5422-4504-bafc-96be2fa44c34", - "alias": "first broker login", - "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticatorConfig": "review profile config", - "authenticator": "idp-review-profile", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - 
"autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "User creation or linking", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "e7d4b793-b3c2-4ec3-a2b1-04f7217e8f46", - "alias": "forms", - "description": "Username, password, otp and other auth forms.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "auth-username-password-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 20, - "flowAlias": "Browser - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "f59a7688-61a1-4ac9-a13a-03f92e022add", - "alias": "http challenge", - "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "no-cookie-redirect", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "Authentication Options", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "80a7b0f5-abb3-4780-be58-4ed1dc3e50fa", - "alias": "registration", - "description": "registration flow", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "registration-page-form", - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 10, - "flowAlias": "registration form", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "f18231cf-b803-493b-9dd6-ee8fa602c861", - "alias": "registration form", - "description": "registration form", - "providerId": "form-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "registration-user-creation", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-profile-action", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 40, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-password-action", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 50, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-recaptcha-action", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 60, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "34ccfce6-1488-4db3-b90e-d98e8d8b2ae6", - "alias": "reset credentials", - "description": "Reset credentials for a user if they forgot their password or something", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "reset-credentials-choose-user", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-credential-email", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 
20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-password", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 40, - "flowAlias": "Reset - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "4468100c-fa83-4c16-8970-d53cb592f93a", - "alias": "saml ecp", - "description": "SAML ECP Profile Authentication Flow", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "http-basic-authenticator", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - } - ], - "authenticatorConfig": [ - { - "id": "c3bb087e-7fe9-4f13-b1bd-c2d7d1320054", - "alias": "create unique user config", - "config": { - "require.password.update.after.registration": "false" - } - }, - { - "id": "09820d9d-3c12-45f3-bc62-97b53f8a7efe", - "alias": "review profile config", - "config": { - "update.profile.on.first.login": "missing" - } - } - ], - "requiredActions": [ - { - "alias": "CONFIGURE_TOTP", - "name": "Configure OTP", - "providerId": "CONFIGURE_TOTP", - "enabled": true, - "defaultAction": false, - "priority": 10, - "config": {} - }, - { - "alias": "terms_and_conditions", - "name": "Terms and Conditions", - "providerId": "terms_and_conditions", - "enabled": false, - "defaultAction": false, - "priority": 20, - "config": {} - }, - { - "alias": "UPDATE_PASSWORD", - "name": "Update Password", - "providerId": "UPDATE_PASSWORD", - "enabled": true, - "defaultAction": false, - "priority": 30, - "config": {} - }, - { - "alias": "UPDATE_PROFILE", - "name": "Update Profile", - "providerId": "UPDATE_PROFILE", - "enabled": true, - "defaultAction": false, - "priority": 40, - "config": {} - }, - { - "alias": "VERIFY_EMAIL", - "name": "Verify Email", - "providerId": "VERIFY_EMAIL", - "enabled": true, - "defaultAction": false, - "priority": 50, - "config": {} - }, - { - "alias": "delete_account", - "name": "Delete Account", - "providerId": "delete_account", - "enabled": false, - "defaultAction": false, - "priority": 60, - "config": {} - }, - { - "alias": "update_user_locale", - "name": "Update User Locale", - "providerId": "update_user_locale", - "enabled": true, - "defaultAction": false, - "priority": 1000, - "config": {} - } - ], - "browserFlow": "browser", - "registrationFlow": "registration", - "directGrantFlow": "direct grant", - "resetCredentialsFlow": "reset credentials", - "clientAuthenticationFlow": "clients", - "dockerAuthenticationFlow": "docker auth", - "attributes": { - "cibaBackchannelTokenDeliveryMode": "poll", - "cibaExpiresIn": "120", - "cibaAuthRequestedUserHint": "login_hint", - "oauth2DeviceCodeLifespan": "600", - "clientOfflineSessionMaxLifespan": "0", - "oauth2DevicePollingInterval": "5", - "clientSessionIdleTimeout": "0", - "parRequestUriLifespan": "60", - "clientSessionMaxLifespan": "0", - "clientOfflineSessionIdleTimeout": "0", - "cibaInterval": "5" - }, - "keycloakVersion": "16.1.1", - "userManagedAccessAllowed": false, - "clientProfiles": { - "profiles": [] - }, - "clientPolicies": { - "policies": [] - } -} diff --git a/conf/keycloak/run-keycloak.sh b/conf/keycloak/run-keycloak.sh index effb37f91b8..ddc5108bee4 100755 --- a/conf/keycloak/run-keycloak.sh +++ b/conf/keycloak/run-keycloak.sh @@ -1,6 +1,6 @@ 
#!/usr/bin/env bash -DOCKER_IMAGE="jboss/keycloak:16.1.1" +DOCKER_IMAGE="quay.io/keycloak/keycloak:21.0" KEYCLOAK_USER="kcadmin" KEYCLOAK_PASSWORD="kcpassword" KEYCLOAK_PORT=8090 @@ -11,7 +11,7 @@ if [ ! "$(docker ps -q -f name=^/keycloak$)" ]; then docker start keycloak echo "INFO - Keycloak container restarted" else - docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/oidc-realm.json -v "$(pwd)"/oidc-realm.json:/tmp/oidc-realm.json $DOCKER_IMAGE + docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/test-realm.json -v "$(pwd)"/test-realm.json:/tmp/test-realm.json $DOCKER_IMAGE echo "INFO - Keycloak container created and running" fi else diff --git a/conf/keycloak/test-realm.json b/conf/keycloak/test-realm.json new file mode 100644 index 00000000000..efe71cc5d29 --- /dev/null +++ b/conf/keycloak/test-realm.json @@ -0,0 +1,1939 @@ +{ + "id" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "realm" : "test", + "displayName" : "", + "displayNameHtml" : "", + "notBefore" : 0, + "defaultSignatureAlgorithm" : "RS256", + "revokeRefreshToken" : false, + "refreshTokenMaxReuse" : 0, + "accessTokenLifespan" : 300, + "accessTokenLifespanForImplicitFlow" : 900, + "ssoSessionIdleTimeout" : 1800, + "ssoSessionMaxLifespan" : 36000, + "ssoSessionIdleTimeoutRememberMe" : 0, + "ssoSessionMaxLifespanRememberMe" : 0, + "offlineSessionIdleTimeout" : 2592000, + "offlineSessionMaxLifespanEnabled" : false, + "offlineSessionMaxLifespan" : 5184000, + "clientSessionIdleTimeout" : 0, + "clientSessionMaxLifespan" : 0, + "clientOfflineSessionIdleTimeout" : 0, + "clientOfflineSessionMaxLifespan" : 0, + "accessCodeLifespan" : 60, + "accessCodeLifespanUserAction" : 300, + "accessCodeLifespanLogin" : 1800, + "actionTokenGeneratedByAdminLifespan" : 43200, + "actionTokenGeneratedByUserLifespan" : 300, + "oauth2DeviceCodeLifespan" : 600, + "oauth2DevicePollingInterval" : 5, + "enabled" : true, + "sslRequired" : "none", + "registrationAllowed" : false, + "registrationEmailAsUsername" : false, + "rememberMe" : false, + "verifyEmail" : false, + "loginWithEmailAllowed" : true, + "duplicateEmailsAllowed" : false, + "resetPasswordAllowed" : false, + "editUsernameAllowed" : false, + "bruteForceProtected" : false, + "permanentLockout" : false, + "maxFailureWaitSeconds" : 900, + "minimumQuickLoginWaitSeconds" : 60, + "waitIncrementSeconds" : 60, + "quickLoginCheckMilliSeconds" : 1000, + "maxDeltaTimeSeconds" : 43200, + "failureFactor" : 30, + "roles" : { + "realm" : [ { + "id" : "075daee1-5ab2-44b5-adbf-fa49a3da8305", + "name" : "uma_authorization", + "description" : "${role_uma_authorization}", + "composite" : false, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + }, { + "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name" : "default-roles-test", + "description" : "${role_default-roles}", + "composite" : true, + "composites" : { + "realm" : [ "offline_access", "uma_authorization" ], + "client" : { + "account" : [ "view-profile", "manage-account" ] + } + }, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + }, { + "id" : "e6d31555-6be6-4dee-bc6a-40a53108e4c2", + "name" : "offline_access", + "description" : "${role_offline-access}", + "composite" : false, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + 
"attributes" : { } + } ], + "client" : { + "realm-management" : [ { + "id" : "1955bd12-5f86-4a74-b130-d68a8ef6f0ee", + "name" : "impersonation", + "description" : "${role_impersonation}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "1109c350-9ab1-426c-9876-ef67d4310f35", + "name" : "view-authorization", + "description" : "${role_view-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "980c3fd3-1ae3-4b8f-9a00-d764c939035f", + "name" : "query-users", + "description" : "${role_query-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "5363e601-0f9d-4633-a8c8-28cb0f859b7b", + "name" : "query-groups", + "description" : "${role_query-groups}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "59aa7992-ad78-48db-868a-25d6e1d7db50", + "name" : "realm-admin", + "description" : "${role_realm-admin}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "impersonation", "view-authorization", "query-users", "query-groups", "manage-clients", "manage-realm", "view-identity-providers", "query-realms", "manage-authorization", "manage-identity-providers", "manage-users", "view-users", "view-realm", "create-client", "view-clients", "manage-events", "query-clients", "view-events" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "112f53c2-897d-4c01-81db-b8dc10c5b995", + "name" : "manage-clients", + "description" : "${role_manage-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "c7f57bbd-ef32-4a64-9888-7b8abd90777a", + "name" : "manage-realm", + "description" : "${role_manage-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "8885dac8-0af3-45af-94ce-eff5e801bb80", + "name" : "view-identity-providers", + "description" : "${role_view-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2673346c-b0ef-4e01-8a90-be03866093af", + "name" : "manage-authorization", + "description" : "${role_manage-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "b7182885-9e57-445f-8dae-17c16eb31b5d", + "name" : "manage-identity-providers", + "description" : "${role_manage-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "ba7bfe0c-cb07-4a47-b92c-b8132b57e181", + "name" : "manage-users", + "description" : "${role_manage-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "13a8f0fc-647d-4bfe-b525-73956898e550", + "name" : "query-realms", + "description" : "${role_query-realms}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : 
"ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842", + "name" : "view-realm", + "description" : "${role_view-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2875da34-006c-4b7f-bfc8-9ae8e46af3a2", + "name" : "view-users", + "description" : "${role_view-users}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-users", "query-groups" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "c8c8f7dc-876b-4263-806f-3329f7cd5fd3", + "name" : "create-client", + "description" : "${role_create-client}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4", + "name" : "view-clients", + "description" : "${role_view-clients}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-clients" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "6fd64c94-d663-4501-ad77-0dcf8887d434", + "name" : "manage-events", + "description" : "${role_manage-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "b321927a-023c-4d2a-99ad-24baf7ff6d83", + "name" : "query-clients", + "description" : "${role_query-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2fc21160-78de-457b-8594-e5c76cde1d5e", + "name" : "view-events", + "description" : "${role_view-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + } ], + "test" : [ ], + "security-admin-console" : [ ], + "admin-cli" : [ ], + "account-console" : [ ], + "broker" : [ { + "id" : "07ee59b5-dca6-48fb-83d4-2994ef02850e", + "name" : "read-token", + "description" : "${role_read-token}", + "composite" : false, + "clientRole" : true, + "containerId" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "attributes" : { } + } ], + "account" : [ { + "id" : "17d2f811-7bdf-4c73-83b4-1037001797b8", + "name" : "view-applications", + "description" : "${role_view-applications}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "d1ff44f9-419e-42fd-98e8-1add1169a972", + "name" : "delete-account", + "description" : "${role_delete-account}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0", + "name" : "manage-account-links", + "description" : "${role_manage-account-links}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55", + "name" : "view-profile", + "description" : "${role_view-profile}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48", + "name" : "manage-consent", + "description" : "${role_manage-consent}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "view-consent" ] + } + }, + 
"clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "782f3b0c-a17b-4a87-988b-1a711401f3b0", + "name" : "manage-account", + "description" : "${role_manage-account}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "manage-account-links" ] + } + }, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "8a3bfe15-66d9-4f3d-83ac-801d682d42b0", + "name" : "view-consent", + "description" : "${role_view-consent}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + } ] + } + }, + "groups" : [ { + "id" : "d46f94c2-3b47-4288-b937-9cf918e54f0a", + "name" : "admins", + "path" : "/admins", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "e992ce15-baac-48a0-8834-06f6fcf6c05b", + "name" : "curators", + "path" : "/curators", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "531cf81d-a700-4336-808f-37a49709b48c", + "name" : "members", + "path" : "/members", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + } ], + "defaultRole" : { + "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name" : "default-roles-test", + "description" : "${role_default-roles}", + "composite" : true, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983" + }, + "requiredCredentials" : [ "password" ], + "otpPolicyType" : "totp", + "otpPolicyAlgorithm" : "HmacSHA1", + "otpPolicyInitialCounter" : 0, + "otpPolicyDigits" : 6, + "otpPolicyLookAheadWindow" : 1, + "otpPolicyPeriod" : 30, + "otpSupportedApplications" : [ "FreeOTP", "Google Authenticator" ], + "webAuthnPolicyRpEntityName" : "keycloak", + "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyRpId" : "", + "webAuthnPolicyAttestationConveyancePreference" : "not specified", + "webAuthnPolicyAuthenticatorAttachment" : "not specified", + "webAuthnPolicyRequireResidentKey" : "not specified", + "webAuthnPolicyUserVerificationRequirement" : "not specified", + "webAuthnPolicyCreateTimeout" : 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyAcceptableAaguids" : [ ], + "webAuthnPolicyPasswordlessRpEntityName" : "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyPasswordlessRpId" : "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference" : "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment" : "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey" : "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement" : "not specified", + "webAuthnPolicyPasswordlessCreateTimeout" : 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ], + "users" : [ { + "id" : "52cddd46-251c-4534-acc8-0580eeafb577", + "createdTimestamp" : 1684736014759, + "username" : "admin", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Admin", + "email" : "dataverse-admin@mailinator.com", + "credentials" : [ { + "id" : "28f1ece7-26fb-40f1-9174-5ffce7b85c0a", + "type" : "password", + "userLabel" : "Set to \"admin\"", + "createdDate" : 1684736057302, + "secretData" : 
"{\"value\":\"ONI7fl6BmooVTUgwN1W3m7hsRjMAYEr2l+Fp5+7IOYw1iIntwvZ3U3W0ZBcCFJ7uhcKqF101+rueM3dZfoshPQ==\",\"salt\":\"Hj7co7zYVei7xwx8EaYP3A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/admins" ] + }, { + "id" : "a3d8e76d-7e7b-42dc-bbd7-4258818a8a1b", + "createdTimestamp" : 1684755806552, + "username" : "affiliate", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Affiliate", + "email" : "dataverse-affiliate@mailinator.com", + "credentials" : [ { + "id" : "31c8eb1e-b2a8-4f86-833b-7c0536cd61a1", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755821743, + "secretData" : "{\"value\":\"T+RQ4nvmjknj7ds8NU7782j6PJ++uCu98zNoDQjIe9IKXah+13q4EcXO9IHmi2BJ7lgT0OIzwIoac4JEQLxhjQ==\",\"salt\":\"fnRmE9WmjAp4tlvGh/bxxQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "e5531496-cfb8-498c-a902-50c98d649e79", + "createdTimestamp" : 1684755721064, + "username" : "curator", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Curator", + "email" : "dataverse-curator@mailinator.com", + "credentials" : [ { + "id" : "664546b4-b936-45cf-a4cf-5e98b743fc7f", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755740776, + "secretData" : "{\"value\":\"AvVqybCNtCBVAdLEeJKresy9tc3c4BBUQvu5uHVQw4IjVagN6FpKGlDEKOrxhzdSM8skEvthOEqJkloPo1w+NQ==\",\"salt\":\"2em2DDRRlNEYsNR3xDqehw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/curators" ] + }, { + "id" : "c0082e7e-a3e9-45e6-95e9-811a34adce9d", + "createdTimestamp" : 1684755585802, + "username" : "user", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "User", + "email" : "dataverse-user@mailinator.com", + "credentials" : [ { + "id" : "00d6d67f-2e30-4da6-a567-bec38a1886a0", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755599597, + "secretData" : "{\"value\":\"z991rnjznAgosi5nX962HjM8/gN5GLJTdrlvi6G9cj8470X2/oZUb4Lka6s8xImgtEloCgWiKqH0EH9G4Y3a5A==\",\"salt\":\"/Uz7w+2IqDo+fQUGqxjVHw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/members" ] + } ], + "scopeMappings" : [ { + "clientScope" : "offline_access", + "roles" : [ "offline_access" ] + } ], + "clientScopeMappings" : { + "account" : [ { + "client" : "account-console", + "roles" : [ "manage-account" ] + } ] + }, + "clients" : [ { + "id" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "clientId" : "account", + "name" : "${client_account}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" 
: "/realms/test/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/realms/test/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "5d99f721-027c-478d-867d-61114e0a8192", + "clientId" : "account-console", + "name" : "${client_account-console}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/test/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/realms/test/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+", + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "e181a0ce-9a04-4468-a38a-aaef9f78f989", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + } ], + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "5eccc178-121e-4d0f-bcb2-04ae3c2e52ed", + "clientId" : "admin-cli", + "name" : "${client_admin-cli}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : false, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "clientId" : "broker", + "name" : "${client_broker}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, 
+ "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "clientId" : "realm-management", + "name" : "${client_realm-management}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "bf7cf550-3875-4f97-9878-b2419a854058", + "clientId" : "security-admin-console", + "name" : "${client_security-admin-console}", + "rootUrl" : "${authAdminUrl}", + "baseUrl" : "/admin/test/console/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/admin/test/console/*" ], + "webOrigins" : [ "+" ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+", + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "ff845e16-e200-4894-ab51-37d8b9f2a445", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + } ], + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "9c27faa8-4b8d-4ad9-9cd1-880032ef06aa", + "clientId" : "test", + "name" : "A Test Client", + "description" : "Use for hacking and testing away a confidential client", + "rootUrl" : "", + "adminUrl" : "", + "baseUrl" : "", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8", + "redirectUris" : [ "*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + 
"directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : true, + "protocol" : "openid-connect", + "attributes" : { + "oidc.ciba.grant.enabled" : "false", + "client.secret.creation.time" : "1684735831", + "backchannel.logout.session.required" : "true", + "display.on.consent.screen" : "false", + "oauth2.device.authorization.grant.enabled" : "false", + "backchannel.logout.revoke.offline.tokens" : "false" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : true, + "nodeReRegistrationTimeout" : -1, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + } ], + "clientScopes" : [ { + "id" : "72f29e57-92fa-437b-828c-2b9d6fe56192", + "name" : "address", + "description" : "OpenID Connect built-in scope: address", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${addressScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "59581aea-70d6-4ee8-bec2-1fea5fc497ae", + "name" : "address", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-address-mapper", + "consentRequired" : false, + "config" : { + "user.attribute.formatted" : "formatted", + "user.attribute.country" : "country", + "user.attribute.postal_code" : "postal_code", + "userinfo.token.claim" : "true", + "user.attribute.street" : "street", + "id.token.claim" : "true", + "user.attribute.region" : "region", + "access.token.claim" : "true", + "user.attribute.locality" : "locality" + } + } ] + }, { + "id" : "f515ec81-3c1b-4d4d-b7a2-e7e8d47b6447", + "name" : "roles", + "description" : "OpenID Connect scope for add user roles to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${rolesScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "26d299a8-69e2-4864-9595-17a5b417fc61", + "name" : "realm roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "realm_access.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + }, { + "id" : "d2998083-a8db-4f4e-9aaa-9cad68d65b97", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + }, { + "id" : "7a4cb2e5-07a0-4c16-a024-71df7ddd6868", + "name" : "client roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-client-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "resource_access.${client_id}.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + } ] + }, { + "id" : "8f1eafef-92d6-434e-b9ec-6edec1fddd0a", + "name" : "offline_access", + "description" : "OpenID Connect built-in scope: offline_access", + "protocol" : "openid-connect", + "attributes" : { + "consent.screen.text" : "${offlineAccessScopeConsentText}", + "display.on.consent.screen" : "true" + } + }, { + "id" : "c03095aa-b656-447a-9767-0763c2ccb070", + "name" : "acr", + "description" : "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol" : 
"openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "948b230c-56d0-4000-937c-841cd395d3f9", + "name" : "acr loa level", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-acr-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true" + } + } ] + }, { + "id" : "cdf35f63-8ec7-41a0-ae12-f05d415818cc", + "name" : "phone", + "description" : "OpenID Connect built-in scope: phone", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${phoneScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "ba4348ff-90b1-4e09-89a8-e5c08b04d3d1", + "name" : "phone number", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumber", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number", + "jsonType.label" : "String" + } + }, { + "id" : "e6cceae5-8392-4348-b302-f610ece6056e", + "name" : "phone number verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumberVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "4318001c-2970-41d3-91b9-e31c08569872", + "name" : "email", + "description" : "OpenID Connect built-in scope: email", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${emailScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "406d02a6-866a-4962-8838-e8c58ada1505", + "name" : "email", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "email", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email", + "jsonType.label" : "String" + } + }, { + "id" : "33baabc1-9bf2-42e4-8b8e-a53c13f0b744", + "name" : "email verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "emailVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "5277a84f-d727-4c64-8432-d513127beee1", + "name" : "profile", + "description" : "OpenID Connect built-in scope: profile", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${profileScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "0a609875-2678-4056-93ef-dd5c03e6059d", + "name" : "given name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "firstName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "given_name", + "jsonType.label" : "String" + } + }, { + "id" : 
"7c510d18-07ee-4b78-8acd-24b777d11b3c", + "name" : "website", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "website", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "website", + "jsonType.label" : "String" + } + }, { + "id" : "0bb6d0ea-195f-49e8-918c-c419a26a661c", + "name" : "username", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "preferred_username", + "jsonType.label" : "String" + } + }, { + "id" : "5f1e644c-1acf-440c-b1a6-b5f65bcebfd9", + "name" : "profile", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "profile", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "profile", + "jsonType.label" : "String" + } + }, { + "id" : "c710bdb2-6cfd-4f60-9c4e-730188fc62f7", + "name" : "family name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "lastName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "family_name", + "jsonType.label" : "String" + } + }, { + "id" : "012d5038-0e13-42ba-9df7-2487c8e2eead", + "name" : "nickname", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "nickname", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "nickname", + "jsonType.label" : "String" + } + }, { + "id" : "21590b19-517d-4b6d-92f6-d4f71238677e", + "name" : "updated at", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "updatedAt", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "updated_at", + "jsonType.label" : "long" + } + }, { + "id" : "e4cddca7-1360-42f3-9854-da6cbe00c71e", + "name" : "birthdate", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "birthdate", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "birthdate", + "jsonType.label" : "String" + } + }, { + "id" : "afee328f-c64c-43e6-80d0-be2721c2ed0e", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + }, { + "id" : "780a1e2c-5b63-46f4-a5bf-dc3fd8ce0cbb", + "name" : "full name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-full-name-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true", + "userinfo.token.claim" : "true" + } + }, { + "id" : 
"aeebffff-f776-427e-83ed-064707ffce57", + "name" : "zoneinfo", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "zoneinfo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "zoneinfo", + "jsonType.label" : "String" + } + }, { + "id" : "b3e840a2-1794-4da1-bf69-31905cbff0d6", + "name" : "middle name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "middleName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "middle_name", + "jsonType.label" : "String" + } + }, { + "id" : "0607e0e4-4f7f-4214-996d-3599772ce1c7", + "name" : "picture", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "picture", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "picture", + "jsonType.label" : "String" + } + }, { + "id" : "426a609b-4e28-4132-af0d-13297b8cb63a", + "name" : "gender", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "gender", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "gender", + "jsonType.label" : "String" + } + } ] + }, { + "id" : "a1ebde82-ce21-438f-a3ad-261d3eeb1c01", + "name" : "role_list", + "description" : "SAML role list", + "protocol" : "saml", + "attributes" : { + "consent.screen.text" : "${samlRoleListScopeConsentText}", + "display.on.consent.screen" : "true" + }, + "protocolMappers" : [ { + "id" : "64653ac7-7ffc-4f7c-a589-03e3b68bbd25", + "name" : "role list", + "protocol" : "saml", + "protocolMapper" : "saml-role-list-mapper", + "consentRequired" : false, + "config" : { + "single" : "false", + "attribute.nameformat" : "Basic", + "attribute.name" : "Role" + } + } ] + }, { + "id" : "aeb5b852-dfec-4e67-9d9e-104abe9b3bf2", + "name" : "web-origins", + "description" : "OpenID Connect scope for add allowed web origins to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false", + "consent.screen.text" : "" + }, + "protocolMappers" : [ { + "id" : "e2fa8437-a0f1-46fc-af9c-c40fc09cd6a1", + "name" : "allowed web origins", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-allowed-origins-mapper", + "consentRequired" : false, + "config" : { } + } ] + }, { + "id" : "4fecd0d7-d4ad-457e-90f2-c7202bf01ff5", + "name" : "microprofile-jwt", + "description" : "Microprofile - JWT built-in scope", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "a9536634-a9f6-4ed5-a8e7-8379d3b002ca", + "name" : "upn", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "upn", + "jsonType.label" : "String" + } + }, { + "id" : "2ce1a702-9458-4926-9b8a-f82c07215755", + "name" : "groups", + "protocol" : 
"openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "multivalued" : "true", + "user.attribute" : "foo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "groups", + "jsonType.label" : "String" + } + } ] + } ], + "defaultDefaultClientScopes" : [ "role_list", "profile", "email", "roles", "web-origins", "acr" ], + "defaultOptionalClientScopes" : [ "offline_access", "address", "phone", "microprofile-jwt" ], + "browserSecurityHeaders" : { + "contentSecurityPolicyReportOnly" : "", + "xContentTypeOptions" : "nosniff", + "xRobotsTag" : "none", + "xFrameOptions" : "SAMEORIGIN", + "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "xXSSProtection" : "1; mode=block", + "strictTransportSecurity" : "max-age=31536000; includeSubDomains" + }, + "smtpServer" : { }, + "eventsEnabled" : false, + "eventsListeners" : [ "jboss-logging" ], + "enabledEventTypes" : [ ], + "adminEventsEnabled" : false, + "adminEventsDetailsEnabled" : false, + "identityProviders" : [ ], + "identityProviderMappers" : [ ], + "components" : { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy" : [ { + "id" : "8115796f-8f1f-4d6a-88f8-ca2938451260", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "044bd055-714d-478e-aa93-303d2161c427", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper" ] + } + }, { + "id" : "be465734-3b0f-4370-a144-73db756e23f8", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ] + } + }, { + "id" : "42a2f64d-ac9e-4221-9cf6-40ff8c868629", + "name" : "Trusted Hosts", + "providerId" : "trusted-hosts", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "host-sending-registration-request-must-match" : [ "true" ], + "client-uris-must-match" : [ "true" ] + } + }, { + "id" : "7ca08915-6c33-454c-88f2-20e1d6553b26", + "name" : "Max Clients Limit", + "providerId" : "max-clients", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "max-clients" : [ "200" ] + } + }, { + "id" : "f01f2b6f-3f01-4d01-b2f4-70577c6f599c", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "516d7f21-f21a-4690-831e-36ad313093b2", + "name" : "Consent Required", + "providerId" : "consent-required", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { } + }, { + "id" : "c79df6a0-d4d8-4866-b9e6-8ddb5d1bd38e", + "name" : "Full Scope Disabled", + "providerId" : "scope", + "subType" : 
"anonymous", + "subComponents" : { }, + "config" : { } + } ], + "org.keycloak.userprofile.UserProfileProvider" : [ { + "id" : "cf47a21f-c8fb-42f2-9bff-feca967db183", + "providerId" : "declarative-user-profile", + "subComponents" : { }, + "config" : { } + } ], + "org.keycloak.keys.KeyProvider" : [ { + "id" : "6b4a2281-a9e8-43ab-aee7-190ae91b2842", + "name" : "aes-generated", + "providerId" : "aes-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "47b9c2c2-32dc-4317-bd8b-1c4e5bb740ca" ], + "secret" : [ "9VWsVSqbj5zWa8Mq-rRzOw" ], + "priority" : [ "100" ] + } + }, { + "id" : "68e2d2b0-4976-480f-ab76-f84a17686b05", + "name" : "rsa-enc-generated", + "providerId" : "rsa-enc-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ "MIIEpQIBAAKCAQEAwuIcVVJDncorsQcFef4M/J9dsaNNmwEv/+4pCSZuco7IlA9uCfvwjYgfwQlWoCHCc7JFEtUOXhpLNR0SJ9w2eCC9A/0horjLmiVGU5sGACGrAxSgipt399k83mtkPBTikT1BXumPrX51ovdEPVPQSO0hIBwFn4ZDwA9P/00jNzzswyLC2UDdQrwIjm2xWjq1X82d8mL3+Yp8lF9qD1w305+XPiqCC+TUunKsuCQq5sddet+UoCDsFQyxsJi6cWJrryDvQmiDgM2wm68jn6hyzDE76J1az0wKEGqoMEwIy0juqZCyAqgsm3xA+zHpTcI3EyTwDGpMvWNJp8AWqXPNaQIDAQABAoIBAAethL1+n/6WpUBEaoHcVrq5/2+vo0+dfTyVZNKRFqtG0WOWPzOflFd1HZV7YVPuJI+uPi8ANmsnbh9YcaYg9JiTZ0hMZ++giBf0ID2hZxv995NyXnf7fkoFKghevYG+9mVPtHRmxKlKiPFWfHQjP1ACNKAD2UZdcdbzxicaIkPV/hP996mZA3xaaudggAJq7u/W67H2Q6ofGqW4TI5241d8T+6yobbvXRe4n8FKz4eK2aZv+N+zwh5JDMsJ8050+lCDsyoyakEPf+4veuPkewx4FemAiotDNcmoUQSDL26wLw8kk1uZ9JY0M88OL5pMyBuxTqy0F6BWBltq80mlefECgYEA4vZ8Agu2plXOzWASn0dyhCel3QoeUqNY8D8A+0vK9qWxUE9jMG13jAZmsL2I38SuwRN1DhJezbrn4QTuxTukxgSjLDv/pBp9UnXnCz/fg4yPTYsZ0zHqTMbwvdtfIzBHTCYyIJ+unxVYoenC0XZKSQXA3NN2zNqYpLhjStWdEZECgYEA29DznJxpDZsRUieRxFgZ+eRCjbQ9Q2A46preqMo1KOZ6bt9avxG3uM7pUC+UOeIizeRzxPSJ2SyptYPzdaNwKN3Lq+RhjHe1zYLngXb0CIQaRwNHqePxXF1sg0dTbmcxf+Co7yPG+Nd5nrQq9SQHC3tLTyL6x3VU/yAfMQqUklkCgYEAyVl8iGAV6RkE/4R04OOEv6Ng7WkVn6CUvYZXe5kw9YHnfWUAjS0AOrRPFAsBy+r0UgvN8+7uNjvTjPhQT5/rPVVN4WdVEyQA/E/m6j7/LvhbBaMbBRcqUnTHjNd6XoBtMCxOmkyvoShR2krE8AiuPHwjLoVXxsNDWhbO18wMrVECgYEAlmkICOXNzI2K8Jg62gse2yshjy0BrpSs3XtTWFPkxDPRGwSiZ5OMD10lsMSdvG3MOu5TeTWLDZvOFHJRqPFI0e3Sa7A+P4u6TwF/v8rRePJLuMO5ybo7cWRL2Bh6MlVSPZpQfjIQ+D0Y70uBCXS5jVW0VlYtG0Zh/qDQNxJyTyECgYEAuRINlZ0ag+1QTITapSatbFWd/KquGLpMjZyF4k5gVHs+4zHnnTi1YIDUInp1FJBqKD27z2byy7KFgbMBZQmsDs8i4fgzQrJHe3D4WFFHCjiClbeReejbas9bOnqhSQCiIy1Ck8vMAriAtctSA/g/qq6dQApSgcWaKvTVL2Ywa7E=" ], + "keyUse" : [ "ENC" ], + "certificate" : [ "MIIClzCCAX8CBgGIQhOIijANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLiHFVSQ53KK7EHBXn+DPyfXbGjTZsBL//uKQkmbnKOyJQPbgn78I2IH8EJVqAhwnOyRRLVDl4aSzUdEifcNnggvQP9IaK4y5olRlObBgAhqwMUoIqbd/fZPN5rZDwU4pE9QV7pj61+daL3RD1T0EjtISAcBZ+GQ8APT/9NIzc87MMiwtlA3UK8CI5tsVo6tV/NnfJi9/mKfJRfag9cN9Oflz4qggvk1LpyrLgkKubHXXrflKAg7BUMsbCYunFia68g70Jog4DNsJuvI5+ocswxO+idWs9MChBqqDBMCMtI7qmQsgKoLJt8QPsx6U3CNxMk8AxqTL1jSafAFqlzzWkCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAIEIfjqOr2m+8s2RR8VW/nBgOgu9HtPRda4qNhGbgBkZ8NDy7TwHqlHo1ujKW5RO438pRyLJmOibWN4a/rkUsSjin6vgy4l8KpQy+7a4cQCQHyl34TmPjbtiw1jKgiOjzRQY54NVwIJNMIMc1ZyQo4u0U30/FxgUv6akXfS5O1ePD+5xKOOC/Af9AletjhQMPwVxXDwFqfQf/p+SM4Pyn4L633MESfDrH8v9FjJd0lV5ZlEI4hpPtnbi9U+CInqCy3VDNlZjsXswaDRujjg3LERfOMvCgj+Dck3FzWG7EiCwXWNEPvdMzv4w7M6KXuiPPQkST8DUWjgkjUCeLBzT3yw==" ], + "priority" : [ "100" ], + "algorithm" : [ "RSA-OAEP" ] + } + }, { + "id" : "728769a3-99a4-4cca-959d-28181dfee7e8", + "name" : "rsa-generated", + "providerId" : "rsa-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ 
"MIIEowIBAAKCAQEAxIszQCv8bX3sKXJVtuLJV6cH/uhkzxcTEIcDe7y2Y2SFM0x2nF6wRLk8QkvIrRmelilegUIJttqZxLXMpxwUJGizehHQMrOCzNoGBZdVanoK7nNa5+FOYtlvL4GxNfwzS36sp3PnKQiGv5Q7RGuPthjLFfqTmYx/7GTDJC4vLEW5S01Vy/Xc9FE4FsT0hnm91lRWjppc9893M5QUy/TPu8udIuNV87Ko5yiIxQqcPiAQXJaN4CyGaDcYhhzzHdxVptIk2FvtxhpmNxrbtmBCx/o9/rBDQNTis8Ex6ItWC2PvC17UPvyOcZ4Fv/qO0L6JZ0mrpH95CeDU1kEP+KKZrwIDAQABAoIBAGGl6SYiVG1PyTQEXqqY/UCjt3jBnEg5ZhrpgWUKKrGyAO2uOSXSc5AJWfN0NHUwC9b+IbplhW8IJ6qQSmfiLu2x6S2mSQLPphZB4gkIGYNntCOpQ0p+aZP6BGAddt5j+VYyTvR5RKlh15S6QEHrkMB/i/LVBl0c7XeUzlEc8wnyj8DGvlmpcQzIcbWfqEZ/FciDdKGNN0M4V/r1uQiOUVZ69SWDBBwu41YwF7PYUsX83q8zn0nBeMqz0ggSf33lW4w31fox9c7EjIF01gPArE5uT+d+AwjVKHpd08LWGR9W9NSXVOPUKkzOM+PyvKGvzjMnlrm/feqowKQbL2q/GP0CgYEA/EsrvUojkFIWxHc19KJdJvqlYgLeWq6P/J7UmHgpl+S3nG6b9HH4/aM/ICDa5hxd5bmP5p2V3EuZWnyb6/QB5eipC7Ss3oM7XeS/PwvTp6NTC1fypx2zHKse3iuLeCGneRxiw15mB02ArJ/qJw/VSQK2J7RiR4+b6HYpdzQnIysCgYEAx25dTQqskQqsx/orJzuUqfNv/C0W4vqfz1eL3akFrdK+YqghXKFsDmh61JpTrTKnRLAdQeyOrhKwbNsdxSEEaeeLayKLVlimoFXGd/LZb5LQiwFcrvTzhnB+FLmFgqTnuLkpfY1woHEwSW9TpJewjbT9S6g0L2uh223nVXuLMY0CgYEA3pMOlmMGtvbEoTSuRBDNb2rmZm4zbfrcijgxRAWWZCtiFL68FU5LJLBVK2nw09sot1cabZCOuhdzxhFymRneZs73+5y8eV17DV2VnvA3HIiI5dQD/YzFDECm7ceqtiOylLUHKGZqSn0ETMaTkzxzpIKg4qxPm+RE3jMIZ+J5uJsCgYBk2iUIrtsxxgo2Xwavomu9vkPlbQ/j3QYwHn+2qqEalDZ/QbMNWvyAFMn49cpXDgSUsdM54V0OHpllkzFs3ROUUumoViHMmqw47OefBQp8Z+xaP2gVef4lAIJiDKe9t5MPUWPwADTyjgrzN/8+fw9juiFVv0wUpwOFKgEQs5diiQKBgC6RpZESc5Nl4nHrDvIl5n/zYED6BaXoLl15NhcoBudt5SIRO/RpvBW69A7aE/UK6p7WXjq4mP1ssIWz4KgATCoXUgYvn0a7Ql79r/CMce6/FvcuweED6u6bD0kdXuYhe8fR9IPmLfnnb4Cx3JOJeRZbiBSP5HOZJ7nsKibxcgPm" ], + "keyUse" : [ "SIG" ], + "certificate" : [ "MIIClzCCAX8CBgGIQhOHjjANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMSLM0Ar/G197ClyVbbiyVenB/7oZM8XExCHA3u8tmNkhTNMdpxesES5PEJLyK0ZnpYpXoFCCbbamcS1zKccFCRos3oR0DKzgszaBgWXVWp6Cu5zWufhTmLZby+BsTX8M0t+rKdz5ykIhr+UO0Rrj7YYyxX6k5mMf+xkwyQuLyxFuUtNVcv13PRROBbE9IZ5vdZUVo6aXPfPdzOUFMv0z7vLnSLjVfOyqOcoiMUKnD4gEFyWjeAshmg3GIYc8x3cVabSJNhb7cYaZjca27ZgQsf6Pf6wQ0DU4rPBMeiLVgtj7wte1D78jnGeBb/6jtC+iWdJq6R/eQng1NZBD/iima8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAe0Bo1UpGfpOlJiVhp0XWExm8bdxFgXOU2M5XeZBsWAqBehvJkzn+tbAtlVNiIiN58XFFpH+xLZ2nJIZR5FHeCD3bYAgK72j5k45HJI95vPyslelfT/m3Np78+1iUa1U1WxN40JaowP1EeTkk5O8Pk4zTQ1Ne1usmKd+SJxI1KWN0kKuVFMmdNRb5kQKWeQvOSlWl7rd4bvHGvVnxgcPC1bshEJKRt+VpaUjpm6CKd8C3Kt7IWfIX4HTVhKZkmLn7qv6aSfwWelwZfLdaXcLXixqzqNuUk/VWbF9JT4iiag9F3mt7xryIkoRp1AEjCA82HqK72F4JCFyOhCiGrMfKJw==" ], + "priority" : [ "100" ] + } + }, { + "id" : "f30af2d2-d042-43b8-bc6d-22f6bab6934c", + "name" : "hmac-generated", + "providerId" : "hmac-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "6f0d9688-e974-42b4-9d84-8d098c51007c" ], + "secret" : [ "8nruwD66Revr9k21e-BHtcyvNzAMFOsstxSAB0Gdy2qe2qGRm2kYOwsPzrH9ZQSdj2041SraKo6a3SHvCyTBAQ" ], + "priority" : [ "100" ], + "algorithm" : [ "HS256" ] + } + } ] + }, + "internationalizationEnabled" : false, + "supportedLocales" : [ ], + "authenticationFlows" : [ { + "id" : "94c65ba1-ba50-4be2-94c4-de656145eb67", + "alias" : "Account verification options", + "description" : "Method with which to verity the existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-email-verification", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + 
"requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Verify Existing Account by Re-authentication", + "userSetupAllowed" : false + } ] + }, { + "id" : "3b706ddf-c4b6-498a-803c-772878bc9bc3", + "alias" : "Authentication Options", + "description" : "Authentication options.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "basic-auth", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "basic-auth-otp", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-spnego", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "9ea0b8f6-882c-45ad-9110-78adf5a5d233", + "alias" : "Browser - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-otp-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "99c5ba83-b585-4601-b740-1a26670bf4e9", + "alias" : "Direct Grant - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "direct-grant-validate-otp", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "65b73dec-7dd1-4de8-b542-a023b7104afc", + "alias" : "First broker login - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-otp-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "9a26b76f-da95-43f1-8da3-16c4a0654f07", + "alias" : "Handle Existing Account", + "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-confirm-link", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + 
"requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Account verification options", + "userSetupAllowed" : false + } ] + }, { + "id" : "0a77285e-d7d5-4b6c-aa9a-3eadb5e7e3d3", + "alias" : "Reset - Conditional OTP", + "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-otp", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "cb6c0b3b-2f5f-4493-9d14-6130f8b58dd7", + "alias" : "User creation or linking", + "description" : "Flow for the existing/non-existing user alternatives", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "create unique user config", + "authenticator" : "idp-create-user-if-unique", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Handle Existing Account", + "userSetupAllowed" : false + } ] + }, { + "id" : "0fd3db1b-e93d-4768-82ca-a1498ddc11d0", + "alias" : "Verify Existing Account by Re-authentication", + "description" : "Reauthentication of existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-username-password-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "First broker login - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "86610e70-f9f5-4c11-8a9e-9de1770565fb", + "alias" : "browser", + "description" : "browser based authentication", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-cookie", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-spnego", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "identity-provider-redirector", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 25, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 30, + "autheticatorFlow" : true, + "flowAlias" : "forms", + "userSetupAllowed" : false + } ] + }, { + "id" : "f6aa23dd-8532-4d92-9780-3ea226481e3b", + "alias" : "clients", + "description" : "Base authentication for clients", + "providerId" : "client-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "client-secret", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + 
"priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-jwt", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-secret-jwt", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-x509", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 40, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "4d2caf65-1703-4ddb-8890-70232e91bcd8", + "alias" : "direct grant", + "description" : "OpenID Connect Resource Owner Grant", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "direct-grant-validate-username", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "direct-grant-validate-password", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 30, + "autheticatorFlow" : true, + "flowAlias" : "Direct Grant - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "eaa20c41-5334-4fb4-8c45-fb9cc71f7f74", + "alias" : "docker auth", + "description" : "Used by Docker clients to authenticate against the IDP", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "docker-http-basic-authenticator", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "b9febfb1-f0aa-4590-b782-272a4aa11575", + "alias" : "first broker login", + "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "review profile config", + "authenticator" : "idp-review-profile", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "User creation or linking", + "userSetupAllowed" : false + } ] + }, { + "id" : "03bb6ff4-eccb-4f2f-8953-3769f78c3bf3", + "alias" : "forms", + "description" : "Username, password, otp and other auth forms.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-username-password-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Browser - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "38385189-246b-4ea0-ac05-d49dfe1709da", + "alias" : "http challenge", + "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId" : 
"basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "no-cookie-redirect", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Authentication Options", + "userSetupAllowed" : false + } ] + }, { + "id" : "1022f3c2-0469-41c9-861e-918908f103df", + "alias" : "registration", + "description" : "registration flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-page-form", + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : true, + "flowAlias" : "registration form", + "userSetupAllowed" : false + } ] + }, { + "id" : "00d36c3b-e1dc-41f8-bfd0-5f8c80ea07e8", + "alias" : "registration form", + "description" : "registration form", + "providerId" : "form-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-user-creation", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-profile-action", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 40, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-password-action", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 50, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-recaptcha-action", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 60, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "4374c16e-8c65-4168-94c2-df1ab3f3e6ad", + "alias" : "reset credentials", + "description" : "Reset credentials for a user if they forgot their password or something", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "reset-credentials-choose-user", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-credential-email", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-password", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 40, + "autheticatorFlow" : true, + "flowAlias" : "Reset - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "04d6ed6a-76c9-41fb-9074-bff8a80c2286", + "alias" : "saml ecp", + "description" : "SAML ECP Profile Authentication Flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "http-basic-authenticator", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + } ], + "authenticatorConfig" : [ { + "id" : "e7bad67d-1236-430a-a327-9194f9d1e2b0", + "alias" : "create unique user 
config", + "config" : { + "require.password.update.after.registration" : "false" + } + }, { + "id" : "287b5989-a927-4cf5-8067-74594ce19bc1", + "alias" : "review profile config", + "config" : { + "update.profile.on.first.login" : "missing" + } + } ], + "requiredActions" : [ { + "alias" : "CONFIGURE_TOTP", + "name" : "Configure OTP", + "providerId" : "CONFIGURE_TOTP", + "enabled" : true, + "defaultAction" : false, + "priority" : 10, + "config" : { } + }, { + "alias" : "terms_and_conditions", + "name" : "Terms and Conditions", + "providerId" : "terms_and_conditions", + "enabled" : false, + "defaultAction" : false, + "priority" : 20, + "config" : { } + }, { + "alias" : "UPDATE_PASSWORD", + "name" : "Update Password", + "providerId" : "UPDATE_PASSWORD", + "enabled" : true, + "defaultAction" : false, + "priority" : 30, + "config" : { } + }, { + "alias" : "UPDATE_PROFILE", + "name" : "Update Profile", + "providerId" : "UPDATE_PROFILE", + "enabled" : true, + "defaultAction" : false, + "priority" : 40, + "config" : { } + }, { + "alias" : "VERIFY_EMAIL", + "name" : "Verify Email", + "providerId" : "VERIFY_EMAIL", + "enabled" : true, + "defaultAction" : false, + "priority" : 50, + "config" : { } + }, { + "alias" : "delete_account", + "name" : "Delete Account", + "providerId" : "delete_account", + "enabled" : false, + "defaultAction" : false, + "priority" : 60, + "config" : { } + }, { + "alias" : "webauthn-register", + "name" : "Webauthn Register", + "providerId" : "webauthn-register", + "enabled" : true, + "defaultAction" : false, + "priority" : 70, + "config" : { } + }, { + "alias" : "webauthn-register-passwordless", + "name" : "Webauthn Register Passwordless", + "providerId" : "webauthn-register-passwordless", + "enabled" : true, + "defaultAction" : false, + "priority" : 80, + "config" : { } + }, { + "alias" : "update_user_locale", + "name" : "Update User Locale", + "providerId" : "update_user_locale", + "enabled" : true, + "defaultAction" : false, + "priority" : 1000, + "config" : { } + } ], + "browserFlow" : "browser", + "registrationFlow" : "registration", + "directGrantFlow" : "direct grant", + "resetCredentialsFlow" : "reset credentials", + "clientAuthenticationFlow" : "clients", + "dockerAuthenticationFlow" : "docker auth", + "attributes" : { + "cibaBackchannelTokenDeliveryMode" : "poll", + "cibaAuthRequestedUserHint" : "login_hint", + "oauth2DevicePollingInterval" : "5", + "clientOfflineSessionMaxLifespan" : "0", + "clientSessionIdleTimeout" : "0", + "clientOfflineSessionIdleTimeout" : "0", + "cibaInterval" : "5", + "cibaExpiresIn" : "120", + "oauth2DeviceCodeLifespan" : "600", + "parRequestUriLifespan" : "60", + "clientSessionMaxLifespan" : "0", + "frontendUrl" : "" + }, + "keycloakVersion" : "19.0.3", + "userManagedAccessAllowed" : false, + "clientProfiles" : { + "profiles" : [ ] + }, + "clientPolicies" : { + "policies" : [ ] + } +} \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 30c55661a20..f00be57ea9c 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -100,8 +100,8 @@ services: - /mail:mode=770,size=128M,uid=1000,gid=1000 dev_keycloak: - container_name: "dev_keycloack" - image: 'quay.io/keycloak/keycloak:19.0' + container_name: "dev_keycloak" + image: 'quay.io/keycloak/keycloak:21.0' hostname: keycloak environment: - KEYCLOAK_ADMIN=kcadmin @@ -116,7 +116,7 @@ services: ports: - "8090:8090" volumes: - - './conf/keycloak/oidc-realm.json:/opt/keycloak/data/import/oidc-realm.json' + - 
'./conf/keycloak/test-realm.json:/opt/keycloak/data/import/test-realm.json'
 networks:
 dataverse:

From 947ed780d8a3300c9202e62853a0b810b1039b49 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 16:10:40 +0200
Subject: [PATCH 0110/1092] docs(dev,oidc): update new OIDC test realm
 description

---
 .../source/developers/remote-users.rst | 27 +++++++++++++++----
 1 file changed, 22 insertions(+), 5 deletions(-)

diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst
index d8f90e9257f..a07f104d1c8 100755
--- a/doc/sphinx-guides/source/developers/remote-users.rst
+++ b/doc/sphinx-guides/source/developers/remote-users.rst
@@ -39,7 +39,7 @@ STOP! ``oidc-keycloak-auth-provider.json`` was changed from http://localhost:809

 If you are working on the OpenID Connect (OIDC) user authentication flow, you do not need to connect to a remote provider (as explained in :doc:`/installation/oidc`) to test this feature. Instead, you can use the available configuration that allows you to run a test Keycloak OIDC identity management service locally through a Docker container.

-(Please note! The client secret (``ss6gE8mODCDfqesQaSG3gwUwZqZt547E``) is hard-coded in ``oidc-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.)
+(Please note! The client secret (``94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8``) is hard-coded in ``test-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.)

 You can find this configuration in ``conf/keycloak``. There are two options available in this directory to run a Keycloak container: bash script or docker-compose.
@@ -55,15 +55,32 @@ Now load the configuration defined in ``oidc-keycloak-auth-provider.json`` into

 You should see the new provider, called "OIDC-Keycloak", under "Other options" on the Log In page.

-You should be able to log into Keycloak with the following credentials:
-
-- username: kcuser
-- password: kcpassword
+You should be able to log into Keycloak with one of the following credentials:
+
+.. list-table::
+
+ * - Username
+ - Password
+ - Group
+ * - admin
+ - admin
+ - admins
+ * - curator
+ - curator
+ - curators
+ * - user
+ - user
+ - members
+ * - affiliate
+ - affiliate
+ - \-

 In case you want to stop and remove the Keycloak container, just run the other available bash script:

 ``./rm-keycloak.sh``

+Note: to log in to the Keycloak admin console, use ``kcadmin:kcpassword``.
+
 ----

 Previous: :doc:`unf/index` | Next: :doc:`geospatial`

From 23a11718618fcc15daa3d6ddcfb18f0f49378e9d Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 16:13:47 +0200
Subject: [PATCH 0111/1092] test(oidc): update OIDC integration test with new
 realm

This commit enables using Keycloak 20+ with this test by replacing the
test realm and using the client to retrieve the access token on a side
channel in a Keycloak 20+ compatible way.
(The old one only worked for v19, this one is compatible with all) --- .../OIDCAuthenticationProviderFactoryIT.java | 71 ++++++++++++------- 1 file changed, 47 insertions(+), 24 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index a5aa29cc083..88f70c53948 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -8,6 +8,8 @@ import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser; @@ -17,6 +19,7 @@ import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.OAuth2Constants; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.KeycloakBuilder; import org.mockito.InjectMocks; @@ -32,6 +35,7 @@ import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assumptions.assumeFalse; import static org.junit.jupiter.api.Assumptions.assumeTrue; @@ -47,18 +51,19 @@ @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") class OIDCAuthenticationProviderFactoryIT { - // NOTE: the following values are taken from the realm import file! 
- static final String clientId = "oidc-client";
- static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E";
- static final String realm = "oidc-realm";
- static final String adminUser = "kcuser";
+ static final String clientId = "test";
+ static final String clientSecret = "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8";
+ static final String realm = "test";
+ static final String realmAdminUser = "admin";
+ static final String realmAdminPassword = "admin";
+
+ static final String adminUser = "kcadmin";
 static final String adminPassword = "kcpassword";
- static final String clientIdAdminCli = "admin-cli";

- // The realm JSON resides in conf/keycloak/oidc-realm.json and gets available here via the test resources configured in pom.xml
+ // The realm JSON resides in conf/keycloak/test-realm.json and gets available here via the test resources configured in pom.xml
 @Container
- static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:19.0")
- .withRealmImportFile("keycloak/oidc-realm.json")
+ static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:21.0")
+ .withRealmImportFile("keycloak/test-realm.json")
 .withAdminUsername(adminUser)
 .withAdminPassword(adminPassword);

@@ -76,31 +81,44 @@ OIDCAuthProvider getProvider() throws Exception {
 return oidcAuthProvider;
 }

- Keycloak getAdminClient() {
- return KeycloakBuilder.builder()
+ // NOTE: This requires the "direct access grants" for the client to be enabled!
+ String getBearerTokenViaKeycloakAdminClient() throws Exception {
+ try (Keycloak keycloak = KeycloakBuilder.builder()
 .serverUrl(keycloakContainer.getAuthServerUrl())
+ .grantType(OAuth2Constants.PASSWORD)
 .realm(realm)
- .clientId(clientIdAdminCli)
- .username(keycloakContainer.getAdminUsername())
- .password(keycloakContainer.getAdminPassword())
- .build();
- }
-
- String getBearerToken() throws Exception {
- Keycloak keycloak = getAdminClient();
- return keycloak.tokenManager().getAccessTokenString();
+ .clientId(clientId)
+ .clientSecret(clientSecret)
+ .username(realmAdminUser)
+ .password(realmAdminPassword)
+ .scope("openid")
+ .build()) {
+ return keycloak.tokenManager().getAccessTokenString();
+ }
 }

+ /**
+ * This basic test covers configuring an OIDC provider via MPCONFIG and being able to use it.
+ */
 @Test
 void testCreateProvider() throws Exception {
+ // given
 OIDCAuthProvider oidcAuthProvider = getProvider();
- String token = getBearerToken();
+ String token = getBearerTokenViaKeycloakAdminClient();
 assumeFalse(token == null);

- Optional<UserInfo> info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token));
+ Optional<UserInfo> info = Optional.empty();
+
+ // when
+ try {
+ info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token));
+ } catch (OAuth2Exception e) {
+ System.out.println(e.getMessageBody());
+ }

+ //then
 assertTrue(info.isPresent());
- assertEquals(adminUser, info.get().getPreferredUsername());
+ assertEquals(realmAdminUser, info.get().getPreferredUsername());
 }

 @Mock
@@ -111,6 +129,11 @@ void testCreateProvider() throws Exception {
 @InjectMocks
 BearerTokenAuthMechanism bearerTokenAuthMechanism;

+ /**
+ * This test covers using an OIDC provider as authorization party when accessing the Dataverse API with a
+ * Bearer Token. See {@link BearerTokenAuthMechanism}. It needs to mock the auth services to avoid adding
+ * more dependencies.
+ */
 @Test
 @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true")
 void testApiBearerAuth() throws Exception {
@@ -120,7 +143,7 @@ void testApiBearerAuth() throws Exception {

 // given
 // Get the access token from the remote Keycloak in the container
- String accessToken = getBearerToken();
+ String accessToken = getBearerTokenViaKeycloakAdminClient();
 assumeFalse(accessToken == null);

 OIDCAuthProvider oidcAuthProvider = getProvider();

From 844339dd6adc74977a6011f58cc01dd924b002cc Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 16:14:52 +0200
Subject: [PATCH 0112/1092] test(oidc): add third OIDC integration test for
 JSF login

Using HtmlUnit as a browser replacement, enable testing whether the
Authorization Code Flow used when logging in via the UI works properly.
---
 pom.xml | 6 ++
 .../OIDCAuthenticationProviderFactoryIT.java | 78 +++++++++++++++++++
 2 files changed, 84 insertions(+)

diff --git a/pom.xml b/pom.xml
index e9a9b9dd611..2ba01e5ab61 100644
--- a/pom.xml
+++ b/pom.xml
@@ -610,6 +610,12 @@
             <version>${smallrye-mpconfig.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.htmlunit</groupId>
+            <artifactId>htmlunit</artifactId>
+            <version>3.2.0</version>
+            <scope>test</scope>
+        </dependency>

diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
index 88f70c53948..2c963e8df46 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
@@ -16,6 +16,13 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
 import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import org.htmlunit.FailingHttpStatusCodeException;
+import org.htmlunit.WebClient;
+import org.htmlunit.WebResponse;
+import org.htmlunit.html.HtmlForm;
+import org.htmlunit.html.HtmlInput;
+import org.htmlunit.html.HtmlPage;
+import org.htmlunit.html.HtmlSubmitInput;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -28,8 +35,11 @@
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;

+import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;

 import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId;
 import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret;
@@ -166,4 +176,72 @@ void testApiBearerAuth() throws Exception {
 assertNotNull(lookedUpUser);
 assertEquals(user, lookedUpUser);
 }
+
+ /**
+ * This test covers the {@link OIDCAuthProvider#buildAuthzUrl(String, String)} and
+ * {@link OIDCAuthProvider#getUserRecord(String, String, String)} methods that are used when
+ * a user authenticates via the JSF UI. It covers enabling PKCE, which is not a hard requirement
+ * of the protocol, but might be required by some providers (as seen with Microsoft Azure AD).
+ * As we don't have a real browser, we use {@link WebClient} from HtmlUnit as a replacement.
+ */
+ @Test
+ @JvmSetting(key = JvmSettings.OIDC_PKCE_ENABLED, value = "true")
+ void testAuthorizationCodeFlowWithPKCE() throws Exception {
+ // given
+ String state = "foobar";
+ String callbackUrl = "http://localhost:8080/oauth2callback.xhtml";
+
+ OIDCAuthProvider oidcAuthProvider = getProvider();
+ String authzUrl = oidcAuthProvider.buildAuthzUrl(state, callbackUrl);
+ //System.out.println(authzUrl);
+
+ try (WebClient webClient = new WebClient()) {
+ webClient.getOptions().setCssEnabled(false);
+ webClient.getOptions().setJavaScriptEnabled(false);
+ // We *want* to know about the redirect, as it contains the data we need!
+ webClient.getOptions().setRedirectEnabled(false);
+
+ HtmlPage loginPage = webClient.getPage(authzUrl);
+ assumeTrue(loginPage.getTitleText().contains("Sign in to " + realm));
+
+ HtmlForm form = loginPage.getForms().get(0);
+ HtmlInput username = form.getInputByName("username");
+ HtmlInput password = form.getInputByName("password");
+ HtmlSubmitInput submit = form.getInputByName("login");
+
+ username.type(realmAdminUser);
+ password.type(realmAdminPassword);
+
+ FailingHttpStatusCodeException exception = assertThrows(FailingHttpStatusCodeException.class, submit::click);
+ assertEquals(302, exception.getStatusCode());
+
+ WebResponse response = exception.getResponse();
+ assertNotNull(response);
+
+ String callbackLocation = response.getResponseHeaderValue("Location");
+ assertTrue(callbackLocation.startsWith(callbackUrl));
+ //System.out.println(callbackLocation);
+
+ String queryPart = callbackLocation.trim().split("\\?")[1];
+ Map<String, String> parameters = Pattern.compile("\\s*&\\s*")
+ .splitAsStream(queryPart)
+ .map(s -> s.split("=", 2))
+ .collect(Collectors.toMap(a -> a[0], a -> a.length > 1 ? a[1]: ""));
+ //System.out.println(parameters);
+ assertTrue(parameters.containsKey("code"));
+ assertTrue(parameters.containsKey("state"));
+
+ OAuth2UserRecord userRecord = oidcAuthProvider.getUserRecord(
+ parameters.get("code"),
+ parameters.get("state"),
+ callbackUrl
+ );
+
+ assertNotNull(userRecord);
+ assertEquals(realmAdminUser, userRecord.getUsername());
+ } catch (OAuth2Exception e) {
+ System.out.println(e.getMessageBody());
+ throw e;
+ }
+ }
 }
\ No newline at end of file

From 033d8e9f58cd39d8a697354facffc0d76abf59bd Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Tue, 23 May 2023 23:12:00 +0200
Subject: [PATCH 0113/1092] chore(build): update many Maven plugins to latest
 available versions

This is mostly because Maven 3.9 starts to verify whether a plugin will be
compatible with Maven 4. Most plugins are being updated at the moment.
--- modules/dataverse-parent/pom.xml | 28 ++++++++++++++++++++-------- pom.xml | 2 -- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 060fc22b4d2..97e83684d1a 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -174,19 +174,21 @@ 5.7.0 ${junit.jupiter.version} 2.28.2 + 0.8.10 9.3 - 3.8.1 - 3.2.2 + 3.11.0 + 3.3.0 3.3.2 - 3.2.0 - 3.0.0-M1 - 3.0.0-M5 - 3.0.0-M5 - 3.3.0 - 3.1.2 + 3.5.0 + 3.1.1 + 3.1.0 + 3.1.0 + 3.6.0 + 3.3.1 + 3.2.2 0.42.1 @@ -245,6 +247,11 @@ maven-failsafe-plugin ${maven-failsafe-plugin.version} + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + org.apache.maven.plugins maven-checkstyle-plugin @@ -257,6 +264,11 @@ + + org.jacoco + jacoco-maven-plugin + ${maven-jacoco-plugin.version} + io.fabric8 docker-maven-plugin diff --git a/pom.xml b/pom.xml index 2ba01e5ab61..28fc2b25af5 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,6 @@ 1.2.18.4 8.5.10 1.20.1 - 0.8.7 5.2.1 2.4.1 5.5.3 @@ -704,7 +703,6 @@ org.jacoco jacoco-maven-plugin - ${jacoco.version} ${basedir}/target/coverage-reports/jacoco-unit.exec ${basedir}/target/coverage-reports/jacoco-unit.exec From f112fe46e93133e7736bfcb018623b07eebee776 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:05:01 +0200 Subject: [PATCH 0114/1092] build,test: reconfigure JaCoCo test coverage measuring With the addition of integration tests we need to create a combined report of unit tests and integration tests (for now still keeping API tests out of the loop for normal cases). This commit reconfigures the JaCoCo plugin with multiple executions at certain times to execute the measurements and merge the results together. 
--- pom.xml | 72 ++++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 66 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 28fc2b25af5..5d1523e01b8 100644 --- a/pom.xml +++ b/pom.xml @@ -703,23 +703,78 @@ org.jacoco jacoco-maven-plugin - - ${basedir}/target/coverage-reports/jacoco-unit.exec - ${basedir}/target/coverage-reports/jacoco-unit.exec - jacoco-initialize prepare-agent + + ${project.build.directory}/coverage-reports/jacoco-unit.exec + surefire.jacoco.args + - jacoco-site - package + jacoco-after-unit + test report + + ${project.build.directory}/coverage-reports/jacoco-unit.exec + ${project.reporting.outputDirectory}/jacoco-unit-test-coverage-report + + + + jacoco-initialize-it + pre-integration-test + + prepare-agent + + + ${project.build.directory}/coverage-reports/jacoco-integration.exec + failsafe.jacoco.args + + + + jacoco-after-it + post-integration-test + + report + + + ${project.build.directory}/coverage-reports/jacoco-integration.exec + ${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report + + + + jacoco-merge-unit-and-it + post-integration-test + + merge + + + + + ${project.build.directory}/coverage-reports/ + + *.exec + + + + ${project.build.directory}/coverage-reports/merged.exec + + + + jacoco-report + post-integration-test + + report + + + ${project.build.directory}/coverage-reports/merged.exec + ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report + @@ -734,6 +789,9 @@ 2.3.1 + + ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report/jacoco.xml + org.apache.maven.plugins @@ -743,6 +801,7 @@ ${testsToExclude} ${skipUnitTests} + ${surefire.jacoco.args} ${argLine} @@ -751,6 +810,7 @@ maven-failsafe-plugin testcontainers + ${failsafe.jacoco.args} ${argLine} From 5975e268ef1150ef76edd589e1ce35a74e683ccf Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:23:54 +0200 Subject: [PATCH 0115/1092] ci: restructure Maven tests to include integration tests --- .github/workflows/maven_unit_test.yml | 78 +++++++++++++++++++++++++-- 1 file changed, 75 insertions(+), 3 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 629a45a6c2c..8d51702be6b 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -33,6 +33,7 @@ jobs: continue-on-error: ${{ matrix.experimental }} runs-on: ubuntu-latest steps: + # Basic setup chores - uses: actions/checkout@v2 - name: Set up JDK ${{ matrix.jdk }} uses: actions/setup-java@v2 @@ -45,13 +46,84 @@ jobs: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 + - name: Build with Maven run: mvn -DcompilerArgument=-Xlint:unchecked -Dtarget.java.version=${{ matrix.jdk }} -P all-unit-tests clean test - - name: Maven Code Coverage - env: + + # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports + - run: tar -cvf java-builddir.tar target + - uses: actions/upload-artifact@v3 + with: + name: java-builddir + path: java-builddir.tar + retention-days: 3 + integration-test: + runs-on: ubuntu-latest + needs: unittest + steps: + # Basic setup chores + - uses: actions/checkout@v3 + - name: Set up JDK ${{ matrix.jdk }} + uses: actions/setup-java@v2 + with: + java-version: ${{ matrix.jdk }} + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + 
restore-keys: ${{ runner.os }}-m2 + + # Get the build output from the unit test job + - uses: actions/download-artifact@v3 + with: + name: java-builddir + - run: tar -xvf java-builddir.tar; ls -laR target + + # Run integration tests + - run: mvn -Dtarget.java.version=${{ matrix.jdk }} verify + + # Wrap up and send to coverage job + - run: tar -cvf java-builddir.tar target + - uses: actions/upload-artifact@v3 + with: + name: java-builddir + path: java-builddir.tar + retention-days: 3 + coverage-report: + runs-on: ubuntu-latest + needs: integration-test + steps: + # Basic setup chores + - uses: actions/checkout@v3 + - name: Set up JDK ${{ matrix.jdk }} + uses: actions/setup-java@v2 + with: + java-version: ${{ matrix.jdk }} + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + + # Get the build output from the integration test job + - uses: actions/download-artifact@v3 + with: + name: java-builddir + - run: tar -xvf java-builddir.tar; ls -laR target + + # Deposit Code Coverage + - name: Maven Code Coverage + env: CI_NAME: github COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }} - run: mvn -V -B jacoco:report coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }} + run: mvn -V -B coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }} + + # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... + push-app-img: name: Publish App Image permissions: From 3b7aa106d118f7cecaefd7680b7a6dd7df2fc670 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:29:47 +0200 Subject: [PATCH 0116/1092] chore(ci): update actions/setup-java to v3 for Maven workflow --- .github/workflows/maven_unit_test.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 8d51702be6b..1ff08705e36 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -34,9 +34,9 @@ jobs: runs-on: ubuntu-latest steps: # Basic setup chores - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: java-version: ${{ matrix.jdk }} distribution: 'adopt' @@ -64,7 +64,7 @@ jobs: # Basic setup chores - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: java-version: ${{ matrix.jdk }} distribution: 'adopt' @@ -97,10 +97,9 @@ jobs: steps: # Basic setup chores - uses: actions/checkout@v3 - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + - uses: actions/setup-java@v3 with: - java-version: ${{ matrix.jdk }} + java-version: '11' distribution: 'adopt' - name: Cache Maven packages uses: actions/cache@v2 From 2c0a6aabf5852a441009aca39625e8a71eed66b9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:30:13 +0200 Subject: [PATCH 0117/1092] ci(test): fix missing build matrix for integration test job --- .github/workflows/maven_unit_test.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 1ff08705e36..3c1c5e7e3e2 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -60,6 
+60,23 @@ jobs: integration-test: runs-on: ubuntu-latest needs: unittest + name: (${{ matrix.status}} / JDK ${{ matrix.jdk }}) Integration Tests + strategy: + fail-fast: false + matrix: + jdk: [ '11' ] + experimental: [ false ] + status: [ "Stable" ] + # + # JDK 17 builds disabled due to non-essential fails marking CI jobs as completely failed within + # Github Projects, PR lists etc. This was consensus on Slack #dv-tech. See issue #8094 + # (This is a limitation of how Github is currently handling these things.) + # + #include: + # - jdk: '17' + # experimental: true + # status: "Experimental" + continue-on-error: ${{ matrix.experimental }} steps: # Basic setup chores - uses: actions/checkout@v3 From 8a6f23166870c9460c635535cb021029d45509f5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:43:36 +0200 Subject: [PATCH 0118/1092] ci(test): tweak artifact upload and job names --- .github/workflows/maven_unit_test.yml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 3c1c5e7e3e2..a4753f24668 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -1,4 +1,4 @@ -name: Maven Unit Tests +name: Maven Tests on: push: @@ -96,21 +96,22 @@ jobs: - uses: actions/download-artifact@v3 with: name: java-builddir - - run: tar -xvf java-builddir.tar; ls -laR target + - run: tar -xvf java-builddir.tar - # Run integration tests - - run: mvn -Dtarget.java.version=${{ matrix.jdk }} verify + # Run integration tests (but not unit tests again) + - run: mvn -DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify # Wrap up and send to coverage job - - run: tar -cvf java-builddir.tar target + - run: tar -cvf java-reportdir.tar target/site - uses: actions/upload-artifact@v3 with: - name: java-builddir - path: java-builddir.tar + name: java-reportdir + path: java-reportdir.tar retention-days: 3 coverage-report: runs-on: ubuntu-latest needs: integration-test + name: Coverage Report Submission steps: # Basic setup chores - uses: actions/checkout@v3 @@ -128,8 +129,8 @@ jobs: # Get the build output from the integration test job - uses: actions/download-artifact@v3 with: - name: java-builddir - - run: tar -xvf java-builddir.tar; ls -laR target + name: java-reportdir + - run: tar -xvf java-reportdir.tar # Deposit Code Coverage - name: Maven Code Coverage From edef7d5bd52f3625b2e15521046d97a73f999894 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:20:04 +0200 Subject: [PATCH 0119/1092] docs(config): add OIDC to JVM options list with backlink --- doc/sphinx-guides/source/installation/config.rst | 7 +++++++ doc/sphinx-guides/source/installation/oidc.rst | 2 ++ 2 files changed, 9 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f8aef8c59da..4b1a8bd14b3 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2420,6 +2420,13 @@ Can also be set via any `supported MicroProfile Config API source`_, e.g. the en ``DATAVERSE_UI_SHOW_VALIDITY_FILTER``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions. +dataverse.auth.oidc.* ++++++++++++++++++++++ + +Provision a single :doc:`OpenID Connect authentication provider ` using MicroProfile Config. You can find a list of +all available options at :ref:`oidc-mpconfig`. + + .. 
_feature-flags:

Feature Flags

diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst
index 9848d73b189..0dc5ca4ff4a 100644
--- a/doc/sphinx-guides/source/installation/oidc.rst
+++ b/doc/sphinx-guides/source/installation/oidc.rst
@@ -116,6 +116,8 @@ The Dataverse installation will automatically try to load the provider and retri

 You should see the new provider under "Other options" on the Log In page, as described in the :doc:`/user/account` section of the User Guide.

+.. _oidc-mpconfig:
+
 Provision via MPCONFIG
 ^^^^^^^^^^^^^^^^^^^^^^

From e91a046a790506e959ffcc9f71d27f6207b4d4e1 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 24 May 2023 01:51:04 +0200
Subject: [PATCH 0120/1092] feat(oidc): replace map-based cache of PKCE
 verifiers

Instead of using an unlimited-growth Map of verifiers as a cache, we will
now use a real evicting cache implementation to limit the size and age of
entries. This limits resource waste and mitigates an unlikely but present
attack vector of pumping up the cache to DDoS us.
---
 pom.xml | 7 +++++++
 .../providers/oauth2/oidc/OIDCAuthProvider.java | 14 ++++++++++++--
 .../harvard/iq/dataverse/settings/JvmSettings.java | 2 ++
 .../META-INF/microprofile-config.properties | 4 ++++
 4 files changed, 25 insertions(+), 2 deletions(-)

diff --git a/pom.xml b/pom.xml
index 5d1523e01b8..0484a80ddd9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -386,6 +386,13 @@
             <artifactId>oauth2-oidc-sdk</artifactId>
             <version>10.9.1</version>
         </dependency>
+
+        <dependency>
+            <groupId>com.github.ben-manes.caffeine</groupId>
+            <artifactId>caffeine</artifactId>
+            <version>3.1.6</version>
+        </dependency>
+
         <dependency>
             <groupId>io.gdcc</groupId>

diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
index 52362f7abeb..818332ea282 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
@@ -1,5 +1,7 @@
 package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc;

+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import com.github.scribejava.core.builder.api.DefaultApi20;
 import com.nimbusds.oauth2.sdk.AuthorizationCode;
 import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant;
@@ -36,10 +38,13 @@
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.BundleUtil;

 import java.io.IOException;
 import java.net.URI;
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -69,8 +74,13 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider {
 /**
 * Using PKCE, we create and send a special {@link CodeVerifier}. This contains a secret
 * we need again when verifying the response by the provider, thus the cache.
+ * To make sure this cannot be abused to DDoS us, and to not let unused verifiers rot,
+ * use an evicting cache implementation rather than a standard map.
 */
- private final Map<String, CodeVerifier> verifierCache = new ConcurrentHashMap<>();
+ private final Cache<String, CodeVerifier> verifierCache = Caffeine.newBuilder()
+ .maximumSize(JvmSettings.OIDC_PKCE_CACHE_MAXSIZE.lookup(Integer.class))
+ .expireAfterWrite(Duration.of(JvmSettings.OIDC_PKCE_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.SECONDS))
+ .build();

 public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL,
 boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException {
@@ -201,7 +211,7 @@ public String buildAuthzUrl(String state, String callbackUrl) {
 public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception {
 // Retrieve the verifier from the cache and clear from the cache. If not found, will be null.
 // Will be sent to token endpoint for verification, so if required but missing, will lead to exception.
- CodeVerifier verifier = verifierCache.remove(state);
+ CodeVerifier verifier = verifierCache.getIfPresent(state);

 // Create grant object - again, this is null-safe for the verifier
 AuthorizationGrant codeGrant = new AuthorizationCodeGrant(
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
index 144be72c91a..1122b64c139 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
@@ -125,6 +125,8 @@ public enum JvmSettings {
 SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"),
 OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"),
 OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"),
+ OIDC_PKCE_CACHE_MAXSIZE(SCOPE_OIDC_PKCE, "max-cache-size"),
+ OIDC_PKCE_CACHE_MAXAGE(SCOPE_OIDC_PKCE, "max-cache-age"),

 // UI SETTINGS
 SCOPE_UI(PREFIX, "ui"),
diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties
index 3e166d0527f..38a4d8df0ab 100644
--- a/src/main/resources/META-INF/microprofile-config.properties
+++ b/src/main/resources/META-INF/microprofile-config.properties
@@ -54,3 +54,7 @@ dataverse.pid.datacite.rest-api-url=https://api.test.datacite.org

 # Handle.Net
 dataverse.pid.handlenet.index=300
+
+# AUTHENTICATION
+dataverse.auth.oidc.pkce.max-cache-size=10000
+dataverse.auth.oidc.pkce.max-cache-age=300
\ No newline at end of file

From 8d4a75e8236298d787fdf738512c615c13c3654e Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 24 May 2023 01:51:39 +0200
Subject: [PATCH 0121/1092] docs(oidc): describe new config options for PKCE
 verifier cache

---
 doc/sphinx-guides/source/installation/oidc.rst | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst
index 0dc5ca4ff4a..e036e9c8470 100644
--- a/doc/sphinx-guides/source/installation/oidc.rst
+++ b/doc/sphinx-guides/source/installation/oidc.rst
@@ -168,3 +168,12 @@ The following options are available:
 - A subtitle, currently not displayed by the UI.
 - N
 - ``OpenID Connect``
+ * - ``dataverse.auth.oidc.pkce.max-cache-size``
+ - Tune the maximum size of all OIDC providers' verifier cache (= the number of outstanding PKCE-enabled auth responses).
+ - N
+ - 10000
+ * - ``dataverse.auth.oidc.pkce.max-cache-age``
+ - Tune the maximum age of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to the lifetime
+ of many OIDC access tokens.
+ - N
+ - 300
\ No newline at end of file

From 8ca25fff4f4e1801c97ff28ba1947e4d05bfc915 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 24 May 2023 01:56:47 +0200
Subject: [PATCH 0122/1092] docs(oidc,test): add release note

---
 .../9268-8349-oidc-improvements.md | 28 +++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 doc/release-notes/9268-8349-oidc-improvements.md

diff --git a/doc/release-notes/9268-8349-oidc-improvements.md b/doc/release-notes/9268-8349-oidc-improvements.md
new file mode 100644
index 00000000000..cb0a9685c69
--- /dev/null
+++ b/doc/release-notes/9268-8349-oidc-improvements.md
@@ -0,0 +1,28 @@
+## OpenID Connect Authentication Provider Improvements
+
+### Using MicroProfile Config For Provisioning
+
+With this release it is possible to provision a single OIDC-based authentication provider
+by using MicroProfile Config instead of or in addition to the classic Admin API provisioning.
+
+If you are using an external OIDC provider component as an identity management system and/or broker
+to other authentication providers such as Google, eduGain SAML and so on, this might make your
+life easier during instance setups and reconfiguration. You no longer need to generate the
+necessary JSON file.
+
+### Adding PKCE Support
+
+Some OIDC providers require using PKCE as an additional security layer. As of this version, you can enable
+support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.)
+
+## Improved Testing
+
+With this release, we add a new type of testing to Dataverse: integration tests that are not end-to-end tests
+like our API tests. Starting with OIDC authentication support, we regularly test on CI that both OIDC login
+options, in the UI and in the API, are in working condition.
+
+The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21.
+
+The support for setting JVM options during testing has been improved for developers. You may now add the
+`@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is
+also paving the way to enabling manipulation of JVM options during end-to-end tests on remote ends.
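The `@JvmSetting` mechanics described in the release note above can be hard to picture from prose alone. The following is a minimal, hypothetical sketch (not part of any patch in this series), assembled from the annotations and imports that appear in `OIDCAuthenticationProviderFactoryIT` earlier in the series; the class name, test method, and URL value are illustrative assumptions:

    import edu.harvard.iq.dataverse.settings.JvmSettings;
    import edu.harvard.iq.dataverse.util.testing.JvmSetting;
    import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
    import org.junit.jupiter.api.Test;

    // Class-level settings apply to every test in the class; "method" defers the
    // value lookup to a static factory, which allows dynamic values such as the
    // URL of a Testcontainers-managed Keycloak.
    @LocalJvmSettings
    @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl")
    class ExampleJvmSettingTest {

        // Referenced by name from the class-level annotation above.
        static String getAuthUrl() {
            return "http://localhost:8090/realms/test"; // illustrative value
        }

        @Test
        @JvmSetting(key = JvmSettings.OIDC_PKCE_ENABLED, value = "true") // per-method override
        void pkceEnabledOnlyForThisTest() {
            // ... exercise code that reads these settings ...
        }
    }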
From 8b2937e80e53528106d12ea7e8ed7204c7b7ee2c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 17:35:23 -0400 Subject: [PATCH 0123/1092] Add call to populate file lists --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index b8d2507bc6b..c857d00a27d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -370,6 +370,7 @@ public void setShowIngestSuccess(boolean showIngestSuccess) { public void setTermsGuestbookPopupAction(String popupAction){ if(popupAction != null && popupAction.length() > 0){ + logger.info("TGPA set to " + popupAction); this.termsGuestbookPopupAction = popupAction; } @@ -5178,6 +5179,9 @@ public boolean isFileAccessRequestMultiButtonEnabled(){ if (!isSessionUserAuthenticated() || !dataset.isFileAccessRequest()){ return false; } + //populate file lists + filterSelectedFiles(); + if( this.selectedRestrictedFiles == null || this.selectedRestrictedFiles.isEmpty() ){ return false; } From 7ddc0466a767b292e8dfe6625feea770b112ded5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 17:35:42 -0400 Subject: [PATCH 0124/1092] hide accept terms buttons in download case --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 5616fa48280..bdaa6d92432 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -41,7 +41,7 @@
    @@ -51,6 +51,7 @@
    + Date: Thu, 25 May 2023 17:49:17 -0400 Subject: [PATCH 0125/1092] handle changes from #6919 --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index bdaa6d92432..9c3391ef9ae 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -60,7 +60,7 @@ From 90186edc47772a2aa39089dc1607caecf3c3917a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 19:47:11 -0400 Subject: [PATCH 0126/1092] fix query --- .../edu/harvard/iq/dataverse/DataFileServiceBean.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 449e8d351c6..f4a33e4f12f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -861,10 +861,11 @@ private List retrieveFileAccessRequesters(DataFile fileIn) { // List requesters = em.createNativeQuery("select authenticated_user_id // from fileaccessrequests where datafile_id = // "+fileIn.getId()).getResultList(); - List requesters = em.createNativeQuery("select authenticated_user_id from fileaccessrequests where datafile_id = " + fileIn.getId() + " and request_state='CREATED'").getResultList(); - - for (Object userIdObj : requesters) { - Long userId = (Long) userIdObj; + TypedQuery typedQuery = em.createQuery("select f.user.id from FileAccessRequest f where f.dataFile.id = :file_id and f.requestState= :requestState", Long.class); + typedQuery.setParameter("file_id", fileIn.getId()); + typedQuery.setParameter("requestState", FileAccessRequest.RequestState.CREATED); + List requesters = typedQuery.getResultList(); + for (Long userId : requesters) { AuthenticatedUser user = userService.find(userId); if (user != null) { retList.add(user); From 728c886444161cd23ad060ab8665961f4452ace6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 01:16:08 +0200 Subject: [PATCH 0127/1092] ci: add installed gdcc packages to Maven run artifact --- .github/workflows/maven_unit_test.yml | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 51e3c5188f6..cc918e16d97 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -60,11 +60,15 @@ jobs: - run: rm -rf ~/.m2/repository/edu/harvard/iq/dataverse # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports - - run: tar -cvf java-builddir.tar target + - run: | + tar -cvf java-builddir.tar target + tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - uses: actions/upload-artifact@v3 with: - name: java-builddir - path: java-builddir.tar + name: java-artifacts + path: | + java-builddir.tar + java-m2-selection.tar retention-days: 3 integration-test: @@ -100,11 +104,12 @@ jobs: # Get the build output from the unit test job - uses: actions/download-artifact@v3 with: - name: java-builddir - - run: tar -xvf java-builddir.tar + name: java-artifacts + - run: | + tar -xvf java-builddir.tar + tar -xvf java-m2-selection.tar -C / # Run integration tests (but not unit tests again) - # TODO - adopt to parent module - run: mvn 
-DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify # Wrap up and send to coverage job @@ -148,6 +153,7 @@ jobs: jacoco:report coveralls:report # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... + push-app-img: name: Publish App Image permissions: From 77242777e2afa68335884d9eed2f68e1be750102 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 08:39:03 +0200 Subject: [PATCH 0128/1092] ci: some more fixes for Maven Tests --- .github/workflows/maven_unit_test.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index cc918e16d97..5a5e55f82aa 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -47,12 +47,14 @@ jobs: cache: maven # The reason why we use "install" here is that we want the submodules to be available in the next step. - # Also, we can cache them this way for jobs triggered by this one. - - name: Build with Maven + # Also, we can cache them this way for jobs triggered by this one. We need to skip ITs here, as we run + # them in the next job - but install usually runs through verify phase. + - name: Build with Maven and run unit tests run: > mvn -B -f modules/dataverse-parent -Dtarget.java.version=${{ matrix.jdk }} -DcompilerArgument=-Xlint:unchecked -P all-unit-tests + -DskipIntegrationTests -pl edu.harvard.iq:dataverse -am install @@ -127,10 +129,9 @@ jobs: steps: # Basic setup chores - uses: actions/checkout@v3 - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + - uses: actions/setup-java@v3 with: - java-version: ${{ matrix.jdk }} + java-version: '11' distribution: temurin cache: maven @@ -141,7 +142,7 @@ jobs: - run: tar -xvf java-reportdir.tar # Deposit Code Coverage - - name: Maven Code Coverage + - name: Deposit Code Coverage env: CI_NAME: github COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }} From 0287e037e6b76be83006c8447a7007d2386aea91 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 08:46:48 +0200 Subject: [PATCH 0129/1092] build: add config to enable skipping jacoco and failsafe tests --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index fd92ccdc06e..2aa74657422 100644 --- a/pom.xml +++ b/pom.xml @@ -747,6 +747,7 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec failsafe.jacoco.args + ${skipIntegrationTests} @@ -758,6 +759,7 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec ${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report + ${skipIntegrationTests} @@ -776,6 +778,7 @@ ${project.build.directory}/coverage-reports/merged.exec + ${skipIntegrationTests} @@ -787,6 +790,7 @@ ${project.build.directory}/coverage-reports/merged.exec ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report + ${skipIntegrationTests} @@ -824,6 +828,7 @@ testcontainers ${failsafe.jacoco.args} ${argLine} + ${skipIntegrationTests} From 28ddc3797b60eafb2b4144bb06610dc1caadf54f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 11:08:25 +0200 Subject: [PATCH 0130/1092] fix(auth,oidc): do not add null verifier to cache Will throw an NPE otherwise. 
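For context on why this guard is needed: Caffeine rejects null keys and values outright, while reads of absent keys are safe and simply return null. A minimal standalone sketch of that behavior (not part of the patch; the class name and demo values are illustrative):

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;

    public class CaffeineNullBehaviorDemo {
        public static void main(String[] args) {
            Cache<String, String> cache = Caffeine.newBuilder().maximumSize(10).build();

            cache.put("state-1", "verifier-1");                // fine
            System.out.println(cache.getIfPresent("state-1")); // prints: verifier-1
            System.out.println(cache.getIfPresent("state-2")); // prints: null (absent key, no error)

            try {
                cache.put("state-3", null);                    // Caffeine forbids null values
            } catch (NullPointerException expected) {
                System.out.println("putting a null verifier throws NPE");
            }
        }
    }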
--- .../authorization/providers/oauth2/oidc/OIDCAuthProvider.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 818332ea282..5eb2b391eb7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -191,7 +191,9 @@ public String buildAuthzUrl(String state, String callbackUrl) { // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us // the auth code! We use the state to cache the verifier, as the state is unique per authentication event. - this.verifierCache.put(state, pkceVerifier); + if (pkceVerifier != null) { + this.verifierCache.put(state, pkceVerifier); + } return req.toURI().toString(); } From 749c13be13c6c6e3c0f8bea26674e488cd62d6ab Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 11:10:09 +0200 Subject: [PATCH 0131/1092] feat(ct,oidc,auth): add dev keycloak connection to compose file --- docker-compose-dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index e3f93b77d4a..d582a6375f9 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -13,6 +13,10 @@ services: - DATAVERSE_DB_PASSWORD=secret - DATAVERSE_DB_USER=${DATAVERSE_DB_USER} - DATAVERSE_FEATURE_API_BEARER_AUTH=1 + - DATAVERSE_AUTH_OIDC_ENABLED=1 + - DATAVERSE_AUTH_OIDC_CLIENT_ID=test + - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 + - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) From 849df5d2214a630fe70dc177c7188f09b4ae782b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 13:04:30 +0200 Subject: [PATCH 0132/1092] docs,fix(oidc): fix API auth docs example with new test realm --- doc/sphinx-guides/source/api/auth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst index bbc81b595e3..eced7afbbcf 100644 --- a/doc/sphinx-guides/source/api/auth.rst +++ b/doc/sphinx-guides/source/api/auth.rst @@ -77,6 +77,6 @@ To test if bearer tokens are working, you can try something like the following ( .. 
code-block:: bash - export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/oidc-realm/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=kcuser&password=kcpassword&grant_type=password&client_id=oidc-client&client_secret=ss6gE8mODCDfqesQaSG3gwUwZqZt547E" | jq '.access_token' -r | tr -d "\n"` + export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/test/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=user&password=user&grant_type=password&client_id=test&client_secret=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8" | jq '.access_token' -r | tr -d "\n"` curl -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/users/:me From 502e660fe342939a617edd6d17a425c83b5a269b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 12 May 2023 13:22:46 -0400 Subject: [PATCH 0133/1092] suppress thumb generation after a failure --- .../edu/harvard/iq/dataverse/DvObject.java | 14 +++++ .../dataaccess/ImageThumbConverter.java | 55 ++++++++++++------- .../dataverse/ingest/IngestServiceBean.java | 4 +- .../V5.13.0.1__9506-track-thumb-failures.sql | 1 + 4 files changed, 54 insertions(+), 20 deletions(-) create mode 100644 src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 854888737ee..6cb3816e3f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -181,7 +181,20 @@ public boolean isPreviewImageAvailable() { public void setPreviewImageAvailable(boolean status) { this.previewImageAvailable = status; } + + /** Indicates whether a previous attempt to generate a preview image has failed, regardless of size. + * If so, we won't want to try again every time the preview/thumbnail is requested for a view. 
+ */ + private boolean previewsHaveFailed; + + public boolean isPreviewsHaveFailed() { + return previewsHaveFailed; + } + public void setPreviewsHaveFailed(boolean previewsHaveFailed) { + this.previewsHaveFailed = previewsHaveFailed; + } + public Timestamp getModificationTime() { return modificationTime; } @@ -462,6 +475,7 @@ public void setStorageIdentifier(String storageIdentifier) { */ public abstract boolean isAncestorOf( DvObject other ); + @OneToMany(mappedBy = "definitionPoint",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true) List roleAssignments; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 2b4aed3a9a5..eb08646454d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -48,6 +48,7 @@ import java.nio.channels.WritableByteChannel; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.io.IOUtils; //import org.primefaces.util.Base64; @@ -110,15 +111,24 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (isThumbnailCached(storageIO, size)) { + logger.fine("Found cached thumbnail for " + file.getId()); return true; } - logger.fine("Checking for thumbnail, file type: " + file.getContentType()); - - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - return generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - return generatePDFThumbnail(storageIO, size); + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + "to generate thumbnail, file id: " + file.getId()); + // Don't try to generate if there have been failures: + if (!file.isPreviewsHaveFailed()) { + boolean thumbnailGenerated = false; + if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { + thumbnailGenerated = generateImageThumbnail(storageIO, size); + } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { + thumbnailGenerated = generatePDFThumbnail(storageIO, size); + } + if (!thumbnailGenerated) { + logger.fine("No thumbnail generated for " + file.getId()); + file.setPreviewGenerationHasPreviouslyFailed(true); + } + return thumbnailGenerated; } return false; @@ -436,20 +446,27 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (cachedThumbnailChannel == null) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); - // try to generate, if not available: - boolean generated = false; - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - generated = generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - generated = generatePDFThumbnail(storageIO, size); - } + // try to generate, if not available and hasn't failed before + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + "to generate base64 thumbnail, file id: " + file.getId()); + if (!file.isPreviewsHaveFailed()) { + boolean generated = false; + if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { + generated = generateImageThumbnail(storageIO, size); + } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { + generated = generatePDFThumbnail(storageIO, size); + } - if (generated) { - // try to open again: - try { - cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); - } catch (Exception ioEx) { - cachedThumbnailChannel = null; + if (!generated) { + // Record failure + logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); + file.setPreviewGenerationHasPreviouslyFailed(true); + } else { + // Success - try to open again: + try { + cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + } catch (Exception ioEx) { + cachedThumbnailChannel = null; + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 5a353453fe8..fbe2d7b38ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -292,7 +292,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } catch (IOException ioex) { logger.warning("Failed to save generated file " + generated.toString()); - } + //Shouldn't mark this file as having a preview after this. + dataFile.setPreviewImageAvailable(false); + } } // ... but we definitely want to delete it: diff --git a/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql new file mode 100644 index 00000000000..9b12d27db91 --- /dev/null +++ b/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql @@ -0,0 +1 @@ +ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS previewshavefailed BOOLEAN DEFAULT FALSE; \ No newline at end of file From 0fea5ccca11b2348429ddfee75e4bafc709c7473 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 12 May 2023 13:25:38 -0400 Subject: [PATCH 0134/1092] refactor error --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index eb08646454d..254c334d655 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -126,7 +126,7 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (!thumbnailGenerated) { logger.fine("No thumbnail generated for " + file.getId()); - file.setPreviewGenerationHasPreviouslyFailed(true); + file.setPreviewsHaveFailed(true); } return thumbnailGenerated; } @@ -459,7 +459,7 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (!generated) { // Record failure logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - file.setPreviewGenerationHasPreviouslyFailed(true); + file.setPreviewsHaveFailed(true); } else { // Success - try to open again: try { From 8f5350ae0df4df60c55ff770259531935cb6ac9b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 10:32:21 -0400 Subject: [PATCH 
0135/1092] cache isThumb available --- .../iq/dataverse/ThumbnailServiceWrapper.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 6c8db8c124b..e2bb21c8a4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -49,6 +49,7 @@ public class ThumbnailServiceWrapper implements java.io.Serializable { private Map dvobjectThumbnailsMap = new HashMap<>(); private Map dvobjectViewMap = new HashMap<>(); + private Map hasThumbMap = new HashMap<>(); private String getAssignedDatasetImage(Dataset dataset, int size) { if (dataset == null) { @@ -133,7 +134,7 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { if ((!((DataFile)result.getEntity()).isRestricted() || permissionsWrapper.hasDownloadFilePermission(result.getEntity())) - && dataFileService.isThumbnailAvailable((DataFile) result.getEntity())) { + && isThumbnailAvailable((DataFile) result.getEntity())) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( (DataFile) result.getEntity(), @@ -159,6 +160,13 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { return null; } + public boolean isThumbnailAvailable(DataFile entity) { + if(!hasThumbMap.containsKey(entity.getId())) { + hasThumbMap.put(entity.getId(), dataFileService.isThumbnailAvailable(entity)); + } + return hasThumbMap.get(entity.getId()); + } + // it's the responsibility of the user - to make sure the search result // passed to this method is of the Dataset type! public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { @@ -295,7 +303,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo } } - if (dataFileService.isThumbnailAvailable(thumbnailImageFile)) { + if (isThumbnailAvailable(thumbnailImageFile)) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( thumbnailImageFile, size); @@ -323,6 +331,7 @@ public String getDataverseCardImageAsBase64Url(SolrSearchResult result) { public void resetObjectMaps() { dvobjectThumbnailsMap = new HashMap<>(); dvobjectViewMap = new HashMap<>(); + hasThumbMap = new HashMap<>(); } From 8604eef7f470eade8dbf885ed42bc47407db74ff Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 13:22:18 -0400 Subject: [PATCH 0136/1092] set thumb fail column --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 5 ++++- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 196f84b6877..a5822828682 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1127,7 +1127,7 @@ public boolean isThumbnailAvailable (DataFile file) { } // If thumbnails are not even supported for this class of files, - // there's notthing to talk about: + // there's nothing to talk about: if (!FileUtil.isThumbnailSupported(file)) { return false; } @@ -1149,6 +1149,9 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; + } else { + file.setPreviewsHaveFailed(true); + this.save(file); } return false; diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 254c334d655..ab9294eea72 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -115,7 +115,7 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s return true; } - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + "to generate thumbnail, file id: " + file.getId()); + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { boolean thumbnailGenerated = false; From aeae8f4ddbb05794c177e9b1d33725e1ed7d7e2f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 13:50:49 -0400 Subject: [PATCH 0137/1092] use thumb wrapper in edit and view files --- src/main/webapp/editFilesFragment.xhtml | 4 ++-- src/main/webapp/file-info-fragment.xhtml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a4e635b8c14..af06b44e3bc 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -360,13 +360,13 @@
    - - + #{fileMetadata.label} diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml index 33a8d2c3ca5..3e8e80d51e7 100644 --- a/src/main/webapp/file-info-fragment.xhtml +++ b/src/main/webapp/file-info-fragment.xhtml @@ -28,8 +28,8 @@
    - - + 

From c4ad20bc4b67b93908e60b76a251240f4a6e2540 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 13:49:35 -0400 Subject: [PATCH 0138/1092] add api --- .../edu/harvard/iq/dataverse/api/Admin.java | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index d219339add9..14c556e9caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2321,4 +2321,26 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl)); }

+ @DELETE + @Path("/clearThumbnailFailureFlag") + public Response clearThumbnailFailureFlag() { + em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE").executeUpdate(); + return ok("Thumbnail Failure Flags cleared."); + }

+ @DELETE + @Path("/clearThumbnailFailureFlag/{id}") + public Response clearThumbnailFailureFlagByDatafile(@PathParam("id") String fileId) { + try { + DataFile df = findDataFileOrDie(fileId); + Query deleteQuery = em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE where id = ?"); + deleteQuery.setParameter(1, df.getId()); + deleteQuery.executeUpdate(); + return ok("Thumbnail Failure Flag cleared for file id: " + df.getId() + "."); + } catch (WrappedResponse r) { + logger.info("Could not find file with the id: " + fileId); + return error(Status.BAD_REQUEST, "Could not find file with the id: " + fileId); + } + }

+ }

From 63e98b3b60a4baae98f1f88a282b97694929c443 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:16:47 -0400 Subject: [PATCH 0139/1092] make clearer --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index a5822828682..f41565c9449 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1149,11 +1149,9 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; - } else { - file.setPreviewsHaveFailed(true); - this.save(file); } - + file.setPreviewsHaveFailed(true); + this.save(file); return false; } From 2671cb75effb5425d02b3e874c7525b7833dc533 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:25:58 -0400 Subject: [PATCH 0140/1092] update comment --- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 6cb3816e3f1..87619450133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -182,8 +182,11 @@ public void setPreviewImageAvailable(boolean status) { this.previewImageAvailable = status; } - /** Indicates whether a previous attempt to generate a preview image has failed, - * regardless of size. + /** + * Indicates whether a previous attempt to generate a preview image has failed, + * regardless of size.
This could be due to the file not being accessible, or a + * real failure in generating the thumbnail. In both cases, we won't want to try + * again every time the preview/thumbnail is requested for a view. */ private boolean previewsHaveFailed; From 19db99b1427700c9cc4ad462c0edd017e6dd5799 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:26:28 -0400 Subject: [PATCH 0141/1092] remove setting flag where datafile is not clearly being saved to db --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index ab9294eea72..921faba7989 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -126,7 +126,6 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (!thumbnailGenerated) { logger.fine("No thumbnail generated for " + file.getId()); - file.setPreviewsHaveFailed(true); } return thumbnailGenerated; } @@ -459,7 +458,6 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (!generated) { // Record failure logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - file.setPreviewsHaveFailed(true); } else { // Success - try to open again: try { From 156d025970eeb5223b6fd8343db09cafee057fed Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 1 Jun 2023 15:09:25 -0400 Subject: [PATCH 0142/1092] fix non-merge-able error when recording thumb fail --- .../iq/dataverse/DataFileServiceBean.java | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index f41565c9449..880b2ea7dc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1142,17 +1142,17 @@ public boolean isThumbnailAvailable (DataFile file) { is more important... 
*/ - - if (ImageThumbConverter.isThumbnailAvailable(file)) { - file = this.find(file.getId()); - file.setPreviewImageAvailable(true); - this.save(file); - return true; - } - file.setPreviewsHaveFailed(true); - this.save(file); - return false; + file = this.find(file.getId()); + if (ImageThumbConverter.isThumbnailAvailable(file)) { + file.setPreviewImageAvailable(true); + this.save(file); + return true; + } else { + file.setPreviewsHaveFailed(true); + this.save(file); + return false; + } } From 97aa46cb3e9bd2d424961e68e9d024216740c57f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Jun 2023 16:50:38 -0400 Subject: [PATCH 0143/1092] rename script --- ...humb-failures.sql => V5.13.0.2__9506-track-thumb-failures.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.13.0.1__9506-track-thumb-failures.sql => V5.13.0.2__9506-track-thumb-failures.sql} (100%) diff --git a/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V5.13.0.2__9506-track-thumb-failures.sql similarity index 100% rename from src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql rename to src/main/resources/db/migration/V5.13.0.2__9506-track-thumb-failures.sql From dbc36c9d938571a5b61156611c445d266fbafe76 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Jun 2023 17:06:19 -0400 Subject: [PATCH 0144/1092] refactor - remove duplicate code --- .../dataaccess/ImageThumbConverter.java | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 921faba7989..fb0785ffd7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -114,7 +114,11 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s logger.fine("Found cached thumbnail for " + file.getId()); return true; } + return generateThumbnail(storageIO, size); + } + + private static boolean generateThumbnail(StorageIO storageIO, int size) { logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { @@ -131,7 +135,6 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } return false; - } // Note that this method works on ALL file types for which thumbnail @@ -446,25 +449,11 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); // try to generate, if not available and hasn't failed before - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + "to generate base64 thumbnail, file id: " + file.getId()); - if (!file.isPreviewsHaveFailed()) { - boolean generated = false; - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - generated = generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - generated = generatePDFThumbnail(storageIO, size); - } - - if (!generated) { - // Record failure - logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - } else { - // Success - try to open again: - try { - cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); - } catch (Exception ioEx) { - cachedThumbnailChannel = null; - } + if(generateThumbnail(storageIO, size)) { + try { + cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + } catch (Exception ioEx) { + cachedThumbnailChannel = null; } } From 0c8972304a43c25ed1de1c5cc6cc1c09ef419948 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 14 Jun 2023 10:30:05 -0400 Subject: [PATCH 0145/1092] try ds logos as url requests --- .../iq/dataverse/ThumbnailServiceWrapper.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index e2bb21c8a4c..66f79472178 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.api.Datasets; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -12,7 +13,8 @@ import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.util.FileUtil; -import java.io.File; +import edu.harvard.iq.dataverse.util.SystemConfig; + import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; @@ -21,6 +23,8 @@ import java.util.Base64; import java.util.HashMap; import java.util.Map; +import java.util.logging.Logger; + import javax.ejb.EJB; import javax.enterprise.context.RequestScoped; import javax.faces.view.ViewScoped; @@ -36,6 +40,9 @@ @RequestScoped @Named public class ThumbnailServiceWrapper implements java.io.Serializable { + + private static final Logger logger = Logger.getLogger(ThumbnailServiceWrapper.class.getCanonicalName()); + @Inject PermissionsWrapper permissionsWrapper; @EJB @@ -214,7 +221,13 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo this.dvobjectThumbnailsMap.put(datasetId, ""); return null; } + + String url = SystemConfig.getDataverseSiteUrlStatic() + "/datasets/" + dataset.getId() + "/logo"; + logger.fine("getDatasetCardImageAsBase64Url: " + url); + this.dvobjectThumbnailsMap.put(datasetId,url); + return url; +/* String cardImageUrl = null; StorageIO dataAccess = null; @@ -320,6 +333,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo //logger.info("dataset id " + result.getEntityId() + ", returning " + cardImageUrl); return cardImageUrl; + */ } // it's the responsibility of the user - to make sure the search result From dc4b6ae5201af228b1b484c6dd430713f8728ccc Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 14 Jun 2023 
17:19:41 -0400 Subject: [PATCH 0146/1092] set the datasetid for search cards --- .../java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 66f79472178..4c3778527d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -194,6 +194,7 @@ public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { return null; } Dataset dataset = (Dataset)result.getEntity(); + dataset.setId(result.getEntityId()); Long versionId = result.getDatasetVersionId(); From 546cfdf2048158320e76a9345e9ebc3caf7ca6c2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 1 Jun 2023 15:09:25 -0400 Subject: [PATCH 0147/1092] fix non-merge-able error when recording thumb fail --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 880b2ea7dc4..ec12480d28d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1148,11 +1148,10 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; - } else { - file.setPreviewsHaveFailed(true); - this.save(file); - return false; } + file.setPreviewsHaveFailed(true); + this.save(file); + return false; } From d3a48dffdfaa56bba065b3c36a2b6469e4227c33 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 14 Jun 2023 17:44:02 -0400 Subject: [PATCH 0148/1092] typo --- .../java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 4c3778527d7..8dda91fd6a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -223,7 +223,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo return null; } - String url = SystemConfig.getDataverseSiteUrlStatic() + "/datasets/" + dataset.getId() + "/logo"; + String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsBase64Url: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; From f505428f12a5ead774642837bdb871deda34ee27 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 13:13:01 -0400 Subject: [PATCH 0149/1092] only send url if thumb should exist --- .../iq/dataverse/ThumbnailServiceWrapper.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 8dda91fd6a3..19c53ffa77e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.api.Datasets; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.StorageIO; 
+import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail; @@ -222,6 +223,20 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo this.dvobjectThumbnailsMap.put(datasetId, ""); return null; } + DataFile thumbnailFile = dataset.getThumbnailFile(); + + if (thumbnailFile == null) { + thumbnailFile = DatasetUtil.attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null); + if (thumbnailFile == null) { + logger.fine("Dataset (id :" + dataset.getId() + ") does not have a logo available that could be selected automatically."); + return null; + } + } + if (thumbnailFile.isRestricted()) { + logger.fine("Dataset (id :" + dataset.getId() + ") has a logo the user selected but the file must have later been restricted. Returning null."); + return null; + } + String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsBase64Url: " + url); From 2d177a60fe67df26bafad35cf237e048a21545ee Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 15:08:15 -0400 Subject: [PATCH 0150/1092] use inputStream.transferTo --- .../dataaccess/ImageThumbConverter.java | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index fb0785ffd7b..bd87c5541a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -223,30 +223,32 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s } if (tempFilesRequired) { - ReadableByteChannel pdfFileChannel; - + //ReadableByteChannel pdfFileChannel; + InputStream inputStream = null; try { storageIO.open(); - //inputStream = storageIO.getInputStream(); - pdfFileChannel = storageIO.getReadChannel(); + inputStream = storageIO.getInputStream(); + //pdfFileChannel = storageIO.getReadChannel(); } catch (Exception ioex) { logger.warning("caught Exception trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier()); return false; } File tempFile; - FileChannel tempFileChannel = null; + OutputStream outputStream = null; + //FileChannel tempFileChannel = null; try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); - tempFileChannel = new FileOutputStream(tempFile).getChannel(); + outputStream = new FileOutputStream(tempFile); + inputStream.transferTo(outputStream); - tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); + //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { logger.warning("GenerateImageThumb: failed to save pdf bytes in a temporary file."); return false; } finally { - IOUtils.closeQuietly(tempFileChannel); - IOUtils.closeQuietly(pdfFileChannel); + IOUtils.closeQuietly(inputStream); + IOUtils.closeQuietly(outputStream); } sourcePdfFile = tempFile; } From 6540b5da0966addffa3a0a6a9d7e67735f89e237 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 15:42:29 -0400 Subject: [PATCH 0151/1092] add debug --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index bd87c5541a5..4a2b8ea0e6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -240,7 +240,8 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); outputStream = new FileOutputStream(tempFile); - inputStream.transferTo(outputStream); + long sz = inputStream.transferTo(outputStream); + logger.info(" wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { @@ -763,7 +764,7 @@ public static String generatePDFThumbnailFromFile(String fileLocation, int size) try { fileSize = new File(fileLocation).length(); } catch (Exception ex) { - // + logger.warning("Can't open file: " + fileLocation); } if (fileSize == 0 || fileSize > sizeLimit) { From e202d0abc7395fe85218745510b32ade9b6ca770 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 19 Jun 2023 16:15:58 -0400 Subject: [PATCH 0152/1092] more debug --- .../iq/dataverse/dataaccess/ImageThumbConverter.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 4a2b8ea0e6d..3033269f3bc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -196,6 +196,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s // We rely on ImageMagick to convert PDFs; so if it's not installed, // better give up right away: if (!isImageMagickInstalled()) { + logger.info("Couldn't find IM"); return false; } @@ -218,12 +219,15 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s tempFilesRequired = true; } catch (IOException ioex) { + logger.warning(ioex.getMessage()); + ioex.printStackTrace(); // this on the other hand is likely a fatal condition :( return false; } if (tempFilesRequired) { //ReadableByteChannel pdfFileChannel; + logger.info("Creating temp file"); InputStream inputStream = null; try { storageIO.open(); @@ -241,7 +245,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s tempFile = File.createTempFile("tempFileToRescale", ".tmp"); outputStream = new FileOutputStream(tempFile); long sz = inputStream.transferTo(outputStream); - logger.info(" wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); + logger.info("Wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); } catch (IOException ioex) { From b9cd2bbf0c42fb4e7aada29d7cea817c195ca75d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 20 Jun 2023 10:22:05 -0400 Subject: [PATCH 0153/1092] include failed preview flag in queries --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 439e4b17ed4..0bd0a01aef1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -762,7 +762,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - // + "AND o.previewImageAvailable = false " + + "AND o.previewshavefailed = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.contenttype LIKE 'image/%' " @@ -796,7 +796,7 @@ public Long getThumbnailByVersionId(Long versionId) { + "AND df.id = o.id " + "AND fm.datasetversion_id = dv.id " + "AND fm.datafile_id = df.id " - // + "AND o.previewImageAvailable = false " + + "AND o.previewshavefailed = false " + "AND df.restricted = false " + "AND df.embargo_id is null " + "AND df.contenttype = 'application/pdf' " From ac5a9564848ba241a993e8e9252641820e9041b4 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 20 Jun 2023 10:22:59 -0400 Subject: [PATCH 0154/1092] use getThumbnailByVersionId --- .../iq/dataverse/ThumbnailServiceWrapper.java | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 19c53ffa77e..ff5e510e82c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -226,23 +226,20 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo DataFile thumbnailFile = dataset.getThumbnailFile(); if (thumbnailFile == null) { - thumbnailFile = DatasetUtil.attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null); - if (thumbnailFile == null) { - logger.fine("Dataset (id :" + dataset.getId() + ") does not have a logo available that could be selected automatically."); - return null; - } - } - if (thumbnailFile.isRestricted()) { - logger.fine("Dataset (id :" + dataset.getId() + ") has a logo the user selected but the file must have later been restricted. 
Returning null."); - return null; + + // We attempt to auto-select via the optimized, native query-based method + // from the DatasetVersionService: + if (datasetVersionService.getThumbnailByVersionId(versionId) == null) { + return null; + } } - String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; logger.fine("getDatasetCardImageAsBase64Url: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; + /* String cardImageUrl = null; StorageIO dataAccess = null; From 98acd6b50af770779329de1201663d8599edf16a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 20 Jun 2023 10:49:24 -0400 Subject: [PATCH 0155/1092] cleanup --- .../dataverse/dataaccess/ImageThumbConverter.java | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 3033269f3bc..458b8da227b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -196,7 +196,7 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s // We rely on ImageMagick to convert PDFs; so if it's not installed, // better give up right away: if (!isImageMagickInstalled()) { - logger.info("Couldn't find IM"); + logger.fine("Couldn't find ImageMagick"); return false; } @@ -220,19 +220,15 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s } catch (IOException ioex) { logger.warning(ioex.getMessage()); - ioex.printStackTrace(); // this on the other hand is likely a fatal condition :( return false; } if (tempFilesRequired) { - //ReadableByteChannel pdfFileChannel; - logger.info("Creating temp file"); InputStream inputStream = null; try { storageIO.open(); inputStream = storageIO.getInputStream(); - //pdfFileChannel = storageIO.getReadChannel(); } catch (Exception ioex) { logger.warning("caught Exception trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier()); return false; @@ -240,14 +236,11 @@ private static boolean generatePDFThumbnail(StorageIO storageIO, int s File tempFile; OutputStream outputStream = null; - //FileChannel tempFileChannel = null; try { tempFile = File.createTempFile("tempFileToRescale", ".tmp"); outputStream = new FileOutputStream(tempFile); - long sz = inputStream.transferTo(outputStream); - logger.info("Wrote " + sz + " bytes to " + tempFile.getAbsolutePath()); - - //tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize()); + //Reads/transfers all bytes from the input stream to the output stream. + inputStream.transferTo(outputStream); } catch (IOException ioex) { logger.warning("GenerateImageThumb: failed to save pdf bytes in a temporary file."); return false; From 98e5b3fbae8871ef0fecbd0550ad8fefb00e2b22 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 12:34:37 +0200 Subject: [PATCH 0156/1092] fix(ct): enable sane default for upload storage location in containers The default from microprofile-config.properties does NOT work, as the location must already be resolvable while the servlet is being initialized - the app shipped defaults file is not yet read at this point. This is similar to the database options, which must be set using one of the other Payara included config sources. (Non-easily resolvable timing issue). 
The solution for containers is to add an env var to the Dockerfile, which can be overridden by any env var from compose or K8s etc. (Problem is the high ordinal of the env source though) --- src/main/docker/Dockerfile | 4 +++- src/main/resources/META-INF/microprofile-config.properties | 1 - 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 88020a118b5..f64e88cb414 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -27,7 +27,9 @@ FROM $BASE_IMAGE # Make Payara use the "ct" profile for MicroProfile Config. Will switch various defaults for the application # setup in META-INF/microprofile-config.properties. # See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile -ENV MP_CONFIG_PROFILE=ct +ENV MP_CONFIG_PROFILE=ct \ + # NOTE: this cannot be provided as default from microprofile-config.properties as not yet avail when servlet starts + DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 7c16495f870..748ed6de55a 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,7 +12,6 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} -%ct.dataverse.files.uploads=${STORAGE_DIR}/uploads # SEARCH INDEX dataverse.solr.host=localhost From d71cdf2d427011fc660794bb12afbab9db1c2bc7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 16:07:03 +0200 Subject: [PATCH 0157/1092] fix(ct,conf): switch to different approach to default upload location Instead of trying to provide a default using STORAGE_DIR env var from microprofile-config.properties as before, using this env var reference in glassfish-web.xml directly now. By defaulting to "." if not present (as in classic installations), it is fully equivalent to the former hardcoded default value. Providing a synced variant of it in microprofile-config.properties and leaving a hint about the pitfalls, we can reuse the setting for other purposes within the codebase as well (and expect the same behaviour because same defaults). --- src/main/docker/Dockerfile | 4 +--- src/main/resources/META-INF/microprofile-config.properties | 6 ++++++ src/main/webapp/WEB-INF/glassfish-web.xml | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index f64e88cb414..88020a118b5 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -27,9 +27,7 @@ FROM $BASE_IMAGE # Make Payara use the "ct" profile for MicroProfile Config. Will switch various defaults for the application # setup in META-INF/microprofile-config.properties.
# See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile -ENV MP_CONFIG_PROFILE=ct \ - # NOTE: this cannot be provided as default from microprofile-config.properties as not yet avail when servlet starts - DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" +ENV MP_CONFIG_PROFILE=ct # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 748ed6de55a..f3745126cb2 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,6 +12,12 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} +# NOTE: the following uses STORAGE_DIR for both containers and classic installations. By defaulting to "." if not +# present, it equals the hardcoded default from before again. Also, be aware that this props file cannot provide +# any value for lookups in glassfish-web.xml during servlet initialization, as this file will not have +# been read yet! The names and their values are in sync here and over there to ensure the config checker +# is able to check for the directories (exist + writeable). +dataverse.files.uploads=${STORAGE_DIR:.}/uploads # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index e56d7013abf..8041ebd4447 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -18,5 +18,5 @@ This folder is not only holding compiled JSP pages but also the place where file streams are stored during uploads. As Dataverse does not use any JSP, there will only be uploads stored here. --> - + 

From a4ec3a66e76aa1559aea0c05cedc2da2b38d7b03 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 16:44:08 +0200 Subject: [PATCH 0158/1092] feat(conf): introduce ConfigCheckService to validate config on startup #9572 Starting with important local storage locations for the Dataverse application, this service uses EJB startup mechanisms to verify configuration bits on startup. Checks for the temp storage location and JSF upload location as crucial parts of the app, which, if they do not exist or are write-protected, will only cause errors and failures on the first data upload attempt. This is not desirable as it might cause users to be blocked.
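For context on the mechanism relied upon here: a RuntimeException escaping the @PostConstruct method of a @Startup @Singleton EJB makes the container abort application startup, so misconfiguration surfaces at deploy time rather than on first use. A minimal sketch of the pattern, with illustrative names (not the full service added below):

    import javax.annotation.PostConstruct;
    import javax.ejb.Singleton;
    import javax.ejb.Startup;

    @Startup
    @Singleton
    public class ExampleStartupCheck {
        @PostConstruct
        public void check() {
            // Any RuntimeException thrown here prevents the application from deploying.
            if (System.getenv("REQUIRED_SETTING") == null) {
                throw new RuntimeException("REQUIRED_SETTING missing, aborting deployment");
            }
        }
    }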
--- .../settings/ConfigCheckService.java | 65 +++++++++++++++++++ .../iq/dataverse/settings/JvmSettings.java | 1 + .../harvard/iq/dataverse/util/FileUtil.java | 29 ++++----- 3 files changed, 77 insertions(+), 18 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java new file mode 100644 index 00000000000..4ba028903b0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -0,0 +1,65 @@ +package edu.harvard.iq.dataverse.settings; + +import edu.harvard.iq.dataverse.util.FileUtil; + +import javax.annotation.PostConstruct; +import javax.ejb.DependsOn; +import javax.ejb.Singleton; +import javax.ejb.Startup; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@Startup +@Singleton +@DependsOn("StartupFlywayMigrator") +public class ConfigCheckService { + + private static final Logger logger = Logger.getLogger(ConfigCheckService.class.getCanonicalName()); + + public static class ConfigurationError extends RuntimeException { + public ConfigurationError(String message) { + super(message); + } + } + + @PostConstruct + public void startup() { + if (!checkSystemDirectories()) { + throw new ConfigurationError("Not all configuration checks passed successfully. See logs above."); + } + } + + /** + * In this method, we check the existence and write-ability of all important directories we use during + * normal operations. It does not include checks for the storage system. If directories are not available, + * try to create them (and fail when not allowed to). + * + * @return True if all checks successful, false otherwise. 
+ */ + public boolean checkSystemDirectories() { + Map paths = Map.of( + Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")", + Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")"); + + boolean success = true; + for (Path path : paths.keySet()) { + if (Files.notExists(path)) { + try { + Files.createDirectories(path); + } catch (IOException e) { + logger.log(Level.SEVERE, () -> "Could not create directory " + path + " for " + paths.get(path)); + success = false; + } + } else if (!Files.isWritable(path)) { + logger.log(Level.SEVERE, () -> "Directory " + path + " for " + paths.get(path) + " exists, but is not writeable"); + success = false; + } + } + return success; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index ff04a633ea7..c5c5682821a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -49,6 +49,7 @@ public enum JvmSettings { // FILES SETTINGS SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), + UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..ee1ee5a6a1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -40,6 +40,7 @@ import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.ingest.IngestableDataChecker; import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.file.BagItFileHandler; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; @@ -1478,25 +1479,17 @@ public static boolean canIngestAsTabular(String mimeType) { } } + /** + * Return the location where data should be stored temporarily after uploading (UI or API) + * for local processing (ingest, unzip, ...) and transfer to final destination (see storage subsystem). + * + * This location is checked to be configured, does exist, and is writeable via + * {@link ConfigCheckService#checkSystemDirectories()}. + * + * @return String with a path to the temporary location. Will not be null (former versions did to indicate failure) + */ public static String getFilesTempDirectory() { - - String filesRootDirectory = JvmSettings.FILES_DIRECTORY.lookup(); - String filesTempDirectory = filesRootDirectory + "/temp"; - - if (!Files.exists(Paths.get(filesTempDirectory))) { - /* Note that "createDirectories()" must be used - not - * "createDirectory()", to make sure all the parent - * directories that may not yet exist are created as well. 
- */ - try { - Files.createDirectories(Paths.get(filesTempDirectory)); - } catch (IOException ex) { - logger.severe("Failed to create filesTempDirectory: " + filesTempDirectory ); - return null; - } - } - - return filesTempDirectory; + return JvmSettings.FILES_DIRECTORY.lookup() + File.separator + "temp"; } public static void generateS3PackageStorageIdentifier(DataFile dataFile) { From 6999093dcea8e889a24aafbe84dd6035e8a4b5db Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 17:37:40 +0200 Subject: [PATCH 0159/1092] feat(conf): make docroot location configurable #9662 Add JVM Setting and add to config checker on startup to ensure target location is in good shape. --- .../harvard/iq/dataverse/settings/ConfigCheckService.java | 3 ++- .../edu/harvard/iq/dataverse/settings/JvmSettings.java | 1 + .../resources/META-INF/microprofile-config.properties | 1 + src/main/webapp/WEB-INF/glassfish-web.xml | 8 ++++---- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index 4ba028903b0..443d12fc17a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -43,7 +43,8 @@ public void startup() { public boolean checkSystemDirectories() { Map paths = Map.of( Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")", - Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")"); + Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")", + Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup()), "docroot space (see " + JvmSettings.DOCROOT_DIRECTORY.getScopedKey() + ")"); boolean success = true; for (Path path : paths.keySet()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index c5c5682821a..540dc8201a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -50,6 +50,7 @@ public enum JvmSettings { SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), + DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index f3745126cb2..597d50b2e0c 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -18,6 +18,7 @@ dataverse.files.directory=/tmp/dataverse # been read yet! The names and their values are in sync here and over there to ensure the config checker # is able to check for the directories (exist + writeable). 
dataverse.files.uploads=${STORAGE_DIR:.}/uploads +dataverse.files.docroot=${STORAGE_DIR:.}/docroot # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index 8041ebd4447..5088e5a7fba 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -10,10 +10,10 @@ - - - - + + + + + From 2913a52f35645621bace35c93a9c0b2707004da1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 18:32:55 +0200 Subject: [PATCH 0163/1092] refactor(conf): simplify sitemap output location lookup using new docroot setting --- .../iq/dataverse/sitemap/SiteMapUtil.java | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java index e32b811ee2c..86ae697f771 100644 --- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java @@ -3,6 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; @@ -210,16 +212,17 @@ public static boolean stageFileExists() { } return false; } - + + /** + * Lookup the location where to generate the sitemap. + * + * Note: the location is checked to be configured, does exist and is writeable in + * {@link ConfigCheckService#checkSystemDirectories()} + * + * @return Sitemap storage location ([docroot]/sitemap) + */ private static String getSitemapPathString() { - String sitemapPathString = "/tmp"; - // i.e. /usr/local/glassfish4/glassfish/domains/domain1 - String domainRoot = System.getProperty("com.sun.aas.instanceRoot"); - if (domainRoot != null) { - // Note that we write to a directory called "sitemap" but we serve just "/sitemap.xml" using PrettyFaces. 
- sitemapPathString = domainRoot + File.separator + "docroot" + File.separator + "sitemap"; - } - return sitemapPathString; + return JvmSettings.DOCROOT_DIRECTORY.lookup() + File.separator + "sitemap"; } } From 610c65dc9ddd403041ee95475810db2977e57623 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 21 Jun 2023 12:56:13 -0400 Subject: [PATCH 0164/1092] rename and cleanup --- .../edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../iq/dataverse/DataverseServiceBean.java | 45 ------- .../iq/dataverse/ThumbnailServiceWrapper.java | 117 +----------------- .../search/SearchIncludeFragment.java | 2 +- 4 files changed, 6 insertions(+), 160 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 3d608153ba3..2ca1fb825f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -483,7 +483,7 @@ public String getThumbnailString() { thumbnailString = datasetThumbnail.getBase64image(); } else { - thumbnailString = thumbnailServiceWrapper.getDatasetCardImageAsBase64Url(dataset, + thumbnailString = thumbnailServiceWrapper.getDatasetCardImageAsUrl(dataset, workingVersion.getId(), !workingVersion.isDraft(), ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index e092f209acd..e99458fbc9d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -346,51 +346,6 @@ public String getDataverseLogoThumbnailAsBase64ById(Long dvId) { } return null; } - - /* - public boolean isDataverseLogoThumbnailAvailable(Dataverse dataverse, User user) { - if (dataverse == null) { - return false; - } - - // First, check if the dataverse has a defined logo: - - //if (dataverse.getDataverseTheme() != null && dataverse.getDataverseTheme().getLogo() != null && !dataverse.getDataverseTheme().getLogo().equals("")) { - File dataverseLogoFile = getLogo(dataverse); - if (dataverseLogoFile != null) { - String logoThumbNailPath = null; - - if (dataverseLogoFile.exists()) { - logoThumbNailPath = ImageThumbConverter.generateImageThumbnailFromFile(dataverseLogoFile.getAbsolutePath(), 48); - if (logoThumbNailPath != null) { - return true; - } - } - } - //} - */ - // If there's no uploaded logo for this dataverse, go through its - // [released] datasets and see if any of them have card images: - // - // TODO: - // Discuss/Decide if we really want to do this - i.e., go through every - // file in every dataset below... - // -- L.A. 
4.0 beta14 - /* - for (Dataset dataset : datasetService.findPublishedByOwnerId(dataverse.getId())) { - if (dataset != null) { - DatasetVersion releasedVersion = dataset.getReleasedVersion(); - - if (releasedVersion != null) { - if (datasetService.isDatasetCardImageAvailable(releasedVersion, user)) { - return true; - } - } - } - } */ - /* - return false; - } */ private File getLogo(Dataverse dataverse) { if (dataverse.getId() == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index ff5e510e82c..c75c29ea094 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -177,7 +177,7 @@ public boolean isThumbnailAvailable(DataFile entity) { // it's the responsibility of the user - to make sure the search result // passed to this method is of the Dataset type! - public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { + public String getDatasetCardImageAsUrl(SolrSearchResult result) { // Before we do anything else, check if it's a harvested dataset; // no need to check anything else if so (harvested datasets never have // thumbnails) @@ -199,10 +199,10 @@ public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { Long versionId = result.getDatasetVersionId(); - return getDatasetCardImageAsBase64Url(dataset, versionId, result.isPublishedState(), ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + return getDatasetCardImageAsUrl(dataset, versionId, result.isPublishedState(), ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); } - public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, boolean autoselect, int size) { + public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean autoselect, int size) { Long datasetId = dataset.getId(); if (datasetId != null) { if (this.dvobjectThumbnailsMap.containsKey(datasetId)) { @@ -235,118 +235,9 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo } String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo"; - logger.fine("getDatasetCardImageAsBase64Url: " + url); + logger.fine("getDatasetCardImageAsUrl: " + url); this.dvobjectThumbnailsMap.put(datasetId,url); return url; - - -/* - String cardImageUrl = null; - StorageIO dataAccess = null; - - try{ - dataAccess = DataAccess.getStorageIO(dataset); - } - catch(IOException ioex){ - // ignore - } - - InputStream in = null; - // See if the dataset already has a dedicated thumbnail ("logo") saved as - // an auxilary file on the dataset level: - // (don't bother checking if it exists; just try to open the input stream) - try { - in = dataAccess.getAuxFileAsInputStream(datasetLogoThumbnail + ".thumb" + size); - //thumb48addedByImageThumbConverter); - } catch (Exception ioex) { - //ignore - } - - if (in != null) { - try { - byte[] bytes = IOUtils.toByteArray(in); - String base64image = Base64.getEncoder().encodeToString(bytes); - cardImageUrl = FileUtil.DATA_URI_SCHEME + base64image; - this.dvobjectThumbnailsMap.put(datasetId, cardImageUrl); - return cardImageUrl; - } catch (IOException ex) { - this.dvobjectThumbnailsMap.put(datasetId, ""); - return null; - // (alternatively, we could ignore the exception, and proceed with the - // regular process of selecting the thumbnail from the available - // image files - ?) 
- } finally - { - IOUtils.closeQuietly(in); - } - } - - // If not, see if the dataset has one of its image files already assigned - // to be the designated thumbnail: - cardImageUrl = this.getAssignedDatasetImage(dataset, size); - - if (cardImageUrl != null) { - //logger.info("dataset id " + result.getEntity().getId() + " has a dedicated image assigned; returning " + cardImageUrl); - return cardImageUrl; - } - - // And finally, try to auto-select the thumbnail (unless instructed not to): - - if (!autoselect) { - return null; - } - - // We attempt to auto-select via the optimized, native query-based method - // from the DatasetVersionService: - Long thumbnailImageFileId = datasetVersionService.getThumbnailByVersionId(versionId); - - if (thumbnailImageFileId != null) { - //cardImageUrl = FILE_CARD_IMAGE_URL + thumbnailImageFileId; - if (this.dvobjectThumbnailsMap.containsKey(thumbnailImageFileId)) { - // Yes, return previous answer - //logger.info("using cached result for ... "+datasetId); - if (!"".equals(this.dvobjectThumbnailsMap.get(thumbnailImageFileId))) { - return this.dvobjectThumbnailsMap.get(thumbnailImageFileId); - } - return null; - } - - DataFile thumbnailImageFile = null; - - if (dvobjectViewMap.containsKey(thumbnailImageFileId) - && dvobjectViewMap.get(thumbnailImageFileId).isInstanceofDataFile()) { - thumbnailImageFile = (DataFile) dvobjectViewMap.get(thumbnailImageFileId); - } else { - thumbnailImageFile = dataFileService.findCheapAndEasy(thumbnailImageFileId); - if (thumbnailImageFile != null) { - // TODO: - // do we need this file on the map? - it may not even produce - // a thumbnail! - dvobjectViewMap.put(thumbnailImageFileId, thumbnailImageFile); - } else { - this.dvobjectThumbnailsMap.put(thumbnailImageFileId, ""); - return null; - } - } - - if (isThumbnailAvailable(thumbnailImageFile)) { - cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( - thumbnailImageFile, - size); - //ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - } - - if (cardImageUrl != null) { - this.dvobjectThumbnailsMap.put(thumbnailImageFileId, cardImageUrl); - } else { - this.dvobjectThumbnailsMap.put(thumbnailImageFileId, ""); - } - } - - //logger.info("dataset id " + result.getEntityId() + ", returning " + cardImageUrl); - - return cardImageUrl; - */ } // it's the responsibility of the user - to make sure the search result diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index bfe397cf48c..99fe4cd979b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -1302,7 +1302,7 @@ public void setDisplayCardValues() { result.setImageUrl(thumbnailServiceWrapper.getDataverseCardImageAsBase64Url(result)); } else if (result.getType().equals("datasets")) { if (result.getEntity() != null) { - result.setImageUrl(thumbnailServiceWrapper.getDatasetCardImageAsBase64Url(result)); + result.setImageUrl(thumbnailServiceWrapper.getDatasetCardImageAsUrl(result)); } if (result.isHarvested()) { From 391504de43d8992e4b97d506fdfc763e512a8fc4 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 21 Jun 2023 13:46:35 -0400 Subject: [PATCH 0165/1092] api docs --- doc/sphinx-guides/source/api/native-api.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index b39cf91337a..24f6c0d4ced 100644 
--- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4649,3 +4649,23 @@ A curl example using an ``ID`` curl -X POST -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/feedback Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`. + +.. _thumbnail_reset: + +Reset Thumbnail Failure Flags +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If Dataverse attempts to create a thumbnail image for an image or pdf file and the attempt fails, Dataverse will set a flag for the file to avoid repeated attempts to generate the thumbnail. +For cases where the problem may have been temporary (or fixed in a later Dataverse release), two API calls exist to reset this flag for all files or for a given file. + +Curl examples + +.. code-block:: bash + + export SERVER_URL=http://localhost + export fileID=1234 + + curl -X DELETE $SERVER_URL/api/admin/clearThumbnailFailureFlag + + curl -X DELETE $SERVER_URL/api/admin/clearThumbnailFailureFlag/$fileID + From de7963a0635646f6c00e1362fc87152029394839 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 21 Jun 2023 13:53:30 -0400 Subject: [PATCH 0166/1092] refactor typo --- .../iq/dataverse/dataaccess/ImageThumbConverter.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 458b8da227b..febf659b71a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -114,11 +114,11 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s logger.fine("Found cached thumbnail for " + file.getId()); return true; } - return generateThumbnail(storageIO, size); + return generateThumbnail(file, storageIO, size); } - private static boolean generateThumbnail(StorageIO storageIO, int size) { + private static boolean generateThumbnail(DataFile file, StorageIO storageIO, int size) { logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { @@ -449,7 +449,7 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); // try to generate, if not available and hasn't failed before - if(generateThumbnail(storageIO, size)) { + if(generateThumbnail(file, storageIO, size)) { try { cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); } catch (Exception ioEx) { From fd4d9199fade70e6a2387ad02051bbf4865fa9f2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 10:33:37 -0400 Subject: [PATCH 0167/1092] add gb fragment --- .../guestbook-terms-popup-fragment.xhtml | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 9c3391ef9ae..829fa6539b9 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -40,6 +40,111 @@
    + <!-- 105 lines of new guestbook/terms popup XHTML were added in this hunk (per the diffstat above); the tag markup was not preserved in this extract -->
    +
    Date: Thu, 22 Jun 2023 10:34:11 -0400 Subject: [PATCH 0168/1092] change render param - not clear this fragment stays here though --- src/main/webapp/file-download-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml index e1020c85e69..6fe3863b85f 100644 --- a/src/main/webapp/file-download-popup-fragment.xhtml +++ b/src/main/webapp/file-download-popup-fragment.xhtml @@ -138,7 +138,7 @@ value="#{MarkupChecker:sanitizeBasicHTML(workingVersion.termsOfUseAndAccess.termsOfAccess)}" escape="false" />
    - + #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -167,7 +168,7 @@ action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + @@ -186,7 +187,7 @@ disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + #{bundle['file.downloadBtn.format.tab']} @@ -205,7 +206,7 @@ action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + #{bundle['file.downloadBtn.format.rdata']} From 58bc6c9a264ca7ef5caf44ad3fbc3a04a38f67fb Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 15:40:48 -0400 Subject: [PATCH 0170/1092] missing actionListeners --- src/main/webapp/filesFragment.xhtml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 4c1a943b86e..7e1cb4ac4cd 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -444,6 +444,7 @@ onclick="if (!testFilesSelected()) return false;" action="#{DatasetPage.startDownloadSelectedOriginal()}" update="@form" oncomplete="showPopup();"> + #{bundle.download} @@ -462,6 +463,7 @@ update="@form" oncomplete="showPopup();" onclick="if (!testFilesSelected()) return false;" actionListener="#{DatasetPage.startDownloadSelectedOriginal()}"> + #{bundle.downloadOriginal} From 60c4db0d593b455a02d6df2ebe7801b8c9dbca5e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 15:42:24 -0400 Subject: [PATCH 0171/1092] add missing params, change fileDownloadHelper to EJB --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- src/main/webapp/dataset.xhtml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index f01fe00937e..40b0ba1a010 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -244,7 +244,7 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; - @Inject + @EJB FileDownloadHelper fileDownloadHelper; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index c39042a91ce..1cc8213ecd5 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1511,9 +1511,11 @@ + + From 1e8495cff14955a12f7829a55fb7b2a434c537e6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 16:07:04 -0400 Subject: [PATCH 0172/1092] use Inject and restore getter/setter --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 40b0ba1a010..e890752c19c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -244,7 +244,7 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; - @EJB + @Inject FileDownloadHelper fileDownloadHelper; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; @@ -5476,6 +5476,14 @@ public FileDownloadServiceBean getFileDownloadService() { public void setFileDownloadService(FileDownloadServiceBean fileDownloadService) { this.fileDownloadService = fileDownloadService; } + + public FileDownloadHelper getFileDownloadHelper() { + return fileDownloadHelper; + } + + public void setFileDownloadHelper(FileDownloadHelper fileDownloadHelper) { + this.fileDownloadHelper = fileDownloadHelper; + } public GuestbookResponseServiceBean getGuestbookResponseService() { From 1400e07486223465641e289756c7d38577dbf00b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 12:28:15 -0400 Subject: [PATCH 0173/1092] fix gb render conditions, initial fix for download buttons --- .../webapp/guestbook-terms-popup-fragment.xhtml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 829fa6539b9..ab75ffbe3e7 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -40,7 +40,7 @@ - + - + From 51005bad3afd714aa8edbbbb03eb0faf604470c2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 15:31:22 -0400 Subject: [PATCH 0177/1092] update fragment with license info and buttons for other views --- .../guestbook-terms-popup-fragment.xhtml | 195 +++++++++++++++--- 1 file changed, 168 insertions(+), 27 deletions(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index ab75ffbe3e7..4bddbb0dd38 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -9,37 +9,138 @@ xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> -

    - <!-- removed dialog text bound to #{someActivelyEmbargoedFiles ? bundle['file.requestAccessTermsDialog.embargoed.tip'] : bundle['file.requestAccessTermsDialog.tip']} -->
    - <!-- removed embargo notice bound to #{bundle['file.requestAccessTermsDialog.embargoed']} -->
    + <!-- expanded license/terms markup added in its place (168 insertions, 27 deletions per the diffstat); XHTML tags not preserved in this extract -->
    \ No newline at end of file From aa60eae8826bf72309e595afe7dd96a59fb44c74 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 8 Sep 2023 17:01:37 +0100 Subject: [PATCH 0382/1092] Added: deleted, tabularData, and fileAccessRequest boolean fields to DataFile API payload --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index fb8df057dcc..9bda1e24cfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -688,9 +688,12 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo //--------------------------------------------- .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) + .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) - .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); + .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()) + .add("deleted", df.getDeleted()) + .add("fileAccessRequest", df.getOwner().isFileAccessRequest()); /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the From 312aedd3a2cc816e686c29e071c96a47081af29e Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 8 Sep 2023 17:28:33 +0100 Subject: [PATCH 0383/1092] Stash: userFileAccessRequested endpoint WIP --- .../edu/harvard/iq/dataverse/api/Access.java | 59 +++++++++++++------ 1 file changed, 41 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index ccdec19456c..52d97703ff9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1678,7 +1678,47 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); } } - + + @GET + @AuthRequired + @Path("/datafile/{id}/userFileAccessRequested") + public Response userFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + AuthenticatedUser requestAuthenticatedUser; + try { + dataFile = findDataFileOrDie(dataFileId); + requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + boolean fileAccessRequested = false; + List requests = dataFile.getFileAccessRequests(); + for (FileAccessRequest fileAccessRequest : requests) { + if (fileAccessRequest.getAuthenticatedUser().getId().equals(requestAuthenticatedUser.getId())) { + fileAccessRequested = true; + break; + } + } + return ok(fileAccessRequested); + } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder jsonObjectBuilder = 
Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } + // checkAuthorization is a convenience method; it calls the boolean method // isAccessAuthorized(), the actual workhorse, tand throws a 403 exception if not. @@ -1946,21 +1986,4 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } - - @GET - @AuthRequired - @Path("/datafile/{id}/userPermissions") - public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - try { - dataFile = findDataFileOrDie(dataFileId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); - jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); - return ok(jsonObjectBuilder); - } } From 5024ac47aec272971d4146d8944e5a60a3b023ad Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Sep 2023 12:03:04 -0400 Subject: [PATCH 0384/1092] create 6.0 release notes #9860 --- doc/release-notes/8094-java-17.md | 1 - doc/release-notes/8305-payara6-ee10-v3.md | 5 - doc/release-notes/9260-solr930.md | 63 ----- doc/release-notes/9340-payara5to6.md | 132 --------- doc/release-notes/9782-juni5-transition.md | 7 - doc/release-notes/9812-archiver-warnings.md | 7 - doc/release-notes/9838-rm-vagrant.md | 1 - doc/release-notes/9860-6.0-release-notes.md | 282 ++++++++++++++++++++ 8 files changed, 282 insertions(+), 216 deletions(-) delete mode 100644 doc/release-notes/8094-java-17.md delete mode 100644 doc/release-notes/8305-payara6-ee10-v3.md delete mode 100644 doc/release-notes/9260-solr930.md delete mode 100644 doc/release-notes/9340-payara5to6.md delete mode 100644 doc/release-notes/9782-juni5-transition.md delete mode 100644 doc/release-notes/9812-archiver-warnings.md delete mode 100644 doc/release-notes/9838-rm-vagrant.md create mode 100644 doc/release-notes/9860-6.0-release-notes.md diff --git a/doc/release-notes/8094-java-17.md b/doc/release-notes/8094-java-17.md deleted file mode 100644 index f3c81145465..00000000000 --- a/doc/release-notes/8094-java-17.md +++ /dev/null @@ -1 +0,0 @@ -Java 17 or higher is now required. diff --git a/doc/release-notes/8305-payara6-ee10-v3.md b/doc/release-notes/8305-payara6-ee10-v3.md deleted file mode 100644 index 94369e0211f..00000000000 --- a/doc/release-notes/8305-payara6-ee10-v3.md +++ /dev/null @@ -1,5 +0,0 @@ -Payara has been updated from version 5 to 6. - -Developers, you are encouraged to upgrade to Payara 6 immediately. - -Sysadmins, instructions on how to upgrade production installations will be written as part of https://github.com/IQSS/dataverse/issues/9340 diff --git a/doc/release-notes/9260-solr930.md b/doc/release-notes/9260-solr930.md deleted file mode 100644 index 07824920b3e..00000000000 --- a/doc/release-notes/9260-solr930.md +++ /dev/null @@ -1,63 +0,0 @@ -Solr has been upgraded to Solr 9. You must install Solr fresh and reindex. 
You cannot use your old schema.xml because the format has changed. - -Specifically, to install Solr fresh you should follow the instructions for new installations, found at https://guides.dataverse.org/en/9260-solr930/installation/prerequisites.html#installing-solr - -These instructions are copied below and tweaked a bit for an upgrade scenario. - -We assume that you already have a user called "solr" (from the instructions above), added during your initial installation of Solr. - -1. Become the "solr" user and then download and configure Solr. - - ``` - su - solr - cd /usr/local/solr - wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz - tar xvzf solr-9.3.0.tgz - cd solr-9.3.0 - cp -r server/solr/configsets/_default server/solr/collection1 - ``` - -1. Unzip "dvinstall.zip" from this release. Unzip it into /tmp. Then copy the following files into place. - - ``` - cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf - - cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf - ``` - -1. A Dataverse installation requires a change to the jetty.xml file that ships with Solr. - - Edit /usr/local/solr/solr-9.3.0/server/etc/jetty.xml , increasing `requestHeaderSize` from `8192` to `102400` - -1. Tell Solr to create the core "collection1" on startup. - - ``` - echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties - ``` - -1. Update your init script. - - Your init script may be located at `/etc/systemd/system/solr.service`, for example. Update the path to Solr to be `/usr/local/solr/solr-9.3.0`. - -1. Start Solr using your init script and check collection1. - - The collection1 check below should print out fields Dataverse uses like "dsDescription". - - ``` - systemctl start solr.service - curl http://localhost:8983/solr/collection1/schema/fields - ``` - -1. Reindex Solr. - - For details, see https://guides.dataverse.org/en/9260-solr930/admin/solr-search-index.html - - ``` - curl http://localhost:8080/api/admin/index - ``` - -1. If you have custom metadata blocks installed, you must update your Solr schema.xml to include your custom fields. - - For details, please see https://guides.dataverse.org/en/9260-solr930/admin/metadatacustomization.html#updating-the-solr-schema - - At a high level you will be copying custom fields from the output of http://localhost:8080/api/admin/index/solr/schema or using a script to automate this. diff --git a/doc/release-notes/9340-payara5to6.md b/doc/release-notes/9340-payara5to6.md deleted file mode 100644 index 68162ef7598..00000000000 --- a/doc/release-notes/9340-payara5to6.md +++ /dev/null @@ -1,132 +0,0 @@ -## Upgrade from Payara 5 to Payara 6 - -1. Download Payara 6.2023.8 as of this writing: - - `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip` - -1. Unzip it to /usr/local (or your preferred location): - - `sudo unzip payara-6.2023.8.zip -d /usr/local/` - -1. Change ownership of the unzipped Payara to your "service" user ("dataverse" by default): - - `sudo chown -R dataverse /usr/local/payara6` - -1. Undeploy Dataverse (if deployed, using the unprivileged service account. Version 5.14 is assumed in the example below): - - `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` - - `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` - -1. Stop Payara 5: - - `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` - -1. 
Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml: - - `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig` - - `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt` - - `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml` - - The lines will appear in two sections, examples shown below (but your content will vary). - - Section 1: system properties (under ``) - - ``` - - - - - - ``` - - Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below. - - Section 2: JVM options (under ``, the one under ``, not under ``) - - ``` - -Ddataverse.files.directory=/usr/local/dvn/data - -Ddataverse.files.file.type=file - -Ddataverse.files.file.label=file - -Ddataverse.files.file.directory=/usr/local/dvn/data - -Ddataverse.rserve.host=localhost - -Ddataverse.rserve.port=6311 - -Ddataverse.rserve.user=rserve - -Ddataverse.rserve.password=rserve - -Ddataverse.auth.password-reset-timeout-in-minutes=60 - -Ddataverse.timerServer=true - -Ddataverse.fqdn=dev1.dataverse.org - -Ddataverse.siteUrl=https://dev1.dataverse.org - -Ddataverse.files.storage-driver-id=file - -Ddoi.username=testaccount - -Ddoi.password=notmypassword - -Ddoi.baseurlstring=https://mds.test.datacite.org/ - -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org - ``` - -1. Check the `Xmx` setting in `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`. (The one under ``, where you put the JVM options, not the one under ``.) Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`). - -1. Copy jhove.conf and jhoveConfig.xsd from Payara 5, edit and change payara5 to payara6 - - `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/` - - `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*` - - `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf` - -1. Copy logos from Payara 5 to Payara 6 - - These logos are for collections (dataverses). - - `sudo -u dataverse cp -r /usr/local/payara5/glassfish/domains/domain1/docroot/logos /usr/local/payara6/glassfish/domains/domain1/docroot` - -1. If you are using Make Data Count (MDC), edit :MDCLogPath - - Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well. - -1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable. - -1. Start Payara: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` - -1. Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession` - -1. 
Create password aliases for your database, rserve and datacite jvm-options, if you're using them: - - ``` - $ echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt - $ sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt - Enter the value for the aliasname operand> dataverse.db.password - Command create-password-alias executed successfully. - ``` - - You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or Datacite. - -1. Enable workaround for FISH-7722: - - The following workaround is for https://github.com/payara/Payara/issues/6337 - - `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED` - -1. Create the network listener on port 8009 - - `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector` - -1. Deploy the Dataverse 6.0 warfile: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war` - -1. Check that you get a version number from Dataverse: - - `curl http://localhost:8080/api/info/version` - -1. Perform one final Payara restart to ensure that timers are initialized properly: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain` - - `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` diff --git a/doc/release-notes/9782-juni5-transition.md b/doc/release-notes/9782-juni5-transition.md deleted file mode 100644 index b7ffcc0de0d..00000000000 --- a/doc/release-notes/9782-juni5-transition.md +++ /dev/null @@ -1,7 +0,0 @@ -# Migrating all test to JUnit 5 -With this release, we transition all of our test cases (see `src/test/`) to use JUnit 5 only. -Moving forward from JUnit 4 will allow writing tests in more concise and easier ways. -The tests themselves have not been altered, but updated to match JUnit 5 ways. -They have not been extended or dropped coverage; this is mostly a preparation of things to come in the future. -If you are writing tests in JUnit 4 in your feature branches, you need to migrate. -The development guides section of testing has been updated as well. diff --git a/doc/release-notes/9812-archiver-warnings.md b/doc/release-notes/9812-archiver-warnings.md deleted file mode 100644 index 716223b3f46..00000000000 --- a/doc/release-notes/9812-archiver-warnings.md +++ /dev/null @@ -1,7 +0,0 @@ -# Potential Archiver Incompatibilities with Payara6 -The Google Cloud and DuraCloud Archivers (see https://guides.dataverse.org/en/latest/installation/config.html#bagit-export) may not work in v6.0. -This is due to their dependence on libraries that include classes in javax.* packages that are no longer available. -If these classes are actually used when the archivers run, the archivers would fail. -As these two archivers require additional setup, they have not been tested in v6.0. -Community members using these archivers or considering their use are encouraged to test them with v6.0 and report any errors and/or provide fixes for them that can be included in future releases. - diff --git a/doc/release-notes/9838-rm-vagrant.md b/doc/release-notes/9838-rm-vagrant.md deleted file mode 100644 index 910f2e0b2f0..00000000000 --- a/doc/release-notes/9838-rm-vagrant.md +++ /dev/null @@ -1 +0,0 @@ -Vagrant has been removed. See #9838. 
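The archiver warning consolidated just above concerns `javax.*` classes that Payara 6 no longer provides. A minimal way to test whether a deployed archiver dependency still bundles such classes is sketched below; the jar path and file name are illustrative assumptions, not anything specified in these release notes.

```
# Hypothetical check: does an archiver dependency still bundle javax.* classes?
# Adjust JAR to point at the actual artifact in your deployment.
JAR=/usr/local/payara6/glassfish/domains/domain1/applications/dataverse/WEB-INF/lib/duracloud-client.jar
unzip -l "$JAR" | grep -q 'javax/' \
  && echo "javax.* classes present - archiver may fail under Payara 6" \
  || echo "no javax.* classes found"
```

If the grep matches, test that archiver carefully before relying on it under 6.0.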
diff --git a/doc/release-notes/9860-6.0-release-notes.md b/doc/release-notes/9860-6.0-release-notes.md new file mode 100644 index 00000000000..9d0e167901d --- /dev/null +++ b/doc/release-notes/9860-6.0-release-notes.md @@ -0,0 +1,282 @@ +# Dataverse 6.0 + +This is a platform upgrade release. Payara, Solr, and Java have been upgraded. No features have been added to the Dataverse software itself. Only a handful of bugs were fixed. + +Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project! + +## Release Highlights + +### Payara 6 + +Payara has been upgraded to version 6.2023.8. For details, see PR #9685 and PR #9795. + +Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/). + +### Solr 9 + +Solr has been upgraded to version 9.3.0. For details, see PR #9787. + +### Java 17 + +Java has been upgraded to version 17. For details, see PR #9764. + + +## Installation + +If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! + +If you would like to be on our [map of Dataverse installations](https://dataverse.org/installations) around the world, please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org! + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 5.14. + +### Upgrade from Java 11 to Java 17 + +Java 17 is now required for Dataverse. Solr can run under Java 11 or Java 17 but the latter is recommended. In preparation for the Java upgrade, stop both Dataverse/Payara and Solr. + +1. Undeploy Dataverse, if deployed, using the unprivileged service account. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` + + `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` + +1. Stop Payara 5. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` + +1. Stop Solr 8. + + `sudo systemctl stop solr.service` + +1. Install Java 17. + + Assuming you are using RHEL or a derivative such as Rocky Linux: + + `sudo yum install java-17-openjdk` + +1. Set Java 17 as the default. + + Assuming you are using RHEL or a derivative such as Rocky Linux: + + `sudo alternatives --config java` + +1. Test that Java 17 is the default. + + `java -version` + +### Upgrade from Payara 5 to Payara 6 + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +1. Download Payara 6.2023.8. + + `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip` + +1. Unzip it to /usr/local (or your preferred location). + + `sudo unzip payara-6.2023.8.zip -d /usr/local/` + +1. Change ownership of the unzipped Payara to your "service" user ("dataverse" by default). + + `sudo chown -R dataverse /usr/local/payara6` + +1. Undeploy Dataverse, if deployed, using the unprivileged service account. 
+ + `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` + + `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` + +1. Stop Payara 5, if running. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` + +1. Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml. + + `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig` + + `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt` + + `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml` + + The lines will appear in two sections, examples shown below (but your content will vary). + + Section 1: system properties (under ``) + + ``` + + + + + + ``` + + Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below. + + Section 2: JVM options (under ``, the one under ``, not under ``) + + ``` + -Ddataverse.files.directory=/usr/local/dvn/data + -Ddataverse.files.file.type=file + -Ddataverse.files.file.label=file + -Ddataverse.files.file.directory=/usr/local/dvn/data + -Ddataverse.rserve.host=localhost + -Ddataverse.rserve.port=6311 + -Ddataverse.rserve.user=rserve + -Ddataverse.rserve.password=rserve + -Ddataverse.auth.password-reset-timeout-in-minutes=60 + -Ddataverse.timerServer=true + -Ddataverse.fqdn=dev1.dataverse.org + -Ddataverse.siteUrl=https://dev1.dataverse.org + -Ddataverse.files.storage-driver-id=file + -Ddoi.username=testaccount + -Ddoi.password=notmypassword + -Ddoi.baseurlstring=https://mds.test.datacite.org/ + -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org + ``` + +1. Check the `Xmx` setting in `domain.xml`. + + Under `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`, check the `Xmx` setting under ``, where you put the JVM options, not the one under ``. Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`). + +1. Copy `jhove.conf` and `jhoveConfig.xsd` from Payara 5, edit and change `payara5` to `payara6`. + + `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/` + + `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*` + + `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf` + +1. Copy logos from Payara 5 to Payara 6. + + These logos are for collections (dataverses). + + `sudo -u dataverse cp -r /usr/local/payara5/glassfish/domains/domain1/docroot/logos /usr/local/payara6/glassfish/domains/domain1/docroot` + +1. If you are using Make Data Count (MDC), edit :MDCLogPath. + + Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well. + +1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable. + +1. Start Payara. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` + +1. Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server. 
+ + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession` + +1. Create password aliases for your database, rserve and datacite jvm-options, if you're using them. + + `echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt` + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt` + + When you are prompted "Enter the value for the aliasname operand", enter `dataverse.db.password` + + You should see "Command create-password-alias executed successfully." + + You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or DataCite. + +1. Enable workaround for FISH-7722. + + The following workaround is for https://github.com/payara/Payara/issues/6337 + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED` + +1. Create the network listener on port 8009. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector` + +1. Deploy the Dataverse 6.0 war file. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war` + +1. Check that you get a version number from Dataverse. + + This is just a sanity check that Dataverse has been deployed properly. + + `curl http://localhost:8080/api/info/version` + +1. Perform one final Payara restart to ensure that timers are initialized properly. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain` + + `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` + +### Upgrade from Solr 8 to 9 + +Solr has been upgraded to Solr 9. You must install Solr fresh and reindex. You cannot use your old `schema.xml` because the format has changed. + +The instructions below are copied from https://guides.dataverse.org/en/6.0/installation/prerequisites.html#installing-solr and tweaked a bit for an upgrade scenario. + +We assume that you already have a user called "solr" (from the instructions above), added during your initial installation of Solr. We also assume that you have already stopped Solr 8 as explained in the instructions above about upgrading Java. + +1. Become the "solr" user and then download and configure Solr. + + `su - solr` + + `cd /usr/local/solr` + + `wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz` + + `tar xvzf solr-9.3.0.tgz` + + `cd solr-9.3.0` + + `cp -r server/solr/configsets/_default server/solr/collection1` + +1. Unzip "dvinstall.zip" from this release. Unzip it into /tmp. Then copy the following files into place. + + `cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + + `cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + +1. A Dataverse installation requires a change to the jetty.xml file that ships with Solr. + + Edit `/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`, increasing `requestHeaderSize` from `8192` to `102400` + +1. Tell Solr to create the core "collection1" on startup. + + `echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties` + +1. Update your init script. + + Your init script may be located at `/etc/systemd/system/solr.service`, for example. Update the path to Solr to be `/usr/local/solr/solr-9.3.0`. + +1. 
Start Solr using your init script and check collection1. + + The collection1 check below should print out fields Dataverse uses like "dsDescription". + + `systemctl start solr.service` + + `curl http://localhost:8983/solr/collection1/schema/fields` + +1. Reindex Solr. + + For details, see https://guides.dataverse.org/en/6.0/admin/solr-search-index.html but here is the reindex command: + + `curl http://localhost:8080/api/admin/index` + +1. If you have custom metadata blocks installed, you must update your Solr `schema.xml` to include your custom fields. + + For details, please see https://guides.dataverse.org/en/6.0/admin/metadatacustomization.html#updating-the-solr-schema + + At a high level you will be copying custom fields from the output of http://localhost:8080/api/admin/index/solr/schema or using a script to automate this. + +## Potential Archiver Incompatibilities with Payara 6 + +The [Google Cloud and DuraCloud archivers](https://guides.dataverse.org/en/5.14/installation/config.html#bagit-export) may not work in Dataverse 6.0. + +This is due to the archivers' dependence on libraries that include classes in `javax.* packages` that are no longer available. If these classes are actually used when the archivers run, the archivers would fail. As these two archivers require additional setup, they have not been tested in 6.0. Community members using these archivers or considering their use are encouraged to test them with 6.0 and report any errors and/or provide fixes for them that can be included in future releases. + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.0 Milestone](https://github.com/IQSS/dataverse/milestone/109?closed=1) in GitHub. + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. \ No newline at end of file From 4749c06c1a8c4871eacf25fb12585af6dfe9e7a8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Sep 2023 09:09:15 -0400 Subject: [PATCH 0385/1092] add breaking changes section #9860 Co-authored-by: Oliver Bertuch --- doc/release-notes/9860-6.0-release-notes.md | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/doc/release-notes/9860-6.0-release-notes.md b/doc/release-notes/9860-6.0-release-notes.md index 9d0e167901d..dff23ca4ce9 100644 --- a/doc/release-notes/9860-6.0-release-notes.md +++ b/doc/release-notes/9860-6.0-release-notes.md @@ -6,19 +6,22 @@ Thank you to all of the community members who contributed code, suggestions, bug ## Release Highlights -### Payara 6 -Payara has been upgraded to version 6.2023.8. For details, see PR #9685 and PR #9795. +### Breaking Changes -Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/). +This release contains a few major upgrades to core components. Detailed instructions to upgrade follow below. -### Solr 9 +#### Runtime +- Supported (required) Java version has been upgraded from v11 to v17. For details, see PR #9764 +- Payara application server has been upgraded to version 6.2023.8. + - This is a required update. For details, see PR #9685 and PR #9795. + - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/). +- Solr has been upgraded to version 9.3.0. For details, see PR #9787. 
-Solr has been upgraded to version 9.3.0. For details, see PR #9787. +#### Development +- Removal of Vagrant and Docker AIO, sunsetted in Dataverse v5.14. See PR #9838 and PR #9685 for details. +- All tests have been migrated to use JUnit 5 exclusively from now on out. See PR #9796 for details. -### Java 17 - -Java has been upgraded to version 17. For details, see PR #9764. ## Installation From 25bda807975b3faa4e61890aaf5c33266b806e2e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Sep 2023 09:18:01 -0400 Subject: [PATCH 0386/1092] tweaks #9860 --- doc/release-notes/9860-6.0-release-notes.md | 26 ++++++++++----------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/doc/release-notes/9860-6.0-release-notes.md b/doc/release-notes/9860-6.0-release-notes.md index dff23ca4ce9..e896beef78f 100644 --- a/doc/release-notes/9860-6.0-release-notes.md +++ b/doc/release-notes/9860-6.0-release-notes.md @@ -4,25 +4,25 @@ This is a platform upgrade release. Payara, Solr, and Java have been upgraded. N Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project! -## Release Highlights +## Release Highlights (Major Upgrades, Breaking Changes) - -### Breaking Changes - -This release contains a few major upgrades to core components. Detailed instructions to upgrade follow below. +This release contains a major upgrades to core components. Detailed upgrade instructions can be found below. #### Runtime -- Supported (required) Java version has been upgraded from v11 to v17. For details, see PR #9764 + +- The required Java version has been increased from version 11 to 17. + - See PR #9764 for details. - Payara application server has been upgraded to version 6.2023.8. - - This is a required update. For details, see PR #9685 and PR #9795. - - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/). -- Solr has been upgraded to version 9.3.0. For details, see PR #9787. + - This is a required update. + - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/) + - See PR #9685 and PR #9795 for details. +- Solr has been upgraded to version 9.3.0. + - See PR #9787. for details. #### Development -- Removal of Vagrant and Docker AIO, sunsetted in Dataverse v5.14. See PR #9838 and PR #9685 for details. -- All tests have been migrated to use JUnit 5 exclusively from now on out. See PR #9796 for details. - +- Removal of Vagrant and Docker All In One (docker-aio), deprecated in Dataverse v5.14. See PR #9838 and PR #9685 for details. +- All tests have been migrated to use JUnit 5 exclusively from now on. See PR #9796 for details. ## Installation @@ -282,4 +282,4 @@ For the complete list of code changes in this release, see the [6.0 Milestone](h ## Getting Help -For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. \ No newline at end of file +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. 
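The breaking-changes list reworked in the two patches above reduces to three runtime jumps: Java 17, Payara 6, and Solr 9.3.0. A minimal post-upgrade verification pass is sketched below, assuming default install paths and ports; only the `/api/info/version` call is taken from these notes, while the Solr system-info endpoint and `asadmin version` are standard tooling used here for illustration.

```
# Minimal sanity checks after the 6.0 upgrade (default paths/ports assumed)
java -version 2>&1 | head -n1                    # expect a 17.x runtime
sudo -u dataverse /usr/local/payara6/bin/asadmin version
curl -s http://localhost:8983/solr/admin/info/system | grep solr-spec-version   # expect 9.3.0
curl -s http://localhost:8080/api/info/version   # expect 6.0
```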
From 5b8c1918788cbc3b8a565a5cbf730aae48f5111c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Sep 2023 10:35:51 -0400 Subject: [PATCH 0387/1092] more tweaks #9860 --- doc/release-notes/9860-6.0-release-notes.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/9860-6.0-release-notes.md b/doc/release-notes/9860-6.0-release-notes.md index e896beef78f..eb22dbff2ee 100644 --- a/doc/release-notes/9860-6.0-release-notes.md +++ b/doc/release-notes/9860-6.0-release-notes.md @@ -6,9 +6,9 @@ Thank you to all of the community members who contributed code, suggestions, bug ## Release Highlights (Major Upgrades, Breaking Changes) -This release contains a major upgrades to core components. Detailed upgrade instructions can be found below. +This release contains major upgrades to core components. Detailed upgrade instructions can be found below. -#### Runtime +### Runtime - The required Java version has been increased from version 11 to 17. - See PR #9764 for details. @@ -19,7 +19,7 @@ This release contains a major upgrades to core components. Detailed upgrade inst - Solr has been upgraded to version 9.3.0. - See PR #9787. for details. -#### Development +### Development - Removal of Vagrant and Docker All In One (docker-aio), deprecated in Dataverse v5.14. See PR #9838 and PR #9685 for details. - All tests have been migrated to use JUnit 5 exclusively from now on. See PR #9796 for details. From 99e74d0a61572a79dfe1493865d92efe02d89af0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Sep 2023 12:11:52 -0400 Subject: [PATCH 0388/1092] typo #9860 --- doc/release-notes/9860-6.0-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9860-6.0-release-notes.md b/doc/release-notes/9860-6.0-release-notes.md index eb22dbff2ee..ef432f42111 100644 --- a/doc/release-notes/9860-6.0-release-notes.md +++ b/doc/release-notes/9860-6.0-release-notes.md @@ -17,7 +17,7 @@ This release contains major upgrades to core components. Detailed upgrade instru - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/) - See PR #9685 and PR #9795 for details. - Solr has been upgraded to version 9.3.0. - - See PR #9787. for details. + - See PR #9787 for details. 
### Development


From 49508a68e674116116ffb9e36a56f69c1fb7bbe0 Mon Sep 17 00:00:00 2001
From: Philip Durbin 
Date: Fri, 8 Sep 2023 10:12:34 -0400
Subject: [PATCH 0389/1092] rename release notes file #9860

---
 .../{9860-6.0-release-notes.md => 6.0-release-notes.md}          | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename doc/release-notes/{9860-6.0-release-notes.md => 6.0-release-notes.md} (100%)

diff --git a/doc/release-notes/9860-6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md
similarity index 100%
rename from doc/release-notes/9860-6.0-release-notes.md
rename to doc/release-notes/6.0-release-notes.md

From 4cd8724796a6390a4c5e3f9aa6e115f26ea136a4 Mon Sep 17 00:00:00 2001
From: Philip Durbin 
Date: Fri, 8 Sep 2023 10:12:58 -0400
Subject: [PATCH 0390/1092] mention template/terms bug #9825 in release notes
 #9860

---
 doc/release-notes/6.0-release-notes.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md
index ef432f42111..342bdd5ac7b 100644
--- a/doc/release-notes/6.0-release-notes.md
+++ b/doc/release-notes/6.0-release-notes.md
@@ -276,6 +276,17 @@ The [Google Cloud and DuraCloud archivers](https://guides.dataverse.org/en/5.14/
 
 This is due to the archivers' dependence on libraries that include classes in `javax.* packages` that are no longer available. If these classes are actually used when the archivers run, the archivers would fail. As these two archivers require additional setup, they have not been tested in 6.0. Community members using these archivers or considering their use are encouraged to test them with 6.0 and report any errors and/or provide fixes for them that can be included in future releases.
 
+## Bug Fix for Dataset Templates with Custom Terms of Use
+
+A bug was fixed for the following scenario:
+
+- Create a template with custom terms.
+- Set that template as the default.
+- Try to create a dataset.
+- A 500 error appears before the form to create the dataset is even shown.
+
+For more details, see issue #9825 and PR #9892.
+
 ## Complete List of Changes
 
 For the complete list of code changes in this release, see the [6.0 Milestone](https://github.com/IQSS/dataverse/milestone/109?closed=1) in GitHub.

From 12613be60c836a283cc3b085bfd404d183746fd7 Mon Sep 17 00:00:00 2001
From: Philip Durbin 
Date: Fri, 8 Sep 2023 12:41:36 -0400
Subject: [PATCH 0391/1092] mention postgres in release note #9717 #9860

---
 doc/release-notes/6.0-release-notes.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md
index 342bdd5ac7b..b5e6d5a201d 100644
--- a/doc/release-notes/6.0-release-notes.md
+++ b/doc/release-notes/6.0-release-notes.md
@@ -18,6 +18,8 @@ This release contains major upgrades to core components. Detailed upgrade instru
   - See PR #9685 and PR #9795 for details.
 - Solr has been upgraded to version 9.3.0.
   - See PR #9787 for details.
+- PostgreSQL 13 remains the tested and supported version.
+  - That said, the installer and Flyway have been upgraded to support PostgreSQL 14 and 15. See the [PostgreSQL](https://guides.dataverse.org/en/6.0/installation/prerequisites.html#postgresql) section of the Installation Guide and PR #9877 for details.
### Development From d481c611ecc631e5dff2706fb9027433a33c6097 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 8 Sep 2023 12:49:16 -0400 Subject: [PATCH 0392/1092] reword prompt to opt-in to the map #9860 --- doc/release-notes/6.0-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md index b5e6d5a201d..1cf431ef6b0 100644 --- a/doc/release-notes/6.0-release-notes.md +++ b/doc/release-notes/6.0-release-notes.md @@ -30,7 +30,7 @@ This release contains major upgrades to core components. Detailed upgrade instru If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! -If you would like to be on our [map of Dataverse installations](https://dataverse.org/installations) around the world, please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org! +Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! ## Upgrade Instructions From 7259b35e95ebe9feb2d845283cc4ab97752e3104 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 8 Sep 2023 12:58:31 -0400 Subject: [PATCH 0393/1092] mention joining the GDCC #9860 --- doc/release-notes/6.0-release-notes.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md index 1cf431ef6b0..df916216f5b 100644 --- a/doc/release-notes/6.0-release-notes.md +++ b/doc/release-notes/6.0-release-notes.md @@ -32,6 +32,8 @@ If this is a new installation, please follow our [Installation Guide](https://gu Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! +You are also very welcome to join the [Global Dataverse Community Consortium](https://dataversecommunity.global) (GDCC). + ## Upgrade Instructions Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. 
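To make the backup advice above concrete, a PostgreSQL dump can be taken right before the maintenance window begins. This is a sketch only: the database name and role below are assumptions, so substitute the values used by your own installation.

    # Back up the Dataverse database before upgrading (dvndb and dvnapp are assumed names)
    pg_dump -U dvnapp dvndb > dvndb-pre-upgrade.sql
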
From 455cb2c950eab61bcaa25a8251cef186e8ea3837 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 8 Sep 2023 18:10:49 +0100 Subject: [PATCH 0394/1092] Fixed: removed deleted field from DataFile payload which causes nullability issues --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 9bda1e24cfb..2b04bb3f657 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -692,7 +692,6 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()) - .add("deleted", df.getDeleted()) .add("fileAccessRequest", df.getOwner().isFileAccessRequest()); /* * The restricted state was not included prior to #9175 so to avoid backward From 55a81be1c6b279a253b12782a18e7b7c1db9bb9e Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 8 Sep 2023 18:12:34 +0100 Subject: [PATCH 0395/1092] Refactor: simpler IT testGetUserPermissionsOnFile --- .../harvard/iq/dataverse/api/AccessIT.java | 23 ++----------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 76012882ef5..dadd4093fc2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -633,27 +633,8 @@ public void testZipUploadAndDownload() throws IOException { @Test public void testGetUserPermissionsOnFile() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Assert user permissions on file - int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); - + // Call with valid file id + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken); getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); assertTrue(canDownloadFile); From 
1d7118b08e336c246d1db7c734ce2376ebc028bc Mon Sep 17 00:00:00 2001
From: Philip Durbin 
Date: Thu, 7 Sep 2023 18:27:50 -0400
Subject: [PATCH 0396/1092] bump version to 6.0 #9861

---
 doc/sphinx-guides/source/conf.py      | 4 ++--
 doc/sphinx-guides/source/versions.rst | 3 ++-
 modules/dataverse-parent/pom.xml      | 2 +-
 3 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py
index 2c2ddf1bdf6..7ff17eb45ed 100755
--- a/doc/sphinx-guides/source/conf.py
+++ b/doc/sphinx-guides/source/conf.py
@@ -66,9 +66,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '5.14'
+version = '6.0'
 # The full version, including alpha/beta/rc tags.
-release = '5.14'
+release = '6.0'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst
index d5ffb2acb66..2000a2097f0 100755
--- a/doc/sphinx-guides/source/versions.rst
+++ b/doc/sphinx-guides/source/versions.rst
@@ -7,7 +7,8 @@ Dataverse Software Documentation Versions
 
 This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases <https://github.com/IQSS/dataverse/releases>`__ page in our GitHub repo.
 
 - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) `
-- 5.14
+- 6.0
+- `5.14 <https://guides.dataverse.org/en/5.14/>`__
 - `5.13 <https://guides.dataverse.org/en/5.13/>`__
 - `5.12.1 <https://guides.dataverse.org/en/5.12.1/>`__
 - `5.12 <https://guides.dataverse.org/en/5.12/>`__
diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml
index c2623877e9e..c45d59e4f5f 100644
--- a/modules/dataverse-parent/pom.xml
+++ b/modules/dataverse-parent/pom.xml
@@ -131,7 +131,7 @@
 
 
-        <revision>5.14</revision>
+        <revision>6.0</revision>
         <target.java.version>17</target.java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

From 0248e1e0be00a332430563fcd762cbfdc26ac290 Mon Sep 17 00:00:00 2001
From: GPortas 
Date: Sat, 9 Sep 2023 12:02:01 +0100
Subject: [PATCH 0397/1092] Added: tests and tweaks for userFileAccessRequested
 API endpoint

---
 .../edu/harvard/iq/dataverse/api/Access.java  |  2 +-
 .../harvard/iq/dataverse/api/AccessIT.java    | 32 +++++++++++++++++--
 .../edu/harvard/iq/dataverse/api/UtilIT.java  |  6 ++++
 3 files changed, 36 insertions(+), 4 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 52d97703ff9..256af8ec65e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -1682,7 +1682,7 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara
     @GET
     @AuthRequired
     @Path("/datafile/{id}/userFileAccessRequested")
-    public Response userFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+    public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
         DataFile dataFile;
         AuthenticatedUser requestAuthenticatedUser;
         try {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
index dadd4093fc2..48b6eee38e1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
@@ -26,11 +26,9 @@
 import static jakarta.ws.rs.core.Response.Status.*;
 import static org.hamcrest.MatcherAssert.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import
static org.junit.jupiter.api.Assertions.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; +import static org.junit.jupiter.api.Assertions.*; /** * @@ -631,6 +629,34 @@ public void testZipUploadAndDownload() throws IOException { System.out.println("Zip upload-and-download round trip test: success!"); } + @Test + public void testGetUserFileAccessRequested() { + // Create new user + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + + String dataFileId = Integer.toString(tabFile3IdRestricted); + + // Call with new user and unrequested access file + Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + + boolean userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertFalse(userFileAccessRequested); + + // Request file access for the new user + Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken); + requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Call with new user and requested access file + getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + + userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertTrue(userFileAccessRequested); + } + @Test public void testGetUserPermissionsOnFile() { // Call with valid file id diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 973642b1617..164f3a66ffb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3329,6 +3329,12 @@ static Response getFileDataTables(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/dataTables"); } + static Response getUserFileAccessRequested(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested"); + } + static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) From d33e8f53ffa55c1d67711ca3ea7a833574c63d7a Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 11 Sep 2023 11:13:29 +0100 Subject: [PATCH 0398/1092] Added: hasBeenDeleted files API endpoint. 
Pending IT --- .../java/edu/harvard/iq/dataverse/api/Files.java | 12 ++++++++++++ .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 12 ++++++++++++ 2 files changed, 24 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 6712b68a09b..6d60de18c70 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -107,6 +107,8 @@ public class Files extends AbstractApiBean { MakeDataCountLoggingServiceBean mdcLogService; @Inject GuestbookResponseServiceBean guestbookResponseService; + @Inject + DataFileServiceBean dataFileServiceBean; private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -870,4 +872,14 @@ public Response setFileCategories(@Context ContainerRequestContext crc, @PathPar } }, getRequestUser(crc)); } + + @GET + @AuthRequired + @Path("{id}/hasBeenDeleted") + public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); + }, getRequestUser(crc)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 164f3a66ffb..d243d3c47f2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3374,4 +3374,16 @@ static Response setFileCategories(String dataFileId, String apiToken, List Date: Mon, 11 Sep 2023 12:10:23 +0100 Subject: [PATCH 0399/1092] Changed: :MaxEmbargoDurationInMonths setting directly exposed via API info endpoint --- doc/sphinx-guides/source/api/native-api.rst | 16 +++-- .../edu/harvard/iq/dataverse/api/Info.java | 35 ++++++----- .../edu/harvard/iq/dataverse/api/InfoIT.java | 59 +++++++------------ 3 files changed, 51 insertions(+), 59 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 44486278cc8..52d9099cf63 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3318,6 +3318,8 @@ Show Support Of Incomplete Metadata Deposition Learn if an instance has been configured to allow deposition of incomplete datasets via the API. See also :ref:`create-dataset-command` and :ref:`dataverse.api.allow-incomplete-metadata` +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + .. code-block:: bash export SERVER_URL=https://demo.dataverse.org @@ -3337,6 +3339,8 @@ Get the configured zip file download limit. The response contains the long value This limit comes from the database setting :ref:`:ZipDownloadLimit` if set, or the default value if the database setting is not set, which is 104857600 (100MB). +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. + .. 
code-block:: bash export SERVER_URL=https://demo.dataverse.org @@ -3349,24 +3353,24 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/info/zipDownloadLimit" -Show Support Of The Embargo Feature -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Get Maximum Embargo Duration In Months +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Know if the Dataverse instance has been configured to allow embargoes. +Get the maximum embargo duration in months, if available, configured through the database setting :ref:`:MaxEmbargoDurationInMonths` from the Configuration section of the Installation Guide. -The endpoint checks whether the database setting :ref:`:MaxEmbargoDurationInMonths`, which enables the embargo feature, has a value that enables the feature or not. +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. .. code-block:: bash export SERVER_URL=https://demo.dataverse.org - curl "$SERVER_URL/api/info/embargoEnabled" + curl "$SERVER_URL/api/info/settings/:MaxEmbargoDurationInMonths" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/info/embargoEnabled" + curl "https://demo.dataverse.org/api/info/settings/:MaxEmbargoDurationInMonths" .. _metadata-blocks-api: diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index be71a3a9fc7..ccf918f1104 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -25,14 +25,15 @@ public class Info extends AbstractApiBean { @GET @Path("settings/:DatasetPublishPopupCustomText") public Response getDatasetPublishPopupCustomText() { - String setting = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText); - if (setting != null) { - return ok(Json.createObjectBuilder().add("message", setting)); - } else { - return notFound("Setting " + SettingsServiceBean.Key.DatasetPublishPopupCustomText + " not found"); - } + return getSettingByKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText); } - + + @GET + @Path("settings/:MaxEmbargoDurationInMonths") + public Response getMaxEmbargoDurationInMonths() { + return getSettingByKey(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + } + @GET @AuthRequired @Path("version") @@ -41,25 +42,25 @@ public Response getInfo(@Context ContainerRequestContext crc) { String[] comps = versionStr.split("build",2); String version = comps[0].trim(); JsonValue build = comps.length > 1 ? 
Json.createArrayBuilder().add(comps[1].trim()).build().get(0) : JsonValue.NULL; - + return response( req -> ok( Json.createObjectBuilder().add("version", version) .add("build", build)), getRequestUser(crc)); } - + @GET @AuthRequired @Path("server") public Response getServer(@Context ContainerRequestContext crc) { return response( req -> ok(JvmSettings.FQDN.lookup()), getRequestUser(crc)); } - + @GET @AuthRequired @Path("apiTermsOfUse") public Response getTermsOfUse(@Context ContainerRequestContext crc) { return response( req -> ok(systemConfig.getApiTermsOfUse()), getRequestUser(crc)); } - + @GET @Path("settings/incompleteMetadataViaApi") public Response getAllowsIncompleteMetadata() { @@ -73,10 +74,12 @@ public Response getZipDownloadLimit() { return ok(zipDownloadLimit); } - @GET - @Path("embargoEnabled") - public Response getEmbargoEnabled() { - String setting = settingsSvc.getValueForKey(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); - return ok(setting != null && !setting.equals("0")); + private Response getSettingByKey(SettingsServiceBean.Key key) { + String setting = settingsService.getValueForKey(key); + if (setting != null) { + return ok(Json.createObjectBuilder().add("message", setting)); + } else { + return notFound("Setting " + key + " not found"); + } } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java index 1b2c513252d..3d5691dbe03 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import static io.restassured.RestAssured.given; + import io.restassured.response.Response; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import org.junit.jupiter.api.AfterAll; @@ -17,38 +18,23 @@ public class InfoIT { @BeforeAll public static void setUpClass() { UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + UtilIT.deleteSetting(SettingsServiceBean.Key.DatasetPublishPopupCustomText); } @AfterAll public static void afterClass() { UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + UtilIT.deleteSetting(SettingsServiceBean.Key.DatasetPublishPopupCustomText); } @Test public void testGetDatasetPublishPopupCustomText() { + testSettingEndpoint(SettingsServiceBean.Key.DatasetPublishPopupCustomText, "Hello world!"); + } - given().urlEncodingEnabled(false) - .body("Hello world!") - .put("/api/admin/settings/" - + SettingsServiceBean.Key.DatasetPublishPopupCustomText); - - Response response = given().urlEncodingEnabled(false) - .get("/api/info/settings/" + SettingsServiceBean.Key.DatasetPublishPopupCustomText); - response.prettyPrint(); - response.then().assertThat().statusCode(OK.getStatusCode()) - .body("data.message", equalTo("Hello world!")); - - given().urlEncodingEnabled(false) - .delete("/api/admin/settings/" - + SettingsServiceBean.Key.DatasetPublishPopupCustomText); - - response = given().urlEncodingEnabled(false) - .get("/api/info/settings/" + SettingsServiceBean.Key.DatasetPublishPopupCustomText); - response.prettyPrint(); - response.then().assertThat().statusCode(NOT_FOUND.getStatusCode()) - .body("message", equalTo("Setting " - + SettingsServiceBean.Key.DatasetPublishPopupCustomText - + " not found")); + @Test + public void testGetMaxEmbargoDurationInMonths() { + testSettingEndpoint(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); } @Test @@ -68,9 +54,9 @@ public void testGetServer() { 
response.then().assertThat().statusCode(OK.getStatusCode()) .body("data.message", notNullValue()); } - + @Test - public void getTermsOfUse() { + public void testGetTermsOfUse() { Response response = given().urlEncodingEnabled(false) .get("/api/info/apiTermsOfUse"); response.prettyPrint(); @@ -79,7 +65,7 @@ public void getTermsOfUse() { } @Test - public void getAllowsIncompleteMetadata() { + public void testGetAllowsIncompleteMetadata() { Response response = given().urlEncodingEnabled(false) .get("/api/info/settings/incompleteMetadataViaApi"); response.prettyPrint(); @@ -88,7 +74,7 @@ public void getAllowsIncompleteMetadata() { } @Test - public void getZipDownloadLimit() { + public void testGetZipDownloadLimit() { Response response = given().urlEncodingEnabled(false) .get("/api/info/zipDownloadLimit"); response.prettyPrint(); @@ -96,19 +82,18 @@ public void getZipDownloadLimit() { .body("data", notNullValue()); } - @Test - public void getEmbargoEnabled() { - String testEndpoint = "/api/info/embargoEnabled"; - // Embargo disabled - Response response = given().urlEncodingEnabled(false).get(testEndpoint); + private void testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) { + String endpoint = "/api/info/settings/" + settingKey; + // Setting not found + Response response = given().urlEncodingEnabled(false).get(endpoint); response.prettyPrint(); - response.then().assertThat().statusCode(OK.getStatusCode()) - .body("data", equalTo(false)); - // Embargo enabled - UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); - response = given().urlEncodingEnabled(false).get(testEndpoint); + response.then().assertThat().statusCode(NOT_FOUND.getStatusCode()) + .body("message", equalTo("Setting " + settingKey + " not found")); + // Setting exists + UtilIT.setSetting(settingKey, testSettingValue); + response = given().urlEncodingEnabled(false).get(endpoint); response.prettyPrint(); response.then().assertThat().statusCode(OK.getStatusCode()) - .body("data", equalTo(true)); + .body("data.message", equalTo(testSettingValue)); } } From d3d305c78ac579513654eb4c8a4add6d1ff8db88 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 11 Sep 2023 12:12:31 +0100 Subject: [PATCH 0400/1092] Changed: updated release notes --- doc/release-notes/9880-info-api-zip-limit-embargo.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9880-info-api-zip-limit-embargo.md b/doc/release-notes/9880-info-api-zip-limit-embargo.md index 6e15d503ae9..d2afb139e72 100644 --- a/doc/release-notes/9880-info-api-zip-limit-embargo.md +++ b/doc/release-notes/9880-info-api-zip-limit-embargo.md @@ -2,4 +2,4 @@ Implemented the following new endpoints: - getZipDownloadLimit (/api/info/zipDownloadLimit): Get the configured zip file download limit. The response contains the long value of the limit in bytes. -- getEmbargoEnabled (/api/info/embargoEnabled): Know if the Dataverse instance has been configured to allow embargoes. +- getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths. 
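As a usage sketch for the two endpoints described in this release note, both can be queried anonymously with curl, following the same pattern shown in the guides changes above (the demo server URL is just an example):

    curl "https://demo.dataverse.org/api/info/zipDownloadLimit"
    curl "https://demo.dataverse.org/api/info/settings/:MaxEmbargoDurationInMonths"
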
From f754b710934dc3412614ff77cde8a4c2c2efafd2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 11 Sep 2023 12:15:02 +0100 Subject: [PATCH 0401/1092] Changed: private Info.java method renamed --- src/main/java/edu/harvard/iq/dataverse/api/Info.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index ccf918f1104..0652539b595 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -25,13 +25,13 @@ public class Info extends AbstractApiBean { @GET @Path("settings/:DatasetPublishPopupCustomText") public Response getDatasetPublishPopupCustomText() { - return getSettingByKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText); + return getSettingResponseByKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText); } @GET @Path("settings/:MaxEmbargoDurationInMonths") public Response getMaxEmbargoDurationInMonths() { - return getSettingByKey(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + return getSettingResponseByKey(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); } @GET @@ -74,7 +74,7 @@ public Response getZipDownloadLimit() { return ok(zipDownloadLimit); } - private Response getSettingByKey(SettingsServiceBean.Key key) { + private Response getSettingResponseByKey(SettingsServiceBean.Key key) { String setting = settingsService.getValueForKey(key); if (setting != null) { return ok(Json.createObjectBuilder().add("message", setting)); From 7929c4ffae6bb3d0d9662a7d06b2e50e7bafdcb6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Sep 2023 10:02:28 -0400 Subject: [PATCH 0402/1092] remove embargo example, no longer used in #9881 --- doc/sphinx-guides/source/developers/api-design.rst | 8 -------- 1 file changed, 8 deletions(-) diff --git a/doc/sphinx-guides/source/developers/api-design.rst b/doc/sphinx-guides/source/developers/api-design.rst index e485236223e..d7a14716c4d 100755 --- a/doc/sphinx-guides/source/developers/api-design.rst +++ b/doc/sphinx-guides/source/developers/api-design.rst @@ -45,14 +45,6 @@ Settings need to be exposed outside to API clients outside of ``/api/admin`` (wh - Final path example: ``/api/info/zipDownloadLimit`` -- If you are exposing logic based on a database setting rather that the setting itself (e.g. a boolean for if embargo is enabled rather than the value of ``:MaxEmbargoDurationInMonths``): - - - Use ``/api/info`` as the root path. - - - Append a meaningful name for the setting (e.g. ``embargoEnabled``). - - - Final path example: ``/api/info/embargoEnabled`` - - If the database setting you're exposing make more sense outside of ``/api/info`` because there's more context (e.g. ``:CustomDatasetSummaryFields``): - Feel free to use a path outside of ``/api/info`` as the root path. 
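Because both settings endpoints in Info.java now delegate to the renamed getSettingResponseByKey helper, they behave identically: 200 with the setting value when it is defined, 404 with a "not found" message otherwise. A quick check against a running server (the URL below is an assumption):

    # Returns a data.message payload when the setting is defined, 404 otherwise
    curl "https://demo.dataverse.org/api/info/settings/:DatasetPublishPopupCustomText"
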
From 6e08bad0eb3e979fd50e7cbac7cfab4e2b2d5cd5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 11 Sep 2023 11:23:11 -0400 Subject: [PATCH 0403/1092] typo #9880 --- doc/sphinx-guides/source/developers/api-design.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/api-design.rst b/doc/sphinx-guides/source/developers/api-design.rst index d7a14716c4d..e7a7a6408bb 100755 --- a/doc/sphinx-guides/source/developers/api-design.rst +++ b/doc/sphinx-guides/source/developers/api-design.rst @@ -59,5 +59,5 @@ Settings need to be exposed outside to API clients outside of ``/api/admin`` (wh - Append a meaningful name for the setting (e.g. ``incompleteMetadataViaApi``). - - Final path example: ``/api/info/embargoEnabled`` + - Final path example: ``/api/info/incompleteMetadataViaApi`` From 1aa3703dfbbddc00356bc1a6dceb12ea577c80de Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 12:01:28 +0100 Subject: [PATCH 0404/1092] Added: IT for getHasBeenDeleted Files API endpoint --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 50 +++++++++++++++++-- 1 file changed, 46 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index bb6b261c387..7f1ca4c8d70 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -37,10 +37,7 @@ import static org.hamcrest.CoreMatchers.*; import static org.hamcrest.CoreMatchers.hasItem; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class FilesIT { @@ -2235,4 +2232,49 @@ public void testSetFileCategories() { .body("data.categories", hasItem(testCategory2)) .statusCode(OK.getStatusCode()); } + + @Test + public void testGetHasBeenDeleted() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Publish dataverse and dataset + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + 
publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has not been deleted + Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertFalse(fileHasBeenDeleted); + + // Delete test file + Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken); + deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has been deleted + getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertTrue(fileHasBeenDeleted); + } } From c224af6fc4c3c31fe52523c5eb67b770060f8b79 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 12:19:50 +0100 Subject: [PATCH 0405/1092] Added: docs for userFileAccessRequested endpoint --- doc/sphinx-guides/source/api/dataaccess.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index d714c90372a..0bfd29ed79d 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -404,6 +404,18 @@ A curl example using an ``id``:: curl -H "X-Dataverse-key:$API_TOKEN" -X GET http://$SERVER/api/access/datafile/{id}/listRequests +User Has Requested Access to a File: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``/api/access/datafile/{id}/userFileAccessRequested`` + +This method returns true or false depending on whether or not the calling user has requested access to a particular file. + +A curl example using an ``id``:: + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userFileAccessRequested" + + Get User Permissions on a File: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 578fdc5e4f63414b85507b6605f787630cdbdfe4 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 12:27:35 +0100 Subject: [PATCH 0406/1092] Added: docs for hasBeenDeleted endpoint --- doc/sphinx-guides/source/api/native-api.rst | 37 +++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 8b7d34425bc..bfadf1eb9a4 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2872,6 +2872,43 @@ The fully expanded example above (without environment variables) looks like this If you are interested in download counts for multiple files, see :doc:`/api/metrics`. +File Has Been Deleted +~~~~~~~~~~~~~~~~~~~~~ + +Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl "$SERVER_URL/api/files/$ID/hasBeenDeleted" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/24/hasBeenDeleted" + +A curl example using a ``PERSISTENT_ID`` + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000" + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ From 85b9139f94c364cf25bdf3590c5b769543f81a0f Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 12:34:19 +0100 Subject: [PATCH 0407/1092] Added: release notes for #9851 --- ...-payload-extension-new-file-access-endpoints.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md diff --git a/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md new file mode 100644 index 00000000000..f306ae2ab80 --- /dev/null +++ b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md @@ -0,0 +1,14 @@ +Implemented the following new endpoints: + +- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file. + + +- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. + + +In addition, the DataFile API payload has been extended to include the following fields: + +- tabularData: Boolean field to know if the DataFile is of tabular type + + +- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) From aacbc643b4fe7827c14edd1ebf97c82c6e0bdc53 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 12:42:22 +0100 Subject: [PATCH 0408/1092] Fixed: curl call examples in files API docs --- doc/sphinx-guides/source/api/native-api.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index bfadf1eb9a4..90f4ad4e800 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2846,13 +2846,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl "$SERVER_URL/api/files/$ID/downloadCount" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/downloadCount" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/24/downloadCount" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/downloadCount" A curl example using a ``PERSISTENT_ID`` @@ -2862,13 +2862,13 @@ A curl example using a ``PERSISTENT_ID`` export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" If you are interested in download counts for multiple files, see :doc:`/api/metrics`. @@ -2885,13 +2885,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl "$SERVER_URL/api/files/$ID/hasBeenDeleted" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/hasBeenDeleted" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/24/hasBeenDeleted" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/hasBeenDeleted" A curl example using a ``PERSISTENT_ID`` @@ -2901,13 +2901,13 @@ A curl example using a ``PERSISTENT_ID`` export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000" Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ From 918f1fb90a6ebc6072f91a62a54223bae3ad132a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Sep 2023 09:31:10 -0400 Subject: [PATCH 0409/1092] Revert "#9717 grant CREATE instead of ALL per pdurbin" This reverts commit f71274e7c7a4d47ab7fb973320bcfdb7e6822fbd. --- scripts/installer/install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 3aedbd8c6ad..9da64bff32e 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -413,7 +413,7 @@ # 3e. 
set permissions: - conn_cmd = "GRANT CREATE PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";" + conn_cmd = "GRANT ALL PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";" try: cur.execute(conn_cmd) except: From 270e0fd0a28b516f62dc29e927bbb19753f47d19 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Sep 2023 10:08:33 -0400 Subject: [PATCH 0410/1092] temporary fix for local compile issues --- .../harvest/server/web/servlet/OAIServlet.java | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index 9cf1629abfc..3ce88fdf204 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -74,18 +74,13 @@ public class OAIServlet extends HttpServlet { @EJB SystemConfig systemConfig; - - @Inject - @ConfigProperty(name = "dataverse.oai.server.maxidentifiers", defaultValue="100") - private Integer maxListIdentifiers; - @Inject - @ConfigProperty(name = "dataverse.oai.server.maxsets", defaultValue="100") - private Integer maxListSets; + //Todo - revert this change - added to get past some local compile issues + private Integer maxListIdentifiers=100; + + private Integer maxListSets=100; - @Inject - @ConfigProperty(name = "dataverse.oai.server.maxrecords", defaultValue="10") - private Integer maxListRecords; + private Integer maxListRecords=10; private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.server.web.servlet.OAIServlet"); // If we are going to stick with this solution - of providing a minimalist From 1828855a162683d564e02507ce60fd99963b43d0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Sep 2023 10:09:53 -0400 Subject: [PATCH 0411/1092] updates/fixes re: extending RemoteOverlay, etc. 
--- .../iq/dataverse/dataaccess/DataAccess.java | 2 + .../dataaccess/GlobusOverlayAccessIO.java | 208 +++++++----------- .../dataaccess/RemoteOverlayAccessIO.java | 9 +- .../iq/dataverse/dataaccess/StorageIO.java | 2 +- .../dataverse/globus/GlobusServiceBean.java | 52 +++-- 5 files changed, 119 insertions(+), 154 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java index f2eb0236df4..8387f8110cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java @@ -132,6 +132,8 @@ public static StorageIO getDirectStorageIO(String fullStorageLocation) return new SwiftAccessIO<>(storageLocation, storageDriverId); case REMOTE: return new RemoteOverlayAccessIO<>(storageLocation, storageDriverId); + case GLOBUS: + return new GlobusOverlayAccessIO<>(storageLocation, storageDriverId); default: logger.warning("Could not find storage driver for: " + fullStorageLocation); throw new IOException("getDirectStorageIO: Unsupported storage method."); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 16345cd1f9c..b00724e2825 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -58,46 +58,63 @@ /* * Globus Overlay Driver * - * Remote: - * StorageIdentifier format: ://// - * Storage location: / - * Internal - * StorageIdentifier format: ://// - * Storage location: /// + * Remote: StorageIdentifier format: + * ://// Storage location: + * / Internal StorageIdentifier format: + * :// Storage location: + * /// * * baseUrl: globus:// - + * */ public class GlobusOverlayAccessIO extends RemoteOverlayAccessIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO"); - - private String globusAccessToken = null; + String globusAccessToken = null; /* - * If this is set to true, the store supports Globus transfer in and Dataverse/the globus app manage file locations, access controls, deletion, etc. + * If this is set to true, the store supports Globus transfer in and + * Dataverse/the globus app manage file locations, access controls, deletion, + * etc. 
*/ - private boolean isDataverseManaged = false; + private boolean dataverseManaged = false; public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); - this.setIsLocalFile(false); - configureStores(req, driverId, null); - logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); - path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); - validatePath(path); + if (dvObject instanceof DataFile) { + globusAccessToken = retrieveGlobusAccessToken(); + } + dataverseManaged = isDataverseManaged(this.driverId); - logger.fine("Relative path: " + path); + logger.info("GAT3: " + globusAccessToken); } public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { - super(null, null, driverId); - this.setIsLocalFile(false); - configureStores(null, driverId, storageLocation); + this.driverId = driverId; + this.dataverseManaged = isDataverseManaged(this.driverId); + if (dataverseManaged) { + String[] parts = DataAccess.getDriverIdAndStorageLocation(storageLocation); + path = parts[1]; + } else { + this.setIsLocalFile(false); + configureStores(null, driverId, storageLocation); + + path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); + validatePath(path); + logger.fine("Relative path: " + path); + } +//ToDo - only when needed? + globusAccessToken = retrieveGlobusAccessToken(); + + } + + private String retrieveGlobusAccessToken() { + // String globusToken = JvmSettings.GLOBUS_TOKEN.lookup(driverId); + String globusToken = System.getProperty("dataverse.files." + this.driverId + ".globus-token"); - path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); - validatePath(path); - logger.fine("Relative path: " + path); + AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken); + return accessToken.getOtherTokens().get(0).getAccessToken(); } private void validatePath(String relPath) throws IOException { @@ -114,6 +131,7 @@ private void validatePath(String relPath) throws IOException { // Call the Globus API to get the file size @Override long retrieveSize() { + logger.info("GAT2: " + globusAccessToken); // Construct Globus URL URI absoluteURI = null; try { @@ -121,16 +139,16 @@ long retrieveSize() { String endpointWithBasePath = baseUrl.substring(baseUrl.lastIndexOf("://") + 3); int pathStart = endpointWithBasePath.indexOf("/"); logger.info("endpointWithBasePath: " + endpointWithBasePath); - String directoryPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart+1) : ""); + String directoryPath = "/" + (pathStart > 0 ? 
endpointWithBasePath.substring(pathStart + 1) : ""); logger.info("directoryPath: " + directoryPath); - if(isDataverseManaged) { + if (dataverseManaged && (dvObject!=null)) { Dataset ds = ((DataFile) dvObject).getOwner(); directoryPath = directoryPath + "/" + ds.getAuthority() + "/" + ds.getIdentifier(); logger.info("directoryPath now: " + directoryPath); } - if(filenameStart > 0) { + if (filenameStart > 0) { directoryPath = directoryPath + path.substring(0, filenameStart); } logger.info("directoryPath finally: " + directoryPath); @@ -168,12 +186,15 @@ long retrieveSize() { return -1; } - - - + + @Override + public InputStream getInputStream() throws IOException { + throw new IOException("Not implemented"); + } + @Override public void delete() throws IOException { - + // Fix // Delete is best-effort - we tell the remote server and it may or may not // implement this call @@ -205,9 +226,6 @@ public void delete() throws IOException { } - - - @Override public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName) throws IOException { @@ -218,114 +236,37 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary if (secretKey == null) { return baseUrl + "/" + path; } else { - return UrlSignerUtil.signUrl(baseUrl + "/" + path, getUrlExpirationMinutes(), null, "GET", - secretKey); + return UrlSignerUtil.signUrl(baseUrl + "/" + path, getUrlExpirationMinutes(), null, "GET", secretKey); } } else { return baseStore.generateTemporaryDownloadUrl(auxiliaryTag, auxiliaryType, auxiliaryFileName); } } - private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { - // String globusToken = JvmSettings.GLOBUS_TOKEN.lookup(driverId); - String globusToken = System.getProperty("dataverse.files." + this.driverId + ".globus-token"); - isDataverseManaged = Boolean.getBoolean("dataverse.files." + this.driverId + ".managed"); + private static boolean isDataverseManaged(String driverId) { + return Boolean.getBoolean("dataverse.files." + driverId + ".managed"); + } - AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken); - globusAccessToken = accessToken.getOtherTokens().get(0).getAccessToken(); - // endpointWithBasePath = JvmSettings.BASE_URI.lookup(this.driverId); - baseUrl = System.getProperty("dataverse.files." + this.driverId + ".base-url"); - logger.info("base-url is " + baseUrl); + static boolean isValidIdentifier(String driverId, String storageId) { + String baseIdentifier = storageId.substring(storageId.lastIndexOf("//") + 2); + String baseUrl = System.getProperty("dataverse.files." + driverId + ".base-url"); if (baseUrl == null) { - throw new IOException("dataverse.files." + this.driverId + ".base-url is required"); - } else { - try { - new URI(baseUrl); - } catch (Exception e) { - logger.warning( - "Trouble interpreting base-url for store: " + this.driverId + " : " + e.getLocalizedMessage()); - throw new IOException("Can't interpret base-url as a URI"); - } - + return false; } - - if (baseStore == null) { - String baseDriverId = getBaseStoreIdFor(driverId); - String fullStorageLocation = null; - String baseDriverType = System.getProperty("dataverse.files." 
+ baseDriverId + ".type", - DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - - if (dvObject instanceof Dataset) { - baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); + // Internally managed endpoints require standard name pattern (submitted via + // /addFile(s) api) + if (isDataverseManaged(driverId)) { + boolean hasStandardName = usesStandardNamePattern(baseIdentifier); + if (hasStandardName) { + return true; } else { - if (this.getDvObject() != null) { - fullStorageLocation = getStoragePath(); - - // S3 expects :/// - switch (baseDriverType) { - case DataAccess.S3: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" - + fullStorageLocation; - break; - case DataAccess.FILE: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") - + "/" + fullStorageLocation; - break; - default: - logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: " - + System.getProperty("dataverse.files." + baseDriverId + ".type")); - throw new IOException("Not supported"); - } - - } else if (storageLocation != null) { - // ://// - // remoteDriverId:// is removed if coming through directStorageIO - int index = storageLocation.indexOf(DataAccess.SEPARATOR); - if (index > 0) { - storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); - } - // THe base store needs the baseStoreIdentifier and not the relative URL - fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); - - switch (baseDriverType) { - case DataAccess.S3: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" - + fullStorageLocation; - break; - case DataAccess.FILE: - fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") - + "/" + fullStorageLocation; - break; - default: - logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: " - + System.getProperty("dataverse.files." + baseDriverId + ".type")); - throw new IOException("Not supported"); - } - } - baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); - } - if (baseDriverType.contentEquals(DataAccess.S3)) { - ((S3AccessIO) baseStore).setMainDriver(false); + logger.warning("Unacceptable identifier pattern in submitted identifier: " + baseIdentifier); + return false; } } - remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name"); + // Remote endpoints require a valid URI within the baseUrl try { - remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); - } catch (MalformedURLException mfue) { - logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); - } - } - - - protected static boolean isValidIdentifier(String driverId, String storageId) { - String urlPath = storageId.substring(storageId.lastIndexOf("//") + 2); - String baseUrl = System.getProperty("dataverse.files." 
+ driverId + ".base-url"); - try { - URI absoluteURI = new URI(baseUrl + "/" + urlPath); + URI absoluteURI = new URI(baseUrl + "/" + baseIdentifier); if (!absoluteURI.normalize().toString().startsWith(baseUrl)) { logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId); return false; @@ -338,7 +279,6 @@ protected static boolean isValidIdentifier(String driverId, String storageId) { return true; } - public static void main(String[] args) { System.out.println("Running the main method"); if (args.length > 0) { @@ -347,15 +287,19 @@ public static void main(String[] args) { // System.setProperty("dataverse.files.globus.globus_client_id", // "2791b83e-b989-47c5-a7fa-ce65fd949522"); System.setProperty("dataverse.files.globus.base-url", "globus://d8c42580-6528-4605-9ad8-116a61982644"); - System.out.println("Valid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + System.out.println("NotValid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + System.out.println("ValidRemote: " + isValidIdentifier("globus", "globus://localid//of/the/hill")); + System.setProperty("dataverse.files.globus.managed", "true"); + + System.out.println("ValidLocal: " + isValidIdentifier("globus", "globus://176e28068b0-1c3f80357c42")); // System.setProperty("dataverse.files.globus.globus-token","Mjc5MWI4M2UtYjk4OS00N2M1LWE3ZmEtY2U2NWZkOTQ5NTIyOkt4ZEdndFVDUDVZZG5sRG4rRHEzaVMxTHBtTVRGNlB3RjlwWm9kRTBWNVE9"); System.setProperty("dataverse.files.globus.globus-token", "YTVlNzFjNzItYWVkYi00Mzg4LTkzNWQtY2NhM2IyODI2MzdmOnErQXRBeWNEMVM3amFWVnB0RlFnRk5zMTc3OFdDa3lGeVZPT3k0RDFpaXM9"); System.setProperty("dataverse.files.globus.base-store", "file"); System.setProperty("dataverse.files.file.type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); System.setProperty("dataverse.files.file.directory", "/tmp/files"); - logger.info(JvmSettings.BASE_URL.lookup("globus")); - logger.info(JvmSettings.GLOBUS_TOKEN.lookup("globus")); + // logger.info(JvmSettings.BASE_URL.lookup("globus")); + // logger.info(JvmSettings.GLOBUS_TOKEN.lookup("globus")); try { GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO( diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index 6b15bcf1dc8..a9653f2ab68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -65,6 +65,8 @@ public class RemoteOverlayAccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); + String globusAccessToken = null; + protected StorageIO baseStore = null; protected String path = null; protected String baseUrl = null; @@ -79,6 +81,9 @@ public class RemoteOverlayAccessIO extends StorageIO { protected static boolean trustCerts = false; protected int httpConcurrency = 4; + public RemoteOverlayAccessIO() { + } + public RemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); this.setIsLocalFile(false); @@ -445,7 +450,7 @@ int getUrlExpirationMinutes() { return 60; } - private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + protected void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { baseUrl = 
System.getProperty("dataverse.files." + this.driverId + ".base-url"); if (baseUrl == null) { throw new IOException("dataverse.files." + this.driverId + ".base-url is required"); @@ -616,7 +621,7 @@ public void saveInputStream(InputStream inputStream, Long filesize) throws IOExc } - protected static boolean isValidIdentifier(String driverId, String storageId) { + static boolean isValidIdentifier(String driverId, String storageId) { String urlPath = storageId.substring(storageId.lastIndexOf("//") + 2); String baseUrl = System.getProperty("dataverse.files." + driverId + ".base-url"); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index bfd5c5f0d8f..333d72e09b2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -611,7 +611,7 @@ public static boolean isDirectUploadEnabled(String driverId) { //Check that storageIdentifier is consistent with store's config //False will prevent direct uploads - protected static boolean isValidIdentifier(String driverId, String storageId) { + static boolean isValidIdentifier(String driverId, String storageId) { return false; } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 5c387710844..d98e1c9b7f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -594,11 +594,10 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin globusLogger.info("Starting an globusUpload "); - String datasetIdentifier = dataset.getStorageIdentifier(); - + // ToDo - use DataAccess methods? 
- String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3); - datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3); + //String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3); + //datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3); Thread.sleep(5000); @@ -670,18 +669,26 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin JsonArray filesJsonArray = jsonObject.getJsonArray("files"); if (filesJsonArray != null) { + String datasetIdentifier = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from // externalTool String storageIdentifier = fileJsonObject.getString("storageIdentifier"); - String[] bits = storageIdentifier.split(":"); - String bucketName = bits[1].replace("/", ""); + String[] parts = DataAccess.getDriverIdAndStorageLocation(storageIdentifier); + String storeId = parts[0]; + //If this is an S3 store, we need to split out the bucket name + String[] bits = parts[1].split(":"); + String bucketName = ""; + if(bits.length > 1) { + bucketName = bits[0]; + } String fileId = bits[bits.length - 1]; // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 - String fullPath = storageType + bucketName + "/" + datasetIdentifier + "/" + fileId; + //or globus:///10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 + String fullPath = storeId + "://" + bucketName + "/" + datasetIdentifier + "/" + fileId; String fileName = fileJsonObject.getString("fileName"); inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); @@ -690,7 +697,8 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, globusLogger); JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); - +logger.info("Size: " + newfilesJsonArray.size()); +logger.info("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); JsonArrayBuilder jsonDataSecondAPI = Json.createArrayBuilder(); for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { @@ -699,15 +707,21 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin String storageIdentifier = fileJsonObject.getString("storageIdentifier"); String fileName = fileJsonObject.getString("fileName"); String directoryLabel = fileJsonObject.getString("directoryLabel"); - String[] bits = storageIdentifier.split(":"); + String[] parts = DataAccess.getDriverIdAndStorageLocation(storageIdentifier); + //If this is an S3 store, we need to split out the bucket name + String[] bits = parts[1].split(":"); + String bucketName = ""; + if(bits.length > 1) { + bucketName = bits[0]; + } String fileId = bits[bits.length - 1]; - + List newfileJsonObject = IntStream.range(0, newfilesJsonArray.size()) .mapToObj(index -> ((JsonObject) newfilesJsonArray.get(index)).getJsonObject(fileId)) .filter(Objects::nonNull).collect(Collectors.toList()); - if (newfileJsonObject != null) { - if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { + logger.info("List Size: " + newfileJsonObject.size()); + //if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { JsonPatch path = Json.createPatchBuilder() 
.add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); fileJsonObject = path.apply(fileJsonObject); @@ -716,11 +730,11 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin fileJsonObject = path.apply(fileJsonObject); jsonDataSecondAPI.add(fileJsonObject); countSuccess++; - } else { - globusLogger.info(fileName - + " will be skipped from adding to dataset by second API due to missing values "); - countError++; - } + // } else { + // globusLogger.info(fileName + // + " will be skipped from adding to dataset by second API due to missing values "); + // countError++; + // } } else { globusLogger.info(fileName + " will be skipped from adding to dataset by second API due to missing values "); @@ -1045,8 +1059,8 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger) } catch (IOException ioex) { count = 3; logger.info(ioex.getMessage()); - globusLogger.info("S3AccessIO: DataFile (fullPAth " + fullPath - + ") does not appear to be an S3 object associated with driver: "); + globusLogger.info("DataFile (fullPAth " + fullPath + + ") does not appear to be accessible withing Dataverse: "); } catch (Exception ex) { count = count + 1; ex.printStackTrace(); From d6727c0bb7409a195fbb1878a492f5c2b0b23d4d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Sep 2023 10:54:54 -0400 Subject: [PATCH 0412/1092] CREATE instead of ALL for public schema --- scripts/installer/install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 9da64bff32e..5a7b9f75696 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -422,7 +422,7 @@ conn.close() if int(pg_major_version) >= 15: - conn_cmd = "GRANT ALL ON SCHEMA public TO "+pgUser+";" + conn_cmd = "GRANT CREATE ON SCHEMA public TO "+pgUser+";" print("PostgreSQL 15 or higher detected. 
Running " + conn_cmd) try: cur.execute(conn_cmd) From 37136c039471d15888609724916e89723394879b Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 17:14:11 +0100 Subject: [PATCH 0413/1092] Added: optional includeDeaccessioned parameter for getVersionFiles API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 13 ++++++++---- ...LatestAccessibleDatasetVersionCommand.java | 17 ++++++++------- ...tLatestPublishedDatasetVersionCommand.java | 21 ++++++++++++------- ...pecificPublishedDatasetVersionCommand.java | 18 +++++++++------- 4 files changed, 42 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d082d9c29da..5064579ebfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -501,10 +501,11 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @QueryParam("categoryName") String categoryName, @QueryParam("searchText") String searchText, @QueryParam("orderCriteria") String orderCriteria, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; try { fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; @@ -2709,11 +2710,15 @@ public static T handleVersion(String versionId, DsVersionHandler hdl) } private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, false); + } + + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse { DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler>() { @Override public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned); } @Override @@ -2723,12 +2728,12 @@ public Command handleDraft() { @Override public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned); } @Override public Command handleLatestPublished() { - return new GetLatestPublishedDatasetVersionCommand(req, ds); + return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned); } })); if (dsv == null || dsv.getId() == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index 680a5c3aaef..1454a4b1fdd 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -17,29 +17,30 @@ /** * Get the latest version of a dataset a user can view. + * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand{ +public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand { private final Dataset ds; + private final boolean includeDeaccessioned; public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { + this(aRequest, anAffectedDataset, false); + } + + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - - return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds)); - + return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned)); } - - - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index 18adff2e55c..9765d0945d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -9,26 +9,31 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand{ +public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; - + private boolean includeDeaccessioned; + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { + this(aRequest, anAffectedDataset, false); + } + + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv: ds.getVersions()) { - if (dsv.isReleased()) { + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { return dsv; - } } - return null; } - } \ No newline at end of file + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java index 3efb38e4a91..879a694ef57 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java @@ -15,27 +15,32 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand{ +public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; private final long majorVersion; private final long minorVersion; - + private boolean includeDeaccessioned; + public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum) { + this(aRequest, anAffectedDataset, majorVersionNum, minorVersionNum, false); + } + + public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; majorVersion = majorVersionNum; minorVersion = minorVersionNum; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv: ds.getVersions()) { - if (dsv.isReleased()) { + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) { return dsv; } @@ -43,5 +48,4 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { } return null; } - -} \ No newline at end of file +} From d9b3f547a14bd882e252ddc9d8060a7f40bfff3d Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Sep 2023 17:24:11 +0100 Subject: [PATCH 0414/1092] Fixed: null check for DataFile owner in JsonPrinter --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 2b04bb3f657..e5cd72ff5fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -691,8 +691,11 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) - .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()) - .add("fileAccessRequest", df.getOwner().isFileAccessRequest()); + .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); + Dataset dfOwner = df.getOwner(); + if (dfOwner != null) { + builder.add("fileAccessRequest", dfOwner.isFileAccessRequest()); + } /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the From fa9d8694ca82d2399bee200d3b3aed3d6579f386 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Sep 2023 12:49:30 -0400 Subject: [PATCH 0415/1092] merge issues --- .../iq/dataverse/FileAccessRequestServiceBean.java | 10 ++++------ .../harvard/iq/dataverse/FileDownloadServiceBean.java | 1 + 2 files changed, 5 
insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java index 215e4695a75..af8577fad34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java @@ -1,12 +1,10 @@ package edu.harvard.iq.dataverse; import java.util.List; -import javax.ejb.Stateless; -import javax.inject.Named; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; - -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; /** * diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index 06509b30c88..0758f053470 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import edu.harvard.iq.dataverse.util.StringUtil; From f6fe5dcbd73bbf8d631589dc335244bd90bc0c5e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Sep 2023 12:50:10 -0400 Subject: [PATCH 0416/1092] change gbar to Boolean to match flyway --- .../edu/harvard/iq/dataverse/DvObjectContainer.java | 13 +++++-------- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index a18b09cb4b8..da77df786d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -39,7 +39,7 @@ public boolean isEffectivelyPermissionRoot() { private String metadataLanguage=null; - private String guestbookAtRequest = null; + private Boolean guestbookAtRequest = null; public String getEffectiveStorageDriverId() { String id = storageDriver; @@ -101,7 +101,7 @@ public static boolean isMetadataLanguageSet(String mdLang) { public boolean getEffectiveGuestbookEntryAtRequest() { boolean gbAtRequest = false; - if (guestbookAtRequest==null || UNDEFINED_CODE.equals(guestbookAtRequest)) { + if (guestbookAtRequest==null) { if (this.getOwner() != null) { gbAtRequest = this.getOwner().getEffectiveGuestbookEntryAtRequest(); } else { @@ -111,15 +111,12 @@ public boolean getEffectiveGuestbookEntryAtRequest() { } } } else { - gbAtRequest = Boolean.parseBoolean(guestbookAtRequest); + gbAtRequest = guestbookAtRequest; } return gbAtRequest; } - public String getGuestbookEntryAtRequest() { - if (guestbookAtRequest == null) { - return UNDEFINED_CODE; - } + public Boolean getGuestbookEntryAtRequest() { return guestbookAtRequest; } @@ -128,7 +125,7 @@ public void setGuestbookEntryAtRequest(String gbAtRequest) { this.guestbookAtRequest = null; } else { //Force to true or false - this.guestbookAtRequest = Boolean.valueOf(Boolean.parseBoolean(gbAtRequest)).toString(); + 
this.guestbookAtRequest = Boolean.valueOf(Boolean.parseBoolean(gbAtRequest)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 55e329bea68..a1191a6f4e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3888,8 +3888,8 @@ public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @P } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - String gbAtRequest = dataset.getGuestbookEntryAtRequest(); - if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) { + Boolean gbAtRequest = dataset.getGuestbookEntryAtRequest(); + if(gbAtRequest == null) { return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } return ok(dataset.getEffectiveGuestbookEntryAtRequest()); From 829fed9db0333f108fb3bf01782c44ec44cf353f Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 12 Sep 2023 17:16:54 -0400 Subject: [PATCH 0417/1092] Fixes the issues with the permissions of the docker scripts on Windows reported on the issue #9904 --- modules/container-configbaker/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 564216b3572..44f3806a591 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -38,6 +38,7 @@ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml # Copy the data from scripts/api that provide the common base setup you'd get from the installer. # ".dockerignore" will take care of taking only the bare necessities COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ +RUN chmod +x ${BOOTSTRAP_DIR}/*/*.sh # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] From bfe7f9c3537a89b75fd3190d063433c8f6147f96 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 10:56:59 -0400 Subject: [PATCH 0418/1092] RestAssured tests for the new functionality added to the /versions api. (#9763) --- .../harvard/iq/dataverse/api/DatasetsIT.java | 85 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 47 +++++++++- 2 files changed, 130 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b353b4488d0..d5b3dbca05a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -555,6 +555,91 @@ public void testCreatePublishDestroyDataset() { } + /** + * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} + * are called from other RestAssured tests, in this class and also FileIT. + * But this test is dedicated to this api specifically, and focuses on the + * functionality added to it in 6.1. 
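+     * For example, an illustrative request exercising the new parameter (the
+     * parameter name matches the UtilIT changes below; the exact URL shape is
+     * an assumption): GET /api/datasets/{id}/versions?includeFiles=false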
+ */ + @Test + public void testDatasetVersionsAPI() { + // Create user + String apiToken = UtilIT.createRandomUserGetToken(); + + // Create user with no permission + String apiTokenNoPerms = UtilIT.createRandomUserGetToken(); + + // Create Collection + String collectionAlias = UtilIT.createRandomCollectionGetAlias(apiToken); + + // Create Dataset + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(collectionAlias, apiToken); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + // Upload file + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Integer fileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + // Check that the file we just uploaded is shown by the versions api: + Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .body("data.files.size()", equalTo(1)) + .statusCode(OK.getStatusCode()); + + // Now check that the file is NOT shown, when we ask the versions api to + // skip files: + boolean skipFiles = true; + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken, skipFiles); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .body("data.files", equalTo(null)) + .statusCode(OK.getStatusCode()); + + // Publish collection and dataset + UtilIT.publishDataverseViaNativeApi(collectionAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + // Upload another file: + String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response uploadResponse2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, apiToken); + uploadResponse2.then().assertThat().statusCode(OK.getStatusCode()); + + // We should now have a published version, and a draft. + + // Call /versions api, *with the owner api token*, make sure both + // versions are listed + Response versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(2)); + + // And now call it with an un-privileged token, to make sure only one + // (the published one) version is shown: + + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)); + + // And now call the "short", no-files version of the same api + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms, skipFiles); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].files", equalTo(null)); + } + + /** * This test requires the root dataverse to be published to pass. 
*/ diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e47971f9b92..678d4e5523b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -9,6 +9,7 @@ import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static jakarta.ws.rs.core.Response.Status.CREATED; import java.io.File; import java.io.IOException; @@ -51,7 +52,6 @@ import java.util.Collections; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; -import static org.junit.jupiter.api.Assertions.*; public class UtilIT { @@ -119,6 +119,16 @@ public static Response createRandomUser() { return createRandomUser("user"); } + + /** + * A convenience method for creating a random test user, when all you need + * is the api token. + * @return apiToken + */ + public static String createRandomUserGetToken(){ + Response createUser = createRandomUser(); + return getApiTokenFromResponse(createUser); + } public static Response createUser(String username, String email) { logger.info("Creating user " + username); @@ -369,6 +379,20 @@ static Response createRandomDataverse(String apiToken) { String category = null; return createDataverse(alias, category, apiToken); } + + /** + * A convenience method for creating a random collection and getting its + * alias in one step. + * @param apiToken + * @return alias + */ + static String createRandomCollectionGetAlias(String apiToken){ + + Response createCollectionResponse = createRandomDataverse(apiToken); + //createDataverseResponse.prettyPrint(); + createCollectionResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + return UtilIT.getAliasFromResponse(createCollectionResponse); + } static Response showDataverseContents(String alias, String apiToken) { return given() @@ -1403,9 +1427,17 @@ static Response nativeGetUsingPersistentId(String persistentId, String apiToken) } static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken) { + return getDatasetVersion(persistentId, versionNumber, apiToken, false); + } + + static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken, boolean skipFiles) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/:persistentId/versions/" + versionNumber + "?persistentId=" + persistentId); + .get("/api/datasets/:persistentId/versions/" + + versionNumber + + "?persistentId=" + + persistentId + + (skipFiles ? "&includeFiles=false" : "")); } static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { @@ -1767,6 +1799,10 @@ static Response removeDatasetThumbnail(String datasetPersistentId, String apiTok } static Response getDatasetVersions(String idOrPersistentId, String apiToken) { + return getDatasetVersions(idOrPersistentId, apiToken, false); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, boolean skipFiles) { logger.info("Getting Dataset Versions"); String idInPath = idOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. 
@@ -1774,6 +1810,13 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken) { idInPath = ":persistentId"; optionalQueryParam = "?persistentId=" + idOrPersistentId; } + if (skipFiles) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?includeFiles=false"; + } else { + optionalQueryParam = optionalQueryParam.concat("&includeFiles=false"); + } + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() From 8e894c37a17ce184bb3c59eb027dc03ed0f21274 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 11:42:25 -0400 Subject: [PATCH 0419/1092] added another test, for the pagination functionality in the /versions api (also being added in 6.1). #9763 --- .../harvard/iq/dataverse/api/DatasetsIT.java | 26 ++++++++++++++----- .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 ++++++++++++++++ 2 files changed, 41 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index d5b3dbca05a..4a0e1c857c7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -557,7 +557,7 @@ public void testCreatePublishDestroyDataset() { /** * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} - * are called from other RestAssured tests, in this class and also FileIT. + * are already called from other RestAssured tests, in this class and also FileIT. * But this test is dedicated to this api specifically, and focuses on the * functionality added to it in 6.1. */ @@ -584,8 +584,6 @@ public void testDatasetVersionsAPI() { String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Integer fileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); // Check that the file we just uploaded is shown by the versions api: Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); @@ -615,13 +613,27 @@ public void testDatasetVersionsAPI() { // We should now have a published version, and a draft. // Call /versions api, *with the owner api token*, make sure both - // versions are listed + // versions are listed; also check that the correct numbers of files + // are shown in each version (2 in the draft, 1 in the published). 
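+        // A sketch of the JSON shape the assertions below assume (illustrative,
+        // not captured output): {"data": [{"files": [f1, f2]}, {"files": [f1]}]}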
Response versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken); versionsResponse.prettyPrint(); versionsResponse.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.size()", equalTo(2)); - + .body("data.size()", equalTo(2)) + .body("data[0].files.size()", equalTo(2)) + .body("data[1].files.size()", equalTo(1)); + + // Now call the this api with the new (as of 6.1) pagination parameters + Integer offset = 0; + Integer howmany = 1; + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken, offset, howmany); + // (the above should return only one version, the draft) + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].files.size()", equalTo(2)); + // And now call it with an un-privileged token, to make sure only one // (the published one) version is shown: @@ -630,7 +642,7 @@ public void testDatasetVersionsAPI() { versionsResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(1)); - + // And now call the "short", no-files version of the same api versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms, skipFiles); versionsResponse.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 678d4e5523b..f94cfa8e400 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1803,6 +1803,14 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken) { } static Response getDatasetVersions(String idOrPersistentId, String apiToken, boolean skipFiles) { + return getDatasetVersions(idOrPersistentId, apiToken, null, null, skipFiles); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit) { + return getDatasetVersions(idOrPersistentId, apiToken, offset, limit, false); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit, boolean skipFiles) { logger.info("Getting Dataset Versions"); String idInPath = idOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. @@ -1817,6 +1825,20 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken, boo optionalQueryParam = optionalQueryParam.concat("&includeFiles=false"); } } + if (offset != null) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?offset="+offset; + } else { + optionalQueryParam = optionalQueryParam.concat("&offset="+offset); + } + } + if (limit != null) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?limit="+limit; + } else { + optionalQueryParam = optionalQueryParam.concat("&limit="+limit); + } + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() From b9e99f3e7253d836aadebac8b128efa21027eef8 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 11:43:42 -0400 Subject: [PATCH 0420/1092] typo in a comment. 
#9763 --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 4a0e1c857c7..e726337cf8b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -557,7 +557,7 @@ public void testCreatePublishDestroyDataset() { /** * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} - * are already called from other RestAssured tests, in this class and also FileIT. + * are already called from other RestAssured tests, in this class and also in FilesIT. * But this test is dedicated to this api specifically, and focuses on the * functionality added to it in 6.1. */ From f164a681deaf14d27ee5fb35a344805d86ac631b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 13 Sep 2023 11:46:27 -0400 Subject: [PATCH 0421/1092] more typos in comments. (#9763) --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index e726337cf8b..23fc5911ad0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -623,7 +623,7 @@ public void testDatasetVersionsAPI() { .body("data[0].files.size()", equalTo(2)) .body("data[1].files.size()", equalTo(1)); - // Now call the this api with the new (as of 6.1) pagination parameters + // Now call this api with the new (as of 6.1) pagination parameters Integer offset = 0; Integer howmany = 1; versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken, offset, howmany); @@ -635,7 +635,7 @@ public void testDatasetVersionsAPI() { .body("data[0].files.size()", equalTo(2)); // And now call it with an un-privileged token, to make sure only one - // (the published one) version is shown: + // (the published) version is shown: versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms); versionsResponse.prettyPrint(); From 3d2255b963f869028b68576075462664f67a5888 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20ROUCOU?= Date: Wed, 13 Sep 2023 18:35:40 +0200 Subject: [PATCH 0422/1092] Assign roles from email address Give a user a role based on the email address of the user's account --- .../iq/dataverse/authorization/users/AuthenticatedUser.java | 3 ++- src/main/webapp/roles-assign.xhtml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 89429b912f6..17db9e63e8b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -64,7 +64,8 @@ @NamedQuery( name="AuthenticatedUser.filter", query="select au from AuthenticatedUser au WHERE (" + "LOWER(au.userIdentifier) like LOWER(:query) OR " - + "lower(concat(au.firstName,' ',au.lastName)) like lower(:query))"), + + "lower(concat(au.firstName,' ',au.lastName)) like lower(:query) or " + + "lower(au.email) like lower(:query))"), @NamedQuery( name="AuthenticatedUser.findAdminUser", query="select au from AuthenticatedUser au WHERE " + "au.superuser = 
true " diff --git a/src/main/webapp/roles-assign.xhtml b/src/main/webapp/roles-assign.xhtml index 4b31f10dbfc..4b355c74d5c 100644 --- a/src/main/webapp/roles-assign.xhtml +++ b/src/main/webapp/roles-assign.xhtml @@ -31,7 +31,8 @@ styleClass="DropdownPopup" panelStyleClass="DropdownPopupPanel" var="roleAssignee" itemLabel="#{roleAssignee.displayInfo.title}" itemValue="#{roleAssignee}" converter="roleAssigneeConverter"> - + + From e15b80319621965acd0cc3c8aa3987f78de53442 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 10:49:29 +0200 Subject: [PATCH 0423/1092] feat(test): update tags for ITs in new class since Dataverse 6.0 With moving to JUnit5 in Dataverse 6.0, we can add nice tag strings in a helper class. With this commit we add a tag for integration tests ("integration"), but also add a feature like tag "testcontainers" to allow for more granular filtering. --- .../oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java | 4 +++- src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 2c963e8df46..e794bfa7d48 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; +import edu.harvard.iq.dataverse.util.testing.Tags; import org.htmlunit.FailingHttpStatusCodeException; import org.htmlunit.WebClient; import org.htmlunit.WebResponse; @@ -51,7 +52,8 @@ import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.Mockito.when; -@Tag("testcontainers") +@Tag(Tags.INTEGRATION_TEST) +@Tag(Tags.USES_TESTCONTAINERS) @Testcontainers @ExtendWith(MockitoExtension.class) // NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl() diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java index dcbd0529d8b..1544d393896 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java @@ -2,4 +2,6 @@ public class Tags { public static final String NOT_ESSENTIAL_UNITTESTS = "not-essential-unittests"; + public static final String INTEGRATION_TEST = "integration"; + public static final String USES_TESTCONTAINERS = "testcontainers"; } From 9f173dbd89b906bdacbeda0708d0bf5ab07e62f5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 10:50:39 +0200 Subject: [PATCH 0424/1092] feat(test): make test groups (tags) in verify phase selectable from property Adding a property allows creating filters on the command line. 
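For example, a hypothetical invocation: mvn -Dit.groups='integration & !testcontainers' verify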
See also: https://junit.org/junit5/docs/current/user-guide/#running-tests-tag-expressions --- pom.xml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 7ded2367e88..bd3c08c8256 100644 --- a/pom.xml +++ b/pom.xml @@ -20,6 +20,7 @@ false false + integration @@ -845,7 +846,7 @@ org.apache.maven.plugins maven-failsafe-plugin - testcontainers + ${it.groups} ${failsafe.jacoco.args} ${argLine} ${skipIntegrationTests} From 7b4153c0ceb954b5f69330ec5c3f70f4f7f2f0fc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 10:52:22 +0200 Subject: [PATCH 0425/1092] feat(test): only instrument our own classes with JaCoCo This way we avoid unnecessary instrumentation of dependency classes we don't want anyway and get us in trouble when methods are too large etc. --- pom.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/pom.xml b/pom.xml index bd3c08c8256..a8633b62b2e 100644 --- a/pom.xml +++ b/pom.xml @@ -745,6 +745,11 @@ ${project.build.directory}/coverage-reports/jacoco-unit.exec surefire.jacoco.args + + edu/harvard/iq/dataverse/* + io/gdcc/* + org/dataverse/* +
    @@ -768,6 +773,11 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec failsafe.jacoco.args ${skipIntegrationTests} + + edu/harvard/iq/dataverse/* + io/gdcc/* + org/dataverse/* + From 6d1807c4751cad1f62e6fe39f4909c8ea8f75777 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 10:52:51 +0200 Subject: [PATCH 0426/1092] fix(test): skip JaCoCo execution also when unit tests are skipped --- pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pom.xml b/pom.xml index a8633b62b2e..13a834e9a67 100644 --- a/pom.xml +++ b/pom.xml @@ -745,6 +745,7 @@ ${project.build.directory}/coverage-reports/jacoco-unit.exec surefire.jacoco.args + ${skipUnitTests} edu/harvard/iq/dataverse/* io/gdcc/* @@ -761,6 +762,7 @@ ${project.build.directory}/coverage-reports/jacoco-unit.exec ${project.reporting.outputDirectory}/jacoco-unit-test-coverage-report + ${skipUnitTests} From 2cf7d45381680beb31bdfec5768a26072a50eda5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 14:12:54 +0200 Subject: [PATCH 0427/1092] doc(test): update testcontainers instructions after changes in DV 6.0 Also cleans up the mix of tabs and spaces for indentation. --- .../source/developers/testing.rst | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 8e44bcc822d..18686e566f8 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -5,7 +5,7 @@ Testing In order to keep our codebase healthy, the Dataverse Project encourages developers to write automated tests in the form of unit tests and integration tests. We also welcome ideas for how to improve our automated testing. .. contents:: |toctitle| - :local: + :local: The Health of a Codebase ------------------------ @@ -294,28 +294,30 @@ To run a test with Testcontainers, you will need to write a JUnit 5 test. Please make sure to: 1. End your test class with ``IT`` -2. Provide a ``@Tag("testcontainers")`` to be picked up during testing. +2. Annotate the test class with two tags: -.. code:: java + .. code:: java - /** A very minimal example for a Testcontainers integration test class. */ - @Testcontainers - @Tag("testcontainers") - class MyExampleIT { /* ... */ } + /** A very minimal example for a Testcontainers integration test class. */ + @Testcontainers + @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST) + @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS) + class MyExampleIT { /* ... */ } -If using upstream Modules, e.g. for PostgreSQL or similar, you will need to add +If using upstream modules, e.g. for PostgreSQL or similar, you will need to add a dependency to ``pom.xml`` if not present. `See the PostgreSQL module example. `_ To run these tests, simply call out to Maven: .. code:: - mvn -P tc verify + mvn verify .. note:: - 1. Remember to have Docker ready to serve or tests will fail. - 2. This will not run any unit tests or API tests. + 1. Remember to have Docker ready to serve or tests will fail. + 2. You can skip running unit tests by adding ``-DskipUnitTests`` + 3. 
You can choose to ignore test with Testcontainers by adding ``-Dit.groups='integration & !testcontainers'`` Measuring Coverage of Integration Tests --------------------------------------- From d8283d9b56e9809e413fcee447971badcd8a57c5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 14:14:56 +0200 Subject: [PATCH 0428/1092] doc(test): fix location of JaCoCo reports With the combining of unit and integration test coverage reports, the location where to access the HTML reports changed for single and merged reports. --- doc/sphinx-guides/source/developers/testing.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 18686e566f8..9566cd509ca 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -114,7 +114,10 @@ Observing Changes to Code Coverage Once you've written some tests, you're probably wondering how much you've helped to increase the code coverage. In Netbeans, do a "clean and build." Then, under the "Projects" tab, right-click "dataverse" and click "Code Coverage" -> "Show Report". For each Java file you have open, you should be able to see the percentage of code that is covered by tests and every line in the file should be either green or red. Green indicates that the line is being exercised by a unit test and red indicates that it is not. -In addition to seeing code coverage in Netbeans, you can also see code coverage reports by opening ``target/site/jacoco/index.html`` in your browser. +In addition to seeing code coverage in Netbeans, you can also see code coverage reports by opening ``target/site/jacoco-X-test-coverage-report/index.html`` in your browser. +Depending on the report type you want to look at, let ``X`` be one of ``unit``, ``integration`` or ``merged``. +"Merged" will display combined coverage of both unit and integration test, but does currently not cover API tests. + Testing Commands ^^^^^^^^^^^^^^^^ From ba87bca46a5738e55ee6e0cc7b917e8fe3fd1a33 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 14:17:56 +0200 Subject: [PATCH 0429/1092] doc(test): fix language for API ITs The described testing is not about integration tests, but end to end API tests. This commit makes the language more precise. --- doc/sphinx-guides/source/developers/testing.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 9566cd509ca..78e5a47fb5b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -322,10 +322,11 @@ To run these tests, simply call out to Maven: 2. You can skip running unit tests by adding ``-DskipUnitTests`` 3. You can choose to ignore test with Testcontainers by adding ``-Dit.groups='integration & !testcontainers'`` -Measuring Coverage of Integration Tests ---------------------------------------- -Measuring the code coverage of integration tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara6" as the Payara directory and "dataverse" as the Payara Unix user. +Measuring Coverage of API Tests +------------------------------- + +Measuring the code coverage of API tests with Jacoco requires several steps. 
In order to make these steps clear we'll use "/usr/local/payara6" as the Payara directory and "dataverse" as the Payara Unix user. Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara. @@ -375,8 +376,8 @@ Run this as the "dataverse" user. Note that after deployment the file "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" exists and is empty. -Run Integration Tests -~~~~~~~~~~~~~~~~~~~~~ +Run API Tests +~~~~~~~~~~~~~ Note that even though you see "docker-aio" in the command below, we assume you are not necessarily running the test suite within Docker. (Some day we'll probably move this script to another directory.) For this reason, we pass the URL with the normal port (8080) that app servers run on to the ``run-test-suite.sh`` script. From 071971981eb72f922d22a1ad9d2f1e0058b71136 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 14 Sep 2023 14:25:47 +0200 Subject: [PATCH 0430/1092] doc(test): add link to JUnit filter expression docs --- doc/sphinx-guides/source/developers/testing.rst | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 78e5a47fb5b..d7925308e4b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -316,11 +316,12 @@ To run these tests, simply call out to Maven: mvn verify -.. note:: +Notes: - 1. Remember to have Docker ready to serve or tests will fail. - 2. You can skip running unit tests by adding ``-DskipUnitTests`` - 3. You can choose to ignore test with Testcontainers by adding ``-Dit.groups='integration & !testcontainers'`` +1. Remember to have Docker ready to serve or tests will fail. +2. You can skip running unit tests by adding ``-DskipUnitTests`` +3. You can choose to ignore test with Testcontainers by adding ``-Dit.groups='integration & !testcontainers'`` + Learn more about `filter expressions in the JUnit 5 guide `_. 
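+   (For instance, a hypothetical inverse filter, ``-Dit.groups='integration & testcontainers'``, would run only the Testcontainers-backed tests.)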
Measuring Coverage of API Tests From efaf5d558b34705f8f6998c56a53a8a3d62050ad Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 15 Sep 2023 14:19:33 +0200 Subject: [PATCH 0431/1092] refactor(test,sitemap): make SiteMapUtilTest use better JUnit5 checks --- .../iq/dataverse/sitemap/SiteMapUtilTest.java | 73 +++++++++---------- 1 file changed, 35 insertions(+), 38 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java index ac6fa1e5166..41032ffa811 100644 --- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java @@ -10,7 +10,6 @@ import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; import java.io.IOException; -import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -21,17 +20,39 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import static org.junit.jupiter.api.Assertions.*; + import static org.junit.jupiter.api.Assertions.*; import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.xml.sax.SAXException; -public class SiteMapUtilTest { - +class SiteMapUtilTest { + + @TempDir + Path tempDir; + Path tempDocroot; + + @BeforeEach + void setup() throws IOException { + // NOTE: This might be unsafe for parallel tests, but our @SystemProperty helper does not yet support + // lookups from vars or methods. + System.setProperty("test.filesDir", tempDir.toString()); + this.tempDocroot = tempDir.resolve("docroot"); + Files.createDirectory(tempDocroot); + } + + @AfterEach + void teardown() { + System.clearProperty("test.filesDir"); + } + @Test - public void testUpdateSiteMap() throws IOException, ParseException { - + void testUpdateSiteMap() throws IOException, ParseException, SAXException { + // given List dataverses = new ArrayList<>(); String publishedDvString = "publishedDv1"; Dataverse publishedDataverse = new Dataverse(); @@ -77,40 +98,18 @@ public void testUpdateSiteMap() throws IOException, ParseException { datasetVersions.add(datasetVersion); deaccessioned.setVersions(datasetVersions); datasets.add(deaccessioned); - - Path tmpDirPath = Files.createTempDirectory(null); - String tmpDir = tmpDirPath.toString(); - File docroot = new File(tmpDir + File.separator + "docroot"); - docroot.mkdirs(); - // TODO: this and the above should be replaced with JUnit 5 @TestDir - System.setProperty("test.filesDir", tmpDir); - + + // when SiteMapUtil.updateSiteMap(dataverses, datasets); - - String pathToTest = tmpDirPath + File.separator + "docroot" + File.separator + "sitemap"; - String pathToSiteMap = pathToTest + File.separator + "sitemap.xml"; - - Exception wellFormedXmlException = null; - try { - assertTrue(XmlValidator.validateXmlWellFormed(pathToSiteMap)); - } catch (Exception ex) { - System.out.println("Exception caught checking that XML is well formed: " + ex); - wellFormedXmlException = ex; - } - assertNull(wellFormedXmlException); - - Exception notValidAgainstSchemaException = null; - try { - assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"))); - } catch (MalformedURLException | SAXException ex) { - System.out.println("Exception caught 
validating XML against the sitemap schema: " + ex); - notValidAgainstSchemaException = ex; - } - assertNull(notValidAgainstSchemaException); + + // then + String pathToSiteMap = tempDocroot.resolve("sitemap").resolve("sitemap.xml").toString(); + assertDoesNotThrow(() -> XmlValidator.validateXmlWellFormed(pathToSiteMap)); + assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"))); File sitemapFile = new File(pathToSiteMap); String sitemapString = XmlPrinter.prettyPrintXml(new String(Files.readAllBytes(Paths.get(sitemapFile.getAbsolutePath())))); - System.out.println("sitemap: " + sitemapString); + //System.out.println("sitemap: " + sitemapString); assertTrue(sitemapString.contains("1955-11-12")); assertTrue(sitemapString.contains(publishedPid)); @@ -118,8 +117,6 @@ public void testUpdateSiteMap() throws IOException, ParseException { assertFalse(sitemapString.contains(harvestedPid)); assertFalse(sitemapString.contains(deaccessionedPid)); - System.clearProperty("test.filesDir"); - } } From 573bed941cfebae0282c53785a3847cddaf6b809 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 15 Sep 2023 09:39:46 -0400 Subject: [PATCH 0432/1092] typo - using schema.org SoftwareApplication --- src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index fd219bf9d93..1ea1a5411fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -303,7 +303,7 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) { //Start with a reference to the Dataverse software JsonObjectBuilder dvSoftwareBuilder = Json.createObjectBuilder() - .add("@type", JsonLDTerm.ore("SoftwareApplication").getLabel()) + .add("@type", JsonLDTerm.schemaOrg("SoftwareApplication").getLabel()) .add(JsonLDTerm.schemaOrg("name").getLabel(), DATAVERSE_SOFTWARE_NAME) .add(JsonLDTerm.schemaOrg("version").getLabel(), systemConfig.getVersion(true)) .add(JsonLDTerm.schemaOrg("url").getLabel(), DATAVERSE_SOFTWARE_URL); From 5a7568afc9804dc5e9159dc5f395eebfc963d2c1 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 15 Sep 2023 11:12:12 -0400 Subject: [PATCH 0433/1092] update docs, add release note --- doc/release-notes/9859-ORE and Bag updates.md | 14 ++++++++++++++ doc/sphinx-guides/source/admin/integrations.rst | 9 ++++++++- doc/sphinx-guides/source/api/native-api.rst | 4 +++- doc/sphinx-guides/source/installation/config.rst | 14 +++++++------- 4 files changed, 32 insertions(+), 9 deletions(-) create mode 100644 doc/release-notes/9859-ORE and Bag updates.md diff --git a/doc/release-notes/9859-ORE and Bag updates.md b/doc/release-notes/9859-ORE and Bag updates.md new file mode 100644 index 00000000000..dd3ae3bbbe1 --- /dev/null +++ b/doc/release-notes/9859-ORE and Bag updates.md @@ -0,0 +1,14 @@ +Dataverse's OAI_ORE Metadata Export format and archival BagIT exports +(which include the OAI-ORE metadata export file) have been updated to include +information about the dataset version state, e.g. RELEASED or DEACCESSIONED +and to indicate which version of Dataverse was used to create the archival Bag. 
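The Dataverse software reference corrected in the ``OREMap`` change above appears in the exported JSON-LD roughly as follows (a sketch derived from the builder calls in the diff; the prefix form and the concrete name/version/url values are illustrative rather than verbatim output):

    {
      "@type": "schema:SoftwareApplication",
      "schema:name": "Dataverse",
      "schema:version": "6.0",
      "schema:url": "https://dataverse.org"
    }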
+As part of the latter, the current OAI_ORE Metadata format has been given a 1.0.0 +version designation and it is expected that any future changes to the OAI_ORE export +format will result in a version change and that tools such as DVUploader that can +recreate datasets from archival Bags will start indicating which version(s) of the +OAI_ORE format they can read. + +Dataverse installations that have been using archival Bags may wish to update any +existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse +[archival Bag export API](https://guides.dataverse.org/en/latest/installation/config.html#bagit-export-api-calls) +to generate updated versions. \ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index 21adf8338d9..9a24cf0715c 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -217,7 +217,14 @@ Sponsored by the `Ontario Council of University Libraries (OCUL) `_ zipped `BagIt `_ bags to the `Chronopolis `_ via `DuraCloud `_, to a local file system, or to `Google Cloud Storage `_. +A Dataverse installation can be configured to submit a copy of published Dataset versions, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ bags to `Chronopolis `_ via `DuraCloud `_, a local file system, any S3 store, or to `Google Cloud Storage `_. +Submission can be automated to occur upon publication, or can be done periodically (via external scripting). +The archival status of each Dataset version can be seen in the Dataset page version table and queried via API. + +The archival Bags include all of the files and metadata in a given dataset version and are sufficient to recreate the dataset, e.g. in a new Dataverse instance, or potentially in another RDA-conformant repository. +Specifically, the archival Bags include an OAI-ORE Map serialized as JSON-LD that describe the dataset and it's files, as well as information about the version of Dataverse used to export the archival Bag. + +The `DVUploader `_ includes functionality to recreate a Dataset from an archival Bag produced by Dataverse (using the Dataverse API to do so). For details on how to configure this integration, see :ref:`BagIt Export` in the :doc:`/installation/config` section of the Installation Guide. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4d9466703e4..e87842ab1c7 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2088,10 +2088,12 @@ The API call requires a Json body that includes the list of the fileIds that the curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" "$SERVER_URL/api/datasets/:persistentId/files/actions/:unset-embargo?persistentId=$PERSISTENT_IDENTIFIER" -d "$JSON" +.. _Archival Status API: + Get the Archival Status of a Dataset By Version ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Archiving is an optional feature that may be configured for a Dataverse installation. When that is enabled, this API call be used to retrieve the status. Note that this requires "superuser" credentials. +Archival :ref:`BagIt Export` is an optional feature that may be configured for a Dataverse installation. When that is enabled, this API call be used to retrieve the status. Note that this requires "superuser" credentials. 
``GET /api/datasets/$dataset-id/$version/archivalStatus`` returns the archival status of the specified dataset version. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f9fe74afc7c..cd841e22f6c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1,4 +1,3 @@ -============= Configuration ============= @@ -1425,24 +1424,25 @@ BagIt file handler configuration settings: BagIt Export ------------ -Your Dataverse installation may be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ archival Bags (sometimes called BagPacks) to `Chronopolis `_ via `DuraCloud `_ or alternately to any folder on the local filesystem. +Your Dataverse installation may be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ archival Bags (sometimes called BagPacks) to one of several supported storage services. +Supported services include `Chronopolis `_ via `DuraCloud `_, Google's Cloud, and any service that can provide an S3 interface or handle files transferred to a folder on the local filesystem. -These archival Bags include all of the files and metadata in a given dataset version and are sufficient to recreate the dataset, e.g. in a new Dataverse instance, or postentially in another RDA-conformant repository. +These archival Bags include all of the files and metadata in a given dataset version and are sufficient to recreate the dataset, e.g. in a new Dataverse instance, or potentially in another RDA-conformant repository. The `DVUploader `_ includes functionality to recreate a Dataset from an archival Bag produced by Dataverse. (Note that this functionality is distinct from the :ref:`BagIt File Handler` upload files to an existing Dataset via the Dataverse user interface.) The Dataverse Software offers an internal archive workflow which may be configured as a PostPublication workflow via an admin API call to manually submit previously published Datasets and prior versions to a configured archive such as Chronopolis. The workflow creates a `JSON-LD `_ serialized `OAI-ORE `_ map file, which is also available as a metadata export format in the Dataverse Software web interface. At present, archiving classes include the DuraCloudSubmitToArchiveCommand, LocalSubmitToArchiveCommand, GoogleCloudSubmitToArchive, and S3SubmitToArchiveCommand , which all extend the AbstractSubmitToArchiveCommand and use the configurable mechanisms discussed below. (A DRSSubmitToArchiveCommand, which works with Harvard's DRS also exists and, while specific to DRS, is a useful example of how Archivers can support single-version-only semantics and support archiving only from specified collections (with collection specific parameters)). -All current options support the archival status APIs and the same status is available in the dataset page version table (for contributors/those who could view the unpublished dataset, with more detail available to superusers). +All current options support the :ref:`Archival Status API` calls and the same status is available in the dataset page version table (for contributors/those who could view the unpublished dataset, with more detail available to superusers). .. 
_Duracloud Configuration: Duracloud Configuration +++++++++++++++++++++++ -Also note that while the current Chronopolis implementation generates the archival Bag and submits it to the archive's DuraCloud interface, the step to make a 'snapshot' of the space containing the archival Bag (and verify it's successful submission) are actions a curator must take in the DuraCloud interface. +The current Chronopolis implementation generates the archival Bag and submits it to the archive's DuraCloud interface. The step to make a 'snapshot' of the space containing the archival Bag (and verify it's successful submission) are actions a curator must take in the DuraCloud interface. -The minimal configuration to support an archiver integration involves adding a minimum of two Dataverse Software Keys and any required Payara jvm options. The example instructions here are specific to the DuraCloud Archiver\: +The minimal configuration to support archiver integration involves adding a minimum of two Dataverse Software settings. Individual archivers may require additional settings and/or Payara jvm options and micro-profile settings. The example instructions here are specific to the DuraCloud Archiver\: \:ArchiverClassName - the fully qualified class to be used for archiving. For example: @@ -1452,7 +1452,7 @@ The minimal configuration to support an archiver integration involves adding a m ``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":DuraCloudHost, :DuraCloudPort, :DuraCloudContext, :BagGeneratorThreads"`` -The DPN archiver defines three custom settings, one of which is required (the others have defaults): +The DuraCloud archiver defines three custom settings, one of which is required (the others have defaults): \:DuraCloudHost - the URL for your organization's Duracloud site. For example: From 2ae1a9f847c256236bec8874ba307eb7e1631967 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Fri, 15 Sep 2023 17:55:26 +0200 Subject: [PATCH 0434/1092] Get JSON object from JsonUtil --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index febb785cd95..984c607aac7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -34,7 +34,6 @@ import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; import org.apache.commons.validator.routines.DomainValidator; -import java.io.StringReader; import java.sql.Timestamp; import java.text.ParseException; import java.util.ArrayList; @@ -53,7 +52,6 @@ import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonString; import jakarta.json.JsonValue; import jakarta.json.JsonValue.ValueType; @@ -682,8 +680,7 @@ private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) thr // convert DTO to datasetField so we can back valid values. 
Gson gson = new Gson(); String jsonString = gson.toJson(geoCoverageDTO); - JsonReader jsonReader = Json.createReader(new StringReader(jsonString)); - JsonObject obj = jsonReader.readObject(); + JsonObject obj = JsonUtil.getJsonObject(jsonString); DatasetField geoCoverageField = parseField(obj); // add back valid values From cbd395e584026ea17b80577d881b4ecd62fc6dab Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 15 Sep 2023 16:53:31 -0400 Subject: [PATCH 0435/1092] make configure tools available at the dataset level #9589 --- doc/release-notes/9589-ds-configure-tool.md | 1 + .../source/admin/external-tools.rst | 2 +- .../source/api/external-tools.rst | 4 +- .../edu/harvard/iq/dataverse/DatasetPage.java | 16 ++++ .../externaltools/ExternalToolHandler.java | 7 ++ src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/dataset.xhtml | 11 +++ .../iq/dataverse/api/ExternalToolsIT.java | 79 +++++++++++++++++++ .../ExternalToolHandlerTest.java | 42 ++++++++++ 9 files changed, 160 insertions(+), 3 deletions(-) create mode 100644 doc/release-notes/9589-ds-configure-tool.md diff --git a/doc/release-notes/9589-ds-configure-tool.md b/doc/release-notes/9589-ds-configure-tool.md new file mode 100644 index 00000000000..70ac5fcaa6a --- /dev/null +++ b/doc/release-notes/9589-ds-configure-tool.md @@ -0,0 +1 @@ +Configure tools are now available at the dataset level. They appear under the "Edit Dataset" menu. See also #9589. diff --git a/doc/sphinx-guides/source/admin/external-tools.rst b/doc/sphinx-guides/source/admin/external-tools.rst index 67075e986bb..68b5f493d49 100644 --- a/doc/sphinx-guides/source/admin/external-tools.rst +++ b/doc/sphinx-guides/source/admin/external-tools.rst @@ -115,7 +115,7 @@ Dataset level explore tools allow the user to explore all the files in a dataset Dataset Level Configure Tools +++++++++++++++++++++++++++++ -Configure tools at the dataset level are not currently supported. +Dataset level configure tools can be launched by users who have edit access to the dataset and are found under the "Edit Dataset" menu. Writing Your Own External Tool ------------------------------ diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index 05affaf975e..ed68bb09ee0 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -40,7 +40,7 @@ How External Tools Are Presented to Users An external tool can appear in your Dataverse installation in a variety of ways: - as an explore, preview, query or configure option for a file -- as an explore option for a dataset +- as an explore or configure option for a dataset - as an embedded preview on the file landing page See also the :ref:`testing-external-tools` section of the Admin Guide for some perspective on how Dataverse installations will expect to test your tool before announcing it to their users. @@ -92,7 +92,7 @@ Terminology scope Whether the external tool appears and operates at the **file** level or the **dataset** level. Note that a file level tool much also specify the type of file it operates on (see "contentType" below). - types Whether the external tool is an **explore** tool, a **preview** tool, a **query** tool, a **configure** tool or any combination of these (multiple types are supported for a single tool). Configure tools require an API token because they make changes to data files (files within datasets). Configure tools are currently not supported at the dataset level. 
The older "type" keyword that allows you to pass a single type as a string is deprecated but still supported. + types Whether the external tool is an **explore** tool, a **preview** tool, a **query** tool, a **configure** tool or any combination of these (multiple types are supported for a single tool). Configure tools require an API token because they make changes to data files (files within datasets). The older "type" keyword that allows you to pass a single type as a string is deprecated but still supported. toolUrl The **base URL** of the tool before query parameters are added. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index d20175b6e1a..78ccb5542ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -391,6 +391,7 @@ public void setShowIngestSuccess(boolean showIngestSuccess) { Map> fileQueryToolsByFileId = new HashMap<>(); List fileQueryTools = new ArrayList<>(); private List datasetExploreTools; + private List datasetConfigureTools; public Boolean isHasRsyncScript() { return hasRsyncScript; @@ -2153,6 +2154,7 @@ private String init(boolean initFull) { previewTools = externalToolService.findFileToolsByType(ExternalTool.Type.PREVIEW); fileQueryTools = externalToolService.findFileToolsByType(ExternalTool.Type.QUERY); datasetExploreTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.EXPLORE); + datasetConfigureTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.CONFIGURE); rowsPerPage = 10; if (dataset.getId() != null && canUpdateDataset()) { hasRestrictedFiles = workingVersion.isHasRestrictedFile(); @@ -5572,6 +5574,10 @@ public List getDatasetExploreTools() { return datasetExploreTools; } + public List getDatasetConfigureTools() { + return datasetConfigureTools; + } + Boolean thisLatestReleasedVersion = null; public boolean isThisLatestReleasedVersion() { @@ -5789,6 +5795,16 @@ public void explore(ExternalTool externalTool) { PrimeFaces.current().executeScript(externalToolHandler.getExploreScript()); } + public void configure(ExternalTool externalTool) { + ApiToken apiToken = null; + User user = session.getUser(); + if (user instanceof AuthenticatedUser) { + apiToken = authService.findApiTokenByUser((AuthenticatedUser) user); + } + ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataset, apiToken, session.getLocaleCode()); + PrimeFaces.current().executeScript(externalToolHandler.getConfigureScript()); + } + private FileMetadata fileMetadataForAction; public FileMetadata getFileMetadataForAction() { diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index a52679deebc..de4317464e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -253,4 +253,11 @@ public String getExploreScript() { logger.fine("Exploring with " + toolUrl); return getScriptForUrl(toolUrl); } + + // TODO: Consider merging with getExploreScript + public String getConfigureScript() { + String toolUrl = this.getToolUrlWithQueryParams(); + logger.fine("Configuring with " + toolUrl); + return getScriptForUrl(toolUrl); + } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 
997f0470cc3..837d8b2f7c1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1367,6 +1367,7 @@ dataset.pageTitle=Add New Dataset dataset.accessBtn=Access Dataset dataset.accessBtn.header.download=Download Options dataset.accessBtn.header.explore=Explore Options +dataset.accessBtn.header.configure=Configure Options dataset.accessBtn.header.compute=Compute Options dataset.accessBtn.download.size=ZIP ({0}) dataset.accessBtn.too.big=The dataset is too large to download. Please select the files you need from the files table. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 67dcf89c380..55bf113abec 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -453,6 +453,17 @@ + + + + +
[dataset.xhtml markup lost in rendering: the eleven added lines insert a new "Configure Options" section for dataset-level configure tools into the Edit Dataset menu; a hedged reconstruction follows]
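<!-- Hypothetical reconstruction of the lost hunk above, inferred from the
     DatasetPage.java and Bundle.properties changes in this commit: the fragment
     plausibly mirrors the existing dataset-level explore section. Element and
     attribute names below are assumptions, not the verbatim commit content. -->
<ui:fragment rendered="#{!empty DatasetPage.datasetConfigureTools}">
    <li role="presentation" class="dropdown-header">
        #{bundle['dataset.accessBtn.header.configure']}
    </li>
    <ui:repeat var="tool" value="#{DatasetPage.datasetConfigureTools}">
        <li>
            <!-- configure(tool) builds the tool URL and runs getConfigureScript() -->
            <p:commandLink action="#{DatasetPage.configure(tool)}">
                #{tool.displayName}
            </p:commandLink>
        </li>
    </ui:repeat>
</ui:fragment>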
  • diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 6f414fb3e24..a9f6055fc9e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; @@ -235,6 +236,84 @@ public void testDatasetLevelTool1() { } + @Test + public void testDatasetLevelToolConfigure() { + + // Delete all external tools before testing. + Response getTools = UtilIT.getExternalTools(); + getTools.prettyPrint(); + getTools.then().assertThat() + .statusCode(OK.getStatusCode()); + String body = getTools.getBody().asString(); + JsonReader bodyObject = Json.createReader(new StringReader(body)); + JsonArray tools = bodyObject.readObject().getJsonArray("data"); + for (int i = 0; i < tools.size(); i++) { + JsonObject tool = tools.getJsonObject(i); + int id = tool.getInt("id"); + Response deleteExternalTool = UtilIT.deleteExternalTool(id); + deleteExternalTool.prettyPrint(); + } + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + createUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = JsonPath.from(createDataset.getBody().asString()).getInt("data.id"); + String datasetPid = JsonPath.from(createDataset.getBody().asString()).getString("data.persistentId"); + + String toolManifest = """ +{ + "displayName": "Turbo Dataset Config", + "description": "Read/write access.", + "types": [ + "configure" + ], + "scope": "dataset", + "toolUrl": "http://datasettool1.com", + "toolParameters": { + "queryParameters": [ + { + "datasetPid": "{datasetPid}" + }, + { + "localeCode": "{localeCode}" + } + ] + } + } +"""; + + Response addExternalTool = UtilIT.addExternalTool(JsonUtil.getJsonObject(toolManifest)); + addExternalTool.prettyPrint(); + addExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.displayName", CoreMatchers.equalTo("Turbo Dataset Config")); + + Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "configure", apiToken); + getExternalToolsByDatasetId.prettyPrint(); + getExternalToolsByDatasetId.then().assertThat() + .body("data[0].displayName", CoreMatchers.equalTo("Turbo Dataset Config")) + .body("data[0].scope", CoreMatchers.equalTo("dataset")) + .body("data[0].types[0]", CoreMatchers.equalTo("configure")) + .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("http://datasettool1.com?datasetPid=" + datasetPid)) + .statusCode(OK.getStatusCode()); + + } + @Test public void testAddFilelToolNoFileId() throws IOException { JsonObjectBuilder job = Json.createObjectBuilder(); diff --git 
a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index 39bf96210fc..ad2a24ecdb8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -1,10 +1,12 @@ package edu.harvard.iq.dataverse.externaltools; +import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.settings.JvmSettings; @@ -15,6 +17,7 @@ import jakarta.json.Json; import jakarta.json.JsonObject; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -234,4 +237,43 @@ public void testGetToolUrlWithAllowedApiCalls() { assertTrue(signedUrl.contains("&token=")); System.out.println(JsonUtil.prettyPrint(jo)); } + + @Test + @JvmSetting(key = JvmSettings.SITE_URL, value = "https://librascholar.org") + public void testDatasetConfigureTool() { + List externalToolTypes = new ArrayList<>(); + var externalToolType = new ExternalToolType(); + externalToolType.setType(ExternalTool.Type.CONFIGURE); + externalToolTypes.add(externalToolType); + var scope = ExternalTool.Scope.DATASET; + String toolUrl = "http://example.com"; + var externalTool = new ExternalTool("displayName", "toolName", "description", externalToolTypes, scope, toolUrl, "{}", DataFileServiceBean.MIME_TYPE_TSV_ALT); + + externalTool.setToolParameters(Json.createObjectBuilder() + .add("queryParameters", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("siteUrl", "{siteUrl}") + ) + .add(Json.createObjectBuilder() + .add("datasetPid", "{datasetPid}") + ) + .add(Json.createObjectBuilder() + .add("localeCode", "{localeCode}") + ) + ) + .build().toString()); + + var dataset = new Dataset(); + dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.5072", "ABC123", null, DOIServiceBean.DOI_RESOLVER_URL, null)); + ApiToken nullApiToken = null; + String nullLocaleCode = "en"; + var externalToolHandler = new ExternalToolHandler(externalTool, dataset, nullApiToken, nullLocaleCode); + System.out.println("tool: " + externalToolHandler.getToolUrlWithQueryParams()); + assertEquals("http://example.com?siteUrl=https://librascholar.org&datasetPid=doi:10.5072/ABC123&localeCode=en", externalToolHandler.getToolUrlWithQueryParams()); + assertFalse(externalToolHandler.getExternalTool().isExploreTool()); + assertEquals("configure", externalToolHandler.getExternalTool().getExternalToolTypes().get(0).getType().toString()); + assertEquals("dataset", externalToolHandler.getExternalTool().getScope().toString()); + + } + } From 129985535d825ceb501cad899c6ba57771d0eee1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Sat, 16 Sep 2023 16:31:08 +0100 Subject: [PATCH 0436/1092] Stash: deaccessionDataset API endpoint WIP --- .../harvard/iq/dataverse/api/Datasets.java | 33 +++++++++++++++++-- ...tLatestPublishedDatasetVersionCommand.java | 5 
+-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 10 ++++++ 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 5064579ebfb..48d84ba95d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -29,6 +29,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand; @@ -525,9 +526,9 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); @@ -3922,4 +3923,32 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, return response(req -> ok( getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc)); } + + @PUT + @AuthRequired + @Path("{id}/versions/{versionId}/deaccession") + public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (":draft".equals(versionId) || ":latest".equals(versionId)) { + return badRequest("Only :latest-published or a specific version can be deaccessioned"); + } + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); + try (StringReader stringReader = new StringReader(jsonBody)) { + JsonObject jsonObject = Json.createReader(stringReader).readObject(); + datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason")); + String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null); + if (deaccessionForwardURL != null) { + try { + datasetVersion.setArchiveNote(deaccessionForwardURL); + } catch 
(IllegalArgumentException iae) { + return error(Response.Status.BAD_REQUEST, "Invalid deaccession forward URL: " + iae.getMessage()); + } + } + execCommand(new DeaccessionDatasetVersionCommand(dvRequestService.getDataverseRequest(), datasetVersion, false)); + return ok("Dataset " + datasetId + " deaccessioned for version " + versionId); + } catch (JsonParsingException jpe) { + return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index 9765d0945d8..4e4252fd155 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -15,7 +16,7 @@ @RequiredPermissions({}) public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand { private final Dataset ds; - private boolean includeDeaccessioned; + private final boolean includeDeaccessioned; public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { this(aRequest, anAffectedDataset, false); @@ -30,7 +31,7 @@ public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Datase @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { for (DatasetVersion dsv : ds.getVersions()) { - if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { return dsv; } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index d243d3c47f2..e32a813a4d3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3386,4 +3386,14 @@ static Response getHasBeenDeleted(String dataFileId, String apiToken) { .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/files/" + dataFileId + "/hasBeenDeleted"); } + + static Response deaccessionDataset(Integer datasetId, String version, String apiToken) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("deaccessionReason", "Test deaccession."); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); + } } From ae16dadddd7978dae23dd62671c05433db2f9300 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 18 Sep 2023 10:13:52 -0400 Subject: [PATCH 0437/1092] minor cleanup --- .../iq/dataverse/globus/GlobusServiceBean.java | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 56219f843a7..9aae4dffc03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -130,7 +130,7 @@ ArrayList checkPermisions(AccessToken clientTokenUser, String directory, return ids; } - +/* public void updatePermision(AccessToken clientTokenUser, String directory, String principalType, String perm) throws MalformedURLException { if (directory != null && !directory.equals("")) { @@ -163,8 +163,8 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin count++; } } - - public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException { +*/ + public void deletePermission(String ruleId, Logger globusLogger) throws MalformedURLException { if (ruleId.length() > 0) { AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); @@ -554,7 +554,9 @@ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, DataFile df) + rawStorageId + "&fileName=" + df.getCurrentName(); } } - return tokenUtil.replaceTokensWithValues(appUrl) + "&storePrefix=" + storePrefix; + String finalUrl = tokenUtil.replaceTokensWithValues(appUrl) + "&storePrefix=" + storePrefix; + logger.info("Calling app: " + finalUrl); + return finalUrl; } public String getGlobusDownloadScript(Dataset dataset, ApiToken apiToken) { @@ -624,7 +626,7 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin String taskStatus = getTaskStatus(task); if (ruleId.length() > 0) { - deletePermision(ruleId, globusLogger); + deletePermission(ruleId, globusLogger); } // If success, switch to an EditInProgress lock - do this before removing the @@ -897,7 +899,7 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro String taskStatus = getTaskStatus(task); if (ruleId.length() > 0) { - deletePermision(ruleId, globusLogger); + deletePermission(ruleId, globusLogger); } if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) { From 9562b788b7dfbfec53d6d7e9aeb52e690cddddf4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 18 Sep 2023 10:14:43 -0400 Subject: [PATCH 0438/1092] start allowupload method, fix messaging when disabled --- .../harvard/iq/dataverse/api/Datasets.java | 63 ++++++++++++++++++- src/main/java/propertyFiles/Bundle.properties | 3 + 2 files changed, 64 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 599890913fd..a999a71b2d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3393,6 +3393,65 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, } + /** Requests permissions for a given globus user to upload to the dataset + * + * @param crc + * @param datasetId + * @param jsonData + * @return + * @throws IOException + * @throws ExecutionException + * @throws InterruptedException + */ + @POST + @AuthRequired + @Path("{id}/allowGlobusUpload") + @Consumes(MediaType.APPLICATION_JSON) + public Response allowGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, String jsonBody + ) throws IOException, ExecutionException, InterruptedException { + + + logger.info(" ==== (api allowGlobusUpload) jsonBody ====== " + jsonBody); + + + 
if (!systemConfig.isGlobusUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled")); + } + + // ------------------------------------- + // (1) Get the user from the ContainerRequestContext + // ------------------------------------- + User authUser; + authUser = getRequestUser(crc); + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + // Async Call + globusService.givePermission(jsonBody, jsonBody, jsonBody, null, datasetId, jsonBody).globusDownload(jsonData, dataset, authUser); + + return ok("Async call to Globus Download started"); + + } + + /** Monitors a globus download and removes permissions on the dir/dataset when done + * + * @param crc + * @param datasetId + * @param jsonData + * @return + * @throws IOException + * @throws ExecutionException + * @throws InterruptedException + */ @POST @AuthRequired @Path("{id}/deleteglobusRule") @@ -3404,8 +3463,8 @@ public Response deleteglobusRule(@Context ContainerRequestContext crc, @PathPara logger.info(" ==== (api deleteglobusRule) jsonData ====== " + jsonData); - if (!systemConfig.isHTTPUpload()) { - return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + if (!systemConfig.isGlobusDownload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled")); } // ------------------------------------- diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 997f0470cc3..0343e109e61 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2623,6 +2623,9 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status +datasets.api.globusdownloaddisabled=File transfer from Dataverse via Globus is not available for this installation of Dataverse. +datasets.api.globusuploaddisabled=File transfer to Dataverse via Globus is not available for this installation of Dataverse. 
+ #Dataverses.java From f5aa17a635fb085f8c42ac31f3c73660ea5c1e5e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 18 Sep 2023 10:15:44 -0400 Subject: [PATCH 0439/1092] fix formatting --- .../java/edu/harvard/iq/dataverse/util/bagit/OREMap.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 1ea1a5411fa..aa653a6e360 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -327,11 +327,7 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) { aggBuilder.add(JsonLDTerm.ore("aggregates").getLabel(), aggResArrayBuilder.build()) .add(JsonLDTerm.schemaOrg("hasPart").getLabel(), fileArray.build()).build()) // and finally add the context - .add("@context", contextBuilder.build()) - ; - - - + .add("@context", contextBuilder.build()); return oremapBuilder; } } From 258023e4fd75850da3dc6a7160c26ef8711da158 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 18 Sep 2023 10:34:11 -0400 Subject: [PATCH 0440/1092] use cog icon to match file level #9589 --- src/main/webapp/dataset.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 55bf113abec..b76b33a267c 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -454,7 +454,7 @@
[dataset.xhtml markup lost in rendering: the one-line change swaps the dataset-level configure menu icon for the cog icon already used at the file level]
  • From c6197b3bf23ad1dccb023ea668799e7a79805d93 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Mon, 18 Sep 2023 10:40:05 -0400 Subject: [PATCH 0441/1092] #9920 support Postgres 16 --- pom.xml | 4 ++-- scripts/installer/install.py | 6 +++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 7ba22d2a076..c5b7fc302f3 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,7 @@ war 1.2.18.4 - 9.21.2 + 9.22.1 1.20.1 0.8.7 5.2.1 @@ -790,7 +790,7 @@ true docker-build - 13 + 16 gdcc/dataverse:${app.image.tag} unstable diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 5a7b9f75696..18995695638 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -422,9 +422,13 @@ conn.close() if int(pg_major_version) >= 15: + admin_conn_string = "dbname='"+pgDb+"' user='postgres' password='"+pgAdminPassword+"' host='"+pgHost+"'" + conn = psycopg2.connect(admin_conn_string) + conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) + cur = conn.cursor() conn_cmd = "GRANT CREATE ON SCHEMA public TO "+pgUser+";" - print("PostgreSQL 15 or higher detected. Running " + conn_cmd) try: + print("PostgreSQL 15 or higher detected. Running " + conn_cmd) cur.execute(conn_cmd) except: if force: From bf42a926c3fa72ffbc2ffbef2de5d9e253cb8511 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 18 Sep 2023 18:14:55 +0200 Subject: [PATCH 0442/1092] reverted out-of-band setting in S3AccessIO --- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 12069cac650..5f7643b3115 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1834,8 +1834,7 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); - boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); + boolean directEnabled = Boolean.getBoolean("dataverse.files." + driverId + ".upload-redirect"); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); From d8ea581360b3ab4c61bbf99e5e7607396fbe99bb Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 18 Sep 2023 18:30:52 +0200 Subject: [PATCH 0443/1092] add-file-metadata-api made literal as the ref does not exist --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index eeb87cc9ec9..8e87fb23102 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -511,7 +511,7 @@ A Dataverse installation may also be configured to reference some files (e.g. la A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. By default, Dataverse support uploading files via the :ref:`add-file-api`. 
With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). -With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the :ref:`add-file-metadata-api` call used to add metadata and inform Dataverse that a new file has been added to the relevant store. +With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the add-file-metadata-api call used to add metadata and inform Dataverse that a new file has been added to the relevant store. The following sections describe how to set up various types of stores and how to configure for multiple stores. From f1df8290df9452a307f4bc8941affb10bafb87e8 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Mon, 18 Sep 2023 15:37:25 -0400 Subject: [PATCH 0444/1092] #9760 add UBC Dataverse_Utils to client libraries page --- doc/sphinx-guides/source/api/client-libraries.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index 62069f62c23..b79996d3df4 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -50,6 +50,8 @@ Python There are multiple Python modules for interacting with Dataverse APIs. +`UBC's Dataverse Utilities `_ are a set of Python console utilities which allow one to upload datasets from a tab-separated-value spreadsheet, bulk release multiple datasets, bulk delete unpublished datasets, quickly duplicate records. replace licenses, and more. For additional information see their `PyPi page `_. + `EasyDataverse `_ is a Python library designed to simplify the management of Dataverse datasets in an object-oriented way, giving users the ability to upload, download, and update datasets with ease. By utilizing metadata block configurations, EasyDataverse automatically generates Python objects that contain all the necessary details required to create the native Dataverse JSON format used to create or edit datasets. Adding files and directories is also possible with EasyDataverse and requires no additional API calls. This library is particularly well-suited for client applications such as workflows and scripts as it minimizes technical complexities and facilitates swift development. `pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_. From 7cd11f3547099c382e2eaf01f10e1e00398ec588 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 18 Sep 2023 15:45:45 -0400 Subject: [PATCH 0445/1092] Update config.rst Update reference --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 8e87fb23102..8c9829e0b81 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -511,7 +511,7 @@ A Dataverse installation may also be configured to reference some files (e.g. 
la A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. By default, Dataverse support uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). -With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the add-file-metadata-api call used to add metadata and inform Dataverse that a new file has been added to the relevant store. +With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the :ref:`Adding the Uploaded file to the Dataset ` API call (described in the :doc:`/developers/s3-direct-upload-api` page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. The following sections describe how to set up various types of stores and how to configure for multiple stores. From 81be260e14f18d27091f2ea9e35bb20be2405989 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 18 Sep 2023 15:46:31 -0400 Subject: [PATCH 0446/1092] typo --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 8c9829e0b81..c3607bd837a 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -510,7 +510,7 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -By default, Dataverse support uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). +By default, Dataverse supports uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the :ref:`Adding the Uploaded file to the Dataset ` API call (described in the :doc:`/developers/s3-direct-upload-api` page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. The following sections describe how to set up various types of stores and how to configure for multiple stores. 
From a1a1233d2dbaa09b30e306960187bf3b29fc1337 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Mon, 18 Sep 2023 15:46:43 -0400 Subject: [PATCH 0447/1092] #9760 move UBC beneath pyDataverse per pdurbin --- doc/sphinx-guides/source/api/client-libraries.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index b79996d3df4..a25efe3a5f8 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -50,12 +50,12 @@ Python There are multiple Python modules for interacting with Dataverse APIs. -`UBC's Dataverse Utilities `_ are a set of Python console utilities which allow one to upload datasets from a tab-separated-value spreadsheet, bulk release multiple datasets, bulk delete unpublished datasets, quickly duplicate records. replace licenses, and more. For additional information see their `PyPi page `_. - `EasyDataverse `_ is a Python library designed to simplify the management of Dataverse datasets in an object-oriented way, giving users the ability to upload, download, and update datasets with ease. By utilizing metadata block configurations, EasyDataverse automatically generates Python objects that contain all the necessary details required to create the native Dataverse JSON format used to create or edit datasets. Adding files and directories is also possible with EasyDataverse and requires no additional API calls. This library is particularly well-suited for client applications such as workflows and scripts as it minimizes technical complexities and facilitates swift development. `pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_. +`UBC's Dataverse Utilities `_ are a set of Python console utilities which allow one to upload datasets from a tab-separated-value spreadsheet, bulk release multiple datasets, bulk delete unpublished datasets, quickly duplicate records. replace licenses, and more. For additional information see their `PyPi page `_. + `dataverse-client-python `_ had its initial release in 2015. `Robert Liebowitz `_ created this library while at the `Center for Open Science (COS) `_ and the COS uses it to integrate the `Open Science Framework (OSF) `_ with Dataverse installations via an add-on which itself is open source and listed on the :doc:`/api/apps` page. `Pooch `_ is a Python library that allows library and application developers to download data. Among other features, it takes care of various protocols, caching in OS-specific locations, checksum verification and adds optional features like progress bars or log messages. Among other popular repositories, Pooch supports Dataverse in the sense that you can reference Dataverse-hosted datasets by just a DOI and Pooch will determine the data repository type, query the Dataverse API for contained files and checksums, giving you an easy interface to download them. 
From 467eb51166fe742f3506bef706f821a139e4c6b3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 18 Sep 2023 21:46:45 +0200 Subject: [PATCH 0448/1092] feat(ct): add labels with service dependency versions to images #9928 --- src/main/docker/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 88020a118b5..aa39078fb06 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -51,4 +51,6 @@ LABEL org.opencontainers.image.created="@git.build.time@" \ org.opencontainers.image.vendor="Global Dataverse Community Consortium" \ org.opencontainers.image.licenses="Apache-2.0" \ org.opencontainers.image.title="Dataverse Application Image" \ - org.opencontainers.image.description="This container image provides the research data repository software Dataverse in a box." \ No newline at end of file + org.opencontainers.image.description="This container image provides the research data repository software Dataverse in a box." \ + org.dataverse.deps.postgresql.version="@postgresql.server.version@" \ + org.dataverse.deps.solr.version="@solr.version@" \ No newline at end of file From 6b3c583c6c2b730bb5ab892010da37e2889d5bbe Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 18 Sep 2023 21:55:47 -0400 Subject: [PATCH 0449/1092] use a ref #9689 --- doc/sphinx-guides/source/api/dataaccess.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 7b1feed7814..6a726cdc7b9 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -83,7 +83,7 @@ Basic access URI: ``/api/access/datafile/$id`` -.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. For further information, refer to `FilePIDsEnabled `_ +.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. For further information, refer to :ref:`:FilePIDsEnabled`. 
Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* :: From 30d1bffd126a2465117bccff1d4e97269c5500e0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 19 Sep 2023 16:22:11 +0200 Subject: [PATCH 0450/1092] feat(ct): make configbaker able to expose data from bootstrap process via env file #9933 --- .../scripts/bootstrap.sh | 30 +++++++++++++++---- 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/modules/container-configbaker/scripts/bootstrap.sh b/modules/container-configbaker/scripts/bootstrap.sh index 1aa9e232953..d09c1be5414 100644 --- a/modules/container-configbaker/scripts/bootstrap.sh +++ b/modules/container-configbaker/scripts/bootstrap.sh @@ -5,16 +5,17 @@ set -euo pipefail function usage() { - echo "Usage: $(basename "$0") [-h] [-u instanceUrl] [-t timeout] [<persona>]" + echo "Usage: $(basename "$0") [-h] [-u instanceUrl] [-t timeout] [-e targetEnvFile] [<persona>]" echo "" echo "Execute initial configuration (bootstrapping) of an empty Dataverse instance." echo -n "Known personas: " find "${BOOTSTRAP_DIR}" -mindepth 1 -maxdepth 1 -type d -exec basename {} \; | paste -sd ' ' echo "" echo "Parameters:" - echo "instanceUrl - Location on container network where to reach your instance. Default: 'http://dataverse:8080'" - echo " timeout - Provide how long to wait for the instance to become available (using wait4x). Default: '2m'" - echo " persona - Configure persona to execute. Calls ${BOOTSTRAP_DIR}/<persona>/init.sh. Default: 'base'" + echo " instanceUrl - Location on container network where to reach your instance. Default: 'http://dataverse:8080'" + echo " timeout - Provide how long to wait for the instance to become available (using wait4x). Default: '2m'" + echo "targetEnvFile - Path to a file where the bootstrap process can expose information as env vars (e.g. dataverseAdmin's API token)" + echo " persona - Configure persona to execute. Calls ${BOOTSTRAP_DIR}/<persona>/init.sh. Default: 'base'" echo "" echo "Note: This script will wait for the Dataverse instance to be available before executing the bootstrapping." echo " It also checks if already bootstrapped before (availability of metadata blocks) and skip if true." @@ -25,12 +26,14 @@ function usage() { # Set some defaults as documented DATAVERSE_URL=${DATAVERSE_URL:-"http://dataverse:8080"} TIMEOUT=${TIMEOUT:-"2m"} +TARGET_ENV_FILE=${TARGET_ENV_FILE:-""} -while getopts "u:t:h" OPTION +while getopts "u:t:e:h" OPTION do case "$OPTION" in u) DATAVERSE_URL="$OPTARG" ;; t) TIMEOUT="$OPTARG" ;; + e) TARGET_ENV_FILE="$OPTARG" ;; h) usage;; \?) usage;; esac done @@ -54,6 +57,21 @@ if [[ $BLOCK_COUNT -gt 0 ]]; then exit 0 fi +# Provide a space to store environment variables output to +ENV_OUT=$(mktemp) +export ENV_OUT + # Now execute the bootstrapping script echo "Now executing bootstrapping script at ${BOOTSTRAP_DIR}/${PERSONA}/init.sh."
-exec "${BOOTSTRAP_DIR}/${PERSONA}/init.sh" +# shellcheck disable=SC1090 +source "${BOOTSTRAP_DIR}/${PERSONA}/init.sh" + +# If the env file option was given, check if the file is writeable and copy content from the temporary file +if [[ -n "${TARGET_ENV_FILE}" ]]; then + if [[ -f "${TARGET_ENV_FILE}" && -w "${TARGET_ENV_FILE}" ]]; then + cat "${ENV_OUT}" > "${TARGET_ENV_FILE}" + else + echo "File ${TARGET_ENV_FILE} not found, is a directory or not writeable" + exit 2 + fi +fi From 9d76d13546fca546ce06049576254da77a5b7cab Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 19 Sep 2023 16:23:13 +0200 Subject: [PATCH 0451/1092] feat(ct): make configbaker bootstrap dev persona output api token #9933 --- modules/container-configbaker/scripts/bootstrap/dev/init.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/container-configbaker/scripts/bootstrap/dev/init.sh b/modules/container-configbaker/scripts/bootstrap/dev/init.sh index 1042478963d..efdaee3d0c3 100644 --- a/modules/container-configbaker/scripts/bootstrap/dev/init.sh +++ b/modules/container-configbaker/scripts/bootstrap/dev/init.sh @@ -17,6 +17,8 @@ curl "${DATAVERSE_URL}/api/admin/settings/:DoiProvider" -X PUT -d FAKE API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq ".data.apiToken" | tr -d \") export API_TOKEN +# ${ENV_OUT} comes from bootstrap.sh and will expose the saved information back to the host if enabled. +echo "API_TOKEN=${API_TOKEN}" >> "${ENV_OUT}" echo "Publishing root dataverse..." curl -H "X-Dataverse-key:$API_TOKEN" -X POST "${DATAVERSE_URL}/api/dataverses/:root/actions/:publish" From 3d1521fdb4a1cfc9b89325e9f4085e73bc3f6435 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 19 Sep 2023 14:22:15 -0400 Subject: [PATCH 0452/1092] #9931 document harvesting server IT config --- doc/sphinx-guides/source/developers/testing.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index acaeccf4f23..81e820fb869 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -225,6 +225,20 @@ If ``dataverse.siteUrl`` is absent, you can add it with: ``./asadmin create-jvm-options "-Ddataverse.siteUrl=http\://localhost\:8080"`` +dataverse.oai.server.maxidentifiers +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The OAI Harvesting tests require that the paging limit for ListIdentifiers must be set to 2, in order to be able to trigger this paging behavior without having to create and export too many datasets: + +``./asadmin create-jvm-options "-Ddataverse.oai.server.maxidentifiers=2"`` + +dataverse.oai.server.maxrecords +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The OAI Harvesting tests require that the paging limit for ListRecords must be set to 2, in order to be able to trigger this paging behavior without having to create and export too many datasets: + +``./asadmin create-jvm-options "-Ddataverse.oai.server.maxrecords=2"`` + Identifier Generation ^^^^^^^^^^^^^^^^^^^^^ From 116845c753a8364d14bad2edafcebf6a0e28dde6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Sep 2023 15:09:11 -0400 Subject: [PATCH 0453/1092] refactoring, add allowUpload api call --- .../harvard/iq/dataverse/api/Datasets.java | 7 +- .../dataaccess/GlobusOverlayAccessIO.java | 2 +- .../iq/dataverse/globus/GlobusEndpoint.java | 31 ++++++ .../dataverse/globus/GlobusServiceBean.java | 104 ++++++++++++------ 4 files changed, 109 insertions(+), 35 deletions(-) create mode 100644 
src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a999a71b2d4..745f294fee6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3434,11 +3434,14 @@ public Response allowGlobusUpload(@Context ContainerRequestContext crc, @PathPar } catch (WrappedResponse wr) { return wr.getResponse(); } + + JsonObject params = JsonUtil.getJsonObject(jsonBody); + String principal = params.getString("principal"); // Async Call - globusService.givePermission(jsonBody, jsonBody, jsonBody, null, datasetId, jsonBody).globusDownload(jsonData, dataset, authUser); + globusService.givePermission("identity", principal, "rw", dataset); - return ok("Async call to Globus Download started"); + return ok("Permission Granted"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index b18e6bb7e76..965dc3c0947 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -214,7 +214,7 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary } } - private static boolean isDataverseManaged(String driverId) { + public static boolean isDataverseManaged(String driverId) { return Boolean.getBoolean("dataverse.files." + driverId + ".managed"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java new file mode 100644 index 00000000000..d1e5d19a592 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java @@ -0,0 +1,31 @@ +package edu.harvard.iq.dataverse.globus; + +public class GlobusEndpoint { + + private String id; + private String clientToken; + private String basePath; + + + public GlobusEndpoint(String id, String clientToken, String basePath) { + this.id = id; this.clientToken = clientToken; this.basePath = basePath; + } + public String getId() { + return id; + } + public void setId(String id) { + this.id = id; + } + public String getClientToken() { + return clientToken; + } + public void setClientToken(String clientToken) { + this.clientToken = clientToken; + } + public String getBasePath() { + return basePath; + } + public void setBasePath(String basePath) { + this.basePath = basePath; + } +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 9aae4dffc03..910ee796e0e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -46,6 +46,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; @@ -106,23 +107,23 @@ public void setUserTransferToken(String userTransferToken) { this.userTransferToken = userTransferToken; } - ArrayList checkPermisions(AccessToken clientTokenUser, String directory, String
globusEndpoint, - String principalType, String principal) throws MalformedURLException { - URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access_list"); + private ArrayList checkPermissions(GlobusEndpoint endpoint, String principalType, String principal) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access_list"); MakeRequestResponse result = makeRequest(url, "Bearer", - clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); + endpoint.getClientToken(), "GET", null); ArrayList ids = new ArrayList(); if (result.status == 200) { AccessList al = parseJson(result.jsonResponse, AccessList.class, false); for (int i = 0; i < al.getDATA().size(); i++) { Permissions pr = al.getDATA().get(i); - if ((pr.getPath().equals(directory + "/") || pr.getPath().equals(directory)) + if ((pr.getPath().equals(endpoint.getBasePath() + "/") || pr.getPath().equals(endpoint.getBasePath())) && pr.getPrincipalType().equals(principalType) && ((principal == null) || (principal != null && pr.getPrincipal().equals(principal)))) { ids.add(pr.getId()); } else { - logger.info(pr.getPath() + " === " + directory + " == " + pr.getPrincipalType()); + logger.info(pr.getPath() + " === " + endpoint.getBasePath() + " == " + pr.getPrincipalType()); continue; } } @@ -185,24 +186,24 @@ public void deletePermission(String ruleId, Logger globusLogger) throws Malforme } - public int givePermission(String principalType, String principal, String perm, AccessToken clientTokenUser, - String directory, String globusEndpoint) throws MalformedURLException { + public int givePermission(String principalType, String principal, String perm, Dataset dataset) throws MalformedURLException { - ArrayList rules = checkPermisions(clientTokenUser, directory, globusEndpoint, principalType, principal); + GlobusEndpoint endpoint = getGlobusEndpoint(dataset); + ArrayList rules = checkPermissions(endpoint, principalType, principal); Permissions permissions = new Permissions(); permissions.setDATA_TYPE("access"); permissions.setPrincipalType(principalType); permissions.setPrincipal(principal); - permissions.setPath(directory + "/"); + permissions.setPath(endpoint.getBasePath() + "/"); permissions.setPermissions(perm); Gson gson = new GsonBuilder().create(); MakeRequestResponse result = null; if (rules.size() == 0) { logger.info("Start creating the rule"); - URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access"); - result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), "POST", + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access"); + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "POST", gson.toJson(permissions)); if (result.status == 400) { @@ -214,9 +215,9 @@ public int givePermission(String principalType, String principal, String perm, A return result.status; } else { logger.info("Start Updating the rule"); - URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access/" + rules.get(0)); - result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), "PUT", + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "PUT", gson.toJson(permissions)); if (result.status == 400) { 
@@ -438,36 +439,25 @@ static class MakeRequestResponse { } - private MakeRequestResponse findDirectory(String directory, AccessToken clientTokenUser, String globusEndpoint) + private MakeRequestResponse findDirectory(String directory, String clientToken, String globusEndpoint) throws MalformedURLException { URL url = new URL(" https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/ls?path=" + directory + "/"); MakeRequestResponse result = makeRequest(url, "Bearer", - clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); + clientToken, "GET", null); logger.info("find directory status:" + result.status); return result; } - public boolean giveGlobusPublicPermissions(String datasetId) + public boolean giveGlobusPublicPermissions(Dataset dataset) throws UnsupportedEncodingException, MalformedURLException { - String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); - String globusBasicToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""); - if (globusEndpoint.equals("") || globusBasicToken.equals("")) { - return false; - } - AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "")); - if (clientTokenUser == null) { - logger.severe("Cannot get client token "); - return false; - } + GlobusEndpoint endpoint = getGlobusEndpoint(dataset); - String directory = getDirectory(datasetId); - logger.info(directory); - MakeRequestResponse status = findDirectory(directory, clientTokenUser, globusEndpoint); + MakeRequestResponse status = findDirectory(endpoint.getBasePath(), endpoint.getClientToken(), endpoint.getId()); if (status.status == 200) { @@ -485,8 +475,7 @@ public boolean giveGlobusPublicPermissions(String datasetId) * 201) { logger.info("Cannot get permission for " + file.getName()); } } } } */ - int perStatus = givePermission("all_authenticated_users", "", "r", clientTokenUser, directory, - globusEndpoint); + int perStatus = givePermission("all_authenticated_users", "", "r", dataset); logger.info("givePermission status " + perStatus); if (perStatus == 409) { logger.info("Permissions already exist or limit was reached"); @@ -1287,4 +1276,55 @@ public String calculatemime(String fileName) throws InterruptedException { * updatePermision(clientTokenUser, directory, "identity", "r"); return true; } * */ + + GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { + Dataset dataset = null; + if (dvObject instanceof Dataset) { + dataset = (Dataset) dvObject; + } else if (dvObject instanceof DataFile) { + dataset = (Dataset) dvObject.getOwner(); + } else { + throw new IllegalArgumentException("Unsupported DvObject type: " + dvObject.getClass().getName()); + } + String driverId = dataset.getEffectiveStorageDriverId(); + GlobusEndpoint endpoint = null; + String baseUrl = System.getProperty("dataverse.files." + driverId + ".base-url"); + + String endpointWithBasePath = baseUrl.substring(baseUrl.lastIndexOf("://") + 3); + int pathStart = endpointWithBasePath.indexOf("/"); + logger.info("endpointWithBasePath: " + endpointWithBasePath); + String directoryPath = "/" + (pathStart > 0 ? 
endpointWithBasePath.substring(pathStart + 1) : ""); + logger.info("directoryPath: " + directoryPath); + + if (GlobusOverlayAccessIO.isDataverseManaged(driverId) && (dataset!=null)) { + directoryPath = directoryPath + "/" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); + logger.info("directoryPath now: " + directoryPath); + + } else { + //remote store - may have path in file storageidentifier + String relPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); + int filenameStart = relPath.lastIndexOf("/") + 1; + if (filenameStart > 0) { + directoryPath = directoryPath + relPath.substring(0, filenameStart); + } + } + logger.info("directoryPath finally: " + directoryPath); + + String endpointId = pathStart > 0 ? endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath; + + logger.info("endpointId: " + endpointId); + + String globusToken = System.getProperty("dataverse.files." + driverId + ".globus-token"); + + AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken); + String clientToken = accessToken.getOtherTokens().get(0).getAccessToken(); + + endpoint = new GlobusEndpoint(endpointId, clientToken, directoryPath); + + return endpoint; + } + + private static boolean isDataverseManaged(String driverId) { + return Boolean.getBoolean("dataverse.files." + driverId + ".managed"); + } } From e313c7cf74974de1b126ff1b13b2caf02a482fa8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Sep 2023 15:18:24 -0400 Subject: [PATCH 0454/1092] typo --- doc/release-notes/9599-guestbook-at-request.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/9599-guestbook-at-request.md b/doc/release-notes/9599-guestbook-at-request.md index 72255150f37..e9554b71fb4 100644 --- a/doc/release-notes/9599-guestbook-at-request.md +++ b/doc/release-notes/9599-guestbook-at-request.md @@ -1,2 +1,2 @@ Dataverse can now be configured (via the dataverse.files.guestbook-at-request option) to display any configured guestbook to users when they request restricted file(s) or when they download files (the historic default). -The global default defined by this setting can be overridden at the collection level on the collection page and at the inidividual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. +The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default - showing guestbooks when files are downloaded - remains as it was in prior Dataverse versions. 
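For orientation, the dataset-level override mentioned in the release note above is exposed through the guestbookEntryAtRequest endpoint that appears in the commits that follow; a rough sketch, with the server URL and ids as placeholders and the exact endpoint path treated as an assumption:

    # Instance-wide default: show guestbooks when access to restricted files is requested
    ./asadmin create-jvm-options "-Ddataverse.files.guestbook-at-request=true"

    # Superuser override for one dataset (hypothetical id values);
    # sending true shows the guestbook at request time, false at download time
    curl -H "X-Dataverse-key: $SUPERUSER_API_TOKEN" -X PUT \
      "$SERVER_URL/api/datasets/$DATASET_ID/guestbookEntryAtRequest" -d true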
From 750069b5ac39b0f71f14507c7770f09f36ed3af2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Sep 2023 15:41:12 -0400 Subject: [PATCH 0455/1092] more String->Boolean changes --- .../java/edu/harvard/iq/dataverse/DataversePage.java | 2 +- .../edu/harvard/iq/dataverse/DvObjectContainer.java | 10 ++-------- .../java/edu/harvard/iq/dataverse/SettingsWrapper.java | 10 +++++----- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 7 +++---- 4 files changed, 11 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 943a74327d5..4ce7042d421 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -1286,7 +1286,7 @@ public String getCurationLabelSetNameLabel() { return setName; } - public Set<Entry<String, String>> getGuestbookEntryOptions() { + public Set<Entry<Boolean, String>> getGuestbookEntryOptions() { return settingsWrapper.getGuestbookEntryOptions(this.dataverse).entrySet(); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index da77df786d8..7c1e096dc45 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -120,15 +120,9 @@ public Boolean getGuestbookEntryAtRequest() { return guestbookAtRequest; } - public void setGuestbookEntryAtRequest(String gbAtRequest) { - if (gbAtRequest != null && gbAtRequest.equals(UNDEFINED_CODE)) { - this.guestbookAtRequest = null; - } else { - //Force to true or false - this.guestbookAtRequest = Boolean.valueOf(Boolean.parseBoolean(gbAtRequest)); - } + public void setGuestbookEntryAtRequest(Boolean gbAtRequest) { + this.guestbookAtRequest = gbAtRequest; } - /* Dataverse collections can be configured to allow use of Curation labels and have this inheritable value to decide which set of labels to use. * This mechanism is similar to that for the storageDriver except that there is an addition option to disable use of labels. diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index e41a18c1431..aa5680b4e14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -632,8 +632,8 @@ public String getDefaultMetadataLanguage() { } } - public Map<String, String> getGuestbookEntryOptions(DvObjectContainer target) { - Map<String, String> currentMap = new HashMap<String, String>(); + public Map<Boolean, String> getGuestbookEntryOptions(DvObjectContainer target) { + Map<Boolean, String> currentMap = new HashMap<Boolean, String>(); String atDownload = BundleUtil.getStringFromBundle("dataverse.guestbookentry.atdownload"); String atRequest = BundleUtil.getStringFromBundle("dataverse.guestbookentry.atrequest"); Optional<Boolean> gbDefault = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class); @@ -649,9 +649,9 @@ public Map<String, String> getGuestbookEntryOptions(DvObjectContainer target) { useDefault = (defaultOption ?
atRequest : atDownload) + BundleUtil.getStringFromBundle("dataverse.inherited"); } - currentMap.put("null", useDefault); - currentMap.put(Boolean.toString(true), atRequest); - currentMap.put(Boolean.toString(false), atDownload); + currentMap.put(null, useDefault); + currentMap.put(Boolean.TRUE, atRequest); + currentMap.put(Boolean.FALSE, atDownload); } else { // Setting not defined - leave empty } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a1191a6f4e6..07ac287ddb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3924,10 +3924,9 @@ public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, if (!gbAtRequestOpt.isPresent()) { return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it."); } - String choice = Boolean.valueOf(gbAtRequest).toString(); - dataset.setGuestbookEntryAtRequest(choice); + dataset.setGuestbookEntryAtRequest(gbAtRequest); datasetService.merge(dataset); - return ok("Guestbook Entry At Request set to: " + choice); + return ok("Guestbook Entry At Request set to: " + gbAtRequest); } @DELETE @@ -3955,7 +3954,7 @@ public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext cr return error(Response.Status.NOT_FOUND, "No such dataset"); } - dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE); + dataset.setGuestbookEntryAtRequest(null); datasetService.merge(dataset); return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } From 2e2fb380aad84d92253064ab3c58c0293b7d6c8b Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 20 Sep 2023 05:53:29 +0100 Subject: [PATCH 0456/1092] Added: getFileDataTables endpoint permission checks for restricted and embargoed files --- .../edu/harvard/iq/dataverse/api/Files.java | 25 ++++++++++++++----- .../edu/harvard/iq/dataverse/api/FilesIT.java | 18 +++++++++++-- 2 files changed, 35 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 4c411a631f1..fec60f10f3f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.api.auth.AuthRequired; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -79,6 +80,8 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonDT; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; + import jakarta.ws.rs.core.UriInfo; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; @@ -840,12 +843,22 @@ public Response getFileDownloadCount(@Context ContainerRequestContext crc, @Path @AuthRequired @Path("{id}/dataTables") public Response getFileDataTables(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - return response(req -> { - DataFile dataFile = 
execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - if (!dataFile.isTabularData()) { - return error(BAD_REQUEST, "This operation is only available for tabular files."); + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse e) { + return error(Response.Status.NOT_FOUND, "File not found for given id."); + } + if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { + DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); + boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); + if (!hasPermissionToDownloadFile) { + return error(FORBIDDEN, "Insufficient permissions to access the requested information."); } - return ok(jsonDT(dataFile.getDataTables())); - }, getRequestUser(crc)); + } + if (!dataFile.isTabularData()) { + return error(BAD_REQUEST, "This operation is only available for tabular files."); + } + return ok(jsonDT(dataFile.getDataTables())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index de91e5644cf..0a16bca7008 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2190,11 +2190,25 @@ public void testGetFileDataTables() throws InterruptedException { // Ensure tabular file is ingested sleep(2000); + String testTabularFileId = Integer.toString(JsonPath.from(uploadTabularFileResponse.body().asString()).getInt("data.files[0].dataFile.id")); + // Get file data tables for the tabular file and assert data is obtained - int testTabularFileId = JsonPath.from(uploadTabularFileResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Response getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(Integer.toString(testTabularFileId), apiToken); + Response getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, apiToken); getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); int dataTablesNumber = JsonPath.from(getFileDataTablesForTabularFileResponse.body().asString()).getList("data").size(); assertTrue(dataTablesNumber > 0); + + // Get file data tables for a restricted tabular file as the owner and assert data is obtained + Response restrictFileResponse = UtilIT.restrictFile(testTabularFileId, true, apiToken); + restrictFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, apiToken); + getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data tables for a restricted tabular file as other user and assert forbidden error is thrown + Response createRandomUser = UtilIT.createRandomUser(); + createRandomUser.then().assertThat().statusCode(OK.getStatusCode()); + String randomUserApiToken = UtilIT.getApiTokenFromResponse(createRandomUser); + getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); + getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } } From bbfdff391f63cc412e59734b53f0992a937a594a Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 20 Sep 2023 09:01:08 +0100 Subject: [PATCH 0457/1092] Added: deaccessionDataset API endpoint (pending IT) --- .../harvard/iq/dataverse/api/Datasets.java | 2 +- 
.../harvard/iq/dataverse/api/DatasetsIT.java | 83 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 18 +++- 3 files changed, 82 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 48d84ba95d7..b7d09cd5d98 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3944,7 +3944,7 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa return error(Response.Status.BAD_REQUEST, "Invalid deaccession forward URL: " + iae.getMessage()); } } - execCommand(new DeaccessionDatasetVersionCommand(dvRequestService.getDataverseRequest(), datasetVersion, false)); + execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false)); return ok("Dataset " + datasetId + " deaccessioned for version " + versionId); } catch (JsonParsingException jpe) { return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6f103df3fe8..1b77e6c09e5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3299,7 +3299,7 @@ public void getVersionFiles() throws IOException { int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, apiToken); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3313,7 +3313,7 @@ public void getVersionFiles() throws IOException { String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3324,7 +3324,7 @@ public void getVersionFiles() throws IOException { assertEquals(testPageSize, fileMetadatasCount); // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3334,7 +3334,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, 
testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), false, apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3345,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), false, apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3356,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), false, apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3367,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), false, apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3378,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), false, apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3390,13 +3390,13 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, invalidOrderCriteria, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, invalidOrderCriteria, 
false, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); // Test Content Type - Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, apiToken); + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, false, apiToken); getVersionFilesResponseContentType.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3412,7 +3412,7 @@ public void getVersionFiles() throws IOException { setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, apiToken); + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, false, apiToken); getVersionFilesResponseCategoryName.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3427,7 @@ public void getVersionFiles() throws IOException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3452,7 @@ public void getVersionFiles() throws IOException { createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3461,7 +3461,7 @@ public void getVersionFiles() throws IOException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) 
@@ -3471,7 +3471,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, false, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3483,7 +3483,7 @@ public void getVersionFiles() throws IOException { assertEquals(3, fileMetadatasCount); // Test Search Text - Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, "test_1", null, apiToken); + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, "test_1", null, false, apiToken); getVersionFilesResponseSearchText.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3491,6 +3491,33 @@ public void getVersionFiles() throws IOException { fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String latestPublishedVersion = ":latest-published"; + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, false, apiToken); + getVersionFilesResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, true, apiToken); + getVersionFilesResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + getVersionFilesResponseDeaccessioned.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName4)) + .body("data[4].label", equalTo(testFileName5)); } @Test @@ -3533,7 +3560,7 @@ public void getVersionFileCounts() throws IOException { createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3548,5 +3575,27 @@ public void getVersionFileCounts() throws IOException 
{ assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String latestPublishedVersion = ":latest-published"; + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, false, apiToken); + getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, true, apiToken); + getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); + assertEquals(4, (Integer) responseJsonPath.get("data.total")); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e32a813a4d3..086fef5f18a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3276,10 +3276,21 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String contentType, String accessStatus, String categoryName, String searchText, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, + String version, + Integer limit, + Integer offset, + String contentType, + String accessStatus, + String categoryName, + String searchText, + String orderCriteria, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .contentType("application/json"); + .contentType("application/json") + .queryParam("includeDeaccessioned", includeDeaccessioned); if (limit != null) { requestSpecification = requestSpecification.queryParam("limit", limit); } @@ -3355,9 +3366,10 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { + static Response getVersionFileCounts(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } From b19fb8267d08978b530d3be19cec7edddd72b566 Mon Sep 17 00:00:00 2001 From: GPortas 
Date: Wed, 20 Sep 2023 09:21:53 +0100 Subject: [PATCH 0458/1092] Added: deaccessionDataset API endpoint IT --- .../harvard/iq/dataverse/api/DatasetsIT.java | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 1b77e6c09e5..7c0099ef34c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3598,4 +3598,41 @@ public void getVersionFileCounts() throws IOException { responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); assertEquals(4, (Integer) responseJsonPath.get("data.total")); } + + @Test + public void deaccessionDataset() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Test that :draft and :latest are not allowed + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":draft", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + // Test that a not found error occurs when there is no published version available + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Test that the dataset is successfully deaccessioned when published + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that a not found error occurs when the only published version has already been deaccessioned + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } } From b6ce32b030dded2e2dd3ebf8d2e3b8b65583ea12 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 20 Sep 2023 10:02:07 +0100 Subject: [PATCH 0459/1092] Refactor: dataset version string identifiers extracted to constants --- .../iq/dataverse/api/ApiConstants.java | 5 ++ .../harvard/iq/dataverse/api/Datasets.java | 32 ++++----- .../iq/dataverse/dataset/DatasetUtil.java | 6 +- .../externaltools/ExternalToolHandler.java | 4 +- .../harvard/iq/dataverse/util/FileUtil.java | 
4 +- .../iq/dataverse/util/URLTokenUtil.java | 5 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 71 +++++++++---------- .../iq/dataverse/api/DownloadFilesIT.java | 9 +-- .../edu/harvard/iq/dataverse/api/FilesIT.java | 12 ++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 9 +-- 10 files changed, 84 insertions(+), 73 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java index 296869762da..347a8946a46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java @@ -12,4 +12,9 @@ private ApiConstants() { // Authentication public static final String CONTAINER_REQUEST_CONTEXT_USER = "user"; + + // Dataset + public static final String DS_VERSION_LATEST = ":latest"; + public static final String DS_VERSION_DRAFT = ":draft"; + public static final String DS_VERSION_LATEST_PUBLISHED = ":latest-published"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b7d09cd5d98..62d87b198fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -98,6 +98,7 @@ import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.search.IndexServiceBean; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -391,8 +392,8 @@ public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam( @AuthRequired @Path("{id}/versions/{versionId}") public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId ){ - if ( ! ":draft".equals(versionId) ) { - return badRequest("Only the :draft version can be deleted"); + if (!DS_VERSION_DRAFT.equals(versionId)) { + return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted"); } return response( req -> { @@ -545,7 +546,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, @Path public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { folderName = folderName == null ? "" : folderName; - versionId = versionId == null ? ":latest-published" : versionId; + versionId = versionId == null ? 
DS_VERSION_LATEST_PUBLISHED : versionId; DatasetVersion version; try { @@ -620,8 +621,8 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, @AuthRequired @Path("{id}/versions/{versionId}/linkset") public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if ( ":draft".equals(versionId) ) { - return badRequest("Signposting is not supported on the :draft version"); + if (DS_VERSION_DRAFT.equals(versionId)) { + return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version"); } User user = getRequestUser(crc); return response(req -> { @@ -706,10 +707,9 @@ public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc @AuthRequired @Path("{id}/versions/{versionId}") @Consumes(MediaType.APPLICATION_JSON) - public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId){ - - if ( ! ":draft".equals(versionId) ) { - return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); + public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) { + if (!DS_VERSION_DRAFT.equals(versionId)) { + return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated"); } try ( StringReader rdr = new StringReader(jsonBody) ) { @@ -792,7 +792,7 @@ public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @ @Path("{id}/metadata") @Produces("application/ld+json, application/json-ld") public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return getVersionJsonLDMetadata(crc, id, ":draft", uriInfo, headers); + return getVersionJsonLDMetadata(crc, id, DS_VERSION_DRAFT, uriInfo, headers); } @PUT @@ -1726,7 +1726,7 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio return error(Status.NOT_FOUND, "This Dataset has no custom license"); } persistentId = getRequestParameter(":persistentId".substring(1)); - if (versionId.equals(":draft")) { + if (versionId.equals(DS_VERSION_DRAFT)) { versionId = "DRAFT"; } } catch (WrappedResponse wrappedResponse) { @@ -2687,11 +2687,11 @@ private void msgt(String m) { public static T handleVersion(String versionId, DsVersionHandler hdl) throws WrappedResponse { switch (versionId) { - case ":latest": + case DS_VERSION_LATEST: return hdl.handleLatest(); - case ":draft": + case DS_VERSION_DRAFT: return hdl.handleDraft(); - case ":latest-published": + case DS_VERSION_LATEST_PUBLISHED: return hdl.handleLatestPublished(); default: try { @@ -3928,8 +3928,8 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, @AuthRequired @Path("{id}/versions/{versionId}/deaccession") public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if (":draft".equals(versionId) || ":latest".equals(versionId)) { - return badRequest("Only :latest-published or a specific version can be deaccessioned"); + if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) { + return 
badRequest("Only " + DS_VERSION_LATEST_PUBLISHED + " or a specific version can be deaccessioned"); } return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index adbd132bce8..ac1567b24e5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -580,10 +582,10 @@ public static String getLicenseURI(DatasetVersion dsv) { // Return the URI // For standard licenses, just return the stored URI return (license != null) ? license.getUri().toString() - // For custom terms, construct a URI with :draft or the version number in the URI + // For custom terms, construct a URI with draft version constant or the version number in the URI : (dsv.getVersionState().name().equals("DRAFT") ? dsv.getDataverseSiteUrl() - + "/api/datasets/:persistentId/versions/:draft/customlicense?persistentId=" + + "/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "/customlicense?persistentId=" + dsv.getDataset().getGlobalId().asString() : dsv.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" + dsv.getVersionNumber() + "." + dsv.getMinorVersionNumber() + "/customlicense?persistentId=" diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index a52679deebc..570ef7d4194 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -34,6 +34,8 @@ import org.apache.commons.codec.binary.StringUtils; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST; + /** * Handles an operation on a specific file. Requires a file id in order to be * instantiated. 
Applies logic based on an {@link ExternalTool} specification, @@ -110,7 +112,7 @@ public String handleRequest(boolean preview) { switch (externalTool.getScope()) { case DATASET: callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" - + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + + dataset.getId() + "/versions/" + DS_VERSION_LATEST + "/toolparams/" + externalTool.getId(); break; case FILE: callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 5f7643b3115..327609d5e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -34,6 +34,8 @@ import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.ingest.IngestReport; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; @@ -2152,7 +2154,7 @@ private static String getFileAccessUrl(FileMetadata fileMetadata, String apiLoca private static String getFolderAccessUrl(DatasetVersion version, String currentFolder, String subFolder, String apiLocation, boolean originals) { String datasetId = version.getDataset().getId().toString(); String versionTag = version.getFriendlyVersionNumber(); - versionTag = versionTag.replace("DRAFT", ":draft"); + versionTag = versionTag.replace("DRAFT", DS_VERSION_DRAFT); if (!"".equals(currentFolder)) { subFolder = currentFolder + "/" + subFolder; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index 4ae76a7b8db..c864823176e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -14,6 +14,8 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; + public class URLTokenUtil { protected static final Logger logger = Logger.getLogger(URLTokenUtil.class.getCanonicalName()); @@ -177,8 +179,7 @@ private String getTokenValue(String value) { } } if (("DRAFT").equals(versionString)) { - versionString = ":draft"; // send the token needed in api calls that can be substituted for a numeric - // version. + versionString = DS_VERSION_DRAFT; // send the token needed in api calls that can be substituted for a numeric version. 
} return versionString; case FILE_METADATA_ID: diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 7c0099ef34c..5c1eb66b63d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; import io.restassured.RestAssured; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; @@ -500,7 +501,7 @@ public void testCreatePublishDestroyDataset() { assertTrue(datasetContactFromExport.toString().contains("finch@mailinator.com")); assertTrue(firstValue.toString().contains("finch@mailinator.com")); - Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, ":latest-published", apiToken); + Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken); getDatasetVersion.prettyPrint(); getDatasetVersion.then().assertThat() .body("data.datasetId", equalTo(datasetId)) @@ -1159,7 +1160,7 @@ public void testPrivateUrl() { assertEquals(OK.getStatusCode(), createPrivateUrlForPostVersionOneDraft.getStatusCode()); // A Contributor has DeleteDatasetDraft - Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, ":draft", contributorApiToken); + Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, DS_VERSION_DRAFT, contributorApiToken); deleteDraftVersionAsContributor.prettyPrint(); deleteDraftVersionAsContributor.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3257,7 +3258,7 @@ public void getDatasetVersionCitation() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, ":draft", apiToken); + Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, apiToken); getDatasetVersionCitationResponse.prettyPrint(); getDatasetVersionCitationResponse.then().assertThat() @@ -3293,13 +3294,11 @@ public void getVersionFiles() throws IOException { UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName5, new byte[300], apiToken); UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[400], apiToken); - String testDatasetVersion = ":latest"; - // Test pagination and NameAZ order criteria (the default criteria) int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, false, apiToken); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, null, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3313,7 +3312,7 @@ public void getVersionFiles() throws IOException { String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, false, apiToken); + 
getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3324,7 +3323,7 @@ public void getVersionFiles() throws IOException { assertEquals(testPageSize, fileMetadatasCount); // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, false, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize * 2, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3334,7 +3333,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), false, apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), false, apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3344,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), false, apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), false, apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3355,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), false, apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), false, apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3366,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), false, apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), false, apiToken); 
getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3377,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), false, apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), false, apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3390,13 +3389,13 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); // Test Content Type - Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, false, apiToken); + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, "image/png", null, null, null, null, false, apiToken); getVersionFilesResponseContentType.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3412,7 +3411,7 @@ public void getVersionFiles() throws IOException { setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, false, apiToken); + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, testCategory, null, null, false, apiToken); getVersionFilesResponseCategoryName.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3426,7 @@ public void getVersionFiles() throws IOException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3451,7 @@ public void getVersionFiles() throws IOException { createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = 
UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3461,7 +3460,7 @@ public void getVersionFiles() throws IOException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, false, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3471,7 +3470,7 @@ public void getVersionFiles() throws IOException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, false, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, false, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3483,7 +3482,7 @@ public void getVersionFiles() throws IOException { assertEquals(3, fileMetadatasCount); // Test Search Text - Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, "test_1", null, false, apiToken); + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, "test_1", null, false, apiToken); getVersionFilesResponseSearchText.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3498,17 +3497,15 @@ public void getVersionFiles() throws IOException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - String latestPublishedVersion = ":latest-published"; - - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // includeDeaccessioned false - Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, false, apiToken); + Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, false, apiToken); 
getVersionFilesResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // includeDeaccessioned true - Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, latestPublishedVersion, null, null, null, null, null, null, null, true, apiToken); + Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, true, apiToken); getVersionFilesResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); getVersionFilesResponseDeaccessioned.then().assertThat() @@ -3560,7 +3557,7 @@ public void getVersionFileCounts() throws IOException { createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", false, apiToken); + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3582,17 +3579,15 @@ public void getVersionFileCounts() throws IOException { Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - String latestPublishedVersion = ":latest-published"; - - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, latestPublishedVersion, apiToken); + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // includeDeaccessioned false - Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, false, apiToken); + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // includeDeaccessioned true - Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, latestPublishedVersion, true, apiToken); + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, true, apiToken); getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); @@ -3613,14 +3608,14 @@ public void deaccessionDataset() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - // Test that :draft and :latest are not allowed - Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":draft", apiToken); + // Test that draft and latest version constants are not allowed + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_DRAFT, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); // Test that 
a not found error occurs when there is no published version available - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); // Test that the dataset is successfully deaccessioned when published @@ -3628,11 +3623,11 @@ public void deaccessionDataset() { publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); // Test that a not found error occurs when the only published version has already been deaccessioned - deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, ":latest-published", apiToken); + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java index 598ba36c1e1..927efb0b142 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java @@ -16,6 +16,9 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST_PUBLISHED; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static jakarta.ws.rs.core.Response.Status.OK; @@ -188,8 +191,7 @@ public void downloadAllFilesByVersion() throws IOException { HashSet expectedFiles6 = new HashSet<>(Arrays.asList("CODE_OF_CONDUCT.md", "LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md")); assertEquals(expectedFiles6, filenamesFound6); - String datasetVersionLatestPublished = ":latest-published"; - Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, datasetVersionLatestPublished, apiToken); + Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken); downloadFiles9.then().assertThat() .statusCode(OK.getStatusCode()); @@ -200,8 +202,7 @@ public void downloadAllFilesByVersion() throws IOException { assertEquals(expectedFiles7, filenamesFound7); // Guests cannot download draft versions. 
- String datasetVersionDraft = ":draft"; - Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, datasetVersionDraft, null); + Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, DS_VERSION_DRAFT, null); downloadFiles10.prettyPrint(); downloadFiles10.then().assertThat() .statusCode(UNAUTHORIZED.getStatusCode()) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 7f1ca4c8d70..94e895a7b7b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -10,6 +10,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; import io.restassured.path.json.JsonPath; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static io.restassured.path.json.JsonPath.with; import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1354,7 +1356,7 @@ public void testDataSizeInDataverse() throws InterruptedException { .statusCode(OK.getStatusCode()); String apiTokenRando = createUserGetToken(); - Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), ":draft", apiTokenRando); + Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), DS_VERSION_DRAFT, apiTokenRando); datasetStorageSizeResponseDraft.prettyPrint(); assertEquals(UNAUTHORIZED.getStatusCode(), datasetStorageSizeResponseDraft.getStatusCode()); Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); @@ -1607,7 +1609,7 @@ public void test_CrawlableAccessToDatasetFiles() { // Expected values in the output: String expectedTitleTopFolder = "Index of folder /"; String expectedLinkTopFolder = folderName + "/"; - String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=:draft&folder=subfolder"; + String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=" + DS_VERSION_DRAFT + "&folder=subfolder"; String expectedTitleSubFolder = "Index of folder /" + folderName; String expectedLinkAhrefSubFolder = "/api/access/datafile/" + folderName + "/" + dataFileId; @@ -1987,7 +1989,7 @@ public void testDeleteFile() { deleteResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 2 deleted from post v1.0 draft - Response postv1draft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft.prettyPrint(); postv1draft.then().assertThat() .body("data.files.size()", equalTo(1)) @@ -2009,7 +2011,7 @@ public void testDeleteFile() { downloadResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 still in post v1.0 draft - Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) @@ -2024,7 +2026,7 @@ public void testDeleteFile() { deleteResponse3.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 deleted from post v1.0 draft - Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft3.prettyPrint(); 
postv1draft3.then().assertThat() .body("data.files[0]", equalTo(null)) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 086fef5f18a..8c6a2d6e75d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -38,6 +38,7 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.path.xml.XmlPath.from; import static io.restassured.RestAssured.given; import edu.harvard.iq.dataverse.DatasetField; @@ -515,7 +516,7 @@ static Response updateDatasetMetadataViaNative(String persistentId, String pathT .header(API_TOKEN_HTTP_HEADER, apiToken) .body(jsonIn) .contentType("application/json") - .put("/api/datasets/:persistentId/versions/:draft?persistentId=" + persistentId); + .put("/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "?persistentId=" + persistentId); return response; } @@ -791,7 +792,7 @@ static Response deleteAuxFile(Long fileId, String formatTag, String formatVersio static Response getCrawlableFileAccess(String datasetId, String folderName, String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken); - String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=:draft"; + String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=" + DS_VERSION_DRAFT; if (StringUtil.nonEmpty(folderName)) { apiPath = apiPath.concat("&folder="+folderName); } @@ -1407,7 +1408,7 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/:persistentId/versions/:latest-published/metadata/citation?persistentId=" + persistentId); + .get("/api/datasets/:persistentId/versions/" + DS_VERSION_LATEST_PUBLISHED + "/metadata/citation?persistentId=" + persistentId); } static Response makeSuperUser(String username) { @@ -2922,7 +2923,7 @@ static Response findDatasetStorageSize(String datasetId, String apiToken) { static Response findDatasetDownloadSize(String datasetId) { return given() - .get("/api/datasets/" + datasetId + "/versions/:latest/downloadsize"); + .get("/api/datasets/" + datasetId + "/versions/" + DS_VERSION_LATEST + "/downloadsize"); } static Response findDatasetDownloadSize(String datasetId, String version, String apiToken) { From c0dacb50fb117f01639b22bae6b404c6cc71596b Mon Sep 17 00:00:00 2001 From: Ludovic DANIEL Date: Wed, 20 Sep 2023 16:59:25 +0200 Subject: [PATCH 0460/1092] #9940 - fixed various issues with generated urls of authors for signposting --- .../dataverse/util/SignpostingResources.java | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 2c9b7167059..19e1c1298ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -19,6 +19,8 @@ Two configurable options allow changing the limit for the number of authors or d import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; +import 
org.apache.commons.validator.routines.UrlValidator; + import java.util.ArrayList; import java.util.LinkedList; import java.util.List; @@ -164,12 +166,11 @@ private List getAuthorURLs(boolean limit) { for (DatasetAuthor da : workingDatasetVersion.getDatasetAuthors()) { logger.fine(String.format("idtype: %s; idvalue: %s, affiliation: %s; identifierUrl: %s", da.getIdType(), da.getIdValue(), da.getAffiliation(), da.getIdentifierAsUrl())); - String authorURL = ""; - authorURL = getAuthorUrl(da); + String authorURL = getAuthorUrl(da); if (authorURL != null && !authorURL.isBlank()) { // return empty if number of visible author more than max allowed // >= since we're comparing before incrementing visibleAuthorCounter - if (visibleAuthorCounter >= maxAuthors) { + if (limit && visibleAuthorCounter >= maxAuthors) { authorURLs.clear(); break; } @@ -211,15 +212,22 @@ private String getAuthorsAsString(List datasetAuthorURLs) { * */ private String getAuthorUrl(DatasetAuthor da) { - String authorURL = ""; - //If no type and there's a value, assume it is a URL (is this reasonable?) - //Otherise, get the URL using the type and value - if (da.getIdType() != null && !da.getIdType().isBlank() && da.getIdValue()!=null) { - authorURL = da.getIdValue(); - } else { - authorURL = da.getIdentifierAsUrl(); + + final String identifierAsUrl = da.getIdentifierAsUrl(); + // First, try to get URL using the type and value + if(identifierAsUrl != null) { + return identifierAsUrl; } - return authorURL; + + final String idValue = da.getIdValue(); + UrlValidator urlValidator = new UrlValidator(new String[]{"http", "https"}); + // Otherwise, try to use idValue as url if it's valid + if(urlValidator.isValid(idValue)) { + return idValue; + } + + // No url found + return null; } private JsonArrayBuilder getJsonAuthors(List datasetAuthorURLs) { From 35a5278402a13075017c3c98cea0a17d7f2da167 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 20 Sep 2023 12:22:59 -0400 Subject: [PATCH 0461/1092] cleanup --- .../edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../iq/dataverse/ManagePermissionsPage.java | 45 ------------------- 2 files changed, 1 insertion(+), 46 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index fcad6ae081a..5ce6207250d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3280,7 +3280,7 @@ private boolean filterSelectedFiles(){ } return someFiles; } -//QDRADA - still needed? 
+ public void validateFilesForRequestAccess(){ this.filterSelectedFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 9bb00bbb6d5..0e277c5aa32 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -401,51 +401,6 @@ public List completeRoleAssignee( String query ) { return roleAssigneeService.filterRoleAssignees(query, dvObject, roleAssignSelectedRoleAssignees); } - public void grantAccess(ActionEvent evt) { - //QDRADA - logger.info("grantAccess Called"); - try { - throw new Exception("grantAccessCalled"); - } catch (Exception e) { - e.printStackTrace(); - } - /* - // Find the built in file downloader role (currently by alias) - DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER); - for (RoleAssignee roleAssignee : selectedRoleAssignees) { - boolean sendNotification = false; - for (DataFile file : selectedFiles) { - if (assignRole(roleAssignee, file, fileDownloaderRole)) { - if (file.isReleased()) { - sendNotification = true; - } - // remove request, if it exist - for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { - if (file.getFileAccessRequesters().remove(au)) { - List fileAccessRequests = fileAccessRequestService.findAllByAuthenticatedUserIdAndRequestState(au.getId(), FileAccessRequest.RequestState.CREATED); - for(FileAccessRequest far : fileAccessRequests){ - far.setStateGranted(); - fileAccessRequestService.save(far); - } - file.setFileAccessRequests(fileAccessRequests); - datafileService.save(file); - } - } - } - - } - - if (sendNotification) { - for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { - userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId()); - } - } - } - - initMaps(); - */ - } - public List getAvailableRoles() { List roles = new LinkedList<>(); if (dvObject != null && dvObject.getId() != null) { From d06259f00b04965571cd4965d34fccd9f0067249 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 20 Sep 2023 14:10:41 -0400 Subject: [PATCH 0462/1092] Revert "more String->Boolean changes" This reverts commit 750069b5ac39b0f71f14507c7770f09f36ed3af2. 
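
The String-based handling being restored encodes a three-state choice (inherit / true / false) in a single value. A minimal sketch of the round-trip, assuming an illustrative UNDEFINED_CODE of "undefined" (the real constant is declared on DvObjectContainer and its value is not shown in these hunks):

    // Hypothetical, self-contained illustration; names mirror DvObjectContainer,
    // but the sentinel value is an assumption.
    public class TriStateOptionSketch {
        static final String UNDEFINED_CODE = "undefined"; // assumed value

        private Boolean guestbookAtRequest; // null means "inherit the default"

        public String get() {
            // null maps to the sentinel so a UI select can bind a single String value
            return guestbookAtRequest == null ? UNDEFINED_CODE : guestbookAtRequest.toString();
        }

        public void set(String code) {
            // the sentinel (or null) clears the override; anything else is forced to true/false
            guestbookAtRequest = (code == null || code.equals(UNDEFINED_CODE))
                    ? null
                    : Boolean.parseBoolean(code);
        }
    }

This is also why the SettingsWrapper map below keys the guestbook-entry options on the same three string codes rather than on Booleans.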
--- .../java/edu/harvard/iq/dataverse/DataversePage.java | 2 +- .../edu/harvard/iq/dataverse/DvObjectContainer.java | 10 ++++++++-- .../java/edu/harvard/iq/dataverse/SettingsWrapper.java | 10 +++++----- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 7 ++++--- 4 files changed, 18 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 4ce7042d421..943a74327d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -1286,7 +1286,7 @@ public String getCurationLabelSetNameLabel() { return setName; } - public Set> getGuestbookEntryOptions() { + public Set> getGuestbookEntryOptions() { return settingsWrapper.getGuestbookEntryOptions(this.dataverse).entrySet(); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index 7c1e096dc45..da77df786d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -120,9 +120,15 @@ public Boolean getGuestbookEntryAtRequest() { return guestbookAtRequest; } - public void setGuestbookEntryAtRequest(Boolean gbAtRequest) { - this.guestbookAtRequest = gbAtRequest; + public void setGuestbookEntryAtRequest(String gbAtRequest) { + if (gbAtRequest != null && gbAtRequest.equals(UNDEFINED_CODE)) { + this.guestbookAtRequest = null; + } else { + //Force to true or false + this.guestbookAtRequest = Boolean.valueOf(Boolean.parseBoolean(gbAtRequest)); + } } + /* Dataverse collections can be configured to allow use of Curation labels and have this inheritable value to decide which set of labels to use. * This mechanism is similar to that for the storageDriver except that there is an addition option to disable use of labels. diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index aa5680b4e14..e41a18c1431 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -632,8 +632,8 @@ public String getDefaultMetadataLanguage() { } } - public Map getGuestbookEntryOptions(DvObjectContainer target) { - Map currentMap = new HashMap(); + public Map getGuestbookEntryOptions(DvObjectContainer target) { + Map currentMap = new HashMap(); String atDownload = BundleUtil.getStringFromBundle("dataverse.guestbookentry.atdownload"); String atRequest = BundleUtil.getStringFromBundle("dataverse.guestbookentry.atrequest"); Optional gbDefault = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class); @@ -649,9 +649,9 @@ public Map getGuestbookEntryOptions(DvObjectContainer target) { useDefault = (defaultOption ? 
atRequest : atDownload) + BundleUtil.getStringFromBundle("dataverse.inherited"); } - currentMap.put(null, useDefault); - currentMap.put(Boolean.TRUE, atRequest); - currentMap.put(Boolean.FALSE, atDownload); + currentMap.put("null", useDefault); + currentMap.put(Boolean.toString(true), atRequest); + currentMap.put(Boolean.toString(false), atDownload); } else { // Setting not defined - leave empty } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 07ac287ddb3..a1191a6f4e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3924,9 +3924,10 @@ public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, if (!gbAtRequestOpt.isPresent()) { return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it."); } - dataset.setGuestbookEntryAtRequest(gbAtRequest); + String choice = Boolean.valueOf(gbAtRequest).toString(); + dataset.setGuestbookEntryAtRequest(choice); datasetService.merge(dataset); - return ok("Guestbook Entry At Request set to: " + gbAtRequest); + return ok("Guestbook Entry At Request set to: " + choice); } @DELETE @@ -3954,7 +3955,7 @@ public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext cr return error(Response.Status.NOT_FOUND, "No such dataset"); } - dataset.setGuestbookEntryAtRequest(null); + dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE); datasetService.merge(dataset); return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } From 3ad53d7c0b73260122ec5deecf072edc456d2a8e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 20 Sep 2023 14:54:43 -0400 Subject: [PATCH 0463/1092] fix Dataverse UI ability to set GB at request param --- .../java/edu/harvard/iq/dataverse/DvObjectContainer.java | 9 ++++++--- .../java/edu/harvard/iq/dataverse/SettingsWrapper.java | 2 +- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java index da77df786d8..f7d361d76f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java @@ -116,12 +116,15 @@ public boolean getEffectiveGuestbookEntryAtRequest() { return gbAtRequest; } - public Boolean getGuestbookEntryAtRequest() { - return guestbookAtRequest; + public String getGuestbookEntryAtRequest() { + if(guestbookAtRequest==null) { + return UNDEFINED_CODE; + } + return Boolean.valueOf(guestbookAtRequest).toString(); } public void setGuestbookEntryAtRequest(String gbAtRequest) { - if (gbAtRequest != null && gbAtRequest.equals(UNDEFINED_CODE)) { + if (gbAtRequest == null || gbAtRequest.equals(UNDEFINED_CODE)) { this.guestbookAtRequest = null; } else { //Force to true or false diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index e41a18c1431..0a1d0effc03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -649,7 +649,7 @@ public Map getGuestbookEntryOptions(DvObjectContainer target) { useDefault = (defaultOption ? 
atRequest : atDownload) + BundleUtil.getStringFromBundle("dataverse.inherited"); } - currentMap.put("null", useDefault); + currentMap.put(DvObjectContainer.UNDEFINED_CODE, useDefault); currentMap.put(Boolean.toString(true), atRequest); currentMap.put(Boolean.toString(false), atDownload); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a1191a6f4e6..55e329bea68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3888,8 +3888,8 @@ public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @P } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - Boolean gbAtRequest = dataset.getGuestbookEntryAtRequest(); - if(gbAtRequest == null) { + String gbAtRequest = dataset.getGuestbookEntryAtRequest(); + if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) { return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } return ok(dataset.getEffectiveGuestbookEntryAtRequest()); From d9102434f53f78692572cc4ddd1cf4bb195a67df Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 20 Sep 2023 17:22:14 -0400 Subject: [PATCH 0464/1092] revert #5863 --- .../harvard/iq/dataverse/FileDownload.java | 163 ------------------ .../iq/dataverse/GuestbookResponse.java | 56 +++--- .../GuestbookResponseServiceBean.java | 10 +- 3 files changed, 34 insertions(+), 195 deletions(-) delete mode 100644 src/main/java/edu/harvard/iq/dataverse/FileDownload.java diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java b/src/main/java/edu/harvard/iq/dataverse/FileDownload.java deleted file mode 100644 index a79281f71f0..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ -package edu.harvard.iq.dataverse; - -import java.io.Serializable; -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.Temporal; -import jakarta.persistence.TemporalType; -import jakarta.persistence.Transient; -import jakarta.persistence.CascadeType; -import jakarta.persistence.OneToOne; -import jakarta.persistence.MapsId; -import jakarta.persistence.FetchType; -import jakarta.persistence.JoinColumn; -import java.util.Date; - - -/** - * - * @author marina - */ -@Entity -public class FileDownload implements Serializable { - - @Id - private Long id; - - @OneToOne(fetch = FetchType.LAZY) - @MapsId - private GuestbookResponse guestbookResponse; - - @Temporal(value = TemporalType.TIMESTAMP) - private Date downloadTimestamp; - - /* - Transient Values carry non-written information - that will assist in the download process - - selected file ids is a comma delimited list that contains the file ids for multiple download - - fileFormat tells the download api which format a subsettable file should be downloaded as - */ - - @Transient - private String selectedFileIds; - - @Transient - private String fileFormat; - - - /** - * Possible values for downloadType include "Download", "Subset", - * or the displayName of an ExternalTool. 
- * - * TODO: Types like "Download" and "Subset" should - * be defined once as constants (likely an enum) rather than having these - * strings duplicated in various places when setDownloadtype() is called. - */ - private String downloadtype; - private String sessionId; - - public FileDownload(){ - - } - - public FileDownload(FileDownload source){ - this.setDownloadTimestamp(source.getDownloadTimestamp()); - this.setDownloadtype(source.getDownloadtype()); - this.setFileFormat(source.getFileFormat()); - this.setGuestbookResponse(source.getGuestbookResponse()); - this.setSelectedFileIds(source.getSelectedFileIds()); - this.setSessionId(source.getSessionId()); - } - - public String getFileFormat() { - return fileFormat; - } - - //for download - public void setFileFormat(String downloadFormat) { - this.fileFormat = downloadFormat; - } - - public String getDownloadtype() { - return downloadtype; - } - - public void setDownloadtype(String downloadtype) { - this.downloadtype = downloadtype; - } - - public String getSessionId() { - return sessionId; - } - - public void setSessionId(String sessionId) { - this.sessionId = sessionId; - } - - public String getSelectedFileIds() { - return selectedFileIds; - } - - public void setSelectedFileIds(String selectedFileIds) { - this.selectedFileIds = selectedFileIds; - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public Date getDownloadTimestamp(){ - return this.downloadTimestamp; - } - - public void setDownloadTimestamp(Date downloadTimestamp){ - this.downloadTimestamp = downloadTimestamp; - } - - - public void setGuestbookResponse(GuestbookResponse gbr){ - this.guestbookResponse = gbr; - } - - public GuestbookResponse getGuestbookResponse(){ - return this.guestbookResponse; - } - - @Override - public int hashCode() { - int hash = 0; - hash += (id != null ? id.hashCode() : 0); - return hash; - } - - @Override - public boolean equals(Object object) { - // TODO: Warning - this method won't work in the case the id fields are not set - if (!(object instanceof FileDownload)) { - return false; - } - FileDownload other = (FileDownload) object; - if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { - return false; - } - return true; - } - - @Override - public String toString() { - return "edu.harvard.iq.dataverse.FileDownload[ id=" + id + " ]"; - } - - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 35c299fa5d5..658413c772d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -67,9 +67,6 @@ public class GuestbookResponse implements Serializable { @JoinColumn(nullable=true) private AuthenticatedUser authenticatedUser; - @OneToOne(cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},mappedBy="guestbookResponse",fetch = FetchType.LAZY) - private FileDownload fileDownload; - @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},fetch = FetchType.LAZY) //private FileAccessRequest fileAccessRequest; private List fileAccessRequests; @@ -94,15 +91,35 @@ public class GuestbookResponse implements Serializable { @Temporal(value = TemporalType.TIMESTAMP) private Date responseTime; + /** + * Possible values for downloadType include "Download", "Subset", + * or the displayName of an ExternalTool. 
+     *
+     * TODO: Types like "Download" and "Subset" should
+     * be defined once as constants (likely an enum) rather than having these
+     * strings duplicated in various places when setDownloadtype() is called.
+     */
+    private String downloadtype;
+    private String sessionId;
+
     /*
     Transient Values carry non-written information
     that will assist in the download process
     - writeResponse is set to false when dataset version is draft.
+    - selected file ids is a comma delimited list that contains the file ids for multiple download
+    - fileFormat tells the download api which format a subsettable file should be downloaded as
+
     */
 
     @Transient
     private boolean writeResponse = true;
 
+    @Transient
+    private String selectedFileIds;
+
+    @Transient
+    private String fileFormat;
+
     /**
      * This transient variable is a place to temporarily retrieve the
      * ExternalTool object from the popup when the popup is required on the
@@ -110,6 +127,7 @@ public class GuestbookResponse implements Serializable {
      */
     @Transient
     private ExternalTool externalTool;
+
 
     public boolean isWriteResponse() {
         return writeResponse;
@@ -120,19 +138,19 @@ public void setWriteResponse(boolean writeResponse) {
     }
 
     public String getSelectedFileIds(){
-        return this.fileDownload.getSelectedFileIds();
+        return selectedFileIds;
     }
 
     public void setSelectedFileIds(String selectedFileIds) {
-        this.fileDownload.setSelectedFileIds(selectedFileIds);
+        this.selectedFileIds = selectedFileIds;
     }
 
     public String getFileFormat() {
-        return this.fileDownload.getFileFormat();
+        return fileFormat;
     }
 
     public void setFileFormat(String downloadFormat) {
-        this.fileDownload.setFileFormat(downloadFormat);
+        this.fileFormat = downloadFormat;
     }
 
     public ExternalTool getExternalTool() {
@@ -144,10 +162,6 @@ public void setExternalTool(ExternalTool externalTool) {
     }
 
     public GuestbookResponse(){
-        if(this.getFileDownload() == null){
-            this.fileDownload = new FileDownload();
-            this.fileDownload.setGuestbookResponse(this);
-        }
     }
 
     public GuestbookResponse(GuestbookResponse source){
@@ -160,7 +174,7 @@ public GuestbookResponse(GuestbookResponse source){
         this.setDataset(source.getDataset());
         this.setDatasetVersion(source.getDatasetVersion());
         this.setAuthenticatedUser(source.getAuthenticatedUser());
-        
+        this.setSessionId(source.getSessionId());
         List customQuestionResponses = new ArrayList<>();
         if (!source.getCustomQuestionResponses().isEmpty()){
             for (CustomQuestionResponse customQuestionResponse : source.getCustomQuestionResponses() ){
@@ -173,7 +187,6 @@ public GuestbookResponse(GuestbookResponse source){
         }
         this.setCustomQuestionResponses(customQuestionResponses);
         this.setGuestbook(source.getGuestbook());
-        this.setFileDownload(source.getFileDownload());
     }
 
@@ -231,7 +244,6 @@ public Date getResponseTime() {
 
     public void setResponseTime(Date responseTime) {
         this.responseTime = responseTime;
-        this.getFileDownload().setDownloadTimestamp(responseTime);
     }
 
     public String getResponseDate() {
@@ -246,14 +258,6 @@ public void setCustomQuestionResponses(List customQuestionResponses) {
         this.customQuestionResponses = customQuestionResponses;
     }
 
-    public FileDownload getFileDownload(){
-        return fileDownload;
-    }
-
-    public void setFileDownload(FileDownload fDownload){
-        this.fileDownload = fDownload;
-    }
-
     public List getFileAccessRequests(){
         return fileAccessRequests;
     }
@@ -295,21 +299,21 @@ public void setAuthenticatedUser(AuthenticatedUser authenticatedUser) {
     }
 
     public String getDownloadtype() {
-        return this.fileDownload.getDownloadtype();
+        return downloadtype;
     }
 
     public void setDownloadtype(String downloadtype) {
-        this.fileDownload.setDownloadtype(downloadtype);
+        this.downloadtype = downloadtype;
     }
 
     public String getSessionId() {
-        return this.fileDownload.getSessionId();
+        return sessionId;
    }
 
     public void setSessionId(String sessionId) {
-        this.fileDownload.setSessionId(sessionId);
+        this.sessionId = sessionId;
     }
 
     public String toHtmlFormattedResponse() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
index b635a0c03a6..6c84f1cacf1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
@@ -63,15 +63,14 @@ public class GuestbookResponseServiceBean {
             + " and r.dataset_id = o.id "
             + " and r.guestbook_id = g.id ";*/
 
-    private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, f.downloadtype,"
+    private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, r.downloadtype,"
             + " m.label, r.dataFile_id, r.name, r.email, r.institution, r.position,"
             + " o.protocol, o.authority, o.identifier, d.protocol, d.authority, d.identifier "
-            + "from guestbookresponse r, filedownload f, filemetadata m, dvobject o, guestbook g, dvobject d "
+            + "from guestbookresponse r, filemetadata m, dvobject o, guestbook g, dvobject d "
             + "where "
             + "m.datasetversion_id = (select max(datasetversion_id) from filemetadata where datafile_id =r.datafile_id ) "
             + " and m.datafile_id = r.datafile_id "
             + " and d.id = r.datafile_id "
-            + " and r.id = f.guestbookresponse_id "
             + " and r.dataset_id = o.id "
             + " and r.guestbook_id = g.id ";
 
     // An alternative version of the same query, formatted for display
     // on the guestbook-results.xhtml page (the info we show on the page is
     // less detailed than what we let the users download as CSV files, so this
     // query has fewer fields than the one above). -- L.A.
- private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select r.id, v.value, r.responsetime, f.downloadtype, m.label, r.name " - + "from guestbookresponse r, filedownload f, datasetfieldvalue v, filemetadata m , dvobject o " + private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select r.id, v.value, r.responsetime, r.downloadtype, m.label, r.name " + + "from guestbookresponse r, datasetfieldvalue v, filemetadata m , dvobject o " + "where " + " v.datasetfield_id = (select id from datasetfield f where datasetfieldtype_id = 1 " + " and datasetversion_id = (select max(id) from datasetversion where dataset_id =r.dataset_id )) " + " and m.datasetversion_id = (select max(datasetversion_id) from filemetadata where datafile_id =r.datafile_id ) " + " and m.datafile_id = r.datafile_id " - + " and r.id = f.guestbookresponse_id " + " and r.dataset_id = o.id "; // And a custom query for retrieving *all* the custom question responses, for From 4b5710de04193f3745b3cabd5c6fc751299d3009 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 20 Sep 2023 17:30:06 -0400 Subject: [PATCH 0465/1092] fix revert, renames, add AccessRequest event type --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../harvard/iq/dataverse/FileDownloadHelper.java | 6 +++--- .../iq/dataverse/FileDownloadServiceBean.java | 4 +++- .../harvard/iq/dataverse/GuestbookResponse.java | 14 +++++++------- .../iq/dataverse/GuestbookResponseServiceBean.java | 12 ++++++------ .../engine/command/impl/GetUserTracesCommand.java | 2 +- .../V6.0.0.1__9599-guestbook-at-request.sql | 8 ++++++++ 7 files changed, 29 insertions(+), 19 deletions(-) create mode 100644 src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 5ce6207250d..d920ab87bb4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3246,7 +3246,7 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa } else { guestbookResponse.setFileFormat(""); } - guestbookResponse.setDownloadtype("Download"); + guestbookResponse.setEventType("Download"); } /*helper function to filter the selected files into , diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 0f93f68623a..f574ed46f34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -72,7 +72,7 @@ private boolean testResponseLength(String value) { // terms/etc. popup. public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) { PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); - guestbookResponse.setDownloadtype("Download"); + guestbookResponse.setEventType("Download"); // Note that this method is only ever called from the file-download-popup - // meaning we know for the fact that we DO want to save this // guestbookResponse permanently in the database. 
@@ -93,7 +93,7 @@ public void writeGuestbookAndOpenSubset(GuestbookResponse guestbookResponse) { PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadDataSubsetPopup').show()"); - guestbookResponse.setDownloadtype("Subset"); + guestbookResponse.setEventType("Subset"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } @@ -158,7 +158,7 @@ public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookRe * Writes a guestbook entry for either popup scenario: guestbook or terms. */ public boolean writeGuestbookAndShowPreview(GuestbookResponse guestbookResponse) { - guestbookResponse.setDownloadtype("Explore"); + guestbookResponse.setEventType("Explore"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index 0758f053470..a80781b16ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -201,6 +201,8 @@ public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookRe return; } + guestbookResponse.setEventType("AccessRequest"); + List selectedDataFiles = new ArrayList<>(); //always make sure it's at least an empty List if(guestbookResponse.getDataFile() != null ){ //one file 'selected' by 'Request Access' button click @@ -362,7 +364,7 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter String localeCode = session.getLocaleCode(); ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd, localeCode); // Persist the name of the tool (i.e. "Data Explorer", etc.) - guestbookResponse.setDownloadtype(externalTool.getDisplayName()); + guestbookResponse.setEventType(externalTool.getDisplayName()); PrimeFaces.current().executeScript(externalToolHandler.getExploreScript()); // This is the old logic from TwoRavens, null checks and all. if (guestbookResponse != null && guestbookResponse.isWriteResponse() diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 658413c772d..d2d1b672716 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -99,7 +99,7 @@ public class GuestbookResponse implements Serializable { * be defined once as constants (likely an enum) rather than having these * strings duplicated in various places when setDownloadtype() is called. 
*/ - private String downloadtype; + private String eventType; private String sessionId; /* @@ -298,22 +298,22 @@ public void setAuthenticatedUser(AuthenticatedUser authenticatedUser) { this.authenticatedUser = authenticatedUser; } - public String getDownloadtype() { - return this.getDownloadtype(); + public String getEventType() { + return this.eventType; } - public void setDownloadtype(String downloadtype) { - this.setDownloadtype(downloadtype); + public void setEventType(String eventType) { + this.eventType = eventType; } public String getSessionId() { - return this.getSessionId(); + return this.sessionId; } public void setSessionId(String sessionId) { - this.setSessionId(sessionId); + this.sessionId= sessionId; } public String toHtmlFormattedResponse() { diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 6c84f1cacf1..00e44f2dbf6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -668,7 +668,7 @@ public GuestbookResponse initGuestbookResponseForFragment(DatasetVersion working if (dataset.getGuestbook() != null && !dataset.getGuestbook().getCustomQuestions().isEmpty()) { initCustomQuestions(guestbookResponse, dataset); } - guestbookResponse.setDownloadtype("Download"); + guestbookResponse.setEventType("Download"); guestbookResponse.setDataset(dataset); @@ -722,9 +722,9 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String if (dataset.getGuestbook() != null && !dataset.getGuestbook().getCustomQuestions().isEmpty()) { initCustomQuestions(guestbookResponse, dataset); } - guestbookResponse.setDownloadtype("Download"); + guestbookResponse.setEventType("Download"); if(downloadFormat.toLowerCase().equals("subset")){ - guestbookResponse.setDownloadtype("Subset"); + guestbookResponse.setEventType("Subset"); } if(downloadFormat.toLowerCase().equals("explore")){ /** @@ -740,7 +740,7 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String * over in the "explore" method of FileDownloadServiceBean just * before the guestbookResponse is written. 
*/ - guestbookResponse.setDownloadtype("Explore"); + guestbookResponse.setEventType("Explore"); } guestbookResponse.setDataset(dataset); @@ -819,7 +819,7 @@ public GuestbookResponse initDefaultGuestbookResponse(Dataset dataset, DataFile guestbookResponse.setDataset(dataset); guestbookResponse.setResponseTime(new Date()); guestbookResponse.setSessionId(session.toString()); - guestbookResponse.setDownloadtype("Download"); + guestbookResponse.setEventType("Download"); setUserDefaultResponses(guestbookResponse, session); return guestbookResponse; } @@ -840,7 +840,7 @@ public GuestbookResponse initAPIGuestbookResponse(Dataset dataset, DataFile data guestbookResponse.setDataset(dataset); guestbookResponse.setResponseTime(new Date()); guestbookResponse.setSessionId(session.toString()); - guestbookResponse.setDownloadtype("Download"); + guestbookResponse.setEventType("Download"); setUserDefaultResponses(guestbookResponse, session, user); return guestbookResponse; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java index e41d70d9804..df0b5d785e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java @@ -212,7 +212,7 @@ public JsonObjectBuilder execute(CommandContext ctxt) throws CommandException { try { JsonObjectBuilder gbe = Json.createObjectBuilder() .add("id", guestbookResponse.getId()) - .add("downloadType", guestbookResponse.getDownloadtype()) + .add("eventType", guestbookResponse.getEventType()) .add("filename", guestbookResponse.getDataFile().getCurrentName()) .add("date", guestbookResponse.getResponseDate()) .add("guestbookName", guestbookResponse.getGuestbook().getName()); diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql new file mode 100644 index 00000000000..63b8c8f531e --- /dev/null +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -0,0 +1,8 @@ +ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS eventtype VARCHAR(255); +ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS sessionid VARCHAR(255); + +UPDATE guestbookresponse g + SET eventtype = (SELECT downloadtype FROM filedownload f where f.guestbookresponse_id = g.id), + sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id); + +DROP TABLE filedownload; From 887d26f2fe5c41ca71f0031a9eae6dbfa13e8559 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Sep 2023 10:11:33 +0100 Subject: [PATCH 0466/1092] Added: docs for deaccessioning API endpoints --- doc/sphinx-guides/source/api/native-api.rst | 42 ++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 90f4ad4e800..f46bd0dd17c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1020,7 +1020,17 @@ Usage example: Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them. -Keep in mind that you can combine all of the above query params depending on the results you are looking for. 
+By default, deaccessioned dataset versions are not considered by this endpoint: they are ignored when the :latest or :latest-published identifiers are resolved, and requesting a specific deaccessioned version returns a not found error unless the option described below is enabled. + +To include deaccessioned dataset versions, set the ``includeDeaccessioned`` query parameter to true. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?includeDeaccessioned=true" + +.. note:: Keep in mind that you can combine all of the above query params depending on the results you are looking for. Get File Counts in a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1048,6 +1058,16 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" +By default, deaccessioned dataset versions are not considered by this endpoint: they are ignored when the :latest or :latest-published identifiers are resolved, and requesting a specific deaccessioned version returns a not found error unless the option described below is enabled. + +To include deaccessioned dataset versions, set the ``includeDeaccessioned`` query parameter to true. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?includeDeaccessioned=true" + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1344,6 +1364,26 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/versions/:draft" +Deaccession Dataset +~~~~~~~~~~~~~~~~~~~ + +Given a dataset version, this endpoint updates its status to deaccessioned. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSIONID=1.0 + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" + +The fully expanded example above (without environment variables) looks like this: + +..
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" + Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 1d661e74f2671405143023e22018c5ca197b9c5c Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Sep 2023 10:37:24 +0100 Subject: [PATCH 0467/1092] Added: release notes for #9852 --- .../9852-files-api-extension-deaccession.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 doc/release-notes/9852-files-api-extension-deaccession.md diff --git a/doc/release-notes/9852-files-api-extension-deaccession.md b/doc/release-notes/9852-files-api-extension-deaccession.md new file mode 100644 index 00000000000..c5f6741932a --- /dev/null +++ b/doc/release-notes/9852-files-api-extension-deaccession.md @@ -0,0 +1,10 @@ +Extended the existing endpoints: + +- getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts) + +The above endpoints now accept a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files or file counts. + +Additionally, a new endpoint has been developed to support version deaccessioning through API (Given a dataset and a version). + +- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession) From 3c7fa8f0eeb34db7d2ca12d4f7eae8e4e02df1d8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Sep 2023 11:04:58 +0100 Subject: [PATCH 0468/1092] Added: friendlyType field to DataFile API json payload --- doc/release-notes/9852-files-api-extension-deaccession.md | 2 ++ .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 + 2 files changed, 3 insertions(+) diff --git a/doc/release-notes/9852-files-api-extension-deaccession.md b/doc/release-notes/9852-files-api-extension-deaccession.md index c5f6741932a..55698580e3c 100644 --- a/doc/release-notes/9852-files-api-extension-deaccession.md +++ b/doc/release-notes/9852-files-api-extension-deaccession.md @@ -8,3 +8,5 @@ The above endpoints now accept a new boolean optional query parameter "includeDe Additionally, a new endpoint has been developed to support version deaccessioning through API (Given a dataset and a version). 
- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession) + +Finally, the DataFile API payload has been extended to add the field "friendlyType" diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index e5cd72ff5fc..c4f9e47accf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -665,6 +665,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("pidURL", pidURL) .add("filename", fileName) .add("contentType", df.getContentType()) + .add("friendlyType", df.getFriendlyType()) .add("filesize", df.getFilesize()) .add("description", fileMetadata.getDescription()) .add("categories", getFileCategories(fileMetadata)) From f848f009516ed8fe229403d4262dc5874831cae1 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Sep 2023 12:25:05 -0400 Subject: [PATCH 0469/1092] use static final values for fixed event types --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../edu/harvard/iq/dataverse/FileDownloadHelper.java | 6 +++--- .../harvard/iq/dataverse/FileDownloadServiceBean.java | 2 +- .../java/edu/harvard/iq/dataverse/GuestbookResponse.java | 9 +++++++++ .../iq/dataverse/GuestbookResponseServiceBean.java | 8 ++++---- 5 files changed, 18 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index d920ab87bb4..f99c10b2b79 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3246,7 +3246,7 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa } else { guestbookResponse.setFileFormat(""); } - guestbookResponse.setEventType("Download"); + guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); } /*helper function to filter the selected files into , diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index f574ed46f34..a6ae7223d9d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -72,7 +72,7 @@ private boolean testResponseLength(String value) { // terms/etc. popup. public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) { PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); - guestbookResponse.setEventType("Download"); + guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); // Note that this method is only ever called from the file-download-popup - // meaning we know for the fact that we DO want to save this // guestbookResponse permanently in the database. @@ -93,7 +93,7 @@ public void writeGuestbookAndOpenSubset(GuestbookResponse guestbookResponse) { PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadDataSubsetPopup').show()"); - guestbookResponse.setEventType("Subset"); + guestbookResponse.setEventType(GuestbookResponse.SUBSET); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } @@ -158,7 +158,7 @@ public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookRe * Writes a guestbook entry for either popup scenario: guestbook or terms. 
*/ public boolean writeGuestbookAndShowPreview(GuestbookResponse guestbookResponse) { - guestbookResponse.setEventType("Explore"); + guestbookResponse.setEventType(GuestbookResponse.EXPLORE); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a80781b16ff..ab99b904b73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -201,7 +201,7 @@ public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookRe return; } - guestbookResponse.setEventType("AccessRequest"); + guestbookResponse.setEventType(GuestbookResponse.ACCESS_REQUEST); List selectedDataFiles = new ArrayList<>(); //always make sure it's at least an empty List diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index d2d1b672716..b27571cdbf5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -127,8 +127,17 @@ public class GuestbookResponse implements Serializable { */ @Transient private ExternalTool externalTool; + + /* Event Types - there are four pre-defined values in use. + * The type can also be the name of a previewer/explore tool + */ + static final String ACCESS_REQUEST = "AccessRequest"; + static final String DOWNLOAD = "Download"; + static final String SUBSET = "Subset"; + static final String EXPLORE = "Explore"; + public boolean isWriteResponse() { return writeResponse; } diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 00e44f2dbf6..17bc4bea330 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -668,7 +668,7 @@ public GuestbookResponse initGuestbookResponseForFragment(DatasetVersion working if (dataset.getGuestbook() != null && !dataset.getGuestbook().getCustomQuestions().isEmpty()) { initCustomQuestions(guestbookResponse, dataset); } - guestbookResponse.setEventType("Download"); + guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); guestbookResponse.setDataset(dataset); @@ -722,7 +722,7 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String if (dataset.getGuestbook() != null && !dataset.getGuestbook().getCustomQuestions().isEmpty()) { initCustomQuestions(guestbookResponse, dataset); } - guestbookResponse.setEventType("Download"); + guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); if(downloadFormat.toLowerCase().equals("subset")){ guestbookResponse.setEventType("Subset"); } @@ -819,7 +819,7 @@ public GuestbookResponse initDefaultGuestbookResponse(Dataset dataset, DataFile guestbookResponse.setDataset(dataset); guestbookResponse.setResponseTime(new Date()); guestbookResponse.setSessionId(session.toString()); - guestbookResponse.setEventType("Download"); + guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); setUserDefaultResponses(guestbookResponse, session); return guestbookResponse; } @@ -840,7 +840,7 @@ public GuestbookResponse initAPIGuestbookResponse(Dataset dataset, DataFile data guestbookResponse.setDataset(dataset); 
guestbookResponse.setResponseTime(new Date()); guestbookResponse.setSessionId(session.toString()); - guestbookResponse.setEventType("Download"); + guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); setUserDefaultResponses(guestbookResponse, session, user); return guestbookResponse; } From cd82c2d7d2769b2c1038df953fa7f53ad4eb6adb Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Sep 2023 14:46:59 -0400 Subject: [PATCH 0470/1092] update queries, missed 'Subset' etc --- .../GuestbookResponseServiceBean.java | 31 ++++++++++++------- .../V6.0.0.1__9599-guestbook-at-request.sql | 28 +++++++++++++++++ src/main/webapp/dataset.xhtml | 6 ++-- src/main/webapp/dataverse.xhtml | 2 +- src/main/webapp/file-info-fragment.xhtml | 2 +- src/main/webapp/file.xhtml | 4 +-- 6 files changed, 55 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 17bc4bea330..4800ffd439f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -724,7 +724,7 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String } guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD); if(downloadFormat.toLowerCase().equals("subset")){ - guestbookResponse.setEventType("Subset"); + guestbookResponse.setEventType(GuestbookResponse.SUBSET); } if(downloadFormat.toLowerCase().equals("explore")){ /** @@ -740,7 +740,7 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String * over in the "explore" method of FileDownloadServiceBean just * before the guestbookResponse is written. */ - guestbookResponse.setEventType("Explore"); + guestbookResponse.setEventType(GuestbookResponse.EXPLORE); } guestbookResponse.setDataset(dataset); @@ -904,29 +904,36 @@ public void save(GuestbookResponse guestbookResponse) { em.persist(guestbookResponse); } + + /* + * Metrics - download counts from GuestbookResponses: Any GuestbookResponse that + * is not of eventtype=='AccessRequest' is considered a download. 
This includes + * actual 'Download's, downloads of 'Subset's, and use by 'Explore' tools and + * previewers (where eventtype is the previewer name) + */ - public Long getCountGuestbookResponsesByDataFileId(Long dataFileId) { + public Long getDownloadCountByDataFileId(Long dataFileId) { // datafile id is null, will return 0 - Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.datafile_id = " + dataFileId); + Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.datafile_id = " + dataFileId + " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); return (Long) query.getSingleResult(); } - public Long getCountGuestbookResponsesByDatasetId(Long datasetId) { - return getCountGuestbookResponsesByDatasetId(datasetId, null); + public Long getDownloadCountByDatasetId(Long datasetId) { + return getDownloadCountByDatasetId(datasetId, null); } - public Long getCountGuestbookResponsesByDatasetId(Long datasetId, LocalDate date) { + public Long getDownloadCountByDatasetId(Long datasetId, LocalDate date) { // dataset id is null, will return 0 Query query; if(date != null) { - query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId + " and responsetime < '" + date.toString() + "'"); + query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId + " and responsetime < '" + date.toString() + "' and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); }else { - query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId); + query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId + " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'"); } return (Long) query.getSingleResult(); } - public Long getCountOfAllGuestbookResponses() { + public Long getTotalDownloadCount() { // dataset id is null, will return 0 // "SELECT COUNT(*)" is notoriously expensive in PostgresQL for large @@ -955,10 +962,12 @@ public Long getCountOfAllGuestbookResponses() { } catch (IllegalArgumentException iae) { // Don't do anything, we'll fall back to using "SELECT COUNT()" } - Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o;"); + Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"';"); return (Long) query.getSingleResult(); } + //End Metrics/download counts + public List findByAuthenticatedUserId(AuthenticatedUser user) { Query query = em.createNamedQuery("GuestbookResponse.findByAuthenticatedUserId"); query.setParameter("authenticatedUserId", user.getId()); diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql index 63b8c8f531e..df98047b513 100644 --- a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -6,3 +6,31 @@ UPDATE guestbookresponse g sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id); DROP TABLE filedownload; + +-- This creates a function that ESTIMATES the size of the +-- GuestbookResponse table (for the metrics display), instead +-- of relying on straight "SELECT COUNT(*) ..." +-- Significant potential savings for an active installation.
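+-- For intuition, a worked example with illustrative numbers (not real data):
+-- if pg_class reports reltuples=1000000 over relpages=10000 (100 rows/page)
+-- and pg_relation_size is 83886080 bytes with block_size 8192 (10240 pages),
+-- the row estimate is 100 * 10240 = 1024000; if pg_stats reports that 25%
+-- of eventtype values are 'AccessRequest', the estimated download count is
+-- 1024000 * (1 - 0.25) = 768000.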
+ +CREATE OR REPLACE FUNCTION estimateGuestBookResponseTableSize() +RETURNS bigint AS $$ +DECLARE + estimatedsize bigint; +BEGIN + SELECT CASE WHEN relpages=0 THEN 0 + ELSE ((reltuples / relpages) + * (pg_relation_size('public.guestbookresponse') / current_setting('block_size')::int))::bigint + * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0) THEN 1 + ELSE 1 - (SELECT (most_common_freqs::text::bigint[])[array_position(most_common_vals::text::text[], 'AccessRequest')] + FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END) + END + FROM pg_class + WHERE oid = 'public.guestbookresponse'::regclass INTO estimatedsize; + + if estimatedsize = 0 then + SELECT COUNT(id) FROM guestbookresponse INTO estimatedsize; + END if; + + RETURN estimatedsize; +END; +$$ LANGUAGE plpgsql IMMUTABLE; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 47ca447da43..a9f1ff339f0 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -540,7 +540,7 @@
[hunk body lost in extraction] @@ -562,9 +562,9 @@ data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.dataset.downloads.makedatacount.tip']}"> [hunk body lost in extraction]
    diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml index e4484d38b97..41e2807c4fd 100644 --- a/src/main/webapp/dataverse.xhtml +++ b/src/main/webapp/dataverse.xhtml @@ -458,7 +458,7 @@
[hunk body lost in extraction]
    diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml index 33a8d2c3ca5..ca82738f920 100644 --- a/src/main/webapp/file-info-fragment.xhtml +++ b/src/main/webapp/file-info-fragment.xhtml @@ -67,7 +67,7 @@
[hunk body lost in extraction]
    diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 3772b3c4dbe..744b593dbd8 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -298,7 +298,7 @@
[hunk body lost in extraction] @@ -306,7 +306,7 @@ [hunk body lost in extraction]
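The next patch repairs accessors that were left calling themselves when the FileDownload delegation was removed in patch 0464. A minimal sketch of the failure mode, using a hypothetical class rather than the actual Dataverse source:

    public class RecursiveAccessorSketch {
        private String fileFormat;

        // Bug: the getter invokes itself instead of returning the field,
        // so any call loops until the JVM throws a StackOverflowError.
        public String getFileFormat() {
            return this.getFileFormat();
        }

        // The corrected form, as applied in the patch below:
        // public String getFileFormat() { return this.fileFormat; }
    }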
    - + From e506279ad28018ec71e4990e0d99e36c66fe6a26 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Sep 2023 14:51:37 -0400 Subject: [PATCH 0471/1092] fix recursive calls --- .../java/edu/harvard/iq/dataverse/GuestbookResponse.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index b27571cdbf5..3fb0d6691bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -147,19 +147,19 @@ public void setWriteResponse(boolean writeResponse) { } public String getSelectedFileIds(){ - return this.getSelectedFileIds(); + return this.selectedFileIds; } public void setSelectedFileIds(String selectedFileIds) { - this.setSelectedFileIds(selectedFileIds); + this.selectedFileIds = selectedFileIds; } public String getFileFormat() { - return this.getFileFormat(); + return this.fileFormat; } public void setFileFormat(String downloadFormat) { - this.setFileFormat(downloadFormat); + this.fileFormat = downloadFormat; } public ExternalTool getExternalTool() { From bfce43eca504192e553d85bce103f88aa481af88 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Sep 2023 15:55:12 -0400 Subject: [PATCH 0472/1092] update stored function, make update idempotent --- .../V6.0.0.1__9599-guestbook-at-request.sql | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql index df98047b513..fd892fd3356 100644 --- a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -1,12 +1,19 @@ ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS eventtype VARCHAR(255); ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS sessionid VARCHAR(255); -UPDATE guestbookresponse g - SET eventtype = (SELECT downloadtype FROM filedownload f where f.guestbookresponse_id = g.id), - sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id); - -DROP TABLE filedownload; +DO $$ + BEGIN + IF EXISTS (select 1 from pg_class where relname='filedownload') THEN + UPDATE guestbookresponse g + SET eventtype = (SELECT downloadtype FROM filedownload f where f.guestbookresponse_id = g.id), + sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id); + DROP TABLE filedownload; + END IF; + END + $$ ; + + -- This creates a function that ESTIMATES the size of the -- GuestbookResponse table (for the metrics display), instead -- of relying on straight "SELECT COUNT(*) ..." 
@@ -17,18 +24,20 @@ RETURNS bigint AS $$ DECLARE estimatedsize bigint; BEGIN - SELECT CASE WHEN relpages=0 THEN 0 + SELECT CASE WHEN relpages<10 THEN 0 ELSE ((reltuples / relpages) * (pg_relation_size('public.guestbookresponse') / current_setting('block_size')::int))::bigint - * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0) THEN 1 - ELSE 1 - (SELECT (most_common_freqs::text::bigint[])[array_position(most_common_vals::text::text[], 'AccessRequest')] - FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END) + * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0 + OR (select array_position(most_common_vals::text::text[], 'AccessRequest') + FROM pg_stats WHERE tablename='guestbookresponse' AND attname='eventtype') IS NULL) THEN 1 + ELSE 1 - (SELECT (most_common_freqs::text::text[])[array_position(most_common_vals::text::text[], 'AccessRequest')]::bigint + FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END) END FROM pg_class WHERE oid = 'public.guestbookresponse'::regclass INTO estimatedsize; if estimatedsize = 0 then - SELECT COUNT(id) FROM guestbookresponse INTO estimatedsize; + SELECT COUNT(id) FROM guestbookresponse WHERE eventtype!= 'AccessRequest' INTO estimatedsize; END if; RETURN estimatedsize; From efa3d518a8184bef56b59ead26afba3af53ccb03 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 22 Sep 2023 09:56:28 -0400 Subject: [PATCH 0473/1092] remove todo --- .../iq/dataverse/GuestbookResponse.java | 29 +++++++------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 3fb0d6691bf..203be69cf14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -90,18 +90,19 @@ public class GuestbookResponse implements Serializable { @Temporal(value = TemporalType.TIMESTAMP) private Date responseTime; - - /** - * Possible values for downloadType include "Download", "Subset", - * or the displayName of an ExternalTool. - * - * TODO: Types like "Download" and "Subset" should - * be defined once as constants (likely an enum) rather than having these - * strings duplicated in various places when setDownloadtype() is called. - */ - private String eventType; + private String sessionId; + private String eventType; + + /** Event Types - there are four pre-defined values in use. + * The type can also be the name of a previewer/explore tool + */ + static final String ACCESS_REQUEST = "AccessRequest"; + static final String DOWNLOAD = "Download"; + static final String SUBSET = "Subset"; + static final String EXPLORE = "Explore"; + /* Transient Values carry non-written information that will assist in the download process @@ -128,14 +129,6 @@ public class GuestbookResponse implements Serializable { @Transient private ExternalTool externalTool; - /* Event Types - there are four pre-defined values in use. 
- * The type can also be the name of a previewer/explore tool - */ - - static final String ACCESS_REQUEST = "AccessRequest"; - static final String DOWNLOAD = "Download"; - static final String SUBSET = "Subset"; - static final String EXPLORE = "Explore"; public boolean isWriteResponse() { From 0350f99697d7099f7a4e89cc41064a3bae25d60b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 22 Sep 2023 10:14:30 -0400 Subject: [PATCH 0474/1092] add notes, reference issue/PR about the estimation method --- .../db/migration/V6.0.0.1__9599-guestbook-at-request.sql | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql index fd892fd3356..109963d8382 100644 --- a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -17,7 +17,12 @@ DO $$ -- This creates a function that ESTIMATES the size of the -- GuestbookResponse table (for the metrics display), instead -- of relying on straight "SELECT COUNT(*) ..." +-- It uses statistics to estimate the number of guestbook entries +-- and the fraction of them related to downloads, +-- i.e. those that weren't created for 'AccessRequest' events. -- Significant potential savings for an active installation. +-- See https://github.com/IQSS/dataverse/issues/8840 and +-- https://github.com/IQSS/dataverse/pull/8972 for more details CREATE OR REPLACE FUNCTION estimateGuestBookResponseTableSize() RETURNS bigint AS $$ From bbed57df0f2624f6a9300af509906e902541c82b Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Fri, 22 Sep 2023 10:26:02 -0400 Subject: [PATCH 0475/1092] #9944 add python-dvuploader to client libraries page --- doc/sphinx-guides/source/api/client-libraries.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index a25efe3a5f8..d53b9b2d776 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -52,6 +52,8 @@ There are multiple Python modules for interacting with Dataverse APIs. `EasyDataverse `_ is a Python library designed to simplify the management of Dataverse datasets in an object-oriented way, giving users the ability to upload, download, and update datasets with ease. By utilizing metadata block configurations, EasyDataverse automatically generates Python objects that contain all the necessary details required to create the native Dataverse JSON format used to create or edit datasets. Adding files and directories is also possible with EasyDataverse and requires no additional API calls. This library is particularly well-suited for client applications such as workflows and scripts as it minimizes technical complexities and facilitates swift development. +`python-dvuploader `_ implements Jim Myers' excellent `dv-uploader `_ as a Python module. It offers parallel direct uploads to Dataverse backend storage, streams files directly instead of buffering them in memory, and supports multi-part uploads, chunking data accordingly. + `pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_. 
`UBC's Dataverse Utilities `_ are a set of Python console utilities which allow one to upload datasets from a tab-separated-value spreadsheet, bulk release multiple datasets, bulk delete unpublished datasets, quickly duplicate records, replace licenses, and more. For additional information see their `PyPi page `_. From b3808c5f5d4b058a5be359c27a35254936a37266 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 22 Sep 2023 12:20:31 -0400 Subject: [PATCH 0476/1092] We talked about this at the Containerization Working Group meeting on 2023-09-21: delete the duplicated chmod and move the bootstrap script copy so that it executes before the chmod. --- modules/container-configbaker/Dockerfile | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 44f3806a591..2975b043213 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -26,8 +26,12 @@ RUN true && \ # Make our working directories mkdir -p ${SCRIPT_DIR} ${SECRETS_DIR} ${SOLR_TEMPLATE} -# Get in the scripts and make them executable (just in case...) +# Get in the scripts COPY maven/scripts maven/solr/update-fields.sh ${SCRIPT_DIR}/ +# Copy the data from scripts/api that provide the common base setup you'd get from the installer. +# ".dockerignore" will take care of taking only the bare necessities +COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ +# Make the scripts executable RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh # Copy the Solr config bits COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml -# Copy the data from scripts/api that provide the common base setup you'd get from the installer. -# ".dockerignore" will take care of taking only the bare necessities -COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ -RUN chmod +x ${BOOTSTRAP_DIR}/*/*.sh + + # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] From c82c47e0c08d9a0d507ee980fec78fd7345d4f4a Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Fri, 22 Sep 2023 13:33:57 -0400 Subject: [PATCH 0477/1092] #9944 python-dvuploader moved to gdcc org --- doc/sphinx-guides/source/api/client-libraries.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index d53b9b2d776..4aa5b935e27 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -52,7 +52,7 @@ There are multiple Python modules for interacting with Dataverse APIs. `EasyDataverse `_ is a Python library designed to simplify the management of Dataverse datasets in an object-oriented way, giving users the ability to upload, download, and update datasets with ease. By utilizing metadata block configurations, EasyDataverse automatically generates Python objects that contain all the necessary details required to create the native Dataverse JSON format used to create or edit datasets. Adding files and directories is also possible with EasyDataverse and requires no additional API calls. This library is particularly well-suited for client applications such as workflows and scripts as it minimizes technical complexities and facilitates swift development.
-`python-dvuploader `_ implements Jim Myers' excellent `dv-uploader `_ as a Python module. It offers parallel direct uploads to Dataverse backend storage, streams files directly instead of buffering them in memory, and supports multi-part uploads, chunking data accordingly. +`python-dvuploader `_ implements Jim Myers' excellent `dv-uploader `_ as a Python module. It offers parallel direct uploads to Dataverse backend storage, streams files directly instead of buffering them in memory, and supports multi-part uploads, chunking data accordingly. `pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_. From 53a901c8ce60edd4cfd87a179092286dd9520a86 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 22 Sep 2023 14:24:29 -0400 Subject: [PATCH 0478/1092] update metrics api queries --- .../java/edu/harvard/iq/dataverse/GuestbookResponse.java | 3 +-- .../harvard/iq/dataverse/metrics/MetricsServiceBean.java | 8 ++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 203be69cf14..976f1e084ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -98,7 +98,7 @@ public class GuestbookResponse implements Serializable { * The type can also be the name of a previewer/explore tool */ - static final String ACCESS_REQUEST = "AccessRequest"; + public static final String ACCESS_REQUEST = "AccessRequest"; static final String DOWNLOAD = "Download"; static final String SUBSET = "Subset"; static final String EXPLORE = "Explore"; @@ -129,7 +129,6 @@ public class GuestbookResponse implements Serializable { @Transient private ExternalTool externalTool; - public boolean isWriteResponse() { return writeResponse; diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 065b42e5afe..837cd518817 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.GuestbookResponse; import edu.harvard.iq.dataverse.Metric; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil.MetricType; @@ -424,6 +425,7 @@ public JsonArray downloadsTimeSeries(Dataverse d) { + "select distinct COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') as date, count(id)\n" + "from guestbookresponse\n" + ((d == null) ? 
"" : "where dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")") + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + " group by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') order by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "');"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -456,6 +458,7 @@ public long downloadsToMonth(String yyyymm, Dataverse d) throws ParseException { + "from guestbookresponse\n" + "where (date_trunc('month', responsetime) <= to_date('" + yyyymm + "','YYYY-MM')" + "or responsetime is NULL)\n" // includes historic guestbook records without date + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");") ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -477,6 +480,7 @@ public long downloadsPastDays(int days, Dataverse d) { + "select count(id)\n" + "from guestbookresponse\n" + "where responsetime > current_date - interval '" + days + "' day\n" + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");") ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -489,6 +493,7 @@ public JsonArray fileDownloadsTimeSeries(Dataverse d, boolean uniqueCounts) { + " FROM guestbookresponse gb, DvObject ob" + " where ob.id = gb.datafile_id " + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n") + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.datafile_id, ob.id, ob.protocol, ob.authority, ob.identifier, to_char(gb.responsetime, 'YYYY-MM') order by to_char(gb.responsetime, 'YYYY-MM');"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -503,6 +508,7 @@ public JsonArray fileDownloads(String yyyymm, Dataverse d, boolean uniqueCounts) + " where ob.id = gb.datafile_id " + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n") + " and date_trunc('month', gb.responsetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.datafile_id, ob.id, ob.protocol, ob.authority, ob.identifier order by count desc;"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -529,6 +535,7 @@ public JsonArray uniqueDownloadsTimeSeries(Dataverse d) { + " FROM guestbookresponse gb, DvObject ob" + " where ob.id = gb.dataset_id " + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.dataset_id, ob.protocol, ob.authority, ob.identifier, to_char(gb.responsetime, 'YYYY-MM') order by to_char(gb.responsetime, 'YYYY-MM');"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -546,6 +553,7 @@ public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) { + " where ob.id = gb.dataset_id " + ((d == null) ? 
"" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + " and date_trunc('month', responsetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.dataset_id, ob.protocol, ob.authority, ob.identifier order by count(distinct email) desc;"); JsonArrayBuilder jab = Json.createArrayBuilder(); try { From 740f63bd852abdd19c7f7aa43b2f641519ae0a66 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 22 Sep 2023 14:52:23 -0400 Subject: [PATCH 0479/1092] fix query --- .../edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 837cd518817..79369207963 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -425,7 +425,7 @@ public JsonArray downloadsTimeSeries(Dataverse d) { + "select distinct COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') as date, count(id)\n" + "from guestbookresponse\n" + ((d == null) ? "" : "where dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")") - + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + + ((d == null) ? "where ":" and ") + "eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + " group by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') order by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "');"); logger.log(Level.FINE, "Metric query: {0}", query); From 21c4a7fbaa48ace2ba588a72103af619cd70df02 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 10:36:12 -0400 Subject: [PATCH 0480/1092] downloadtype -> eventtype --- .../iq/dataverse/GuestbookResponseServiceBean.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 4800ffd439f..b0cc41eb448 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -63,7 +63,7 @@ public class GuestbookResponseServiceBean { + " and r.dataset_id = o.id " + " and r.guestbook_id = g.id ";*/ - private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, r.downloadtype," + private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, r.eventtype," + " m.label, r.dataFile_id, r.name, r.email, r.institution, r.position," + " o.protocol, o.authority, o.identifier, d.protocol, d.authority, d.identifier " + "from guestbookresponse r, filemetadata m, dvobject o, guestbook g, dvobject d " @@ -78,7 +78,7 @@ public class GuestbookResponseServiceBean { // on the guestbook-results.xhtml page (the info we show on the page is // less detailed than what we let the users download as CSV files, so this // query has fewer fields than the one above). -- L.A. 
- private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select r.id, v.value, r.responsetime, r.downloadtype, m.label, r.name " + private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select r.id, v.value, r.responsetime, r.eventtype, m.label, r.name " + "from guestbookresponse r, datasetfieldvalue v, filemetadata m , dvobject o " + "where " + " v.datasetfield_id = (select id from datasetfield f where datasetfieldtype_id = 1 " @@ -735,8 +735,8 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String * "externalTool" for all external tools, including TwoRavens. When * clicking "Explore" and then the name of the tool, we want the * name of the exploration tool (i.e. "Data Explorer", - * etc.) to be persisted as the downloadType. We execute - * guestbookResponse.setDownloadtype(externalTool.getDisplayName()) + * etc.) to be persisted as the eventType. We execute + * guestbookResponse.setEventType(externalTool.getDisplayName()) * over in the "explore" method of FileDownloadServiceBean just * before the guestbookResponse is written. */ From e5575135b474eca7457a620646d95c4cc839aad9 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 25 Sep 2023 12:36:52 -0400 Subject: [PATCH 0481/1092] fix popup logic for previews, fix old bug guestbookAndTermsPopupRequired (and downloadPopupRequired before it) was not defined and I think therefore was always false, meaning the option to show a popup never occurred. This may have been OK in practice since one would have to have accepted the terms popup to show the preview and preview pane with these buttons. The fix here should show the terms popup prior to allowing the explore button to be clicked if/when these buttons ever show and one hasn't already accepted the terms (and termsMet is therefore true). --- src/main/webapp/file.xhtml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 744b593dbd8..f69b5c35afd 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -364,12 +364,12 @@ - + - +
[hunk body lost in extraction] @@ -1001,6 +1001,12 @@ [hunk body lost in extraction]
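The lost hunks above implement the behavior described in the commit message: when terms have not yet been accepted, the Explore/preview buttons must first open the guestbook/terms popup. A Java-ized sketch of that gating, with hypothetical names standing in for the JSF EL in file.xhtml:

    // Illustration only; the real condition lives in JSF EL in file.xhtml.
    class ExploreButtonSketch {
        boolean termsMet; // true once the guestbook/terms have been accepted

        void onExploreClicked() {
            if (!termsMet) {
                showGuestbookAndTermsPopup(); // the popup's confirm action then explores
            } else {
                explore();
            }
        }

        void showGuestbookAndTermsPopup() { /* PF('guestbookAndTermsPopup').show() */ }
        void explore() { /* launch the preview/explore tool */ }
    }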
    From cdd3a721deefe659151ab89b2c75cd6f3af016a5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:23:07 -0400 Subject: [PATCH 0483/1092] 9952 - add missing <> chars for license --- .../edu/harvard/iq/dataverse/util/SignpostingResources.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 2c9b7167059..21abd2d7034 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -78,7 +78,7 @@ public String getLinks() { type = ";rel=\"type\",<" + defaultFileTypeValue + ">;rel=\"type\""; valueList.add(type); - String licenseString = DatasetUtil.getLicenseURI(workingDatasetVersion) + ";rel=\"license\""; + String licenseString = "<" + DatasetUtil.getLicenseURI(workingDatasetVersion) + ">;rel=\"license\""; valueList.add(licenseString); String linkset = "<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" From 3a4d8f98053ff726c617a45c5ad15d2f3059c138 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:25:28 -0400 Subject: [PATCH 0484/1092] 9953 - don't wrap linkset in a data element also remove @AuthRequired per #9466 --- .../harvard/iq/dataverse/api/Datasets.java | 33 ++++++++++--------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 98bc42f75b0..3b0bc3e0fcf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -580,26 +580,27 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, * @return */ @GET - @AuthRequired @Path("{id}/versions/{versionId}/linkset") - public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if ( ":draft".equals(versionId) ) { + public Response getLinkset(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (":draft".equals(versionId)) { return badRequest("Signposting is not supported on the :draft version"); } - User user = getRequestUser(crc); - return response(req -> { + DataverseRequest req = createDataverseRequest(null); + try { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - return ok(Json.createObjectBuilder().add( - "linkset", - new SignpostingResources( - systemConfig, - dsv, - JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), - JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("") - ).getJsonLinkset() - ) - ); - }, user); + return Response + .ok(Json.createObjectBuilder() + .add("linkset", + new SignpostingResources(systemConfig, dsv, + JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), + JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("")) + .getJsonLinkset()) + .build()) + .type(MediaType.APPLICATION_JSON).build(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } } @GET From 869d24266bc305d4b008975d8ebe0dd5911063a2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:26:50 -0400 Subject: [PATCH 0485/1092] add 
null check to avoid any remaining cases of 9954 --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index d20175b6e1a..7cb5bfa3850 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -6144,7 +6144,7 @@ public String getWebloaderUrlForDataset(Dataset d) { String signpostingLinkHeader = null; public String getSignpostingLinkHeader() { - if (!workingVersion.isReleased()) { + if ((workingVersion==null) || (!workingVersion.isReleased())) { return null; } if (signpostingLinkHeader == null) { From 2332c1c5b815e737f7e2471d40d31b0fac179c82 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 18:38:05 -0400 Subject: [PATCH 0486/1092] release note --- doc/release-notes/9955-Signposting-updates.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9955-Signposting-updates.md diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md new file mode 100644 index 00000000000..bf0c7bc646b --- /dev/null +++ b/doc/release-notes/9955-Signposting-updates.md @@ -0,0 +1 @@ +This release fixes two issues (#9952, #9953) where the Signposting output did not match the Signposting specification. \ No newline at end of file From 9d846d2455e820cc9312863079086c66b0799c7a Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Tue, 26 Sep 2023 09:13:13 +0200 Subject: [PATCH 0487/1092] fix: require ManageDatasetPermissions for listing role assignments on datasets --- .../engine/command/impl/ListRoleAssignments.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java index 1858ba377ab..b619d32cc7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java @@ -6,16 +6,18 @@ import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Collections; /** * * @author michael */ -@RequiredPermissions( Permission.ManageDataversePermissions ) +// no annotations here, since permissions are dynamically decided public class ListRoleAssignments extends AbstractCommand> { private final DvObject definitionPoint; @@ -34,5 +36,12 @@ public List execute(CommandContext ctxt) throws CommandException } return ctxt.permissions().assignmentsOn(definitionPoint); } + + @Override + public Map> getRequiredPermissions() { + return Collections.singletonMap("", + definitionPoint.isInstanceofDataset() ? 
Collections.singleton(Permission.ManageDatasetPermissions) + : Collections.singleton(Permission.ManageDataversePermissions)); + } } From 41e363e343861f6b416e6add60e60778f697cce0 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Tue, 26 Sep 2023 09:13:36 +0200 Subject: [PATCH 0488/1092] test: require ManageDatasetPermissions for listing role assignments on datasets --- scripts/api/data/role-contributor-plus.json | 12 +++ .../harvard/iq/dataverse/api/DatasetsIT.java | 87 +++++++++++++++++++ 2 files changed, 99 insertions(+) create mode 100644 scripts/api/data/role-contributor-plus.json diff --git a/scripts/api/data/role-contributor-plus.json b/scripts/api/data/role-contributor-plus.json new file mode 100644 index 00000000000..ef9ba3aaff6 --- /dev/null +++ b/scripts/api/data/role-contributor-plus.json @@ -0,0 +1,12 @@ +{ + "alias":"contributorPlus", + "name":"ContributorPlus", + "description":"For datasets, a person who can edit License + Terms, then submit them for review, and add collaborators.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft", + "ManageDatasetPermissions" + ] +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 3b6d4d1ecdf..b51d400d2d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1296,6 +1296,93 @@ public void testAddRoles(){ } + @Test + public void testListRoleAssignments() { + Response createAdminUser = UtilIT.createRandomUser(); + String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser); + String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser); + UtilIT.makeSuperUser(adminUsername); + + Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Now, let's allow anyone with a Dataverse account (any "random user") + // to create datasets in this dataverse: + + Response grantRole = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR, AuthenticatedUsers.get().getIdentifier(), adminApiToken); + grantRole.prettyPrint(); + assertEquals(OK.getStatusCode(), grantRole.getStatusCode()); + + Response createContributorUser = UtilIT.createRandomUser(); + String contributorUsername = UtilIT.getUsernameFromResponse(createContributorUser); + String contributorApiToken = UtilIT.getApiTokenFromResponse(createContributorUser); + + // First, we test listing role assignments on a dataverse which requires "ManageDataversePermissions" + + Response notPermittedToListRoleAssignmentOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, contributorApiToken); + assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode()); + + Response roleAssignmentsOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, adminApiToken); + roleAssignmentsOnDataverse.prettyPrint(); + assertEquals(OK.getStatusCode(), roleAssignmentsOnDataverse.getStatusCode()); + + // Second, we test listing role assignments on a dataset which requires "ManageDatasetPermissions" + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, contributorApiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + 
logger.info("dataset id: " + datasetId); + + Response datasetAsJson = UtilIT.nativeGet(datasetId, adminApiToken); + datasetAsJson.then().assertThat() + .statusCode(OK.getStatusCode()); + + String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier"); + assertEquals(10, identifier.length()); + + String protocol1 = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.protocol"); + String authority1 = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.authority"); + String identifier1 = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier"); + String datasetPersistentId = protocol1 + ":" + authority1 + "/" + identifier1; + + Response notPermittedToListRoleAssignmentOnDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, contributorApiToken); + assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataset.getStatusCode()); + + // We create a new role that includes "ManageDatasetPermissions" which are required for listing role assignments + // of a dataset and assign it to the contributor user + + String pathToJsonFile = "scripts/api/data/role-contributor-plus.json"; + Response addDataverseRoleResponse = UtilIT.addDataverseRole(pathToJsonFile, dataverseAlias, adminApiToken); + addDataverseRoleResponse.prettyPrint(); + String body = addDataverseRoleResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response giveRandoPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, "contributorPlus", "@" + contributorUsername, adminApiToken); + giveRandoPermission.prettyPrint(); + assertEquals(200, giveRandoPermission.getStatusCode()); + + // Contributor user should now be able to list dataset role assignments as well + + Response roleAssignmentsOnDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, contributorApiToken); + roleAssignmentsOnDataset.prettyPrint(); + assertEquals(OK.getStatusCode(), roleAssignmentsOnDataset.getStatusCode()); + + // ...but not dataverse role assignments + + notPermittedToListRoleAssignmentOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, contributorApiToken); + assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode()); + + // Finally, we clean up and delete the role we created + + Response deleteDataverseRoleResponse = UtilIT.deleteDataverseRole("contributorPlus", adminApiToken); + deleteDataverseRoleResponse.prettyPrint(); + body = deleteDataverseRoleResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + } + @Test public void testFileChecksum() { From 5978d71337fd1bfbecd42b7bd88b7b4193dbc6ad Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 26 Sep 2023 07:07:00 -0400 Subject: [PATCH 0489/1092] 9957- use ld+json --- doc/release-notes/9955-Signposting-updates.md | 2 +- doc/sphinx-guides/source/api/native-api.rst | 2 +- .../edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 3 ++- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- .../edu/harvard/iq/dataverse/util/SignpostingResources.java | 4 ++-- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md index bf0c7bc646b..92168231895 100644 --- a/doc/release-notes/9955-Signposting-updates.md +++ b/doc/release-notes/9955-Signposting-updates.md @@ -1 +1 @@ -This release 
fixes two issues (#9952, #9953) where the Signposting output did not match the Signposting specification. \ No newline at end of file +This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. \ No newline at end of file diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 56d245f97c0..e181a2a5546 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2196,7 +2196,7 @@ Signposting involves the addition of a `Link ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/json+ld", ;rel="type",;rel="type", https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/customlicense?persistentId=doi:10.5072/FK2/YD5QDG;rel="license", ; rel="linkset";type="application/linkset+json"`` +``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/ld+json", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"`` The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 620d4bf3e09..ce2b00086ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -500,7 +500,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep .setRetryHandler(new DefaultHttpRequestRetryHandler(3, false)) .build()) { HttpGet httpGet = new HttpGet(retrievalUri); - httpGet.addHeader("Accept", "application/json+ld, application/json"); + //application/json+ld is for backward compatibility + httpGet.addHeader("Accept", "application/ld+json, application/json+ld, application/json"); HttpResponse response = httpClient.execute(httpGet); String data = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 3b0bc3e0fcf..b9a104d8eaa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -272,7 +272,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html" }) + @Produces({"application/xml", "application/json", "application/html", "application/ld+json" }) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 21abd2d7034..1826689b892 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -71,7 +71,7 @@ public String getLinks() { String describedby = "<" + ds.getGlobalId().asURL().toString() + ">;rel=\"describedby\"" + ";type=\"" + 
"application/vnd.citationstyles.csl+json\""; describedby += ",<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" - + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/json+ld\""; + + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/ld+json\""; valueList.add(describedby); String type = ";rel=\"type\""; @@ -116,7 +116,7 @@ public JsonArrayBuilder getJsonLinkset() { systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() ).add( "type", - "application/json+ld" + "application/ld+json" ) ); JsonArrayBuilder linksetJsonObj = Json.createArrayBuilder(); From 89dfb012458a93ec563e9c81f9458c2919b23d51 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Sep 2023 10:57:23 -0400 Subject: [PATCH 0490/1092] #9589 fix update for dialog --- src/main/webapp/dataset.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index d7cc18e68a2..5d880e8108d 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -458,7 +458,7 @@
    [dataset.xhtml hunk body lost in extraction: per the diffstat, one line was changed to fix the update target of the dialog]
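A minimal sketch of the effect of PATCH 0484 above, assuming a published dataset on a demo server (the DOI below is a placeholder):

    curl -s "https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/linkset?persistentId=doi:10.5072/FK2/XXXXXX"

With the wrapper removed, the response body is the linkset object itself, of the form {"linkset": [ ... ]}, rather than an envelope containing a "data" element; the test change in PATCH 0491 below checks exactly this.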
  • From e3fbd0287392aa6652cf23f32f849c17812a4fd8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 26 Sep 2023 12:29:01 -0400 Subject: [PATCH 0491/1092] Update test --- src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java index 17eba4770f1..b41f62ae28f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java @@ -92,7 +92,7 @@ public void testSignposting() { String responseString = linksetResponse.getBody().asString(); - JsonObject data = JsonUtil.getJsonObject(responseString).getJsonObject("data"); + JsonObject data = JsonUtil.getJsonObject(responseString); JsonObject lso = data.getJsonArray("linkset").getJsonObject(0); System.out.println("Linkset: " + lso.toString()); From a8872056358186feff8ae7ba422c27b6449e188a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 26 Sep 2023 13:13:38 -0400 Subject: [PATCH 0492/1092] Fix merge issue --- src/main/java/edu/harvard/iq/dataverse/api/Files.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index ba1eea05fc0..82811162d52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -840,7 +840,7 @@ public Response getFixityAlgorithm() { public Response getFileDownloadCount(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { return response(req -> { DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFile.getId()).toString()); + return ok(guestbookResponseService.getDownloadCountByDataFileId(dataFile.getId()).toString()); }, getRequestUser(crc)); } From 3dd4564dc56a1132fcda7301a358e8f1f802752b Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 26 Sep 2023 18:29:42 +0100 Subject: [PATCH 0493/1092] Added: ignoreOriginalTabularSize optional query parameter to getDownloadSize datasets API endpoint --- .../iq/dataverse/DatasetServiceBean.java | 13 +++-- .../harvard/iq/dataverse/api/Datasets.java | 29 ++++++---- .../impl/GetDatasetStorageSizeCommand.java | 18 +++--- .../impl/GetDataverseStorageSizeCommand.java | 2 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 55 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 7 +++ 6 files changed, 98 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 52eb5868c35..4799502a6e3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -788,13 +788,13 @@ public void exportDataset(Dataset dataset, boolean forceReExport) { } } } - + } //get a string to add to save success message //depends on page (dataset/file) and user privleges public String getReminderString(Dataset dataset, boolean canPublishDataset, boolean filePage, boolean isValid) { - + String reminderString; if (canPublishDataset) { @@ -1015,12 +1015,12 @@ public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) { } public long findStorageSize(Dataset dataset) throws IOException { - return 
findStorageSize(dataset, false, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, false, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws IOException { - return findStorageSize(dataset, countCachedExtras, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, countCachedExtras, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } /** @@ -1028,6 +1028,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * * @param dataset * @param countCachedExtras boolean indicating if the cached disposable extras should also be counted + * @param countOriginalTabularSize boolean indicating if the size of the stored original tabular files should also be counted, in addition to the main tab-delimited file size * @param mode String indicating whether we are getting the result for storage (entire dataset) or download version based * @param version optional param for dataset version * @return total size @@ -1036,7 +1037,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * default mode, the method doesn't need to access the storage system, as the * sizes of the main files are recorded in the database) */ - public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { + public long findStorageSize(Dataset dataset, boolean countCachedExtras, boolean countOriginalTabularSize, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { long total = 0L; if (dataset.isHarvested()) { @@ -1062,7 +1063,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatas total += datafile.getFilesize(); if (!countCachedExtras) { - if (datafile.isTabularData()) { + if (datafile.isTabularData() && countOriginalTabularSize) { // count the size of the stored original, in addition to the main tab-delimited file: Long originalFileSize = datafile.getDataTable().getOriginalFileSize(); if (originalFileSize != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 62d87b198fe..a39347ef64e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2947,25 +2947,32 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup String nullCurrentMonth = null; return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country); } - + @GET @AuthRequired @Path("{identifier}/storagesize") - public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, true, 
GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); } - + @GET @AuthRequired @Path("{identifier}/versions/{versionId}/downloadsize") - public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - - return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))))), getRequestUser(crc)); + public Response getDownloadSize(@Context ContainerRequestContext crc, + @PathParam("identifier") String dvIdtf, + @PathParam("versionId") String version, + @QueryParam("ignoreOriginalTabularSize") boolean ignoreOriginalTabularSize, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { + Long datasetStorageSize = execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, !ignoreOriginalTabularSize, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))); + String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("message", message); + jsonObjectBuilder.add("storageSize", datasetStorageSize); + return ok(jsonObjectBuilder); + }, getRequestUser(crc)); } @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java index f1f27fdcee2..eebb8dd9e00 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java @@ -7,7 +7,6 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -15,6 +14,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.util.BundleUtil; + import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -32,47 +32,49 @@ public class GetDatasetStorageSizeCommand extends AbstractCommand { private final Dataset dataset; private final Boolean countCachedFiles; + private final Boolean countOriginalTabularSize; private final Mode mode; private final DatasetVersion version; public enum Mode { STORAGE, DOWNLOAD - }; + } public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target) { super(aRequest, target); dataset = target; countCachedFiles = false; + countOriginalTabularSize = true; mode = Mode.DOWNLOAD; version = null; } - public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean countCachedFiles, Mode mode, DatasetVersion version) { + public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean 
countCachedFiles, boolean countOriginalTabularSize, Mode mode, DatasetVersion version) { super(aRequest, target); dataset = target; this.countCachedFiles = countCachedFiles; + this.countOriginalTabularSize = countOriginalTabularSize; this.mode = mode; this.version = version; } @Override public Long execute(CommandContext ctxt) throws CommandException { - logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); - if (dataset == null) { // should never happen - must indicate some data corruption in the database throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.listing.error"), this); } + logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); + try { - return ctxt.datasets().findStorageSize(dataset, countCachedFiles, mode, version); + return ctxt.datasets().findStorageSize(dataset, countCachedFiles, countOriginalTabularSize, mode, version); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.datasize.ioerror"), this); } - } - + @Override public Map> getRequiredPermissions() { // for data file check permission on owning dataset diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java index 57912a6b4bd..9f93f6747ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java @@ -59,7 +59,7 @@ public Long execute(CommandContext ctxt) throws CommandException { } try { - total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("dataverse.datasize.ioerror"), this); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5c1eb66b63d..929882fe95a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3630,4 +3630,59 @@ public void deaccessionDataset() { deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, apiToken); deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } + + @Test + public void getDownloadSize() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Creating test text files + String testFileName1 = 
"test_1.txt"; + String testFileName2 = "test_2.txt"; + + int testFileSize1 = 50; + int testFileSize2 = 200; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[testFileSize1], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[testFileSize2], apiToken); + + int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; + + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); + + // Upload test tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get the original tabular file size + int tabularOriginalSize = Integer.parseInt(uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.filesize")); + + // Get the size ignoring the original tabular file sizes + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, true, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); + + int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); + // Assert that the size has been incremented with the last uploaded file + assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); + + // Get the size including the original tabular file sizes + int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + actualSizeIgnoringOriginalTabularSizes; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOriginalTabularSizes)); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8c6a2d6e75d..ecf26bd26ae 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3409,4 +3409,11 @@ static Response deaccessionDataset(Integer datasetId, String version, String api .body(jsonString) .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } + + static Response getDownloadSize(Integer datasetId, String version, boolean ignoreOriginalTabularSize, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("ignoreOriginalTabularSize", ignoreOriginalTabularSize) + .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); + } } From 9b4a4827a6173be91e6a8266d4ad12ad890e38b7 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:06:30 -0400 Subject: [PATCH 0494/1092] add links to dataset configure popup #9589 --- src/main/webapp/dataset.xhtml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 5d880e8108d..7cc6db65b2a 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1004,8 +1004,15 @@
    [dataset.xhtml hunk body lost in extraction: per the diffstat, eight lines were added and one removed to add the tool links to the configure popup]
    From 8a3d4c23c726e70210c4248b262fe030ac30ede3 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:07:11 -0400 Subject: [PATCH 0495/1092] better tool name in test, add URL to popup #9589 --- .../harvard/iq/dataverse/api/ExternalToolsIT.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index a9f6055fc9e..67fffadb488 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -277,13 +277,13 @@ public void testDatasetLevelToolConfigure() { String toolManifest = """ { - "displayName": "Turbo Dataset Config", - "description": "Read/write access.", + "displayName": "Dataset Configurator", + "description": "Slices! Dices! More info.", "types": [ "configure" ], "scope": "dataset", - "toolUrl": "http://datasettool1.com", + "toolUrl": "https://datasetconfigurator.com", "toolParameters": { "queryParameters": [ { @@ -301,15 +301,15 @@ public void testDatasetLevelToolConfigure() { addExternalTool.prettyPrint(); addExternalTool.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.displayName", CoreMatchers.equalTo("Turbo Dataset Config")); + .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator")); Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "configure", apiToken); getExternalToolsByDatasetId.prettyPrint(); getExternalToolsByDatasetId.then().assertThat() - .body("data[0].displayName", CoreMatchers.equalTo("Turbo Dataset Config")) + .body("data[0].displayName", CoreMatchers.equalTo("Dataset Configurator")) .body("data[0].scope", CoreMatchers.equalTo("dataset")) .body("data[0].types[0]", CoreMatchers.equalTo("configure")) - .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("http://datasettool1.com?datasetPid=" + datasetPid)) + .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("https://datasetconfigurator.com?datasetPid=" + datasetPid)) .statusCode(OK.getStatusCode()); } From b76e7ee86e0f5e272c13dbc55166041ac6fa9e86 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:10:05 -0400 Subject: [PATCH 0496/1092] fix typo about popup in docs #9589 --- doc/sphinx-guides/source/api/external-tools.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index ed68bb09ee0..d12e4b17549 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -88,7 +88,7 @@ Terminology displayName The **name** of the tool in the Dataverse installation web interface. For example, "Data Explorer". - description The **description** of the tool, which appears in a popup (for configure tools only) so the user who clicked the tool can learn about the tool before being redirected the tool in a new tab in their browser. HTML is supported. + description The **description** of the tool, which appears in a popup (for configure tools only) so the user who clicked the tool can learn about the tool before being redirected to the tool in a new tab in their browser. HTML is supported. scope Whether the external tool appears and operates at the **file** level or the **dataset** level. Note that a file level tool much also specify the type of file it operates on (see "contentType" below). 
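As a usage sketch for the manifests exercised above: a tool definition like the "Dataset Configurator" JSON can be registered through the admin API. This assumes a local installation at localhost:8080 and the manifest saved as datasetConfigurator.json (an arbitrary file name):

    # Register the external tool from its JSON manifest
    curl -X POST -H 'Content-type: application/json' --upload-file datasetConfigurator.json "http://localhost:8080/api/admin/externalTools"

    # List the registered tools to verify the addition
    curl "http://localhost:8080/api/admin/externalTools"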
From 5840a068cc0947f2d90cf52e4d8c3e118fd23c0e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:12:50 -0400 Subject: [PATCH 0497/1092] example tool: open link in new tab #9589 --- src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 67fffadb488..022747a3cdc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -278,7 +278,7 @@ public void testDatasetLevelToolConfigure() { String toolManifest = """ { "displayName": "Dataset Configurator", - "description": "Slices! Dices! More info.", + "description": "Slices! Dices! More info.", "types": [ "configure" ], From a50776237a606e3b22bd67e2380abfc1e4303851 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 14 Aug 2023 21:57:00 +0200 Subject: [PATCH 0498/1092] build(deps): update to Maven Docker Plugin 0.43.3 #9771 With this new version, a problem on M1 MACs not being able to build with Docker because of a non-existing config file was worked around by the DMP devs. See also: https://github.com/fabric8io/docker-maven-plugin/issues/1678 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index c45d59e4f5f..cf136820430 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -198,7 +198,7 @@ 1.7.0 - 0.43.0 + 0.43.3 From c93b180df40c37e2f0ca17816d713cf8205f98ba Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 18 Aug 2023 09:19:34 +0200 Subject: [PATCH 0499/1092] build(deps): update to Maven Docker Plugin 0.43.4 #9771 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index cf136820430..a7233cba164 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -198,7 +198,7 @@ 1.7.0 - 0.43.3 + 0.43.4 From f653c219d0ce6d7b1b1b3774b4820a05391c82d0 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:33:21 +0100 Subject: [PATCH 0500/1092] Changed: dataset version download size calculation when ignoring original tab file sizes --- .../edu/harvard/iq/dataverse/DatasetServiceBean.java | 9 ++++----- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 ++++-- .../command/impl/GetDatasetStorageSizeCommand.java | 7 ++----- .../command/impl/GetDataverseStorageSizeCommand.java | 2 +- .../java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 ++- 5 files changed, 13 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 4799502a6e3..30274efb384 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1015,12 +1015,12 @@ public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) { } public long findStorageSize(Dataset dataset) throws IOException { - return findStorageSize(dataset, false, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, false, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } public long findStorageSize(Dataset dataset, boolean 
countCachedExtras) throws IOException { - return findStorageSize(dataset, countCachedExtras, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + return findStorageSize(dataset, countCachedExtras, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } /** @@ -1028,7 +1028,6 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * * @param dataset * @param countCachedExtras boolean indicating if the cached disposable extras should also be counted - * @param countOriginalTabularSize boolean indicating if the size of the stored original tabular files should also be counted, in addition to the main tab-delimited file size * @param mode String indicating whether we are getting the result for storage (entire dataset) or download version based * @param version optional param for dataset version * @return total size @@ -1037,7 +1036,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws I * default mode, the method doesn't need to access the storage system, as the * sizes of the main files are recorded in the database) */ - public long findStorageSize(Dataset dataset, boolean countCachedExtras, boolean countOriginalTabularSize, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { + public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { long total = 0L; if (dataset.isHarvested()) { @@ -1063,7 +1062,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, boolean total += datafile.getFilesize(); if (!countCachedExtras) { - if (datafile.isTabularData() && countOriginalTabularSize) { + if (datafile.isTabularData()) { // count the size of the stored original, in addition to the main tab-delimited file: Long originalFileSize = datafile.getDataTable().getOriginalFileSize(); if (originalFileSize != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a39347ef64e..981cbced11e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2953,7 +2953,7 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup @Path("{identifier}/storagesize") public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); } @GET @@ -2966,7 +2966,9 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { - Long datasetStorageSize = execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, !ignoreOriginalTabularSize, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); + Long 
datasetStorageSize = ignoreOriginalTabularSize ? DatasetUtil.getDownloadSizeNumeric(datasetVersion, false) + : execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, datasetVersion)); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java index eebb8dd9e00..09b33c4efc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java @@ -32,7 +32,6 @@ public class GetDatasetStorageSizeCommand extends AbstractCommand { private final Dataset dataset; private final Boolean countCachedFiles; - private final Boolean countOriginalTabularSize; private final Mode mode; private final DatasetVersion version; @@ -45,16 +44,14 @@ public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target) { super(aRequest, target); dataset = target; countCachedFiles = false; - countOriginalTabularSize = true; mode = Mode.DOWNLOAD; version = null; } - public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean countCachedFiles, boolean countOriginalTabularSize, Mode mode, DatasetVersion version) { + public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, boolean countCachedFiles, Mode mode, DatasetVersion version) { super(aRequest, target); dataset = target; this.countCachedFiles = countCachedFiles; - this.countOriginalTabularSize = countOriginalTabularSize; this.mode = mode; this.version = version; } @@ -69,7 +66,7 @@ public Long execute(CommandContext ctxt) throws CommandException { logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); try { - return ctxt.datasets().findStorageSize(dataset, countCachedFiles, countOriginalTabularSize, mode, version); + return ctxt.datasets().findStorageSize(dataset, countCachedFiles, mode, version); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.datasize.ioerror"), this); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java index 9f93f6747ea..57912a6b4bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseStorageSizeCommand.java @@ -59,7 +59,7 @@ public Long execute(CommandContext ctxt) throws CommandException { } try { - total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, true, GetDatasetStorageSizeCommand.Mode.STORAGE, null); + total += ctxt.datasets().findStorageSize(dataset, countCachedFiles, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("dataverse.datasize.ioerror"), this); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 929882fe95a..580a1edb6f2 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3679,7 +3679,8 @@ public void getDownloadSize() throws IOException { assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); // Get the size including the original tabular file sizes - int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + actualSizeIgnoringOriginalTabularSizes; + int tabularProcessedSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; + int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + tabularProcessedSize + expectedTextFilesStorageSize; getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOriginalTabularSizes)); } From 9d10b99cdbb3487e08a308e0e6f1de7ff69cf913 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:40:23 +0100 Subject: [PATCH 0501/1092] Added: #9958 release notes --- .../9958-dataset-api-downloadsize-ignore-tabular-size.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md diff --git a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md new file mode 100644 index 00000000000..73b27a1a581 --- /dev/null +++ b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md @@ -0,0 +1,3 @@ +Added a new optional query parameter "ignoreOriginalTabularSize" to the "getDownloadSize" API endpoint ("api/datasets/{identifier}/versions/{versionId}/downloadsize"). + +If set to true, the endpoint will return the download size ignoring the original tabular file sizes. From 9710c79432cbc30a1f3222a2df2e423f6040ed0a Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:46:05 +0100 Subject: [PATCH 0502/1092] Added: mentioned ignoreOriginalTabularSize query parameter in the docs for /downloadsize API endpoint --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 663051c0884..169b950dc74 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1797,6 +1797,8 @@ The fully expanded example above (without environment variables) looks like this The size of all files available for download will be returned. If :draft is passed as versionId the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. Also restricted files will be included in this total regardless of whether the user has access to download the restricted file(s). +There is an optional query parameter ``ignoreOriginalTabularSize`` which, if set to true, makes the endpoint return the download size ignoring the sizes of the original tabular files. Otherwise, both the original and the processed size will be included in the count for tabular files.
+ Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 4aa34ffb417039b8132070270a246f8e4b4fedd3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 27 Sep 2023 10:50:42 +0100 Subject: [PATCH 0503/1092] Added: ignoreOriginalTabularSize query param usage example to the docs --- doc/sphinx-guides/source/api/native-api.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 169b950dc74..0f77aeba580 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1799,6 +1799,12 @@ If :draft is passed as versionId the token supplied must have permission to view There is an optional query parameter ``ignoreOriginalTabularSize`` which, if set to true, makes the endpoint return the download size ignoring the sizes of the original tabular files. Otherwise, both the original and the processed size will be included in the count for tabular files. +Usage example: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?ignoreOriginalTabularSize=true" + Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 6c7826f434399648194cd7ac54e232411a24d161 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 27 Sep 2023 10:27:27 -0400 Subject: [PATCH 0504/1092] Consolidate flyway scripts --- .../migration/V5.13.0.3__guestbook-on-request.sql | 12 ------------ .../V6.0.0.1__9599-guestbook-at-request.sql | 13 +++++++++++++ 2 files changed, 13 insertions(+), 12 deletions(-) delete mode 100644 src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql diff --git a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql deleted file mode 100644 index 7e05e792858..00000000000 --- a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql +++ /dev/null @@ -1,12 +0,0 @@ -ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS request_state VARCHAR(64); -ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS id SERIAL; -ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fileaccessrequests_pkey; -ALTER TABLE fileaccessrequests ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KEY (id); -ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS guestbookresponse_id INT; -ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fk_fileaccessrequests_guestbookresponse; -ALTER TABLE fileaccessrequests ADD CONSTRAINT fk_fileaccessrequests_guestbookresponse FOREIGN KEY (guestbookresponse_id) REFERENCES guestbookresponse(id); -DROP INDEX IF EXISTS created_requests; -CREATE UNIQUE INDEX created_requests ON fileaccessrequests (datafile_id, authenticated_user_id) WHERE request_state='CREATED'; - -ALTER TABLE dataverse ADD COLUMN IF NOT EXISTS guestbookatrequest bool; -ALTER TABLE dataset ADD COLUMN IF NOT EXISTS guestbookatrequest bool; diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql index 109963d8382..c90ee4a5329 100644 --- a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -1,3 +1,16 @@ +ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS request_state VARCHAR(64); +ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS id SERIAL; +ALTER TABLE
fileaccessrequests DROP CONSTRAINT IF EXISTS fileaccessrequests_pkey; +ALTER TABLE fileaccessrequests ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KEY (id); +ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS guestbookresponse_id INT; +ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fk_fileaccessrequests_guestbookresponse; +ALTER TABLE fileaccessrequests ADD CONSTRAINT fk_fileaccessrequests_guestbookresponse FOREIGN KEY (guestbookresponse_id) REFERENCES guestbookresponse(id); +DROP INDEX IF EXISTS created_requests; +CREATE UNIQUE INDEX created_requests ON fileaccessrequests (datafile_id, authenticated_user_id) WHERE request_state='CREATED'; + +ALTER TABLE dataverse ADD COLUMN IF NOT EXISTS guestbookatrequest bool; +ALTER TABLE dataset ADD COLUMN IF NOT EXISTS guestbookatrequest bool; + ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS eventtype VARCHAR(255); ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS sessionid VARCHAR(255); From c9c6cf26a1764bb5c409c4d25571984d0e5fbf80 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 27 Sep 2023 10:32:44 -0400 Subject: [PATCH 0505/1092] Add null check to avoid future issues --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7cb5bfa3850..7dba8af3fdc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2863,6 +2863,12 @@ public void sort() { public String refresh() { logger.fine("refreshing"); + //In v5.14, versionId was null here. In 6.0, it appears not to be. + //This check is to handle the null if it reappears/occurs under other circumstances + if(versionId==null) { + logger.fine("versionId was null in refresh"); + versionId = workingVersion.getId(); + } //dataset = datasetService.find(dataset.getId()); dataset = null; workingVersion = null; @@ -2872,10 +2878,9 @@ public String refresh() { DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null; if (versionId != null) { - // versionId must have been set by now, in the init() method, - // regardless of how the page was originally called - by the dataset - // database id, by the persistent identifier, or by the db id of - // the version. + // versionId must have been set by now (see null check above), in the init() + // method, regardless of how the page was originally called - by the dataset + // database id, by the persistent identifier, or by the db id of the version. this.workingVersion = datasetVersionService.findDeep(versionId); dataset = workingVersion.getDataset(); } From ba2dd8400c128cefd22f7d83d52771760d477905 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 27 Sep 2023 10:39:26 -0400 Subject: [PATCH 0506/1092] warn if null cases still occur --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7dba8af3fdc..74064f20893 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2866,7 +2866,7 @@ public String refresh() { //In v5.14, versionId was null here. In 6.0, it appears not to be. 
//This check is to handle the null if it reappears/occurs under other circumstances if(versionId==null) { - logger.fine("versionId was null in refresh"); + logger.warning("versionId was null in refresh"); versionId = workingVersion.getId(); } //dataset = datasetService.find(dataset.getId()); @@ -6150,6 +6150,9 @@ public String getWebloaderUrlForDataset(Dataset d) { public String getSignpostingLinkHeader() { if ((workingVersion==null) || (!workingVersion.isReleased())) { + if(workingVersion==null) { + logger.warning("workingVersion was null in getSignpostingLinkHeader"); + } return null; } if (signpostingLinkHeader == null) { From 16322dfd2ae3f288d0f5505f9e892b3370cb751a Mon Sep 17 00:00:00 2001 From: Kevin Condon Date: Wed, 27 Sep 2023 14:35:30 -0400 Subject: [PATCH 0507/1092] Create 9002_allow_direct_upload_setting.md Adding release notes snippet, directly taken from installation guide. --- doc/release-notes/9002_allow_direct_upload_setting.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9002_allow_direct_upload_setting.md diff --git a/doc/release-notes/9002_allow_direct_upload_setting.md b/doc/release-notes/9002_allow_direct_upload_setting.md new file mode 100644 index 00000000000..581da5592da --- /dev/null +++ b/doc/release-notes/9002_allow_direct_upload_setting.md @@ -0,0 +1,3 @@ +A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. +By default, Dataverse supports uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). +With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the :ref:`Adding the Uploaded file to the Dataset ` API call (described in the :doc:`/developers/s3-direct-upload-api` page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. From 3ecd118d97fb3e6869e4caab9f21b060a3130d2a Mon Sep 17 00:00:00 2001 From: Kevin Condon Date: Wed, 27 Sep 2023 15:46:44 -0400 Subject: [PATCH 0508/1092] Update doc/release-notes/9002_allow_direct_upload_setting.md Co-authored-by: Philip Durbin --- doc/release-notes/9002_allow_direct_upload_setting.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/9002_allow_direct_upload_setting.md b/doc/release-notes/9002_allow_direct_upload_setting.md index 581da5592da..1e76ed4ad47 100644 --- a/doc/release-notes/9002_allow_direct_upload_setting.md +++ b/doc/release-notes/9002_allow_direct_upload_setting.md @@ -1,3 +1,5 @@ -A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -By default, Dataverse supports uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). -With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the :ref:`Adding the Uploaded file to the Dataset ` API call (described in the :doc:`/developers/s3-direct-upload-api` page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. 
+A Dataverse installation can now be configured to allow out-of-band upload by setting the `dataverse.files..upload-out-of-band` JVM option to `true`. + +By default, Dataverse supports uploading files via the [add a file to a dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server). + +With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://dataverse-guide--9003.org.readthedocs.build/en/9003/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store. From 448ae448ff1cb36f10b30449694126a866c28643 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 28 Sep 2023 11:13:52 +0100 Subject: [PATCH 0509/1092] Added: JSON payload to curl examples for Deaccession Dataset docs --- doc/sphinx-guides/source/api/native-api.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 663051c0884..01a681cfb6a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1375,14 +1375,15 @@ Given a version of a dataset, updates its status to deaccessioned. export SERVER_URL=https://demo.dataverse.org export ID=24 export VERSIONID=1.0 + export JSON='{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From a8883981daa5d84d4553150804fe59942886d069 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 28 Sep 2023 13:36:19 +0200 Subject: [PATCH 0510/1092] always_add_validity_field_to_solr_doc --- .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index d6d0be7a17b..04bc824c4b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -811,9 +811,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Thu, 28 Sep 2023 14:33:45 +0200 Subject: [PATCH 0511/1092] doc clarification --- doc/sphinx-guides/source/api/native-api.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 56d245f97c0..1cc462ee02a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -525,10 +525,16 @@ Submit Incomplete Dataset ^^^^^^^^^^^^^^^^^^^^^^^^^ **Note:** This feature requires :ref:`dataverse.api.allow-incomplete-metadata` to be enabled and your Solr -Schema to be up-to-date with the ``datasetValid`` field. +Schema to be up-to-date with the ``datasetValid`` field. If this was not already done as part of the version upgrade, you will +also need to reindex all datasets after enabling the :ref:`dataverse.api.allow-incomplete-metadata` feature. Providing a ``.../datasets?doNotValidate=true`` query parameter turns off the validation of metadata. -In this case, only the "Author Name" is required. For example, a minimal JSON file would look like this: +In this situation, only the "Author Name" is required, except when the setting :ref:`:MetadataLanguages` +is configured and the "Dataset Metadata Language" setting of a collection is left with the default +"Chosen at Dataset Creation" value. In that case, a language that is part of the :ref:`:MetadataLanguages` list must be +declared in the incomplete dataset. + +For example, a minimal JSON file, without the language specification, would look like this: .. code-block:: json :name: dataset-incomplete.json From 3d7a2dee22636a9aaba0f5c3a144b3a2b05e25c3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Sep 2023 10:52:24 -0400 Subject: [PATCH 0512/1092] #9913 show TOA on manage templates preview --- src/main/webapp/dataset-license-terms.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index c5958697a20..b998442f2aa 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -238,7 +238,7 @@ -

    +
    + styleClass="largePopUp" widgetVar="deleteConfirmation" modal="true" focus="contDeleteTemplateBtn">

     

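A usage sketch for the ``doNotValidate`` parameter described in the Submit Incomplete Dataset documentation above; the create-dataset endpoint and headers follow the guide's other examples, while the collection alias ``root`` and the file name ``dataset-incomplete.json`` are placeholder assumptions:

.. code-block:: bash

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    export SERVER_URL=https://demo.dataverse.org

    # Create a dataset in the collection "root" (placeholder alias),
    # skipping metadata validation of the incomplete JSON payload
    curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST \
      "$SERVER_URL/api/dataverses/root/datasets?doNotValidate=true" \
      --upload-file dataset-incomplete.json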
    From 28df26c7d9f88b4c594e21bfcf1fbd4b695dc1b1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Sep 2023 16:34:20 -0400 Subject: [PATCH 0514/1092] #9913 make request access true on create template also fix various render issues. --- .../harvard/iq/dataverse/TemplatePage.java | 1 + src/main/webapp/dataset-license-terms.xhtml | 25 +++++++++++-------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java index fff520fd259..44070dcbb41 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java @@ -148,6 +148,7 @@ public String init() { editMode = TemplatePage.EditMode.CREATE; template = new Template(this.dataverse, settingsWrapper.getSystemMetadataBlocks()); TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); + terms.setFileAccessRequest(true); terms.setTemplate(template); terms.setLicense(licenseServiceBean.getDefault()); template.setTermsOfUseAndAccess(terms); diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index b998442f2aa..c54d94442ea 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -7,6 +7,13 @@ xmlns:o="http://omnifaces.org/ui" xmlns:jsf="http://xmlns.jcp.org/jsf"> + +
    @@ -238,17 +245,13 @@
    -
    +
     
    -
    +
    @@ -267,7 +270,7 @@
    -
    +
    -
    +
    -

    +

    -

    +

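As a usage sketch for the ``upload-out-of-band`` option from the release note above: it is a per-store JVM option, so enabling it for a hypothetical store with id ``s3`` could look like this (the store id is a placeholder assumption):

.. code-block:: bash

    # Enable out-of-band upload for the store with id "s3" (placeholder)
    ./asadmin create-jvm-options "-Ddataverse.files.s3.upload-out-of-band=true"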
    From 29e56bbe5a3e0ec8f80d98e8d858665d6b63250d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 28 Sep 2023 17:17:16 -0400 Subject: [PATCH 0515/1092] saving temp. changes --- .../edu/harvard/iq/dataverse/EditDatafilesPage.java | 10 ++-------- .../engine/command/impl/CreateNewDataFilesCommand.java | 6 +++++- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 4144112f997..7b8f0355f4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2187,16 +2187,10 @@ public void handleExternalUpload() { List datafiles = new ArrayList<>(); // ----------------------------------------------------------- - // Send it through the ingest service + // Execute the CreateNewDataFiles command: // ----------------------------------------------------------- try { - - // Note: A single uploaded file may produce multiple datafiles - - // for example, multiple files can be extracted from an uncompressed - // zip file. - //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); - + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index e7fc30bdd80..6fa3c721c38 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -648,15 +648,19 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // if we were unable to unpack an uploaded file, etc.), we'll just // create and return a single DataFile: File newFile = null; + long fileSize = -1; + if (tempFile != null) { newFile = tempFile.toFile(); + fileSize = newFile.length(); + } else { + // @todo! What do we do if this is direct upload?? where does the size come from? } // We have already checked that this file does not exceed the individual size limit; // but if we are processing it as is, as a single file, we need to check if // its size does not go beyond the allocated storage quota (if specified): - long fileSize = newFile.length(); if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try { From 7ab736c05a2aac0b7137861d46a6224ed561e228 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 29 Sep 2023 15:29:46 -0400 Subject: [PATCH 0516/1092] Fixes direct upload from the UI. 
(#9361) --- .../iq/dataverse/EditDatafilesPage.java | 21 +++++- .../impl/CreateNewDataFilesCommand.java | 66 +++++++++++-------- 2 files changed, 57 insertions(+), 30 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 7b8f0355f4c..a942830b19e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2068,7 +2068,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // dataset that does not yet exist in the database. We must // use the version of the Create New Files constructor that takes // the parent Dataverse as the extra argument: - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, workingVersion.getDataset().getOwner()); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, null, workingVersion.getDataset().getOwner()); } else { cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null); } @@ -2168,6 +2168,11 @@ public void handleExternalUpload() { - Max size NOT specified in db: default is unlimited - Max size specified in db: check too make sure file is within limits // ---------------------------- */ + /** + * @todo: this size check is probably redundant here, since the new + * CreateNewFilesCommand is going to perform it (and the quota + * checks too, if enabled + */ if ((!this.isUnlimitedUploadFileSize()) && (fileSize > this.getMaxFileUploadSizeInBytes())) { String warningMessage = "Uploaded file \"" + fileName + "\" exceeded the limit of " + fileSize + " bytes and was not uploaded."; sio.delete(); @@ -2189,9 +2194,21 @@ public void handleExternalUpload() { // ----------------------------------------------------------- // Execute the CreateNewDataFiles command: // ----------------------------------------------------------- + + Dataverse parent = null; + + if (mode == FileEditMode.CREATE) { + // This is a file upload in the context of creating a brand new + // dataset that does not yet exist in the database. We must + // pass the parent Dataverse to the CreateNewFiles command + // constructor. The RequiredPermission on the command in this + // scenario = Permission.AddDataset on the parent dataverse. 
+ parent = workingVersion.getDataset().getOwner(); + } + try { - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType, fileSize, parent); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 6fa3c721c38..ac701da1be9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -74,34 +74,29 @@ public class CreateNewDataFilesCommand extends AbstractCommand storageQuotaLimit) { - try { - tempFile.toFile().delete(); - } catch (Exception ex) { - // ignore - but log a warning - logger.warning("Could not remove temp file " + tempFile.getFileName()); + if (newFile != null) { + // Remove the temp. file, if this is a non-direct upload. + // If this is a direct upload, it will be a responsibility of the + // component calling the command to remove the file that may have + // already been saved in the S3 volume. + try { + newFile.delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } } throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)), this); } DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); - File f = null; - if (tempFile != null) { - f = tempFile.toFile(); - } - if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { + + if (datafile != null && ((newFile != null) || (newStorageIdentifier != null))) { if (warningMessage != null) { createIngestFailureReport(datafile, warningMessage); datafile.SetIngestProblem(); } + if (datafile.getFilesize() < 0) { + datafile.setFilesize(fileSize); + } datafiles.add(datafile); // Update quota (may not be necessary in the context of direct upload - ?) 
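The ``handleExternalUpload()`` changes above have an API-side counterpart: once a file has been placed in the store out of band (for example, by a direct S3 upload), it still has to be registered with the dataset via the "Adding the Uploaded file to the Dataset" call referenced in the release notes earlier. A hedged sketch, with the persistent identifier, storage identifier, file name, and checksum all being placeholder values:

.. code-block:: bash

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    export SERVER_URL=https://demo.dataverse.org
    export PERSISTENT_ID=doi:10.5072/FK2/XXXXXX  # placeholder

    # Register an already-uploaded file with the dataset (all jsonData values are placeholders)
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
      "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID" \
      -F 'jsonData={"storageIdentifier":"s3://demo-bucket:1803e4af0a1-8f88f2aaf357", "fileName":"file1.txt", "mimeType":"text/plain", "checksum":{"@type":"SHA-1", "@value":"123456a1aa22"}}'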
@@ -701,7 +711,7 @@ public Map> getRequiredPermissions() { ret.put("", new HashSet<>()); - if (dataverse != null) { + if (parentDataverse != null) { // The command is called in the context of uploading files on // create of a new dataset ret.get("").add(Permission.AddDataset); From 7881919ede378886fe6283086a02a4dfea8989a8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 29 Sep 2023 16:07:09 -0400 Subject: [PATCH 0517/1092] Update native-api.rst misplaced quote marks --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 176cb36e288..b9b0bd32aec 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2283,7 +2283,7 @@ To set the behavior for this dataset: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG - curl -X PUT "-H:X-Dataverse-key:$API_TOKEN" -H Content-type:application/json -d true "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" + curl -X PUT -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json -d true "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" This example uses true to set the behavior to guestbook at request. Note that this call will return a 403/Forbidden response if guestbook at request functionality is not enabled for this Dataverse instance. @@ -2296,7 +2296,7 @@ The API can also be used to reset the dataset to use the default/inherited value export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG - curl -X DELETE "-H:X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" + curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" From 907fd4024c8df2218764fd0902d1242a37726f7e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 10:48:36 +0100 Subject: [PATCH 0518/1092] Changed: using query-based implementation for files download size --- .../DatasetVersionFilesServiceBean.java | 57 +++++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 12 +++- .../harvard/iq/dataverse/api/DatasetsIT.java | 30 +++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 4 +- 4 files changed, 89 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index a547a216ad5..66e0ec5b5fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.QDataFileCategory; +import edu.harvard.iq.dataverse.QDataTable; import edu.harvard.iq.dataverse.QDvObject; import edu.harvard.iq.dataverse.QEmbargo; import edu.harvard.iq.dataverse.QFileMetadata; @@ -36,6 +37,7 @@ public class DatasetVersionFilesServiceBean implements Serializable { private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; private final QDvObject dvObject = QDvObject.dvObject; private final QDataFileCategory dataFileCategory = 
QDataFileCategory.dataFileCategory; + private final QDataTable dataTable = QDataTable.dataTable; /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} @@ -51,6 +53,19 @@ public enum DataFileAccessStatus { Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic } + /** + * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileDownloadSizeMode)} + *
<p>
    + * All: Includes both archival and original sizes for tabular files + * Archival: Includes only the archival size for tabular files + * Original: Includes only the original size for tabular files + *
<p>
    + * All the modes include archival sizes for non-tabular files + */ + public enum FileDownloadSizeMode { + All, Original, Archival + } + /** * Given a DatasetVersion, returns its total file metadata count * @@ -159,6 +174,23 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege return baseQuery.fetch(); } + /** + * Returns the total download size of all files for a particular DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param mode a FileDownloadSizeMode to base the search on + * @return long value of total file download size + */ + public long getFilesDownloadSize(DatasetVersion datasetVersion, FileDownloadSizeMode mode) { + return switch (mode) { + case All -> + Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, false)); + case Original -> + Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, true)); + case Archival -> getArchivalFilesSize(datasetVersion, false); + }; + } + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); if (fileMetadataCount > 0) { @@ -230,4 +262,29 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu break; } } + + private long getOriginalTabularFilesSize(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + Long result = queryFactory + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) + .from(dataTable) + .where(fileMetadata.dataFile.dataTables.isNotEmpty().and(dataTable.dataFile.eq(fileMetadata.dataFile))) + .select(dataTable.originalFileSize.sum()).fetchFirst(); + return (result == null) ? 0 : result; + } + + private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + Long result; + if (ignoreTabular) { + result = baseQuery.where(fileMetadata.dataFile.dataTables.isEmpty()).select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + } else { + result = baseQuery.select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + } + return (result == null) ? 0 : result; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 981cbced11e..80a2dac9568 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2962,13 +2962,19 @@ public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam( public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @QueryParam("ignoreOriginalTabularSize") boolean ignoreOriginalTabularSize, + @QueryParam("mode") String mode, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode; + try { + fileDownloadSizeMode = mode != null ? 
DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); + } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); - Long datasetStorageSize = ignoreOriginalTabularSize ? DatasetUtil.getDownloadSizeNumeric(datasetVersion, false) - : execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, datasetVersion)); + long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 580a1edb6f2..189cf3a6f5a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3632,7 +3632,7 @@ public void deaccessionDataset() { } @Test - public void getDownloadSize() throws IOException { + public void getDownloadSize() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3658,7 +3658,8 @@ public void getDownloadSize() throws IOException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + // Get the total size when there are no tabular files + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3670,20 +3671,31 @@ public void getDownloadSize() throws IOException { // Get the original tabular file size int tabularOriginalSize = Integer.parseInt(uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.filesize")); - // Get the size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, true, apiToken); + // Ensure tabular file is ingested + Thread.sleep(2000); + + // Get the total size ignoring the original tabular file sizes + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); + // Assert that the size has been incremented with the last uploaded file assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); - // Get the size including the original tabular file sizes - int tabularProcessedSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; - int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + 
tabularProcessedSize + expectedTextFilesStorageSize; + // Get the total size including only original sizes and ignoring archival sizes for tabular files + int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; + int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) - .body("data.storageSize", equalTo(expectedSizeIncludingOriginalTabularSizes)); + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ecf26bd26ae..f9f3dc9be8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3410,10 +3410,10 @@ static Response deaccessionDataset(Integer datasetId, String version, String api .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, boolean ignoreOriginalTabularSize, String apiToken) { + static Response getDownloadSize(Integer datasetId, String version, String mode, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("ignoreOriginalTabularSize", ignoreOriginalTabularSize) + .queryParam("mode", mode) .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); } } From a5c32bd1b11f4385926f9abc53578e6b48c05adc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 10:53:45 +0100 Subject: [PATCH 0519/1092] Added: error case to getDownloadSize IT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 189cf3a6f5a..ee3355096b8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3697,5 +3697,11 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size sending invalid file download size mode + String invalidMode = "invalidMode"; + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); + 
getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid mode: " + invalidMode)); } } From 131cd8f83473e9919e871723551eb441b6f27c3e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 11:22:44 +0100 Subject: [PATCH 0520/1092] Added: multiple tab files test case for getDownloadSize IT --- .../harvard/iq/dataverse/api/DatasetsIT.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index ee3355096b8..829c19c6440 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3703,5 +3703,23 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); + + // Upload second test tabular file (same source as before) + uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get the total size including only original sizes and ignoring archival sizes for tabular files + expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } } From d1d5eed683dfc8d04cdf832d5c40d4947821b8c3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Oct 2023 12:34:33 +0200 Subject: [PATCH 0521/1092] style(oidc): apply language fixes by @pdurbin from review #9268 Co-authored-by: Philip Durbin --- doc/release-notes/9268-8349-oidc-improvements.md | 15 ++++++++++++++- doc/sphinx-guides/source/installation/oidc.rst | 10 +++++----- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/9268-8349-oidc-improvements.md b/doc/release-notes/9268-8349-oidc-improvements.md index cb0a9685c69..34e45a98101 100644 --- a/doc/release-notes/9268-8349-oidc-improvements.md +++ b/doc/release-notes/9268-8349-oidc-improvements.md @@ -5,7 +5,7 @@ With this release it is possible to provision a single OIDC-based authentication provider by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. 
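One way to provision the new ``dataverse.auth.oidc.*`` options listed further below is via environment variables; a sketch with placeholder values, assuming the standard MicroProfile Config mapping of dots to underscores:

```bash
# Placeholder values for a hypothetical OIDC provider
export DATAVERSE_AUTH_OIDC_ENABLED=true
export DATAVERSE_AUTH_OIDC_CLIENT_ID=my-client
export DATAVERSE_AUTH_OIDC_CLIENT_SECRET=my-secret
export DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=https://idp.example.org/realms/demo
export DATAVERSE_AUTH_OIDC_PKCE_ENABLED=true
```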
-If you are using an external OIDC provider component as identity management system and/or broker +If you are using an external OIDC provider component as an identity management system and/or broker to other authentication providers such as Google, eduGain SAML and so on, this might make your life easier during instance setups and reconfiguration. You no longer need to generate the necessary JSON file. @@ -26,3 +26,16 @@ The testing and development Keycloak realm has been updated with more users and The support for setting JVM options during testing has been improved for developers. You may now add the `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. + +## New Configuration Options + +- dataverse.auth.oidc.enabled +- dataverse.auth.oidc.client-id +- dataverse.auth.oidc.client-secret +- dataverse.auth.oidc.auth-server-url +- dataverse.auth.oidc.pkce.enabled +- dataverse.auth.oidc.pkce.method +- dataverse.auth.oidc.title +- dataverse.auth.oidc.subtitle +- dataverse.auth.oidc.pkce.max-cache-size +- dataverse.auth.oidc.pkce.max-cache-age diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index e036e9c8470..4a0c77006e0 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -78,14 +78,14 @@ Enabling PKCE Security Many providers these days support or even require the usage of `PKCE `_ to safeguard against some attacks and enable public clients that cannot have a secure secret to still use OpenID Connect (or OAuth2). -The Dataverse built OIDC client can be enabled to use PKCE and which method to use when creating the code challenge. +The Dataverse-built OIDC client can be configured to use PKCE and the method to use when creating the code challenge can be specified. See also `this explanation of the flow `_ for details on how this works. As we are using the `Nimbus SDK `_ as our client -library, we support the standard ``PLAIN`` and ``S256`` code challenge methods. "SHA-256 method" is the default +library, we support the standard ``PLAIN`` and ``S256`` (SHA-256) code challenge methods. "SHA-256 method" is the default as recommended in `RFC7636 `_. If your provider needs some -other method (unlikely), please open an issue. +other method, please open an issue. The provisioning sections below include example parameters you may use to configure PKCE. @@ -169,11 +169,11 @@ The following options are available: - N - ``OpenID Connect`` * - ``dataverse.auth.oidc.pkce.max-cache-size`` - - Tune the maximum size of all OIDC providers' verifier cache (= number of outstanding PKCE-enabled auth responses). + - Tune the maximum size of all OIDC providers' verifier cache (the number of outstanding PKCE-enabled auth responses). - N - 10000 * - ``dataverse.auth.oidc.pkce.max-cache-age`` - - Tune the maximum age of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to lifetime + - Tune the maximum age, in seconds, of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to lifetime of many OIDC access tokens. 
- N - 300 \ No newline at end of file From 42d181216fd042d378768763209805f77e659149 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Oct 2023 12:53:20 +0200 Subject: [PATCH 0522/1092] build(oidc): upgrade to Keycloak 22 in OIDC integration test #9268 --- pom.xml | 2 +- .../oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 13a834e9a67..e70b723cad5 100644 --- a/pom.xml +++ b/pom.xml @@ -609,7 +609,7 @@ com.github.dasniko testcontainers-keycloak - 2.5.0 + 3.0.0 test 0.43.4 - - - 5.0.0 diff --git a/pom.xml b/pom.xml index 909e9ee9b80..e70b723cad5 100644 --- a/pom.xml +++ b/pom.xml @@ -252,20 +252,6 @@ expressly provided - - - com.querydsl - querydsl-apt - ${querydsl.version} - jakarta - provided - - - com.querydsl - querydsl-jpa - ${querydsl.version} - jakarta - commons-io diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index 351c4032939..f4f66d3c874 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -58,7 +58,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa private static final Map TagTypeToLabels = new HashMap<>(); - public static final Map TagLabelToTypes = new HashMap<>(); + private static final Map TagLabelToTypes = new HashMap<>(); static { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java deleted file mode 100644 index 6006d937100..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ /dev/null @@ -1,241 +0,0 @@ -package edu.harvard.iq.dataverse; - -import edu.harvard.iq.dataverse.QDataFileCategory; -import edu.harvard.iq.dataverse.QDataFileTag; -import edu.harvard.iq.dataverse.QDvObject; -import edu.harvard.iq.dataverse.QEmbargo; -import edu.harvard.iq.dataverse.QFileMetadata; - -import com.querydsl.core.Tuple; -import com.querydsl.core.types.dsl.BooleanExpression; -import com.querydsl.core.types.dsl.CaseBuilder; -import com.querydsl.core.types.dsl.DateExpression; -import com.querydsl.core.types.dsl.DateTimeExpression; - -import com.querydsl.jpa.impl.JPAQuery; -import com.querydsl.jpa.impl.JPAQueryFactory; - -import jakarta.ejb.Stateless; -import jakarta.inject.Named; -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; - -import java.io.Serializable; -import java.sql.Timestamp; -import java.time.LocalDate; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; - -@Stateless -@Named -public class DatasetVersionFilesServiceBean implements Serializable { - - @PersistenceContext(unitName = "VDCNet-ejbPU") - private EntityManager em; - - private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; - private final QDvObject dvObject = QDvObject.dvObject; - private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; - private final QDataFileTag dataFileTag = QDataFileTag.dataFileTag; - - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, NameZA, Newest, Oldest, Size, Type - } - - /** - * Status of the particular DataFile based on 
active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum DataFileAccessStatus { - Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic - } - - /** - * Given a DatasetVersion, returns its total file metadata count - * - * @param datasetVersion the DatasetVersion to access - * @return long value of total file metadata count - */ - public long getFileMetadataCount(DatasetVersion datasetVersion) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); - } - - /** - * Given a DatasetVersion, returns its file metadata count per content type - * - * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per content type - */ - public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List contentTypeOccurrences = queryFactory - .select(fileMetadata.dataFile.contentType, fileMetadata.count()) - .from(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - .groupBy(fileMetadata.dataFile.contentType).fetch(); - Map result = new HashMap<>(); - for (Tuple occurrence : contentTypeOccurrences) { - result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); - } - return result; - } - - /** - * Given a DatasetVersion, returns its file metadata count per category name - * - * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per category name - */ - public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List categoryNameOccurrences = queryFactory - .select(dataFileCategory.name, fileMetadata.count()) - .from(dataFileCategory, fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) - .groupBy(dataFileCategory.name).fetch(); - Map result = new HashMap<>(); - for (Tuple occurrence : categoryNameOccurrences) { - result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); - } - return result; - } - - /** - * Given a DatasetVersion, returns its file metadata count per DataFileAccessStatus - * - * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per DataFileAccessStatus - */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { - Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); - return allCounts; - } - - /** - * Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param contentType for retrieving only files with this content type - * @param accessStatus for retrieving only files with this DataFileAccessStatus - * @param categoryName for retrieving only files categorized with this category 
name - * @param tabularTagName for retrieving only files categorized with this tabular tag name - * @param searchText for retrieving only files that contain the specified text within their labels or descriptions - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list from the specified DatasetVersion - */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { - JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); - - if (contentType != null) { - baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); - } - if (accessStatus != null) { - baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); - } - if (categoryName != null) { - baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); - } - if (tabularTagName != null) { - baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); - } - if (searchText != null && !searchText.isEmpty()) { - searchText = searchText.trim().toLowerCase(); - baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); - } - - applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); - - if (limit != null) { - baseQuery.limit(limit); - } - if (offset != null) { - baseQuery.offset(offset); - } - - return baseQuery.fetch(); - } - - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { - long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); - if (fileMetadataCount > 0) { - totalCounts.put(dataFileAccessStatus, fileMetadataCount); - } - } - - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory - .selectFrom(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) - .stream().count(); - } - - private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - if (orderCriteria == FileMetadatasOrderCriteria.Newest || orderCriteria == FileMetadatasOrderCriteria.Oldest) { - baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); - } - return baseQuery; - } - - private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { - QEmbargo embargo = fileMetadata.dataFile.embargo; - BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); - BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); - BooleanExpression accessStatusExpression; - switch (accessStatus) { - case EmbargoedThenRestricted: - accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); - 
break; - case EmbargoedThenPublic: - accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); - break; - case Restricted: - accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); - break; - case Public: - accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); - break; - default: - throw new IllegalStateException("Unexpected value: " + accessStatus); - } - return accessStatusExpression; - } - - private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { - DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); - switch (orderCriteria) { - case NameZA: - query.orderBy(fileMetadata.label.desc()); - break; - case Newest: - query.orderBy(orderByLifetimeExpression.desc()); - break; - case Oldest: - query.orderBy(orderByLifetimeExpression.asc()); - break; - case Size: - query.orderBy(fileMetadata.dataFile.filesize.asc()); - break; - case Type: - query.orderBy(fileMetadata.dataFile.contentType.asc()); - break; - default: - query.orderBy(fileMetadata.label.asc()); - break; - } - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 5c43001dcb5..6f087f9eabc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,6 +49,22 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" + + " WHERE fm.datasetVersion.id=:datasetVersionId" + + " ORDER BY fm.label"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = dvo.id" + + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.filesize"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.contentType"; + @EJB DatasetServiceBean datasetService; @@ -150,6 +166,18 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, + NameZA, + Newest, + Oldest, + Size, + Type + } + public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1224,4 +1252,50 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the 
DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a FileMetadata list of the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { + TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) + .setParameter("datasetVersionId", datasetVersion.getId()); + if (limit != null) { + query.setMaxResults(limit); + } + if (offset != null) { + query.setFirstResult(offset); + } + return query.getResultList(); + } + + private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { + String queryString; + switch (orderCriteria) { + case NameZA: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; + break; + case Newest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; + break; + case Oldest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; + break; + case Size: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; + break; + case Type: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; + break; + default: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; + break; + } + return queryString; + } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 3c226e68472..1aa3f4ffde6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1681,47 +1681,7 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); } } - - @GET - @AuthRequired - @Path("/datafile/{id}/userFileAccessRequested") - public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - AuthenticatedUser requestAuthenticatedUser; - try { - dataFile = findDataFileOrDie(dataFileId); - requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - boolean fileAccessRequested = false; - List requests = dataFile.getFileAccessRequests(); - for (FileAccessRequest fileAccessRequest : requests) { - if (fileAccessRequest.getRequester().getId().equals(requestAuthenticatedUser.getId())) { - fileAccessRequested = true; - break; - } - } - return ok(fileAccessRequested); - } - - @GET - @AuthRequired - @Path("/datafile/{id}/userPermissions") - public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - try { - dataFile = findDataFileOrDie(dataFileId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); - jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); - return ok(jsonObjectBuilder); - } - + // checkAuthorization is a convenience method; it calls the 
boolean method // isAccessAuthorized(), the actual workhorse, tand throws a 403 exception if not. @@ -1989,4 +1949,21 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b3be55399d8..704ec8f1989 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -236,9 +236,6 @@ public class Datasets extends AbstractApiBean { @Inject PrivateUrlServiceBean privateUrlService; - @Inject - DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; - /** * Used to consolidate the way we parse and handle dataset versions. * @param @@ -487,56 +484,23 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" : ok(json(dsv)); }, getRequestUser(crc)); } - + @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, - @PathParam("id") String datasetId, - @PathParam("versionId") String versionId, - @QueryParam("limit") Integer limit, - @QueryParam("offset") Integer offset, - @QueryParam("contentType") String contentType, - @QueryParam("accessStatus") String accessStatus, - @QueryParam("categoryName") String categoryName, - @QueryParam("tabularTagName") String tabularTagName, - @QueryParam("searchText") String searchText, - @QueryParam("orderCriteria") String orderCriteria, - @Context UriInfo uriInfo, - @Context HttpHeaders headers) { - return response(req -> { + public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response( req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; - try { - dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; - } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); - } - return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); + return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); }, getRequestUser(crc)); } - - @GET - @AuthRequired - @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); - jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); - jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); - jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); - return ok(jsonObjectBuilder); - }, getRequestUser(crc)); - } - + @GET @AuthRequired @Path("{id}/dirindex") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 8a9abe68d85..82811162d52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -4,7 +4,6 @@ import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -55,7 +54,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -65,12 +63,15 @@ import jakarta.ejb.EJBException; import jakarta.inject.Inject; import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonString; -import jakarta.json.JsonValue; -import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.*; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.container.ContainerRequestContext; import 
jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.HttpHeaders; @@ -111,8 +112,6 @@ public class Files extends AbstractApiBean { MakeDataCountLoggingServiceBean mdcLogService; @Inject GuestbookResponseServiceBean guestbookResponseService; - @Inject - DataFileServiceBean dataFileServiceBean; private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -853,85 +852,18 @@ public Response getFileDataTables(@Context ContainerRequestContext crc, @PathPar try { dataFile = findDataFileOrDie(dataFileId); } catch (WrappedResponse e) { - return notFound("File not found for given id."); + return error(Response.Status.NOT_FOUND, "File not found for given id."); } if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); if (!hasPermissionToDownloadFile) { - return forbidden("Insufficient permissions to access the requested information."); + return error(FORBIDDEN, "Insufficient permissions to access the requested information."); } } if (!dataFile.isTabularData()) { - return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); + return error(BAD_REQUEST, "This operation is only available for tabular files."); } return ok(jsonDT(dataFile.getDataTables())); } - - @POST - @AuthRequired - @Path("{id}/metadata/categories") - @Produces(MediaType.APPLICATION_JSON) - public Response setFileCategories(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { - return response(req -> { - DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - jakarta.json.JsonObject jsonObject; - try (StringReader stringReader = new StringReader(jsonBody)) { - jsonObject = Json.createReader(stringReader).readObject(); - JsonArray requestedCategoriesJson = jsonObject.getJsonArray("categories"); - FileMetadata fileMetadata = dataFile.getFileMetadata(); - for (JsonValue jsonValue : requestedCategoriesJson) { - JsonString jsonString = (JsonString) jsonValue; - fileMetadata.addCategoryByName(jsonString.getString()); - } - execCommand(new UpdateDatasetVersionCommand(fileMetadata.getDataFile().getOwner(), req)); - return ok("Categories of file " + dataFileId + " updated."); - } catch (JsonParsingException jpe) { - return badRequest("Error parsing Json: " + jpe.getMessage()); - } - }, getRequestUser(crc)); - } - - @POST - @AuthRequired - @Path("{id}/metadata/tabularTags") - @Produces(MediaType.APPLICATION_JSON) - public Response setFileTabularTags(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { - return response(req -> { - DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - if (!dataFile.isTabularData()) { - return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); - } - jakarta.json.JsonObject jsonObject; - try (StringReader stringReader = new StringReader(jsonBody)) { - jsonObject = Json.createReader(stringReader).readObject(); - JsonArray requestedTabularTagsJson = jsonObject.getJsonArray("tabularTags"); - for (JsonValue jsonValue : requestedTabularTagsJson) { - JsonString jsonString = (JsonString) jsonValue; - DataFileTag tag = new DataFileTag(); - try { - tag.setTypeByLabel(jsonString.getString()); - } catch (IllegalArgumentException iax) { - return badRequest(iax.getMessage()); - } - 
tag.setDataFile(dataFile); - dataFile.addTag(tag); - } - execCommand(new UpdateDatasetVersionCommand(dataFile.getOwner(), req)); - return ok("Tabular tags of file " + dataFileId + " updated."); - } catch (JsonParsingException jpe) { - return badRequest("Error parsing Json: " + jpe.getMessage()); - } - }, getRequestUser(crc)); - } - - @GET - @AuthRequired - @Path("{id}/hasBeenDeleted") - public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - return response(req -> { - DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); - }, getRequestUser(crc)); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1fed0b233e4..a7aa36f179e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -672,14 +672,9 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo //--------------------------------------------- .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) - .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); - Dataset dfOwner = df.getOwner(); - if (dfOwner != null) { - builder.add("fileAccessRequest", dfOwner.isFileAccessRequest()); - } /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the @@ -1100,22 +1095,6 @@ public Set characteristics() { }; } - public static JsonObjectBuilder json(Map map) { - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { - jsonObjectBuilder.add(mapEntry.getKey(), mapEntry.getValue()); - } - return jsonObjectBuilder; - } - - public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { - jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); - } - return jsonObjectBuilder; - } - public static Collector, JsonArrayBuilder> toJsonArray() { return new Collector, JsonArrayBuilder>() { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 7b4befcca36..ac725caf1b2 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2620,7 +2620,6 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. #Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file -files.api.only.tabular.supported=This operation is only available for tabular files. #Datasets.java datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. 
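The two handlers removed above read a minimal JSON object containing a single string array ("categories" or "tabularTags", respectively). For reference, a sketch of the calls they accepted, with placeholder server URL, API token, and file id (the same endpoints are restored by a later patch in this series):

.. code-block:: bash

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    export SERVER_URL=https://demo.dataverse.org
    export FILE_ID=24

    # Adds categories by name; per the handler and its release note,
    # categories that do not exist yet are created
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
        "$SERVER_URL/api/files/$FILE_ID/metadata/categories" \
        -d '{"categories": ["Data", "Documentation"]}'

    # Sets tabular tags; the handler rejects non-tabular files
    # and unknown tag labels with a 400 Bad Request
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
        "$SERVER_URL/api/files/$FILE_ID/metadata/tabularTags" \
        -d '{"tabularTags": ["Survey"]}'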
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 416caa68566..b1beddd893f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -26,9 +26,11 @@ import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.MatcherAssert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; -import static org.junit.jupiter.api.Assertions.*; /** * @@ -630,37 +632,28 @@ public void testZipUploadAndDownload() throws IOException { } @Test - public void testGetUserFileAccessRequested() { - // Create new user - Response createUserResponse = UtilIT.createRandomUser(); - createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); - String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); - - String dataFileId = Integer.toString(tabFile3IdRestricted); - - // Call with new user and unrequested access file - Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); - getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + public void testGetUserPermissionsOnFile() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); - boolean userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); - assertFalse(userFileAccessRequested); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - // Request file access for the new user - Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken); - requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - // Call with new user and requested access file - getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); - getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); - assertTrue(userFileAccessRequested); - } + // Assert user permissions on file + int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); - @Test - public void 
testGetUserPermissionsOnFile() { - // Call with valid file id - Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken); getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); assertTrue(canDownloadFile); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..3b6d4d1ecdf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -9,9 +9,6 @@ import io.restassured.http.ContentType; import io.restassured.response.Response; -import java.time.LocalDate; -import java.time.format.DateTimeFormatter; -import java.util.*; import java.util.logging.Logger; import org.junit.jupiter.api.AfterAll; @@ -20,6 +17,8 @@ import org.skyscreamer.jsonassert.JSONAssert; import org.junit.jupiter.api.Disabled; +import java.util.List; +import java.util.Map; import jakarta.json.JsonObject; import static jakarta.ws.rs.core.Response.Status.CREATED; @@ -40,6 +39,8 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import java.util.UUID; + import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; @@ -66,7 +67,8 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.Files; - +import java.util.ArrayList; +import java.util.HashMap; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObjectBuilder; @@ -75,7 +77,6 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import org.hamcrest.CoreMatchers; @@ -116,9 +117,7 @@ public static void setUpClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - - UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); - + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); @@ -145,9 +144,7 @@ public static void afterClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - - UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); - + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() @@ -3267,7 +3264,8 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } - public void getVersionFiles() throws IOException, InterruptedException { + @Test + public void getVersionFiles() throws IOException { Response createUser = UtilIT.createRandomUser(); 
createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3299,42 +3297,39 @@ public void getVersionFiles() throws IOException, InterruptedException { int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, null, apiToken); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); + + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)); - int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); - - String testFileId1 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[0].dataFile.id"); - String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); - // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) .body("data[1].label", equalTo(testFileName4)); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); - // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName5)); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3340,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response 
getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3351,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3362,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3373,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3390,186 +3385,9 @@ public void getVersionFiles() throws IOException, InterruptedException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, invalidOrderCriteria, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, invalidOrderCriteria, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); - - // Test Content Type - Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, null, apiToken); - - getVersionFilesResponseContentType.then().assertThat() - 
.statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName5)); - - fileMetadatasCount = getVersionFilesResponseContentType.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Category Name - String testCategory = "testCategory"; - Response setFileCategoriesResponse = UtilIT.setFileCategories(testFileId1, apiToken, List.of(testCategory)); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, null, apiToken); - - getVersionFilesResponseCategoryName.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)) - .body("data[1].label", equalTo(testFileName2)); - - fileMetadatasCount = getVersionFilesResponseCategoryName.jsonPath().getList("data").size(); - assertEquals(2, fileMetadatasCount); - - // Test Access Status Restricted - Response restrictFileResponse = UtilIT.restrictFile(String.valueOf(testFileId1), true, apiToken); - restrictFileResponse.then().assertThat() - .statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); - - getVersionFilesResponseRestricted.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)); - - fileMetadatasCount = getVersionFilesResponseRestricted.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Access Status Embargoed - UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); - String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - - // Create embargo for test file 1 (Embargoed and Restricted) - Response createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId1), activeEmbargoDate, apiToken); - - createActiveFileEmbargoResponse.then().assertThat() - .statusCode(OK.getStatusCode()); - - // Create embargo for test file 2 (Embargoed and Public) - createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId2), activeEmbargoDate, apiToken); - - createActiveFileEmbargoResponse.then().assertThat() - .statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); - - getVersionFilesResponseEmbargoedThenPublic.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName2)); - - fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); - - getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() 
- .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)); - - fileMetadatasCount = getVersionFilesResponseEmbargoedThenRestricted.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); - - getVersionFilesResponsePublic.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName3)) - .body("data[1].label", equalTo(testFileName4)) - .body("data[2].label", equalTo(testFileName5)); - - fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); - assertEquals(3, fileMetadatasCount); - - // Test Search Text - Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); - - getVersionFilesResponseSearchText.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)); - - fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Tabular Tag Name - String pathToTabularTestFile = "src/test/resources/tab/test.tab"; - Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); - uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Ensure tabular file is ingested - sleep(2000); - - String tabularTagName = "Survey"; - Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); - setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseTabularTagName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, tabularTagName, null, null, apiToken); - - getVersionFilesResponseTabularTagName.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo("test.tab")); - - fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - } - - @Test - public void getVersionFileCounts() throws IOException { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Creating test files - String testFileName1 = "test_1.txt"; - String testFileName2 = "test_2.txt"; - String testFileName3 = "test_3.png"; - - 
UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken); - - // Creating a categorized test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - String testCategory = "testCategory"; - Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, List.of(testCategory)); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Setting embargo for file (Embargo and Public) - UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); - String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); - createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); - - getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); - - JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath(); - LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); - LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); - LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); - - assertEquals(4, (Integer) responseJsonPath.get("data.total")); - assertEquals(2, responseCountPerContentTypeMap.get("image/png")); - assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); - assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); - assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); - assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index bfe856a8d18..0a16bca7008 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2,8 +2,6 @@ import io.restassured.RestAssured; import io.restassured.response.Response; - -import java.util.List; import java.util.logging.Logger; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; @@ -32,12 +30,16 @@ import static jakarta.ws.rs.core.Response.Status.*; import org.hamcrest.CoreMatchers; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.CoreMatchers.nullValue; import org.hamcrest.Matchers; import org.junit.jupiter.api.AfterAll; -import static org.hamcrest.CoreMatchers.*; -import static org.hamcrest.CoreMatchers.hasItem; -import static org.junit.jupiter.api.Assertions.*; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class FilesIT { @@ -2209,137 +2211,4 @@ public void testGetFileDataTables() throws InterruptedException { getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } - - @Test - public void testSetFileCategories() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Set categories - String testCategory1 = "testCategory1"; - String testCategory2 = "testCategory2"; - List testCategories = List.of(testCategory1, testCategory2); - Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, testCategories); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Get file data and check for new categories - Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); - getFileDataResponse.prettyPrint(); - getFileDataResponse.then().assertThat() - .body("data.categories", hasItem(testCategory1)) - .body("data.categories", hasItem(testCategory2)) - .statusCode(OK.getStatusCode()); - } - - @Test - public void testSetFileTabularTags() throws InterruptedException { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload tabular file - String pathToTabularTestFile = "src/test/resources/tab/test.tab"; - Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); - uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String tabularFileId = 
uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Ensure tabular file is ingested - sleep(2000); - - // Set tabular tags - String testTabularTag1 = "Survey"; - String testTabularTag2 = "Genomics"; - List testTabularTags = List.of(testTabularTag1, testTabularTag2); - Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags); - setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Get file data and check for new categories - Response getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); - getFileDataResponse.then().assertThat() - .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) - .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) - .statusCode(OK.getStatusCode()); - - // Set invalid tabular tag - String testInvalidTabularTag = "Invalid"; - setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(testInvalidTabularTag)); - setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); - - // Get file data and check categories are unaltered - getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); - getFileDataResponse.then().assertThat() - .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) - .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) - .statusCode(OK.getStatusCode()); - } - - @Test - public void testGetHasBeenDeleted() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Publish dataverse and dataset - Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); - publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); - publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Assert that the file has not been deleted - Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); - getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); - boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); - assertFalse(fileHasBeenDeleted); - - // Delete test file - Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken); - 
deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Assert that the file has been deleted - getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); - getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); - fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); - assertTrue(fileHasBeenDeleted); - } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8e333451c8d..f61b392c898 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3276,7 +3276,7 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json"); @@ -3286,21 +3286,6 @@ static Response getVersionFiles(Integer datasetId, String version, Integer limit if (offset != null) { requestSpecification = requestSpecification.queryParam("offset", offset); } - if (contentType != null) { - requestSpecification = requestSpecification.queryParam("contentType", contentType); - } - if (accessStatus != null) { - requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); - } - if (categoryName != null) { - requestSpecification = requestSpecification.queryParam("categoryName", categoryName); - } - if (tabularTagName != null) { - requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); - } - if (searchText != null) { - requestSpecification = requestSpecification.queryParam("searchText", searchText); - } if (orderCriteria != null) { requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); } @@ -3332,75 +3317,9 @@ static Response getFileDataTables(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/dataTables"); } - static Response getUserFileAccessRequested(String dataFileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested"); - } - static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } - - static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { - JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); - jsonBuilder.add("dateAvailable", dateAvailable); - jsonBuilder.add("reason", "This is a test embargo"); - jsonBuilder.add("fileIds", Json.createArrayBuilder().add(fileId)); - String jsonString = jsonBuilder.build().toString(); - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(jsonString) - .contentType("application/json") - .urlEncodingEnabled(false) - .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); - } - - static Response getVersionFileCounts(Integer datasetId, String version, String 
apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); - } - - static Response setFileCategories(String dataFileId, String apiToken, List categories) { - JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); - for (String category : categories) { - jsonArrayBuilder.add(category); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("categories", jsonArrayBuilder); - String jsonString = jsonObjectBuilder.build().toString(); - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(jsonString) - .post("/api/files/" + dataFileId + "/metadata/categories"); - } - - static Response setFileTabularTags(String dataFileId, String apiToken, List tabularTags) { - JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); - for (String tabularTag : tabularTags) { - jsonArrayBuilder.add(tabularTag); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("tabularTags", jsonArrayBuilder); - String jsonString = jsonObjectBuilder.build().toString(); - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(jsonString) - .post("/api/files/" + dataFileId + "/metadata/tabularTags"); - } - - static Response deleteFileInDataset(Integer fileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .delete("/api/files/" + fileId); - } - - static Response getHasBeenDeleted(String dataFileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/files/" + dataFileId + "/hasBeenDeleted"); - } } From 87393bae0e2f40b41693d7a3cecc64785e81df36 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 15:05:57 +0100 Subject: [PATCH 0557/1092] Added: docs for filtering by tabular tag in getVersionFiles endpoint --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index d4e2e4cd178..f78ae62f3ae 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -978,6 +978,14 @@ Usage example: curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?categoryName=Data" +Tabular tag name filtering is also optionally supported. To return files to which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?tabularTagName=Survey" + Content type filtering is also optionally supported. To return files matching the requested content type. Usage example: From 9f0b8102904bb663dce8c50203d32663550e2095 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Oct 2023 10:47:54 -0400 Subject: [PATCH 0558/1092] more tests and docs #9952, #9953, #9957 --- doc/release-notes/9955-Signposting-updates.md | 8 +++++++- doc/sphinx-guides/source/api/native-api.rst | 2 +- .../edu/harvard/iq/dataverse/api/SignpostingIT.java | 11 +++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md index 92168231895..db0e27e51c5 100644 --- a/doc/release-notes/9955-Signposting-updates.md +++ b/doc/release-notes/9955-Signposting-updates.md @@ -1 +1,7 @@ -This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. 
\ No newline at end of file
+This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this cleanup and not support the old implementation that was not fully compliant with the spec.
+
+To fix #9952, we surround the license info with `<` and `>`.
+
+To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information
+
+To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now.
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index e181a2a5546..bc186720252 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -2200,7 +2200,7 @@ Here is an example of a "Link" header:
 
 The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above.
 
-The reponse includes a JSON object conforming to the `Signposting `__ specification.
+The response includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object. Signposting is not supported for draft dataset versions.
 
 .. code-block:: bash
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
index b41f62ae28f..75f514f3398 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
@@ -80,6 +80,7 @@ public void testSignposting() {
         assertTrue(linkHeader.contains(datasetPid));
         assertTrue(linkHeader.contains("cite-as"));
         assertTrue(linkHeader.contains("describedby"));
+        assertTrue(linkHeader.contains(";rel=\"license\""));
 
         Pattern pattern = Pattern.compile("<([^<]*)> ; rel=\"linkset\";type=\"application\\/linkset\\+json\"");
         Matcher matcher = pattern.matcher(linkHeader);
@@ -101,6 +102,16 @@ public void testSignposting() {
         assertTrue(lso.getString("anchor").indexOf("/dataset.xhtml?persistentId=" + datasetPid) > 0);
         assertTrue(lso.containsKey("describedby"));
 
+        // Test export URL from link header
+        // regex inspired by https://stackoverflow.com/questions/68860255/how-to-match-the-closest-opening-and-closing-brackets
+        Pattern exporterPattern = Pattern.compile("[<\\[][^()\\[\\]]*?exporter=schema.org[^()\\[\\]]*[>\\]]");
+        Matcher exporterMatcher = exporterPattern.matcher(linkHeader);
+        exporterMatcher.find();
+
+        Response exportDataset = UtilIT.exportDataset(datasetPid, "schema.org");
+        exportDataset.prettyPrint();
+        exportDataset.then().assertThat().statusCode(OK.getStatusCode());
+
     }
 
 }
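The assertions above can also be checked by hand against a running installation. A rough sketch, assuming a published dataset with a placeholder persistent identifier (the header parsing below is intentionally loose):

.. code-block:: bash

    export SERVER_URL=https://demo.dataverse.org
    export PID=doi:10.5072/FK2/EXAMPLE

    # Per #9952, the Link header should wrap the license URI in < and >,
    # e.g. <https://creativecommons.org/...>;rel="license"
    curl -s -I -L "$SERVER_URL/dataset.xhtml?persistentId=$PID" | grep -i '^link:'

    # Per #9953, following the rel="linkset" entry returns bare linkset JSON,
    # not a {"status":"OK","data":{...}} wrapper
    LINKSET_URL=$(curl -s -I -L "$SERVER_URL/dataset.xhtml?persistentId=$PID" \
        | grep -oiE '<[^>]+> ?; ?rel="linkset"' | sed -E 's/^<([^>]+)>.*$/\1/')
    curl -s "$LINKSET_URL"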
--- .../9714-files-api-extension-filters.md | 14 + .../9785-files-api-extension-search-text.md | 3 + .../9834-files-api-extension-counts.md | 6 + ...oad-extension-new-file-access-endpoints.md | 14 + doc/sphinx-guides/source/api/dataaccess.rst | 12 + doc/sphinx-guides/source/api/native-api.rst | 165 +++++++++++- modules/dataverse-parent/pom.xml | 3 + pom.xml | 14 + .../edu/harvard/iq/dataverse/DataFileTag.java | 2 +- .../DatasetVersionFilesServiceBean.java | 241 ++++++++++++++++++ .../dataverse/DatasetVersionServiceBean.java | 74 ------ .../edu/harvard/iq/dataverse/api/Access.java | 59 +++-- .../harvard/iq/dataverse/api/Datasets.java | 50 +++- .../edu/harvard/iq/dataverse/api/Files.java | 90 ++++++- .../iq/dataverse/util/json/JsonPrinter.java | 21 ++ src/main/java/propertyFiles/Bundle.properties | 1 + .../harvard/iq/dataverse/api/AccessIT.java | 47 ++-- .../harvard/iq/dataverse/api/DatasetsIT.java | 240 ++++++++++++++--- .../edu/harvard/iq/dataverse/api/FilesIT.java | 145 ++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 83 +++++- 20 files changed, 1110 insertions(+), 174 deletions(-) create mode 100644 doc/release-notes/9714-files-api-extension-filters.md create mode 100644 doc/release-notes/9785-files-api-extension-search-text.md create mode 100644 doc/release-notes/9834-files-api-extension-counts.md create mode 100644 doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md create mode 100644 src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java diff --git a/doc/release-notes/9714-files-api-extension-filters.md b/doc/release-notes/9714-files-api-extension-filters.md new file mode 100644 index 00000000000..034230efe61 --- /dev/null +++ b/doc/release-notes/9714-files-api-extension-filters.md @@ -0,0 +1,14 @@ +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by: + +- Access status: through the `accessStatus` query parameter, which supports the following values: + + - Public + - Restricted + - EmbargoedThenRestricted + - EmbargoedThenPublic + + +- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added. + + +- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". diff --git a/doc/release-notes/9785-files-api-extension-search-text.md b/doc/release-notes/9785-files-api-extension-search-text.md new file mode 100644 index 00000000000..fb185e1c7af --- /dev/null +++ b/doc/release-notes/9785-files-api-extension-search-text.md @@ -0,0 +1,3 @@ +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by search text through the `searchText` query parameter. + +The search will be applied to the labels and descriptions of the dataset files. diff --git a/doc/release-notes/9834-files-api-extension-counts.md b/doc/release-notes/9834-files-api-extension-counts.md new file mode 100644 index 00000000000..3ec15d8bd36 --- /dev/null +++ b/doc/release-notes/9834-files-api-extension-counts.md @@ -0,0 +1,6 @@ +Implemented the following new endpoints: + +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Given a dataset and its version, retrieves file counts based on different criteria (Total count, per content type, per access status and per category name). 
+ + +- setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created. diff --git a/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md new file mode 100644 index 00000000000..f306ae2ab80 --- /dev/null +++ b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md @@ -0,0 +1,14 @@ +Implemented the following new endpoints: + +- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether the calling user has requested access to a particular file. + + +- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Returns whether a particular file that existed in a previous version of the dataset no longer exists in the latest version. + + +In addition, the DataFile API payload has been extended to include the following fields: + +- tabularData: Boolean field indicating whether the DataFile is of tabular type + + +- fileAccessRequest: Boolean field indicating whether file access requests are enabled on the Dataset (the DataFile owner) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 21be702d62b..6edd413b7a5 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -404,6 +404,18 @@ A curl example using an ``id``:: curl -H "X-Dataverse-key:$API_TOKEN" -X GET http://$SERVER/api/access/datafile/{id}/listRequests +User Has Requested Access to a File: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``/api/access/datafile/{id}/userFileAccessRequested`` + +This method returns true or false depending on whether the calling user has requested access to a particular file. + +A curl example using an ``id``:: + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userFileAccessRequested" + + Get User Permissions on a File: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5c72937eecb..cf869d338ca 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -970,6 +970,45 @@ This endpoint supports optional pagination, through the ``limit`` and ``offset`` curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?limit=10&offset=20" +Category name filtering is also optionally supported, to return files to which the requested category has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?categoryName=Data" + +Content type filtering is also optionally supported, to return files matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?contentType=image/png" + +Filtering by search text is also optionally supported. The search is applied to the labels and descriptions of the dataset files, returning the files that contain the searched text in either of those fields. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?searchText=word" + +File access filtering is also optionally supported.
In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?accessStatus=Public" + Ordering criteria for sorting the results is also optionally supported. In particular, by the following possible values: * ``NameAZ`` (Default) @@ -979,14 +1018,42 @@ Ordering criteria for sorting the results is also optionally supported. In parti * ``Size`` * ``Type`` -Please note that these values are case sensitive and must be correctly typed for the endpoint to recognize them. - Usage example: .. code-block:: bash curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?orderCriteria=Newest" +Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them. + +Keep in mind that all of the above query parameters can be combined, depending on the results you are looking for. + +Get File Counts in a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Get file counts for the given dataset and version. + +The returned file counts are based on different criteria: + +- Total (The total file count) +- Per content type +- Per category name +- Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic) + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 + + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files/counts" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2832,13 +2899,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl "$SERVER_URL/api/files/$ID/downloadCount" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/downloadCount" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/24/downloadCount" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/downloadCount" A curl example using a ``PERSISTENT_ID`` @@ -2848,16 +2915,53 @@ A curl example using a ``PERSISTENT_ID`` .. code-block:: bash export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" If you are interested in download counts for multiple files, see :doc:`/api/metrics`. +File Has Been Deleted +~~~~~~~~~~~~~~~~~~~~~ + +Find out whether a particular file that existed in a previous version of the dataset no longer exists in the latest version.
+ +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/hasBeenDeleted" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/hasBeenDeleted" + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000" + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ @@ -2907,6 +3011,55 @@ Also note that dataFileTags are not versioned and changes to these will update t .. _EditingVariableMetadata: +Updating File Metadata Categories +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates the categories for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the category names. + +Although categories can also be updated with the previous endpoint, this one was created to be more practical when only the categories, and no other metadata fields, need to be updated. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "$SERVER_URL/api/files/$ID/metadata/categories" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "https://demo.dataverse.org/api/files/24/metadata/categories" + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" + +Note that if the specified categories do not exist, they will be created.
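For reference, a sketch of what a successful categories update returns; the envelope is the standard Dataverse wrapper, and the message string matches the ``ok(...)`` call added to Files.java later in this patch (file id 24 as in the example above, values illustrative):

    curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
      -F 'jsonData={"categories":["Data"]}' \
      "$SERVER_URL/api/files/$ID/metadata/categories"
    # Expected response shape:
    # {"status":"OK","data":{"message":"Categories of file 24 updated."}}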
+ Editing Variable Level Metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index bfa11af6c70..8e0ff2887df 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -200,6 +200,9 @@ 0.43.4 + + + <querydsl.version>5.0.0</querydsl.version> diff --git a/pom.xml b/pom.xml index e70b723cad5..909e9ee9b80 100644 --- a/pom.xml +++ b/pom.xml @@ -252,6 +252,20 @@ expressly provided + + <dependency> + <groupId>com.querydsl</groupId> + <artifactId>querydsl-apt</artifactId> + <version>${querydsl.version}</version> + <classifier>jakarta</classifier> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>com.querydsl</groupId> + <artifactId>querydsl-jpa</artifactId> + <version>${querydsl.version}</version> + <classifier>jakarta</classifier> + </dependency> commons-io diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index f4f66d3c874..351c4032939 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -58,7 +58,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa private static final Map<TagType, String> TagTypeToLabels = new HashMap<>(); - private static final Map<String, TagType> TagLabelToTypes = new HashMap<>(); + public static final Map<String, TagType> TagLabelToTypes = new HashMap<>(); static { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java new file mode 100644 index 00000000000..6006d937100 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -0,0 +1,241 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.QDataFileCategory; +import edu.harvard.iq.dataverse.QDataFileTag; +import edu.harvard.iq.dataverse.QDvObject; +import edu.harvard.iq.dataverse.QEmbargo; +import edu.harvard.iq.dataverse.QFileMetadata; + +import com.querydsl.core.Tuple; +import com.querydsl.core.types.dsl.BooleanExpression; +import com.querydsl.core.types.dsl.CaseBuilder; +import com.querydsl.core.types.dsl.DateExpression; +import com.querydsl.core.types.dsl.DateTimeExpression; + +import com.querydsl.jpa.impl.JPAQuery; +import com.querydsl.jpa.impl.JPAQueryFactory; + +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; + +import java.io.Serializable; +import java.sql.Timestamp; +import java.time.LocalDate; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; + +@Stateless +@Named +public class DatasetVersionFilesServiceBean implements Serializable { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; + private final QDvObject dvObject = QDvObject.dvObject; + private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; + private final QDataFileTag dataFileTag = QDataFileTag.dataFileTag; + + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, NameZA, Newest, Oldest, Size, Type + } + + /** + * Status of the particular DataFile based on active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} + */ + public enum DataFileAccessStatus { + Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic + } + + /** + * Given a DatasetVersion, returns its total file metadata count + * + *
@param datasetVersion the DatasetVersion to access + * @return long value of total file metadata count + */ + public long getFileMetadataCount(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); + } + + /** + * Given a DatasetVersion, returns its file metadata count per content type + * + * @param datasetVersion the DatasetVersion to access + * @return Map of file metadata counts per content type + */ + public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + List contentTypeOccurrences = queryFactory + .select(fileMetadata.dataFile.contentType, fileMetadata.count()) + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) + .groupBy(fileMetadata.dataFile.contentType).fetch(); + Map result = new HashMap<>(); + for (Tuple occurrence : contentTypeOccurrences) { + result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); + } + return result; + } + + /** + * Given a DatasetVersion, returns its file metadata count per category name + * + * @param datasetVersion the DatasetVersion to access + * @return Map of file metadata counts per category name + */ + public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + List categoryNameOccurrences = queryFactory + .select(dataFileCategory.name, fileMetadata.count()) + .from(dataFileCategory, fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) + .groupBy(dataFileCategory.name).fetch(); + Map result = new HashMap<>(); + for (Tuple occurrence : categoryNameOccurrences) { + result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); + } + return result; + } + + /** + * Given a DatasetVersion, returns its file metadata count per DataFileAccessStatus + * + * @param datasetVersion the DatasetVersion to access + * @return Map of file metadata counts per DataFileAccessStatus + */ + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + Map allCounts = new HashMap<>(); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); + return allCounts; + } + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param contentType for retrieving only files with this content type + * @param accessStatus for retrieving only files with this DataFileAccessStatus + * @param categoryName for retrieving only files categorized with this category name + * @param tabularTagName for retrieving only files categorized with this tabular tag name + * @param searchText for retrieving only files that contain the specified text within their labels or descriptions + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a 
FileMetadata list from the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { + JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); + + if (contentType != null) { + baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); + } + if (accessStatus != null) { + baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); + } + if (categoryName != null) { + baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); + } + if (tabularTagName != null) { + baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); + } + if (searchText != null && !searchText.isEmpty()) { + searchText = searchText.trim().toLowerCase(); + baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); + } + + applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); + + if (limit != null) { + baseQuery.limit(limit); + } + if (offset != null) { + baseQuery.offset(offset); + } + + return baseQuery.fetch(); + } + + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { + long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); + if (fileMetadataCount > 0) { + totalCounts.put(dataFileAccessStatus, fileMetadataCount); + } + } + + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + return queryFactory + .selectFrom(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) + .stream().count(); + } + + private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + if (orderCriteria == FileMetadatasOrderCriteria.Newest || orderCriteria == FileMetadatasOrderCriteria.Oldest) { + baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); + } + return baseQuery; + } + + private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { + QEmbargo embargo = fileMetadata.dataFile.embargo; + BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); + BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); + BooleanExpression accessStatusExpression; + switch (accessStatus) { + case EmbargoedThenRestricted: + accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); + break; + case EmbargoedThenPublic: + accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); + break; + case Restricted: + accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); + break; + case Public: + 
accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); + break; + default: + throw new IllegalStateException("Unexpected value: " + accessStatus); + } + return accessStatusExpression; + } + + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { + DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); + switch (orderCriteria) { + case NameZA: + query.orderBy(fileMetadata.label.desc()); + break; + case Newest: + query.orderBy(orderByLifetimeExpression.desc()); + break; + case Oldest: + query.orderBy(orderByLifetimeExpression.asc()); + break; + case Size: + query.orderBy(fileMetadata.dataFile.filesize.asc()); + break; + case Type: + query.orderBy(fileMetadata.dataFile.contentType.asc()); + break; + default: + query.orderBy(fileMetadata.label.asc()); + break; + } + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 6f087f9eabc..5c43001dcb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,22 +49,6 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" - + " WHERE fm.datasetVersion.id=:datasetVersionId" - + " ORDER BY fm.label"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = dvo.id" - + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.filesize"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.contentType"; - @EJB DatasetServiceBean datasetService; @@ -166,18 +150,6 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, - NameZA, - Newest, - Oldest, - Size, - Type - } - public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1252,50 +1224,4 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions - - /** - * Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list of the specified DatasetVersion - */ - public List 
getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { - TypedQuery<FileMetadata> query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) - .setParameter("datasetVersionId", datasetVersion.getId()); - if (limit != null) { - query.setMaxResults(limit); - } - if (offset != null) { - query.setFirstResult(offset); - } - return query.getResultList(); - } - - private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { - String queryString; - switch (orderCriteria) { - case NameZA: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; - break; - case Newest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; - break; - case Oldest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; - break; - case Size: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; - break; - case Type: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; - break; - default: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; - break; - } - return queryString; - } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 1aa3f4ffde6..3c226e68472 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1681,7 +1681,47 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); } } - + + @GET + @AuthRequired + @Path("/datafile/{id}/userFileAccessRequested") + public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + AuthenticatedUser requestAuthenticatedUser; + try { + dataFile = findDataFileOrDie(dataFileId); + requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + boolean fileAccessRequested = false; + List<FileAccessRequest> requests = dataFile.getFileAccessRequests(); + for (FileAccessRequest fileAccessRequest : requests) { + if (fileAccessRequest.getRequester().getId().equals(requestAuthenticatedUser.getId())) { + fileAccessRequested = true; + break; + } + } + return ok(fileAccessRequested); + } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } + // checkAuthorization is a convenience method; it calls the boolean method // isAccessAuthorized(), the actual workhorse, and throws a 403 exception if not.
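As a usage sketch for the two access endpoints added above (ids, token, and the boolean values are illustrative; the userPermissions keys come from the JsonObjectBuilder in the method body):

    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/access/datafile/24/userFileAccessRequested"
    # -> {"status":"OK","data":false}   (true once this user has requested access)

    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/access/datafile/24/userPermissions"
    # -> {"status":"OK","data":{"canDownloadFile":true,"canEditOwnerDataset":false}}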
@@ -1949,21 +1989,4 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } - - @GET - @AuthRequired - @Path("/datafile/{id}/userPermissions") - public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - try { - dataFile = findDataFileOrDie(dataFileId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); - jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); - return ok(jsonObjectBuilder); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 704ec8f1989..b3be55399d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -236,6 +236,9 @@ public class Datasets extends AbstractApiBean { @Inject PrivateUrlServiceBean privateUrlService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + /** * Used to consolidate the way we parse and handle dataset versions. * @param @@ -484,23 +487,56 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" : ok(json(dsv)); }, getRequestUser(crc)); } - + @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> { + public Response getVersionFiles(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("limit") Integer limit, + @QueryParam("offset") Integer offset, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @QueryParam("orderCriteria") String orderCriteria, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); + DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; + try { + dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + } + return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); }, getRequestUser(crc)); } - + + @GET + @AuthRequired + @Path("{id}/versions/{versionId}/files/counts") + public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); + jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); + jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); + jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); + return ok(jsonObjectBuilder); + }, getRequestUser(crc)); + } + @GET @AuthRequired @Path("{id}/dirindex") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 82811162d52..8a9abe68d85 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -4,6 +4,7 @@ import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -54,6 +55,7 @@ import java.io.IOException; import java.io.InputStream; +import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -63,15 +65,12 @@ import jakarta.ejb.EJBException; import jakarta.inject.Inject; import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.*; import 
jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.HttpHeaders; @@ -112,6 +111,8 @@ public class Files extends AbstractApiBean { MakeDataCountLoggingServiceBean mdcLogService; @Inject GuestbookResponseServiceBean guestbookResponseService; + @Inject + DataFileServiceBean dataFileServiceBean; private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -852,18 +853,85 @@ public Response getFileDataTables(@Context ContainerRequestContext crc, @PathPar try { dataFile = findDataFileOrDie(dataFileId); } catch (WrappedResponse e) { - return error(Response.Status.NOT_FOUND, "File not found for given id."); + return notFound("File not found for given id."); } if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); if (!hasPermissionToDownloadFile) { - return error(FORBIDDEN, "Insufficient permissions to access the requested information."); + return forbidden("Insufficient permissions to access the requested information."); } } if (!dataFile.isTabularData()) { - return error(BAD_REQUEST, "This operation is only available for tabular files."); + return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); } return ok(jsonDT(dataFile.getDataTables())); } + + @POST + @AuthRequired + @Path("{id}/metadata/categories") + @Produces(MediaType.APPLICATION_JSON) + public Response setFileCategories(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + jakarta.json.JsonObject jsonObject; + try (StringReader stringReader = new StringReader(jsonBody)) { + jsonObject = Json.createReader(stringReader).readObject(); + JsonArray requestedCategoriesJson = jsonObject.getJsonArray("categories"); + FileMetadata fileMetadata = dataFile.getFileMetadata(); + for (JsonValue jsonValue : requestedCategoriesJson) { + JsonString jsonString = (JsonString) jsonValue; + fileMetadata.addCategoryByName(jsonString.getString()); + } + execCommand(new UpdateDatasetVersionCommand(fileMetadata.getDataFile().getOwner(), req)); + return ok("Categories of file " + dataFileId + " updated."); + } catch (JsonParsingException jpe) { + return badRequest("Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + + @POST + @AuthRequired + @Path("{id}/metadata/tabularTags") + @Produces(MediaType.APPLICATION_JSON) + public Response setFileTabularTags(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + if (!dataFile.isTabularData()) { + return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); + } + jakarta.json.JsonObject jsonObject; + try (StringReader stringReader = new StringReader(jsonBody)) { + jsonObject = Json.createReader(stringReader).readObject(); + JsonArray requestedTabularTagsJson = jsonObject.getJsonArray("tabularTags"); + for (JsonValue jsonValue : requestedTabularTagsJson) { + JsonString jsonString = (JsonString) jsonValue; + DataFileTag tag = new DataFileTag(); + try { + tag.setTypeByLabel(jsonString.getString()); + } catch 
(IllegalArgumentException iax) { + return badRequest(iax.getMessage()); + } + tag.setDataFile(dataFile); + dataFile.addTag(tag); + } + execCommand(new UpdateDatasetVersionCommand(dataFile.getOwner(), req)); + return ok("Tabular tags of file " + dataFileId + " updated."); + } catch (JsonParsingException jpe) { + return badRequest("Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + + @GET + @AuthRequired + @Path("{id}/hasBeenDeleted") + public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); + }, getRequestUser(crc)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index a7aa36f179e..1fed0b233e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -672,9 +672,14 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo //--------------------------------------------- .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) + .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); + Dataset dfOwner = df.getOwner(); + if (dfOwner != null) { + builder.add("fileAccessRequest", dfOwner.isFileAccessRequest()); + } /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the @@ -1095,6 +1100,22 @@ public Set characteristics() { }; } + public static JsonObjectBuilder json(Map map) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + for (Map.Entry mapEntry : map.entrySet()) { + jsonObjectBuilder.add(mapEntry.getKey(), mapEntry.getValue()); + } + return jsonObjectBuilder; + } + + public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + for (Map.Entry mapEntry : map.entrySet()) { + jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); + } + return jsonObjectBuilder; + } + public static Collector, JsonArrayBuilder> toJsonArray() { return new Collector, JsonArrayBuilder>() { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index ac725caf1b2..7b4befcca36 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2620,6 +2620,7 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. #Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file +files.api.only.tabular.supported=This operation is only available for tabular files. #Datasets.java datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. 
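To make the new counts payload concrete, a sketch of a getVersionFileCounts response; the keys come from the JsonObjectBuilder in Datasets.java above, and the values mirror the integration test below (otherwise illustrative):

    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts"
    # -> {"status":"OK","data":{
    #      "total": 4,
    #      "perContentType": {"image/png": 2, "text/plain": 2},
    #      "perCategoryName": {"testCategory": 1},
    #      "perAccessStatus": {"Public": 3, "EmbargoedThenPublic": 1}}}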
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index b1beddd893f..416caa68566 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -26,11 +26,9 @@ import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.MatcherAssert.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; +import static org.junit.jupiter.api.Assertions.*; /** * @@ -632,28 +630,37 @@ public void testZipUploadAndDownload() throws IOException { } @Test - public void testGetUserPermissionsOnFile() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); + public void testGetUserFileAccessRequested() { + // Create new user + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + String dataFileId = Integer.toString(tabFile3IdRestricted); - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + // Call with new user and unrequested access file + Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertFalse(userFileAccessRequested); + + // Request file access for the new user + Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken); + requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Assert user permissions on file - int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); + // Call with new user and requested access file + getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertTrue(userFileAccessRequested); + } + + @Test + public void 
testGetUserPermissionsOnFile() { + // Call with valid file id + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken); getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); assertTrue(canDownloadFile); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 3b6d4d1ecdf..b9f09cc7c07 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -9,6 +9,9 @@ import io.restassured.http.ContentType; import io.restassured.response.Response; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.*; import java.util.logging.Logger; import org.junit.jupiter.api.AfterAll; @@ -17,8 +20,6 @@ import org.skyscreamer.jsonassert.JSONAssert; import org.junit.jupiter.api.Disabled; -import java.util.List; -import java.util.Map; import jakarta.json.JsonObject; import static jakarta.ws.rs.core.Response.Status.CREATED; @@ -39,8 +40,6 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import java.util.UUID; - import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; @@ -67,8 +66,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.Files; -import java.util.ArrayList; -import java.util.HashMap; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObjectBuilder; @@ -77,6 +75,7 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; +import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import org.hamcrest.CoreMatchers; @@ -117,7 +116,9 @@ public static void setUpClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - + + UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); @@ -144,7 +145,9 @@ public static void afterClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - + + UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() @@ -3264,8 +3267,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } - @Test - public void getVersionFiles() throws IOException { + public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); 
createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3297,39 +3299,42 @@ public void getVersionFiles() throws IOException { int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); - - int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, null, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)); - // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); - - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); + String testFileId1 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[0].dataFile.id"); + String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); + + // Test page 2 + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) .body("data[1].label", equalTo(testFileName4)); - // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); + assertEquals(testPageSize, fileMetadatasCount); + + // Test page 3 (last) + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, null, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName5)); + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3340,7 +3345,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3351,7 +3356,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3362,7 +3367,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3373,7 +3378,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3385,9 +3390,186 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, invalidOrderCriteria, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, invalidOrderCriteria, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); + + // Test Content Type + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, null, apiToken); + + getVersionFilesResponseContentType.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)); + + fileMetadatasCount = getVersionFilesResponseContentType.jsonPath().getList("data").size(); + assertEquals(1, 
fileMetadatasCount); + + // Test Category Name + String testCategory = "testCategory"; + Response setFileCategoriesResponse = UtilIT.setFileCategories(testFileId1, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, null, apiToken); + + getVersionFilesResponseCategoryName.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)); + + fileMetadatasCount = getVersionFilesResponseCategoryName.jsonPath().getList("data").size(); + assertEquals(2, fileMetadatasCount); + + // Test Access Status Restricted + Response restrictFileResponse = UtilIT.restrictFile(String.valueOf(testFileId1), true, apiToken); + restrictFileResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); + + getVersionFilesResponseRestricted.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseRestricted.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Access Status Embargoed + UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); + String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + + // Create embargo for test file 1 (Embargoed and Restricted) + Response createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId1), activeEmbargoDate, apiToken); + + createActiveFileEmbargoResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Create embargo for test file 2 (Embargoed and Public) + createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId2), activeEmbargoDate, apiToken); + + createActiveFileEmbargoResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); + + getVersionFilesResponseEmbargoedThenPublic.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName2)); + + fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); + + getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseEmbargoedThenRestricted.jsonPath().getList("data").size(); + 
assertEquals(1, fileMetadatasCount); + + // Test Access Status Public + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); + + getVersionFilesResponsePublic.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName3)) + .body("data[1].label", equalTo(testFileName4)) + .body("data[2].label", equalTo(testFileName5)); + + fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); + assertEquals(3, fileMetadatasCount); + + // Test Search Text + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); + + getVersionFilesResponseSearchText.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Tabular Tag Name + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseTabularTagName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, tabularTagName, null, null, apiToken); + + getVersionFilesResponseTabularTagName.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo("test.tab")); + + fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + } + + @Test + public void getVersionFileCounts() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Creating test files + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; + String testFileName3 = "test_3.png"; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); + 
UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken);
+
+        // Creating a categorized test file
+        String pathToTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+        String testCategory = "testCategory";
+        Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, List.of(testCategory));
+        setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Setting embargo for file (Embargoed and Public)
+        UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12");
+        String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+        Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken);
+        createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Getting the file counts and asserting each count
+        Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken);
+
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(4, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(2, responseCountPerContentTypeMap.get("image/png"));
+        assertEquals(2, responseCountPerContentTypeMap.get("text/plain"));
+        assertEquals(1, responseCountPerCategoryNameMap.get(testCategory));
+        assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString()));
+        assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString()));
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 0a16bca7008..bfe856a8d18 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -2,6 +2,8 @@
 
 import io.restassured.RestAssured;
 import io.restassured.response.Response;
+
+import java.util.List;
 import java.util.logging.Logger;
 
 import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
@@ -30,16 +32,12 @@
 import static jakarta.ws.rs.core.Response.Status.*;
 
 import org.hamcrest.CoreMatchers;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.startsWith;
-import static org.hamcrest.CoreMatchers.nullValue;
 import org.hamcrest.Matchers;
 
 import org.junit.jupiter.api.AfterAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.CoreMatchers.hasItem;
+import static
org.junit.jupiter.api.Assertions.*; public class FilesIT { @@ -2211,4 +2209,137 @@ public void testGetFileDataTables() throws InterruptedException { getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } + + @Test + public void testSetFileCategories() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Set categories + String testCategory1 = "testCategory1"; + String testCategory2 = "testCategory2"; + List testCategories = List.of(testCategory1, testCategory2); + Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, testCategories); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data and check for new categories + Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); + getFileDataResponse.prettyPrint(); + getFileDataResponse.then().assertThat() + .body("data.categories", hasItem(testCategory1)) + .body("data.categories", hasItem(testCategory2)) + .statusCode(OK.getStatusCode()); + } + + @Test + public void testSetFileTabularTags() throws InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + // Set tabular tags + String testTabularTag1 = 
"Survey"; + String testTabularTag2 = "Genomics"; + List testTabularTags = List.of(testTabularTag1, testTabularTag2); + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data and check for new categories + Response getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); + getFileDataResponse.then().assertThat() + .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) + .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) + .statusCode(OK.getStatusCode()); + + // Set invalid tabular tag + String testInvalidTabularTag = "Invalid"; + setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(testInvalidTabularTag)); + setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + // Get file data and check categories are unaltered + getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); + getFileDataResponse.then().assertThat() + .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) + .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) + .statusCode(OK.getStatusCode()); + } + + @Test + public void testGetHasBeenDeleted() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Publish dataverse and dataset + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has not been deleted + Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertFalse(fileHasBeenDeleted); + + // Delete test file + Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken); + deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has been deleted + getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + 
getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertTrue(fileHasBeenDeleted); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f61b392c898..8e333451c8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3276,7 +3276,7 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String orderCriteria, String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json"); @@ -3286,6 +3286,21 @@ static Response getVersionFiles(Integer datasetId, String version, Integer limit if (offset != null) { requestSpecification = requestSpecification.queryParam("offset", offset); } + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } if (orderCriteria != null) { requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); } @@ -3317,9 +3332,75 @@ static Response getFileDataTables(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/dataTables"); } + static Response getUserFileAccessRequested(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested"); + } + static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } + + static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("dateAvailable", dateAvailable); + jsonBuilder.add("reason", "This is a test embargo"); + jsonBuilder.add("fileIds", Json.createArrayBuilder().add(fileId)); + String jsonString = jsonBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .contentType("application/json") + .urlEncodingEnabled(false) + .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); + } + + static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + } + + static Response setFileCategories(String 
dataFileId, String apiToken, List categories) { + JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); + for (String category : categories) { + jsonArrayBuilder.add(category); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("categories", jsonArrayBuilder); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/files/" + dataFileId + "/metadata/categories"); + } + + static Response setFileTabularTags(String dataFileId, String apiToken, List tabularTags) { + JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); + for (String tabularTag : tabularTags) { + jsonArrayBuilder.add(tabularTag); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("tabularTags", jsonArrayBuilder); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/files/" + dataFileId + "/metadata/tabularTags"); + } + + static Response deleteFileInDataset(Integer fileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/files/" + fileId); + } + + static Response getHasBeenDeleted(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/hasBeenDeleted"); + } } From 440d02030d12852d61a38c458d6d21d1f9f92bfd Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 16:40:29 +0100 Subject: [PATCH 0560/1092] Added: release notes for PR #9972 --- doc/release-notes/9972-files-api-filter-by-tabular-tags.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9972-files-api-filter-by-tabular-tags.md diff --git a/doc/release-notes/9972-files-api-filter-by-tabular-tags.md b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md new file mode 100644 index 00000000000..9c3fced1741 --- /dev/null +++ b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md @@ -0,0 +1,3 @@ +- New query parameter `tabularTagName` added to the getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) to return files to which the particular tabular tag has been added. + +- New endpoint to set tabular file tags via API: /api/files/{id}/metadata/tabularTags. 
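+
+For example, a usage sketch covering both additions (the server URL, dataset id 24, file id 42, and the API token below are placeholders; `Survey` is one of the predefined tabular tags used in the integration tests):
+
+```shell
+export SERVER_URL=https://demo.dataverse.org
+export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+
+# List the files of the latest version that carry the "Survey" tabular tag:
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/24/versions/:latest/files?tabularTagName=Survey"
+
+# Set tabular tags on an ingested tabular file:
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/42/metadata/tabularTags" \
+  -d '{"tabularTags": ["Survey"]}'
+```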
From 6f464bc4697e5b1aee280d4d963c644ca7a80dc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20ROUCOU?= Date: Wed, 4 Oct 2023 17:58:06 +0200 Subject: [PATCH 0561/1092] Revert print email on modal --- src/main/webapp/roles-assign.xhtml | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/webapp/roles-assign.xhtml b/src/main/webapp/roles-assign.xhtml index 4b355c74d5c..93b9862c55d 100644 --- a/src/main/webapp/roles-assign.xhtml +++ b/src/main/webapp/roles-assign.xhtml @@ -32,7 +32,6 @@ var="roleAssignee" itemLabel="#{roleAssignee.displayInfo.title}" itemValue="#{roleAssignee}" converter="roleAssigneeConverter"> - From eb56c502dc2179754fa144bf0f354da444612ea9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:26:21 -0400 Subject: [PATCH 0562/1092] allow fast xhtml edits in Docker #9590 --- docker-compose-dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 694f2046ca8..930bb1230f5 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -18,6 +18,7 @@ services: - DATAVERSE_AUTH_OIDC_CLIENT_ID=test - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test + - DATAVERSE_JSF_REFRESH_PERIOD=1 ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) @@ -31,6 +32,9 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets + # Uncomment for changes to xhtml in Netbeans to be deployed immediately. + # Replace 6.0 with the current version. + # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: - /dumps:mode=770,size=2052M,uid=1000,gid=1000 - /tmp:mode=770,size=2052M,uid=1000,gid=1000 From db1410e8c14c73becf7042b4d5ebca6c0c9ef8bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 17:15:01 +0100 Subject: [PATCH 0563/1092] Added: non-tabular error test case to setFileTabularTags IT --- .../java/edu/harvard/iq/dataverse/api/FilesIT.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index af678905fe1..af020215f5d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2304,6 +2304,16 @@ public void testSetFileTabularTags() throws InterruptedException { actualTabularTagsCount = getFileDataResponse.jsonPath().getList("data.dataFile.tabularTags").size(); assertEquals(2, actualTabularTagsCount); + + // Should receive an error when calling the endpoint for a non-tabular file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String nonTabularFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + setFileTabularTagsResponse = UtilIT.setFileTabularTags(nonTabularFileId, apiToken, List.of(testInvalidTabularTag)); + setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } @Test From e2fa09680472fdcda617efb430d0b14ccce70ab9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Oct 2023 12:15:18 -0400 Subject: [PATCH 0564/1092] instead of Netbeans, be generic about IDEs/tools #9590 --- 
docker-compose-dev.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 930bb1230f5..d4564ab1335 100644
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -32,7 +32,7 @@ services:
     volumes:
       - ./docker-dev-volumes/app/data:/dv
       - ./docker-dev-volumes/app/secrets:/secrets
-      # Uncomment for changes to xhtml in Netbeans to be deployed immediately.
+      # Uncomment for changes to xhtml to be deployed immediately (if supported by your IDE or toolchain).
       # Replace 6.0 with the current version.
       # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse
     tmpfs:

From 9d7108b7ed2759dade7f6d1b67a1ec1ab541cc95 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 4 Oct 2023 23:59:11 +0200
Subject: [PATCH 0565/1092] chore(build): use stable Rewrite releases for
 Jakarta EE 10 #8305

---
 modules/dataverse-parent/pom.xml | 3 ++-
 pom.xml | 4 ++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml
index bfa11af6c70..b6d846b49bc 100644
--- a/modules/dataverse-parent/pom.xml
+++ b/modules/dataverse-parent/pom.xml
@@ -426,6 +426,7 @@
                 <url>https://artifacts.unidata.ucar.edu/repository/unidata-all/</url>
+
+        -->
 
diff --git a/pom.xml b/pom.xml
index e70b723cad5..5536bcccb05 100644
--- a/pom.xml
+++ b/pom.xml
@@ -359,12 +359,12 @@
         <dependency>
             <groupId>org.ocpsoft.rewrite</groupId>
             <artifactId>rewrite-servlet</artifactId>
-            <version>6.0.0-SNAPSHOT</version>
+            <version>10.0.0.Final</version>
         </dependency>
         <dependency>
            <groupId>org.ocpsoft.rewrite</groupId>
            <artifactId>rewrite-config-prettyfaces</artifactId>
-            <version>6.0.0-SNAPSHOT</version>
+            <version>10.0.0.Final</version>
         </dependency>
         <dependency>
             <groupId>edu.ucsb.nceas</groupId>

From 365b18cf2ba70d2ac192a04475ede920eb11aa6c Mon Sep 17 00:00:00 2001
From: Lehebax
Date: Thu, 5 Oct 2023 12:07:41 +0530
Subject: [PATCH 0566/1092] Fixed the equals() method check

---
 .../java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java | 2 +-
 src/main/java/edu/harvard/iq/dataverse/DataverseContact.java    | 2 +-
 src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java      | 2 +-
 .../edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java    | 2 +-
 src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java      | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
index 7746099818e..8ac98500890 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
@@ -113,7 +113,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetField)) {
+        if (!(object instanceof DatasetFieldDefaultValue)) {
             return false;
         }
         DatasetFieldDefaultValue other = (DatasetFieldDefaultValue) object;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
index d77767985eb..9f86a03639a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
@@ -99,7 +99,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetFieldType)) {
+        if (!(object instanceof DataverseContact)) {
             return false;
         }
         DataverseContact other = (DataverseContact) object;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
index 768c2308e50..83a2d8fdb8f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
+++
b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java @@ -93,7 +93,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFacet)) { return false; } DataverseFacet other = (DataverseFacet) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java index 39ad6ca9520..d30d94cd034 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java @@ -85,7 +85,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFeaturedDataverse)) { return false; } DataverseFeaturedDataverse other = (DataverseFeaturedDataverse) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java index 539669328a7..7f57d16b95a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java @@ -181,7 +181,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseTheme)) { return false; } DataverseTheme other = (DataverseTheme) object; From f42587e1e33af7170185cc24ce382cb402d72533 Mon Sep 17 00:00:00 2001 From: Lehebax Date: Thu, 5 Oct 2023 19:18:37 +0530 Subject: [PATCH 0567/1092] Added unit tests for the fixed equals() methods --- .../DatasetFieldDefaultValueTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseContactTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseFacetTest.java | 47 +++++++++++++++++++ .../DataverseFeaturedDataverseTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseThemeTest.java | 47 +++++++++++++++++++ 5 files changed, 235 insertions(+) create mode 100644 src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java new file mode 100644 index 00000000000..999fadaae06 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DatasetFieldDefaultValueTest { + private DatasetFieldDefaultValue dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DatasetFieldDefaultValue(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetField datasetField = new 
DatasetField(); + + assertFalse(this.dataverseContact.equals(datasetField)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java new file mode 100644 index 00000000000..2abb10a485d --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseContactTest { + private DataverseContact dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DataverseContact(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseContact.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java new file mode 100644 index 00000000000..7ae50439c10 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFacetTest { + private DataverseFacet dataverseFacet; + + @BeforeEach + public void before() { + this.dataverseFacet = new DataverseFacet(); + this.dataverseFacet.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFacet.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFacet.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(1L); + + assertTrue(this.dataverseFacet.equals(dataverseFacet1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFacet 
dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(2L); + + assertFalse(this.dataverseFacet.equals(dataverseFacet1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java new file mode 100644 index 00000000000..b024dc3bfd3 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFeaturedDataverseTest { + private DataverseFeaturedDataverse dataverseFeaturedDataverse; + + @BeforeEach + public void before() { + this.dataverseFeaturedDataverse = new DataverseFeaturedDataverse(); + this.dataverseFeaturedDataverse.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFeaturedDataverse.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFeaturedDataverse.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(1L); + + assertTrue(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(2L); + + assertFalse(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java new file mode 100644 index 00000000000..e6721e34c6f --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseThemeTest { + private DataverseTheme dataverseTheme; + + @BeforeEach + public void before() { + this.dataverseTheme = new DataverseTheme(); + this.dataverseTheme.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseTheme.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseTheme.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(1L); + + assertTrue(this.dataverseTheme.equals(dataverseTheme1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(2L); + + assertFalse(this.dataverseTheme.equals(dataverseTheme1)); + } +} \ No newline at end of file From 
56994b16b9a8dd92d550d491b5df00b542450e64 Mon Sep 17 00:00:00 2001 From: bencomp Date: Fri, 6 Oct 2023 00:55:09 +0200 Subject: [PATCH 0568/1092] Use JsonUtil.getJsonObject to prevent resource leaks --- .../harvard/iq/dataverse/api/Datasets.java | 28 ++++++++----------- .../harvard/iq/dataverse/api/Dataverses.java | 11 ++++---- .../iq/dataverse/api/HarvestingClients.java | 9 +++--- .../edu/harvard/iq/dataverse/api/Prov.java | 6 ++-- .../api/imports/ImportServiceBean.java | 15 ++++------ .../dataverse/globus/GlobusServiceBean.java | 21 ++++++++------ .../DatasetMetricsServiceBean.java | 9 ++---- 7 files changed, 45 insertions(+), 54 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 704ec8f1989..aacfb78cc2e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -93,8 +93,8 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.search.IndexServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; @@ -109,7 +109,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.net.URI; import java.sql.Timestamp; import java.text.MessageFormat; @@ -675,10 +674,10 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); } - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); // clear possibly stale fields from the incoming dataset version. 
@@ -834,10 +833,10 @@ public Response deleteVersionMetadata(@Context ContainerRequestContext crc, Stri } private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) { - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -991,10 +990,10 @@ public Response editVersionMetadata(@Context ContainerRequestContext crc, String private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){ - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -1441,8 +1440,7 @@ public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); Embargo embargo = new Embargo(); @@ -1585,8 +1583,7 @@ public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); List datasetFiles = dataset.getFiles(); List embargoFilesToUnset = new LinkedList<>(); @@ -2101,8 +2098,7 @@ public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam( if (jsonBody == null || jsonBody.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn)."); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); try { Dataset dataset = findDatasetOrDie(idSupplied); String reasonForReturn = null; @@ -2354,9 +2350,7 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa List eTagList = new ArrayList(); logger.info("Etags: " + partETagBody); try { - JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); + JsonObject object = JsonUtil.getJsonObject(partETagBody); for (String partNo : object.keySet()) { eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a60775cbd38..c64ba42999c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -78,8 +78,9 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import 
static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import java.io.StringReader; import java.util.Collections; import java.util.LinkedList; import java.util.List; @@ -178,8 +179,8 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, Dataverse d; JsonObject dvJson; - try (StringReader rdr = new StringReader(body)) { - dvJson = Json.createReader(rdr).readObject(); + try { + dvJson = JsonUtil.getJsonObject(body); d = jsonParser().parseDataverse(dvJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); @@ -559,8 +560,8 @@ public Response recreateDataset(@Context ContainerRequestContext crc, String jso } private Dataset parseDataset(String datasetJson) throws WrappedResponse { - try (StringReader rdr = new StringReader(datasetJson)) { - return jsonParser().parseDataset(Json.createReader(rdr).readObject()); + try { + return jsonParser().parseDataset(JsonUtil.getJsonObject(datasetJson)); } catch (JsonParsingException | JsonParseException jpe) { logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", datasetJson); throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage())); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java index d7eec9f5757..dfc9f48dd1a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.IOException; @@ -164,8 +165,8 @@ public Response createHarvestingClient(@Context ContainerRequestContext crc, Str return wr.getResponse(); } - try ( StringReader rdr = new StringReader(jsonBody) ) { - JsonObject json = Json.createReader(rdr).readObject(); + try { + JsonObject json = JsonUtil.getJsonObject(jsonBody); // Check that the client with this name doesn't exist yet: // (we could simply let the command fail, but that does not result @@ -261,9 +262,9 @@ public Response modifyHarvestingClient(@Context ContainerRequestContext crc, Str String ownerDataverseAlias = harvestingClient.getDataverse().getAlias(); - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); HarvestingClient newHarvestingClient = new HarvestingClient(); String newDataverseAlias = jsonParser().parseHarvestingClient(json, newHarvestingClient); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java index 37b4792920f..7f81ca20988 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java @@ -12,7 +12,8 @@ import edu.harvard.iq.dataverse.engine.command.impl.PersistProvJsonCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.util.HashMap; import 
java.util.logging.Logger; import jakarta.inject.Inject; @@ -109,11 +110,10 @@ public Response addProvFreeForm(@Context ContainerRequestContext crc, String bod if(!systemConfig.isProvCollectionEnabled()) { return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled")); } - StringReader rdr = new StringReader(body); JsonObject jsonObj = null; try { - jsonObj = Json.createReader(rdr).readObject(); + jsonObj = JsonUtil.getJsonObject(body); } catch (JsonException ex) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformInvalidJson")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index bcb67b180c8..c17ba909230 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -36,12 +36,12 @@ import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; -import java.io.StringReader; import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; @@ -60,7 +60,6 @@ import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.validation.ConstraintViolation; @@ -259,9 +258,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve throw new ImportException("Failed to transform XML metadata format "+metadataFormat+" into a DatasetDTO"); } } - - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, harvestingClient); @@ -396,10 +394,8 @@ public JsonObject ddiToJson(String xmlToParse) throws ImportException, XMLStream // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); - return obj; + return JsonUtil.getJsonObject(json); } public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { @@ -416,8 +412,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService); diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d2613422be9..d6943ec3511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -596,13 +596,14 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin Thread.sleep(5000); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: I think an (parsing) exception should stop the process, shouldn't it? } - logger.info("json: " + JsonUtil.prettyPrint(jsonObject)); + logger.log(Level.INFO, "json: {0}", JsonUtil.prettyPrint(jsonObject)); String taskIdentifier = jsonObject.getString("taskIdentifier"); @@ -808,11 +809,12 @@ private String addFiles(String curlCommand, Logger globusLogger) { sb.append(line); globusLogger.info(" API Output : " + sb.toString()); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(sb.toString())) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(sb.toString()); } catch (Exception jpe) { jpe.printStackTrace(); globusLogger.log(Level.SEVERE, "Error parsing dataset json."); + // TODO: a parsing exception should cause the process to stop. } status = jsonObject.getString("status"); @@ -853,11 +855,12 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro globusLogger.info("Starting an globusDownload "); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: stop the process after this parsing exception. 
        }
 
         String taskIdentifier = jsonObject.getString("taskIdentifier");
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
index 0925c164bf4..0fb7e9f1e6c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
@@ -3,7 +3,8 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
-import java.io.StringReader;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.List;
@@ -14,10 +15,8 @@
 import jakarta.ejb.EJBException;
 import jakarta.ejb.Stateless;
 import jakarta.inject.Named;
-import jakarta.json.Json;
 import jakarta.json.JsonArray;
 import jakarta.json.JsonObject;
-import jakarta.json.JsonReader;
 import jakarta.json.JsonValue;
 import jakarta.persistence.EntityManager;
 import jakarta.persistence.PersistenceContext;
@@ -125,9 +124,7 @@ public List parseSushiReport(JsonObject report, Dataset dataset)
         List datasetMetricsDataset = new ArrayList<>();
         String globalId = null;
         Dataset ds = null;
-        StringReader rdr = new StringReader(reportDataset.toString());
-        JsonReader jrdr = Json.createReader(rdr);
-        JsonObject obj = jrdr.readObject();
+        JsonObject obj = JsonUtil.getJsonObject(reportDataset.toString());
         String jsonGlobalId = "";
         String globalIdType = "";
         if (obj.containsKey("dataset-id")) {
jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService); diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d2613422be9..d6943ec3511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -596,13 +596,14 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin Thread.sleep(5000); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: I think a parsing exception should stop the process, shouldn't it? } - logger.info("json: " + JsonUtil.prettyPrint(jsonObject)); + logger.log(Level.INFO, "json: {0}", JsonUtil.prettyPrint(jsonObject)); String taskIdentifier = jsonObject.getString("taskIdentifier"); @@ -808,11 +809,12 @@ private String addFiles(String curlCommand, Logger globusLogger) { sb.append(line); globusLogger.info(" API Output : " + sb.toString()); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(sb.toString())) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(sb.toString()); } catch (Exception jpe) { jpe.printStackTrace(); globusLogger.log(Level.SEVERE, "Error parsing dataset json."); + // TODO: a parsing exception should cause the process to stop. } status = jsonObject.getString("status"); @@ -853,11 +855,12 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro globusLogger.info("Starting an globusDownload "); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: stop the process after this parsing exception.
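(The JsonUtil.getJsonObject helper that all of these hunks switch to is not itself shown in the patch series. A minimal sketch of what such a helper presumably looks like, assuming the jakarta.json API visible in the diffs, is below; the method name comes from the call sites, but the body is an assumption. It centralizes the parsing boilerplate and, unlike several of the replaced call sites, always closes the reader:

```java
import java.io.StringReader;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;

public final class JsonUtil {
    // Parses a JSON object from a String. The try-with-resources block
    // guarantees the reader is closed even when readObject() throws,
    // which the inline Json.createReader(...) call sites did not do.
    public static JsonObject getJsonObject(String serializedJson) {
        try (JsonReader jsonReader = Json.createReader(new StringReader(serializedJson))) {
            return jsonReader.readObject();
        }
    }
}
```
)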
} String taskIdentifier = jsonObject.getString("taskIdentifier"); diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 0925c164bf4..0fb7e9f1e6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -3,7 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; @@ -14,10 +15,8 @@ import jakarta.ejb.EJBException; import jakarta.ejb.Stateless; import jakarta.inject.Named; -import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -125,9 +124,7 @@ public List parseSushiReport(JsonObject report, Dataset dataset) List datasetMetricsDataset = new ArrayList<>(); String globalId = null; Dataset ds = null; - StringReader rdr = new StringReader(reportDataset.toString()); - JsonReader jrdr = Json.createReader(rdr); - JsonObject obj = jrdr.readObject(); + JsonObject obj = JsonUtil.getJsonObject(reportDataset.toString()); String jsonGlobalId = ""; String globalIdType = ""; if (obj.containsKey("dataset-id")) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 0aa403a5116..6b74810eb53 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -4,14 +4,12 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.api.ApiBlockingFilter; import edu.harvard.iq.dataverse.util.StringUtil; - +import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.ejb.EJB; import jakarta.ejb.Stateless; import jakarta.inject.Named; -import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -20,7 +18,6 @@ import org.json.JSONException; import org.json.JSONObject; -import java.io.StringReader; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -696,8 +693,8 @@ public Long getValueForCompoundKeyAsLong(Key key, String param){ try { return Long.parseLong(val); } catch (NumberFormatException ex) { - try ( StringReader rdr = new StringReader(val) ) { - JsonObject settings = Json.createReader(rdr).readObject(); + try { + JsonObject settings = JsonUtil.getJsonObject(val); if(settings.containsKey(param)) { return Long.parseLong(settings.getString(param)); } else if(settings.containsKey("default")) { @@ -730,8 +727,8 @@ public Boolean getValueForCompoundKeyAsBoolean(Key key, String param) { return null; } - try (StringReader rdr = new StringReader(val)) { - JsonObject settings = Json.createReader(rdr).readObject(); + try { + JsonObject settings = JsonUtil.getJsonObject(val); if (settings.containsKey(param)) { return Boolean.parseBoolean(settings.getString(param)); } else if 
(settings.containsKey("default")) { @@ -897,8 +894,7 @@ public Map getBaseMetadataLanguageMap(Map languag if(mlString.isEmpty()) { mlString="[]"; } - JsonReader jsonReader = Json.createReader(new StringReader(mlString)); - JsonArray languages = jsonReader.readArray(); + JsonArray languages = JsonUtil.getJsonArray(mlString); for(JsonValue jv: languages) { JsonObject lang = (JsonObject) jv; languageMap.put(lang.getString("locale"), lang.getString("title")); diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java index 456b829ba61..b104f113db2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.api.Util; -import java.io.StringReader; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; @@ -11,7 +10,7 @@ import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; - +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Success; @@ -42,8 +41,8 @@ public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVers } public static WorkflowStepResult parseResponse(String externalData) { - try (StringReader reader = new StringReader(externalData)) { - JsonObject response = Json.createReader(reader).readObject(); + try { + JsonObject response = JsonUtil.getJsonObject(externalData); String status = null; //Lower case is documented, upper case is deprecated if(response.containsKey("status")) { From faa8de1639b007a8c2a9f90b73a723fc10e7f19a Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 10:31:28 +0100 Subject: [PATCH 0570/1092] Changed: using json files in curl examples for tabular tags and categories update in the docs --- doc/sphinx-guides/source/api/native-api.rst | 54 +++++++++++++++------ 1 file changed, 38 insertions(+), 16 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f78ae62f3ae..97b41ffa98a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3026,6 +3026,15 @@ Updates the categories for an existing file where ``ID`` is the database id of t Although updating categories can also be done with the previous endpoint, this has been created to be more practical when it is only necessary to update categories and not other metadata fields. +The JSON representation of file categories (``categories.json``) looks like this:: + + { + "categories": [ + "Data", + "Custom" + ] + } + A curl example using an ``ID`` .. code-block:: bash @@ -3033,18 +3042,19 @@ A curl example using an ``ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 + export FILE_PATH=categories.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "$SERVER_URL/api/files/$ID/metadata/categories" + "$SERVER_URL/api/files/$ID/metadata/categories" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "http://demo.dataverse.org/api/files/24/metadata/categories" + "http://demo.dataverse.org/api/files/24/metadata/categories" \ + -H "Content-type:application/json" --upload-file categories.json A curl example using a ``PERSISTENT_ID`` @@ -3053,18 +3063,19 @@ A curl example using a ``PERSISTENT_ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=categories.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" + "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" + "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" \ + -H "Content-type:application/json" --upload-file categories.json Note that if the specified categories do not exist, they will be created. @@ -3073,6 +3084,15 @@ Updating File Tabular Tags Updates the tabular tags for an existing tabular file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the tabular tag names. +The JSON representation of tabular tags (``tags.json``) looks like this:: + + { + "tabularTags": [ + "Survey", + "Genomics" + ] + } + A curl example using an ``ID`` .. code-block:: bash @@ -3080,18 +3100,19 @@ A curl example using an ``ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 + export FILE_PATH=tags.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "$SERVER_URL/api/files/$ID/metadata/tabularTags" + "$SERVER_URL/api/files/$ID/metadata/tabularTags" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "http://demo.dataverse.org/api/files/24/metadata/tabularTags" + "http://demo.dataverse.org/api/files/24/metadata/tabularTags" \ + -H "Content-type:application/json" --upload-file tags.json A curl example using a ``PERSISTENT_ID`` @@ -3100,18 +3121,19 @@ A curl example using a ``PERSISTENT_ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=tags.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "$SERVER_URL/api/files/:persistentId/metadata/tabularTags?persistentId=$PERSISTENT_ID" + "$SERVER_URL/api/files/:persistentId/metadata/tabularTags?persistentId=$PERSISTENT_ID" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "https://demo.dataverse.org/api/files/:persistentId/metadata/tabularTags?persistentId=doi:10.5072/FK2/AAA000" + "https://demo.dataverse.org/api/files/:persistentId/metadata/tabularTags?persistentId=doi:10.5072/FK2/AAA000" \ + -H "Content-type:application/json" --upload-file tags.json Note that the specified tabular tags must be valid. The supported tags are: From 1440e653b8480c754f0669bb15f1b2cd92442522 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 14:48:30 +0100 Subject: [PATCH 0571/1092] Refactor: FileSearchCriteria to encapsulate all criteria options --- .../DatasetVersionFilesServiceBean.java | 54 +++++++++---------- .../iq/dataverse/FileSearchCriteria.java | 45 ++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 17 ++++-- .../iq/dataverse/util/json/JsonPrinter.java | 5 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 24 +++++---- 5 files changed, 97 insertions(+), 48 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 6006d937100..a436b10d340 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -29,6 +29,8 @@ import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; +import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; + @Stateless @Named public class DatasetVersionFilesServiceBean implements Serializable { @@ -44,17 +46,10 @@ public class DatasetVersionFilesServiceBean implements Serializable { /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} */ - public enum FileMetadatasOrderCriteria { + public enum FileOrderCriteria { NameAZ, NameZA, Newest, Oldest, Size, Type } - /** - * Status of the particular DataFile based on active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum DataFileAccessStatus { - Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic - } - /** * Given a DatasetVersion, returns its total file metadata count * @@ -107,17 +102,17 @@ public Map 
getFileMetadataCountPerCategoryName(DatasetVersion data } /** - * Given a DatasetVersion, returns its file metadata count per DataFileAccessStatus + * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per DataFileAccessStatus + * @return Map of file metadata counts per FileAccessStatus */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { - Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + Map allCounts = new HashMap<>(); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted); return allCounts; } @@ -127,29 +122,30 @@ public Map getFileMetadataCountPerAccessStatus(Datas * @param datasetVersion the DatasetVersion to access * @param limit for pagination, can be null * @param offset for pagination, can be null - * @param contentType for retrieving only files with this content type - * @param accessStatus for retrieving only files with this DataFileAccessStatus - * @param categoryName for retrieving only files categorized with this category name - * @param tabularTagName for retrieving only files categorized with this tabular tag name - * @param searchText for retrieving only files that contain the specified text within their labels or descriptions - * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @param searchCriteria for retrieving only files matching this criteria + * @param orderCriteria a FileOrderCriteria to order the results * @return a FileMetadata list from the specified DatasetVersion */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); + String contentType = searchCriteria.getContentType(); if (contentType != null) { baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); if (accessStatus != null) { baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); } + String categoryName = searchCriteria.getCategoryName(); if (categoryName != null) { baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); } + String tabularTagName = searchCriteria.getTabularTagName(); if (tabularTagName != null) { 
baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); } + String searchText = searchCriteria.getSearchText(); if (searchText != null && !searchText.isEmpty()) { searchText = searchText.trim().toLowerCase(); baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); @@ -167,14 +163,14 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege return baseQuery.fetch(); } - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); if (fileMetadataCount > 0) { totalCounts.put(dataFileAccessStatus, fileMetadataCount); } } - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); return queryFactory .selectFrom(fileMetadata) @@ -182,16 +178,16 @@ private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, D .stream().count(); } - private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { + private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - if (orderCriteria == FileMetadatasOrderCriteria.Newest || orderCriteria == FileMetadatasOrderCriteria.Oldest) { + if (orderCriteria == FileOrderCriteria.Newest || orderCriteria == FileOrderCriteria.Oldest) { baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); } return baseQuery; } - private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { + private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAccessStatus accessStatus) { QEmbargo embargo = fileMetadata.dataFile.embargo; BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); @@ -215,7 +211,7 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileA return accessStatusExpression; } - private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); switch (orderCriteria) { case NameZA: diff --git a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java new file mode 100644 index 00000000000..62f10c18bdf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse; + +public 
class FileSearchCriteria { + + private final String contentType; + private final FileAccessStatus accessStatus; + private final String categoryName; + private final String tabularTagName; + private final String searchText; + + /** + * Status of the particular DataFile based on active embargoes and restriction state + */ + public enum FileAccessStatus { + Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic + } + + public FileSearchCriteria(String contentType, FileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText) { + this.contentType = contentType; + this.accessStatus = accessStatus; + this.categoryName = categoryName; + this.tabularTagName = tabularTagName; + this.searchText = searchText; + } + + public String getContentType() { + return contentType; + } + + public FileAccessStatus getAccessStatus() { + return accessStatus; + } + + public String getCategoryName() { + return categoryName; + } + + public String getTabularTagName() { + return tabularTagName; + } + + public String getSearchText() { + return searchText; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b3be55399d8..14fd1b2453c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -506,19 +506,26 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @Context HttpHeaders headers) { return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; + FileSearchCriteria.FileAccessStatus dataFileAccessStatus; try { - dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; + dataFileAccessStatus = accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); } - return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); + FileSearchCriteria fileSearchCriteria = new FileSearchCriteria( + contentType, + dataFileAccessStatus, + categoryName, + tabularTagName, + searchText + ); + return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1fed0b233e4..70840c7502f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -41,7 +41,6 @@ import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonValue; import java.util.function.BiConsumer; import java.util.function.BinaryOperator; @@ -1108,9 +1107,9 @@ public static JsonObjectBuilder json(Map map) { return jsonObjectBuilder; } - public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { + for (Map.Entry mapEntry : map.entrySet()) { jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); } return jsonObjectBuilder; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..5d1a89aa555 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.FileSearchCriteria; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -3267,6 +3268,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } + @Test public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); @@ -3334,7 +3336,7 @@ public void getVersionFiles() throws IOException, InterruptedException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3347,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test 
Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3358,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3369,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3380,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3429,7 @@ public void getVersionFiles() throws IOException, InterruptedException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3454,7 @@ public void getVersionFiles() throws IOException, InterruptedException { 
createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3461,7 +3463,7 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3471,7 +3473,7 @@ public void getVersionFiles() throws IOException, InterruptedException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, null, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3569,7 +3571,7 @@ public void getVersionFileCounts() throws IOException { assertEquals(2, responseCountPerContentTypeMap.get("image/png")); assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); - assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); - assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); } } From 94fe70952ed46fd833f5948dfd9dc6ba92b95f6a Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 14:49:55 +0100 Subject: [PATCH 0572/1092] Fixed: missing @Test annotation added to getVersionFilesIT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..cdd1b4dff2b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ 
-3267,6 +3267,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } + @Test public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); From 690ac1e96a2717774e04aefb11603ae126005559 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 15:29:45 +0100 Subject: [PATCH 0573/1092] Added: file search criteria params to getVersionFileCounts API endpoint (Pending IT to be added) --- .../DatasetVersionFilesServiceBean.java | 99 ++++++++++--------- .../harvard/iq/dataverse/api/Datasets.java | 48 ++++++--- 2 files changed, 89 insertions(+), 58 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index a436b10d340..9afd0513b62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -54,26 +54,32 @@ public enum FileOrderCriteria { * Given a DatasetVersion, returns its total file metadata count * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return long value of total file metadata count */ - public long getFileMetadataCount(DatasetVersion datasetVersion) { + public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); + JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + return baseQuery.stream().count(); } /** * Given a DatasetVersion, returns its file metadata count per content type * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per content type */ - public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List contentTypeOccurrences = queryFactory + JPAQuery baseQuery = queryFactory .select(fileMetadata.dataFile.contentType, fileMetadata.count()) .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - .groupBy(fileMetadata.dataFile.contentType).fetch(); + .groupBy(fileMetadata.dataFile.contentType); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List contentTypeOccurrences = baseQuery.fetch(); Map result = new HashMap<>(); for (Tuple occurrence : contentTypeOccurrences) { result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); @@ -85,15 +91,18 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas * Given a DatasetVersion, returns its file metadata count per category name * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per category name */ - public Map getFileMetadataCountPerCategoryName(DatasetVersion 
datasetVersion) { + public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List categoryNameOccurrences = queryFactory + JPAQuery baseQuery = queryFactory .select(dataFileCategory.name, fileMetadata.count()) .from(dataFileCategory, fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) - .groupBy(dataFileCategory.name).fetch(); + .groupBy(dataFileCategory.name); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List categoryNameOccurrences = baseQuery.fetch(); Map result = new HashMap<>(); for (Tuple occurrence : categoryNameOccurrences) { result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); @@ -105,14 +114,15 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per FileAccessStatus */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted, searchCriteria); return allCounts; } @@ -128,54 +138,31 @@ public Map getFileMetadataCountPerAccessStatus(DatasetVe */ public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); - - String contentType = searchCriteria.getContentType(); - if (contentType != null) { - baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); - } - FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); - if (accessStatus != null) { - baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); - } - String categoryName = searchCriteria.getCategoryName(); - if (categoryName != null) { - baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); - } - String tabularTagName = searchCriteria.getTabularTagName(); - if (tabularTagName != null) { - baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); - } - String searchText = searchCriteria.getSearchText(); - if (searchText != null && !searchText.isEmpty()) { - searchText = 
searchText.trim().toLowerCase(); - baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); - } - + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); - if (limit != null) { baseQuery.limit(limit); } if (offset != null) { baseQuery.offset(offset); } - return baseQuery.fetch(); } - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus) { - long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus, FileSearchCriteria searchCriteria) { + long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus, searchCriteria); if (fileMetadataCount > 0) { totalCounts.put(dataFileAccessStatus, fileMetadataCount); } } - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus) { + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory + JPAQuery baseQuery = queryFactory .selectFrom(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) - .stream().count(); + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + return baseQuery.stream().count(); } private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { @@ -211,6 +198,30 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAcces return accessStatusExpression; } + private void applyFileSearchCriteriaToQuery(JPAQuery baseQuery, FileSearchCriteria searchCriteria) { + String contentType = searchCriteria.getContentType(); + if (contentType != null) { + baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); + } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); + if (accessStatus != null) { + baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); + } + String categoryName = searchCriteria.getCategoryName(); + if (categoryName != null) { + baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); + } + String tabularTagName = searchCriteria.getTabularTagName(); + if (tabularTagName != null) { + baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); + } + String searchText = searchCriteria.getSearchText(); + if (searchText != null && !searchText.isEmpty()) { + searchText = searchText.trim().toLowerCase(); + baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); + } + } + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { DateTimeExpression orderByLifetimeExpression = new 
CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); switch (orderCriteria) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 14fd1b2453c..ac32454c950 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -512,19 +512,18 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - FileSearchCriteria.FileAccessStatus dataFileAccessStatus; + FileSearchCriteria fileSearchCriteria; try { - dataFileAccessStatus = accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null; + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); } - FileSearchCriteria fileSearchCriteria = new FileSearchCriteria( - contentType, - dataFileAccessStatus, - categoryName, - tabularTagName, - searchText - ); return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } @@ -532,14 +531,35 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersionFileCounts(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); - jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); - jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); - jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); + jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); + jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria))); return ok(jsonObjectBuilder); }, getRequestUser(crc)); } From a0870b8554c709f25fb3bc47e04f58e08e951f2f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 15:35:17 +0100 Subject: [PATCH 0574/1092] Refactor: using Bundle.properties string for bad request errors in getVersionFiles and getVersionFileCounts API endpoints --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 +++--- src/main/java/propertyFiles/Bundle.properties | 2 ++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index ac32454c950..f7a4b1d0d25 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -510,7 +510,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, try { fileOrderCriteria = orderCriteria != null ? 
DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria))); } FileSearchCriteria fileSearchCriteria; try { @@ -522,7 +522,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, searchText ); } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); @@ -552,7 +552,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, searchText ); } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 7b4befcca36..3128106d38f 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2646,6 +2646,8 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status +datasets.api.version.files.invalid.order.criteria=Invalid order criteria: {0} +datasets.api.version.files.invalid.access.status=Invalid access status: {0} #Dataverses.java From 2abb36fc2f24e78ca75ebe0cbfc0a84a1345af26 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 17:00:56 +0100 Subject: [PATCH 0575/1092] Added: IT for getVersionFileCounts with criteria --- .../harvard/iq/dataverse/api/DatasetsIT.java | 127 +++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 ++- 2 files changed, 143 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5d1a89aa555..433628685b2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3484,6 +3484,13 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); assertEquals(3, fileMetadatasCount); + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, invalidStatus, null, null, null, null, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", 
List.of(invalidStatus)))); + // Test Search Text Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); @@ -3519,7 +3526,7 @@ public void getVersionFiles() throws IOException, InterruptedException { } @Test - public void getVersionFileCounts() throws IOException { + public void getVersionFileCounts() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3557,8 +3564,10 @@ public void getVersionFileCounts() throws IOException { Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); + String testDatasetVersion = ":latest"; + + // Getting the file counts without criteria and assert each count is correct + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, null, null, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3570,8 +3579,120 @@ public void getVersionFileCounts() throws IOException { assertEquals(4, (Integer) responseJsonPath.get("data.total")); assertEquals(2, responseCountPerContentTypeMap.get("image/png")); assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test content type criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, "image/png", null, null, null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(2, (Integer) responseJsonPath.get("data.total")); + assertEquals(2, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test access status criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, apiToken); + + 
getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, invalidStatus, null, null, null, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + + // Test category name criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, "testCategory", null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test search text criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, null, "test", apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test tabular tag name criteria + String pathToTabularTestFile = 
"src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, tabularTagName, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8e333451c8d..6d0f0bfa752 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3358,10 +3358,26 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { - return given() + static Response getVersionFileCounts(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String apiToken) { + RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + .contentType("application/json"); + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } static Response setFileCategories(String dataFileId, String apiToken, List categories) { From 65df3d0f4bca41598dcc5cad741779d7d8fd5716 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 
From 65df3d0f4bca41598dcc5cad741779d7d8fd5716 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 9 Oct 2023 09:36:40 +0100
Subject: [PATCH 0576/1092] Added: count per tabular tag name to getVersionFileCounts API endpoint

---
 .../DatasetVersionFilesServiceBean.java       | 23 +++++++++++++++
 .../harvard/iq/dataverse/api/Datasets.java    |  1 +
 .../iq/dataverse/util/json/JsonPrinter.java   |  8 +++++++
 .../harvard/iq/dataverse/api/DatasetsIT.java  | 13 +++++++++
 4 files changed, 45 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
index 9afd0513b62..b6b095f58dd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
@@ -110,6 +110,29 @@ public Map<String, Long> getFileMetadataCountPerCategoryName(DatasetVersion data
         return result;
     }
 
+    /**
+     * Given a DatasetVersion, returns its file metadata count per DataFileTag.TagType
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for counting only files matching this criteria
+     * @return Map of file metadata counts per DataFileTag.TagType
+     */
+    public Map<DataFileTag.TagType, Long> getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        JPAQueryFactory queryFactory = new JPAQueryFactory(em);
+        JPAQuery<Tuple> baseQuery = queryFactory
+                .select(dataFileTag.type, fileMetadata.count())
+                .from(dataFileTag, fileMetadata)
+                .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag)))
+                .groupBy(dataFileTag.type);
+        applyFileSearchCriteriaToQuery(baseQuery, searchCriteria);
+        List<Tuple> tagNameOccurrences = baseQuery.fetch();
+        Map<DataFileTag.TagType, Long> result = new HashMap<>();
+        for (Tuple occurrence : tagNameOccurrences) {
+            result.put(occurrence.get(dataFileTag.type), occurrence.get(fileMetadata.count()));
+        }
+        return result;
+    }
+
     /**
      * Given a DatasetVersion, returns its file metadata count per FileAccessStatus
     *
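[Editorial note: the shared helper applyFileSearchCriteriaToQuery() is reused here but its body is not part of this patch. A minimal, hedged sketch of what such a predicate-appending helper could look like, assuming QueryDSL metamodel names matching those used above and simple getters on FileSearchCriteria; the actual implementation in DatasetVersionFilesServiceBean may differ:

    // Hedged sketch only - not the patch's actual code. Metamodel/getter names are assumptions.
    private void applyFileSearchCriteriaToQuery(JPAQuery<?> baseQuery, FileSearchCriteria searchCriteria) {
        // Each criteria field is optional; only non-null values narrow the query.
        if (searchCriteria.getContentType() != null) {
            baseQuery.where(fileMetadata.dataFile.contentType.eq(searchCriteria.getContentType()));
        }
        if (searchCriteria.getCategoryName() != null) {
            baseQuery.where(fileMetadata.fileCategories.any().name.eq(searchCriteria.getCategoryName()));
        }
        if (searchCriteria.getSearchText() != null) {
            String searchText = searchCriteria.getSearchText().trim().toLowerCase();
            baseQuery.where(fileMetadata.label.lower().contains(searchText)
                    .or(fileMetadata.description.lower().contains(searchText)));
        }
        // accessStatus and tabularTagName predicates omitted for brevity.
    }
]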
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index f7a4b1d0d25..26d4dd01cf5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -559,6 +559,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc,
             jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
             jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
             jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
+            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
             jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
             return ok(jsonObjectBuilder);
         }, getRequestUser(crc));
     }

diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 70840c7502f..6fe1ca87028 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -1115,6 +1115,14 @@ public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map<FileSearchCriteria.FileAccessStatus, Long> map) {
         return jsonObjectBuilder;
     }
 
+    public static JsonObjectBuilder jsonFileCountPerTabularTagNameMap(Map<DataFileTag.TagType, Long> map) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        for (Map.Entry<DataFileTag.TagType, Long> mapEntry : map.entrySet()) {
+            jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue());
+        }
+        return jsonObjectBuilder;
+    }
+
     public static Collector<JsonObjectBuilder, JsonArrayBuilder, JsonArrayBuilder> toJsonArray() {
 
         return new Collector<JsonObjectBuilder, JsonArrayBuilder, JsonArrayBuilder>() {

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 433628685b2..53546133b27 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -3574,6 +3574,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException {
         JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath();
         LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
         LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        LinkedHashMap responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
         LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
 
         assertEquals(4, (Integer) responseJsonPath.get("data.total"));
@@ -3581,6 +3582,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException {
         assertEquals(2, responseCountPerContentTypeMap.get("text/plain"));
         assertEquals(2, responseCountPerContentTypeMap.size());
         assertEquals(1, responseCountPerCategoryNameMap.get(testCategory));
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
         assertEquals(2, responseCountPerAccessStatusMap.size());
         assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
         assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString()));
@@ -3593,6 +3595,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException {
         responseJsonPath = getVersionFileCountsResponse.jsonPath();
         responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
         responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
         responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
 
         assertEquals(2, (Integer) responseJsonPath.get("data.total"));
@@ -3600,6 +3603,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException {
         assertEquals(1, responseCountPerContentTypeMap.size());
         assertEquals(1, responseCountPerCategoryNameMap.size());
         assertEquals(1, responseCountPerCategoryNameMap.get(testCategory));
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
         assertEquals(2, responseCountPerAccessStatusMap.size());
         assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
         assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString()));
@@ -3612,6 +3616,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException {
         responseJsonPath = getVersionFileCountsResponse.jsonPath();
         responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
         responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = 
responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(3, (Integer) responseJsonPath.get("data.total")); @@ -3619,6 +3624,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); @@ -3637,6 +3643,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(1, (Integer) responseJsonPath.get("data.total")); @@ -3644,6 +3651,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3655,6 +3663,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(3, (Integer) responseJsonPath.get("data.total")); @@ -3662,6 +3671,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); @@ -3686,12 +3696,15 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(1, (Integer) responseJsonPath.get("data.total")); assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(0, 
responseCountPerCategoryNameMap.size());
+        assertEquals(1, responseCountPerTabularTagNameMap.size());
+        assertEquals(1, responseCountPerTabularTagNameMap.get(tabularTagName));
         assertEquals(1, responseCountPerAccessStatusMap.size());
         assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
     }

From 98a444c2108395fc562e0159d554ce1f9968686e Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 9 Oct 2023 09:45:15 +0100
Subject: [PATCH 0577/1092] Added: docs for extended getVersionFileCounts endpoint

---
 doc/sphinx-guides/source/api/native-api.rst | 52 +++++++++++++++++++++
 1 file changed, 52 insertions(+)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 97b41ffa98a..f05c4d42073 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1046,6 +1046,7 @@ The returned file counts are based on different criteria:
 
 - Total (The total file count)
 - Per content type
 - Per category name
+- Per tabular tag name
 - Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic)
 
 .. code-block:: bash
@@ -1062,6 +1063,57 @@ The fully expanded example above (without environment variables) looks like this
 
     curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts"
 
+Category name filtering is optionally supported, returning counts only for files to which the requested category has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?categoryName=Data"
+
+Tabular tag name filtering is also optionally supported, returning counts only for files to which the requested tabular tag has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?tabularTagName=Survey"
+
+Content type filtering is also optionally supported, returning counts only for files matching the requested content type.
+
+Usage example:
+
+.. code-block:: bash
+
+    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=image/png"
+
+Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, returning counts only for files that contain the searched text in one of those fields.
+
+Usage example:
+
+.. code-block:: bash
+
+    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?searchText=word"
+
+File access filtering is also optionally supported, using the following possible values:
+
+* ``Public``
+* ``Restricted``
+* ``EmbargoedThenRestricted``
+* ``EmbargoedThenPublic``
+
+If no filter is specified, the files will match all of the above access statuses.
+
+Usage example:
+
+.. code-block:: bash
+
+    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?accessStatus=Public"
+
+Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them.
+
+Keep in mind that you can combine all of the above query params depending on the results you are looking for.
+
 View Dataset Files and Folders as a Directory Index
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
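[Editorial note: the guide text above says the criteria query params can be combined. A hypothetical combined request, together with an illustrative (not captured from a real installation) response shape showing the perTabularTagName key added by the previous commit; the envelope keys come from the Datasets.java changes above, the counts are made up:

    curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=text/tab-separated-values&accessStatus=Public"

    {
      "status": "OK",
      "data": {
        "total": 1,
        "perContentType": { "text/tab-separated-values": 1 },
        "perCategoryName": {},
        "perTabularTagName": { "Survey": 1 },
        "perAccessStatus": { "Public": 1 }
      }
    }
]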
From 7d0501cdc2982e591d99eab29b9569d2880ebf30 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 9 Oct 2023 09:50:30 +0100
Subject: [PATCH 0578/1092] Added: #9907 release notes

---
 .../9907-files-api-counts-with-criteria.md    | 11 +++++++++++
 1 file changed, 11 insertions(+)
 create mode 100644 doc/release-notes/9907-files-api-counts-with-criteria.md

diff --git a/doc/release-notes/9907-files-api-counts-with-criteria.md b/doc/release-notes/9907-files-api-counts-with-criteria.md
new file mode 100644
index 00000000000..07cd23daad0
--- /dev/null
+++ b/doc/release-notes/9907-files-api-counts-with-criteria.md
@@ -0,0 +1,11 @@
+Extended the getVersionFileCounts endpoint (/api/datasets/{id}/versions/{versionId}/files/counts) to support filtering by criteria.
+
+In particular, the endpoint now accepts the following optional criteria query parameters:
+
+- contentType
+- accessStatus
+- categoryName
+- tabularTagName
+- searchText
+
+These filtering criteria are the same as those for the getVersionFiles endpoint.

From 35eeed53cefe427df8684ca8c20046be2b2a45f2 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 9 Oct 2023 10:07:53 +0100
Subject: [PATCH 0579/1092] Refactor: using variable instead of repeated string in IT

---
 src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 53546133b27..06d0bed14c0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -3636,7 +3636,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException {
                 .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus))));
 
         // Test category name criteria
-        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, "testCategory", null, null, apiToken);
+        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, testCategory, null, null, apiToken);
 
         getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
 

From 1e8b735ca1baba7c12bac0737cfc88eedc084ec3 Mon Sep 17 00:00:00 2001
From: mr-loop-1
Date: Mon, 9 Oct 2023 14:47:26 +0530
Subject: [PATCH 0580/1092] #9412 added markdown in external tools guide

---
 .../source/_static/admin/dataverse-external-tools.tsv | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
index 8543300dd2c..f8bf5fc73d9 100644
--- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
+++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
@@ -2,6 +2,6 @@ Tool Type Scope Description
 Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse."
 Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_."
 Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project.
-File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers"
+File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown (md), text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers"
 Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions."
 Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation.

From b32d51fab1b78ff9316476f76c549163c4b3e7ba Mon Sep 17 00:00:00 2001
From: mr-loop-1
Date: Mon, 9 Oct 2023 14:50:28 +0530
Subject: [PATCH 0581/1092] #9412 added markdown to file previews list

---
 doc/sphinx-guides/source/user/dataset-management.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index 3b5b4ec6ba8..c41ca40dd36 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -200,6 +200,7 @@ Previewers are available for the following file types:
 
 - Text
 - PDF
+- Markdown (MD)
 - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`)
 - Code (R, etc.)
- Images (PNG, GIF, JPG) From dea8bf7636bc396d51773bc481135dab5f1a7679 Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:51:13 +0530 Subject: [PATCH 0582/1092] #9412 removed file extension markdown --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index f8bf5fc73d9..a13dea923e4 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown (md), text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." 
Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. From 5a3b7853607d0a995ad9cbdbbcf402114f2a70b8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Oct 2023 08:53:43 -0400 Subject: [PATCH 0583/1092] add release note for markdown previewer #9412 --- doc/release-notes/9412-markdown-previewer.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9412-markdown-previewer.md diff --git a/doc/release-notes/9412-markdown-previewer.md b/doc/release-notes/9412-markdown-previewer.md new file mode 100644 index 00000000000..8faa2679fb0 --- /dev/null +++ b/doc/release-notes/9412-markdown-previewer.md @@ -0,0 +1 @@ +There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews From cc117bd4396e18f5680f34488928bb7a009b8bf0 Mon Sep 17 00:00:00 2001 From: Abdul Samad <62374784+mr-loop-1@users.noreply.github.com> Date: Mon, 9 Oct 2023 19:03:00 +0530 Subject: [PATCH 0584/1092] remove extension after markdown Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/user/dataset-management.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index c41ca40dd36..1e8ea897032 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -200,7 +200,7 @@ Previewers are available for the following file types: - Text - PDF -- Markdown (MD) +- Markdown - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`) - Code (R, etc.) - Images (PNG, GIF, JPG) From 44b015f375fc92505def8ef2e2475950a3818d4e Mon Sep 17 00:00:00 2001 From: Abdul Samad <62374784+mr-loop-1@users.noreply.github.com> Date: Mon, 9 Oct 2023 19:03:32 +0530 Subject: [PATCH 0585/1092] Capitalise Markdown Co-authored-by: Philip Durbin --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index a13dea923e4..4f4c29d0670 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. 
`Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. From e1acdd328fa4a6ca6624522e21806c7d2a779ef9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 14:49:09 +0200 Subject: [PATCH 0586/1092] test(oidc): deactivate test when no Docker available #9974 As many of IQSS and external devs might not have Docker available, let's deactivate any Testcontainers tests in these cases. 
--- .../oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 5968cf3eaeb..ee6823ef98a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -54,7 +54,7 @@ @Tag(Tags.INTEGRATION_TEST) @Tag(Tags.USES_TESTCONTAINERS) -@Testcontainers +@Testcontainers(disabledWithoutDocker = true) @ExtendWith(MockitoExtension.class) // NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl() @LocalJvmSettings From 2aa7a471249cb129aeef13d6301f10ddb43506b7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 14:52:43 +0200 Subject: [PATCH 0587/1092] doc(testing): change docs for TC ITs to disable when no Docker #9974 --- doc/sphinx-guides/source/developers/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4691aca3aad..dab8110b20b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -316,7 +316,7 @@ Please make sure to: .. code:: java /** A very minimal example for a Testcontainers integration test class. */ - @Testcontainers + @Testcontainers(disabledWithoutDocker = true) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS) class MyExampleIT { /* ... */ } From ed291936810a46e260df9809def80b2d2c5b50dc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 20:04:11 +0200 Subject: [PATCH 0588/1092] style(ct): remove empty lines from configbaker Dockerfile --- modules/container-configbaker/Dockerfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 2975b043213..9b98334d72b 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -40,8 +40,6 @@ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml - - # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] # By default run a script that will print a help message and terminate From e89e2aaeb32f983462ea11b64eceab6ddc926eb7 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Oct 2023 10:47:00 -0400 Subject: [PATCH 0589/1092] #9507 revert to use dataverse in bundle --- src/main/java/propertyFiles/Bundle.properties | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 4964dac78a4..e3dbdc144f1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -875,22 +875,22 @@ dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. 
dataverse.contact=Email Dataverse Contact dataverse.link=Link Collection -dataverse.link.btn.tip=Link to Your Collection -dataverse.link.yourDataverses=Your Collection -dataverse.link.yourDataverses.inputPlaceholder=Enter Collection Name -dataverse.link.save=Save Linked collection -dataverse.link.dataverse.choose=Choose which of your collection you would like to link this collection to. -dataverse.link.dataset.choose=Enter the name of the collection you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. -dataverse.link.dataset.none=No linkable collections available. -dataverse.link.no.choice=You have one collection you can add linked collection and datasets in. -dataverse.link.no.linkable=To be able to link a collection or dataset, you need to have your own collection. Create a collection to get started. -dataverse.link.no.linkable.remaining=You have already linked all of your eligible collections. +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your Dataverse +dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name +dataverse.link.save=Save Linked Dataverse +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. +dataverse.link.dataset.none=No linkable dataverses available. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. dataverse.savedsearch.link=Link Search dataverse.savedsearch.searchquery=Search dataverse.savedsearch.filterQueries=Facets dataverse.savedsearch.save=Save Linked Search -dataverse.savedsearch.dataverse.choose=Choose which of your collection you would like to link this search to. -dataverse.savedsearch.no.choice=You have one collection to which you may add a saved search. +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. # Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test dataverse.saved.search.success=The saved search has been successfully linked to {0}. dataverse.saved.search.failure=The saved search was not able to be linked. @@ -2498,7 +2498,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: -dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following collection(s): +dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s): datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. 
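[Editorial note: several strings touched in this series, both the reverted ones above (e.g. "please contact {0}") and the error keys added in PATCH 0574, use MessageFormat-style {0} placeholders. As a reminder of how they are resolved, here is the BundleUtil pattern already used in the Datasets.java changes earlier in this series, applied to a key this series adds:

    // Resolving a bundle string with a {0} placeholder, as done in Datasets.java above.
    String message = BundleUtil.getStringFromBundle(
            "datasets.api.version.files.invalid.access.status",
            List.of("invalidStatus"));
    // message is now: "Invalid access status: invalidStatus"
]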
From 617f36cd98b267bc99d53a7b69c21d96974ff4dc Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Oct 2023 10:48:11 -0400 Subject: [PATCH 0590/1092] #9507 missed one --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index e3dbdc144f1..89eabaeb0bf 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -874,7 +874,7 @@ dataverse.publish.header=Publish Dataverse dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. dataverse.contact=Email Dataverse Contact -dataverse.link=Link Collection +dataverse.link=Link Dataverse dataverse.link.btn.tip=Link to Your Dataverse dataverse.link.yourDataverses=Your Dataverse dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name From 18cdf133f49d597da6aea9d21385e45b77844ceb Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 11 Oct 2023 14:48:51 -0400 Subject: [PATCH 0591/1092] stripping more dead code in the version service bean (my experimental filemetadatas retrieval method, not directly used in the PR). (#9763) --- .../dataverse/DatasetVersionServiceBean.java | 88 ------------------- 1 file changed, 88 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 476a306e081..c2f9027a38a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,22 +49,6 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" - + " WHERE fm.datasetVersion.id=:datasetVersionId" - + " ORDER BY fm.label"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = dvo.id" - + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.filesize"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.contentType"; - @EJB DatasetServiceBean datasetService; @@ -166,18 +150,6 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, - NameZA, - Newest, - Oldest, - Size, - Type - } - public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1287,64 +1259,4 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions - - /** - 
* Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list of the specified DatasetVersion - */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { - TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) - .setParameter("datasetVersionId", datasetVersion.getId()); - - if (limit == null && offset == null) { - query = query.setHint("eclipselink.left-join-fetch", "fm.dataFile.ingestRequest") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.thumbnailForDataset") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataTables") - .setHint("eclipselink.left-join-fetch", "fm.fileCategories") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.embargo") - .setHint("eclipselink.left-join-fetch", "fm.datasetVersion") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.releaseUser") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.dataFileTags") - .setHint("eclipselink.left-join-fetch", "fm.dataFile.creator"); - } else { - // @todo: is there really no way to use offset-limit with left join hints? - if (limit != null) { - query = query.setMaxResults(limit); - } - if (offset != null) { - query = query.setFirstResult(offset); - } - } - return query.getResultList(); - } - - private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { - String queryString; - switch (orderCriteria) { - case NameZA: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; - break; - case Newest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; - break; - case Oldest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; - break; - case Size: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; - break; - case Type: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; - break; - default: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; - break; - } - return queryString; - } } // end class From 381ddf59088808a536d58498e60514e1ea8557b8 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 11 Oct 2023 15:22:52 -0400 Subject: [PATCH 0592/1092] more commented-out code that needed to be removed before finalizing the pr. (#9763) --- .../edu/harvard/iq/dataverse/Dataset.java | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 692a2ba0245..245bdf0efd2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -693,31 +693,12 @@ public Timestamp getCitationDate() { Timestamp citationDate = null; //Only calculate if this dataset doesn't use an alternate date field for publication date if (citationDateDatasetFieldType == null) { - // @todo: remove this commented-out code once/if the PR passes review - L.A. - //List versions = this.versions; - // TODo - is this ever not version 1.0 (or draft if not published yet) - //DatasetVersion oldest = versions.get(versions.size() - 1); - // - I believe the answer is yes, the oldest versions will always be - // either 1.0 or draft - L.A. 
citationDate = super.getPublicationDate(); if (embargoCitationDate != null) { if (citationDate.compareTo(embargoCitationDate) < 0) { return embargoCitationDate; } } - // @todo: remove this commented-out code once/if the PR passes review - L.A. - /*if (oldest.isPublished()) { - List fms = oldest.getFileMetadatas(); - for (FileMetadata fm : fms) { - Embargo embargo = fm.getDataFile().getEmbargo(); - if (embargo != null) { - Timestamp embDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay()); - if (citationDate.compareTo(embDate) < 0) { - citationDate = embDate; - } - } - } - }*/ } return citationDate; } From 4c67f2a636699d51589fa815511ce4e1b3dc9d1f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 6 Oct 2023 12:13:19 -0400 Subject: [PATCH 0593/1092] remove inefficient bucket check --- .../iq/dataverse/dataaccess/S3AccessIO.java | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 822ada0b83e..22216ee5c2b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -108,14 +108,13 @@ public S3AccessIO(T dvObject, DataAccessRequest req, String driverId) { if(!StringUtil.isEmpty(proxy)&&StringUtil.isEmpty(endpoint)) { logger.severe(driverId + " config error: Must specify a custom-endpoint-url if proxy-url is specified"); } - //Not sure this is needed but moving it from the open method for now since it definitely doesn't need to run every time an object is opened. - try { - if (bucketName == null || !s3.doesBucketExistV2(bucketName)) { - throw new IOException("ERROR: S3AccessIO - You must create and configure a bucket before creating datasets."); - } - } catch (SdkClientException sce) { - throw new IOException("ERROR: S3AccessIO - Failed to look up bucket "+bucketName+" (is AWS properly configured?): " + sce.getMessage()); - } + + // FWIW: There used to be a check here to see if the bucket exists. + // It was very redundant (checking every time we access any file) and didn't do + // much but potentially make the failure (in the unlikely case a bucket doesn't + // exist/just disappeared) happen slightly earlier (here versus at the first + // file/metadata access). 
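        // A sketch of what a one-time, out-of-band check could look like for an
        // installer who wants that early warning back (assuming the store's
        // configured bucketName and the AmazonS3 client s3 are in scope, as in the
        // removed code above):
        //
        //     try {
        //         if (bucketName == null || !s3.doesBucketExistV2(bucketName)) {
        //             logger.severe("Bucket " + bucketName + " is missing or not visible");
        //         }
        //     } catch (SdkClientException sce) {
        //         logger.severe("Failed to look up bucket " + bucketName + ": " + sce.getMessage());
        //     }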
+ } catch (Exception e) { throw new AmazonClientException( "Cannot instantiate a S3 client; check your AWS credentials and region", From 90dfa42c9090ce9e4cf9dab1e8ed57776137a077 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 11 Oct 2023 16:41:38 -0400 Subject: [PATCH 0594/1092] Redesigned provider mechanism --- .../iq/dataverse/dataaccess/S3AccessIO.java | 71 ++++++++++++++----- 1 file changed, 52 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 22216ee5c2b..ee04bbcb853 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -4,6 +4,7 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.HttpMethod; import com.amazonaws.SdkClientException; +import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSCredentialsProviderChain; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; @@ -57,9 +58,11 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Optional; import java.util.Random; import java.util.function.Predicate; import java.util.logging.Logger; @@ -1180,29 +1183,59 @@ private static AmazonS3 getClient(String driverId) { // Boolean is inverted, otherwise setting dataverse.files..chunked-encoding=false would result in leaving Chunked Encoding enabled s3CB.setChunkedEncodingDisabled(!s3chunkedEncoding); - /** - * Pass in a string value if this storage driver should use a non-default AWS S3 profile. - * The default is "default" which should work when only one profile exists. + /** Configure credentials for the S3 client. There are multiple mechanisms available. + * Role-based/instance credentials are globally defined while the other mechanisms (profile, static) + * are defined per store. The logic below assures that + * * if a store specific profile or static credentials are explicitly set, they will be used in preference to the global role-based credentials. + * * if a store specific role-based credentials are explicitly set, they will be used in preference to the global instance credentials, + * * if a profile and static credentials are both explicitly set, the profile will be used preferentially, and + * * if no store-specific credentials are set, the global credentials will be preferred over using any "default" profile credentials that are found. */ - String s3profile = System.getProperty("dataverse.files." + driverId + ".profile","default"); - ProfileCredentialsProvider profileCredentials = new ProfileCredentialsProvider(s3profile); - - // Try to retrieve credentials via Microprofile Config API, too. For production use, you should not use env - // vars or system properties to provide these, but use the secrets config source provided by Payara. - AWSStaticCredentialsProvider staticCredentials = new AWSStaticCredentialsProvider( - new BasicAWSCredentials( - config.getOptionalValue("dataverse.files." + driverId + ".access-key", String.class).orElse(""), - config.getOptionalValue("dataverse.files." 
+ driverId + ".secret-key", String.class).orElse("") - )); - - //Add role-based provider as in the default provider chain - InstanceProfileCredentialsProvider instanceCredentials = InstanceProfileCredentialsProvider.getInstance(); + ArrayList providers = new ArrayList<>(); + + String s3profile = System.getProperty("dataverse.files." + driverId + ".profile"); + boolean allowInstanceCredentials = true; + // Assume that instance credentials should not be used if the profile is + // actually set for this store or if static creds are provided (below). + if (s3profile != null) { + allowInstanceCredentials = false; + } + // Try to retrieve credentials via Microprofile Config API, too. For production + // use, you should not use env vars or system properties to provide these, but + // use the secrets config source provided by Payara. + Optional accessKey = config.getOptionalValue("dataverse.files." + driverId + ".access-key", String.class); + Optional secretKey = config.getOptionalValue("dataverse.files." + driverId + ".secret-key", String.class); + if (accessKey.isPresent() && secretKey.isPresent()) { + allowInstanceCredentials = false; + AWSStaticCredentialsProvider staticCredentials = new AWSStaticCredentialsProvider( + new BasicAWSCredentials( + accessKey.orElse(""), + secretKey.orElse(""))); + providers.add(staticCredentials); + } else if (s3profile == null) { + //Only use the default profile when it isn't explicitly set for this store when there are no static creds (otherwise it will be preferred). + s3profile = "default"; + } + if (s3profile != null) { + ProfileCredentialsProvider profileCredentials = new ProfileCredentialsProvider(s3profile); + providers.add(profileCredentials); + } + + if (allowInstanceCredentials) { + // Add role-based provider as in the default provider chain + InstanceProfileCredentialsProvider instanceCredentials = InstanceProfileCredentialsProvider.getInstance(); + providers.add(instanceCredentials); + } // Add all providers to chain - the first working provider will be used - // (role-based is first in the default cred provider chain, so we're just + // (role-based is first in the default cred provider chain (if no profile or + // static creds are explicitly set for the store), so we're just // reproducing that, then profile, then static credentials as the fallback) - AWSCredentialsProviderChain providerChain = new AWSCredentialsProviderChain(instanceCredentials, profileCredentials, staticCredentials); + + // As the order is the reverse of how we added providers, we reverse the list here + Collections.reverse(providers); + AWSCredentialsProviderChain providerChain = new AWSCredentialsProviderChain(providers); s3CB.setCredentials(providerChain); - + // let's build the client :-) AmazonS3 client = s3CB.build(); driverClientMap.put(driverId, client); From dcca52566958fba3f58698766f9696723fcebfc0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 12 Oct 2023 09:28:42 -0400 Subject: [PATCH 0595/1092] Good cleanup Co-authored-by: Oliver Bertuch --- .../harvard/iq/dataverse/dataaccess/S3AccessIO.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index ee04bbcb853..a66686ac648 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -1209,22 +1209,20 @@ private static AmazonS3 getClient(String driverId) { 
allowInstanceCredentials = false; AWSStaticCredentialsProvider staticCredentials = new AWSStaticCredentialsProvider( new BasicAWSCredentials( - accessKey.orElse(""), - secretKey.orElse(""))); + accessKey.get(), + secretKey.get())); providers.add(staticCredentials); } else if (s3profile == null) { //Only use the default profile when it isn't explicitly set for this store when there are no static creds (otherwise it will be preferred). s3profile = "default"; } if (s3profile != null) { - ProfileCredentialsProvider profileCredentials = new ProfileCredentialsProvider(s3profile); - providers.add(profileCredentials); + providers.add(new ProfileCredentialsProvider(s3profile)); } if (allowInstanceCredentials) { // Add role-based provider as in the default provider chain - InstanceProfileCredentialsProvider instanceCredentials = InstanceProfileCredentialsProvider.getInstance(); - providers.add(instanceCredentials); + providers.add(InstanceProfileCredentialsProvider.getInstance()); } // Add all providers to chain - the first working provider will be used // (role-based is first in the default cred provider chain (if no profile or From ada8cc7a713c8074378c7732d4cf30688d50f9cf Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 10:44:14 +0100 Subject: [PATCH 0596/1092] Fixed: curl examples in docs for deaccession dataset --- doc/sphinx-guides/source/api/native-api.rst | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index e51ca0055b6..1dc1ab13d9f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1383,21 +1383,31 @@ Deaccession Dataset Given a version of a dataset, updates its status to deaccessioned. +The JSON body required to deaccession a dataset (``deaccession.json``) looks like this:: + + { + "deaccessionReason": "Description of the deaccession reason.", + "deaccessionForwardURL": "https://demo.dataverse.org" + } + + +Note that the field ``deaccessionForwardURL`` is optional. + .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 export VERSIONID=1.0 - export JSON='{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + export FILE_PATH=deaccession.json - curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -H "Content-type:application/json" --upload-file deaccession.json .. note:: You cannot deaccession a dataset more than once. 
If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be published since it is already deaccessioned. From 1f0efddbd6cb4e10b7f5924dbd338105f18add81 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 11:35:44 +0100 Subject: [PATCH 0597/1092] Fixed: permission checks in GetSpecificPublishedDatasetVersionCommand --- ...etSpecificPublishedDatasetVersionCommand.java | 3 ++- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 16 ++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 14 +++++++++----- 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java index 879a694ef57..a87eb8a99a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -40,7 +41,7 @@ public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Data @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { for (DatasetVersion dsv : ds.getVersions()) { - if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) { return dsv; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 2d52a6c6e15..ee81d3f67f4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3543,6 +3543,14 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); + + // Test that the dataset files for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, "1.0", null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test @@ -3620,6 +3628,14 @@ public void getVersionFileCounts() throws IOException { responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); assertEquals(4, (Integer) responseJsonPath.get("data.total")); + + // Test that the dataset file counts for a deaccessioned dataset 
cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, "1.0", true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 93a7cc64082..434dc6d26f1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3290,9 +3290,11 @@ static Response getVersionFiles(Integer datasetId, boolean includeDeaccessioned, String apiToken) { RequestSpecification requestSpecification = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json") .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } if (limit != null) { requestSpecification = requestSpecification.queryParam("limit", limit); } @@ -3372,10 +3374,12 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date } static Response getVersionFileCounts(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("includeDeaccessioned", includeDeaccessioned) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + RequestSpecification requestSpecification = given() + .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } static Response setFileCategories(String dataFileId, String apiToken, List categories) { From 4b5ad8fac1c1733c73ad0e2f5d7e1e47155895bc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 13 Oct 2023 10:04:25 -0400 Subject: [PATCH 0598/1092] rename sql script #9763 avoid conflict with V6.0.0.1__9599-guestbook-at-request.sql --- ...rgocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.1__9763-embargocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql rename to src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql From 12ba35e9b9c4f0396ed942ea30a832e6a57c22c9 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 17:14:17 +0100 Subject: [PATCH 0599/1092] Fixed: failing tests after develop merge --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6626b18219c..34eccd3172a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ 
-3622,8 +3622,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); // Test content type criteria - getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, "image/png", null, null, null, null, false, apiToken); - + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, "image/png", null, null, null, null, false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponse.jsonPath(); @@ -3760,7 +3759,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); - assertEquals(4, (Integer) responseJsonPath.get("data.total")); + assertEquals(5, (Integer) responseJsonPath.get("data.total")); // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest // By latest published version From beed44473f5a51d2a9d69fd31353c4900f8391ae Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 13 Oct 2023 13:44:23 -0400 Subject: [PATCH 0600/1092] re-apply #9892 --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 69cc9fae55c..34df0c79390 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -7,7 +7,8 @@ xmlns:o="http://omnifaces.org/ui" xmlns:jsf="http://xmlns.jcp.org/jsf" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> - + + @@ -321,4 +322,5 @@

+
From 53b73e09f28751c3e17150e17908df5fa3f308b4 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 13 Oct 2023 14:07:37 -0400
Subject: [PATCH 0601/1092] remove file with "TODO - remove!"

---
 .../webapp/file-download-popup-fragment.xhtml | 305 ------------------
 1 file changed, 305 deletions(-)
 delete mode 100644 src/main/webapp/file-download-popup-fragment.xhtml

diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml
deleted file mode 100644
index 3a64ca4a3a2..00000000000
--- a/src/main/webapp/file-download-popup-fragment.xhtml
+++ /dev/null
@@ -1,305 +0,0 @@
[305 deleted lines of XHTML markup omitted: the markup was stripped to whitespace in extraction, leaving only stray fragments such as #{bundle['file.downloadDialog.tip']} from the old file-download popup.]
    \ No newline at end of file From f47867ee34e93e14efaca2fba414e202d234c1c6 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 15 Oct 2023 19:24:09 -0400 Subject: [PATCH 0602/1092] renaming the flyway script since 6.0.0.1 has already been merged. (#9763) --- ...rgocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.0.0.1__9763-embargocitationdate.sql => V6.0.0.2__9763-embargocitationdate.sql} (100%) diff --git a/src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql similarity index 100% rename from src/main/resources/db/migration/V6.0.0.1__9763-embargocitationdate.sql rename to src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql From 35f69517ea2139c2e742b7d7b28e1b88dcdd9ef5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 15 Oct 2023 20:22:09 -0400 Subject: [PATCH 0603/1092] Switching to the new version of gdcc/xoai, v5.2.0 (#9910) --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 19f78415280..1d99c1cd3d8 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -165,7 +165,7 @@ 4.4.14 - 5.1.0 + 5.2.0 1.19.0 From 4182b036f24ba8402ffe7f2c304ed4026fa7874d Mon Sep 17 00:00:00 2001 From: Abhinav Rana <142827270+AR-2910@users.noreply.github.com> Date: Mon, 16 Oct 2023 07:50:09 +0530 Subject: [PATCH 0604/1092] Update config.rst Adding link to "Dataverse General User Interface Translation Guide for Weblate" in the "Tools For Translators" section. Issue #9512. --- doc/sphinx-guides/source/installation/config.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 086b0a80895..ce8876b012c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1276,6 +1276,8 @@ The list below depicts a set of tools that can be used to ease the amount of wor - `easyTranslationHelper `_, a tool developed by `University of Aveiro `_. +- `Dataverse General User Interface Translation Guide for Weblate `_, a guide produced as part of the `SSHOC Dataverse Translation `_ event. + .. 
_Web-Analytics-Code: Web Analytics Code From cea36d6aadcc3a21d70c3029b498a279256d6c07 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 12:19:15 +0100 Subject: [PATCH 0605/1092] Added: file search criteria to getDownloadSize datasets API endpoint --- .../DatasetVersionFilesServiceBean.java | 23 +++++++++++-------- .../harvard/iq/dataverse/api/Datasets.java | 19 ++++++++++++++- .../harvard/iq/dataverse/api/DatasetsIT.java | 21 +++++++++++------ .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 +++++++++++++++--- 4 files changed, 64 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 2c14498caa9..6ea9262bbc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -53,7 +53,7 @@ public enum FileOrderCriteria { } /** - * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileDownloadSizeMode)} + * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileSearchCriteria, FileDownloadSizeMode)} *

    * All: Includes both archival and original sizes for tabular files * Archival: Includes only the archival size for tabular files @@ -191,16 +191,17 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege * Returns the total download size of all files for a particular DatasetVersion * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for retrieving only files matching this criteria * @param mode a FileDownloadSizeMode to base the search on * @return long value of total file download size */ - public long getFilesDownloadSize(DatasetVersion datasetVersion, FileDownloadSizeMode mode) { + public long getFilesDownloadSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria, FileDownloadSizeMode mode) { return switch (mode) { case All -> - Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, false)); + Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, false, searchCriteria)); case Original -> - Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, true)); - case Archival -> getArchivalFilesSize(datasetVersion, false); + Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, true, searchCriteria)); + case Archival -> getArchivalFilesSize(datasetVersion, false, searchCriteria); }; } @@ -301,22 +302,24 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu } } - private long getOriginalTabularFilesSize(DatasetVersion datasetVersion) { + private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - Long result = queryFactory + JPAQuery baseQuery = queryFactory .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) .from(dataTable) - .where(dataTable.dataFile.eq(fileMetadata.dataFile)) - .select(dataTable.originalFileSize.sum()).fetchFirst(); + .where(dataTable.dataFile.eq(fileMetadata.dataFile)); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + Long result = baseQuery.select(dataTable.originalFileSize.sum()).fetchFirst(); return (result == null) ? 
0 : result; } - private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular) { + private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); JPAQuery baseQuery = queryFactory .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); Long result; if (ignoreTabular) { result = baseQuery.where(fileMetadata.dataFile.dataTables.isEmpty()).select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f8929c5e8d8..8605b4772f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2992,11 +2992,28 @@ public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam( public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, @QueryParam("mode") String mode, @Context UriInfo uriInfo, @Context HttpHeaders headers) { return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); + } DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode; try { fileDownloadSizeMode = mode != null ? 
DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All; @@ -3004,7 +3021,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); - long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileDownloadSizeMode); + long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 34eccd3172a..66a67887405 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3863,7 +3863,7 @@ public void getDownloadSize() throws IOException, InterruptedException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; // Get the total size when there are no tabular files - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3878,7 +3878,7 @@ public void getDownloadSize() throws IOException, InterruptedException { Thread.sleep(2000); // Get the total size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); @@ -3889,7 +3889,7 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); @@ -3897,13 +3897,13 @@ public void getDownloadSize() throws 
IOException, InterruptedException { int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); // Get the total size sending invalid file download size mode String invalidMode = "invalidMode"; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); @@ -3917,15 +3917,22 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); // Get the total size including both the original and archival tabular file sizes expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size including both the original and archival tabular file sizes with search criteria + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + // We exclude tabular sizes from the expected result since the search criteria filters by content type "text/plain" and search text "test_" + int expectedSizeIncludingAllSizesAndApplyingCriteria = testFileSize1 + testFileSize2; + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizesAndApplyingCriteria)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java 
b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4421e9280b3..38cc44c8c0d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3450,10 +3450,26 @@ static Response deaccessionDataset(Integer datasetId, String version, String dea .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, String mode, String apiToken) { - return given() + static Response getDownloadSize(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String mode, String apiToken) { + RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("mode", mode) + .queryParam("mode", mode); + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return requestSpecification .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); } } From b6bcbf7cadcf8e7b2f05825836d155f6a589b710 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 12:47:35 +0100 Subject: [PATCH 0606/1092] Added: getDownloadSize API endpoint deaccessioned dataset support --- .../harvard/iq/dataverse/api/Datasets.java | 3 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 41 +++++++++++++++---- .../edu/harvard/iq/dataverse/api/UtilIT.java | 26 ++++++++++-- 3 files changed, 58 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 8605b4772f4..852dd18ee84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2998,6 +2998,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, @QueryParam("tabularTagName") String tabularTagName, @QueryParam("searchText") String searchText, @QueryParam("mode") String mode, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @Context UriInfo uriInfo, @Context HttpHeaders headers) { @@ -3020,7 +3021,7 @@ public Response getDownloadSize(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); } - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned); long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java 
b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 66a67887405..e12de1e23cc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3863,7 +3863,7 @@ public void getDownloadSize() throws IOException, InterruptedException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; // Get the total size when there are no tabular files - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3878,7 +3878,7 @@ public void getDownloadSize() throws IOException, InterruptedException { Thread.sleep(2000); // Get the total size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); @@ -3889,7 +3889,7 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); @@ -3897,13 +3897,13 @@ public void getDownloadSize() throws IOException, InterruptedException { int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); // Get the total size sending invalid file download size mode String invalidMode = 
"invalidMode"; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); @@ -3917,22 +3917,47 @@ public void getDownloadSize() throws IOException, InterruptedException { // Get the total size including only original sizes and ignoring archival sizes for tabular files expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); // Get the total size including both the original and archival tabular file sizes expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); // Get the total size including both the original and archival tabular file sizes with search criteria - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); // We exclude tabular sizes from the expected result since the search criteria filters by content type "text/plain" and search text "test_" int expectedSizeIncludingAllSizesAndApplyingCriteria = testFileSize1 + testFileSize2; getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizesAndApplyingCriteria)); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + 
deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, apiToken); + getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getVersionFileCountsGuestUserResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null); + getVersionFileCountsGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getVersionFileCountsGuestUserResponse = UtilIT.getDownloadSize(datasetId, "1.0", null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null); + getVersionFileCountsGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 38cc44c8c0d..15350782fa1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3373,7 +3373,15 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, boolean includeDeaccessioned, String apiToken) { + static Response getVersionFileCounts(Integer datasetId, + String version, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() .queryParam("includeDeaccessioned", includeDeaccessioned); if (apiToken != null) { @@ -3450,10 +3458,22 @@ static Response deaccessionDataset(Integer datasetId, String version, String dea .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String mode, String apiToken) { + static Response getDownloadSize(Integer datasetId, + String version, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + String mode, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .queryParam("mode", mode); + if (apiToken != null) { + 
requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken);
+        }
         if (contentType != null) {
             requestSpecification = requestSpecification.queryParam("contentType", contentType);
         }

From 794c5b64e6daa52e1d84fb2cb58468b5104d3161 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 16 Oct 2023 13:03:08 +0100
Subject: [PATCH 0607/1092] Added: extended docs for getDownloadSize API endpoint

---
 doc/sphinx-guides/source/api/native-api.rst | 61 ++++++++++++++++++++-
 1 file changed, 58 insertions(+), 3 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 2e1a878dce8..98e2722ac5e 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -964,7 +964,7 @@ The fully expanded example above (without environment variables) looks like this

   curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files"

-This endpoint supports optional pagination, through the ``limit`` and ``offset`` query params:
+This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters:

 .. code-block:: bash

@@ -1044,7 +1044,7 @@ Usage example:

   curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?includeDeaccessioned=true"

-.. note:: Keep in mind that you can combine all of the above query params depending on the results you are looking for.
+.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.

 Get File Counts in a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1132,7 +1132,7 @@ Usage example:

 Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them.

-Keep in mind that you can combine all of the above query params depending on the results you are looking for.
+Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.

 View Dataset Files and Folders as a Directory Index
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1888,6 +1888,61 @@ Usage example:

   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?mode=Archival"

+Category name filtering is also optionally supported, to return the size of all files available for download matching the requested category name.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?categoryName=Data"
+
+Tabular tag name filtering is also optionally supported, to return the size of all files available for download to which the requested tabular tag has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?tabularTagName=Survey"
+
+Content type filtering is also optionally supported, to return the size of all files available for download matching the requested content type.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?contentType=image/png"
+
+Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return the size of all files available for download that contain the searched text in either of those fields.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?searchText=word"
+
+File access filtering is also optionally supported. It accepts the following possible values:
+
+* ``Public``
+* ``Restricted``
+* ``EmbargoedThenRestricted``
+* ``EmbargoedThenPublic``
+
+If no filter is specified, the files will match all of the above categories.
+
+Please note that filtering query parameters are case sensitive and must be correctly typed for the endpoint to recognize them.
+
+By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
+
+If you want to include deaccessioned dataset versions, you must set the ``includeDeaccessioned`` query parameter to ``true``.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?includeDeaccessioned=true"
+
+.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.
+
 Submit a Dataset for Review
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~

From 28cd109303ec22cbb898f32f3141cc281f4c7c62 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 16 Oct 2023 13:10:38 +0100
Subject: [PATCH 0608/1092] Added: release notes for #9995

---
 ...adsize-with-criteria-and-deaccessioned-support.md | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md

diff --git a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md
new file mode 100644
index 00000000000..71c7aa3b516
--- /dev/null
+++ b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md
@@ -0,0 +1,12 @@
+Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/files/downloadsize), including the following new features:
+
+- The endpoint now accepts a new optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned dataset versions when searching for versions to obtain the total download size of the files.
+
+
+- The endpoint now supports filtering by criteria. In particular, it accepts the following optional criteria query parameters:
+
+  - contentType
+  - accessStatus
+  - categoryName
+  - tabularTagName
+  - searchText

From ab237777309b90e299e584cff6995618bc378ebd Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 16 Oct 2023 13:22:30 +0100
Subject: [PATCH 0609/1092] Fixed: release notes

---
 ...-api-downloadsize-with-criteria-and-deaccessioned-support.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md
index 71c7aa3b516..020224b2094 100644
--- a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md
+++ b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md
@@ -1,4 +1,4 @@
-Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/files/downloadsize), including the following new features:
+Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/downloadsize), including the following new features:

 - The endpoint now accepts a new optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned dataset versions when searching for versions to obtain the total download size of the files.

From e847ed04e87f16d5423bcfade38453fd1d959343 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 16 Oct 2023 16:53:58 +0100
Subject: [PATCH 0610/1092] Fixed: set label as second ordering column when ordering by content type

---
 .../harvard/iq/dataverse/DatasetVersionFilesServiceBean.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
index 2c14498caa9..701ff4474ea 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
@@ -293,7 +293,7 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu
                 query.orderBy(fileMetadata.dataFile.filesize.asc());
                 break;
             case Type:
-                query.orderBy(fileMetadata.dataFile.contentType.asc());
+                query.orderBy(fileMetadata.dataFile.contentType.asc(), fileMetadata.label.asc());
                 break;
             default:
                 query.orderBy(fileMetadata.label.asc());

From 4ad95697405512c16ec42b1d242ce620aec2436a Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 16 Oct 2023 16:32:13 -0400
Subject: [PATCH 0611/1092] partial changes for permission mgmt, etc.
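
A rough usage sketch for the renamed endpoint added below (not part of the patch itself; the server URL, dataset id, and Globus principal value are placeholders, following the curl conventions used elsewhere in the guides). Per the new code, this is an authenticated POST that expects a JSON body with a "principal" string and a positive "numberOfFiles" count, and returns a JSON array of accessible paths on success:

  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
    -H "Content-Type: application/json" \
    "https://demo.dataverse.org/api/datasets/24/requestGlobusTransferPaths" \
    --data '{"principal": "some-globus-identity-id", "numberOfFiles": 2}'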
--- .../harvard/iq/dataverse/api/Datasets.java | 33 ++- .../dataverse/globus/GlobusServiceBean.java | 218 +++++++++++------- .../iq/dataverse/settings/JvmSettings.java | 1 + 3 files changed, 155 insertions(+), 97 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 25839544ce9..d3ea1b80696 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3363,6 +3363,15 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, } catch (WrappedResponse wr) { return wr.getResponse(); } + + JsonObject jsonObject = null; + try { + jsonObject = JsonUtil.getJsonObject(jsonData); + } catch (Exception ex) { + logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage()); + return badRequest("Error parsing json body"); + + } //------------------------------------ // (2b) Make sure dataset does not have package file @@ -3396,7 +3405,7 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, String requestUrl = SystemConfig.getDataverseSiteUrlStatic(); // Async Call - globusService.globusUpload(jsonData, token, dataset, requestUrl, authUser); + globusService.globusUpload(jsonObject, token, dataset, requestUrl, authUser); return ok("Async call to Globus Upload started "); @@ -3414,9 +3423,10 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, */ @POST @AuthRequired - @Path("{id}/allowGlobusUpload") + @Path("{id}/requestGlobusTransferPaths") @Consumes(MediaType.APPLICATION_JSON) - public Response allowGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, String jsonBody + @Produces(MediaType.APPLICATION_JSON) + public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, String jsonBody ) throws IOException, ExecutionException, InterruptedException { @@ -3454,15 +3464,18 @@ public Response allowGlobusUpload(@Context ContainerRequestContext crc, @PathPar if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset) .canIssue(UpdateDatasetVersionCommand.class)) { - + try { JsonObject params = JsonUtil.getJsonObject(jsonBody); String principal = params.getString("principal"); + int numberOfPaths = params.getInt("numberOfFiles"); + if(numberOfPaths <=0) { + return badRequest("numberOfFiles must be positive"); + } - // Async Call - int status = globusService.givePermission("identity", principal, "rw", dataset); - switch (status) { + JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths); + switch (response.getInt("status")) { case 201: - return ok("Permission Granted"); + return ok(response.getJsonArray("paths")); case 400: return badRequest("Unable to grant permission"); case 409: @@ -3470,6 +3483,10 @@ public Response allowGlobusUpload(@Context ContainerRequestContext crc, @PathPar default: return error(null, "Unexpected error when granting permission"); } + } catch (NullPointerException|ClassCastException e) { + return badRequest("Error retrieving principal and numberOfFiles from JSON request body"); + + } } else { return forbidden("User doesn't have permission to upload to this dataset"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index ad20b90971b..49572519696 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -1,7 +1,11 @@ package edu.harvard.iq.dataverse.globus; +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; import com.google.gson.FieldNamingPolicy; import com.google.gson.GsonBuilder; +import com.nimbusds.oauth2.sdk.pkce.CodeVerifier; + import edu.harvard.iq.dataverse.*; import jakarta.ejb.Asynchronous; @@ -15,7 +19,9 @@ import jakarta.json.JsonArray; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonPatch; +import jakarta.json.JsonValue; import jakarta.servlet.http.HttpServletRequest; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; @@ -29,6 +35,8 @@ import java.net.URLEncoder; import java.sql.Timestamp; import java.text.SimpleDateFormat; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; @@ -48,6 +56,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -107,8 +116,10 @@ public void setUserTransferToken(String userTransferToken) { this.userTransferToken = userTransferToken; } - private ArrayList checkPermissions(GlobusEndpoint endpoint, String principalType, String principal) throws MalformedURLException { + private String getRuleId(GlobusEndpoint endpoint, String principal, String permissions) throws MalformedURLException { + String principalType="identity"; + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access_list"); MakeRequestResponse result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null); @@ -118,20 +129,22 @@ private ArrayList checkPermissions(GlobusEndpoint endpoint, String princ for (int i = 0; i < al.getDATA().size(); i++) { Permissions pr = al.getDATA().get(i); + if ((pr.getPath().equals(endpoint.getBasePath() + "/") || pr.getPath().equals(endpoint.getBasePath())) && pr.getPrincipalType().equals(principalType) - && ((principal == null) || (principal != null && pr.getPrincipal().equals(principal)))) { - ids.add(pr.getId()); + && ((principal == null) || (principal != null && pr.getPrincipal().equals(principal))) + &&pr.getPermissions().equals(permissions)) { + return pr.getId(); } else { - logger.info(pr.getPath() + " === " + endpoint.getBasePath() + " == " + pr.getPrincipalType()); + logger.fine(pr.getPath() + " === " + endpoint.getBasePath() + " == " + pr.getPrincipalType()); continue; } } } - - return ids; + return null; } -/* + + /* public void updatePermision(AccessToken clientTokenUser, String directory, String principalType, String perm) throws MalformedURLException { if (directory != null && !directory.equals("")) { @@ -165,47 +178,71 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin } } */ - public void deletePermission(String ruleId, Logger globusLogger) throws MalformedURLException { - - if (ruleId.length() > 0) { - AccessToken clientTokenUser = 
getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""));
-
-            globusLogger.info("Start deleting permissions.");
-            String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
-
-            URL url = new URL(
-                    "https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
-            MakeRequestResponse result = makeRequest(url, "Bearer",
-                    clientTokenUser.getOtherTokens().get(0).getAccessToken(), "DELETE", null);
-            if (result.status != 200) {
-                globusLogger.warning("Cannot delete access rule " + ruleId);
-            } else {
-                globusLogger.info("Access rule " + ruleId + " was deleted successfully");
+
+/** Call to delete a globus rule related to the specified dataset.
+ *
+ * @param ruleId       - Globus rule id - assumed to be associated with the dataset's file path (should not be called with a user specified rule id w/o further checking)
+ * @param dataset      - the dataset associated with the rule
+ * @param globusLogger - a separate logger instance, may be null
+ */
+public void deletePermission(String ruleId, Dataset dataset, Logger globusLogger) {
+
+    if (ruleId.length() > 0) {
+        if (dataset != null) {
+            GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+            if (endpoint != null) {
+                String accessToken = endpoint.getClientToken();
+                if (globusLogger != null) {
+                    globusLogger.info("Start deleting permissions.");
+                }
+                try {
+                    URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId()
+                            + "/access/" + ruleId);
+                    MakeRequestResponse result = makeRequest(url, "Bearer", accessToken, "DELETE", null);
+                    if (result.status != 200) {
+                        if (globusLogger != null) {
+                            globusLogger.warning("Cannot delete access rule " + ruleId);
+                        } else {
+                            // When removed due to a cache ejection, we don't have a globusLogger
+                            logger.warning("Cannot delete access rule " + ruleId);
+                        }
+                    } else {
+                        if (globusLogger != null) {
+                            globusLogger.info("Access rule " + ruleId + " was deleted successfully");
+                        }
+                    }
+                } catch (MalformedURLException ex) {
+                    logger.log(Level.WARNING,
+                            "Failed to delete access rule " + ruleId + " on endpoint " + endpoint.getId(), ex);
+                }
             }
         }
-    }
+}

-    public int givePermission(String principalType, String principal, String perm, Dataset dataset) throws MalformedURLException {
+    public JsonObject requestAccessiblePaths(String principal, Dataset dataset, int numberOfPaths) {

         GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
-        ArrayList rules = checkPermissions(endpoint, principalType, principal);
+        String principalType= "identity";

         Permissions permissions = new Permissions();
         permissions.setDATA_TYPE("access");
         permissions.setPrincipalType(principalType);
         permissions.setPrincipal(principal);
         permissions.setPath(endpoint.getBasePath() + "/");
-        permissions.setPermissions(perm);
+        permissions.setPermissions("rw");

         Gson gson = new GsonBuilder().create();
         MakeRequestResponse result = null;
-        if (rules.size() == 0) {
             logger.info("Start creating the rule");
+        JsonObjectBuilder response = Json.createObjectBuilder();
+
+        try {
             URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access");
             result = makeRequest(url, "Bearer", endpoint.getClientToken(), "POST", gson.toJson(permissions));
+            response.add("status", result.status);

             switch (result.status) {
             case 400:
@@ -215,35 +252,50 @@ public int givePermission(String principalType, String principal, String perm, D
                 logger.warning("ACL already exists or Endpoint ACL already has the maximum number of 
access rules"); break; case 201: - JsonObject response = JsonUtil.getJsonObject(result.jsonResponse); - if (response != null && response.containsKey("access_id")) { - permissions.setId(response.getString("access_id")); - monitorTemporaryPermissions(permissions, endpoint); + JsonObject globusResponse = JsonUtil.getJsonObject(result.jsonResponse); + if (globusResponse != null && globusResponse.containsKey("access_id")) { + permissions.setId(globusResponse.getString("access_id")); + monitorTemporaryPermissions(permissions.getId(), dataset.getId()); logger.info("Access rule " + permissions.getId() + " was created successfully"); + JsonArrayBuilder pathArray = Json.createArrayBuilder(); + for(int i=0;i rulesCache = Caffeine.newBuilder() + .expireAfterWrite(Duration.of(JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) + .removalListener((ruleId, datasetId, cause) -> { + //Delete rules that expire + Dataset dataset = datasetSvc.find(datasetId); + deletePermission((String) ruleId, dataset, null); + }) + + .build(); + + + private void monitorTemporaryPermissions(String ruleId, long datasetId) { + rulesCache.put(ruleId, datasetId); } public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId) throws MalformedURLException { @@ -468,6 +520,7 @@ private MakeRequestResponse findDirectory(String directory, String clientToken, return result; } + /* public boolean giveGlobusPublicPermissions(Dataset dataset) throws UnsupportedEncodingException, MalformedURLException { @@ -478,20 +531,6 @@ public boolean giveGlobusPublicPermissions(Dataset dataset) if (status.status == 200) { - /* - * FilesList fl = parseJson(status.jsonResponse, FilesList.class, false); - * ArrayList files = fl.getDATA(); if (files != null) { for (FileG file: - * files) { if (!file.getName().contains("cached") && - * !file.getName().contains(".thumb")) { int perStatus = - * givePermission("all_authenticated_users", "", "r", clientTokenUser, directory - * + "/" + file.getName(), globusEndpoint); logger.info("givePermission status " - * + perStatus + " for " + file.getName()); if (perStatus == 409) { - * logger.info("Permissions already exist or limit was reached for " + - * file.getName()); } else if (perStatus == 400) { - * logger.info("No file in Globus " + file.getName()); } else if (perStatus != - * 201) { logger.info("Cannot get permission for " + file.getName()); } } } } - */ - int perStatus = givePermission("all_authenticated_users", "", "r", dataset); logger.info("givePermission status " + perStatus); if (perStatus == 409) { @@ -512,7 +551,8 @@ public boolean giveGlobusPublicPermissions(Dataset dataset) return true; } - +*/ + // Generates the URL to launch the Globus app public String getGlobusAppUrlForDataset(Dataset d) { return getGlobusAppUrlForDataset(d, true, null); @@ -572,7 +612,7 @@ public String getGlobusDownloadScript(Dataset dataset, ApiToken apiToken) { @Asynchronous @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void globusUpload(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl, + public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, String httpRequestUrl, AuthenticatedUser authUser) throws ExecutionException, InterruptedException, MalformedURLException { Integer countAll = 0; @@ -606,33 +646,33 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin // ToDo - use DataAccess methods? 
//String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3);
         //datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3);
+
+        logger.fine("json: " + JsonUtil.prettyPrint(jsonData));
-        Thread.sleep(5000);
-
-        JsonObject jsonObject = null;
-        try (StringReader rdr = new StringReader(jsonData)) {
-            jsonObject = Json.createReader(rdr).readObject();
-        } catch (Exception jpe) {
-            jpe.printStackTrace();
-            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}");
-        }
-        logger.info("json: " + JsonUtil.prettyPrint(jsonObject));
-
-        String taskIdentifier = jsonObject.getString("taskIdentifier");
+        String taskIdentifier = jsonData.getString("taskIdentifier");

-        String ruleId = "";
-        try {
-            ruleId = jsonObject.getString("ruleId");
-        } catch (NullPointerException npe) {
-            logger.warning("NPE for jsonData object");
-        }
+        String ruleId = null;
+        Thread.sleep(5000);
+
         // globus task status check
         GlobusTask task = globusStatusCheck(taskIdentifier, globusLogger);
         String taskStatus = getTaskStatus(task);

-        if (ruleId.length() > 0) {
-            deletePermission(ruleId, globusLogger);
+        GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+
+        ruleId = getRuleId(endpoint, task.getOwner_id(), "rw");
+        if(ruleId!=null) {
+            Long datasetId = rulesCache.getIfPresent(ruleId);
+            if(datasetId!=null) {
+
+                //Will delete rule
+                rulesCache.invalidate(ruleId);
+            } else {
+                //The cache already expired this rule, in which case its delay was not long enough, or we have some other problem
+                logger.warning("Rule " + ruleId + " not found in rulesCache");
+                deletePermission(ruleId, dataset, globusLogger);
+            }
         }

         // If success, switch to an EditInProgress lock - do this before removing the
@@ -674,7 +714,7 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
 //        List inputList = new ArrayList();

-        JsonArray filesJsonArray = jsonObject.getJsonArray("files");
+        JsonArray filesJsonArray = jsonData.getJsonArray("files");

         if (filesJsonArray != null) {
             String datasetIdentifier = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage();

@@ -905,7 +945,7 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro
         String taskStatus = getTaskStatus(task);

         if (ruleId.length() > 0) {
-            deletePermission(ruleId, globusLogger);
+            deletePermission(ruleId, dataset, globusLogger);
         }

         if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) {

diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
index b4807372b69..f8abe505dca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
@@ -48,6 +48,7 @@ public enum JvmSettings {
     SCOPE_FILES(PREFIX, "files"),
     FILES_DIRECTORY(SCOPE_FILES, "directory"),
     GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"),
+    GLOBUS_RULES_CACHE_MAXAGE(SCOPE_FILES, "globus-rules-cache-maxage"),
     FILES(SCOPE_FILES),
     BASE_URL(FILES, "base-url"),
     GLOBUS_TOKEN(FILES, "globus-token"),

From 30395309689949a3fc633e3be5fa4c30cc1f27cd Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 16 Oct 2023 16:33:02 -0400
Subject: [PATCH 0612/1092] check driver type not id

---
 .../java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java 
b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index f4cc7d40120..3bc83538679 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { } public static boolean isDirectUploadEnabled(String driverId) { - return (DataAccess.S3.equals(driverId) && Boolean.parseBoolean(System.getProperty("dataverse.files." + DataAccess.S3 + ".upload-redirect"))) || + return (System.getProperty("dataverse.files." + driverId + ".type").equals(DataAccess.S3) && Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))) || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } From 635d345df3b71484b827668946b48b017420eedd Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 16 Oct 2023 16:33:02 -0400 Subject: [PATCH 0613/1092] check driver type not id --- .../java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 00db98e894e..d33f8f5e5bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { } public static boolean isDirectUploadEnabled(String driverId) { - return (DataAccess.S3.equals(driverId) && Boolean.parseBoolean(System.getProperty("dataverse.files." + DataAccess.S3 + ".upload-redirect"))) || + return (System.getProperty("dataverse.files." + driverId + ".type").equals(DataAccess.S3) && Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))) || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } From 48144a24cb200e285b5419ab29865293eac17e54 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 16 Oct 2023 17:00:51 -0400 Subject: [PATCH 0614/1092] adding extra logic to skip things like facets and highlights in searches, unless specifically requested. 
(#9635)
---
 .../search/SearchIncludeFragment.java         |  91 ++++--
 .../dataverse/search/SearchServiceBean.java   | 308 +++++++++++-------
 2 files changed, 249 insertions(+), 150 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
index 2ce06541afa..1e42958fe4e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
@@ -120,7 +120,6 @@ public class SearchIncludeFragment implements java.io.Serializable {
     private Long facetCountDatasets = 0L;
     private Long facetCountFiles = 0L;
     Map previewCountbyType = new HashMap<>();
-    private SolrQueryResponse solrQueryResponseAllTypes;
     private String sortField;
     private SortOrder sortOrder;
     private String currentSort;
@@ -132,6 +131,7 @@
     Map datasetfieldFriendlyNamesBySolrField = new HashMap<>();
     Map staticSolrFieldFriendlyNamesBySolrField = new HashMap<>();
     private boolean solrIsDown = false;
+    private boolean solrIsOverloaded = false;
     private Map numberOfFacets = new HashMap<>();
     // private boolean showUnpublished;
     List filterQueriesDebug = new ArrayList<>();
@@ -279,6 +279,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused

         SolrQueryResponse solrQueryResponse = null;
+        SolrQueryResponse solrQueryResponseSecondPass = null;

         List filterQueriesFinal = new ArrayList<>();

@@ -311,18 +312,11 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
             String[] parts = selectedTypesString.split(":");
             selectedTypesList.addAll(Arrays.asList(parts));

-            List filterQueriesFinalAllTypes = new ArrayList<>();
-            String[] arr = selectedTypesList.toArray(new String[selectedTypesList.size()]);
-            selectedTypesHumanReadable = combine(arr, " OR ");
-            if (!selectedTypesHumanReadable.isEmpty()) {
-                typeFilterQuery = SearchFields.TYPE + ":(" + selectedTypesHumanReadable + ")";
-            }
+            filterQueriesFinal.addAll(filterQueries);
-            filterQueriesFinalAllTypes.addAll(filterQueriesFinal);
-            String allTypesFilterQuery = SearchFields.TYPE + ":(dataverses OR datasets OR files)";
-            filterQueriesFinalAllTypes.add(allTypesFilterQuery);
+            filterQueriesFinal.add(typeFilterQuery);

             if (page <= 1) {
@@ -363,10 +357,60 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
                 // This 2nd search() is for populating the "type" ("dataverse", "dataset", "file") facets: -- L.A.
                 // (why exactly do we need it, again?)
                 // To get the counts we display in the types facets particularly for unselected types - SEK 08/25/2021
-                solrQueryResponseAllTypes = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalAllTypes, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null);
-                if (solrQueryResponse.hasError()){
-                    logger.info(solrQueryResponse.getError());
-                    setSolrErrorEncountered(true);
+                // Sure, but we should not waste resources here. We will try to save
+                // solr some extra work and a) only run this second query IF there is
+                // one or more unselected type facets; and b) drop all the extra
+                // parameters from this second query - such as facets and highlights -
+                // that we do not actually need for the purposes of finding these
+                // extra numbers. -- L.A. 
10/16/2023 + + // populate preview counts: https://redmine.hmdc.harvard.edu/issues/3560 + previewCountbyType.put(BundleUtil.getStringFromBundle("dataverses"), -1L); + previewCountbyType.put(BundleUtil.getStringFromBundle("datasets"), -1L); + previewCountbyType.put(BundleUtil.getStringFromBundle("files"), -1L); + + + // This will populate the type facet counts for the types that are + // currently selected on the collection page: + for (FacetCategory facetCategory : solrQueryResponse.getTypeFacetCategories()) { + for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { + previewCountbyType.put(facetLabel.getName(), facetLabel.getCount()); + } + } + + if (selectedTypesList.size() < 3) { + // If some types are NOT currently selected, we will need to + // run another query to obtain the numbers of the unselected types: + + List filterQueriesFinalSecondPass = new ArrayList<>(); + filterQueriesFinalSecondPass.addAll(filterQueriesFinal); + + List selectedTypesListSecondPass = new ArrayList<>(); + + for (String dvObjectType : previewCountbyType.keySet()) { + if (previewCountbyType.get(dvObjectType) == -1) { + selectedTypesListSecondPass.add(dvObjectType); + } + } + + String[] arr = selectedTypesListSecondPass.toArray(new String[selectedTypesListSecondPass.size()]); + filterQueriesFinalSecondPass.add(SearchFields.TYPE + ":(" + combine(arr, " OR ") + ")"); + + if (solrQueryResponseSecondPass != null) { + + solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null); + if (solrQueryResponseSecondPass.hasError()) { + logger.info(solrQueryResponse.getError()); + setSolrErrorEncountered(true); + } + + // And now populate the remaining type facets: + for (FacetCategory facetCategory : solrQueryResponseSecondPass.getTypeFacetCategories()) { + for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { + previewCountbyType.put(facetLabel.getName(), facetLabel.getCount()); + } + } + } } } catch (SearchException ex) { @@ -446,17 +490,6 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused } } - // populate preview counts: https://redmine.hmdc.harvard.edu/issues/3560 - previewCountbyType.put(BundleUtil.getStringFromBundle("dataverses"), 0L); - previewCountbyType.put(BundleUtil.getStringFromBundle("datasets"), 0L); - previewCountbyType.put(BundleUtil.getStringFromBundle("files"), 0L); - if (solrQueryResponseAllTypes != null) { - for (FacetCategory facetCategory : solrQueryResponseAllTypes.getTypeFacetCategories()) { - for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { - previewCountbyType.put(facetLabel.getName(), facetLabel.getCount()); - } - } - } setDisplayCardValues(); @@ -1020,6 +1053,14 @@ public boolean isSolrIsDown() { public void setSolrIsDown(boolean solrIsDown) { this.solrIsDown = solrIsDown; } + + public boolean isSolrOverloaded() { + return solrIsOverloaded; + } + + public void setSolrIsOverloaded(boolean solrIsOverloaded) { + this.solrIsOverloaded = solrIsOverloaded; + } public boolean isRootDv() { return rootDv; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 44976d232c2..aa2948eb8cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -101,7 +101,7 @@ public class 
SearchServiceBean {
     public SolrQueryResponse search(DataverseRequest dataverseRequest, List dataverses, String query, List filterQueries, String sortField, String sortOrder, int paginationStart, boolean onlyDatatRelatedToMe, int numResultsPerPage) throws SearchException {
         return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDatatRelatedToMe, numResultsPerPage, true, null, null);
     }
-
+
     /**
      * Import note: "onlyDatatRelatedToMe" relies on filterQueries for providing
      * access to Private Data for the correct user
@@ -122,6 +122,41 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List
+    public SolrQueryResponse search(DataverseRequest dataverseRequest,
+                                    List dataverses,
+                                    String query,
+                                    List filterQueries,
+                                    String sortField,
+                                    String sortOrder,
+                                    int paginationStart,
+                                    boolean onlyDatatRelatedToMe,
+                                    int numResultsPerPage,
+                                    boolean retrieveEntities,
+                                    String geoPoint,
+                                    String geoRadius) throws SearchException {
+        return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDatatRelatedToMe, numResultsPerPage, retrieveEntities, geoPoint, geoRadius, true, true);
+    }
+
+    /**
+     * @param dataverseRequest
+     * @param dataverses
+     * @param query
+     * @param filterQueries
+     * @param sortField
+     * @param sortOrder
+     * @param paginationStart
+     * @param onlyDatatRelatedToMe
+     * @param numResultsPerPage
+     * @param retrieveEntities - look up dvobject entities with .find() (potentially expensive!)
+     * @param geoPoint e.g. "35,15"
+     * @param geoRadius e.g. "5"
+     * @param addFacets boolean
+     * @param addHighlights boolean
      * @return
      * @throws SearchException
      */
@@ -136,7 +171,9 @@ public SolrQueryResponse search(
             int numResultsPerPage,
             boolean retrieveEntities,
             String geoPoint,
-            String geoRadius
+            String geoRadius,
+            boolean addFacets,
+            boolean addHighlights
     ) throws SearchException {

         if (paginationStart < 0) {
@@ -157,56 +194,62 @@ public SolrQueryResponse search(
 //            solrQuery.setSort(sortClause);
 //        }
 //        solrQuery.setSort(sortClause);
-        solrQuery.setHighlight(true).setHighlightSnippets(1);
-        Integer fragSize = systemConfig.getSearchHighlightFragmentSize();
-        if (fragSize != null) {
-            solrQuery.setHighlightFragsize(fragSize);
-        }
-        solrQuery.setHighlightSimplePre("");
-        solrQuery.setHighlightSimplePost("");
+
+        List datasetFields = datasetFieldService.findAllOrderedById();
+        Map solrFieldsToHightlightOnMap = new HashMap<>();
+        if (addHighlights) {
+            solrQuery.setHighlight(true).setHighlightSnippets(1);
+            Integer fragSize = systemConfig.getSearchHighlightFragmentSize();
+            if (fragSize != null) {
+                solrQuery.setHighlightFragsize(fragSize);
+            }
+            solrQuery.setHighlightSimplePre("");
+            solrQuery.setHighlightSimplePost("");
+
+            // TODO: Do not hard code "Name" etc as English here. 
- solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name"); - solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation"); - solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type"); - solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description"); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name"); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label"); - solrFieldsToHightlightOnMap.put(SearchFields.LITERAL_QUESTION, BundleUtil.getStringFromBundle("search.datasets.literalquestion")); - solrFieldsToHightlightOnMap.put(SearchFields.INTERVIEW_INSTRUCTIONS, BundleUtil.getStringFromBundle("search.datasets.interviewinstructions")); - solrFieldsToHightlightOnMap.put(SearchFields.POST_QUESTION, BundleUtil.getStringFromBundle("search.datasets.postquestion")); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_UNIVERSE, BundleUtil.getStringFromBundle("search.datasets.variableuniverse")); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NOTES, BundleUtil.getStringFromBundle("search.datasets.variableNotes")); - - solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type"); - solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Year"); - solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.datasets.persistentId")); - solrFieldsToHightlightOnMap.put(SearchFields.FILE_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.files.persistentId")); - /** - * @todo Dataverse subject and affiliation should be highlighted but - * this is commented out right now because the "friendly" names are not - * being shown on the dataverse cards. See also - * https://github.com/IQSS/dataverse/issues/1431 - */ + if (addHighlights) { + solrQuery.setHighlight(true).setHighlightSnippets(1); + Integer fragSize = systemConfig.getSearchHighlightFragmentSize(); + if (fragSize != null) { + solrQuery.setHighlightFragsize(fragSize); + } + solrQuery.setHighlightSimplePre(""); + solrQuery.setHighlightSimplePost(""); + + // TODO: Do not hard code "Name" etc as English here. 
+ solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name"); + solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation"); + solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type"); + solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description"); + solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name"); + solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label"); + solrFieldsToHightlightOnMap.put(SearchFields.LITERAL_QUESTION, BundleUtil.getStringFromBundle("search.datasets.literalquestion")); + solrFieldsToHightlightOnMap.put(SearchFields.INTERVIEW_INSTRUCTIONS, BundleUtil.getStringFromBundle("search.datasets.interviewinstructions")); + solrFieldsToHightlightOnMap.put(SearchFields.POST_QUESTION, BundleUtil.getStringFromBundle("search.datasets.postquestion")); + solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_UNIVERSE, BundleUtil.getStringFromBundle("search.datasets.variableuniverse")); + solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NOTES, BundleUtil.getStringFromBundle("search.datasets.variableNotes")); + + solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type"); + solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Year"); + solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.datasets.persistentId")); + solrFieldsToHightlightOnMap.put(SearchFields.FILE_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.files.persistentId")); + /** + * @todo Dataverse subject and affiliation should be highlighted but + * this is commented out right now because the "friendly" names are + * not being shown on the dataverse cards. See also + * https://github.com/IQSS/dataverse/issues/1431 + */ // solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_SUBJECT, "Subject"); // solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_AFFILIATION, "Affiliation"); - /** - * @todo: show highlight on file card? - * https://redmine.hmdc.harvard.edu/issues/3848 - */ - solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension"); - solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag"); - List datasetFields = datasetFieldService.findAllOrderedById(); - for (DatasetFieldType datasetFieldType : datasetFields) { - String solrField = datasetFieldType.getSolrField().getNameSearchable(); - String displayName = datasetFieldType.getDisplayName(); - solrFieldsToHightlightOnMap.put(solrField, displayName); - } - for (Map.Entry entry : solrFieldsToHightlightOnMap.entrySet()) { - String solrField = entry.getKey(); - // String displayName = entry.getValue(); - solrQuery.addHighlightField(solrField); + /** + * @todo: show highlight on file card? 
+ * https://redmine.hmdc.harvard.edu/issues/3848 + */ + solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension"); + solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag"); + + for (DatasetFieldType datasetFieldType : datasetFields) { + String solrField = datasetFieldType.getSolrField().getNameSearchable(); + String displayName = datasetFieldType.getDisplayName(); + solrFieldsToHightlightOnMap.put(solrField, displayName); + } + for (Map.Entry entry : solrFieldsToHightlightOnMap.entrySet()) { + String solrField = entry.getKey(); + // String displayName = entry.getValue(); + solrQuery.addHighlightField(solrField); + } } + solrQuery.setParam("fl", "*,score"); solrQuery.setParam("qt", "/select"); solrQuery.setParam("facet", "true"); @@ -214,6 +257,8 @@ public SolrQueryResponse search( * @todo: do we need facet.query? */ solrQuery.setParam("facet.query", "*"); + solrQuery.addFacetField(SearchFields.TYPE); // this one is always performed + for (String filterQuery : filterQueries) { solrQuery.addFilterQuery(filterQuery); } @@ -223,70 +268,73 @@ public SolrQueryResponse search( // See https://solr.apache.org/guide/8_11/spatial-search.html#bbox solrQuery.addFilterQuery("{!bbox sfield=" + SearchFields.GEOLOCATION + "}"); } + + List metadataBlockFacets = new LinkedList<>(); - // ----------------------------------- - // Facets to Retrieve - // ----------------------------------- - solrQuery.addFacetField(SearchFields.METADATA_TYPES); -// solrQuery.addFacetField(SearchFields.HOST_DATAVERSE); -// solrQuery.addFacetField(SearchFields.AUTHOR_STRING); - solrQuery.addFacetField(SearchFields.DATAVERSE_CATEGORY); - solrQuery.addFacetField(SearchFields.METADATA_SOURCE); -// solrQuery.addFacetField(SearchFields.AFFILIATION); - solrQuery.addFacetField(SearchFields.PUBLICATION_YEAR); -// solrQuery.addFacetField(SearchFields.CATEGORY); -// solrQuery.addFacetField(SearchFields.FILE_TYPE_MIME); -// solrQuery.addFacetField(SearchFields.DISTRIBUTOR); -// solrQuery.addFacetField(SearchFields.KEYWORD); - /** - * @todo when a new method on datasetFieldService is available - * (retrieveFacetsByDataverse?) only show the facets that the dataverse - * in question wants to show (and in the right order): - * https://redmine.hmdc.harvard.edu/issues/3490 - * - * also, findAll only returns advancedSearchField = true... we should - * probably introduce the "isFacetable" boolean rather than caring about - * if advancedSearchField is true or false - * - */ + if (addFacets) { + // ----------------------------------- + // Facets to Retrieve + // ----------------------------------- + solrQuery.addFacetField(SearchFields.METADATA_TYPES); + solrQuery.addFacetField(SearchFields.DATAVERSE_CATEGORY); + solrQuery.addFacetField(SearchFields.METADATA_SOURCE); + solrQuery.addFacetField(SearchFields.PUBLICATION_YEAR); + /** + * @todo when a new method on datasetFieldService is available + * (retrieveFacetsByDataverse?) only show the facets that the + * dataverse in question wants to show (and in the right order): + * https://redmine.hmdc.harvard.edu/issues/3490 + * + * also, findAll only returns advancedSearchField = true... 
we
+     * should probably introduce the "isFacetable" boolean rather than
+     * caring about if advancedSearchField is true or false
+     *
+     */

-        List metadataBlockFacets = new LinkedList<>();
+            if (dataverses != null) {
+                for (Dataverse dataverse : dataverses) {
+                    if (dataverse != null) {
+                        for (DataverseFacet dataverseFacet : dataverse.getDataverseFacets()) {
+                            DatasetFieldType datasetField = dataverseFacet.getDatasetFieldType();
+                            solrQuery.addFacetField(datasetField.getSolrField().getNameFacetable());
+                        }
+                        // Get all metadata block facets configured to be displayed
+                        metadataBlockFacets.addAll(dataverse.getMetadataBlockFacets());
+                    }
+                }
+            }
+
+            solrQuery.addFacetField(SearchFields.FILE_TYPE);
+            /**
+             * @todo: hide the extra line this shows in the GUI... at least it's
+             * last...
+             */
+            solrQuery.addFacetField(SearchFields.FILE_TAG);
+            if (!systemConfig.isPublicInstall()) {
+                solrQuery.addFacetField(SearchFields.ACCESS);
+            }
+        }
+
+
        //I'm not sure if just adding null here is good for the permissions system... i think it needs something
        if(dataverses != null) {
            for(Dataverse dataverse : dataverses) {
                // -----------------------------------
                // PERMISSION FILTER QUERY
                // -----------------------------------
-                String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, dataverse, onlyDatatRelatedToMe);
+                String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, dataverse, onlyDatatRelatedToMe, addFacets);
                if (permissionFilterQuery != null) {
                    solrQuery.addFilterQuery(permissionFilterQuery);
                }
-                if (dataverse != null) {
-                    for (DataverseFacet dataverseFacet : dataverse.getDataverseFacets()) {
-                        DatasetFieldType datasetField = dataverseFacet.getDatasetFieldType();
-                        solrQuery.addFacetField(datasetField.getSolrField().getNameFacetable());
-                    }
-                    // Get all metadata block facets configured to be displayed
-                    metadataBlockFacets.addAll(dataverse.getMetadataBlockFacets());
-                }
            }
        } else {
            String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, null, onlyDatatRelatedToMe, addFacets);
            if (permissionFilterQuery != null) {
                solrQuery.addFilterQuery(permissionFilterQuery);
            }
        }

-        solrQuery.addFacetField(SearchFields.FILE_TYPE);
-        /**
-         * @todo: hide the extra line this shows in the GUI... at least it's
-         * last...
-         */
-        solrQuery.addFacetField(SearchFields.TYPE);
-        solrQuery.addFacetField(SearchFields.FILE_TAG);
-        if (!systemConfig.isPublicInstall()) {
-            solrQuery.addFacetField(SearchFields.ACCESS);
-        }
+
        /**
         * @todo: do sanity checking... throw error if negative
         */
@@ -416,34 +464,44 @@ public SolrQueryResponse search(
         Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID);

         List matchedFields = new ArrayList<>();
-        List highlights = new ArrayList<>();
-        Map highlightsMap = new HashMap<>();
-        Map> highlightsMap2 = new HashMap<>();
-        Map highlightsMap3 = new HashMap<>();
-        if (queryResponse.getHighlighting().get(id) != null) {
-            for (Map.Entry entry : solrFieldsToHightlightOnMap.entrySet()) {
-                String field = entry.getKey();
-                String displayName = entry.getValue();
-
-                List highlightSnippets = queryResponse.getHighlighting().get(id).get(field);
-                if (highlightSnippets != null) {
-                    matchedFields.add(field);
-                    /**
-                     * @todo only SolrField.SolrType.STRING? that's not
-                     * right... 
knit the SolrField object more into the - * highlighting stuff - */ - SolrField solrField = new SolrField(field, SolrField.SolrType.STRING, true, true); - Highlight highlight = new Highlight(solrField, highlightSnippets, displayName); - highlights.add(highlight); - highlightsMap.put(solrField, highlight); - highlightsMap2.put(solrField, highlightSnippets); - highlightsMap3.put(field, highlight); + + SolrSearchResult solrSearchResult = new SolrSearchResult(query, name); + + if (addHighlights) { + List highlights = new ArrayList<>(); + Map highlightsMap = new HashMap<>(); + Map> highlightsMap2 = new HashMap<>(); + Map highlightsMap3 = new HashMap<>(); + if (queryResponse.getHighlighting().get(id) != null) { + for (Map.Entry entry : solrFieldsToHightlightOnMap.entrySet()) { + String field = entry.getKey(); + String displayName = entry.getValue(); + + List highlightSnippets = queryResponse.getHighlighting().get(id).get(field); + if (highlightSnippets != null) { + matchedFields.add(field); + /** + * @todo only SolrField.SolrType.STRING? that's not + * right... knit the SolrField object more into the + * highlighting stuff + */ + SolrField solrField = new SolrField(field, SolrField.SolrType.STRING, true, true); + Highlight highlight = new Highlight(solrField, highlightSnippets, displayName); + highlights.add(highlight); + highlightsMap.put(solrField, highlight); + highlightsMap2.put(solrField, highlightSnippets); + highlightsMap3.put(field, highlight); + } } + } + solrSearchResult.setHighlightsAsList(highlights); + solrSearchResult.setHighlightsMap(highlightsMap); + solrSearchResult.setHighlightsAsMap(highlightsMap3); } - SolrSearchResult solrSearchResult = new SolrSearchResult(query, name); + + /** * @todo put all this in the constructor? */ @@ -470,9 +528,7 @@ public SolrQueryResponse search( solrSearchResult.setNameSort(nameSort); solrSearchResult.setReleaseOrCreateDate(release_or_create_date); solrSearchResult.setMatchedFields(matchedFields); - solrSearchResult.setHighlightsAsList(highlights); - solrSearchResult.setHighlightsMap(highlightsMap); - solrSearchResult.setHighlightsAsMap(highlightsMap3); + Map parent = new HashMap<>(); String description = (String) solrDocument.getFieldValue(SearchFields.DESCRIPTION); solrSearchResult.setDescriptionNoSnippet(description); @@ -863,7 +919,7 @@ public String getCapitalizedName(String name) { * * @return */ - private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQuery solrQuery, Dataverse dataverse, boolean onlyDatatRelatedToMe) { + private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQuery solrQuery, Dataverse dataverse, boolean onlyDatatRelatedToMe, boolean addFacets) { User user = dataverseRequest.getUser(); if (user == null) { @@ -922,9 +978,11 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ AuthenticatedUser au = (AuthenticatedUser) user; - // Logged in user, has publication status facet - // - solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); + if (addFacets) { + // Logged in user, has publication status facet + // + solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS); + } // ---------------------------------------------------- // (3) Is this a Super User? 
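
Before the next commit, a brief illustrative sketch (not code from any of these patches; the variable names are borrowed from the caller in SearchIncludeFragment) of how the extended search() signature introduced above can be invoked for the lightweight second-pass type-count query, with entity retrieval, facets, and highlights all switched off:

    // Hypothetical caller, assuming the 14-argument overload added in this commit;
    // the five trailing arguments are retrieveEntities, geoPoint, geoRadius,
    // addFacets, and addHighlights.
    SolrQueryResponse typeCounts = searchService.search(
            dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass,
            sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows,
            false, // retrieveEntities - skip the expensive DvObject .find() lookups
            null,  // geoPoint - no geospatial filtering needed for counts
            null,  // geoRadius
            false, // addFacets - only the always-added type facet is required
            false  // addHighlights - no snippets are displayed for these counts
    );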
From 6cda2fcec8fd2ebfef8b9a11a907628ec7d4b1b2 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 10:00:06 +0200 Subject: [PATCH 0615/1092] Remove StringReader in deaccession endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7d50d5e0e70..f292f63c1ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3974,8 +3974,8 @@ public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathPa } return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); - try (StringReader stringReader = new StringReader(jsonBody)) { - JsonObject jsonObject = Json.createReader(stringReader).readObject(); + try { + JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody); datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason")); String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null); if (deaccessionForwardURL != null) { From 5802bf61fe92aae50d8764ee8df4dd8011292f22 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 11:02:12 +0200 Subject: [PATCH 0616/1092] Lookup dataverse by alias or ID --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 30c14535251..47f6468bfac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1194,10 +1194,15 @@ public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContex public void write(OutputStream os) throws IOException, WebApplicationException { - Dataverse dv = dataverseService.findByAlias(dvIdtf); + Dataverse dv; + try { + dv = findDataverseOrDie(dvIdtf); + } catch (WrappedResponse wr) { + throw new WebApplicationException(wr.getResponse()); + } Map customQandAs = guestbookResponseService.mapCustomQuestionAnswersAsStrings(dv.getId(), gbId); Map datasetTitles = guestbookResponseService.mapDatasetTitles(dv.getId()); - + List guestbookResults = guestbookResponseService.getGuestbookResults(dv.getId(), gbId); os.write("Guestbook, Dataset, Dataset PID, Date, Type, File Name, File Id, File PID, User Name, Email, Institution, Position, Custom Questions\n".getBytes()); for (Object[] result : guestbookResults) { From f2770fb2f107b4e75701d2baf5f4f52b51abf709 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 16:10:02 +0200 Subject: [PATCH 0617/1092] Do not look up the Dataverse twice Co-authored-by: Philip Durbin --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 47f6468bfac..76cfa8ef764 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1173,8 +1173,9 @@ public Response getGroupByOwnerAndAliasInOwner(@Context ContainerRequestContext public Response getGuestbookResponsesByDataverse(@Context 
ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("guestbookId") Long gbId, @Context HttpServletResponse response) { + Dataverse dv; try { - Dataverse dv = findDataverseOrDie(dvIdtf); + dv = findDataverseOrDie(dvIdtf); User u = getRequestUser(crc); DataverseRequest req = createDataverseRequest(u); if (permissionSvc.request(req) @@ -1194,12 +1195,6 @@ public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContex public void write(OutputStream os) throws IOException, WebApplicationException { - Dataverse dv; - try { - dv = findDataverseOrDie(dvIdtf); - } catch (WrappedResponse wr) { - throw new WebApplicationException(wr.getResponse()); - } Map customQandAs = guestbookResponseService.mapCustomQuestionAnswersAsStrings(dv.getId(), gbId); Map datasetTitles = guestbookResponseService.mapDatasetTitles(dv.getId()); @@ -1208,7 +1203,6 @@ public void write(OutputStream os) throws IOException, for (Object[] result : guestbookResults) { StringBuilder sb = guestbookResponseService.convertGuestbookResponsesToCSV(customQandAs, datasetTitles, result); os.write(sb.toString().getBytes()); - } } }; From 6307292d3858bd62144e313de1b5574b55b4fb36 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 17 Oct 2023 13:07:14 -0400 Subject: [PATCH 0618/1092] more fixes/cleanup #9635 --- .../search/SearchIncludeFragment.java | 27 ++-- .../dataverse/search/SearchServiceBean.java | 127 ++++++++++-------- 2 files changed, 90 insertions(+), 64 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 1e42958fe4e..958ac0151c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -308,15 +308,23 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused this.setRootDv(true); } + filterQueriesFinal.addAll(filterQueries); + + /** + * Add type queries, for the types (Dataverses, Datasets, Datafiles) + * currently selected: + */ selectedTypesList = new ArrayList<>(); String[] parts = selectedTypesString.split(":"); selectedTypesList.addAll(Arrays.asList(parts)); - - - filterQueriesFinal.addAll(filterQueries); - + logger.info("selected types list size: "+selectedTypesList.size()); + String[] arr = selectedTypesList.toArray(new String[selectedTypesList.size()]); + selectedTypesHumanReadable = combine(arr, " OR "); + if (!selectedTypesHumanReadable.isEmpty()) { + typeFilterQuery = SearchFields.TYPE + ":(" + selectedTypesHumanReadable + ")"; + } filterQueriesFinal.add(typeFilterQuery); if (page <= 1) { @@ -383,7 +391,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused // run another query to obtain the numbers of the unselected types: List filterQueriesFinalSecondPass = new ArrayList<>(); - filterQueriesFinalSecondPass.addAll(filterQueriesFinal); + filterQueriesFinalSecondPass.addAll(filterQueries); List selectedTypesListSecondPass = new ArrayList<>(); @@ -393,12 +401,13 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused } } - String[] arr = selectedTypesListSecondPass.toArray(new String[selectedTypesListSecondPass.size()]); + arr = selectedTypesListSecondPass.toArray(new String[selectedTypesListSecondPass.size()]); filterQueriesFinalSecondPass.add(SearchFields.TYPE + ":(" + combine(arr, " OR ") + ")"); - + + solrQueryResponseSecondPass = 
searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, false, false); + if (solrQueryResponseSecondPass != null) { - solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null); if (solrQueryResponseSecondPass.hasError()) { logger.info(solrQueryResponse.getError()); setSolrErrorEncountered(true); @@ -410,6 +419,8 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused previewCountbyType.put(facetLabel.getName(), facetLabel.getCount()); } } + } else { + logger.warning("null solr response from the second pass type query"); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index aa2948eb8cb..d3ff7e42d15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -195,64 +195,11 @@ public SolrQueryResponse search( // } // solrQuery.setSort(sortClause); - List datasetFields = datasetFieldService.findAllOrderedById(); - Map solrFieldsToHightlightOnMap = new HashMap<>(); - if (addHighlights) { - solrQuery.setHighlight(true).setHighlightSnippets(1); - Integer fragSize = systemConfig.getSearchHighlightFragmentSize(); - if (fragSize != null) { - solrQuery.setHighlightFragsize(fragSize); - } - solrQuery.setHighlightSimplePre(""); - solrQuery.setHighlightSimplePost(""); - - // TODO: Do not hard code "Name" etc as English here. 
- solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name"); - solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation"); - solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type"); - solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description"); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name"); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label"); - solrFieldsToHightlightOnMap.put(SearchFields.LITERAL_QUESTION, BundleUtil.getStringFromBundle("search.datasets.literalquestion")); - solrFieldsToHightlightOnMap.put(SearchFields.INTERVIEW_INSTRUCTIONS, BundleUtil.getStringFromBundle("search.datasets.interviewinstructions")); - solrFieldsToHightlightOnMap.put(SearchFields.POST_QUESTION, BundleUtil.getStringFromBundle("search.datasets.postquestion")); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_UNIVERSE, BundleUtil.getStringFromBundle("search.datasets.variableuniverse")); - solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NOTES, BundleUtil.getStringFromBundle("search.datasets.variableNotes")); - - solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type"); - solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Year"); - solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.datasets.persistentId")); - solrFieldsToHightlightOnMap.put(SearchFields.FILE_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.files.persistentId")); - /** - * @todo Dataverse subject and affiliation should be highlighted but - * this is commented out right now because the "friendly" names are - * not being shown on the dataverse cards. See also - * https://github.com/IQSS/dataverse/issues/1431 - */ -// solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_SUBJECT, "Subject"); -// solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_AFFILIATION, "Affiliation"); - /** - * @todo: show highlight on file card? - * https://redmine.hmdc.harvard.edu/issues/3848 - */ - solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension"); - solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag"); - - for (DatasetFieldType datasetFieldType : datasetFields) { - String solrField = datasetFieldType.getSolrField().getNameSearchable(); - String displayName = datasetFieldType.getDisplayName(); - solrFieldsToHightlightOnMap.put(solrField, displayName); - } - for (Map.Entry entry : solrFieldsToHightlightOnMap.entrySet()) { - String solrField = entry.getKey(); - // String displayName = entry.getValue(); - solrQuery.addHighlightField(solrField); - } - } solrQuery.setParam("fl", "*,score"); solrQuery.setParam("qt", "/select"); solrQuery.setParam("facet", "true"); + /** * @todo: do we need facet.query? */ @@ -315,7 +262,61 @@ public SolrQueryResponse search( } } - + List datasetFields = datasetFieldService.findAllOrderedById(); + Map solrFieldsToHightlightOnMap = new HashMap<>(); + if (addHighlights) { + solrQuery.setHighlight(true).setHighlightSnippets(1); + Integer fragSize = systemConfig.getSearchHighlightFragmentSize(); + if (fragSize != null) { + solrQuery.setHighlightFragsize(fragSize); + } + solrQuery.setHighlightSimplePre(""); + solrQuery.setHighlightSimplePost(""); + + // TODO: Do not hard code "Name" etc as English here. 
+            solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name");
+            solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation");
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type");
+            solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description");
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name");
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label");
+            solrFieldsToHightlightOnMap.put(SearchFields.LITERAL_QUESTION, BundleUtil.getStringFromBundle("search.datasets.literalquestion"));
+            solrFieldsToHightlightOnMap.put(SearchFields.INTERVIEW_INSTRUCTIONS, BundleUtil.getStringFromBundle("search.datasets.interviewinstructions"));
+            solrFieldsToHightlightOnMap.put(SearchFields.POST_QUESTION, BundleUtil.getStringFromBundle("search.datasets.postquestion"));
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_UNIVERSE, BundleUtil.getStringFromBundle("search.datasets.variableuniverse"));
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NOTES, BundleUtil.getStringFromBundle("search.datasets.variableNotes"));
+
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type");
+            solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Year");
+            solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.datasets.persistentId"));
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.files.persistentId"));
+            /**
+             * @todo Dataverse subject and affiliation should be highlighted but
+             * this is commented out right now because the "friendly" names are
+             * not being shown on the dataverse cards. See also
+             * https://github.com/IQSS/dataverse/issues/1431
+             */
+//            solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_SUBJECT, "Subject");
+//            solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_AFFILIATION, "Affiliation");
+            /**
+             * @todo: show highlight on file card?
+             * https://redmine.hmdc.harvard.edu/issues/3848
+             */
+            solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension");
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag");
+
+            for (DatasetFieldType datasetFieldType : datasetFields) {
+                String solrField = datasetFieldType.getSolrField().getNameSearchable();
+                String displayName = datasetFieldType.getDisplayName();
+                solrFieldsToHightlightOnMap.put(solrField, displayName);
+            }
+            for (Map.Entry entry : solrFieldsToHightlightOnMap.entrySet()) {
+                String solrField = entry.getKey();
+                // String displayName = entry.getValue();
+                solrQuery.addHighlightField(solrField);
+            }
+        }
+        //I'm not sure if just adding null here is good for the permissions system...
i think it needs something if(dataverses != null) { for(Dataverse dataverse : dataverses) { @@ -370,7 +371,7 @@ public SolrQueryResponse search( // solrQuery.addNumericRangeFacet(SearchFields.PRODUCTION_DATE_YEAR_ONLY, citationYearRangeStart, citationYearRangeEnd, citationYearRangeSpan); // solrQuery.addNumericRangeFacet(SearchFields.DISTRIBUTION_DATE_YEAR_ONLY, citationYearRangeStart, citationYearRangeEnd, citationYearRangeSpan); solrQuery.setRows(numResultsPerPage); - logger.fine("Solr query:" + solrQuery); + logger.info("Solr query:" + solrQuery); // ----------------------------------- // Make the solr query @@ -378,8 +379,12 @@ public SolrQueryResponse search( QueryResponse queryResponse = null; try { queryResponse = solrClientService.getSolrClient().query(solrQuery); + } catch (RemoteSolrException ex) { String messageFromSolr = ex.getLocalizedMessage(); + + logger.info("message from solr exception: "+messageFromSolr); + String error = "Search Syntax Error: "; String stringToHide = "org.apache.solr.search.SyntaxError: "; if (messageFromSolr.startsWith(stringToHide)) { @@ -393,6 +398,12 @@ public SolrQueryResponse search( exceptionSolrQueryResponse.setError(error); // we can't show anything because of the search syntax error + + // We probably shouldn't be assuming that this is necessarily a + // "search syntax error" - could be anything else too - ? + + + long zeroNumResultsFound = 0; long zeroGetResultsStart = 0; List emptySolrSearchResults = new ArrayList<>(); @@ -408,6 +419,10 @@ public SolrQueryResponse search( } catch (SolrServerException | IOException ex) { throw new SearchException("Internal Dataverse Search Engine Error", ex); } + + int statusCode = queryResponse.getStatus(); + + logger.info("status code of the query response: "+statusCode); SolrDocumentList docs = queryResponse.getResults(); List solrSearchResults = new ArrayList<>(); From 21eb153a63227fde85604b8d504c18813254496a Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 17 Oct 2023 20:47:00 +0200 Subject: [PATCH 0619/1092] Add API test for Dataverses GuestbookResponses --- .../iq/dataverse/api/DataversesIT.java | 19 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 11 ++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 09052f9e4ea..171a35ac04f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -144,6 +144,25 @@ public void testMinimalDataverse() throws FileNotFoundException { deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); } + @Test + public void testGetGuestbookResponses() { + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + // Create a Dataverse + Response create = UtilIT.createRandomDataverse(apiToken); + create.prettyPrint(); + create.then().assertThat().statusCode(CREATED.getStatusCode()); + String alias = UtilIT.getAliasFromResponse(create); + Integer dvId = UtilIT.getDataverseIdFromResponse(create); + // Get GuestbookResponses by Dataverse alias + Response getResponsesByAlias = UtilIT.getGuestbookResponses(alias, null, apiToken); + getResponsesByAlias.then().assertThat().statusCode(OK.getStatusCode()); + // Get GuestbookResponses by Dataverse ID + Response getResponsesById = UtilIT.getGuestbookResponses(dvId.toString(), null, apiToken); 
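+        // the same endpoint accepts the numeric database id in place of the alias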
+ getResponsesById.then().assertThat().statusCode(OK.getStatusCode()); + } + @Test public void testNotEnoughJson() { Response createUser = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4421e9280b3..94a0f33a83e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -372,7 +372,16 @@ static Response showDataverseContents(String alias, String apiToken) { .header(API_TOKEN_HTTP_HEADER, apiToken) .when().get("/api/dataverses/" + alias + "/contents"); } - + + static Response getGuestbookResponses(String dataverseAlias, Long guestbookId, String apiToken) { + RequestSpecification requestSpec = given() + .auth().basic(apiToken, EMPTY_STRING); + if (guestbookId != null) { + requestSpec.queryParam("guestbookId", guestbookId); + } + return requestSpec.get("/api/dataverses/" + dataverseAlias + "/guestbookResponses/"); + } + static Response createRandomDatasetViaNativeApi(String dataverseAlias, String apiToken) { return createRandomDatasetViaNativeApi(dataverseAlias, apiToken, false); } From 74eb7c551d209c9e460cbaea5572004b0fcad0bc Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 17 Oct 2023 16:09:32 -0400 Subject: [PATCH 0620/1092] more fixes (#9635) --- .../search/SearchIncludeFragment.java | 24 +++++++++++++++---- .../dataverse/search/SearchServiceBean.java | 2 ++ 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 958ac0151c6..177186fce49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -395,9 +395,23 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused List selectedTypesListSecondPass = new ArrayList<>(); - for (String dvObjectType : previewCountbyType.keySet()) { - if (previewCountbyType.get(dvObjectType) == -1) { - selectedTypesListSecondPass.add(dvObjectType); + // @todo: simplify this! 
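+        // previewCountbyType is keyed by the localized type labels, so each label is mapped back
+        // to the canonical type name before it can go into the second pass query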
+ for (String dvObjectTypeLabel : previewCountbyType.keySet()) { + if (previewCountbyType.get(dvObjectTypeLabel) == -1) { + String dvObjectType = null; + + if (dvObjectTypeLabel.equals(BundleUtil.getStringFromBundle("dataverses"))) { + dvObjectType = "dataverses"; + } else if (dvObjectTypeLabel.equals(BundleUtil.getStringFromBundle("datasets"))) { + dvObjectType = "datasets"; + } else if (dvObjectTypeLabel.equals(BundleUtil.getStringFromBundle("files"))) { + dvObjectType = "files"; + } + + if (dvObjectType != null) { + logger.info("adding object type to the second pass query: "+dvObjectType); + selectedTypesListSecondPass.add(dvObjectType); + } } } @@ -409,13 +423,15 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused if (solrQueryResponseSecondPass != null) { if (solrQueryResponseSecondPass.hasError()) { - logger.info(solrQueryResponse.getError()); + logger.info(solrQueryResponseSecondPass.getError()); setSolrErrorEncountered(true); } // And now populate the remaining type facets: for (FacetCategory facetCategory : solrQueryResponseSecondPass.getTypeFacetCategories()) { + logger.info("facet category: "+facetCategory.getName()); for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { + logger.info("facet label: "+facetLabel.getName()); previewCountbyType.put(facetLabel.getName(), facetLabel.getCount()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index d3ff7e42d15..18cdbaa6994 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -423,6 +423,7 @@ public SolrQueryResponse search( int statusCode = queryResponse.getStatus(); logger.info("status code of the query response: "+statusCode); + ///logger.info("number of hits: "+queryResponse._size()); SolrDocumentList docs = queryResponse.getResults(); List solrSearchResults = new ArrayList<>(); @@ -823,6 +824,7 @@ public SolrQueryResponse search( facetCategory.setFacetLabel(facetLabelList); if (!facetLabelList.isEmpty()) { if (facetCategory.getName().equals(SearchFields.TYPE)) { + logger.info("type facet encountered"); // the "type" facet is special, these are not typeFacetCategories.add(facetCategory); } else if (facetCategory.getName().equals(SearchFields.PUBLICATION_STATUS)) { From f1e37ae0ff01e1fe0030202be1883f823bb8d080 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 17 Oct 2023 17:26:54 -0400 Subject: [PATCH 0621/1092] finally working as it should; much simplified/way less expensive second pass query sent in order to populate the unchecked type count facets. 
(#9635) --- .../iq/dataverse/search/SearchIncludeFragment.java | 4 +--- .../iq/dataverse/search/SearchServiceBean.java | 13 +++++++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 177186fce49..47a5621c3d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -418,7 +418,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused arr = selectedTypesListSecondPass.toArray(new String[selectedTypesListSecondPass.size()]); filterQueriesFinalSecondPass.add(SearchFields.TYPE + ":(" + combine(arr, " OR ") + ")"); - solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, false, false); + solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, null, sortOrder.toString(), 0, onlyDataRelatedToMe, 1, false, null, null, false, false); if (solrQueryResponseSecondPass != null) { @@ -429,9 +429,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused // And now populate the remaining type facets: for (FacetCategory facetCategory : solrQueryResponseSecondPass.getTypeFacetCategories()) { - logger.info("facet category: "+facetCategory.getName()); for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { - logger.info("facet label: "+facetLabel.getName()); previewCountbyType.put(facetLabel.getName(), facetLabel.getCount()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 18cdbaa6994..be3330080c4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -165,7 +165,8 @@ public SolrQueryResponse search( List dataverses, String query, List filterQueries, - String sortField, String sortOrder, + String sortField, + String sortOrder, int paginationStart, boolean onlyDatatRelatedToMe, int numResultsPerPage, @@ -189,7 +190,11 @@ public SolrQueryResponse search( // SortClause foo = new SortClause("name", SolrQuery.ORDER.desc); // if (query.equals("*") || query.equals("*:*")) { // solrQuery.setSort(new SortClause(SearchFields.NAME_SORT, SolrQuery.ORDER.asc)); - solrQuery.setSort(new SortClause(sortField, sortOrder)); + if (sortField != null) { + // is it ok not to specify any sort? - there are cases where we + // don't care, and it must cost some extra cycles -- L.A. 
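+            // (the second pass facet-count query above is one such case: it now passes a null sortField and asks for a single row)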
+ solrQuery.setSort(new SortClause(sortField, sortOrder)); + } // } else { // solrQuery.setSort(sortClause); // } @@ -423,7 +428,8 @@ public SolrQueryResponse search( int statusCode = queryResponse.getStatus(); logger.info("status code of the query response: "+statusCode); - ///logger.info("number of hits: "+queryResponse._size()); + logger.info("_size from query response: "+queryResponse._size()); + logger.info("qtime: "+queryResponse.getQTime()); SolrDocumentList docs = queryResponse.getResults(); List solrSearchResults = new ArrayList<>(); @@ -824,7 +830,6 @@ public SolrQueryResponse search( facetCategory.setFacetLabel(facetLabelList); if (!facetLabelList.isEmpty()) { if (facetCategory.getName().equals(SearchFields.TYPE)) { - logger.info("type facet encountered"); // the "type" facet is special, these are not typeFacetCategories.add(facetCategory); } else if (facetCategory.getName().equals(SearchFields.PUBLICATION_STATUS)) { From d0a858f5c817df7f626033063ec1afa4dbd69831 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 07:27:09 +0100 Subject: [PATCH 0622/1092] Added: ManageFilePermissions permission check to getUserPermissionsOnFile API endpoint --- .../harvard/iq/dataverse/FileDownloadServiceBean.java | 11 ----------- .../java/edu/harvard/iq/dataverse/api/Access.java | 3 ++- .../java/edu/harvard/iq/dataverse/api/AccessIT.java | 2 ++ 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index de947ee9058..55817d4a746 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -645,15 +645,4 @@ public String getDirectStorageLocatrion(String storageLocation) { return null; } - - /** - * Checks if the DataverseRequest, which contains IP Groups, has permission to download the file - * - * @param dataverseRequest the DataverseRequest - * @param dataFile the DataFile to check permissions - * @return boolean - */ - public boolean canDownloadFile(DataverseRequest dataverseRequest, DataFile dataFile) { - return permissionService.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 1aaa7e60816..696fcb34920 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1709,7 +1709,8 @@ public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @ } JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canDownloadFile", permissionService.userOn(requestUser, dataFile).has(Permission.DownloadFile)); + jsonObjectBuilder.add("canManageFilePermissions", permissionService.userOn(requestUser, dataFile).has(Permission.ManageFilePermissions)); jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); return ok(jsonObjectBuilder); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 416caa68566..42e21e53101 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -666,6 +666,8 @@ public void testGetUserPermissionsOnFile() { assertTrue(canDownloadFile); boolean canEditOwnerDataset = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canEditOwnerDataset"); assertTrue(canEditOwnerDataset); + boolean canManageFilePermissions = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canManageFilePermissions"); + assertTrue(canManageFilePermissions); // Call with invalid file id Response getUserPermissionsOnFileInvalidIdResponse = UtilIT.getUserPermissionsOnFile("testInvalidId", apiToken); From 5d8ac32754ea2c13c2dbd883d627b583a6cb1b43 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 07:34:58 +0100 Subject: [PATCH 0623/1092] Added: getUserPermissionsOnDataset API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 20 +++++++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 33 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 ++++ 3 files changed, 59 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index c3032495f27..7cfe587d8dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4083,4 +4083,24 @@ public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext cr datasetService.merge(dataset); return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } + + @GET + @AuthRequired + @Path("{id}/userPermissions") + public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) { + Dataset dataset; + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + User requestUser = getRequestUser(crc); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset)); + jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset)); + jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset)); + jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions)); + jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft)); + return ok(jsonObjectBuilder); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 34eccd3172a..4258773a0b3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3928,4 +3928,37 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } + + @Test + public void testGetUserPermissionsOnDataset() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = 
UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Call with valid dataset id + Response getUserPermissionsOnDatasetResponse = UtilIT.getUserPermissionsOnDataset(Integer.toString(datasetId), apiToken); + getUserPermissionsOnDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canViewUnpublishedDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canViewUnpublishedDataset"); + assertTrue(canViewUnpublishedDataset); + boolean canEditDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canEditDataset"); + assertTrue(canEditDataset); + boolean canPublishDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canPublishDataset"); + assertTrue(canPublishDataset); + boolean canManageDatasetPermissions = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canManageDatasetPermissions"); + assertTrue(canManageDatasetPermissions); + boolean canDeleteDatasetDraft = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canDeleteDatasetDraft"); + assertTrue(canDeleteDatasetDraft); + + // Call with invalid dataset id + Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getUserPermissionsOnDataset("testInvalidId", apiToken); + getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4421e9280b3..be23df5ec63 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3359,6 +3359,12 @@ static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } + static Response getUserPermissionsOnDataset(String datasetId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/userPermissions"); + } + static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); jsonBuilder.add("dateAvailable", dateAvailable); From 38681bb113da3b9ea6359cf2da4e324e550ea463 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 07:40:24 +0100 Subject: [PATCH 0624/1092] Added: includeDeaccessioned optional query param to getVersion Datasets API endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7cfe587d8dc..5e9d02c4af3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -482,9 +482,14 @@ public 
Response listVersions(@Context ContainerRequestContext crc, @PathParam("i @GET @AuthRequired @Path("{id}/versions/{versionId}") - public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersion(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(json(dsv)); }, getRequestUser(crc)); From 835fb44325935a4509ce3139b96306b0370d290d Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 08:27:27 +0100 Subject: [PATCH 0625/1092] Added: docs for API endpoints getUserPermissionsOnDataset, getUserPermissionsOnFile and getVersion --- doc/sphinx-guides/source/api/dataaccess.rst | 1 + doc/sphinx-guides/source/api/native-api.rst | 30 +++++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 6edd413b7a5..f7aaa8f4ee4 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -426,6 +426,7 @@ This method returns the permissions that the calling user has on a particular fi In particular, the user permissions that this method checks, returned as booleans, are the following: * Can download the file +* Can manage the file permissions * Can edit the file owner dataset A curl example using an ``id``:: diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 3ac145b2f8e..f735079b334 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -909,6 +909,16 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0" +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeDeaccessioned=true" + .. _export-dataset-metadata-api: Export Metadata of a Dataset in Various Formats @@ -2496,6 +2506,26 @@ The API can also be used to reset the dataset to use the default/inherited value curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" +Get User Permissions on a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call returns the permissions that the calling user has on a particular dataset. 
+ +In particular, the user permissions that this method checks, returned as booleans, are the following: + +* Can view the unpublished dataset +* Can edit the dataset +* Can publish the dataset +* Can manage the dataset permissions +* Can delete the dataset draft + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/userPermissions" Files From 56b291f4bf3a1ed4e48740ed50666a1709d4febf Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 18 Oct 2023 09:42:45 +0200 Subject: [PATCH 0626/1092] Log Dataverse ID instead of JSON path --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 94a0f33a83e..2f10e623047 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -287,7 +287,7 @@ static String getAliasFromResponse(Response createDataverseResponse) { static Integer getDataverseIdFromResponse(Response createDataverseResponse) { JsonPath createdDataverse = JsonPath.from(createDataverseResponse.body().asString()); int dataverseId = createdDataverse.getInt("data.id"); - logger.info("Id found in create dataverse response: " + createdDataverse); + logger.info("Id found in create dataverse response: " + dataverseId); return dataverseId; } From e886c1adcd2cfe06e1b01a514350ba1f7f586cc1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 09:04:27 +0100 Subject: [PATCH 0627/1092] Added: includeDeaccessioned IT test case for getDatasetVersion --- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 14 +++++++++++++- .../java/edu/harvard/iq/dataverse/api/FilesIT.java | 8 ++++---- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 3 ++- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 4258773a0b3..569ebe0894b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -505,7 +505,7 @@ public void testCreatePublishDestroyDataset() { assertTrue(datasetContactFromExport.toString().contains("finch@mailinator.com")); assertTrue(firstValue.toString().contains("finch@mailinator.com")); - Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken); + Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); getDatasetVersion.prettyPrint(); getDatasetVersion.then().assertThat() .body("data.datasetId", equalTo(datasetId)) @@ -549,6 +549,18 @@ public void testCreatePublishDestroyDataset() { } assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo")); + // Test includeDeaccessioned option + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, false, apiToken); + 
getDatasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, true, apiToken); + getDatasetVersion.then().assertThat().statusCode(OK.getStatusCode()); + Response deleteDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 16726485dee..1f1321bad79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -1989,14 +1989,14 @@ public void testDeleteFile() { deleteResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 2 deleted from post v1.0 draft - Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, false, apiToken); postv1draft.prettyPrint(); postv1draft.then().assertThat() .body("data.files.size()", equalTo(1)) .statusCode(OK.getStatusCode()); // Check file 2 still in v1.0 - Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); + Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", false, apiToken); v1.prettyPrint(); v1.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("cc0.png")) @@ -2011,7 +2011,7 @@ public void testDeleteFile() { downloadResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 still in post v1.0 draft - Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, false, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) @@ -2026,7 +2026,7 @@ public void testDeleteFile() { deleteResponse3.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 deleted from post v1.0 draft - Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, false, apiToken); postv1draft3.prettyPrint(); postv1draft3.then().assertThat() .body("data.files[0]", equalTo(null)) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index be23df5ec63..0a1061c30ea 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1399,9 +1399,10 @@ static Response nativeGetUsingPersistentId(String persistentId, String apiToken) return response; } - static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken) { + static Response getDatasetVersion(String persistentId, String versionNumber, boolean includeDeaccessioned, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("includeDeaccessioned", includeDeaccessioned) .get("/api/datasets/:persistentId/versions/" + versionNumber + "?persistentId=" + persistentId); } From 52d439d3284cf91064dbabcd3dbe401faeb3ba4d Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 09:10:53 +0100 Subject: [PATCH 0628/1092] Fixed: minor docs tweak --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f735079b334..6f1c3072a55 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2511,7 +2511,7 @@ Get User Permissions on a Dataset This API call returns the permissions that the calling user has on a particular dataset. -In particular, the user permissions that this method checks, returned as booleans, are the following: +In particular, the user permissions that this API call checks, returned as booleans, are the following: * Can view the unpublished dataset * Can edit the dataset From fa1b37bca410e903c9474ebf9aa6f38fd0b59c70 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 18 Oct 2023 09:12:37 +0100 Subject: [PATCH 0629/1092] Added: release notes for #10001 --- .../10001-datasets-files-api-user-permissions.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 doc/release-notes/10001-datasets-files-api-user-permissions.md diff --git a/doc/release-notes/10001-datasets-files-api-user-permissions.md b/doc/release-notes/10001-datasets-files-api-user-permissions.md new file mode 100644 index 00000000000..0aa75f9218a --- /dev/null +++ b/doc/release-notes/10001-datasets-files-api-user-permissions.md @@ -0,0 +1,13 @@ +- New query parameter `includeDeaccessioned` added to the getVersion endpoint (/api/datasets/{id}/versions/{versionId}) to consider deaccessioned versions when searching for versions. + + +- New endpoint to get user permissions on a dataset (/api/datasets/{id}/userPermissions). In particular, the user permissions that this API call checks, returned as booleans, are the following: + + - Can view the unpublished dataset + - Can edit the dataset + - Can publish the dataset + - Can manage the dataset permissions + - Can delete the dataset draft + + +- New permission check "canManageFilePermissions" added to the existing endpoint for getting user permissions on a file (/api/access/datafile/{id}/userPermissions). 
\ No newline at end of file From 836d4a7006ea222f30f34d816c7e388a44d44142 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 18 Oct 2023 13:15:51 +0200 Subject: [PATCH 0630/1092] Authenticate using API token in header Co-authored-by: Philip Durbin --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 2f10e623047..b4d77252615 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -375,7 +375,7 @@ static Response showDataverseContents(String alias, String apiToken) { static Response getGuestbookResponses(String dataverseAlias, Long guestbookId, String apiToken) { RequestSpecification requestSpec = given() - .auth().basic(apiToken, EMPTY_STRING); + .header(API_TOKEN_HTTP_HEADER, apiToken); if (guestbookId != null) { requestSpec.queryParam("guestbookId", guestbookId); } From 7d7d73165e88c7791f1271bd55a3977134c978c9 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Wed, 18 Oct 2023 13:20:23 +0200 Subject: [PATCH 0631/1092] Adjust expectations for getGuestbookResponses --- .../iq/dataverse/api/DataversesIT.java | 33 +++++++++++++++---- 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 171a35ac04f..78ece6ecc42 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -17,11 +17,13 @@ import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import jakarta.ws.rs.core.Response.Status; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.OK; +import static jakarta.ws.rs.core.Response.Status.CREATED; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; +import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; +import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -144,23 +146,42 @@ public void testMinimalDataverse() throws FileNotFoundException { deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); } + /** + * A regular user can create a Dataverse Collection and access its + * GuestbookResponses by DV alias or ID. + * A request for a non-existent Dataverse's GuestbookResponses returns + * Not Found. + * A regular user cannot access the guestbook responses for a Dataverse + * that they do not have permissions for, like the root Dataverse. 
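+     * (such a request is answered with 403 Forbidden, as the final assertion in the test verifies)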
+     */
     @Test
     public void testGetGuestbookResponses() {
         Response createUser = UtilIT.createRandomUser();
         createUser.prettyPrint();
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-        // Create a Dataverse
+
         Response create = UtilIT.createRandomDataverse(apiToken);
         create.prettyPrint();
         create.then().assertThat().statusCode(CREATED.getStatusCode());
         String alias = UtilIT.getAliasFromResponse(create);
         Integer dvId = UtilIT.getDataverseIdFromResponse(create);
-        // Get GuestbookResponses by Dataverse alias
+
+        logger.info("Request guestbook responses for non-existent Dataverse");
+        Response getResponsesByBadAlias = UtilIT.getGuestbookResponses("-1", null, apiToken);
+        getResponsesByBadAlias.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        logger.info("Request guestbook responses for existent Dataverse by alias");
         Response getResponsesByAlias = UtilIT.getGuestbookResponses(alias, null, apiToken);
         getResponsesByAlias.then().assertThat().statusCode(OK.getStatusCode());
-        // Get GuestbookResponses by Dataverse ID
+
+        logger.info("Request guestbook responses for existent Dataverse by ID");
         Response getResponsesById = UtilIT.getGuestbookResponses(dvId.toString(), null, apiToken);
         getResponsesById.then().assertThat().statusCode(OK.getStatusCode());
+
+        logger.info("Request guestbook responses for root Dataverse by alias");
+        getResponsesById = UtilIT.getGuestbookResponses("root", null, apiToken);
+        getResponsesById.prettyPrint();
+        getResponsesById.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
     }
 
     @Test
From c1a19299e547fbc47322dafde74bc75d2e138d9c Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Wed, 18 Oct 2023 13:48:47 -0400
Subject: [PATCH 0632/1092] a stub for intercepting a "circuit breaker" 503
 from the server (#9635)
---
 .../dataverse/search/SearchServiceBean.java   | 31 ++++++++++++++-----
 1 file changed, 24 insertions(+), 7 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
index be3330080c4..1b92c2a4a46 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
@@ -382,13 +382,35 @@ public SolrQueryResponse search(
         // Make the solr query
         // -----------------------------------
         QueryResponse queryResponse = null;
+        boolean solrTemporarilyUnavailable = false;
+
         try {
             queryResponse = solrClientService.getSolrClient().query(solrQuery);
+
         } catch (RemoteSolrException ex) {
             String messageFromSolr = ex.getLocalizedMessage();
-            logger.info("message from solr exception: "+messageFromSolr);
+            logger.info("message from the solr exception: "+messageFromSolr);
+            logger.info("code from the solr exception: "+ex.code());
+
+            if (queryResponse != null) {
+                logger.info("return code: "+queryResponse.getStatus());
+            }
+
+            // We probably shouldn't be assuming that this is necessarily a
+            // "search syntax error", as the code below implies - could be
+            // something else too - ?
+
+            // Specifically, we now rely on the Solr "circuit breaker" mechanism
+            // to start dropping requests with 503, when the service is
+            // overwhelmed with request load (with the assumption that this is
+            // a transient condition):
+            if (ex.code() == 503) {
+                solrTemporarilyUnavailable = true;
+                // actual logic for communicating this state back to the local
+                // client code TBD (@todo)
+            }
             String error = "Search Syntax Error: ";
             String stringToHide = "org.apache.solr.search.SyntaxError: ";
             if (messageFromSolr.startsWith(stringToHide)) {
@@ -403,12 +425,7 @@ public SolrQueryResponse search(
             exceptionSolrQueryResponse.setError(error);
             // we can't show anything because of the search syntax error
-
+
             long zeroNumResultsFound = 0;
             long zeroGetResultsStart = 0;
             List emptySolrSearchResults = new ArrayList<>();
From 3c98c7d00e8e24be44e40b818d2ad2ff61a8ab29 Mon Sep 17 00:00:00 2001
From: bencomp
Date: Thu, 19 Oct 2023 00:22:16 +0200
Subject: [PATCH 0633/1092] Fix identifer typo in docs
---
 .../source/developers/s3-direct-upload-api.rst | 8 ++++----
 doc/sphinx-guides/source/user/dataset-management.rst | 2 +-
 doc/sphinx-guides/source/user/find-use-data.rst | 2 +-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
index 4bf2bbdcc79..423fb02d385 100644
--- a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
+++ b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
@@ -116,7 +116,7 @@ The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.Data
 curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
 Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. Replacing an existing file in the Dataset @@ -177,7 +177,7 @@ Note that the API call does not validate that the file matches the hash value su curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA" Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. -With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. Replacing multiple existing files in the Dataset ------------------------------------------------ @@ -275,4 +275,4 @@ The JSON object returned as a response from this API call includes a "data" that Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide. -With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. +With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above. 
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 1e8ea897032..bac0192bdd6 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -784,7 +784,7 @@ The "Compute" button on dataset and file pages will allow you to compute on a si Cloud Storage Access -------------------- -If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifer can then be used to allow direct access to the dataset. +If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifier can then be used to allow direct access to the dataset. .. _deaccession: diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst index 2e82a1482b4..bea23cbcd0e 100755 --- a/doc/sphinx-guides/source/user/find-use-data.rst +++ b/doc/sphinx-guides/source/user/find-use-data.rst @@ -71,7 +71,7 @@ View Files Files in a Dataverse installation each have their own landing page that can be reached through the search results or through the Files table on their parent dataset's page. The dataset page and file page offer much the same functionality in terms of viewing and editing files, with a few small exceptions. -- In installations that have enabled support for persistent identifers (PIDs) at the file level, the file page includes the file's DOI or handle, which can be found in the file citation and also under the Metadata tab. +- In installations that have enabled support for persistent identifiers (PIDs) at the file level, the file page includes the file's DOI or handle, which can be found in the file citation and also under the Metadata tab. - Previewers for several common file types are available and can be added by installation administrators. - The file page's Versions tab gives you a version history that is more focused on the individual file rather than the dataset as a whole. From d76e494f7c6889d9dfc23406e06b25f1a80d3507 Mon Sep 17 00:00:00 2001 From: bencomp Date: Thu, 19 Oct 2023 01:04:50 +0200 Subject: [PATCH 0634/1092] Fix identifer typo in code Just in case code outside this project is referencing the old methods, I created deprecated versions. 
--- .../dataverse/DatasetVersionServiceBean.java | 26 ++++++++++++++----- .../java/edu/harvard/iq/dataverse/Shib.java | 11 ++++---- .../CollectionListManagerImpl.java | 2 +- .../datadeposit/MediaResourceManagerImpl.java | 2 +- .../AuthenticationServiceBean.java | 10 +++---- .../authorization/UserIdentifier.java | 21 ++++++++++++--- .../providers/shib/ShibUtil.java | 17 ++++++++++++ .../filesystem/FileRecordJobListener.java | 2 +- .../importer/filesystem/FileRecordReader.java | 2 +- .../impl/ImportFromFileSystemCommand.java | 2 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 12 ++++----- .../providers/shib/ShibUtilTest.java | 18 ++++++------- 12 files changed, 85 insertions(+), 40 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index c2f9027a38a..cd3291e6222 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -495,10 +495,24 @@ private DatasetVersion getDatasetVersionByQuery(String queryString){ } } // end getDatasetVersionByQuery - - - - public DatasetVersion retrieveDatasetVersionByIdentiferClause(String identifierClause, String version){ + /** + * @deprecated because of a typo; use {@link #retrieveDatasetVersionByIdentifierClause(String, String) retrieveDatasetVersionByIdentifierClause} instead + * @see #retrieveDatasetVersionByIdentifierClause(String, String) + * @param identifierClause + * @param version + * @return a DatasetVersion if found, or {@code null} otherwise + */ + @Deprecated + public DatasetVersion retrieveDatasetVersionByIdentiferClause(String identifierClause, String version) { + return retrieveDatasetVersionByIdentifierClause(identifierClause, version); + } + + /** + * @param identifierClause + * @param version + * @return a DatasetVersion if found, or {@code null} otherwise + */ + public DatasetVersion retrieveDatasetVersionByIdentifierClause(String identifierClause, String version) { if (identifierClause == null){ return null; @@ -620,7 +634,7 @@ public RetrieveDatasetVersionResponse retrieveDatasetVersionByPersistentId(Strin identifierClause += " AND ds.identifier = '" + parsedId.getIdentifier() + "'"; - DatasetVersion ds = retrieveDatasetVersionByIdentiferClause(identifierClause, version); + DatasetVersion ds = retrieveDatasetVersionByIdentifierClause(identifierClause, version); if (ds != null){ msg("retrieved dataset: " + ds.getId() + " semantic: " + ds.getSemanticVersion()); @@ -718,7 +732,7 @@ public DatasetVersion getDatasetVersionById(Long datasetId, String version){ String identifierClause = this.getIdClause(datasetId); - DatasetVersion ds = retrieveDatasetVersionByIdentiferClause(identifierClause, version); + DatasetVersion ds = retrieveDatasetVersionByIdentifierClause(identifierClause, version); return ds; diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index bee1182e248..24c0f9d7926 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.Date; import java.util.List; +import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; import jakarta.ejb.EJBException; @@ -62,7 +63,7 @@ public class Shib implements java.io.Serializable { HttpServletRequest request; private String userPersistentId; - private String 
internalUserIdentifer; + private String internalUserIdentifier; AuthenticatedUserDisplayInfo displayInfo; /** * @todo Remove this boolean some day? Now the mockups show a popup. Should @@ -210,8 +211,8 @@ public void init() { } String usernameAssertion = getValueFromAssertion(ShibUtil.usernameAttribute); - internalUserIdentifer = ShibUtil.generateFriendlyLookingUserIdentifer(usernameAssertion, emailAddress); - logger.fine("friendly looking identifer (backend will enforce uniqueness):" + internalUserIdentifer); + internalUserIdentifier = ShibUtil.generateFriendlyLookingUserIdentifier(usernameAssertion, emailAddress); + logger.log(Level.FINE, "friendly looking identifier (backend will enforce uniqueness): {0}", internalUserIdentifier); String shibAffiliationAttribute = settingsService.getValueForKey(SettingsServiceBean.Key.ShibAffiliationAttribute); String affiliation = (StringUtils.isNotBlank(shibAffiliationAttribute)) @@ -326,7 +327,7 @@ public String confirmAndCreateAccount() { AuthenticatedUser au = null; try { au = authSvc.createAuthenticatedUser( - new UserRecordIdentifier(shibAuthProvider.getId(), lookupStringPerAuthProvider), internalUserIdentifer, displayInfo, true); + new UserRecordIdentifier(shibAuthProvider.getId(), lookupStringPerAuthProvider), internalUserIdentifier, displayInfo, true); } catch (EJBException ex) { /** * @todo Show the ConstraintViolationException, if any. @@ -354,7 +355,7 @@ public String confirmAndConvertAccount() { visibleTermsOfUse = false; ShibAuthenticationProvider shibAuthProvider = new ShibAuthenticationProvider(); String lookupStringPerAuthProvider = userPersistentId; - UserIdentifier userIdentifier = new UserIdentifier(lookupStringPerAuthProvider, internalUserIdentifer); + UserIdentifier userIdentifier = new UserIdentifier(lookupStringPerAuthProvider, internalUserIdentifier); logger.fine("builtin username: " + builtinUsername); AuthenticatedUser builtInUserToConvert = authSvc.canLogInAsBuiltinUser(builtinUsername, builtinPassword); if (builtInUserToConvert != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java index 084136f2b5d..541fa144e80 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java @@ -94,7 +94,7 @@ public Feed listCollectionContents(IRI iri, AuthCredentials authCredentials, Swo throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias); } } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifer from URL: " + iri); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifier from URL: " + iri); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 15838a09456..a878720cc39 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -219,7 +219,7 @@ public void deleteMediaResource(String uri, AuthCredentials authCredentials, Swo throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported file type found in URL: " + uri); } } else { - throw new 
SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifer not specified in URL: " + uri); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifier not specified in URL: " + uri); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 106a83a4ad1..496620cd6e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -580,7 +580,7 @@ public boolean updateProvider( AuthenticatedUser authenticatedUser, String authe * {@code userDisplayInfo}, a lookup entry for them based * UserIdentifier.getLookupStringPerAuthProvider (within the supplied * authentication provider), and internal user identifier (used for role - * assignments, etc.) based on UserIdentifier.getInternalUserIdentifer. + * assignments, etc.) based on UserIdentifier.getInternalUserIdentifier. * * @param userRecordId * @param proposedAuthenticatedUserIdentifier @@ -605,20 +605,20 @@ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecord proposedAuthenticatedUserIdentifier = proposedAuthenticatedUserIdentifier.trim(); } // we now select a username for the generated AuthenticatedUser, or give up - String internalUserIdentifer = proposedAuthenticatedUserIdentifier; + String internalUserIdentifier = proposedAuthenticatedUserIdentifier; // TODO should lock table authenticated users for write here - if ( identifierExists(internalUserIdentifer) ) { + if ( identifierExists(internalUserIdentifier) ) { if ( ! generateUniqueIdentifier ) { return null; } int i=1; - String identifier = internalUserIdentifer + i; + String identifier = internalUserIdentifier + i; while ( identifierExists(identifier) ) { i += 1; } authenticatedUser.setUserIdentifier(identifier); } else { - authenticatedUser.setUserIdentifier(internalUserIdentifer); + authenticatedUser.setUserIdentifier(internalUserIdentifier); } authenticatedUser = save( authenticatedUser ); // TODO should unlock table authenticated users for write here diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java index 1ac2c7583d6..312910e52c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java @@ -25,18 +25,31 @@ public class UserIdentifier { /** * The String used in the permission system to assign roles, for example. 
*/ - String internalUserIdentifer; + String internalUserIdentifier; - public UserIdentifier(String lookupStringPerAuthProvider, String internalUserIdentifer) { + public UserIdentifier(String lookupStringPerAuthProvider, String internalUserIdentifier) { this.lookupStringPerAuthProvider = lookupStringPerAuthProvider; - this.internalUserIdentifer = internalUserIdentifer; + this.internalUserIdentifier = internalUserIdentifier; } public String getLookupStringPerAuthProvider() { return lookupStringPerAuthProvider; } + /** + * @deprecated because of a typo; use {@link #getInternalUserIdentifier()} instead + * @see #getInternalUserIdentifier() + * @return the internal user identifier + */ + @Deprecated public String getInternalUserIdentifer() { - return internalUserIdentifer; + return getInternalUserIdentifier(); + } + + /** + * @return the internal user identifier + */ + public String getInternalUserIdentifier() { + return internalUserIdentifier; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java index fff135e0dec..4cf41903405 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java @@ -133,7 +133,24 @@ public static String findSingleValue(String mayHaveMultipleValues) { return singleValue; } + /** + * @deprecated because of a typo; use {@link #generateFriendlyLookingUserIdentifier(String, String)} instead + * @see #generateFriendlyLookingUserIdentifier(String, String) + * @param usernameAssertion + * @param email + * @return a friendly-looking user identifier based on the asserted username or email, or a UUID as fallback + */ + @Deprecated public static String generateFriendlyLookingUserIdentifer(String usernameAssertion, String email) { + return generateFriendlyLookingUserIdentifier(usernameAssertion, email); + } + + /** + * @param usernameAssertion + * @param email + * @return a friendly-looking user identifier based on the asserted username or email, or a UUID as fallback + */ + public static String generateFriendlyLookingUserIdentifier(String usernameAssertion, String email) { if (usernameAssertion != null && !usernameAssertion.isEmpty()) { return usernameAssertion; } diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java index 593a5cbfdc3..a2f76150d7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java @@ -450,7 +450,7 @@ private void loadChecksumManifest() { // We probably want package files to be able to use specific stores instead. // More importantly perhaps, the approach above does not take into account // if the dataset may have an AlternativePersistentIdentifier, that may be - // designated isStorageLocationDesignator() - i.e., if a different identifer + // designated isStorageLocationDesignator() - i.e., if a different identifier // needs to be used to name the storage directory, instead of the main/current // persistent identifier above. 
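The same misspelling fix recurs in DatasetVersionServiceBean, UserIdentifier, and ShibUtil above, always with the same shape: the old name survives as a deprecated shim that delegates to the corrected method, so existing callers keep compiling. A minimal standalone sketch of the pattern (hypothetical class and method names, not code from these patches):

    public class ExampleService {
        /**
         * @deprecated because of a typo; use {@link #findRecord(String)} instead
         */
        @Deprecated
        public String findRecrod(String id) {
            // shim: delegate so legacy callers continue to work
            return findRecord(id);
        }

        public String findRecord(String id) {
            // the single real implementation lives under the corrected name
            return "record-" + id;
        }
    }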
getJobLogger().log(Level.INFO, "Reading checksum manifest: " + manifestAbsolutePath); diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java index fb702c21df2..9ce30683a87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java @@ -109,7 +109,7 @@ public void open(Serializable checkpoint) throws Exception { // We probably want package files to be able to use specific stores instead. // More importantly perhaps, the approach above does not take into account // if the dataset may have an AlternativePersistentIdentifier, that may be - // designated isStorageLocationDesignator() - i.e., if a different identifer + // designated isStorageLocationDesignator() - i.e., if a different identifier // needs to be used to name the storage directory, instead of the main/current // persistent identifier above. getJobLogger().log(Level.INFO, "Reading dataset directory: " + directory.getAbsolutePath() diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java index c03c77d42fd..9a75f437b66 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java @@ -83,7 +83,7 @@ public JsonObject execute(CommandContext ctxt) throws CommandException { // We probably want package files to be able to use specific stores instead. // More importantly perhaps, the approach above does not take into account // if the dataset may have an AlternativePersistentIdentifier, that may be - // designated isStorageLocationDesignator() - i.e., if a different identifer + // designated isStorageLocationDesignator() - i.e., if a different identifier // needs to be used to name the storage directory, instead of the main/current // persistent identifier above. 
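The caveat above is repeated verbatim in FileRecordJobListener, FileRecordReader, and ImportFromFileSystemCommand. A sketch of the resolution it points toward, assuming the storage-aware accessors used later in this series (see the DataAccess.getLocationFromStorageId change below) already honor an AlternativePersistentIdentifier designated as the storage location:

    // Sketch only, not part of this patch: prefer the storage-aware accessors
    // over the main/current persistent identifier when naming the directory.
    static String storageDirectoryFor(Dataset dataset) {
        return dataset.getAuthorityForFileStorage() + "/"
                + dataset.getIdentifierForFileStorage();
    }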
if (!isValidDirectory(directory)) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index a5a4924ad77..0c5de662e8a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -153,10 +153,10 @@ public void testFilterAuthenticatedUsers() throws Exception { .body("data.pagination.pageCount", equalTo(1)) .body("data.pagination.numResults", equalTo(numResults)); - String userIdentifer; + String userIdentifier; for (int i=0; i < numResults; i++){ - userIdentifer = JsonPath.from(filterReponse01.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); - assertEquals(randomUsernames.contains(userIdentifer), true); + userIdentifier = JsonPath.from(filterReponse01.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); + assertTrue(randomUsernames.contains(userIdentifier)); } List userList1 = JsonPath.from(filterReponse01.body().asString()).getList("data.users"); @@ -177,10 +177,10 @@ public void testFilterAuthenticatedUsers() throws Exception { .body("data.pagination.pageCount", equalTo(3)) .body("data.pagination.numResults", equalTo(numResults)); - String userIdentifer2; + String userIdentifier2; for (int i=0; i < numUsersReturned; i++){ - userIdentifer2 = JsonPath.from(filterReponse02.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); - assertEquals(randomUsernames.contains(userIdentifer2), true); + userIdentifier2 = JsonPath.from(filterReponse02.getBody().asString()).getString("data.users[" + i + "].userIdentifier"); + assertTrue(randomUsernames.contains(userIdentifier2)); } List userList2 = JsonPath.from(filterReponse02.body().asString()).getList("data.users"); diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java index c644a4e2b2a..9ace90ac496 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java @@ -102,16 +102,16 @@ public void testFindSingleValue() { } @Test - public void testGenerateFriendlyLookingUserIdentifer() { + public void testGenerateFriendlyLookingUserIdentifier() { int lengthOfUuid = UUID.randomUUID().toString().length(); - assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifer("uid1", null)); - assertEquals(" leadingWhiteSpace", ShibUtil.generateFriendlyLookingUserIdentifer(" leadingWhiteSpace", null)); - assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifer("uid1", "email1@example.com")); - assertEquals("email1", ShibUtil.generateFriendlyLookingUserIdentifer(null, "email1@example.com")); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, null).length()); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, "").length()); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer("", null).length()); - assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, "junkEmailAddress").length()); + assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifier("uid1", null)); + assertEquals(" leadingWhiteSpace", ShibUtil.generateFriendlyLookingUserIdentifier(" leadingWhiteSpace", null)); + assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifier("uid1", "email1@example.com")); + 
assertEquals("email1", ShibUtil.generateFriendlyLookingUserIdentifier(null, "email1@example.com")); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, null).length()); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, "").length()); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier("", null).length()); + assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, "junkEmailAddress").length()); } @Test From 5bca73896f0b482a9c8f838d3a01d37d235b57ac Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Thu, 19 Oct 2023 18:41:02 +0530 Subject: [PATCH 0635/1092] Added tabulartags in files metadata for files metadata --- doc/sphinx-guides/source/api/native-api.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index eedf23fd04e..1e0804ce7d8 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,9 +3136,11 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}', "dataFileTags":["Survey"]} \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" +Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
+ Also note that dataFileTags are not versioned and changes to these will update the published version of the file. .. _EditingVariableMetadata: From ecbb020ed7da390c378fb76f08c9c5fb72677189 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Oct 2023 09:18:12 -0400 Subject: [PATCH 0636/1092] add/standardize retrieveSizeFromMedia call --- .../iq/dataverse/dataaccess/FileAccessIO.java | 33 +- .../dataverse/dataaccess/InputStreamIO.java | 5 + .../dataaccess/RemoteOverlayAccessIO.java | 14 +- .../iq/dataverse/dataaccess/S3AccessIO.java | 21 +- .../iq/dataverse/dataaccess/StorageIO.java | 379 +++++++++--------- .../dataverse/dataaccess/SwiftAccessIO.java | 5 + 6 files changed, 241 insertions(+), 216 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index d95df1567bd..3e6c802c526 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -35,8 +35,6 @@ import java.util.List; import java.util.function.Predicate; import java.util.logging.Logger; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import java.util.stream.Collectors; // Dataverse imports: @@ -115,7 +113,7 @@ public void open (DataAccessOption... options) throws IOException { this.setInputStream(fin); setChannel(fin.getChannel()); - this.setSize(getLocalFileSize()); + this.setSize(retrieveSizeFromMedia()); if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") @@ -506,21 +504,6 @@ public void delete() throws IOException { // Auxilary helper methods, filesystem access-specific: - private long getLocalFileSize () { - long fileSize = -1; - - try { - File testFile = getFileSystemPath().toFile(); - if (testFile != null) { - fileSize = testFile.length(); - } - return fileSize; - } catch (IOException ex) { - return -1; - } - - } - public FileInputStream openLocalFileAsInputStream () { FileInputStream in; @@ -742,4 +725,18 @@ public List cleanUp(Predicate filter, boolean dryRun) throws IOE return toDelete; } + @Override + public long retrieveSizeFromMedia() { + long fileSize = -1; + try { + File testFile = getFileSystemPath().toFile(); + if (testFile != null) { + fileSize = testFile.length(); + } + return fileSize; + } catch (IOException ex) { + return -1; + } + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index be6f9df0254..de392b74cca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -165,4 +165,9 @@ public List cleanUp(Predicate filter, boolean dryRun) throws IOE throw new UnsupportedDataAccessOperationException("InputStreamIO: tthis method is not supported in this DataAccess driver."); } + @Override + public long retrieveSizeFromMedia() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException("InputStreamIO: this method is not supported in this DataAccess driver."); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java index a9653f2ab68..9c1f5ba23aa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java @@ -64,8 +64,6 @@ public class RemoteOverlayAccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO"); - - String globusAccessToken = null; protected StorageIO baseStore = null; protected String path = null; @@ -155,7 +153,7 @@ public void open(DataAccessOption... options) throws IOException { this.setSize(dataFile.getFilesize()); } else { logger.fine("Setting size"); - this.setSize(retrieveSize()); + this.setSize(retrieveSizeFromMedia()); } if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { @@ -183,7 +181,8 @@ public void open(DataAccessOption... options) throws IOException { } } - long retrieveSize() { + @Override + public long retrieveSizeFromMedia() { long size = -1; HttpHead head = new HttpHead(baseUrl + "/" + path); try { @@ -383,7 +382,7 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { @Override public boolean exists() { logger.fine("Exists called"); - return (retrieveSize() != -1); + return (retrieveSizeFromMedia() != -1); } @Override @@ -502,8 +501,9 @@ protected void configureStores(DataAccessRequest req, String driverId, String st if (index > 0) { storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); } - // THe base store needs the baseStoreIdentifier and not the relative URL - fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); + // The base store needs the baseStoreIdentifier and not the relative URL (if it exists) + int endOfId = storageLocation.indexOf("//"); + fullStorageLocation = (endOfId>-1) ? storageLocation.substring(0, endOfId) : storageLocation; switch (baseDriverType) { case DataAccess.S3: diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 822ada0b83e..b0f9f0ffb05 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -207,14 +207,7 @@ public void open(DataAccessOption... 
options) throws IOException { if (isReadAccess) { - key = getMainFileKey(); - ObjectMetadata objectMetadata = null; - try { - objectMetadata = s3.getObjectMetadata(bucketName, key); - } catch (SdkClientException sce) { - throw new IOException("Cannot get S3 object " + key + " ("+sce.getMessage()+")"); - } - this.setSize(objectMetadata.getContentLength()); + this.setSize(retrieveSizeFromMedia()); if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") @@ -1385,4 +1378,16 @@ public List cleanUp(Predicate filter, boolean dryRun) throws IOE } return toDelete; } + + @Override + public long retrieveSizeFromMedia() throws IOException { + key = getMainFileKey(); + ObjectMetadata objectMetadata = null; + try { + objectMetadata = s3.getObjectMetadata(bucketName, key); + } catch (SdkClientException sce) { + throw new IOException("Cannot get S3 object " + key + " (" + sce.getMessage() + ")"); + } + return objectMetadata.getContentLength(); + } } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 3bc83538679..f3c2ef5f513 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -20,7 +20,6 @@ package edu.harvard.iq.dataverse.dataaccess; - import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; @@ -43,7 +42,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; - /** * * @author Leonid Andreev @@ -55,15 +53,15 @@ public abstract class StorageIO { public StorageIO() { } - + public StorageIO(String storageLocation, String driverId) { - this.driverId=driverId; + this.driverId = driverId; } public StorageIO(T dvObject, DataAccessRequest req, String driverId) { this.dvObject = dvObject; this.req = req; - this.driverId=driverId; + this.driverId = driverId; if (this.req == null) { this.req = new DataAccessRequest(); } @@ -72,18 +70,19 @@ public StorageIO(T dvObject, DataAccessRequest req, String driverId) { } } - - // Abstract methods to be implemented by the storage drivers: public abstract void open(DataAccessOption... option) throws IOException; protected boolean isReadAccess = false; protected boolean isWriteAccess = false; - //A public store is one in which files may be accessible outside Dataverse and therefore accessible without regard to Dataverse's access controls related to restriction and embargoes. - //Currently, this is just used to warn users at upload time rather than disable restriction/embargo. + // A public store is one in which files may be accessible outside Dataverse and + // therefore accessible without regard to Dataverse's access controls related to + // restriction and embargoes. + // Currently, this is just used to warn users at upload time rather than disable + // restriction/embargo. static protected Map driverPublicAccessMap = new HashMap(); - + public boolean canRead() { return isReadAccess; } @@ -94,115 +93,118 @@ public boolean canWrite() { public abstract String getStorageLocation() throws IOException; - // This method will return a Path, if the storage method is a - // local filesystem. Otherwise should throw an IOException. + // This method will return a Path, if the storage method is a + // local filesystem. Otherwise should throw an IOException. 
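A short usage sketch for the new accessor (the calling pattern mirrors the CreateNewDataFilesCommand change later in this series; the direct-IO lookup here is an assumption):

    static long physicalSizeOf(String storageLocation) throws IOException {
        // Ask the store itself for the physical size, e.g. for a direct or
        // out-of-band upload whose filesize is not yet recorded in the database.
        StorageIO sio = DataAccess.getDirectStorageIO(storageLocation);
        return sio.retrieveSizeFromMedia();
    }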
public abstract Path getFileSystemPath() throws IOException; - - public abstract boolean exists() throws IOException; - + + public abstract boolean exists() throws IOException; + public abstract void delete() throws IOException; - + // this method for copies a local Path (for ex., a // temp file, into this DataAccess location): public abstract void savePath(Path fileSystemPath) throws IOException; - + // same, for an InputStream: /** - * This method copies a local InputStream into this DataAccess location. - * Note that the S3 driver implementation of this abstract method is problematic, - * because S3 cannot save an object of an unknown length. This effectively - * nullifies any benefits of streaming; as we cannot start saving until we - * have read the entire stream. - * One way of solving this would be to buffer the entire stream as byte[], - * in memory, then save it... Which of course would be limited by the amount - * of memory available, and thus would not work for streams larger than that. - * So we have eventually decided to save save the stream to a temp file, then - * save to S3. This is slower, but guaranteed to work on any size stream. - * An alternative we may want to consider is to not implement this method - * in the S3 driver, and make it throw the UnsupportedDataAccessOperationException, - * similarly to how we handle attempts to open OutputStreams, in this and the - * Swift driver. - * (Not an issue in either FileAccessIO or SwiftAccessIO implementations) + * This method copies a local InputStream into this DataAccess location. Note + * that the S3 driver implementation of this abstract method is problematic, + * because S3 cannot save an object of an unknown length. This effectively + * nullifies any benefits of streaming; as we cannot start saving until we have + * read the entire stream. One way of solving this would be to buffer the entire + * stream as byte[], in memory, then save it... Which of course would be limited + * by the amount of memory available, and thus would not work for streams larger + * than that. So we have eventually decided to save the stream to a temp + * file, then save to S3. This is slower, but guaranteed to work on any size + * stream. An alternative we may want to consider is to not implement this + * method in the S3 driver, and make it throw the + * UnsupportedDataAccessOperationException, similarly to how we handle attempts + * to open OutputStreams, in this and the Swift driver. (Not an issue in either + * FileAccessIO or SwiftAccessIO implementations) * * @param inputStream InputStream we want to save - * @param auxItemTag String representing this Auxiliary type ("extension") + * @param auxItemTag String representing this Auxiliary type ("extension") * @throws IOException if anything goes wrong. - */ + */ public abstract void saveInputStream(InputStream inputStream) throws IOException; + public abstract void saveInputStream(InputStream inputStream, Long filesize) throws IOException; - + // Auxiliary File Management: (new as of 4.0.2!) - + // An "auxiliary object" is an abstraction of the traditional DVN/Dataverse - // mechanism of storing extra files related to the man StudyFile/DataFile - - such as "saved original" and cached format conversions for tabular files, - // thumbnails for images, etc. - in physical files with the same file - // name but various reserved extensions. 
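The temp-file strategy this javadoc settles on can be sketched as follows; this is an illustration of the approach under the javadoc's assumptions, not the actual driver code:

    // Buffer the stream to a temp file first so the total length is known
    // before the store upload begins (S3 cannot accept an unknown length).
    static void saveViaTempFile(InputStream inputStream, StorageIO sio) throws IOException {
        java.nio.file.Path tmp = java.nio.file.Files.createTempFile("dv-upload-", ".tmp");
        try (InputStream in = inputStream) {
            java.nio.file.Files.copy(in, tmp, java.nio.file.StandardCopyOption.REPLACE_EXISTING);
            sio.savePath(tmp);
        } finally {
            java.nio.file.Files.deleteIfExists(tmp);
        }
    }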
- - //This function retrieves auxiliary files related to datasets, and returns them as inputstream - public abstract InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException ; - + // mechanism of storing extra files related to the main StudyFile/DataFile - + // such as "saved original" and cached format conversions for tabular files, + // thumbnails for images, etc. - in physical files with the same file + // name but various reserved extensions. + + // This function retrieves auxiliary files related to datasets, and returns them + // as inputstream + public abstract InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException; + public abstract Channel openAuxChannel(String auxItemTag, DataAccessOption... option) throws IOException; - - public abstract long getAuxObjectSize(String auxItemTag) throws IOException; - - public abstract Path getAuxObjectAsPath(String auxItemTag) throws IOException; - - public abstract boolean isAuxObjectCached(String auxItemTag) throws IOException; - - public abstract void backupAsAux(String auxItemTag) throws IOException; - - public abstract void revertBackupAsAux(String auxItemTag) throws IOException; - - // this method copies a local filesystem Path into this DataAccess Auxiliary location: + + public abstract long getAuxObjectSize(String auxItemTag) throws IOException; + + public abstract Path getAuxObjectAsPath(String auxItemTag) throws IOException; + + public abstract boolean isAuxObjectCached(String auxItemTag) throws IOException; + + public abstract void backupAsAux(String auxItemTag) throws IOException; + + public abstract void revertBackupAsAux(String auxItemTag) throws IOException; + + // this method copies a local filesystem Path into this DataAccess Auxiliary + // location: public abstract void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException; - + /** - * This method copies a local InputStream into this DataAccess Auxiliary location. - * Note that the S3 driver implementation of this abstract method is problematic, - * because S3 cannot save an object of an unknown length. This effectively - * nullifies any benefits of streaming; as we cannot start saving until we - * have read the entire stream. - * One way of solving this would be to buffer the entire stream as byte[], - * in memory, then save it... Which of course would be limited by the amount - * of memory available, and thus would not work for streams larger than that. - * So we have eventually decided to save save the stream to a temp file, then - * save to S3. This is slower, but guaranteed to work on any size stream. - * An alternative we may want to consider is to not implement this method - * in the S3 driver, and make it throw the UnsupportedDataAccessOperationException, - * similarly to how we handle attempts to open OutputStreams, in this and the - * Swift driver. - * (Not an issue in either FileAccessIO or SwiftAccessIO implementations) + * This method copies a local InputStream into this DataAccess Auxiliary + * location. Note that the S3 driver implementation of this abstract method is + * problematic, because S3 cannot save an object of an unknown length. This + * effectively nullifies any benefits of streaming; as we cannot start saving + * until we have read the entire stream. One way of solving this would be to + * buffer the entire stream as byte[], in memory, then save it... Which of + * course would be limited by the amount of memory available, and thus would not + * work for streams larger than that. 
So we have eventually decided to save + * the stream to a temp file, then save to S3. This is slower, but guaranteed to + * work on any size stream. An alternative we may want to consider is to not + * implement this method in the S3 driver, and make it throw the + * UnsupportedDataAccessOperationException, similarly to how we handle attempts + * to open OutputStreams, in this and the Swift driver. (Not an issue in either + * FileAccessIO or SwiftAccessIO implementations) * * @param inputStream InputStream we want to save - * @param auxItemTag String representing this Auxiliary type ("extension") + * @param auxItemTag String representing this Auxiliary type ("extension") * @throws IOException if anything goes wrong. - */ - public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException; - public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException; - - public abstract ListlistAuxObjects() throws IOException; - - public abstract void deleteAuxObject(String auxItemTag) throws IOException; - + */ + public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException; + + public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) + throws IOException; + + public abstract List listAuxObjects() throws IOException; + + public abstract void deleteAuxObject(String auxItemTag) throws IOException; + public abstract void deleteAllAuxObjects() throws IOException; private DataAccessRequest req; private InputStream in = null; - private OutputStream out; + private OutputStream out; protected Channel channel; protected DvObject dvObject; protected String driverId; - /*private int status;*/ + /* private int status; */ private long size; /** - * Where in the file to seek to when reading (default is zero bytes, the - * start of the file). + * Where in the file to seek to when reading (default is zero bytes, the start + * of the file). 
*/ private long offset; - + private String mimeType; private String fileName; private String varHeader; @@ -215,8 +217,8 @@ public boolean canWrite() { private String swiftContainerName; private boolean isLocalFile = false; - /*private boolean isRemoteAccess = false;*/ - /*private boolean isHttpAccess = false;*/ + /* private boolean isRemoteAccess = false; */ + /* private boolean isHttpAccess = false; */ private boolean noVarHeader = false; // For remote downloads: @@ -229,13 +231,14 @@ public boolean canWrite() { private String remoteUrl; protected String remoteStoreName = null; protected URL remoteStoreUrl = null; - + // For HTTP-based downloads: - /*private GetMethod method = null; - private Header[] responseHeaders;*/ + /* + * private GetMethod method = null; private Header[] responseHeaders; + */ // getters: - + public Channel getChannel() throws IOException { return channel; } @@ -255,16 +258,15 @@ public ReadableByteChannel getReadChannel() throws IOException { return (ReadableByteChannel) channel; } - - public DvObject getDvObject() - { + + public DvObject getDvObject() { return dvObject; } - + public DataFile getDataFile() { return (DataFile) dvObject; } - + public Dataset getDataset() { return (Dataset) dvObject; } @@ -277,9 +279,9 @@ public DataAccessRequest getRequest() { return req; } - /*public int getStatus() { - return status; - }*/ + /* + * public int getStatus() { return status; } + */ public long getSize() { return size; @@ -292,9 +294,9 @@ public long getOffset() { public InputStream getInputStream() throws IOException { return in; } - + public OutputStream getOutputStream() throws IOException { - return out; + return out; } public String getMimeType() { @@ -317,23 +319,23 @@ public String getRemoteUrl() { return remoteUrl; } - public String getTemporarySwiftUrl(){ + public String getTemporarySwiftUrl() { return temporarySwiftUrl; } - + public String getTempUrlExpiry() { return tempUrlExpiry; } - + public String getTempUrlSignature() { return tempUrlSignature; } - + public String getSwiftFileName() { return swiftFileName; } - public String getSwiftContainerName(){ + public String getSwiftContainerName() { return swiftContainerName; } @@ -344,34 +346,32 @@ public String getRemoteStoreName() { public URL getRemoteStoreUrl() { return remoteStoreUrl; } - - /*public GetMethod getHTTPMethod() { - return method; - } - public Header[] getResponseHeaders() { - return responseHeaders; - }*/ + /* + * public GetMethod getHTTPMethod() { return method; } + * + * public Header[] getResponseHeaders() { return responseHeaders; } + */ public boolean isLocalFile() { return isLocalFile; } - - // "Direct Access" StorageIO is used to access a physical storage - // location not associated with any dvObject. (For example, when we - // are deleting a physical file left behind by a DataFile that's - // already been deleted from the database). + + // "Direct Access" StorageIO is used to access a physical storage + // location not associated with any dvObject. (For example, when we + // are deleting a physical file left behind by a DataFile that's + // already been deleted from the database). 
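A sketch of the clean-up case this comment describes, assuming the getDirectStorageIO lookup used elsewhere in this series:

    // Direct access: no DvObject is attached, so isDirectAccess() is true and
    // delete() may remove a stranded physical file.
    static void deleteOrphanedObject(String storageLocation) throws IOException {
        StorageIO directIO = DataAccess.getDirectStorageIO(storageLocation);
        if (directIO.isDirectAccess()) {
            directIO.delete();
        }
    }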
public boolean isDirectAccess() { - return dvObject == null; + return dvObject == null; } - /*public boolean isRemoteAccess() { - return isRemoteAccess; - }*/ + /* + * public boolean isRemoteAccess() { return isRemoteAccess; } + */ - /*public boolean isHttpAccess() { - return isHttpAccess; - }*/ + /* + * public boolean isHttpAccess() { return isHttpAccess; } + */ public boolean isDownloadSupported() { return isDownloadSupported; @@ -398,9 +398,9 @@ public void setRequest(DataAccessRequest dar) { req = dar; } - /*public void setStatus(int s) { - status = s; - }*/ + /* + * public void setStatus(int s) { status = s; } + */ public void setSize(long s) { size = s; @@ -421,11 +421,11 @@ public void setOffset(long offset) throws IOException { public void setInputStream(InputStream is) { in = is; } - + public void setOutputStream(OutputStream os) { - out = os; - } - + out = os; + } + public void setChannel(Channel c) { channel = c; } @@ -450,45 +450,46 @@ public void setRemoteUrl(String u) { remoteUrl = u; } - public void setTemporarySwiftUrl(String u){ + public void setTemporarySwiftUrl(String u) { temporarySwiftUrl = u; } - - public void setTempUrlExpiry(Long u){ + + public void setTempUrlExpiry(Long u) { tempUrlExpiry = String.valueOf(u); } - + public void setSwiftFileName(String u) { swiftFileName = u; } - - public void setTempUrlSignature(String u){ + + public void setTempUrlSignature(String u) { tempUrlSignature = u; } - public void setSwiftContainerName(String u){ + public void setSwiftContainerName(String u) { swiftContainerName = u; } - /*public void setHTTPMethod(GetMethod hm) { - method = hm; - }*/ + /* + * public void setHTTPMethod(GetMethod hm) { method = hm; } + */ - /*public void setResponseHeaders(Header[] headers) { - responseHeaders = headers; - }*/ + /* + * public void setResponseHeaders(Header[] headers) { responseHeaders = headers; + * } + */ public void setIsLocalFile(boolean f) { isLocalFile = f; } - /*public void setIsRemoteAccess(boolean r) { - isRemoteAccess = r; - }*/ + /* + * public void setIsRemoteAccess(boolean r) { isRemoteAccess = r; } + */ - /*public void setIsHttpAccess(boolean h) { - isHttpAccess = h; - }*/ + /* + * public void setIsHttpAccess(boolean h) { isHttpAccess = h; } + */ public void setIsDownloadSupported(boolean d) { isDownloadSupported = d; @@ -506,12 +507,11 @@ public void setNoVarHeader(boolean nvh) { noVarHeader = nvh; } - // connection management methods: - /*public void releaseConnection() { - if (method != null) { - method.releaseConnection(); - } - }*/ + // connection management methods: + /* + * public void releaseConnection() { if (method != null) { + * method.releaseConnection(); } } + */ public void closeInputStream() { if (in != null) { @@ -528,7 +528,7 @@ public void closeInputStream() { } } } - + public String generateVariableHeader(List dvs) { String varHeader = null; @@ -571,14 +571,14 @@ protected boolean isWriteAccessRequested(DataAccessOption... options) throws IOE return false; } - public boolean isBelowIngestSizeLimit() { - long limit = Long.parseLong(System.getProperty("dataverse.files." + this.driverId + ".ingestsizelimit", "-1")); - if(limit>0 && getSize()>limit) { - return false; - } else { - return true; - } - } + public boolean isBelowIngestSizeLimit() { + long limit = Long.parseLong(System.getProperty("dataverse.files." 
+ this.driverId + ".ingestsizelimit", "-1")); + if (limit > 0 && getSize() > limit) { + return false; + } else { + return true; + } + } public boolean downloadRedirectEnabled() { return false; @@ -587,36 +587,38 @@ public boolean downloadRedirectEnabled() { public boolean downloadRedirectEnabled(String auxObjectTag) { return false; } - - public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName) throws IOException { + + public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName) + throws IOException { throw new UnsupportedDataAccessOperationException("Direct download not implemented for this storage type"); } - public static boolean isPublicStore(String driverId) { - //Read once and cache - if(!driverPublicAccessMap.containsKey(driverId)) { - driverPublicAccessMap.put(driverId, Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".public"))); + // Read once and cache + if (!driverPublicAccessMap.containsKey(driverId)) { + driverPublicAccessMap.put(driverId, + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".public"))); } return driverPublicAccessMap.get(driverId); } - + public static String getDriverPrefix(String driverId) { - return driverId+ DataAccess.SEPARATOR; + return driverId + DataAccess.SEPARATOR; } - + public static boolean isDirectUploadEnabled(String driverId) { - return (System.getProperty("dataverse.files." + driverId + ".type").equals(DataAccess.S3) && Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); + return (System.getProperty("dataverse.files." + driverId + ".type").equals(DataAccess.S3) + && Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))) + || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } - - //Check that storageIdentifier is consistent with store's config - //False will prevent direct uploads + + // Check that storageIdentifier is consistent with store's config + // False will prevent direct uploads static boolean isValidIdentifier(String driverId, String storageId) { return false; } - - //Utility to verify the standard UUID pattern for stored files. + + // Utility to verify the standard UUID pattern for stored files. protected static boolean usesStandardNamePattern(String identifier) { Pattern r = Pattern.compile("^[a-f,0-9]{11}-[a-f,0-9]{12}$"); @@ -626,4 +628,15 @@ protected static boolean usesStandardNamePattern(String identifier) { public abstract List cleanUp(Predicate filter, boolean dryRun) throws IOException; + /** + * A storage-type-specific mechanism for retrieving the size of a file. Intended + * primarily as a way to get the size before it has been recorded in the + * database, e.g. during direct/out-of-band transfers but could be useful to + * check the db values. 
+ * + * @return file size in bytes + * @throws IOException + */ + public abstract long retrieveSizeFromMedia() throws IOException; + } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java index 6c84009de3e..0d1dab581fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java @@ -954,4 +954,9 @@ public List cleanUp(Predicate filter, boolean dryRun) throws IOE } return toDelete; } + + @Override + public long retrieveSizeFromMedia() throws IOException { + throw new UnsupportedDataAccessOperationException("SwiftAccessIO: this method is not supported in this DataAccess driver."); + } } From 68ab3f3cb6399d4c73bff0bcc84d9687ab369351 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Oct 2023 09:18:58 -0400 Subject: [PATCH 0637/1092] typos, change hash notice --- .../iq/dataverse/globus/GlobusServiceBean.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 49572519696..8aa9915db58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -4,8 +4,6 @@ import com.github.benmanes.caffeine.cache.Caffeine; import com.google.gson.FieldNamingPolicy; import com.google.gson.GsonBuilder; -import com.nimbusds.oauth2.sdk.pkce.CodeVerifier; - import edu.harvard.iq.dataverse.*; import jakarta.ejb.Asynchronous; @@ -21,7 +19,6 @@ import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonPatch; -import jakarta.json.JsonValue; import jakarta.servlet.http.HttpServletRequest; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; @@ -662,6 +659,7 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S GlobusEndpoint endpoint = getGlobusEndpoint(dataset); ruleId = getRuleId(endpoint, task.getOwner_id(), "rw"); + if(ruleId!=null) { Long datasetId = rulesCache.getIfPresent(ruleId); if(datasetId!=null) { @@ -1095,7 +1093,7 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger) String fullPath = id.split("IDsplit")[1]; String fileName = id.split("IDsplit")[2]; - // ToDo: what if the file doesnot exists in s3 + // ToDo: what if the file does not exist in s3 // ToDo: what if checksum calculation failed do { @@ -1107,8 +1105,8 @@ } catch (IOException ioex) { count = 3; logger.info(ioex.getMessage()); - globusLogger.info("DataFile (fullPAth " + fullPath - + ") does not appear to be accessible withing Dataverse: "); + globusLogger.info("DataFile (fullPath " + fullPath + + ") does not appear to be accessible within Dataverse: "); } catch (Exception ex) { count = count + 1; ex.printStackTrace(); logger.info(ex.getMessage()); @@ -1119,7 +1117,7 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger) } while (count < 3); if (checksumVal.length() == 0) { - checksumVal = "NULL"; + checksumVal = "Not available in Dataverse"; } String mimeType = calculatemime(fileName); @@ -1384,4 +1382,5 @@ GlobusEndpoint getGlobusEndpoint(DvObject dvObject) { private static boolean isDataverseManaged(String driverId) { return Boolean.getBoolean("dataverse.files." 
+ driverId + ".managed"); } + } From d57b9f048490bcc2a38d8c2fc422e3797bad2fbc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Oct 2023 09:19:24 -0400 Subject: [PATCH 0638/1092] add getLocationFromStorage, add tests --- .../iq/dataverse/dataaccess/DataAccess.java | 34 +++++++++++++++---- .../dataverse/dataaccess/DataAccessTest.java | 20 +++++++++++ 2 files changed, 48 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java index 8387f8110cf..a3345cb7a8c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java @@ -153,12 +153,34 @@ public static String[] getDriverIdAndStorageLocation(String storageLocation) { } public static String getStorageIdFromLocation(String location) { - if(location.contains(SEPARATOR)) { - //It's a full location with a driverId, so strip and reapply the driver id - //NOte that this will strip the bucketname out (which s3 uses) but the S3IOStorage class knows to look at re-insert it - return location.substring(0,location.indexOf(SEPARATOR) +3) + location.substring(location.lastIndexOf('/')+1); - } - return location.substring(location.lastIndexOf('/')+1); + if (location.contains(SEPARATOR)) { + // It's a full location with a driverId, so strip and reapply the driver id + // NOte that this will strip the bucketname out (which s3 uses) but the + // S3IOStorage class knows to look at re-insert it + return location.substring(0, location.indexOf(SEPARATOR) + 3) + + location.substring(location.lastIndexOf('/') + 1); + } + return location.substring(location.lastIndexOf('/') + 1); + } + + /** Changes storageidentifiers of the form + * s3://bucketname/18b39722140-50eb7d3c5ece or file://18b39722140-50eb7d3c5ece to s3://10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece + * and + * 18b39722140-50eb7d3c5ece to 10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece + * @param id + * @param dataset + * @return + */ + public static String getLocationFromStorageId(String id, Dataset dataset) { + String path= dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/"; + if (id.contains(SEPARATOR)) { + // It's a full location with a driverId, so strip and reapply the driver id + // NOte that this will strip the bucketname out (which s3 uses) but the + // S3IOStorage class knows to look at re-insert it + return id.substring(0, id.indexOf(SEPARATOR) + 3) + path + + id.substring(id.lastIndexOf('/') + 1); + } + return path + id.substring(id.lastIndexOf('/') + 1); } public static String getDriverType(String driverId) { diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java index 1ff914adff9..f7ce061fb24 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java @@ -59,4 +59,24 @@ void testCreateNewStorageIO_createsFileAccessIObyDefault() throws IOException { StorageIO storageIo = DataAccess.createNewStorageIO(dataset, "valid-tag"); assertTrue(storageIo.getClass().equals(FileAccessIO.class)); } + + @Test + void testGetLocationFromStorageId() { + Dataset d = new Dataset(); + d.setAuthority("10.5072"); + d.setIdentifier("FK2/ABCDEF"); + assertEquals("s3://10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece", + DataAccess.getLocationFromStorageId("s3://18b39722140-50eb7d3c5ece", 
d)); + assertEquals("10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece", + DataAccess.getLocationFromStorageId("18b39722140-50eb7d3c5ece", d)); + + } + + @Test + void testGetStorageIdFromLocation() { + assertEquals("file://18b39722140-50eb7d3c5ece", + DataAccess.getStorageIdFromLocation("file://10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece")); + assertEquals("s3://18b39722140-50eb7d3c5ece", + DataAccess.getStorageIdFromLocation("s3://bucketname:10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece")); + } } From 34286830d1cfa4849a82909eaff20528980fd717 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Oct 2023 09:19:53 -0400 Subject: [PATCH 0639/1092] get size for direct uploads --- .../impl/CreateNewDataFilesCommand.java | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index ac701da1be9..a8be1bd5116 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -3,18 +3,20 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; @@ -83,7 +85,7 @@ public class CreateNewDataFilesCommand extends AbstractCommand sio; + try { + sio = DataAccess.getDirectStorageIO(DataAccess.getLocationFromStorageId(newStorageIdentifier, version.getDataset())); + + // get file size + // Note - some stores (e.g. AWS S3) only offer eventual consistency and a call + // to get the size immediately after uploading may fail. As of the addition of + // PR#9409 adding storage quotas, we are now requiring size to be available + // earlier. 
If this is seen, adding + // a delay/retry may help + newFileSize = sio.retrieveSizeFromMedia(); + } catch (IOException e) { + // If we don't get a file size, a CommandExecutionException will be thrown later in the code + e.printStackTrace(); + } + } } // Finally, if none of the special cases above were applicable (or // if we were unable to unpack an uploaded file, etc.), we'll just From 2adfa8af01124c31ada3f1801dd5f3dac0fd704e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Oct 2023 09:20:31 -0400 Subject: [PATCH 0640/1092] refactor, add delete method, etc. --- .../dataaccess/GlobusOverlayAccessIO.java | 157 ++++++++++++------ 1 file changed, 110 insertions(+), 47 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 965dc3c0947..011bb74f720 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -17,11 +17,14 @@ import org.apache.http.client.ClientProtocolException; import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; +import jakarta.json.Json; import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; /** * @author qqmyers @@ -43,7 +46,6 @@ public class GlobusOverlayAccessIO extends RemoteOverlayAcce private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO"); - String globusAccessToken = null; /* * If this is set to true, the store supports Globus transfer in and * Dataverse/the globus app manage file locations, access controls, deletion, @@ -51,35 +53,64 @@ public class GlobusOverlayAccessIO extends RemoteOverlayAcce */ private boolean dataverseManaged = false; + private String relativeDirectoryPath; + + private String endpointPath; + + private String filename; + + private String endpoint; + public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException { super(dvObject, req, driverId); - if (dvObject instanceof DataFile) { - globusAccessToken = retrieveGlobusAccessToken(); - } dataverseManaged = isDataverseManaged(this.driverId); + } + + private void parsePath() { + int filenameStart = path.lastIndexOf("/") + 1; + String endpointWithBasePath = baseUrl.substring(baseUrl.lastIndexOf("://") + 3); + int pathStart = endpointWithBasePath.indexOf("/"); + logger.info("endpointWithBasePath: " + endpointWithBasePath); + endpointPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart + 1) : ""); + logger.info("endpointPath: " + endpointPath); + + if (dataverseManaged && (dvObject!=null)) { + + Dataset ds = null; + if (dvObject instanceof Dataset) { + ds = (Dataset) dvObject; + } else if (dvObject instanceof DataFile) { + ds = ((DataFile) dvObject).getOwner(); + } + relativeDirectoryPath = "/" + ds.getAuthority() + "/" + ds.getIdentifier(); + } else { + relativeDirectoryPath = ""; + } + if (filenameStart > 0) { + relativeDirectoryPath = relativeDirectoryPath + path.substring(0, filenameStart); + } + logger.info("relativeDirectoryPath finally: " + relativeDirectoryPath); + filename = path.substring(filenameStart); + endpoint = pathStart > 0 ? 
endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath; - logger.info("GAT3: " + globusAccessToken); + } public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException { this.driverId = driverId; + configureStores(null, driverId, storageLocation); this.dataverseManaged = isDataverseManaged(this.driverId); if (dataverseManaged) { String[] parts = DataAccess.getDriverIdAndStorageLocation(storageLocation); path = parts[1]; } else { this.setIsLocalFile(false); - configureStores(null, driverId, storageLocation); - path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); validatePath(path); logger.fine("Relative path: " + path); } -//ToDo - only when needed? - globusAccessToken = retrieveGlobusAccessToken(); - } - + private String retrieveGlobusAccessToken() { // String globusToken = JvmSettings.GLOBUS_TOKEN.lookup(driverId); String globusToken = System.getProperty("dataverse.files." + this.driverId + ".globus-token"); @@ -101,33 +132,16 @@ private void validatePath(String relPath) throws IOException { // Call the Globus API to get the file size @Override - long retrieveSize() { + public long retrieveSizeFromMedia() { + parsePath(); + String globusAccessToken = retrieveGlobusAccessToken(); logger.info("GAT2: " + globusAccessToken); // Construct Globus URL URI absoluteURI = null; try { - int filenameStart = path.lastIndexOf("/") + 1; - String endpointWithBasePath = baseUrl.substring(baseUrl.lastIndexOf("://") + 3); - int pathStart = endpointWithBasePath.indexOf("/"); - logger.info("endpointWithBasePath: " + endpointWithBasePath); - String directoryPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart + 1) : ""); - logger.info("directoryPath: " + directoryPath); - - if (dataverseManaged && (dvObject!=null)) { - Dataset ds = ((DataFile) dvObject).getOwner(); - directoryPath = directoryPath + "/" + ds.getAuthority() + "/" + ds.getIdentifier(); - logger.info("directoryPath now: " + directoryPath); - - } - if (filenameStart > 0) { - directoryPath = directoryPath + path.substring(0, filenameStart); - } - logger.info("directoryPath finally: " + directoryPath); - String filename = path.substring(filenameStart); - String endpoint = pathStart > 0 ? 
endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath; absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint - + "/ls?path=" + directoryPath + "&filter=name:" + filename); + + "/ls?path=" + endpointPath + relativeDirectoryPath + "&filter=name:" + filename); HttpGet get = new HttpGet(absoluteURI); logger.info("Token is " + globusAccessToken); @@ -166,25 +180,63 @@ public InputStream getInputStream() throws IOException { @Override public void delete() throws IOException { -// Fix - // Delete is best-effort - we tell the remote server and it may or may not - // implement this call + parsePath(); + // Delete is best-effort - we tell the endpoint to delete don't monitor whether + // it succeeds if (!isDirectAccess()) { throw new IOException("Direct Access IO must be used to permanently delete stored file objects"); } + String globusAccessToken = retrieveGlobusAccessToken(); + // Construct Globus URL + URI absoluteURI = null; try { - HttpDelete del = new HttpDelete(baseUrl + "/" + path); - CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext); - try { - int code = response.getStatusLine().getStatusCode(); - switch (code) { + + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/submission_id"); + HttpGet get = new HttpGet(absoluteURI); + + logger.info("Token is " + globusAccessToken); + get.addHeader("Authorization", "Bearer " + globusAccessToken); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); + if (response.getStatusLine().getStatusCode() == 200) { + // Get reponse as string + String responseString = EntityUtils.toString(response.getEntity()); + logger.info("Response from " + get.getURI().toString() + " is: " + responseString); + JsonObject responseJson = JsonUtil.getJsonObject(responseString); + String submissionId = responseJson.getString("value"); + logger.info("submission_id for delete is: " + submissionId); + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/delete"); + HttpPost post = new HttpPost(absoluteURI); + JsonObjectBuilder taskJsonBuilder = Json.createObjectBuilder(); + taskJsonBuilder.add("submission_id", submissionId).add("DATA_TYPE", "delete").add("endpoint", endpoint) + .add("DATA", Json.createArrayBuilder().add(Json.createObjectBuilder().add("DATA_TYPE", "delete_item").add("path", + endpointPath + relativeDirectoryPath + "/" + filename))); + post.setHeader("Content-Type", "application/json"); + post.addHeader("Authorization", "Bearer " + globusAccessToken); + String taskJson= JsonUtil.prettyPrint(taskJsonBuilder.build()); + logger.info("Sending: " + taskJson); + post.setEntity(new StringEntity(taskJson, "utf-8")); + CloseableHttpResponse postResponse = getSharedHttpClient().execute(post, localContext); + int statusCode=postResponse.getStatusLine().getStatusCode(); + logger.info("Response :" + statusCode + ": " +postResponse.getStatusLine().getReasonPhrase()); + switch (statusCode) { + case 202: + // ~Success - delete task was accepted + logger.info("Globus delete initiated: " + EntityUtils.toString(postResponse.getEntity())); + break; case 200: - logger.fine("Sent DELETE for " + baseUrl + "/" + path); + // Duplicate - delete task was already accepted + logger.info("Duplicate Globus delete: " + EntityUtils.toString(postResponse.getEntity())); + break; default: - logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code); + logger.warning("Response from " + post.getURI().toString() + " was " + + 
postResponse.getStatusLine().getStatusCode()); + logger.info(EntityUtils.toString(postResponse.getEntity())); } - } finally { - EntityUtils.consume(response.getEntity()); + + } else { + logger.warning("Response from " + get.getURI().toString() + " was " + + response.getStatusLine().getStatusCode()); + logger.info(EntityUtils.toString(response.getEntity())); } } catch (Exception e) { logger.warning(e.getMessage()); @@ -250,6 +302,16 @@ static boolean isValidIdentifier(String driverId, String storageId) { return true; } + @Override + public String getStorageLocation() throws IOException { + parsePath(); + if (dataverseManaged) { + return this.driverId + DataAccess.SEPARATOR + relativeDirectoryPath + "/" + filename; + } else { + return super.getStorageLocation(); + } + } + public static void main(String[] args) { System.out.println("Running the main method"); if (args.length > 0) { @@ -272,7 +334,7 @@ public static void main(String[] args) { try { GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO( "globus://1234///hdc1/image001.mrc", "globus"); - logger.info("Size is " + gsio.retrieveSize()); + logger.info("Size is " + gsio.retrieveSizeFromMedia()); } catch (IOException e) { // TODO Auto-generated catch block @@ -286,7 +348,7 @@ public static void main(String[] args) { df.setOwner(ds); df.setStorageIdentifier("globus://1234///hdc1/image001.mrc"); GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO(df, null, "globus"); - logger.info("Size2 is " + gsio.retrieveSize()); + logger.info("Size2 is " + gsio.retrieveSizeFromMedia()); } catch (IOException e) { // TODO Auto-generated catch block @@ -294,4 +356,5 @@ public static void main(String[] args) { } } + } From bdba5d8ef8a459314d5b8dccab30190461bbfdea Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Oct 2023 14:03:46 -0400 Subject: [PATCH 0641/1092] implement signedUrls for globus app, refactor --- .../edu/harvard/iq/dataverse/api/Admin.java | 8 +- .../harvard/iq/dataverse/api/Datasets.java | 75 ++++++++++++++++++- .../edu/harvard/iq/dataverse/api/Files.java | 6 +- .../externaltools/ExternalToolHandler.java | 71 +----------------- .../dataverse/globus/GlobusServiceBean.java | 58 +++++++------- .../iq/dataverse/util/URLTokenUtil.java | 65 ++++++++++++++++ src/main/java/propertyFiles/Bundle.properties | 4 +- .../ExternalToolHandlerTest.java | 11 +-- .../ExternalToolServiceBeanTest.java | 4 +- 9 files changed, 192 insertions(+), 110 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fd3b9a89e54..1870c7cb508 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -96,7 +96,6 @@ import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteTemplateCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; -import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.userdata.UserListMaker; @@ -105,6 +104,7 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import java.io.IOException; @@ -2418,12 +2418,12 @@ public Response 
getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur } String baseUrl = urlInfo.getString("url"); - int timeout = urlInfo.getInt(ExternalToolHandler.TIMEOUT, 10); - String method = urlInfo.getString(ExternalToolHandler.HTTP_METHOD, "GET"); + int timeout = urlInfo.getInt(URLTokenUtil.TIMEOUT, 10); + String method = urlInfo.getString(URLTokenUtil.HTTP_METHOD, "GET"); String signedUrl = UrlSignerUtil.signUrl(baseUrl, timeout, userId, method, key); - return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl)); + return ok(Json.createObjectBuilder().add(URLTokenUtil.SIGNED_URL, signedUrl)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d3ea1b80696..aad5a95bd8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -90,6 +90,7 @@ import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.MarkupChecker; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.bagit.OREMap; import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; @@ -3328,7 +3329,7 @@ public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam(" @POST @AuthRequired - @Path("{id}/addglobusFiles") + @Path("{id}/addGlobusFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @@ -3411,6 +3412,74 @@ public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc, } + /** + * Retrieve the parameters and signed URLs required to perform a globus + * transfer. This api endpoint is expected to be called as a signed callback + * after the globus-dataverse app/other app is launched, but it will accept + * other forms of authentication. 
+ * + * @param crc + * @param datasetId + */ + @GET + @AuthRequired + @Path("{id}/globusUploadParameters") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam(value = "locale") String locale) + { + // ------------------------------------- + // (1) Get the user from the ContainerRequestContext + // ------------------------------------- + AuthenticatedUser authUser; + try { + authUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse e) { + return e.getResponse(); + } + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + String storeId = dataset.getEffectiveStorageDriverId(); + if(!DataAccess.getDriverType(storeId).equals(DataAccess.GLOBUS)) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled")); + } + boolean managed = GlobusOverlayAccessIO.isDataverseManaged(storeId); + + JsonObjectBuilder queryParams = Json.createObjectBuilder(); + queryParams.add("queryParameters", + Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}")) + .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}")) + .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}")) + .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}")) + .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}").add("managed", managed))); + + JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder(); + allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusTransferPaths") + .add(URLTokenUtil.HTTP_METHOD, "POST") + .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusTransferPaths") + .add(URLTokenUtil.TIMEOUT, 300)); + allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles") + .add(URLTokenUtil.HTTP_METHOD, "POST") + .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles") + .add(URLTokenUtil.TIMEOUT, 300)); + allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing") + .add(URLTokenUtil.HTTP_METHOD, "GET") + .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files") + .add(URLTokenUtil.TIMEOUT, 300)); + + + URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale); + return ok(tokenUtil.createPostBody(tokenUtil.getParams(queryParams.build()), allowedApiCalls.build())); + } + /** Requests permissions for a given globus user to upload to the dataset * * @param crc @@ -3915,8 +3984,8 @@ public Response getExternalToolDVParams(@Context ContainerRequestContext crc, } - ExternalToolHandler eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale); - return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())))); + URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale); + return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls()))); } catch (WrappedResponse wr) { return wr.getResponse(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 82811162d52..4c2fa8f68ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -48,6 +48,8 @@ import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.URLTokenUtil; + import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -822,10 +824,10 @@ public Response getExternalToolFMParams(@Context ContainerRequestContext crc, @P return error(BAD_REQUEST, "FileMetadata not found."); } - ExternalToolHandler eth = null; + URLTokenUtil eth = null; eth = new ExternalToolHandler(externalTool, target.getDataFile(), apiToken, target, locale); - return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())))); + return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls()))); } @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index de4317464e6..36227c2f883 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -22,12 +22,8 @@ import java.util.logging.Level; import java.util.logging.Logger; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonNumber; import jakarta.json.JsonObject; -import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonString; import jakarta.json.JsonValue; import jakarta.ws.rs.HttpMethod; @@ -41,15 +37,10 @@ */ public class ExternalToolHandler extends URLTokenUtil { - private final ExternalTool externalTool; + public final ExternalTool externalTool; private String requestMethod; - - public static final String HTTP_METHOD="httpMethod"; - public static final String TIMEOUT="timeOut"; - public static final String SIGNED_URL="signedUrl"; - public static final String NAME="name"; - public static final String URL_TEMPLATE="urlTemplate"; + /** @@ -134,10 +125,10 @@ public String handleRequest(boolean preview) { } else { // ToDo - if the allowedApiCalls() are defined, could/should we send them to - // tools using GET as well? + // tools using POST as well? if (requestMethod.equals(HttpMethod.POST)) { - String body = JsonUtil.prettyPrint(createPostBody(params).build()); + String body = JsonUtil.prettyPrint(createPostBody(params, null).build()); try { logger.info("POST Body: " + body); return postFormData(body); @@ -149,60 +140,6 @@ public String handleRequest(boolean preview) { return null; } - public JsonObject getParams(JsonObject toolParameters) { - //ToDo - why an array of object each with a single key/value pair instead of one object? 
- JsonArray queryParams = toolParameters.getJsonArray("queryParameters"); - - // ToDo return json and print later - JsonObjectBuilder paramsBuilder = Json.createObjectBuilder(); - if (!(queryParams == null) && !queryParams.isEmpty()) { - queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> { - queryParam.keySet().forEach((key) -> { - String value = queryParam.getString(key); - JsonValue param = getParam(value); - if (param != null) { - paramsBuilder.add(key, param); - } - }); - }); - } - return paramsBuilder.build(); - } - - public JsonObjectBuilder createPostBody(JsonObject params) { - JsonObjectBuilder bodyBuilder = Json.createObjectBuilder(); - bodyBuilder.add("queryParameters", params); - String apiCallStr = externalTool.getAllowedApiCalls(); - if (apiCallStr != null && !apiCallStr.isBlank()) { - JsonArray apiArray = JsonUtil.getJsonArray(externalTool.getAllowedApiCalls()); - JsonArrayBuilder apisBuilder = Json.createArrayBuilder(); - apiArray.getValuesAs(JsonObject.class).forEach(((apiObj) -> { - logger.fine(JsonUtil.prettyPrint(apiObj)); - String name = apiObj.getJsonString(NAME).getString(); - String httpmethod = apiObj.getJsonString(HTTP_METHOD).getString(); - int timeout = apiObj.getInt(TIMEOUT); - String urlTemplate = apiObj.getJsonString(URL_TEMPLATE).getString(); - logger.fine("URL Template: " + urlTemplate); - urlTemplate = SystemConfig.getDataverseSiteUrlStatic() + urlTemplate; - String apiPath = replaceTokensWithValues(urlTemplate); - logger.fine("URL WithTokens: " + apiPath); - String url = apiPath; - // Sign if apiToken exists, otherwise send unsigned URL (i.e. for guest users) - ApiToken apiToken = getApiToken(); - if (apiToken != null) { - url = UrlSignerUtil.signUrl(apiPath, timeout, apiToken.getAuthenticatedUser().getUserIdentifier(), - httpmethod, JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") - + getApiToken().getTokenString()); - } - logger.fine("Signed URL: " + url); - apisBuilder.add(Json.createObjectBuilder().add(NAME, name).add(HTTP_METHOD, httpmethod) - .add(SIGNED_URL, url).add(TIMEOUT, timeout)); - })); - bodyBuilder.add("signedUrls", apisBuilder); - } - return bodyBuilder; - } - private String postFormData(String allowedApis) throws IOException, InterruptedException { String url = null; HttpClient client = HttpClient.newHttpClient(); diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 8aa9915db58..2c0edd070f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -20,6 +20,7 @@ import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonPatch; import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.HttpMethod; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; @@ -45,6 +46,8 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import org.apache.commons.codec.binary.StringUtils; + import com.google.gson.Gson; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; @@ -58,6 +61,7 @@ import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.URLTokenUtil; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; import 
edu.harvard.iq.dataverse.util.json.JsonUtil; @Stateless @@ -120,7 +124,6 @@ private String getRuleId(GlobusEndpoint endpoint, String principal, String permi URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access_list"); MakeRequestResponse result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null); - ArrayList ids = new ArrayList(); if (result.status == 200) { AccessList al = parseJson(result.jsonResponse, AccessList.class, false); @@ -282,7 +285,7 @@ private String getUniqueFilePath(GlobusEndpoint endpoint) { //Single cache of open rules/permission requests private final Cache rulesCache = Caffeine.newBuilder() .expireAfterWrite(Duration.of(JvmSettings.GLOBUS_RULES_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES)) - .removalListener((ruleId, datasetId, cause) -> { + .evictionListener((ruleId, datasetId, cause) -> { //Delete rules that expire Dataset dataset = datasetSvc.find(datasetId); deletePermission((String) ruleId, dataset, null); @@ -575,12 +578,23 @@ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, DataFile df) } catch (Exception e) { logger.warning("GlobusAppUrlForDataset: Failed to get storePrefix for " + driverId); } - //Use URLTokenUtil for params currently in common with external tools. + // Use URLTokenUtil for params currently in common with external tools. URLTokenUtil tokenUtil = new URLTokenUtil(d, df, apiToken, localeCode); String appUrl; if (upload) { appUrl = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost") - + "/upload?datasetPid={datasetPid}&siteUrl={siteUrl}&apiToken={apiToken}&datasetId={datasetId}&datasetVersion={datasetVersion}&dvLocale={localeCode}"; + + "/upload?datasetPid={datasetPid}&siteUrl={siteUrl}&datasetId={datasetId}&datasetVersion={datasetVersion}&dvLocale={localeCode}"; + String callback = SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + d.getId() + + "/globusUploadParameters?locale=" + localeCode; + if (apiToken != null) { + callback = UrlSignerUtil.signUrl(callback, 5, apiToken.getAuthenticatedUser().getUserIdentifier(), + HttpMethod.GET, + JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + apiToken.getTokenString()); + } else { + // Shouldn't happen + logger.warning("unable to get api token for user: " + user.getIdentifier()); + } + appUrl = appUrl + "&callback=" + Base64.getEncoder().encodeToString(StringUtils.getBytesUtf8(callback)); } else { if (df == null) { appUrl = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost") @@ -637,39 +651,27 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S globusLogger = logger; } - globusLogger.info("Starting an globusUpload "); + Thread.sleep(5000); - - // ToDo - use DataAccess methods? 
- //String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3); - //datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3); - logger.fine("json: " + JsonUtil.prettyPrint(jsonData)); String taskIdentifier = jsonData.getString("taskIdentifier"); - String ruleId = null; - - Thread.sleep(5000); - // globus task status check GlobusTask task = globusStatusCheck(taskIdentifier, globusLogger); String taskStatus = getTaskStatus(task); + globusLogger.info("Starting an globusUpload "); + GlobusEndpoint endpoint = getGlobusEndpoint(dataset); - - ruleId = getRuleId(endpoint, task.getOwner_id(), "rw"); - - if(ruleId!=null) { + String ruleId = getRuleId(endpoint, task.getOwner_id(), "rw"); + logger.info("Found rule: " + ruleId); + if (ruleId != null) { Long datasetId = rulesCache.getIfPresent(ruleId); - if(datasetId!=null) { - - //Will delete rule - rulesCache.invalidate(ruleId); - } else { - //The cache already expired this rule, in which case it's delay not long enough, or we have some other problem - logger.warning("Rule " + ruleId + " not found in rulesCache"); - deletePermission(ruleId, dataset, globusLogger); + if (datasetId != null) { + + // Will delete rule + rulesCache.invalidate(ruleId); } } @@ -836,6 +838,10 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S datasetSvc.removeDatasetLocks(dataset, DatasetLock.Reason.EditInProgress); } } + if (ruleId != null) { + deletePermission(ruleId, dataset, globusLogger); + globusLogger.info("Removed upload permission: " + ruleId); + } } public String addFilesAsync(String curlCommand, Logger globusLogger) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index 4ae76a7b8db..216237105aa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -6,6 +6,10 @@ import java.util.regex.Pattern; import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonValue; import edu.harvard.iq.dataverse.DataFile; @@ -13,6 +17,8 @@ import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.json.JsonUtil; public class URLTokenUtil { @@ -22,6 +28,13 @@ public class URLTokenUtil { protected final FileMetadata fileMetadata; protected ApiToken apiToken; protected String localeCode; + + + public static final String HTTP_METHOD="httpMethod"; + public static final String TIMEOUT="timeOut"; + public static final String SIGNED_URL="signedUrl"; + public static final String NAME="name"; + public static final String URL_TEMPLATE="urlTemplate"; /** * File level @@ -193,6 +206,58 @@ private String getTokenValue(String value) { throw new IllegalArgumentException("Cannot replace reserved word: " + value); } + public JsonObjectBuilder createPostBody(JsonObject params, JsonArray allowedApiCalls) { + JsonObjectBuilder bodyBuilder = Json.createObjectBuilder(); + bodyBuilder.add("queryParameters", params); + if (allowedApiCalls != null && !allowedApiCalls.isEmpty()) { + JsonArrayBuilder apisBuilder = Json.createArrayBuilder(); + allowedApiCalls.getValuesAs(JsonObject.class).forEach(((apiObj) -> { + 
logger.fine(JsonUtil.prettyPrint(apiObj)); + String name = apiObj.getJsonString(NAME).getString(); + String httpmethod = apiObj.getJsonString(HTTP_METHOD).getString(); + int timeout = apiObj.getInt(TIMEOUT); + String urlTemplate = apiObj.getJsonString(URL_TEMPLATE).getString(); + logger.fine("URL Template: " + urlTemplate); + urlTemplate = SystemConfig.getDataverseSiteUrlStatic() + urlTemplate; + String apiPath = replaceTokensWithValues(urlTemplate); + logger.fine("URL WithTokens: " + apiPath); + String url = apiPath; + // Sign if apiToken exists, otherwise send unsigned URL (i.e. for guest users) + ApiToken apiToken = getApiToken(); + if (apiToken != null) { + url = UrlSignerUtil.signUrl(apiPath, timeout, apiToken.getAuthenticatedUser().getUserIdentifier(), + httpmethod, JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + + getApiToken().getTokenString()); + } + logger.fine("Signed URL: " + url); + apisBuilder.add(Json.createObjectBuilder().add(NAME, name).add(HTTP_METHOD, httpmethod) + .add(SIGNED_URL, url).add(TIMEOUT, timeout)); + })); + bodyBuilder.add("signedUrls", apisBuilder); + } + return bodyBuilder; + } + + public JsonObject getParams(JsonObject toolParameters) { + //ToDo - why an array of object each with a single key/value pair instead of one object? + JsonArray queryParams = toolParameters.getJsonArray("queryParameters"); + + // ToDo return json and print later + JsonObjectBuilder paramsBuilder = Json.createObjectBuilder(); + if (!(queryParams == null) && !queryParams.isEmpty()) { + queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> { + queryParam.keySet().forEach((key) -> { + String value = queryParam.getString(key); + JsonValue param = getParam(value); + if (param != null) { + paramsBuilder.add(key, param); + } + }); + }); + } + return paramsBuilder.build(); + } + public static String getScriptForUrl(String url) { String msg = BundleUtil.getStringFromBundle("externaltools.enable.browser.popups"); String script = "const newWin = window.open('" + url + "', target='_blank'); if (!newWin || newWin.closed || typeof newWin.closed == \"undefined\") {alert(\"" + msg + "\");}"; diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 3497b23eb94..88f819b417b 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2645,8 +2645,8 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status -datasets.api.globusdownloaddisabled=File transfer from Dataverse via Globus is not available for this installation of Dataverse. -datasets.api.globusuploaddisabled=File transfer to Dataverse via Globus is not available for this installation of Dataverse. +datasets.api.globusdownloaddisabled=File transfer from Dataverse via Globus is not available for this dataset. +datasets.api.globusuploaddisabled=File transfer to Dataverse via Globus is not available for this dataset. 
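For tool authors, the upshot of moving createPostBody()/getParams() into URLTokenUtil is that external tools and the new Globus callback endpoints now deliver the same POST body shape: a "queryParameters" object plus a "signedUrls" array of {name, httpMethod, signedUrl, timeOut} entries. A minimal consumer-side sketch of parsing that body with jakarta.json — the dataset id and URL values are illustrative placeholders, not taken from the patch:

    import jakarta.json.Json;
    import jakarta.json.JsonObject;
    import java.io.StringReader;

    public class SignedUrlBodyReader {
        public static void main(String[] args) {
            // Shape produced by URLTokenUtil.createPostBody(); sample values invented.
            String body = """
                    {"queryParameters":{"datasetId":42},
                     "signedUrls":[{"name":"addGlobusFiles","httpMethod":"POST",
                      "signedUrl":"https://demo.example/api/v1/datasets/42/addGlobusFiles?until=...&user=...&method=POST&token=...",
                      "timeOut":300}]}""";
            JsonObject json = Json.createReader(new StringReader(body)).readObject();
            int datasetId = json.getJsonObject("queryParameters").getInt("datasetId");
            // A tool picks the call it needs by name and uses the pre-signed URL
            // in place of an API token.
            json.getJsonArray("signedUrls").getValuesAs(JsonObject.class).stream()
                    .filter(u -> "addGlobusFiles".equals(u.getString("name")))
                    .map(u -> u.getString("signedUrl"))
                    .forEach(url -> System.out.println(datasetId + " -> " + url));
        }
    }
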
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index 21bb6633204..6f0132e2bc9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; @@ -53,7 +54,7 @@ public void testGetToolUrlWithOptionalQueryParameters() { Exception expectedException1 = null; String nullLocaleCode = null; try { - ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode); + URLTokenUtil externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode); } catch (Exception ex) { expectedException1 = ex; } @@ -71,7 +72,7 @@ public void testGetToolUrlWithOptionalQueryParameters() { DataFile dataFile = new DataFile(); dataFile.setId(42l); try { - ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, dataFile, nullApiToken, nullFileMetadata, nullLocaleCode); + URLTokenUtil externalToolHandler1 = new ExternalToolHandler(externalTool, dataFile, nullApiToken, nullFileMetadata, nullLocaleCode); } catch (Exception ex) { expectedException1 = ex; } @@ -92,7 +93,7 @@ public void testGetToolUrlWithOptionalQueryParameters() { .build().toString()); Exception expectedException2 = null; try { - ExternalToolHandler externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode); + URLTokenUtil externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode); } catch (Exception ex) { expectedException2 = ex; } @@ -225,10 +226,10 @@ public void testGetToolUrlWithAllowedApiCalls() { assertTrue(et != null); System.out.println("allowedApiCalls et created"); System.out.println(et.getAllowedApiCalls()); - ExternalToolHandler externalToolHandler = new ExternalToolHandler(et, ds, at, null); + URLTokenUtil externalToolHandler = new ExternalToolHandler(et, ds, at, null); System.out.println("allowedApiCalls eth created"); JsonObject jo = externalToolHandler - .createPostBody(externalToolHandler.getParams(JsonUtil.getJsonObject(et.getToolParameters()))).build(); + .createPostBody(externalToolHandler.getParams(JsonUtil.getJsonObject(et.getToolParameters())), JsonUtil.getJsonArray(et.getAllowedApiCalls())).build(); assertEquals(1, jo.getJsonObject("queryParameters").getInt("datasetId")); String signedUrl = jo.getJsonArray("signedUrls").getJsonObject(0).getString("signedUrl"); // The date and token will change each time but check for the constant parts of diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java index 9337949f605..4f5af8b97b0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.util.URLTokenUtil; + import java.util.ArrayList; import java.util.List; import jakarta.json.Json; @@ -49,7 +51,7 @@ public void testfindAll() { externalToolTypes.add(externalToolType); ExternalTool.Scope scope = ExternalTool.Scope.FILE; ExternalTool externalTool = new ExternalTool("displayName", "toolName", "description", externalToolTypes, scope, "http://foo.com", "{}", DataFileServiceBean.MIME_TYPE_TSV_ALT); - ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd, null); + URLTokenUtil externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd, null); List externalTools = new ArrayList<>(); externalTools.add(externalTool); List availableExternalTools = externalToolService.findExternalToolsByFile(externalTools, dataFile); From 520d5d6bdbea31ad56d465b7a2b4a1fdfaf40bb6 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:24:13 +0530 Subject: [PATCH 0642/1092] correcting the Invalid JSON error --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1e0804ce7d8..73a10f2b409 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false},"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}', "dataFileTags":["Survey"]} \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false, "dataFileTags":["Survey"]} \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. From df7a4cfb9597b3c224a8d753b3a84d57d643af34 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:36:20 +0530 Subject: [PATCH 0643/1092] minor change --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 73a10f2b409..fcd2594ac6a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3108,7 +3108,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -3116,7 +3116,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ "http://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -3128,7 +3128,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false,"dataFileTags":["Survey"]}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false,"dataFileTags":["Survey"]}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false, "dataFileTags":["Survey"]} \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]} \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. From 8c9828a3fa0c2d534ebcfc57f4cc17427e75f0b8 Mon Sep 17 00:00:00 2001 From: Saikiran Patil Date: Fri, 20 Oct 2023 00:39:49 +0530 Subject: [PATCH 0644/1092] another small change --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index fcd2594ac6a..ffb15b41fd1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3136,7 +3136,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]} \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","restrict":false, "dataFileTags":["Survey"]}' \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
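Both documentation fixes above repair the same defect: a stray "}" after "restrict":false closed the jsonData object early, so the trailing "dataFileTags" key made the whole value invalid JSON (and one example had also dropped its closing quote). Generating the payload programmatically sidesteps that class of error entirely; a sketch using the jakarta.json API already on Dataverse's classpath:

    import jakarta.json.Json;

    public class JsonDataExample {
        public static void main(String[] args) {
            // Builds the corrected jsonData payload from the curl examples above;
            // a builder cannot emit the unbalanced braces the docs used to show.
            String jsonData = Json.createObjectBuilder()
                    .add("description", "My description bbb.")
                    .add("provFreeform", "Test prov freeform")
                    .add("restrict", false)
                    .add("dataFileTags", Json.createArrayBuilder().add("Survey"))
                    .build().toString();
            System.out.println(jsonData);
        }
    }
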
From f056d6c051bf784ca4808e8757efa9afcaf7778c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 19 Oct 2023 15:10:14 -0400 Subject: [PATCH 0645/1092] minor incremental changes (#9635) --- .../search/SearchIncludeFragment.java | 30 +++++++++++++++---- .../dataverse/search/SearchServiceBean.java | 6 ++-- .../dataverse/search/SolrQueryResponse.java | 10 ++++++- src/main/webapp/search-include-fragment.xhtml | 24 +++++++++++++-- 4 files changed, 57 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 47a5621c3d6..14274a09399 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -131,7 +131,8 @@ public class SearchIncludeFragment implements java.io.Serializable { Map datasetfieldFriendlyNamesBySolrField = new HashMap<>(); Map staticSolrFieldFriendlyNamesBySolrField = new HashMap<>(); private boolean solrIsDown = false; - private boolean solrIsOverloaded = false; + private boolean solrIsTemporarilyUnavailable = false; + private boolean solrFacetsDisabled = false; private Map numberOfFacets = new HashMap<>(); // private boolean showUnpublished; List filterQueriesDebug = new ArrayList<>(); @@ -361,6 +362,14 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused if (solrQueryResponse.hasError()){ logger.info(solrQueryResponse.getError()); setSolrErrorEncountered(true); + } + // Solr "temporarily unavailable" is the condition triggered by + // receiving a 503 from the search engine, that is in turn a result + // of one of the Solr "circuit breakers" being triggered by excessive + // load. We treat this condition as distinct from "Solr is down", + // on the assumption that it is transitive. + if (solrQueryResponse.isSolrTemporarilyUnavailable()) { + setSolrTemporarilyUnavailable(true); } // This 2nd search() is for populating the "type" ("dataverse", "dataset", "file") facets: -- L.A. // (why exactly do we need it, again?) 
@@ -386,7 +395,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused } } - if (selectedTypesList.size() < 3) { + if (selectedTypesList.size() < 3 && !isSolrTemporarilyUnavailable()) { // If some types are NOT currently selected, we will need to // run another query to obtain the numbers of the unselected types: @@ -1079,14 +1088,23 @@ public void setSolrIsDown(boolean solrIsDown) { this.solrIsDown = solrIsDown; } - public boolean isSolrOverloaded() { - return solrIsOverloaded; + public boolean isSolrTemporarilyUnavailable() { + return solrIsTemporarilyUnavailable; } - public void setSolrIsOverloaded(boolean solrIsOverloaded) { - this.solrIsOverloaded = solrIsOverloaded; + public void setSolrTemporarilyUnavailable(boolean solrIsTemporarilyUnavailable) { + this.solrIsTemporarilyUnavailable = solrIsTemporarilyUnavailable; } + public boolean isFacetsDisabled() { + return solrFacetsDisabled; + } + + public void setFacetsDisabled(boolean solrFacetsDisabled) { + this.solrFacetsDisabled = solrFacetsDisabled; + } + + public boolean isRootDv() { return rootDv; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 1b92c2a4a46..6e410488794 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -382,7 +382,6 @@ public SolrQueryResponse search( // Make the solr query // ----------------------------------- QueryResponse queryResponse = null; - boolean solrTemporarilyUnavailable = false; try { queryResponse = solrClientService.getSolrClient().query(solrQuery); @@ -397,6 +396,8 @@ public SolrQueryResponse search( logger.info("return code: "+queryResponse.getStatus()); } + SolrQueryResponse exceptionSolrQueryResponse = new SolrQueryResponse(solrQuery); + // We probably shouldn't be assuming that this is necessarily a // "search syntax error", as the code below implies - could be // something else too - ? 
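The comment block added to SearchIncludeFragment above spells out the intent: a 503 from Solr is what its circuit breakers return under excessive load, so it is tracked as a transient "temporarily unavailable" state rather than "Solr is down" (the next hunk records that state on the response object). The patch explicitly leaves how callers should react as a @todo; one plausible shape is a bounded retry with backoff around the query. A hypothetical helper, not part of the patch, treating only 503 as retryable:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.SolrException;

    public class TransientSolrRetry {
        // Retries a few times when Solr answers 503, since circuit-breaker
        // rejections are expected to clear once load drops; anything else
        // (or running out of attempts) propagates to the caller.
        public static QueryResponse queryWithBackoff(SolrClient client, SolrQuery query)
                throws Exception {
            int attemptsLeft = 3;
            long delayMs = 500;
            while (true) {
                try {
                    return client.query(query);
                } catch (SolrException ex) {
                    if (ex.code() != 503 || --attemptsLeft == 0) {
                        throw ex;
                    }
                    Thread.sleep(delayMs);
                    delayMs *= 2; // exponential backoff
                }
            }
        }
    }
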
@@ -407,9 +408,9 @@ public SolrQueryResponse search( // a transient condition): if (ex.code() == 503) { - solrTemporarilyUnavailable = true; // actual logic for communicating this state back to the local // client code TBD (@todo) + exceptionSolrQueryResponse.setSolrTemporarilyUnavailable(true); } String error = "Search Syntax Error: "; @@ -421,7 +422,6 @@ public SolrQueryResponse search( error += messageFromSolr; } logger.info(error); - SolrQueryResponse exceptionSolrQueryResponse = new SolrQueryResponse(solrQuery); exceptionSolrQueryResponse.setError(error); // we can't show anything because of the search syntax error diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java index 893099ff08d..27e79cb1fc2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java @@ -26,6 +26,7 @@ public class SolrQueryResponse { private String error; private Map dvObjectCounts = new HashMap<>(); private Map publicationStatusCounts = new HashMap<>(); + private boolean solrTemporarilyUnavailable = false; public static String DATAVERSES_COUNT_KEY = "dataverses_count"; public static String DATASETS_COUNT_KEY = "datasets_count"; @@ -91,7 +92,14 @@ public JsonObjectBuilder getPublicationStatusCountsAsJSON(){ } return this.getMapCountsAsJSON(publicationStatusCounts); } - + + public boolean isSolrTemporarilyUnavailable() { + return solrTemporarilyUnavailable; + } + + public void setSolrTemporarilyUnavailable(boolean solrTemporarilyUnavailable) { + this.solrTemporarilyUnavailable = solrTemporarilyUnavailable; + } public JsonObjectBuilder getDvObjectCountsAsJSON(){ diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 718df813348..8397a14136e 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -88,12 +88,24 @@
    [hunk body omitted: the XHTML markup for this search-include-fragment.xhtml change was stripped in extraction; per the hunk header the fragment grows from 12 to 24 lines here, evidently wiring the new solrIsTemporarilyUnavailable/facetsDisabled state from the Java changes above into the search results page]
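
Circling back to the delete() method added in the GlobusOverlayAccessIO refactor earlier in this series: the Globus Transfer API makes deletion a two-step exchange — GET a fresh submission_id, then POST a delete task that references it, with 202 meaning the task was accepted and 200 meaning the same submission was already processed. A standalone sketch of that exchange with java.net.http; the token, endpoint id, and path are placeholders:

    import java.io.StringReader;
    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import jakarta.json.Json;

    public class GlobusDeleteSketch {
        static final String BASE = "https://transfer.api.globusonline.org/v0.10";

        public static void main(String[] args) throws Exception {
            String token = "GLOBUS_ACCESS_TOKEN";   // placeholder
            String endpoint = "ENDPOINT_ID";        // placeholder
            String path = "/hdc1/image001.mrc";     // placeholder
            HttpClient http = HttpClient.newHttpClient();

            // Step 1: every task submission needs its own submission_id.
            HttpRequest get = HttpRequest.newBuilder(URI.create(BASE + "/submission_id"))
                    .header("Authorization", "Bearer " + token).build();
            String idBody = http.send(get, HttpResponse.BodyHandlers.ofString()).body();
            String submissionId = Json.createReader(new StringReader(idBody))
                    .readObject().getString("value");

            // Step 2: submit the delete task, mirroring the JSON built in delete().
            String task = Json.createObjectBuilder()
                    .add("submission_id", submissionId)
                    .add("DATA_TYPE", "delete")
                    .add("endpoint", endpoint)
                    .add("DATA", Json.createArrayBuilder().add(Json.createObjectBuilder()
                            .add("DATA_TYPE", "delete_item").add("path", path)))
                    .build().toString();
            HttpRequest post = HttpRequest.newBuilder(URI.create(BASE + "/delete"))
                    .header("Authorization", "Bearer " + token)
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString(task)).build();
            HttpResponse<String> resp = http.send(post, HttpResponse.BodyHandlers.ofString());
            System.out.println(resp.statusCode() + ": " + resp.body()); // 202 accepted, 200 duplicate
        }
    }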