From aa441ceafa493a39aa66a9114159dd0dc88ffb46 Mon Sep 17 00:00:00 2001 From: bencomp Date: Fri, 31 Jul 2015 00:13:43 +0200 Subject: [PATCH] Use more generics, less trailing whitespace in the dataverse package --- .../edu/harvard/iq/dataverse/DataFile.java | 244 +++++++++--------- .../iq/dataverse/DataFileServiceBean.java | 20 +- .../edu/harvard/iq/dataverse/DataFileTag.java | 2 +- .../edu/harvard/iq/dataverse/DataTable.java | 100 +++---- .../edu/harvard/iq/dataverse/Dataset.java | 14 +- .../harvard/iq/dataverse/DatasetField.java | 8 +- .../dataverse/DatasetFieldCompoundValue.java | 4 +- .../iq/dataverse/DatasetFieldServiceBean.java | 18 +- .../edu/harvard/iq/dataverse/Dataverse.java | 22 +- 9 files changed, 222 insertions(+), 210 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 6d5a4473528..b7e6cc604a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -46,22 +46,22 @@ , @Index(columnList="restricted")}) public class DataFile extends DvObject { private static final long serialVersionUID = 1L; - + public static final char INGEST_STATUS_NONE = 65; public static final char INGEST_STATUS_SCHEDULED = 66; public static final char INGEST_STATUS_INPROGRESS = 67; - public static final char INGEST_STATUS_ERROR = 68; - + public static final char INGEST_STATUS_ERROR = 68; + private String name; - + @NotBlank @Column( nullable = false ) @Pattern(regexp = "^.*/.*$", message = "Content-Type must contain a slash") private String contentType; - + @Column( nullable = false ) private String fileSystemName; - + @Column( nullable = false ) private String md5; @@ -69,27 +69,27 @@ public class DataFile extends DvObject { private Long filesize; // Number of bytes in file. Allows 0 and null, negative numbers not permitted private boolean restricted; - + /* Tabular (formerly "subsettable") data files have DataTable objects associated with them: */ - + @OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List dataTables; - + @OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List ingestReports; - + @OneToOne(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private IngestRequest ingestRequest; - + @OneToMany(mappedBy = "dataFile", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List dataFileTags; - + @OneToMany(mappedBy="dataFile", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List fileMetadatas; - + @OneToMany(mappedBy="dataFile", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List guestbookResponses; @@ -100,22 +100,22 @@ public List getGuestbookResponses() { public void setGuestbookResponses(List guestbookResponses) { this.guestbookResponses = guestbookResponses; } - - private char ingestStatus = INGEST_STATUS_NONE; - + + private char ingestStatus = INGEST_STATUS_NONE; + @OneToOne(mappedBy = "thumbnailFile") private Dataset thumbnailForDataset; - + public DataFile() { this.fileMetadatas = new ArrayList<>(); - } + } public DataFile(String contentType) { this.contentType = contentType; this.fileMetadatas = new ArrayList<>(); } - + // The dvObject field "name" should not be used in // datafile objects. // The file name must be stored in the file metadata. 
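(Illustrative sketch, not part of the patch.) The hunks above and below carry out the commit's two themes: trimming trailing whitespace and replacing raw collection types with generic ones. A minimal sketch of the raw-type-to-generics pattern, using a hypothetical entity and field names rather than the ones in the patch:

    import java.util.ArrayList;
    import java.util.List;

    public class ExampleEntity {

        // Raw types compile, but every read needs a cast and the compiler
        // cannot reject elements of the wrong type:
        //     private List labels = new ArrayList();

        // With generics the element type is declared once on the field and
        // the diamond operator (<>) lets the constructor infer it:
        private List<String> labels = new ArrayList<>();

        public List<String> getLabels() {
            return labels;
        }

        public void addLabel(String label) {
            if (labels == null) {
                labels = new ArrayList<>();  // lazy init, same type inference
            }
            labels.add(label);
        }
    }
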
@@ -124,13 +124,13 @@ public DataFile(String name, String contentType) { this.name = name; this.contentType = contentType; this.fileMetadatas = new ArrayList<>(); - } - + } + @Override public boolean isEffectivelyPermissionRoot() { return false; } - + public List getDataTables() { return dataTables; } @@ -138,7 +138,7 @@ public List getDataTables() { public void setDataTables(List dataTables) { this.dataTables = dataTables; } - + public DataTable getDataTable() { if ( getDataTables() != null && getDataTables().size() > 0 ) { return getDataTables().get(0); @@ -149,30 +149,30 @@ public DataTable getDataTable() { public void setDataTable(DataTable dt) { if (this.getDataTables() == null) { - this.setDataTables( new ArrayList() ); + this.setDataTables( new ArrayList() ); } else { this.getDataTables().clear(); } this.getDataTables().add(dt); } - + public List getTags() { return dataFileTags; } - + public void setTags(List dataFileTags) { this.dataFileTags = dataFileTags; } - + public void addTag(DataFileTag tag) { if (dataFileTags == null) { dataFileTags = new ArrayList<>(); - } + } dataFileTags.add(tag); } - + public List getFileMetadatas() { return fileMetadatas; } @@ -180,7 +180,7 @@ public List getFileMetadatas() { public void setFileMetadatas(List fileMetadatas) { this.fileMetadatas = fileMetadatas; } - + public IngestReport getIngestReport() { if ( ingestReports != null && ingestReports.size() > 0 ) { return ingestReports.get(0); @@ -191,22 +191,22 @@ public IngestReport getIngestReport() { public void setIngestReport(IngestReport report) { if (ingestReports == null) { - ingestReports = new ArrayList(); + ingestReports = new ArrayList(); } else { ingestReports.clear(); } ingestReports.add(report); } - + public IngestRequest getIngestRequest() { return ingestRequest; } - + public void setIngestRequest(IngestRequest ingestRequest) { this.ingestRequest = ingestRequest; } - + public String getIngestReportMessage() { if ( ingestReports != null && ingestReports.size() > 0 ) { if (ingestReports.get(0).getReport() != null && !"".equals(ingestReports.get(0).getReport())) { @@ -216,9 +216,9 @@ public String getIngestReportMessage() { return "Ingest failed. No further information is available."; } public boolean isTabularData() { - return getDataTables() != null && getDataTables().size() > 0; + return getDataTables() != null && getDataTables().size() > 0; } - + public String getOriginalFileFormat() { if (isTabularData()) { DataTable dataTable = getDataTable(); @@ -235,7 +235,7 @@ public String getOriginalFileFormat() { public String getOriginalFormatLabel() { return FileUtil.getUserFriendlyOriginalType(this); } - + // The dvObject field "name" should not be used in // datafile objects. // The file name must be stored in the file metadata. 
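(Illustrative sketch, not part of the patch.) The setDataTable() and setIngestReport() changes above keep a one-to-many list that is meant to hold at most one element, clearing it — or creating it with the diamond operator — before adding the new value. A minimal, hypothetical sketch of that clear-or-create pattern with a generic list (class and method names here are invented for illustration):

    import java.util.ArrayList;
    import java.util.List;

    class SingleSlotHolder<T> {
        private List<T> items;

        // Store exactly one value in the backing list, creating it on first
        // use and clearing any previous contents otherwise.
        void setOnly(T value) {
            if (items == null) {
                items = new ArrayList<>();
            } else {
                items.clear();
            }
            items.add(value);
        }

        // Mirror of getDataTable()/getIngestReport(): return the first
        // element if present, otherwise null.
        T getOnly() {
            return (items != null && !items.isEmpty()) ? items.get(0) : null;
        }
    }
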
@@ -260,7 +260,7 @@ public void setContentType(String contentType) { public String getFriendlyType() { return FileUtil.getUserFriendlyFileType(this); } - + @Override public Dataset getOwner() { return (Dataset) super.getOwner(); @@ -269,7 +269,7 @@ public Dataset getOwner() { public void setOwner(Dataset dataset) { super.setOwner(dataset); } - + public String getStorageIdentifier() { return this.fileSystemName; } @@ -277,10 +277,10 @@ public String getStorageIdentifier() { public void setStorageIdentifier(String storageIdentifier) { this.fileSystemName = storageIdentifier; } - + public String getDescription() { FileMetadata fmd = getLatestFileMetadata(); - + if (fmd == null) { return null; } @@ -289,16 +289,16 @@ public String getDescription() { public void setDescription(String description) { FileMetadata fmd = getLatestFileMetadata(); - + if (fmd != null) { fmd.setDescription(description); } } - + public FileMetadata getFileMetadata() { return getLatestFileMetadata(); } - + private FileMetadata getLatestFileMetadata() { FileMetadata fmd = null; @@ -306,23 +306,23 @@ private FileMetadata getLatestFileMetadata() { if (fileMetadatas.size() == 1) { return fileMetadatas.get(0); } - + for (FileMetadata fileMetadata : fileMetadatas) { // if it finds a draft, return it if (fileMetadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) { return fileMetadata; - } - + } + // otherwise return the one with the latest version number if (fmd == null || fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber() ) > 0 ) { fmd = fileMetadata; - } else if ((fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber())==0 )&& + } else if ((fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber())==0 )&& ( fileMetadata.getDatasetVersion().getMinorVersionNumber().compareTo( fmd.getDatasetVersion().getMinorVersionNumber()) > 0 ) ) fmd = fileMetadata; } return fmd; } - + /** * Get property filesize, number of bytes * @return value of property filesize. @@ -331,15 +331,15 @@ public long getFilesize() { if (this.filesize == null) { // -1 means "unknown" return -1; - } + } return this.filesize; } /** * Set property filesize in bytes - * + * * Allow nulls, but not negative numbers. - * + * * @param filesize new value of property filesize. */ public void setFilesize(long filesize) { @@ -350,7 +350,7 @@ public void setFilesize(long filesize) { } /** - * Converts the stored size of the file in bytes to + * Converts the stored size of the file in bytes to * a user-friendly value in KB, MB or GB. */ public String getFriendlySize() { @@ -366,60 +366,60 @@ public void setRestricted(boolean restricted) { } - public String getmd5() { - return this.md5; + public String getmd5() { + return this.md5; } - - public void setmd5(String md5) { - this.md5 = md5; + + public void setmd5(String md5) { + this.md5 = md5; } - + public DataFileIO getAccessObject() throws IOException { DataFileIO dataAccess = DataAccess.createDataAccessObject(this); - + if (dataAccess == null) { throw new IOException("Failed to create access object for datafile."); } - - return dataAccess; - } - - - // The 2 methods below - TODO: - // remove everything filesystem-specific; - // move the functionality into storage drivers. + + return dataAccess; + } + + + // The 2 methods below - TODO: + // remove everything filesystem-specific; + // move the functionality into storage drivers. // -- L.A. 
4.0.2 - + public Path getSavedOriginalFile() { - + if (!this.isTabularData() || this.fileSystemName == null) { - return null; + return null; } - + Path studyDirectoryPath = this.getOwner().getFileSystemDirectory(); if (studyDirectoryPath == null) { return null; } String studyDirectory = studyDirectoryPath.toString(); - + Path savedOriginal = Paths.get(studyDirectory, "_" + this.fileSystemName); if (Files.exists(savedOriginal)) { return savedOriginal; } - return null; + return null; } - + /* public String getFilename() { String studyDirectory = this.getOwner().getFileSystemDirectory().toString(); - + if (studyDirectory == null || this.fileSystemName == null || this.fileSystemName.equals("")) { return null; } String fileSystemPath = studyDirectory + "/" + this.fileSystemName; return fileSystemPath.replaceAll("/", "%2F"); }*/ - + /* Does the contentType indicate a shapefile? */ @@ -429,65 +429,65 @@ public boolean isShapefileType(){ } return ShapefileHandler.SHAPEFILE_FILE_TYPE.equalsIgnoreCase(this.contentType); } - + public boolean isImage() { // Some browsers (Chrome?) seem to identify FITS files as mime // type "image/fits" on upload; this is both incorrect (the official // mime type for FITS is "application/fits", and problematic: then - // the file is identified as an image, and the page will attempt to + // the file is identified as an image, and the page will attempt to // generate a preview - which of course is going to fail... if ("image/fits".equalsIgnoreCase(contentType)) { return false; } - // a pdf file is an "image" for practical purposes (we will attempt to + // a pdf file is an "image" for practical purposes (we will attempt to // generate thumbnails and previews for them) return (contentType != null && (contentType.startsWith("image/") || contentType.equalsIgnoreCase("application/pdf"))); } - + public boolean isIngestScheduled() { return (ingestStatus == INGEST_STATUS_SCHEDULED); } - + public boolean isIngestInProgress() { return ((ingestStatus == INGEST_STATUS_SCHEDULED) || (ingestStatus == INGEST_STATUS_INPROGRESS)); } - + public boolean isIngestProblem() { return (ingestStatus == INGEST_STATUS_ERROR); } - + public void SetIngestScheduled() { ingestStatus = INGEST_STATUS_SCHEDULED; } - + public void SetIngestInProgress() { ingestStatus = INGEST_STATUS_INPROGRESS; } - + public void SetIngestProblem() { ingestStatus = INGEST_STATUS_ERROR; } - + public void setIngestDone() { ingestStatus = INGEST_STATUS_NONE; } - + public int getIngestStatus() { - return ingestStatus; + return ingestStatus; } - + public Dataset getThumbnailForDataset() { return thumbnailForDataset; } - + public void setAsThumbnailForDataset(Dataset dataset) { thumbnailForDataset = dataset; } - + /** * URL to use with the WorldMapRelatedData API * Used within dataset.xhtml - * + * * @param dataverseUserID * @return URL for "Map It" functionality */ @@ -497,32 +497,32 @@ public String getMapItURL(Long dataverseUserID){ } return WorldMapRelatedData.getMapItURL(this.getId(), dataverseUserID); } - + /* 8/10/2014 - Using the current "open access" url */ public String getMapItFileDownloadURL(String serverName){ if ((this.getId() == null)||(serverName == null)){ return null; - } + } return serverName + "/api/access/datafile/" + this.getId(); } - - /* + + /** * If this is tabular data, the corresponding dataTable may have a UNF - * "numeric fingerprint" signature - generated: + * @return the file's UNF or null if it doesn't exist */ - public String getUnf() { if (this.isTabularData()) { - // (isTabularData() method 
above verifies that that this file - // has a datDatable associated with it, so the line below is - // safe, in terms of a NullPointerException: + // (isTabularData() method above verifies that that this file + // has a datDatable associated with it, so the line below is + // safe, in terms of a NullPointerException: return this.getDataTable().getUnf(); } - return null; + return null; } - + @ManyToMany @JoinTable(name = "fileaccessrequests", @@ -530,6 +530,10 @@ public String getUnf() { inverseJoinColumns = @JoinColumn(name = "authenticated_user_id")) private List fileAccessRequesters; + /** + * Get the users who requested access to this file. + * @return a list of AuthenticatedUsers who requested access to this file + */ public List getFileAccessRequesters() { return fileAccessRequesters; } @@ -537,40 +541,40 @@ public List getFileAccessRequesters() { public void setFileAccessRequesters(List fileAccessRequesters) { this.fileAccessRequesters = fileAccessRequesters; } - - + + public boolean isHarvested() { - // TODO: + // TODO: // alternatively, we can determine whether this is a harvested file - // by looking at the storage identifier of the physical file; - // if it's something that's not a filesystem path (URL, etc.) - - // then it's a harvested object. - // -- L.A. 4.0 + // by looking at the storage identifier of the physical file; + // if it's something that's not a filesystem path (URL, etc.) - + // then it's a harvested object. + // -- L.A. 4.0 Dataset ownerDataset = this.getOwner(); if (ownerDataset != null) { - return ownerDataset.isHarvested(); + return ownerDataset.isHarvested(); } - return false; + return false; } - + public String getRemoteArchiveURL() { if (isHarvested()) { Dataset ownerDataset = this.getOwner(); return ownerDataset.getRemoteArchiveURL(); } - - return null; + + return null; } - + public String getHarvestingDescription() { if (isHarvested()) { Dataset ownerDataset = this.getOwner(); return ownerDataset.getHarvestingDescription(); } - + return null; } - + @Override public boolean equals(Object object) { if (!(object instanceof DataFile)) { @@ -589,12 +593,12 @@ public int hashCode() { protected String toStringExtras() { return "name:" + getName(); } - + @Override public T accept( Visitor v ) { return v.visit(this); } - + public String getDisplayName() { // @todo should we show the published version label instead? // currently this method is not being used diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index cafe625536b..5190eb10be3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -21,6 +21,7 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.Query; +import javax.persistence.TypedQuery; /** * @@ -118,7 +119,7 @@ public class DataFileServiceBean implements java.io.Serializable { private static final String MIME_TYPE_UNDETERMINED_BINARY = "application/binary"; public DataFile find(Object pk) { - return (DataFile) em.find(DataFile.class, pk); + return em.find(DataFile.class, pk); } /*public DataFile findByMD5(String md5Value){ @@ -136,14 +137,14 @@ public List findByDatasetId(Long studyId) { Sure, we don't have *studies* any more, in 4.0; it's a tribute to the past. -- L.A. 
*/ - Query query = em.createQuery("select o from DataFile o where o.owner.id = :studyId order by o.id"); + TypedQuery query = em.createQuery("select o from DataFile o where o.owner.id = :studyId order by o.id", DataFile.class); query.setParameter("studyId", studyId); return query.getResultList(); } public List findIngestsInProgress() { if ( em.isOpen() ) { - Query query = em.createQuery("select object(o) from DataFile as o where o.ingestStatus =:scheduledStatusCode or o.ingestStatus =:progressStatusCode order by o.id"); + TypedQuery query = em.createQuery("select object(o) from DataFile as o where o.ingestStatus =:scheduledStatusCode or o.ingestStatus =:progressStatusCode order by o.id", DataFile.class); query.setParameter("scheduledStatusCode", DataFile.INGEST_STATUS_SCHEDULED); query.setParameter("progressStatusCode", DataFile.INGEST_STATUS_INPROGRESS); return query.getResultList(); @@ -160,7 +161,7 @@ public DataTable findDataTableByFileId(Long fileId) { } public List findAll() { - return em.createQuery("select object(o) from DataFile as o order by o.id").getResultList(); + return em.createQuery("select object(o) from DataFile as o order by o.id", DataFile.class).getResultList(); } public DataFile save(DataFile dataFile) { @@ -170,9 +171,9 @@ public DataFile save(DataFile dataFile) { } public Boolean isPreviouslyPublished(Long fileId){ - Query query = em.createQuery("select object(o) from FileMetadata as o where o.dataFile.id =:fileId"); + TypedQuery query = em.createQuery("select object(o) from FileMetadata as o where o.dataFile.id =:fileId", FileMetadata.class); query.setParameter("fileId", fileId); - List retList = query.getResultList(); + List retList = query.getResultList(); return (retList.size() > 1); } @@ -331,6 +332,9 @@ public boolean ingestableAsTabular(DataFile dataFile) { * main code base, so we can just go through a hard-coded list of mime * types. -- L.A. */ + if (dataFile == null) { + return false; + } String mimeType = dataFile.getContentType(); @@ -372,6 +376,10 @@ public String getFileClassById (Long fileId) { } public String getFileClass (DataFile file) { + if (file == null) { + return null; + } + if (isFileClassImage(file)) { return FILE_CLASS_IMAGE; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index 5aa10d2d475..d6ec64f3474 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -81,7 +81,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa } public static List listTags() { - List retlist = new ArrayList(); + List retlist = new ArrayList<>(); for(TagType t : TagType.values()) { retlist.add(TagTypeToLabels.get(t)); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTable.java b/src/main/java/edu/harvard/iq/dataverse/DataTable.java index 501624efab7..6c87b218d43 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataTable.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataTable.java @@ -31,78 +31,78 @@ /** * * @author Leonid Andreev - * + * * Largely based on the the DataTable entity from the DVN v2-3; * original author: Ellen Kraffmiller (2006). 
- * + * */ @Entity @Table(indexes = {@Index(columnList="datafile_id")}) public class DataTable implements Serializable { - + /** Creates a new instance of DataTable */ public DataTable() { } - + private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; - + /** - * unf: the Universal Numeric Signature of the + * unf: the Universal Numeric Signature of the * data table. */ @Column( nullable = false ) private String unf; - - /* + + /** * caseQuantity: Number of observations - */ - private Long caseQuantity; - - - /* + */ + private Long caseQuantity; + + + /** * varQuantity: Number of variables */ private Long varQuantity; - /* + /** * recordsPerCase: this property is specific to fixed-field data files * in which rows of observations may represented by *multiple* lines. - * The only known use case (so far): the fixed-width data files from - * ICPSR. + * The only known use case (so far): the fixed-width data files from + * ICPSR. + */ + private Long recordsPerCase; + + /** + * DataFile that stores the data for this DataTable */ - private Long recordsPerCase; - - /* - * DataFile that stores the data for this DataTable - */ - @ManyToOne - @JoinColumn(nullable=false) - private DataFile dataFile; - - /* - * DataVariables in this DataTable: + @ManyToOne + @JoinColumn(nullable=false) + private DataFile dataFile; + + /** + * DataVariables in this DataTable: */ @OneToMany (mappedBy="dataTable", cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}) @OrderBy ("fileOrder") private List dataVariables; - - /* + + /** * originalFileType: the format of the file from which this data table was * extracted (STATA, SPSS, R, etc.) - * Note: this was previously stored in the StudyFile. + * Note: this was previously stored in the StudyFile. */ private String originalFileFormat; - - /* + + /** * originalFormatVersion: the version/release number of the original file - * format; for example, STATA 9, SPSS 12, etc. + * format; for example, STATA 9, SPSS 12, etc. 
*/ private String originalFormatVersion; - + /* * Getter and Setter methods: */ @@ -124,20 +124,20 @@ public void setUnf(String unf) { public Long getCaseQuantity() { return this.caseQuantity; - } - + } + public void setCaseQuantity(Long caseQuantity) { this.caseQuantity = caseQuantity; } - + public Long getVarQuantity() { return this.varQuantity; } public void setVarQuantity(Long varQuantity) { this.varQuantity = varQuantity; - } - + } + public Long getRecordsPerCase() { return recordsPerCase; } @@ -145,25 +145,25 @@ public Long getRecordsPerCase() { public void setRecordsPerCase(Long recordsPerCase) { this.recordsPerCase = recordsPerCase; } - + public DataFile getDataFile() { return this.dataFile; } - + public void setDataFile(DataFile dataFile) { this.dataFile = dataFile; } - + public List getDataVariables() { return this.dataVariables; } - + public void setDataVariables(List dataVariables) { this.dataVariables = dataVariables; - } - + } + public String getOriginalFileFormat() { return originalFileFormat; } @@ -172,7 +172,7 @@ public void setOriginalFileFormat(String originalFileType) { this.originalFileFormat = originalFileType; } - + public String getOriginalFormatVersion() { return originalFormatVersion; } @@ -180,11 +180,11 @@ public String getOriginalFormatVersion() { public void setOriginalFormatVersion(String originalFormatVersion) { this.originalFormatVersion = originalFormatVersion; } - - /* + + /* * Custom overrides for hashCode(), equals() and toString() methods: */ - + @Override public int hashCode() { int hash = 0; @@ -205,5 +205,5 @@ public boolean equals(Object object) { public String toString() { return "edu.harvard.iq.dataverse.DataTable[ id=" + id + " ]"; } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 36e99510edb..415d285eef8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -42,7 +42,7 @@ public class Dataset extends DvObjectContainer { private static final long serialVersionUID = 1L; @OneToMany(mappedBy = "owner", cascade = CascadeType.MERGE) - private List files = new ArrayList(); + private List files = new ArrayList<>(); private String protocol; private String authority; @@ -56,7 +56,7 @@ public class Dataset extends DvObjectContainer { private String identifier; @OneToMany(mappedBy = "dataset", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("versionNumber DESC, minorVersionNumber DESC") - private List versions = new ArrayList(); + private List versions = new ArrayList<>(); @OneToOne(mappedBy = "dataset", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private DatasetLock datasetLock; @OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST}) @@ -87,7 +87,7 @@ public Dataset() { DatasetVersion datasetVersion = new DatasetVersion(); datasetVersion.setDataset(this); datasetVersion.setVersionState(DatasetVersion.VersionState.DRAFT); - datasetVersion.setFileMetadatas(new ArrayList()); + datasetVersion.setFileMetadatas(new ArrayList()); datasetVersion.setVersionNumber(new Long(1)); datasetVersion.setMinorVersionNumber(new Long(0)); versions.add(datasetVersion); @@ -218,7 +218,7 @@ public void setVersions(List versions) { private DatasetVersion createNewDatasetVersion(Template template) { DatasetVersion dsv = new DatasetVersion(); dsv.setVersionState(DatasetVersion.VersionState.DRAFT); - dsv.setFileMetadatas(new ArrayList()); + 
dsv.setFileMetadatas(new ArrayList()); DatasetVersion latestVersion = null; //if the latest version has values get them copied over @@ -273,7 +273,7 @@ private DatasetVersion createNewDatasetVersion(Template template) { if (template == null) { getVersions().add(0, dsv); } else { - this.setVersions(new ArrayList()); + this.setVersions(new ArrayList()); getVersions().add(0, dsv); } @@ -301,9 +301,9 @@ public DatasetVersion getCreateVersion() { dsv.setVersionState(DatasetVersion.VersionState.DRAFT); dsv.setDataset(this); dsv.setDatasetFields(dsv.initDatasetFields());; - dsv.setFileMetadatas(new ArrayList()); + dsv.setFileMetadatas(new ArrayList()); - this.setVersions(new ArrayList()); + this.setVersions(new ArrayList()); getVersions().add(0, dsv); return dsv; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index e9a2b261381..09031fe5730 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -170,7 +170,7 @@ public void setParentDatasetFieldCompoundValue(DatasetFieldCompoundValue parentD @OneToMany(mappedBy = "parentDatasetField", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("displayOrder ASC") - private List datasetFieldCompoundValues = new ArrayList(); + private List datasetFieldCompoundValues = new ArrayList<>(); public List getDatasetFieldCompoundValues() { return datasetFieldCompoundValues; @@ -182,7 +182,7 @@ public void setDatasetFieldCompoundValues(List datase @OneToMany(mappedBy = "datasetField", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("displayOrder ASC") - private List datasetFieldValues = new ArrayList(); + private List datasetFieldValues = new ArrayList<>(); public List getDatasetFieldValues() { return this.datasetFieldValues; @@ -194,7 +194,7 @@ public void setDatasetFieldValues(List datasetFieldValues) { @ManyToMany(cascade = {CascadeType.MERGE}) @JoinTable(indexes = {@Index(columnList="datasetfield_id"),@Index(columnList="controlledvocabularyvalues_id")}) - private List controlledVocabularyValues = new ArrayList(); + private List controlledVocabularyValues = new ArrayList<>(); public List getControlledVocabularyValues() { return controlledVocabularyValues; @@ -271,7 +271,7 @@ public String getCompoundDisplayValue() { } public List getValues() { - List returnList = new ArrayList(); + List returnList = new ArrayList<>(); if (!datasetFieldValues.isEmpty()) { for (DatasetFieldValue dsfv : datasetFieldValues) { returnList.add(dsfv.getValue()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java index f52c7ef17b0..2329bd36049 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java @@ -64,7 +64,7 @@ public static DatasetFieldCompoundValue createNewEmptyDatasetFieldCompoundValue( @OneToMany(mappedBy = "parentDatasetFieldCompoundValue", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("datasetFieldType ASC") - private List childDatasetFields = new ArrayList(); + private List childDatasetFields = new ArrayList<>(); public Long getId() { return id; @@ -134,7 +134,7 @@ public DatasetFieldCompoundValue copy(DatasetField parent) { public Map 
getDisplayValueMap() { // todo - this currently only supports child datasetfields with single values // need to determine how we would want to handle multiple - Map fieldMap = new LinkedHashMap(); + Map fieldMap = new LinkedHashMap<>(); for (DatasetField childDatasetField : childDatasetFields) { // skip the value if it is empty or N/A diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 2f2f7cbd73b..f818ad86385 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -29,7 +29,7 @@ public class DatasetFieldServiceBean implements java.io.Serializable { private static final String NAME_QUERY = "SELECT dsfType from DatasetFieldType dsfType where dsfType.name= :fieldName"; public List findAllAdvancedSearchFieldTypes() { - return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.id").getResultList(); + return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.id", DatasetFieldType.class).getResultList(); } public List findAllFacetableFieldTypes() { @@ -44,24 +44,24 @@ public List findFacetableFieldTypesByMetadataBlock(Long metada } public List findAllRequiredFields() { - return em.createQuery("select object(o) from DatasetFieldType as o where o.required = true order by o.id").getResultList(); + return em.createQuery("select object(o) from DatasetFieldType as o where o.required = true order by o.id", DatasetFieldType.class).getResultList(); } public List findAllOrderedById() { - return em.createQuery("select object(o) from DatasetFieldType as o order by o.id").getResultList(); + return em.createQuery("select object(o) from DatasetFieldType as o order by o.id", DatasetFieldType.class).getResultList(); } public List findAllOrderedByName() { - return em.createQuery("select object(o) from DatasetFieldType as o order by o.name").getResultList(); + return em.createQuery("select object(o) from DatasetFieldType as o order by o.name", DatasetFieldType.class).getResultList(); } public DatasetFieldType find(Object pk) { - return (DatasetFieldType) em.find(DatasetFieldType.class, pk); + return em.find(DatasetFieldType.class, pk); } public DatasetFieldType findByName(String name) { try { - return (DatasetFieldType) em.createQuery(NAME_QUERY).setParameter("fieldName", name).getSingleResult(); + return em.createQuery(NAME_QUERY, DatasetFieldType.class).setParameter("fieldName", name).getSingleResult(); } catch (NoResultException e) { return null; } @@ -107,7 +107,7 @@ public ForeignMetadataFieldMapping findFieldMapping(String formatName, String pa } public ControlledVocabularyValue findControlledVocabularyValue(Object pk) { - return (ControlledVocabularyValue) em.find(ControlledVocabularyValue.class, pk); + return em.find(ControlledVocabularyValue.class, pk); } /** @@ -155,8 +155,8 @@ public ControlledVocabAlternate findControlledVocabAlternateByControlledVocabula } catch (NoResultException e) { return null; } catch (NonUniqueResultException ex){ - List results = typedQuery.getResultList(); - return (ControlledVocabAlternate) results.get(0); + List results = typedQuery.getResultList(); + return results.get(0); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 
fa1aac3bd69..69f42d5c925 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -163,14 +163,14 @@ public void setDefaultContributorRole(DataverseRole defaultContributorRole) { @OneToMany(mappedBy = "dataverse",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true) @OrderBy("displayOrder") @NotEmpty(message="At least one contact is required.") - private List dataverseContacts = new ArrayList(); + private List dataverseContacts = new ArrayList<>(); @ManyToMany(cascade = {CascadeType.MERGE}) - private List metadataBlocks = new ArrayList(); + private List metadataBlocks = new ArrayList<>(); @OneToMany(mappedBy = "dataverse",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true) @OrderBy("displayOrder") - private List dataverseFacets = new ArrayList(); + private List dataverseFacets = new ArrayList<>(); @ManyToMany @JoinTable(name = "dataversesubjects", @@ -243,7 +243,7 @@ public void setDataverseSubjects(Set dataverseSubject @OneToMany(mappedBy = "dataverse") - private List dataverseFieldTypeInputLevels = new ArrayList(); + private List dataverseFieldTypeInputLevels = new ArrayList<>(); @ManyToOne @JoinColumn(nullable = true) @@ -291,7 +291,7 @@ public boolean isHarvested() { public List getParentGuestbooks() { - List retList = new ArrayList(); + List retList = new ArrayList<>(); Dataverse testDV = this; while (testDV.getOwner() != null){ retList.addAll(testDV.getOwner().getGuestbooks()); @@ -300,14 +300,14 @@ public List getParentGuestbooks() { } testDV = testDV.getOwner(); } - return retList; + return retList; } public List getAvailableGuestbooks() { //get all guestbooks - List retList = new ArrayList(); + List retList = new ArrayList<>(); Dataverse testDV = this; - List allGbs = new ArrayList(); + List allGbs = new ArrayList<>(); if (!this.guestbookRoot){ while (testDV.getOwner() != null){ @@ -367,7 +367,7 @@ public void setTemplates(List
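
(Illustrative sketch, not part of the patch.) Beyond the collection fields, the service-bean hunks swap the untyped javax.persistence.Query for TypedQuery, so getResultList() and getSingleResult() no longer need casts, and em.find() drops its cast as well. A minimal sketch of that pattern, assuming a hypothetical Widget entity that is not a class in the patch:

    import java.util.List;
    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import javax.persistence.NoResultException;
    import javax.persistence.TypedQuery;

    @Entity
    class Widget {
        @Id
        @GeneratedValue
        Long id;
        String name;
    }

    class WidgetQueries {

        // createQuery(jpql, Widget.class) returns a TypedQuery<Widget>, so
        // getResultList() is already List<Widget> -- no cast, no unchecked warning.
        List<Widget> findAllOrderedByName(EntityManager em) {
            TypedQuery<Widget> query = em.createQuery(
                    "select w from Widget w order by w.name", Widget.class);
            return query.getResultList();
        }

        // The same pattern for single results, mirroring findByName() in the patch.
        Widget findByName(EntityManager em, String name) {
            try {
                return em.createQuery(
                        "select w from Widget w where w.name = :name", Widget.class)
                        .setParameter("name", name)
                        .getSingleResult();
            } catch (NoResultException e) {
                return null;
            }
        }
    }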