Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use more generics, less trailing whitespace in the dataverse package (#775) #2422

Closed
wants to merge 1 commit (branch names omitted in this capture)
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 8 additions & 4 deletions src/main/java/edu/harvard/iq/dataverse/DataFile.java
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ public DataTable getDataTable() {

public void setDataTable(DataTable dt) {
if (this.getDataTables() == null) {
this.setDataTables( new ArrayList() );
this.setDataTables( new ArrayList<DataTable>() );
} else {
this.getDataTables().clear();
}
Expand Down Expand Up @@ -191,7 +191,7 @@ public IngestReport getIngestReport() {

public void setIngestReport(IngestReport report) {
if (ingestReports == null) {
ingestReports = new ArrayList();
ingestReports = new ArrayList<IngestReport>();
} else {
ingestReports.clear();
}
Expand Down Expand Up @@ -508,11 +508,11 @@ public String getMapItFileDownloadURL(String serverName){
return serverName + "/api/access/datafile/" + this.getId();
}

/*
/**
* If this is tabular data, the corresponding dataTable may have a UNF -
* "numeric fingerprint" signature - generated:
* @return the file's UNF or null if it doesn't exist
*/

public String getUnf() {
if (this.isTabularData()) {
// (isTabularData() method above verifies that that this file
Expand All @@ -530,6 +530,10 @@ public String getUnf() {
inverseJoinColumns = @JoinColumn(name = "authenticated_user_id"))
private List<AuthenticatedUser> fileAccessRequesters;

/**
* Get the users who requested access to this file.
* @return a list of AuthenticatedUsers who requested access to this file
*/
public List<AuthenticatedUser> getFileAccessRequesters() {
return fileAccessRequesters;
}
Expand Down
20 changes: 14 additions & 6 deletions src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;

/**
*
Expand Down Expand Up @@ -118,7 +119,7 @@ public class DataFileServiceBean implements java.io.Serializable {
private static final String MIME_TYPE_UNDETERMINED_BINARY = "application/binary";

public DataFile find(Object pk) {
return (DataFile) em.find(DataFile.class, pk);
return em.find(DataFile.class, pk);
}

/*public DataFile findByMD5(String md5Value){
Expand All @@ -136,14 +137,14 @@ public List<DataFile> findByDatasetId(Long studyId) {
Sure, we don't have *studies* any more, in 4.0; it's a tribute
to the past. -- L.A.
*/
Query query = em.createQuery("select o from DataFile o where o.owner.id = :studyId order by o.id");
TypedQuery<DataFile> query = em.createQuery("select o from DataFile o where o.owner.id = :studyId order by o.id", DataFile.class);
query.setParameter("studyId", studyId);
return query.getResultList();
}

public List<DataFile> findIngestsInProgress() {
if ( em.isOpen() ) {
Query query = em.createQuery("select object(o) from DataFile as o where o.ingestStatus =:scheduledStatusCode or o.ingestStatus =:progressStatusCode order by o.id");
TypedQuery<DataFile> query = em.createQuery("select object(o) from DataFile as o where o.ingestStatus =:scheduledStatusCode or o.ingestStatus =:progressStatusCode order by o.id", DataFile.class);
query.setParameter("scheduledStatusCode", DataFile.INGEST_STATUS_SCHEDULED);
query.setParameter("progressStatusCode", DataFile.INGEST_STATUS_INPROGRESS);
return query.getResultList();
Expand All @@ -160,7 +161,7 @@ public DataTable findDataTableByFileId(Long fileId) {
}

public List<DataFile> findAll() {
return em.createQuery("select object(o) from DataFile as o order by o.id").getResultList();
return em.createQuery("select object(o) from DataFile as o order by o.id", DataFile.class).getResultList();
}

public DataFile save(DataFile dataFile) {
Expand All @@ -170,9 +171,9 @@ public DataFile save(DataFile dataFile) {
}

public Boolean isPreviouslyPublished(Long fileId){
Query query = em.createQuery("select object(o) from FileMetadata as o where o.dataFile.id =:fileId");
TypedQuery<FileMetadata> query = em.createQuery("select object(o) from FileMetadata as o where o.dataFile.id =:fileId", FileMetadata.class);
query.setParameter("fileId", fileId);
List retList = query.getResultList();
List<FileMetadata> retList = query.getResultList();
return (retList.size() > 1);
}

Expand Down Expand Up @@ -331,6 +332,9 @@ public boolean ingestableAsTabular(DataFile dataFile) {
* main code base, so we can just go through a hard-coded list of mime
* types. -- L.A.
*/
if (dataFile == null) {
return false;
}

String mimeType = dataFile.getContentType();

Expand Down Expand Up @@ -372,6 +376,10 @@ public String getFileClassById (Long fileId) {
}

public String getFileClass (DataFile file) {
if (file == null) {
return null;
}

if (isFileClassImage(file)) {
return FILE_CLASS_IMAGE;
}
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa
}

public static List<String> listTags() {
List<String> retlist = new ArrayList();
List<String> retlist = new ArrayList<>();

for(TagType t : TagType.values()) {
retlist.add(TagTypeToLabels.get(t));
Expand Down
14 changes: 7 additions & 7 deletions src/main/java/edu/harvard/iq/dataverse/DataTable.java
Original file line number Diff line number Diff line change
Expand Up @@ -57,47 +57,47 @@ public DataTable() {
@Column( nullable = false )
private String unf;

/*
/**
* caseQuantity: Number of observations
*/
private Long caseQuantity;


/*
/**
* varQuantity: Number of variables
*/
private Long varQuantity;

/*
/**
* recordsPerCase: this property is specific to fixed-field data files
* in which rows of observations may represented by *multiple* lines.
* The only known use case (so far): the fixed-width data files from
* ICPSR.
*/
private Long recordsPerCase;

/*
/**
* DataFile that stores the data for this DataTable
*/
@ManyToOne
@JoinColumn(nullable=false)
private DataFile dataFile;

/*
/**
* DataVariables in this DataTable:
*/
@OneToMany (mappedBy="dataTable", cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST})
@OrderBy ("fileOrder")
private List<DataVariable> dataVariables;

/*
/**
* originalFileType: the format of the file from which this data table was
* extracted (STATA, SPSS, R, etc.)
* Note: this was previously stored in the StudyFile.
*/
private String originalFileFormat;

/*
/**
* originalFormatVersion: the version/release number of the original file
* format; for example, STATA 9, SPSS 12, etc.
*/
Expand Down
14 changes: 7 additions & 7 deletions src/main/java/edu/harvard/iq/dataverse/Dataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public class Dataset extends DvObjectContainer {
private static final long serialVersionUID = 1L;

@OneToMany(mappedBy = "owner", cascade = CascadeType.MERGE)
private List<DataFile> files = new ArrayList();
private List<DataFile> files = new ArrayList<>();

private String protocol;
private String authority;
Expand All @@ -56,7 +56,7 @@ public class Dataset extends DvObjectContainer {
private String identifier;
@OneToMany(mappedBy = "dataset", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
@OrderBy("versionNumber DESC, minorVersionNumber DESC")
private List<DatasetVersion> versions = new ArrayList();
private List<DatasetVersion> versions = new ArrayList<>();
@OneToOne(mappedBy = "dataset", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
private DatasetLock datasetLock;
@OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST})
Expand Down Expand Up @@ -87,7 +87,7 @@ public Dataset() {
DatasetVersion datasetVersion = new DatasetVersion();
datasetVersion.setDataset(this);
datasetVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
datasetVersion.setFileMetadatas(new ArrayList());
datasetVersion.setFileMetadatas(new ArrayList<FileMetadata>());
datasetVersion.setVersionNumber(new Long(1));
datasetVersion.setMinorVersionNumber(new Long(0));
versions.add(datasetVersion);
Expand Down Expand Up @@ -218,7 +218,7 @@ public void setVersions(List<DatasetVersion> versions) {
private DatasetVersion createNewDatasetVersion(Template template) {
DatasetVersion dsv = new DatasetVersion();
dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
dsv.setFileMetadatas(new ArrayList());
dsv.setFileMetadatas(new ArrayList<FileMetadata>());
DatasetVersion latestVersion = null;

//if the latest version has values get them copied over
Expand Down Expand Up @@ -273,7 +273,7 @@ private DatasetVersion createNewDatasetVersion(Template template) {
if (template == null) {
getVersions().add(0, dsv);
} else {
this.setVersions(new ArrayList());
this.setVersions(new ArrayList<DatasetVersion>());
getVersions().add(0, dsv);
}

Expand Down Expand Up @@ -301,9 +301,9 @@ public DatasetVersion getCreateVersion() {
dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
dsv.setDataset(this);
dsv.setDatasetFields(dsv.initDatasetFields());;
dsv.setFileMetadatas(new ArrayList());
dsv.setFileMetadatas(new ArrayList<FileMetadata>());

this.setVersions(new ArrayList());
this.setVersions(new ArrayList<DatasetVersion>());
getVersions().add(0, dsv);

return dsv;
Expand Down
8 changes: 4 additions & 4 deletions src/main/java/edu/harvard/iq/dataverse/DatasetField.java
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ public void setParentDatasetFieldCompoundValue(DatasetFieldCompoundValue parentD

@OneToMany(mappedBy = "parentDatasetField", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
@OrderBy("displayOrder ASC")
private List<DatasetFieldCompoundValue> datasetFieldCompoundValues = new ArrayList();
private List<DatasetFieldCompoundValue> datasetFieldCompoundValues = new ArrayList<>();

public List<DatasetFieldCompoundValue> getDatasetFieldCompoundValues() {
return datasetFieldCompoundValues;
Expand All @@ -182,7 +182,7 @@ public void setDatasetFieldCompoundValues(List<DatasetFieldCompoundValue> datase

@OneToMany(mappedBy = "datasetField", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
@OrderBy("displayOrder ASC")
private List<DatasetFieldValue> datasetFieldValues = new ArrayList();
private List<DatasetFieldValue> datasetFieldValues = new ArrayList<>();

public List<DatasetFieldValue> getDatasetFieldValues() {
return this.datasetFieldValues;
Expand All @@ -194,7 +194,7 @@ public void setDatasetFieldValues(List<DatasetFieldValue> datasetFieldValues) {

@ManyToMany(cascade = {CascadeType.MERGE})
@JoinTable(indexes = {@Index(columnList="datasetfield_id"),@Index(columnList="controlledvocabularyvalues_id")})
private List<ControlledVocabularyValue> controlledVocabularyValues = new ArrayList();
private List<ControlledVocabularyValue> controlledVocabularyValues = new ArrayList<>();

public List<ControlledVocabularyValue> getControlledVocabularyValues() {
return controlledVocabularyValues;
Expand Down Expand Up @@ -271,7 +271,7 @@ public String getCompoundDisplayValue() {
}

public List<String> getValues() {
List returnList = new ArrayList();
List<String> returnList = new ArrayList<>();
if (!datasetFieldValues.isEmpty()) {
for (DatasetFieldValue dsfv : datasetFieldValues) {
returnList.add(dsfv.getValue());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ public static DatasetFieldCompoundValue createNewEmptyDatasetFieldCompoundValue(

@OneToMany(mappedBy = "parentDatasetFieldCompoundValue", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
@OrderBy("datasetFieldType ASC")
private List<DatasetField> childDatasetFields = new ArrayList();
private List<DatasetField> childDatasetFields = new ArrayList<>();

public Long getId() {
return id;
Expand Down Expand Up @@ -134,7 +134,7 @@ public DatasetFieldCompoundValue copy(DatasetField parent) {
public Map<DatasetField,String> getDisplayValueMap() {
// todo - this currently only supports child datasetfields with single values
// need to determine how we would want to handle multiple
Map fieldMap = new LinkedHashMap();
Map<DatasetField, String> fieldMap = new LinkedHashMap<>();

for (DatasetField childDatasetField : childDatasetFields) {
// skip the value if it is empty or N/A
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ public class DatasetFieldServiceBean implements java.io.Serializable {
private static final String NAME_QUERY = "SELECT dsfType from DatasetFieldType dsfType where dsfType.name= :fieldName";

public List<DatasetFieldType> findAllAdvancedSearchFieldTypes() {
return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.id").getResultList();
return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.id", DatasetFieldType.class).getResultList();
}

public List<DatasetFieldType> findAllFacetableFieldTypes() {
Expand All @@ -44,24 +44,24 @@ public List<DatasetFieldType> findFacetableFieldTypesByMetadataBlock(Long metada
}

public List<DatasetFieldType> findAllRequiredFields() {
return em.createQuery("select object(o) from DatasetFieldType as o where o.required = true order by o.id").getResultList();
return em.createQuery("select object(o) from DatasetFieldType as o where o.required = true order by o.id", DatasetFieldType.class).getResultList();
}

public List<DatasetFieldType> findAllOrderedById() {
return em.createQuery("select object(o) from DatasetFieldType as o order by o.id").getResultList();
return em.createQuery("select object(o) from DatasetFieldType as o order by o.id", DatasetFieldType.class).getResultList();
}

public List<DatasetFieldType> findAllOrderedByName() {
return em.createQuery("select object(o) from DatasetFieldType as o order by o.name").getResultList();
return em.createQuery("select object(o) from DatasetFieldType as o order by o.name", DatasetFieldType.class).getResultList();
}

public DatasetFieldType find(Object pk) {
return (DatasetFieldType) em.find(DatasetFieldType.class, pk);
return em.find(DatasetFieldType.class, pk);
}

public DatasetFieldType findByName(String name) {
try {
return (DatasetFieldType) em.createQuery(NAME_QUERY).setParameter("fieldName", name).getSingleResult();
return em.createQuery(NAME_QUERY, DatasetFieldType.class).setParameter("fieldName", name).getSingleResult();
} catch (NoResultException e) {
return null;
}
Expand Down Expand Up @@ -107,7 +107,7 @@ public ForeignMetadataFieldMapping findFieldMapping(String formatName, String pa
}

public ControlledVocabularyValue findControlledVocabularyValue(Object pk) {
return (ControlledVocabularyValue) em.find(ControlledVocabularyValue.class, pk);
return em.find(ControlledVocabularyValue.class, pk);
}

/**
Expand Down Expand Up @@ -155,8 +155,8 @@ public ControlledVocabAlternate findControlledVocabAlternateByControlledVocabula
} catch (NoResultException e) {
return null;
} catch (NonUniqueResultException ex){
List results = typedQuery.getResultList();
return (ControlledVocabAlternate) results.get(0);
List<ControlledVocabAlternate> results = typedQuery.getResultList();
return results.get(0);
}
}

Expand Down
Loading