Skip to content

Commit

Permalink
Merge pull request IQSS#7 from IQSS/master
Browse files Browse the repository at this point in the history
new code3
  • Loading branch information
Jian881219 committed Sep 4, 2014
2 parents 856af7b + 860d5a4 commit 01183da
Show file tree
Hide file tree
Showing 4 changed files with 84 additions and 10 deletions.
6 changes: 3 additions & 3 deletions scripts/api/data/metadatablocks/citation.tsv
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,8 @@
contributorType Supervisor 13
contributorType Work Package Leader 14
contributorType Other 15
authorIdentifierScheme ISNI 0
authorIdentifierScheme ORCID 1
authorIdentifierScheme ORCID 0
authorIdentifierScheme ISNI 1
language Abkhaz 0
language Afar 1
language Afrikaans 2
Expand Down Expand Up @@ -279,4 +279,4 @@
language Yoruba 181
language Zhuang, Chuang 182
language Zulu 183
language Not applicable 184
language Not applicable 184
21 changes: 21 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/Template.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.persistence.CascadeType;
Expand All @@ -18,6 +19,7 @@
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import javax.validation.constraints.Size;
import org.hibernate.validator.constraints.NotBlank;

/**
Expand Down Expand Up @@ -47,6 +49,7 @@ public Long getId() {
}

@NotBlank(message = "Please enter a name.")
@Size(max = 255, message = "Name must be at most 255 characters.")
private String name;

public String getName() {
Expand Down Expand Up @@ -276,6 +279,24 @@ public void setDatasetFields(List<DatasetField> datasetFields) {
this.datasetFields = datasetFields;
}

/**
 * Returns this template's dataset fields flattened into a single list,
 * including the child fields nested inside any compound values.
 * Delegates to the recursive private overload.
 *
 * @return a new list with every dataset field, in depth-first order
 */
public List<DatasetField> getFlatDatasetFields() {
return getFlatDatasetFields(getDatasetFields());
}

/**
 * Recursively flattens the given dataset fields: each field in
 * {@code dsfList} is added to the result, and for compound fields the
 * child fields of every compound value are flattened and appended too.
 *
 * @param dsfList the (possibly nested) dataset fields to flatten
 * @return a new list containing every field in depth-first order
 */
private List<DatasetField> getFlatDatasetFields(List<DatasetField> dsfList) {
    // Diamond operator instead of the raw 'new LinkedList()' — the raw
    // type produced an unchecked-conversion warning.
    List<DatasetField> retList = new LinkedList<>();
    for (DatasetField dsf : dsfList) {
        retList.add(dsf);
        if (dsf.getDatasetFieldType().isCompound()) {
            // A compound field carries one or more compound values,
            // each with its own child fields — flatten those as well.
            for (DatasetFieldCompoundValue compoundValue : dsf.getDatasetFieldCompoundValues()) {
                retList.addAll(getFlatDatasetFields(compoundValue.getChildDatasetFields()));
            }
        }
    }
    return retList;
}

@Override
public int hashCode() {
int hash = 0;
Expand Down
33 changes: 33 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,18 @@
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDateverseTemplateCommand;
import java.sql.Timestamp;
import java.util.Date;
import java.util.Set;
import javax.ejb.EJB;
import javax.ejb.EJBException;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;

/**
*
Expand Down Expand Up @@ -110,6 +115,34 @@ public void edit(TemplatePage.EditMode editMode) {
}

public String save() {

boolean dontSave = false;
ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
Validator validator = factory.getValidator();
for (DatasetField dsf : template.getFlatDatasetFields()) {
dsf.setValidationMessage(null); // clear out any existing validation message
Set<ConstraintViolation<DatasetField>> constraintViolations = validator.validate(dsf);
for (ConstraintViolation<DatasetField> constraintViolation : constraintViolations) {
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Validation Error", constraintViolation.getMessage()));
dsf.setValidationMessage(constraintViolation.getMessage());
dontSave = true;
break; // currently only support one message, so we can break out of the loop after the first constraint violation
}
for (DatasetFieldValue dsfv : dsf.getDatasetFieldValues()) {
dsfv.setValidationMessage(null); // clear out any existing validation message
Set<ConstraintViolation<DatasetFieldValue>> constraintViolations2 = validator.validate(dsfv);
for (ConstraintViolation<DatasetFieldValue> constraintViolation : constraintViolations2) {
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Validation Error", constraintViolation.getMessage()));
dsfv.setValidationMessage(constraintViolation.getMessage());
dontSave = true;
break; // currently only support one message, so we can break out of the loop after the first constraint violation
}
}
}
if (dontSave) {
return "";
}

Command<Dataverse> cmd;
try {
if (editMode == EditMode.CREATE) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -562,7 +562,7 @@ private File generateRotatedImage (File tabfile, int varcount, int casecount) th

int MAX_OUTPUT_STREAMS = 32;
int MAX_BUFFERED_BYTES = 10 * 1024 * 1024; // 10 MB - for now?
int MAX_COLUMN_BUFFER = 8192;
int MAX_COLUMN_BUFFER = 8 * 1024;

// offsetHeader will contain the byte offsets of the individual column
// vectors in the final rotated image file
Expand Down Expand Up @@ -604,9 +604,18 @@ private File generateRotatedImage (File tabfile, int varcount, int casecount) th
tokensize = token.getBytes().length;
if (bufferedSizes[varindex]+tokensize > MAX_COLUMN_BUFFER) {
// fill the buffer and dump its contents into the temp file:
// (do note that there may be *several* MAX_COLUMN_BUFFERs
// worth of bytes in the token!)

int tokenoffset = 0;

if (bufferedSizes[varindex] != MAX_COLUMN_BUFFER) {
System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex], bufferedSizes[varindex], MAX_COLUMN_BUFFER-bufferedSizes[varindex]);
}
tokenoffset = MAX_COLUMN_BUFFER-bufferedSizes[varindex];
System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex], bufferedSizes[varindex], tokenoffset);
} // (otherwise the buffer is already full, and we should
// simply dump it into the temp file, without adding any
// extra bytes to it)

File bufferTempFile = columnTempFiles[varindex];
if (bufferTempFile == null) {
bufferTempFile = File.createTempFile("columnBufferFile", "bytes");
Expand All @@ -618,18 +627,29 @@ private File generateRotatedImage (File tabfile, int varcount, int casecount) th
BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream (bufferTempFile, true));
outputStream.write(bufferedColumns[varindex], 0, MAX_COLUMN_BUFFER);
cachedfileSizes[varindex] += MAX_COLUMN_BUFFER;

// keep writing MAX_COLUMN_BUFFER-size chunks of bytes into
// the temp file, for as long as there's more than MAX_COLUMN_BUFFER
// bytes left in the token:

while (tokensize - tokenoffset > MAX_COLUMN_BUFFER) {
outputStream.write(token.getBytes(), tokenoffset, MAX_COLUMN_BUFFER);
cachedfileSizes[varindex] += MAX_COLUMN_BUFFER;
tokenoffset += MAX_COLUMN_BUFFER;
}

outputStream.close();

// buffer the remaining bytes and reset the buffered
// byte counter:

System.arraycopy(token.getBytes(),
MAX_COLUMN_BUFFER-bufferedSizes[varindex],
tokenoffset,
bufferedColumns[varindex],
0,
bufferedSizes[varindex] + tokensize - MAX_COLUMN_BUFFER);
tokensize - tokenoffset);

bufferedSizes[varindex] = bufferedSizes[varindex] + tokensize - MAX_COLUMN_BUFFER;
bufferedSizes[varindex] = tokensize - tokenoffset;

} else {
// continue buffering
Expand Down Expand Up @@ -803,7 +823,7 @@ private void reverseRotatedImage (File rotfile, int varcount, int casecount) thr

/**
* main() method, for testing
* usage: java edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator testfile.tab varcount casecount
* usage: java edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator testfile.tab varcount casecount column type
* make sure the CLASSPATH contains ...
*
*/
Expand Down

0 comments on commit 01183da

Please sign in to comment.