Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use StandardCharsets instead of charset names #10077

Merged
merged 15 commits into from
Aug 13, 2024
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/DataCitation.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
Expand Down Expand Up @@ -253,7 +254,7 @@ public String toBibtexString() {

public void writeAsBibtexCitation(OutputStream os) throws IOException {
// Use UTF-8
Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8"));
Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8));
if(getFileTitle() !=null && isDirect()) {
out.write("@incollection{");
} else {
Expand Down Expand Up @@ -317,7 +318,7 @@ public String toRISString() {

public void writeAsRISCitation(OutputStream os) throws IOException {
// Use UTF-8
Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8"));
Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8));
out.write("Provider: " + publisher + "\r\n");
out.write("Content: text/plain; charset=\"utf-8\"" + "\r\n");
// Using type "DATA" - see https://github.com/IQSS/dataverse/issues/4816
Expand Down
7 changes: 4 additions & 3 deletions src/main/java/edu/harvard/iq/dataverse/Shib.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import org.apache.commons.lang3.StringUtils;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
Expand Down Expand Up @@ -458,9 +459,9 @@ private String getRequiredValueFromAssertion(String key) throws Exception {
if (attributeValue.isEmpty()) {
throw new Exception(key + " was empty");
}
if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
attributeValue= new String( attributeValue.getBytes("ISO-8859-1"), "UTF-8");
}
if (systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
attributeValue= new String( attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
}
String trimmedValue = attributeValue.trim();
logger.fine("The SAML assertion for \"" + key + "\" (required) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\".");
return trimmedValue;
Expand Down
5 changes: 3 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/api/Admin.java
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@

import java.io.InputStream;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
Expand Down Expand Up @@ -1153,7 +1154,7 @@ public void write(OutputStream os) throws IOException,
os.write(",\n".getBytes());
}

os.write(output.build().toString().getBytes("UTF8"));
os.write(output.build().toString().getBytes(StandardCharsets.UTF_8));

if (!wroteObject) {
wroteObject = true;
Expand Down Expand Up @@ -1267,7 +1268,7 @@ public void write(OutputStream os) throws IOException,
os.write(",\n".getBytes());
}

os.write(output.build().toString().getBytes("UTF8"));
os.write(output.build().toString().getBytes(StandardCharsets.UTF_8));

if (!wroteObject) {
wroteObject = true;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package edu.harvard.iq.dataverse.authorization.providers.builtin;

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.lang3.RandomStringUtils;
Expand Down Expand Up @@ -36,13 +36,13 @@ public interface Algorithm {
/**
 * Hashes the given plain-text password with the legacy SHA-1 digest
 * ("SHA" provider name) and returns the raw digest Base64-encoded.
 *
 * NOTE(review): SHA-1 is not an acceptable password hash for new code
 * (use bcrypt/scrypt/argon2); this appears to be kept only for
 * verifying passwords stored by older Dataverse versions — confirm.
 *
 * @param plainText the password to hash; must not be null
 * @return Base64 encoding of the SHA-1 digest of the UTF-8 bytes
 * @throws RuntimeException if the SHA algorithm is unavailable
 *         (never on a compliant JRE, which must ship SHA-1)
 */
public String encrypt(String plainText) {
    try {
        MessageDigest md = MessageDigest.getInstance("SHA");
        // StandardCharsets.UTF_8 cannot fail at runtime, unlike the
        // String charset-name overload, so UnsupportedEncodingException
        // no longer needs to be caught.
        md.update(plainText.getBytes(StandardCharsets.UTF_8));
        byte[] raw = md.digest();
        return Base64.getEncoder().encodeToString(raw);
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException(e);
    }
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,30 +20,16 @@

package edu.harvard.iq.dataverse.dataaccess;

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.datavariable.DataVariable;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.logging.Logger;
import java.util.regex.Matcher;

Expand Down
24 changes: 9 additions & 15 deletions src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
Expand Down Expand Up @@ -318,17 +318,17 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
int width = fullSizeImage.getWidth();
int height = fullSizeImage.getHeight();
FileChannel src = null;
try {
src = new FileInputStream(tmpFile).getChannel();
} catch (FileNotFoundException ex) {
try (FileInputStream fis = new FileInputStream(tmpFile)) {
src = fis.getChannel();
} catch (IOException ex) {
IOUtils.closeQuietly(inputStream);
logger.severe(ex.getMessage());
return null;
}
FileChannel dest = null;
try {
dest = new FileOutputStream(tmpFile).getChannel();
} catch (FileNotFoundException ex) {
try (FileInputStream fis = new FileInputStream(tmpFile)) {
bencomp marked this conversation as resolved.
Show resolved Hide resolved
dest = fis.getChannel();
} catch (IOException ex) {
IOUtils.closeQuietly(inputStream);
logger.severe(ex.getMessage());
return null;
Expand Down Expand Up @@ -409,14 +409,8 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) {
String base64Image = datasetThumbnail.getBase64image();
String leadingStringToRemove = FileUtil.DATA_URI_SCHEME;
String encodedImg = base64Image.substring(leadingStringToRemove.length());
byte[] decodedImg = null;
try {
decodedImg = Base64.getDecoder().decode(encodedImg.getBytes("UTF-8"));
logger.fine("returning this many bytes for " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length);
} catch (UnsupportedEncodingException ex) {
logger.info("dataset thumbnail could not be decoded for dataset id " + dataset.getId() + ": " + ex);
return null;
}
byte[] decodedImg = Base64.getDecoder().decode(encodedImg.getBytes(StandardCharsets.UTF_8));
logger.fine("returning this many bytes for " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length);
ByteArrayInputStream nonDefaultDatasetThumbnail = new ByteArrayInputStream(decodedImg);
logger.fine("For dataset id " + dataset.getId() + " a thumbnail was found and is being returned.");
return nonDefaultDatasetThumbnail;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import edu.harvard.iq.dataverse.DatasetLock.Reason;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.workflow.step.Failure;
Expand All @@ -14,7 +13,7 @@
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
Expand All @@ -32,7 +31,7 @@
import org.duracloud.error.ContentStoreException;

@RequiredPermissions(Permission.PublishDataset)
public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {

private static final Logger logger = Logger.getLogger(DuraCloudSubmitToArchiveCommand.class.getName());
private static final String DEFAULT_PORT = "443";
Expand Down Expand Up @@ -117,7 +116,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
public void run() {
try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) {

dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8")));
dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8));
dataciteOut.close();
success=true;
} catch (Exception e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.settings.JvmSettings;
Expand All @@ -26,14 +25,14 @@
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.util.Map;
import java.util.logging.Logger;

@RequiredPermissions(Permission.PublishDataset)
public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {

private static final Logger logger = Logger.getLogger(GoogleCloudSubmitToArchiveCommand.class.getName());
private static final String GOOGLECLOUD_BUCKET = ":GoogleCloudBucket";
Expand Down Expand Up @@ -82,7 +81,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
public void run() {
try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) {

dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8")));
dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8));
dataciteOut.close();
success = true;
} catch (Exception e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import edu.harvard.iq.dataverse.DatasetLock.Reason;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.util.bagit.BagGenerator;
Expand All @@ -17,6 +16,7 @@
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.logging.Logger;

Expand All @@ -41,7 +41,7 @@
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

@RequiredPermissions(Permission.PublishDataset)
public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {

private static final Logger logger = Logger.getLogger(S3SubmitToArchiveCommand.class.getName());
private static final String S3_CONFIG = ":S3ArchiverConfig";
Expand Down Expand Up @@ -86,7 +86,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t

spaceName = getSpaceName(dataset);
String dataciteXml = getDataCiteXml(dv);
try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes("UTF-8"))) {
try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes(StandardCharsets.UTF_8))) {
// Add datacite.xml file
ObjectMetadata om = new ObjectMetadata();
om.setContentLength(dataciteIn.available());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@
import io.gdcc.spi.export.Exporter;
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import java.util.Optional;

import jakarta.json.JsonObject;
import jakarta.ws.rs.core.MediaType;


Expand All @@ -35,7 +35,7 @@ public String getDisplayName(Locale locale) {
@Override
public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
try{
outputStream.write(dataProvider.getDatasetJson().toString().getBytes("UTF8"));
outputStream.write(dataProvider.getDatasetJson().toString().getBytes(StandardCharsets.UTF_8));
outputStream.flush();
} catch (Exception e){
throw new ExportException("Unknown exception caught during JSON export.");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@
import edu.harvard.iq.dataverse.util.BundleUtil;

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import java.util.Optional;
import java.util.logging.Logger;

import jakarta.json.JsonObject;
import jakarta.ws.rs.core.MediaType;

@AutoService(Exporter.class)
Expand All @@ -25,7 +25,7 @@ public class OAI_OREExporter implements Exporter {
public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream)
throws ExportException {
try {
outputStream.write(dataProvider.getDatasetORE().toString().getBytes("UTF8"));
outputStream.write(dataProvider.getDatasetORE().toString().getBytes(StandardCharsets.UTF_8));
outputStream.flush();
} catch (Exception e) {
logger.severe(e.getMessage());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import java.util.logging.Logger;
import jakarta.ws.rs.core.MediaType;
Expand Down Expand Up @@ -75,7 +76,7 @@ public class SchemaDotOrgExporter implements Exporter {
@Override
public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
try {
outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes("UTF8"));
outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes(StandardCharsets.UTF_8));
} catch (IOException ex) {
logger.info("IOException calling outputStream.write: " + ex);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.lang.reflect.*;
import java.util.regex.*;
Expand Down Expand Up @@ -252,7 +253,7 @@ public String testDTAformat(MappedByteBuffer buff) {
try {
headerBuffer = new byte[STATA_13_HEADER.length()];
buff.get(headerBuffer, 0, STATA_13_HEADER.length());
headerString = new String(headerBuffer, "US-ASCII");
headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
} catch (Exception ex) {
// probably a buffer underflow exception;
// we don't have to do anything... null will
Expand All @@ -273,7 +274,7 @@ public String testDTAformat(MappedByteBuffer buff) {
try {
headerBuffer = new byte[STATA_14_HEADER.length()];
buff.get(headerBuffer, 0, STATA_14_HEADER.length());
headerString = new String(headerBuffer, "US-ASCII");
headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
} catch (Exception ex) {
// probably a buffer underflow exception;
// we don't have to do anything... null will
Expand All @@ -292,7 +293,7 @@ public String testDTAformat(MappedByteBuffer buff) {
try {
headerBuffer = new byte[STATA_15_HEADER.length()];
buff.get(headerBuffer, 0, STATA_15_HEADER.length());
headerString = new String(headerBuffer, "US-ASCII");
headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
} catch (Exception ex) {
// probably a buffer underflow exception;
// we don't have to do anything... null will
Expand Down
Loading
Loading