[Spotless] Applying Google Code Format for common #7 (#1940)
* Spotless apply for common directory.

Signed-off-by: Mitchell Gale <Mitchell.Gale@improving.com>

* Ignoring checkstyle for common

Signed-off-by: Mitchell Gale <Mitchell.Gale@improving.com>

* Spotless apply on string utils.

Signed-off-by: Mitchell Gale <Mitchell.Gale@improving.com>

* Typo fix

Signed-off-by: Yury-Fridlyand <yury.fridlyand@improving.com>

---------

Signed-off-by: Mitchell Gale <Mitchell.Gale@improving.com>
Signed-off-by: Yury-Fridlyand <yury.fridlyand@improving.com>
Co-authored-by: Yury-Fridlyand <yury.fridlyand@improving.com>
MitchellGale and Yury-Fridlyand committed Aug 14, 2023
1 parent 245c4f8 commit e377cf2
Showing 28 changed files with 421 additions and 495 deletions.
3 changes: 2 additions & 1 deletion build.gradle
@@ -84,7 +84,8 @@ repositories {
spotless {
java {
target fileTree('.') {
include 'datasources/**/*.java',
include 'common/**/*.java',
'datasources/**/*.java',
'core/**/*.java'
exclude '**/build/**', '**/build-*/**'
}
7 changes: 6 additions & 1 deletion common/build.gradle
@@ -31,6 +31,11 @@ repositories {
mavenCentral()
}

// Being ignored as a temporary measure before being removed in favour of
// spotless https://github.com/opensearch-project/sql/issues/1101
checkstyleTest.ignoreFailures = true
checkstyleMain.ignoreFailures = true

dependencies {
api "org.antlr:antlr4-runtime:4.7.1"
api group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
@@ -62,4 +67,4 @@ configurations.all {
resolutionStrategy.force "org.apache.httpcomponents:httpcore:4.4.13"
resolutionStrategy.force "joda-time:joda-time:2.10.12"
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.36"
}
}
@@ -3,7 +3,6 @@
* SPDX-License-Identifier: Apache-2.0
*/


package org.opensearch.sql.common.antlr;

import org.antlr.v4.runtime.CharStream;
@@ -3,7 +3,6 @@
* SPDX-License-Identifier: Apache-2.0
*/


package org.opensearch.sql.common.antlr;

import java.util.Locale;
@@ -3,7 +3,6 @@
* SPDX-License-Identifier: Apache-2.0
*/


package org.opensearch.sql.common.antlr;

public class SyntaxCheckException extends RuntimeException {
@@ -31,15 +31,17 @@ public class AwsSigningInterceptor implements Interceptor {
private static final Logger LOG = LogManager.getLogger();

/**
* AwsSigningInterceptor which intercepts http requests
* and adds required headers for sigv4 authentication.
* AwsSigningInterceptor which intercepts http requests and adds required headers for sigv4
* authentication.
*
* @param awsCredentialsProvider awsCredentialsProvider.
* @param region region.
* @param serviceName serviceName.
*/
public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvider,
@NonNull String region, @NonNull String serviceName) {
public AwsSigningInterceptor(
@NonNull AWSCredentialsProvider awsCredentialsProvider,
@NonNull String region,
@NonNull String serviceName) {
this.okHttpAwsV4Signer = new OkHttpAwsV4Signer(region, serviceName);
this.awsCredentialsProvider = awsCredentialsProvider;
}
@@ -48,25 +50,27 @@ public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvi
public Response intercept(Interceptor.Chain chain) throws IOException {
Request request = chain.request();

DateTimeFormatter timestampFormat = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'")
.withZone(ZoneId.of("GMT"));
DateTimeFormatter timestampFormat =
DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'").withZone(ZoneId.of("GMT"));


Request.Builder newRequestBuilder = request.newBuilder()
.addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now()))
.addHeader("host", request.url().host());
Request.Builder newRequestBuilder =
request
.newBuilder()
.addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now()))
.addHeader("host", request.url().host());

AWSCredentials awsCredentials = awsCredentialsProvider.getCredentials();
if (awsCredentialsProvider instanceof STSAssumeRoleSessionCredentialsProvider) {
newRequestBuilder.addHeader("x-amz-security-token",
newRequestBuilder.addHeader(
"x-amz-security-token",
((STSAssumeRoleSessionCredentialsProvider) awsCredentialsProvider)
.getCredentials()
.getSessionToken());
}
Request newRequest = newRequestBuilder.build();
Request signed = okHttpAwsV4Signer.sign(newRequest,
awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey());
Request signed =
okHttpAwsV4Signer.sign(
newRequest, awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey());
return chain.proceed(signed);
}

}
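
The Javadoc above describes what the interceptor does: add the headers required for SigV4 authentication. For orientation, a minimal sketch of attaching it to an OkHttp client follows; the package name, credentials provider choice, region, and service name are illustrative assumptions, not taken from this diff.

```java
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import okhttp3.OkHttpClient;
// Package assumed for illustration; the file path is not shown in this excerpt.
import org.opensearch.sql.common.interceptors.AwsSigningInterceptor;

public class SigningClientSketch {
  public static void main(String[] args) {
    // "us-east-1" and "es" (the signing name commonly used for OpenSearch domains)
    // are placeholder values.
    OkHttpClient client =
        new OkHttpClient.Builder()
            .addInterceptor(
                new AwsSigningInterceptor(
                    new DefaultAWSCredentialsProviderChain(), "us-east-1", "es"))
            .build();

    // Requests sent through this client now carry x-amz-date and host headers
    // (plus x-amz-security-token for STS credentials) and are SigV4-signed.
    System.out.println("Interceptors registered: " + client.interceptors().size());
  }
}
```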
@@ -22,13 +22,11 @@ public BasicAuthenticationInterceptor(@NonNull String username, @NonNull String
this.credentials = Credentials.basic(username, password);
}


@Override
public Response intercept(Interceptor.Chain chain) throws IOException {
Request request = chain.request();
Request authenticatedRequest = request.newBuilder()
.header("Authorization", credentials).build();
Request authenticatedRequest =
request.newBuilder().header("Authorization", credentials).build();
return chain.proceed(authenticatedRequest);
}

}
57 changes: 27 additions & 30 deletions common/src/main/java/org/opensearch/sql/common/grok/Converter.java
@@ -23,9 +23,7 @@
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
* Convert String argument to the right type.
*/
/** Convert String argument to the right type. */
public class Converter {

public enum Type {
@@ -51,13 +49,13 @@ public enum Type {
private static final Pattern SPLITTER = Pattern.compile("[:;]");

private static final Map<String, Type> TYPES =
Arrays.stream(Type.values())
.collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t));
Arrays.stream(Type.values()).collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t));

private static final Map<String, Type> TYPE_ALIASES =
Arrays.stream(Type.values())
.flatMap(type -> type.aliases.stream()
.map(alias -> new AbstractMap.SimpleEntry<>(alias, type)))
.flatMap(
type ->
type.aliases.stream().map(alias -> new AbstractMap.SimpleEntry<>(alias, type)))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

private static Type getType(String key) {
@@ -69,34 +67,30 @@ private static Type getType(String key) {
return type;
}

/**
* getConverters.
*/
public static Map<String, IConverter<? extends Object>>
getConverters(Collection<String> groupNames, Object... params) {
/** getConverters. */
public static Map<String, IConverter<? extends Object>> getConverters(
Collection<String> groupNames, Object... params) {
return groupNames.stream()
.filter(Converter::containsDelimiter)
.collect(Collectors.toMap(Function.identity(), key -> {
String[] list = splitGrokPattern(key);
IConverter<? extends Object> converter = getType(list[1]).converter;
if (list.length == 3) {
converter = converter.newConverter(list[2], params);
}
return converter;
}));
.collect(
Collectors.toMap(
Function.identity(),
key -> {
String[] list = splitGrokPattern(key);
IConverter<? extends Object> converter = getType(list[1]).converter;
if (list.length == 3) {
converter = converter.newConverter(list[2], params);
}
return converter;
}));
}

/**
* getGroupTypes.
*/
/** getGroupTypes. */
public static Map<String, Type> getGroupTypes(Collection<String> groupNames) {
return groupNames.stream()
.filter(Converter::containsDelimiter)
.map(Converter::splitGrokPattern)
.collect(Collectors.toMap(
l -> l[0],
l -> getType(l[1])
));
.collect(Collectors.toMap(l -> l[0], l -> getType(l[1])));
}

public static String extractKey(String key) {
@@ -120,7 +114,6 @@ default IConverter<T> newConverter(String param, Object... params) {
}
}


static class DateConverter implements IConverter<Instant> {

private final DateTimeFormatter formatter;
@@ -138,8 +131,12 @@ private DateConverter(DateTimeFormatter formatter, ZoneId timeZone) {

@Override
public Instant convert(String value) {
TemporalAccessor dt = formatter
.parseBest(value.trim(), ZonedDateTime::from, LocalDateTime::from, OffsetDateTime::from,
TemporalAccessor dt =
formatter.parseBest(
value.trim(),
ZonedDateTime::from,
LocalDateTime::from,
OffsetDateTime::from,
Instant::from,
LocalDate::from);
if (dt instanceof ZonedDateTime) {
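
The getGroupTypes and getConverters helpers above key off grok group names of the form name:type[:param]. A minimal usage sketch, assuming the Type enum includes an INT constant, as in the upstream java-grok library this code derives from:

```java
import java.util.List;
import java.util.Map;
import org.opensearch.sql.common.grok.Converter;

public class ConverterSketch {
  public static void main(String[] args) {
    // Plain group names are skipped; "name:type" entries map to their declared type.
    Map<String, Converter.Type> types =
        Converter.getGroupTypes(List.of("clientip", "bytes:int"));

    // Expected under the stated assumption: {bytes=INT}
    System.out.println(types);
  }
}
```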
66 changes: 27 additions & 39 deletions common/src/main/java/org/opensearch/sql/common/grok/Grok.java
@@ -16,36 +16,29 @@
import org.opensearch.sql.common.grok.Converter.IConverter;

/**
* {@code Grok} parse arbitrary text and structure it.
* <br>
* {@code Grok} is simple API that allows you to easily parse logs
* and other files (single line). With {@code Grok},
* you can turn unstructured log and event data into structured data.
* {@code Grok} parse arbitrary text and structure it. <br>
* {@code Grok} is simple API that allows you to easily parse logs and other files (single line).
* With {@code Grok}, you can turn unstructured log and event data into structured data.
*
* @since 0.0.1
*/
public class Grok implements Serializable {
/**
* Named regex of the originalGrokPattern.
*/
/** Named regex of the originalGrokPattern. */
private final String namedRegex;

/**
* Map of the named regex of the originalGrokPattern
* with id = namedregexid and value = namedregex.
* Map of the named regex of the originalGrokPattern with id = namedregexid and value =
* namedregex.
*/
private final Map<String, String> namedRegexCollection;
/**
* Original {@code Grok} pattern (expl: %{IP}).
*/

/** Original {@code Grok} pattern (expl: %{IP}). */
private final String originalGrokPattern;
/**
* Pattern of the namedRegex.
*/

/** Pattern of the namedRegex. */
private final Pattern compiledNamedRegex;

/**
* {@code Grok} patterns definition.
*/
/** {@code Grok} patterns definition. */
private final Map<String, String> grokPatternDefinition;

public final Set<String> namedGroups;
@@ -54,19 +47,16 @@ public class Grok implements Serializable {

public final Map<String, IConverter<? extends Object>> converters;

/**
* only use in grok discovery.
*/
/** only use in grok discovery. */
private String savedPattern = "";

/**
* Grok.
*/
public Grok(String pattern,
String namedRegex,
Map<String, String> namedRegexCollection,
Map<String, String> patternDefinitions,
ZoneId defaultTimeZone) {
/** Grok. */
public Grok(
String pattern,
String namedRegex,
Map<String, String> namedRegexCollection,
Map<String, String> patternDefinitions,
ZoneId defaultTimeZone) {
this.originalGrokPattern = pattern;
this.namedRegex = namedRegex;
this.compiledNamedRegex = Pattern.compile(namedRegex);
@@ -132,8 +122,8 @@ public Map<String, String> getNamedRegexCollection() {
}

/**
* Match the given <tt>log</tt> with the named regex.
* And return the json representation of the matched element
* Match the given <tt>log</tt> with the named regex. And return the json representation of the
* matched element
*
* @param log : log to match
* @return map containing matches
@@ -144,8 +134,8 @@ public Map<String, Object> capture(String log) {
}

/**
* Match the given list of <tt>log</tt> with the named regex
* and return the list of json representation of the matched elements.
* Match the given list of <tt>log</tt> with the named regex and return the list of json
* representation of the matched elements.
*
* @param logs : list of log
* @return list of maps containing matches
@@ -159,8 +149,8 @@ public ArrayList<Map<String, Object>> capture(List<String> logs) {
}

/**
* Match the given <tt>text</tt> with the named regex
* {@code Grok} will extract data from the string and get an extence of {@link Match}.
* Match the given <tt>text</tt> with the named regex {@code Grok} will extract data from the
* string and get an extence of {@link Match}.
*
* @param text : Single line of log
* @return Grok Match
@@ -172,9 +162,7 @@ public Match match(CharSequence text) {

Matcher matcher = compiledNamedRegex.matcher(text);
if (matcher.find()) {
return new Match(
text, this, matcher, matcher.start(0), matcher.end(0)
);
return new Match(text, this, matcher, matcher.start(0), matcher.end(0));
}

return Match.EMPTY;
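
The class Javadoc above summarizes the Grok API: compile a pattern, then match or capture log lines. A minimal sketch, assuming the companion GrokCompiler class in the same package mirrors the upstream java-grok API (newInstance, registerDefaultPatterns, compile); only match and capture are confirmed by this diff.

```java
import java.util.Map;
import org.opensearch.sql.common.grok.Grok;
// GrokCompiler is assumed to sit alongside Grok with the upstream java-grok API.
import org.opensearch.sql.common.grok.GrokCompiler;

public class GrokSketch {
  public static void main(String[] args) {
    GrokCompiler compiler = GrokCompiler.newInstance();
    // Assumed to load the standard pattern set (%{IP}, %{WORD}, ...).
    compiler.registerDefaultPatterns();

    // Compile a pattern with two named groups, then capture a single log line.
    Grok grok = compiler.compile("%{IP:client} %{WORD:method}");
    Map<String, Object> captures = grok.capture("127.0.0.1 GET");

    // Expected to contain client=127.0.0.1 and method=GET (possibly among other groups).
    System.out.println(captures);
  }
}
```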
Diffs for the remaining changed files are not shown.
