Merge pull request #194 from Hurence/feature/it-tests
it tests
oalam authored Oct 20, 2021
2 parents dda6bda + 1686d9b commit fc82d0d
Showing 14 changed files with 58 additions and 66 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/integ-tests.yml
@@ -5,12 +5,12 @@ name: integration tests

on:
push:
branches: [ master, release-1.3.5 ]
branches: [ master ]
pull_request:
branches: [ master, release-1.3.5 ]
branches: [ master ]
workflow_dispatch:
inputs:
branches: [ master, release-1.3.5 ]
branches: [ master]


jobs:
4 changes: 3 additions & 1 deletion .github/workflows/unit-tests.yml
@@ -1,6 +1,8 @@
name: build

on: [push]
on:
push:
branches-ignore:

jobs:
build:
7 changes: 5 additions & 2 deletions historian-server/pom.xml
@@ -16,7 +16,6 @@
<vertx.version>3.8.1</vertx.version>
<vertx-swagger-router.version>1.5.0</vertx-swagger-router.version>
<solr.version>8.2.0</solr.version>
<test.containers>1.12.2</test.containers>
</properties>

<dependencyManagement>
@@ -123,6 +122,10 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.restlet.jee</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -181,7 +184,7 @@
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<version>1.12.2</version>
<version>${test.containers}</version>
<scope>test</scope>
</dependency>
</dependencies>
(changed file; path not shown)
@@ -24,7 +24,6 @@
import io.vertx.reactivex.ext.web.codec.BodyCodec;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -33,6 +32,7 @@
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.DockerComposeContainer;

import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
(changed file; path not shown)
@@ -24,13 +24,13 @@
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.DockerComposeContainer;

import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
9 changes: 0 additions & 9 deletions historian-spark/docker/README.md
@@ -29,15 +29,6 @@ docker tag hurence/historian-spark:latest hurence/historian-spark:1.3.8
Deploy the image to Docker hub
------------------------------

tag the image as latest

verify image build :

```shell script
docker images
docker tag <IMAGE_ID> latest
```

then login and push the latest image

```shell script
22 changes: 9 additions & 13 deletions historian-spark/pom.xml
@@ -174,14 +174,14 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- <exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>*</artifactId>
</exclusion>-->
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.restlet.jee</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -224,6 +224,10 @@
<groupId>com.google.guava</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.restlet.jee</groupId>
<artifactId>*</artifactId>
</exclusion>

<!-- <exclusion>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
@@ -299,14 +303,6 @@
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</exclusion>
<!-- <exclusion>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
<!-- <artifactId>hadoop-annotations</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
<!-- <artifactId>hadoop-hdfs-client</artifactId>-->
<!-- </exclusion>-->
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>*</artifactId>
@@ -322,7 +318,7 @@
<dependency>
<groupId>com.hurence.historian</groupId>
<artifactId>historian-tools</artifactId>
<version>1.3.8</version>
<version>${project.version}</version>
</dependency>
</dependencies>

(changed file: class Compactor; path not shown)
@@ -118,7 +118,6 @@ class Compactor(val options: CompactorConf) extends Serializable with Runnable {

val measuresDS = convertChunksToMeasures(uncompactedChunks)
val compactedChunksDS = convertMeasuresToChunks(measuresDS)
compactedChunksDS.show(10,false)

writeCompactedChunksToSolr(compactedChunksDS)
deleteOldChunks(day)
(changed file: class SolrChunksWriter; path not shown)
@@ -12,12 +12,12 @@ import com.hurence.timeseries.model.Definitions._
import scala.collection.JavaConverters._

/**
* val options.config = Map(
* "zkhost" -> options.zkHosts,
* "collection" -> options.collectionName
* )
*
*/
* val options.config = Map(
* "zkhost" -> options.zkHosts,
* "collection" -> options.collectionName
* )
*
*/
class SolrChunksWriter extends Writer[Chunk] {


@@ -32,22 +32,15 @@ class SolrChunksWriter extends Writer[Chunk] {
else
options.config

var someTags : Boolean = true
val tagCols : List[Column] = if (options.config.contains(TAG_NAMES)) {
options.config(TAG_NAMES).split(",").toList
.map(tag => col(FIELD_TAGS)(tag).as(tag))
} else {
// No tags specified
someTags = false
List[Column]()
}
// build column names with tags
val mainCols = FIELDS.asScala.toList.map(name => col(name).as(getColumnFromField(name)))
val keysDF = ds.select(explode(map_keys(col(FIELD_TAGS)))).distinct()
val keys = keysDF.collect().map(f=>f.get(0))
val tagCols = keys.map(f=> col(FIELD_TAGS).getItem(f).as(f.toString)).toList

val mainCols = FIELDS.asScala.toList
.map(name => col(name).as(getColumnFromField(name)))

// todo manage dateFormatbucket and date interval
// write the dataset to SolR
ds
.select(mainCols ::: tagCols: _*)
.select(mainCols ::: tagCols:_*)
.withColumn(SOLR_COLUMN_VALUE, base64(col(SOLR_COLUMN_VALUE)))
.write
.format("solr")
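For context, the rewritten block above no longer reads tag names from the TAG_NAMES option: it derives them from the data, by collecting the distinct keys of the tags map column and projecting each key as its own flat column. Below is a minimal, self-contained Spark sketch of that technique; the object, column, and app names are illustrative, not the project's API.

```scala
import org.apache.spark.sql.{Column, DataFrame, SparkSession}
import org.apache.spark.sql.functions.{col, explode, map_keys}

object TagColumnsSketch {

  // Turn a map<string,string> column into one flat column per distinct key,
  // e.g. tags["host"] becomes a column named "host".
  def withTagColumns(ds: DataFrame, tagsField: String = "tags"): DataFrame = {
    // collect every distinct key present in the map column across the dataset
    val keys = ds.select(explode(map_keys(col(tagsField)))).distinct()
      .collect().map(_.get(0)).toList
    val tagCols: List[Column] = keys.map(k => col(tagsField).getItem(k).as(k.toString))
    ds.select(col("*") :: tagCols: _*)
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("tags-sketch").getOrCreate()
    import spark.implicits._

    val ds = Seq(
      ("metric_a", Map("host" -> "h1", "dc" -> "eu")),
      ("metric_b", Map("host" -> "h2"))
    ).toDF("name", "tags")

    withTagColumns(ds).show(truncate = false) // columns: name, tags, host, dc
    spark.stop()
  }
}
```

The writer in the diff then base64-encodes the chunk value column and writes the frame with the "solr" data source; the sketch only covers the tag-flattening step.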
(changed file: test class containing testCompactorHourly; path not shown)
@@ -1147,7 +1147,7 @@ public void testCompactorHourly(SparkSession sparkSession) throws InterruptedExc
SolrITHelper.COLLECTION_HISTORIAN,
"",
"compactor",
"yyyy-MM-dd.HH");
"yyyy-MM-dd");

Compactor compactor = new Compactor(conf);
compactor.setSolrClient(cloudClient);
11 changes: 5 additions & 6 deletions integration-tests/pom.xml
@@ -11,11 +11,6 @@
<artifactId>integration-tests</artifactId>
<packaging>jar</packaging>

<properties>
<scala.binary.version>2.11</scala.binary.version>
<solr.version>8.2.0</solr.version>
<test.containers>1.12.2</test.containers>
</properties>

<dependencies>
<!-- HISTORIAN modules -->
@@ -54,6 +49,10 @@
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<groupId>org.restlet.jee</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--SPARK-->
@@ -93,7 +92,7 @@
<dependency>
<groupId>com.hurence.historian</groupId>
<artifactId>historian-tools</artifactId>
<version>1.3.8</version>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
</dependencies>
(changed file; path not shown)
@@ -16,12 +16,12 @@
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.SolrParams;
import org.jetbrains.annotations.NotNull;
import org.noggit.JSONUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.DockerComposeContainer;

import javax.validation.constraints.NotNull;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
(changed file: class SolrExtension; path not shown)
@@ -41,7 +41,7 @@ public class SolrExtension implements BeforeAllCallback, AfterAllCallback, Param
public final static String SOLR1_SERVICE_NAME = "solr1_1";
public final static int SOLR_1_PORT = 8983;
public final static int SOLR_2_PORT = 8983;
public final static String ZOOKEEPER_SERVICE_NAME = "zookeeper_1";
public final static String ZOOKEEPER_SERVICE_NAME = "zoo1_1";
public final static int ZOOKEEPER_PORT = 2181;
private final static String IMAGE = "solr:8";
public final static String SOLR_CONF_TEMPLATE_HISTORIAN_CURRENT = "historian-current";
@@ -77,11 +77,11 @@ public void afterAll(ExtensionContext extensionContext) throws Exception {
@Override
public void beforeAll(ExtensionContext extensionContext) throws Exception {
this.dockerComposeContainer = new DockerComposeContainer(
new File(getClass().getResource("/shared-resources/docker-compose-test.yml").getFile())
new File(getClass().getResource("/docker-compose-for-grafana-tests.yml").getFile())
)
.withExposedService(ZOOKEEPER_SERVICE_NAME, ZOOKEEPER_PORT, Wait.forListeningPort())
;/* .withExposedService(ZOOKEEPER_SERVICE_NAME, ZOOKEEPER_PORT, Wait.forListeningPort())
.withExposedService(SOLR1_SERVICE_NAME, SOLR_1_PORT, Wait.forListeningPort())
.waitingFor(SOLR2_SERVICE_NAME, Wait.forListeningPort());
.waitingFor(SOLR2_SERVICE_NAME, Wait.forListeningPort());*/
logger.info("Starting docker compose");
this.dockerComposeContainer.start();

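For context, a minimal sketch of driving the same docker-compose stack with Testcontainers, assuming the compose file and the "zoo1_1"/2181 service values shown above; the object name, file path, and the small helper subclass are illustrative (Scala needs the subclass because DockerComposeContainer uses a self-referencing Java generic).

```scala
import java.io.File

import org.testcontainers.containers.DockerComposeContainer
import org.testcontainers.containers.wait.strategy.Wait

// Helper subclass so the self-typed Java generic resolves cleanly from Scala.
class ComposeStack(files: File*) extends DockerComposeContainer[ComposeStack](files: _*)

object SolrComposeSketch {

  def main(args: Array[String]): Unit = {
    // assumed on-disk location of the compose file referenced in SolrExtension
    val compose = new ComposeStack(new File("src/test/resources/docker-compose-for-grafana-tests.yml"))
      .withExposedService("zoo1_1", 2181, Wait.forListeningPort())

    compose.start()
    try {
      // build the ZooKeeper connect string from the mapped host/port,
      // as a SolrCloud client would need it
      val zk = s"${compose.getServiceHost("zoo1_1", 2181)}:${compose.getServicePort("zoo1_1", 2181)}"
      println(s"ZooKeeper reachable at $zk")
    } finally {
      compose.stop()
    }
  }
}
```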
15 changes: 12 additions & 3 deletions pom.xml
@@ -83,14 +83,14 @@
<spark.version>2.3.2</spark.version>
<spark.solr>3.6.6</spark.solr>
<!-- junit-->
<junit.version>4.13.1</junit.version>
<junit-platform.version>1.2.0</junit-platform.version>
<junit.version>4.13</junit.version>
<junit-platform.version>1.5.2</junit-platform.version>
<junit.jupiter.version>5.5.2</junit.jupiter.version>
<!-- vertx-->
<vertx.version>3.8.1</vertx.version>
<vertx-swagger-router.version>1.5.0</vertx-swagger-router.version>
<!-- docker-->
<test.containers>1.12.2</test.containers>
<test.containers>1.16.0</test.containers>
<!-- others -->
<lombok.version>1.18.12</lombok.version>
<lombok.maven.version>1.18.12.0</lombok.maven.version>
@@ -110,6 +110,15 @@
<module>historian-scrapper</module>
</modules>


<repositories>
<repository>
<id>maven-restlet</id>
<name>Public online Restlet repository</name>
<url>https://maven.restlet.talend.com</url>
</repository>
</repositories>

<dependencies>
<!--A plugin to generate source code generated by lombok in target folder-->
<dependency>
