
Commit

[CI] fix travis
Signed-off-by: Yuan Zhou <yuan.zhou@intel.com>
zhouyuan committed Jan 7, 2021
1 parent b6c1eb5 commit 556234d
Showing 1 changed file with 3 additions and 133 deletions.
136 changes: 3 additions & 133 deletions .travis.yml
@@ -4,35 +4,6 @@ language: java
jdk: openjdk8
jobs:
include:
# Other modules can refer to oap-cache-oap to build an independent travis-ci job;
# oap-cache-oap is a CI build demo for the corresponding module oap-cache/oap.
- name: oap-cache-oap
before_install:
- sudo apt-get install libpthread-stubs0-dev
- sudo apt-get install libnuma-dev
- sudo apt-get install cmake
- sudo apt-get install asciidoctor libkmod-dev libudev-dev uuid uuid-dev libjson-c-dev systemd libkeyutils-dev
- sudo apt-get install pandoc libgtest-dev pkg-config libtbb-dev rapidjson-dev valgrind
install:
- # Download spark 3.0.0
- "[ -f spark ] || mkdir spark && cd spark && wget https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz && cd .."
- "tar -xf ./spark/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=`pwd`/spark-3.0.0-bin-hadoop2.7"
before_script:
- cd ${TRAVIS_BUILD_DIR}/dev
- ./install_vmemcache.sh
- ./install_memkind.sh
- ./install_pmdk.sh
- ./install_pmemkv.sh
- ../oap-common/src/native/memkind/compile.sh
- ../oap-common/src/native/libpmemblk/compile.sh
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmplatform.so /usr/lib/
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmblkplatform.so /usr/lib/
- sudo ldconfig /usr/lib
script:
- cd ${TRAVIS_BUILD_DIR}
- mvn clean test -pl oap-common -q -Ppersistent-memory -Pvmemcache -Plibpmemblk -am
- mvn clean test -pl oap-cache/oap -q -Ppersistent-memory -Pvmemcache -am

- name: oap-native-sql
dist: bionic
@@ -60,112 +31,11 @@ jobs:
- cd ../../java
- mvn clean install -q -P arrow-jni -am -Darrow.cpp.build.dir=/tmp/arrow/cpp/build/release/ -DskipTests -Dcheckstyle.skip
script:
- cd ${TRAVIS_BUILD_DIR}/
- mvn clean -q package -DskipTests -pl oap-native-sql/core -am #skip core tests
- cd ${TRAVIS_BUILD_DIR}/core
- mvn clean -q package -DskipTests #skip core tests
- # run native sql unit tests
- count=0; while [ $count -le 3 ]; do echo "Elapsed 3 minutes"; sleep 180; let count++; done & # print a heartbeat line every 3 minutes in the background so Travis does not kill the job for producing no output
- mvn test -DmembersOnlySuites=org.apache.spark.sql.travis -pl oap-native-sql/core -am -DfailIfNoTests=false -Dexec.skip=true &> log-file.log # skip cpp build
- mvn test -DmembersOnlySuites=org.apache.spark.sql.travis -DfailIfNoTests=false -Dexec.skip=true &> log-file.log # skip cpp build
- echo '#!/bin/bash' > grep.sh
- echo "module_tested=0; module_should_test=4; tests_total=0; while read -r line; do num=\$(echo \"\$line\" | grep -o -E '[0-9]+'); tests_total=\$((tests_total+num)); done <<<\"\$(grep \"Total number of tests run:\" log-file.log)\"; succeed_total=0; while read -r line; do [[ \$line =~ [^0-9]*([0-9]+)\, ]]; num=\${BASH_REMATCH[1]}; succeed_total=\$((succeed_total+num)); let module_tested++; done <<<\"\$(grep \"succeeded\" log-file.log)\"; if test \$tests_total -eq \$succeed_total -a \$module_tested -eq \$module_should_test; then echo \"All unit tests succeed\"; else echo \"Unit tests failed\"; exit 1; fi" >> grep.sh
- bash grep.sh
- # run data source unit tests
- mvn -q test -pl oap-data-source/arrow -am
- name: oap-shuffle-remote-shuffle
install:
- #empty install step
script:
- cd ${TRAVIS_BUILD_DIR}/oap-shuffle/remote-shuffle/
- mvn -q test

- name: oap-Rpmem-shuffle
dist: bionic
jdk:
- openjdk8
before_install:
- echo ${TRAVIS_COMMIT_MESSAGE}
#- if [[ ${TRAVIS_COMMIT_MESSAGE} != \[oap-native-sql\]* ]]; then travis_terminate 0 ; fi ;
- sudo apt-get install -y openjdk-8-jdk git maven g++-7 cmake build-essential libboost-dev libboost-system-dev autogen autoconf libtool pandoc asciidoctor libkmod-dev libdaxctl-dev pkg-config libkmod2 kmod libuuid1 libudev1 libudev-dev libjson-c-dev libjemalloc-dev
- export | grep JAVA_HOME
- "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64"
- export |grep JAVA_HOME
install:
- # Download spark 3.0
- "[ -f spark ] || mkdir spark && cd spark && wget http://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz && cd .."
- "tar -xf ./spark/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=`pwd`/spark-3.0.0-bin-hadoop2.7"
before_script:
- cd /tmp
#libfabric
- git clone https://github.com/ofiwg/libfabric.git && cd libfabric && git checkout -b v1.8.0 tags/v1.8.0 && ./autogen.sh && ./configure --prefix=/usr/local --enable-sockets
- make -j && sudo make install
#HPNL
- cd /tmp
- git clone https://github.com/Intel-bigdata/HPNL.git
- cd HPNL && git submodule update --init --recursive && mkdir build && cd build
- cmake -DWITH_VERBS=ON -DWITH_JAVA=ON ..
- make -j && sudo make install
- cd ../java/hpnl
- sudo mvn install -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
#PMDK
- cd /tmp
- git clone https://github.com/pmem/pmdk.git && cd pmdk && git checkout tags/1.8
# PMDK uses pkg-config to find libndctl; disable NDCTL support for now
- make NDCTL_ENABLE=n
- sudo make NDCTL_ENABLE=n install
#RPMem
- cd /tmp
- git clone https://github.com/efficient/libcuckoo && cd libcuckoo && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DBUILD_EXAMPLES=1 -DBUILD_TESTS=1 ..
- make all && sudo make install
script:
- cd ${TRAVIS_BUILD_DIR}/oap-shuffle/RPMem-shuffle
# skip tests in CI; run them locally
- sudo mvn install -DskipTests -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn

- name: oap-mllib
dist: bionic
jdk:
- openjdk8
cache:
directories:
- /opt/intel/inteloneapi
- /opt/intel/oneapi
- $HOME/downloads
install:
- # Install Spark 3.0
- "[ -d $HOME/downloads ] || mkdir $HOME/downloads"
- "cd $HOME/downloads && wget http://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz"
- "cd $HOME && tar -xf $HOME/downloads/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=$HOME/spark-3.0.0-bin-hadoop2.7"
- # Install Dependencies
- ${TRAVIS_BUILD_DIR}/oap-mllib/dev/install-build-deps-ubuntu.sh
script:
- cd ${TRAVIS_BUILD_DIR}/oap-mllib/mllib-dal
- export ONEAPI_ROOT=/opt/intel/oneapi
- source /opt/intel/oneapi/dal/latest/env/vars.sh
- source /opt/intel/oneapi/tbb/latest/env/vars.sh
- source /tmp/oneCCL/build/_install/env/setvars.sh
- ./test.sh

- name: oap-spark
before_install:
- sudo apt-get install libpthread-stubs0-dev
- sudo apt-get install libnuma-dev
- sudo apt-get install cmake
- sudo apt-get install asciidoctor libkmod-dev libudev-dev uuid uuid-dev libjson-c-dev systemd libkeyutils-dev
- sudo apt-get install pandoc libgtest-dev pkg-config libtbb-dev rapidjson-dev valgrind
install:
- #empty install step
before_script:
- cd ${TRAVIS_BUILD_DIR}/dev
- ./install_memkind.sh
- ./install_pmdk.sh
- ./install_pmemkv.sh
- ../oap-common/src/native/memkind/compile.sh
- ../oap-common/src/native/libpmemblk/compile.sh
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmplatform.so /usr/lib/
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmblkplatform.so /usr/lib/
- sudo ldconfig /usr/lib
script:
- cd ${TRAVIS_BUILD_DIR}
- mvn clean test -pl oap-common -q -Ppersistent-memory -Plibpmemblk -am
- mvn clean package -pl oap-spark -q -Ppersistent-memory -am

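Note: in the oap-native-sql job above, grep.sh is written by a single escaped echo line, which is hard to read in the diff. Unescaped and reformatted for readability only (same logic as the one-liner, with comments added; the ScalaTest log format it parses is inferred from the strings it greps for), the generated script is roughly:

    #!/bin/bash
    # module_should_test is the expected number of "succeeded" summary lines
    # (one per test module) in log-file.log
    module_tested=0; module_should_test=4; tests_total=0

    # Sum the counts from every "Total number of tests run: N" line
    while read -r line; do
      num=$(echo "$line" | grep -o -E '[0-9]+')
      tests_total=$((tests_total+num))
    done <<<"$(grep "Total number of tests run:" log-file.log)"

    # From every line containing "succeeded", take the first comma-terminated
    # number (the succeeded count), sum it, and count the modules that reported
    succeed_total=0
    while read -r line; do
      [[ $line =~ [^0-9]*([0-9]+)\, ]]
      num=${BASH_REMATCH[1]}
      succeed_total=$((succeed_total+num))
      let module_tested++
    done <<<"$(grep "succeeded" log-file.log)"

    # Pass only if every test succeeded and every expected module was tested
    if test $tests_total -eq $succeed_total -a $module_tested -eq $module_should_test; then
      echo "All unit tests succeed"
    else
      echo "Unit tests failed"
      exit 1
    fi

The check passes only when the summed succeeded counts equal the summed totals and exactly module_should_test modules produced a summary line; otherwise the job fails with a non-zero exit code.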