# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sudo: required
dist: trusty
language: python
matrix:
  include:
    # Spark 2.3.1
    - jdk: oraclejdk8
      python: 2.7
      env: >
        TEST_SPARK_VERSION=2.3.1
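    # A hedged sketch of how a second Spark release could be tested: add
    # another matrix entry like the one above. The version below is
    # illustrative, not one this file is known to cover.
    #   - jdk: oraclejdk8
    #     python: 2.7
    #     env: >
    #       TEST_SPARK_VERSION=2.2.0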
cache:
  directories:
    - $HOME/.sbt
    - $HOME/.m2
notifications:
  email: false
before_install:
  - export SPARK_HOME=$TRAVIS_BUILD_DIR/spark/spark-$TEST_SPARK_VERSION-bin-hadoop2.7
  # Download Python packages.
  - pip install coverage pycodestyle
  # Download Spark.
  - mkdir $TRAVIS_BUILD_DIR/spark
  - curl -O http://mirrors.gigenet.com/apache/spark/spark-$TEST_SPARK_VERSION/spark-$TEST_SPARK_VERSION-bin-hadoop2.7.tgz
  - tar zxfC spark-$TEST_SPARK_VERSION-bin-hadoop2.7.tgz $TRAVIS_BUILD_DIR/spark
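  # (In the bundled-flags form above, f takes the tarball and C takes the
  # extraction directory, so the archive is unpacked under $TRAVIS_BUILD_DIR/spark.)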
  # Set the PySpark library paths (for example, for py4j and pyspark). This
  # should be set after Spark is downloaded.
  - export PYTHONPATH=$(ZIPS=("$SPARK_HOME"/python/lib/*.zip); IFS=:; echo "${ZIPS[*]}"):$PYTHONPATH
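  # A minimal sketch of what the expansion above yields, assuming the zips
  # shipped in the Spark tarball (the py4j version varies by release):
  #   PYTHONPATH=$SPARK_HOME/python/lib/py4j-<version>-src.zip:$SPARK_HOME/python/lib/pyspark.zip:$PYTHONPATH
  # It can be sanity-checked with, for example:
  #   python -c "import py4j, pyspark; print(pyspark.__file__)"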
install:
  - build/sbt assembly -Drepourl=http://nexus-private.hortonworks.com/nexus/content/groups/public/ -Dhadoop.version=3.0.0.3.0.0.0-SNAPSHOT -Dhive.version=3.0.0.3.0.0.0-SNAPSHOT
  # sbt test:package generates a jar containing the compiled test classes.
  # This is required to run the Python tests.
  - build/sbt test:package -Drepourl=http://nexus-private.hortonworks.com/nexus/content/groups/public/ -Dhadoop.version=3.0.0.3.0.0.0-SNAPSHOT -Dhive.version=3.0.0.3.0.0.0-SNAPSHOT
  # assembly also produces a Python API module under target/. Add it to the
  # Python path.
  - export PYTHONPATH=$(ZIPS=(`pwd`/target/*.zip); IFS=:; echo "${ZIPS[*]}"):$PYTHONPATH
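  # A hedged check that the module zip is importable (the module name is taken
  # from the test path in the script section; this line is illustrative and
  # not part of the build):
  #   python -c "import pyspark_llap; print(pyspark_llap.__file__)"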
script:
  # Scala / Java
  - build/sbt test -Drepourl=http://nexus-private.hortonworks.com/nexus/content/groups/public/ -Dhadoop.version=3.0.0.3.0.0.0-SNAPSHOT -Dhive.version=3.0.0.3.0.0.0-SNAPSHOT
  - build/scalastyle
  # Python
  - cd python
  - python setup.py sdist
  - coverage run pyspark_llap/sql/tests.py
  - coverage report --include "*pyspark_llap-*pyspark_llap/*" --show-missing
  - pycodestyle `find . -name '*.py'` --ignore=E402,E731,E241,W503,E226,E722,E741,E305 --max-line-length=100
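# A minimal sketch for reproducing the Python test step locally, assuming the
# same SPARK_HOME and PYTHONPATH setup as in before_install/install (all
# values below are illustrative):
#   export TEST_SPARK_VERSION=2.3.1
#   export SPARK_HOME=/path/to/spark-$TEST_SPARK_VERSION-bin-hadoop2.7
#   cd python && coverage run pyspark_llap/sql/tests.py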