|
1 | | -dist: trusty |
2 | | - |
3 | | -language: java |
| 1 | +sudo: required |
4 | 2 |
|
5 | | -jdk: oraclejdk8 |
| 3 | +dist: trusty |
6 | 4 |
|
7 | | -sudo: required |
| 5 | +language: minimal |
8 | 6 |
|
9 | 7 | services: |
10 | 8 | - docker |
11 | 9 |
|
12 | 10 | cache: |
13 | 11 | directories: |
14 | 12 | - $HOME/.ivy2/ |
15 | | - - $HOME/.sbt/launchers/ |
16 | | - - $HOME/.cache/spark-versions/ |
17 | | - - $HOME/.sbt/boot/scala-2.11.8/ |
18 | 13 |
|
19 | 14 | env: |
| 15 | + global: |
| 16 | + - DOCKER_COMPOSE_VERSION=1.22.0 |
20 | 17 | matrix: |
21 | | - - SCALA_BINARY_VERSION=2.11.8 SPARK_VERSION=2.4.0 SPARK_BUILD="spark-2.4.0-bin-hadoop2.7" |
22 | | - SPARK_BUILD_URL="https://dist.apache.org/repos/dist/release/spark/spark-2.4.0/spark-2.4.0-bin-hadoop2.7.tgz" |
23 | | - PYTHON_VERSION=2.7.13 |
24 | | - - SCALA_BINARY_VERSION=2.11.8 SPARK_VERSION=2.4.0 SPARK_BUILD="spark-2.4.0-bin-hadoop2.7" |
25 | | - SPARK_BUILD_URL="https://dist.apache.org/repos/dist/release/spark/spark-2.4.0/spark-2.4.0-bin-hadoop2.7.tgz" |
26 | | - PYTHON_VERSION=3.6.2 |
| 18 | + - PYTHON_VERSION=3.6 |
| 19 | + - PYTHON_VERSION=2.7 |
27 | 20 |
|
28 | 21 | before_install: |
29 | | - - ./bin/download_travis_dependencies.sh |
30 | | - - if [[ "$PYTHON_VERSION" == 2.* ]]; then |
31 | | - export CONDA_URL="repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh" |
32 | | - export PYSPARK_PYTHON=python2; |
33 | | - else |
34 | | - export CONDA_URL="repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"; |
35 | | - export PYSPARK_PYTHON=python3; |
36 | | - fi |
37 | | - - docker run -e "JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64" |
38 | | - -e SPARK_VERSION |
39 | | - -e SPARK_BUILD |
40 | | - -e SCALA_BINARY_VERSION |
41 | | - -e PYTHON_VERSION |
42 | | - -e PYSPARK_PYTHON |
43 | | - -e CONDA_URL |
44 | | - -d --name ubuntu-test -v $HOME ubuntu:16.04 tail -f /dev/null |
45 | | - - docker cp `pwd` ubuntu-test:$HOME/ |
46 | | - - docker cp $HOME/.cache ubuntu-test:$HOME/ |
47 | | - - docker ps |
48 | | - |
49 | | -# See this page: http://conda.pydata.org/docs/travis.html |
50 | | -install: |
51 | | - # install needed ubuntu packages |
52 | | - - docker exec -t ubuntu-test bash -c "apt-get update && apt-get upgrade -y" |
53 | | - - docker exec -t ubuntu-test bash -c "apt-get install -y curl bzip2 openjdk-8-jdk unzip" |
54 | | - # download and set up protoc |
55 | | - - docker exec -t ubuntu-test bash -c " |
56 | | - curl -OL https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-x86_64.zip; |
57 | | - unzip protoc-3.6.1-linux-x86_64.zip -d /usr/local;" |
58 | | - # download and set up miniconda |
59 | | - - docker exec -t ubuntu-test bash -c " |
60 | | - curl https://$CONDA_URL >> $HOME/miniconda.sh; |
61 | | - bash $HOME/miniconda.sh -b -p $HOME/miniconda; |
62 | | - bash $HOME/miniconda.sh -b -p $HOME/miniconda; |
63 | | - $HOME/miniconda/bin/conda config --set always_yes yes --set changeps1 no; |
64 | | - $HOME/miniconda/bin/conda update -q conda; |
65 | | - $HOME/miniconda/bin/conda info -a; |
66 | | - $HOME/miniconda/bin/conda create -q -n test-environment python=$PYTHON_VERSION" |
| 22 | + # update docker compose to the specified version, https://docs.travis-ci.com/user/docker/#using-docker-compose |
| 23 | + - sudo rm /usr/local/bin/docker-compose |
| 24 | + - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose |
| 25 | + - chmod +x docker-compose |
| 26 | + - sudo mv docker-compose /usr/local/bin |
67 | 27 |
|
68 | | - # Activate conda environment and install required packages
69 | | - - docker exec -t ubuntu-test bash -c " |
70 | | - source $HOME/miniconda/bin/activate test-environment; |
71 | | - python --version; |
72 | | - pip --version; |
73 | | - pip install --user -r $HOME/tensorframes/python/requirements.txt;" |
| 28 | +install:
| 29 | + - docker-compose build --build-arg PYTHON_VERSION=$PYTHON_VERSION |
| 30 | + - docker-compose up -d |
| 31 | + - docker-compose exec master bash -i -c "build/sbt tfs_testing/assembly" |
74 | 32 |
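
The rewritten install phase assumes a docker-compose.yml at the repository root that defines a service named master, built from a Dockerfile that declares a matching ARG PYTHON_VERSION; neither file is part of this diff. The sketch below only illustrates the minimal shape such a service could take; apart from the service name master and the PYTHON_VERSION build argument, every name in it is a placeholder:

    # docker-compose.yml (hypothetical sketch, not the project's actual file)
    version: "2.3"
    services:
      master:
        # the Dockerfile is assumed to declare `ARG PYTHON_VERSION`, which picks up
        # the value passed via `docker-compose build --build-arg PYTHON_VERSION=...`
        build: .
        # keep the container alive so `docker-compose exec master ...` works after `up -d`
        command: tail -f /dev/null
        volumes:
          - .:/opt/tensorframes        # placeholder mount point
        working_dir: /opt/tensorframes

The bash -i -c invocations run through an interactive shell, presumably so that environment configured in the image's shell startup files (for example JAVA_HOME or SPARK_HOME) is loaded before sbt and the test scripts run.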
|
75 | 33 | script: |
76 | | - # Run the scala unit tests first |
77 | | - - docker exec -t ubuntu-test bash -c " |
78 | | - source $HOME/miniconda/bin/activate test-environment; |
79 | | - cd $HOME/tensorframes; |
80 | | - ./build/sbt -Dspark.version=$SPARK_VERSION |
81 | | - -Dpython.version=$PYSPARK_PYTHON |
82 | | - -Dscala.version=$SCALA_BINARY_VERSION |
83 | | - tfs_testing/test" |
84 | | - |
85 | | - # Build the assembly |
86 | | - - docker exec -t ubuntu-test bash -c " |
87 | | - source $HOME/miniconda/bin/activate test-environment; |
88 | | - cd $HOME/tensorframes; |
89 | | - ./build/sbt -Dspark.version=$SPARK_VERSION |
90 | | - -Dscala.version=$SCALA_BINARY_VERSION |
91 | | - tfs_testing/assembly" |
92 | | - |
93 | | - # Run python tests |
94 | | - - docker exec -t ubuntu-test bash -c " |
95 | | - source $HOME/miniconda/bin/activate test-environment; |
96 | | - cd $HOME/tensorframes; |
97 | | - SPARK_HOME=$HOME/.cache/spark-versions/$SPARK_BUILD ./python/run-tests.sh" |
| 34 | + - docker-compose exec master bash -i -c "build/sbt tfs_testing/test" |
| 35 | + - docker-compose exec master bash -i -c "python/run-tests.sh" |
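
Assuming Docker and docker-compose 1.22 or newer are installed, the same sequence can be reproduced locally from the repository root, with PYTHON_VERSION set to whichever matrix entry is of interest, for example:

    docker-compose build --build-arg PYTHON_VERSION=3.6
    docker-compose up -d
    docker-compose exec master bash -i -c "build/sbt tfs_testing/assembly"
    docker-compose exec master bash -i -c "build/sbt tfs_testing/test"
    docker-compose exec master bash -i -c "python/run-tests.sh"
    docker-compose down

The net effect of the change is that the JDK, Spark, Scala, protoc, and Python/conda setup previously installed from .travis.yml is presumably baked into the image that docker-compose builds, which is why the Spark and Scala matrix variables, the download_travis_dependencies.sh step, and the host-side sbt and Spark caches could be dropped and the language key reduced to minimal.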