.travis.yml
sudo: required
dist: bionic
language: java
jdk: openjdk8
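# Global defaults: every job runs on Ubuntu 18.04 (bionic) with OpenJDK 8
# unless it overrides these settings below.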
jobs:
include:
# Other modules can refer to the oap-cache-oap job below to set up an independent Travis CI job;
# oap-cache-oap is a demo CI job for the corresponding module oap-cache/oap.
- name: oap-cache-oap
before_install:
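# Native toolchain and headers, presumably needed by the memkind/PMDK/
# vmemcache/pmemkv builds in before_script below.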
- sudo apt-get install libpthread-stubs0-dev
- sudo apt-get install libnuma-dev
- sudo apt-get install cmake
- sudo apt-get install asciidoctor libkmod-dev libudev-dev uuid uuid-dev libjson-c-dev systemd libkeyutils-dev
- sudo apt-get install pandoc libgtest-dev pkg-config libtbb-dev rapidjson-dev valgrind
install:
# Download Spark 3.0.0
- "[ -f spark ] || mkdir spark && cd spark && wget https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz && cd .."
- "tar -xf ./spark/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=`pwd`/spark-3.0.0-bin-hadoop2.7"
before_script:
- cd ${TRAVIS_BUILD_DIR}/dev
- ./install_vmemcache.sh
- ./install_memkind.sh
- ./install_pmdk.sh
- ./install_pmemkv.sh
- ../oap-common/src/native/memkind/compile.sh
- ../oap-common/src/native/libpmemblk/compile.sh
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmplatform.so /usr/lib/
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmblkplatform.so /usr/lib/
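# Refresh the dynamic linker cache so the copied libraries resolve at test time.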
- sudo ldconfig /usr/lib
script:
- cd ${TRAVIS_BUILD_DIR}
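# -pl selects the module to build, -am also builds its required dependencies,
# and each -P activates a Maven profile for the matching native backend.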
- mvn clean test -pl oap-common -q -Ppersistent-memory -Pvmemcache -Plibpmemblk -am
- mvn clean test -pl oap-cache/oap -q -Ppersistent-memory -Pvmemcache -am
- name: oap-native-sql
dist: bionic
jdk:
- openjdk8
before_install:
- echo ${TRAVIS_COMMIT_MESSAGE}
#- if [[ ${TRAVIS_COMMIT_MESSAGE} != \[oap-native-sql\]* ]]; then travis_terminate 0 ; fi ;
- sudo apt-get install cmake
- sudo apt-get install libboost-all-dev
- export | grep JAVA_HOME
install:
# Download Spark 3.0.0
- "[ -f spark ] || mkdir spark && cd spark && wget http://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz && cd .."
- "tar -xf ./spark/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=`pwd`/spark-3.0.0-bin-hadoop2.7"
before_script:
- cd /tmp
- git clone https://github.com/intel-bigdata/arrow.git
- cd arrow && git checkout oap-master && cd cpp
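# Point Arrow's CMake at the pyenv python3 shim shipped on the Travis bionic
# image (path assumed from the stock image layout).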
- sed -i "s/\${Python3_EXECUTABLE}/\/opt\/pyenv\/shims\/python3/g" CMakeLists.txt
- mkdir build && cd build
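# Build Arrow C++ with Gandiva JNI, Parquet, Dataset and compression support
# that oap-native-sql and oap-data-source/arrow link against.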
- cmake .. -DARROW_JNI=ON -DARROW_GANDIVA_JAVA=ON -DARROW_GANDIVA=ON -DARROW_PARQUET=ON -DARROW_HDFS=ON -DARROW_FILESYSTEM=ON -DARROW_WITH_SNAPPY=ON -DARROW_JSON=ON -DARROW_DATASET=ON -DARROW_WITH_LZ4=ON && make
- sudo make install
- cd ../../java
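# Build the Arrow Java bindings; -Darrow.cpp.build.dir points the arrow-jni
# profile at the C++ build output produced above.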
- mvn clean install -q -P arrow-jni -am -Darrow.cpp.build.dir=/tmp/arrow/cpp/build/release/ -DskipTests -Dcheckstyle.skip
script:
- cd ${TRAVIS_BUILD_DIR}/
- mvn clean -q package -DskipTests -pl oap-native-sql/core -am #skip core tests
- cd ${TRAVIS_BUILD_DIR}/
- mvn clean -q test -pl oap-data-source/arrow -am
- name: oap-shuffle-remote-shuffle
install:
- true # empty install step; overrides the default Maven install
script:
- cd ${TRAVIS_BUILD_DIR}/oap-shuffle/remote-shuffle/
- mvn -q test
- name: oap-Rpmem-shuffle
dist: bionic
jdk:
- openjdk8
before_install:
- echo ${TRAVIS_COMMIT_MESSAGE}
#- if [[ ${TRAVIS_COMMIT_MESSAGE} != \[oap-native-sql\]* ]]; then travis_terminate 0 ; fi ;
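# Toolchain and headers for the libfabric, HPNL, PMDK and libcuckoo builds below.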
- sudo apt-get install -y openjdk-8-jdk git maven g++-7 cmake build-essential libboost-dev libboost-system-dev autogen autoconf libtool pandoc asciidoctor libkmod-dev libdaxctl-dev pkg-config libkmod2 kmod libuuid1 libudev1 libudev-dev libjson-c-dev libjemalloc-dev
- export | grep JAVA_HOME
- "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64"
- export |grep JAVA_HOME
install:
# Download Spark 3.0.0
- "[ -f spark ] || mkdir spark && cd spark && wget http://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz && cd .."
- "tar -xf ./spark/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=`pwd`/spark-3.0.0-bin-hadoop2.7"
before_script:
- cd /tmp
#libfabric
- git clone https://github.com/ofiwg/libfabric.git && cd libfabric && git checkout -b v1.8.0 tags/v1.8.0 && ./autogen.sh && ./configure --prefix=/usr/local --enable-sockets
- make -j && sudo make install
#HPNL
- cd /tmp
- git clone https://github.com/Intel-bigdata/HPNL.git
- cd HPNL && git submodule update --init --recursive && mkdir build && cd build
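# WITH_VERBS presumably enables the RDMA verbs transport; WITH_JAVA builds the
# JNI bindings installed from java/hpnl below.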
- cmake -DWITH_VERBS=ON -DWITH_JAVA=ON ..
- make -j && sudo make install
- cd ../java/hpnl
- sudo mvn install -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
#PMDK
- cd /tmp
- git clone https://github.com/pmem/pmdk.git && cd pmdk && git checkout tags/1.8
# PMDK uses pkg-config to find libndctl; disable NDCTL support for now
- make NDCTL_ENABLE=n
- sudo make NDCTL_ENABLE=n install
#libcuckoo (RPMem-shuffle dependency)
- cd /tmp
- git clone https://github.com/efficient/libcuckoo && cd libcuckoo && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DBUILD_EXAMPLES=1 -DBUILD_TESTS=1 ..
- make all && sudo make install
script:
- cd ${TRAVIS_BUILD_DIR}/oap-shuffle/RPMem-shuffle
# skip tests in CI; they are run locally
- sudo mvn install -DskipTests -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
- name: oap-mllib
dist: bionic
jdk:
- openjdk8
cache:
directories:
- /opt/intel/inteloneapi
- $HOME/downloads
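# Caching the oneAPI install and the downloads directory avoids re-installing
# oneAPI and re-downloading Spark on every build.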
install:
# Install Spark 3.0.0
- "[ -d $HOME/downloads ] || mkdir $HOME/downloads"
- "cd $HOME/downloads && wget http://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz"
- "cd $HOME && tar -xf $HOME/downloads/spark-3.0.0-bin-hadoop2.7.tgz"
- "export SPARK_HOME=$HOME/spark-3.0.0-bin-hadoop2.7"
# Install build dependencies
- ${TRAVIS_BUILD_DIR}/oap-mllib/dev/install-build-deps-ubuntu.sh
script:
- cd ${TRAVIS_BUILD_DIR}/oap-mllib/mllib-dal
- export ONEAPI_ROOT=/opt/intel/inteloneapi
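# Load the DAAL and TBB environments from the cached oneAPI install; oneCCL is
# assumed to be built under /tmp/oneCCL by install-build-deps-ubuntu.sh above.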
- source /opt/intel/inteloneapi/daal/2021.1-beta07/env/vars.sh
- source /opt/intel/inteloneapi/tbb/2021.1-beta07/env/vars.sh
- source /tmp/oneCCL/build/_install/env/setvars.sh
- ./test.sh
- name: oap-spark
before_install:
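# Same native dependency set as the oap-cache-oap job above.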
- sudo apt-get install libpthread-stubs0-dev
- sudo apt-get install libnuma-dev
- sudo apt-get install cmake
- sudo apt-get install asciidoctor libkmod-dev libudev-dev uuid uuid-dev libjson-c-dev systemd libkeyutils-dev
- sudo apt-get install pandoc libgtest-dev pkg-config libtbb-dev rapidjson-dev valgrind
install:
- true # empty install step; overrides the default Maven install
before_script:
- cd ${TRAVIS_BUILD_DIR}/dev
- ./install_memkind.sh
- ./install_pmdk.sh
- ./install_pmemkv.sh
- ../oap-common/src/native/memkind/compile.sh
- ../oap-common/src/native/libpmemblk/compile.sh
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmplatform.so /usr/lib/
- sudo cp ../oap-common/src/resources/linux/64/lib/libpmblkplatform.so /usr/lib/
- sudo ldconfig /usr/lib
script:
- cd ${TRAVIS_BUILD_DIR}
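# Unlike oap-cache-oap, this job skips the vmemcache profile (vmemcache is not
# installed in this job's before_script).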
- mvn clean test -pl oap-common -q -Ppersistent-memory -Plibpmemblk -am
- mvn clean package -pl oap-spark -q -Ppersistent-memory -am