###############################################################################
language: python
os: linux
addons:
apt:
packages:
# Install ATLAS and LAPACK for numpy/scipy
- libatlas-dev
- libatlas-base-dev
- liblapack-dev
# Install GEOS for Shapely
- libgeos-dev
# Install JPEG library for Pillow>=3.0.0
- libjpeg-dev
###############################################################################
# Cache data which has to be downloaded on every build.
# This is just for Travis and doesn't do anything on Shippable.
cache:
directories:
# Cache files downloaded by pip
- $HOME/.cache/pip
- $HOME/Library/Caches/pip
# Cache our miniconda download.
- $HOME/Downloads
# Cache OSX brew packages
- $HOME/Library/Caches/Homebrew
- $HOME/Library/Caches/PyenvVirtualenv
###############################################################################
env:
global:
# Set this to be python, python2, or python3 etc as appropriate.
# If it is empty, it is automatically determined.
- PYTHONCMD=""
# Whether to test with oldest supported dependencies.
# Values are "true", "false" or "".
- USE_OLDEST_DEPS=""
# Whether to build documentation.
# Values are "true", "false" or "".
- BUILD_DOCS="true"
# Whether to test that the notebooks run.
# Values are "true", "false" or "".
- TEST_NOTEBOOKS="true"
# Whether to install sima and include the SIMA example notebooks.
- USE_SIMA="true"
# Set the flag for whether to use a conda environment (an example matrix
# entry using these flags is sketched after this list).
# Values can be:
# "" or "false": conda not used
# "env": conda used to make environment, then pip used to install deps
# "numpy": conda used for numpy and scipy, pip used for everything else
# "full": conda used to install every dependency available, pip otherwise
- USE_CONDA=""
# Set cache location for pyenv virtualenv on OSX
- PYENV_VIRTUALENV_CACHE_PATH="$HOME/Library/Caches/PyenvVirtualenv"
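# As a hypothetical illustration (not one of the actual jobs defined below),
# a matrix entry overriding these defaults might look like:
#   - python: "3.7"
#     env:
#       - USE_CONDA="full"
#       - BUILD_DOCS="false"
#       - TEST_NOTEBOOKS="false"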
jobs:
fast_finish: true
include:
# Ubuntu -------------------------------------
# Jobs to run on Conda
- python: "3.6"
env:
- USE_CONDA="binder"
# All versions pre-installed
- if: branch =~ /^v?\d+(\.[x\d]+)+$/
python: "3.8"
env:
- USE_SIMA="false"
- python: "2.7"
env:
- USE_OLDEST_DEPS="true"
- TEST_NOTEBOOKS="false"
- python: "3.5"
env:
- USE_OLDEST_DEPS="true"
- TEST_NOTEBOOKS="false"
allow_failures:
- env:
- USE_CONDA="binder"
###############################################################################
# Set up the environment before installing
before_install:
# Remember the directory where our repository to test is located
- REPOPATH="$(pwd)" && pwd
# ---------------------------------------------------------------------------
# Automatically determine python executable
- |
if [[ "$PYTHONCMD" != "" ]]; then
:
elif "$TRAVIS_OS_NAME" == "linux" ]]; then
PYTHONCMD="python";
elif [[ "${TRAVIS_PYTHON_VERSION}." == "2."* ]]; then
PYTHONCMD="python2";
else
PYTHONCMD="python3";
fi;
# ---------------------------------------------------------------------------
# Check which python versions are pre-installed
- ls -l /usr/local/bin/python*
- $PYTHONCMD --version || echo "No $PYTHONCMD installed"
# ---------------------------------------------------------------------------
# If we want to run the tests using the oldest set of dependencies we
# support, modify any *requirements*.txt files so that every '>=' becomes
# '~='. Then undo the swap for anything which says 'version>=', since those
# are python_version environment markers, not package pins. (A worked
# example follows the block below.)
- |
if [[ "$USE_OLDEST_DEPS" == "true" ]]; then
for FILE in *requirements*.txt; do
sed -e 's/>=/~=/g' $FILE > $FILE.tmp && mv $FILE.tmp $FILE;
sed -e 's/version\s*~=/version>=/g' $FILE > $FILE.tmp && mv $FILE.tmp $FILE;
done;
fi;
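# As a worked example (the requirement below is hypothetical), the two
# substitutions above turn
#     scipy>=0.12.0; python_version>="3"
# into
#     scipy~=0.12.0; python_version>="3"
# i.e. the package is pinned near its oldest supported release while the
# python_version environment marker is restored to '>='.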
# ---------------------------------------------------------------------------
# Check whether the python version we are using matches the one we want
- |
PYVER="";
PYVER="$($PYTHONCMD --version 2>&1)";
PYVER=${PYVER#Python };
echo "Current Python version... $PYVER";
if [[ "$USE_CONDA" != "false" ]] && [[ "$USE_CONDA" != "" ]]; then
echo "We were planning to use a conda environment anyway.";
elif [[ "$USE_BREW" == "true" ]]; then
echo "We were planning to use a pyenv virtualenv environment via brew anyway.";
elif [[ "${PYVER}." == "${TRAVIS_PYTHON_VERSION}."* ]]; then
USE_VENV="true";
echo "Correct python currently in use, but we will make a new virtualenv for it.";
elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
USE_BREW="true";
echo "We are on OSX and will use a pyenv virtualenv environment via brew.";
else
USE_CONDA="env";
echo "We will use a conda environment to install the right python version.";
fi;
# ---------------------------------------------------------------------------
# Install python with brew on OSX
# https://github.com/yyuu/pyenv/wiki#suggested-build-environment
- |
if [[ "$USE_BREW" == "true" && "$TRAVIS_OS_NAME" == "osx" ]]; then
echo "Installing python with brew and pyenv-virtualenv on Mac OSX";
export HOMEBREW_NO_INSTALL_CLEANUP=1;
export HOMEBREW_NO_AUTO_UPDATE=1;
brew update;
brew install openssl readline sqlite3 xz zlib pyenv pyenv-virtualenv;
brew upgrade pyenv;
pyenv install "$TRAVIS_PYTHON_VERSION";
export PYENV_ROOT="$HOME/.pyenv";
export PATH="$PYENV_ROOT/bin:$PATH";
pyenv virtualenv "$TRAVIS_PYTHON_VERSION" test-environment;
pyenv activate test-environment;
export PATH="$PYENV_ROOT/versions/test-environment/bin:$PATH";
PYTHONCMD=python;
fi;
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Make a new python environment, to ensure there are no site-packages
- |
if [[ "$USE_VENV" == "true" ]]; then
rm -rf ~/test-environment;
$PYTHONCMD -m pip install virtualenv;
$PYTHONCMD -m virtualenv -p "$PYTHONCMD" ~/test-environment;
source ~/test-environment/bin/activate;
fi;
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Install conda and set up conda environment
# The following is based on Miniconda's how-to page for Travis
# http://conda.pydata.org/docs/travis.html
# - Download miniconda. No need to redownload if we already have the latest version cached.
# - Install miniconda to the home directory, if it isn't there already.
# - Add conda to the path and automatically say yes to any check from conda
# - Remove test environment from conda, if it's still there from last time
# - Update conda
- |
if [[ "$USE_CONDA" != "false" && "$USE_CONDA" != "" ]]; then
mkdir -p $HOME/Downloads;
if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
MINICONDA_FNAME="Miniconda2-latest-Linux-x86_64.sh";
else
MINICONDA_FNAME="Miniconda3-latest-Linux-x86_64.sh";
fi;
MINICONDA_URL="https://repo.continuum.io/miniconda/$MINICONDA_FNAME";
travis_retry wget -c "$MINICONDA_URL" -O "$HOME/Downloads/$MINICONDA_FNAME";
if [ ! -d "$HOME/miniconda/bin" ]; then
if [ -d "$HOME/miniconda" ]; then rm -r "$HOME/miniconda"; fi;
bash "$HOME/Downloads/$MINICONDA_FNAME" -b -p "$HOME/miniconda";
fi;
export PATH="$HOME/miniconda/bin:$PATH";
hash -r;
conda config --set always_yes yes --set changeps1 no;
conda config --add channels conda-forge;
conda config --add channels anaconda;
conda remove -n test-environment --all || echo "No test-environment to remove";
travis_retry conda update -q conda;
fi;
# Useful for debugging any issues with conda
- conda info -a || echo "No conda"
- conda list || echo "No conda"
#
# If necessary, check which is the earliest version of numpy and scipy
# available on conda for this version of python.
# Because any given version of scipy is only available for a narrow range
# of numpy versions, we constrain only scipy and not numpy to its oldest
# possible requirement when scipy is being installed. The version of numpy
# we end up with must still satisfy the original requirements.txt setting,
# and be from around the time of the oldest supported scipy release.
# (A hypothetical example of this lookup follows the block below.)
- |
if [[ "$USE_CONDA" == "full" || "$USE_CONDA" == "numpy" ]]; then
if [ -f requirements.txt ]; then
NUMPY_REQUIREMENT="$(grep '^numpy\([!<>=~ ]\|$\)' requirements.txt)";
echo "NumPy requirement is '$NUMPY_REQUIREMENT'";
SCIPY_REQUIREMENT="$(grep '^scipy\([!<>=~ ]\|$\)' requirements.txt)";
echo "SciPy requirement is '$SCIPY_REQUIREMENT'";
fi;
if [[ "$USE_OLDEST_DEPS" == "true" ]]; then
if [[ "$SCIPY_REQUIREMENT" != "" ]]; then
SCIPY_VERSION="$(bash ./.ci/conda_min_version.sh "$SCIPY_REQUIREMENT" "$TRAVIS_PYTHON_VERSION")";
if [[ "$SCIPY_VERSION" != "" ]]; then
SCIPY_REQUIREMENT="scipy==$SCIPY_VERSION";
fi;
elif [[ "$NUMPY_REQUIREMENT" != "" ]]; then
NUMPY_VERSION="$(bash ./.ci/conda_min_version.sh "$NUMPY_REQUIREMENT" "$TRAVIS_PYTHON_VERSION")";
if [[ "$NUMPY_VERSION" != "" ]]; then
NUMPY_REQUIREMENT="numpy==$NUMPY_VERSION";
fi;
fi;
fi;
fi;
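# As a hypothetical illustration of the lookup above: with
# SCIPY_REQUIREMENT="scipy>=0.12.0" and TRAVIS_PYTHON_VERSION="2.7",
#     bash ./.ci/conda_min_version.sh "scipy>=0.12.0" "2.7"
# might print "0.12.0", in which case the requirement is tightened to
#     scipy==0.12.0
# while the numpy requirement is left exactly as it was in requirements.txt.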
# Create the conda environment with pip, numpy and scipy installed (if they
# are in requirements.txt)
- |
if [[ "$USE_CONDA" != "false" && "$USE_CONDA" != "" ]]; then
if [[ "$USE_CONDA" == "binder" ]]; then
conda env create -q -f .binder/environment.yml -n test-environment;
else
conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION pip $NUMPY_REQUIREMENT $SCIPY_REQUIREMENT;
fi;
source activate test-environment;
PYTHONCMD=python;
fi;
# If you get an error from this command which looks like this:
# Error: Unsatisfiable package specifications.
# Generating hint:
# [ COMPLETE ]|###########| 100%
# Hint: the following packages conflict with each other:
# - numpy >=1.9.0
# - scipy ==0.12.0
#
# This is because you have constrained the numpy version in requirements.txt
# to a more recent set of values (e.g. numpy>=1.9.0) than the scipy
# constraint (e.g. scipy>=0.12.0). The USE_OLDEST_DEPS code has
# looked up the oldest compatible version available on conda (scipy==0.12.0)
# but there is no numpy version for this which matches your constraint.
#
# You can resolve this by doing a search of the conda packages available
# conda search scipy
# and changing your scipy constraint to be scipy>=x.y.z, where x.y.z is the
# oldest version which has a matching numpy version in its buildstring.
# To resolve the example, we look for the first scipy version which has
# 'np19' in its buildstring, and find it is scipy version 0.14.0, so we
# update the requirements.txt file to have 'scipy>=0.14.0' instead of
# 'scipy>=0.12.0'.
#
# ---------------------------------------------------------------------------
# Check whether the python version we are using matches the one we want
# Fail the build if it doesn't match (a worked example follows this block)
- |
PYVER="$($PYTHONCMD --version 2>&1)";
PYVER=${PYVER#Python };
echo "Current Python version... $PYVER";
echo "We want Python version... $TRAVIS_PYTHON_VERSION";
if [[ "${PYVER}." == "${TRAVIS_PYTHON_VERSION}."* ]]; then
echo "Python version acceptable. Continuing job.";
else
echo "Python version not acceptable. Exiting job.";
fi;
[[ "${PYVER}." == "${TRAVIS_PYTHON_VERSION}."* ]];
###############################################################################
# install requirements
install:
# Update pip
- $PYTHONCMD -m pip install --upgrade pip
# Make a list of all requirements
- cat requirements.txt requirements-test.txt > requirements_all.txt
# Conditionally install the packages which are needed for building docs
- if [[ "$BUILD_DOCS" == "true" ]]; then
cat requirements-docs.txt >> requirements_all.txt;
fi
# Conditionally install the packages which are needed for plotting
# Also, tell matplotlib to use the agg backend and not X server
- |
if [[ "$TEST_NOTEBOOKS" != "" && "$TEST_NOTEBOOKS" != "false" ]]; then
export MPLBACKEND="agg";
cat requirements-plots.txt >> requirements_all.txt;
if [[ "$USE_SIMA" == "true" ]]; then
echo "sima" >> requirements_all.txt;
else
rm -f examples/*SIMA* examples/*sima*;
fi;
fi;
# Show the resulting list of packages (a placeholder sketch follows below)
- cat requirements_all.txt;
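# As a sketch only (the package names below are placeholders, not the real
# contents of the requirements files), the assembled requirements_all.txt
# might read something like:
#     numpy>=1.13.0
#     scipy>=0.19.0
#     pytest
#     pytest-cov
#     sphinx
#     matplotlib
#     sima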
# ---------------------------------------------------------------------------
# If we are using a conda environment, install as much as we can in conda
# Anything we can't install into conda will be installed with pip later
- |
if [[ "$USE_CONDA" == "full" ]]; then
while read REQUIREMENT;
do conda install $REQUIREMENT || echo "$REQUIREMENT not on conda";
done < requirements_all.txt;
fi;
# ---------------------------------------------------------------------------
# Installation with pip will handle all packages if we aren't using conda,
# or all packages which couldn't be installed with conda if we were using it.
# Do numpy and scipy requirements first, because some packages need them
# in order for the pip install to complete.
- |
sed -n -e '/^numpy\([!<>=~ ]\|$\)/p' requirements.txt > .requirements_first.txt;
sed -n -e '/^scipy\([!<>=~ ]\|$\)/p' requirements.txt >> .requirements_first.txt;
cat .requirements_first.txt;
$PYTHONCMD -m pip install -r .requirements_first.txt;
# Install the required packages listed in requirements_all.txt. Unless we
# are testing the oldest dependencies or the binder environment, we install
# with the upgrade flag to ensure we have the most recent version of each
# dependency which is compatible with the specification.
- if [[ "$USE_OLDEST_DEPS" == "true" || "$USE_CONDA" == "binder" ]]; then
$PYTHONCMD -m pip install -r requirements_all.txt;
else
$PYTHONCMD -m pip install --upgrade -r requirements_all.txt;
fi;
# ---------------------------------------------------------------------------
# Install our own package
- $PYTHONCMD setup.py develop
###############################################################################
before_script:
# Double-check we are still in the right directory
- pwd
# Check what python packages we have installed
- conda info -a || echo "No conda"
- $PYTHONCMD --version
- conda env export > environment.yml && cat environment.yml || echo "No conda"
- $PYTHONCMD -m pip freeze
# ---------------------------------------------------------------------------
# Remove any cached results files from previous build, if present
- rm -f testresults.xml coverage.xml .coverage;
# ---------------------------------------------------------------------------
# Set up folders for test results on Shippable
- if [ "$SHIPPABLE" = "true" ]; then
rm -fr shippable;
mkdir -p shippable/testresults;
mkdir -p shippable/codecoverage;
fi;
###############################################################################
# commands to run test scripts
script:
- $PYTHONCMD --version;
if [[ "$NUMPY_REQUIREMENT" != "" ]]; then
$PYTHONCMD -c "import numpy; print('numpy %s' % numpy.__version__)";
fi;
if [[ "$SCIPY_REQUIREMENT" != "" ]]; then
$PYTHONCMD -c "import scipy; print('scipy %s' % scipy.__version__)";
fi;
# ---------------------------------------------------------------------------
# Test the main code base
- $PYTHONCMD -m pytest --cov=fissa --cov-report term --cov-report xml --cov-config .coveragerc --junitxml=testresults.xml
# Build the documentation
- if [[ "$BUILD_DOCS" == "true" ]]; then
make -C docs html;
fi;
# Test the notebooks
- if [[ "$TEST_NOTEBOOKS" != "" && "$TEST_NOTEBOOKS" != "false" ]]; then
$PYTHONCMD -m pytest --nbsmoke-run ./examples/;
fi;
- if [[ "$TEST_NOTEBOOKS" != "" && "$TEST_NOTEBOOKS" != "false" ]]; then
$PYTHONCMD -m pytest --nbsmoke-lint ./examples/;
fi;
###############################################################################
# commands to run after tests are done
after_script:
# Show where we ended up
- pwd
# Go back to the repository directory, just in case
# Show what results files there are
- cd ${REPOPATH} && ls -alh;
# ---------------------------------------------------------------------------
# Move results and coverage files into appropriate places
- if [ "$SHIPPABLE" = "true" ] && [ -f testresults.xml ]; then
mv testresults.xml shippable/testresults/;
fi;
if [ "$SHIPPABLE" = "true" ] && [ -f coverage.xml ]; then
cp coverage.xml shippable/codecoverage/;
fi;
###############################################################################
after_success:
# Only run coveralls on Travis. When running on a public Travis-CI, the
# repo token is automatically inferred, but to run coveralls on Shippable
# the repo token needs to be specified in a .coveralls.yml (sketched after
# the block below) or as an environment variable COVERALLS_REPO_TOKEN. This
# token should be kept hidden
# from public viewing, either by encrypting the token or running on a
# private build.
# We ignore coveralls failures because the coveralls server is not 100%
# reliable and we don't want the CI to report a failure just because the
# coverage report wasn't published.
- |
if [[ "$TRAVIS" = "true" && "$SHIPPABLE" != "true" ]]; then
curl -s -S -L --connect-timeout 5 --retry 6 https://codecov.io/bash -o codecov-upload.sh || echo "Codecov script failed to download";
travis_retry bash codecov-upload.sh || echo "Codecov push failed";
$PYTHONCMD -m pip install coveralls;
fi;
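# A minimal .coveralls.yml for the Shippable case described above might
# contain just the following (the token value is a placeholder and should
# be kept out of public view, e.g. by encrypting it):
#     repo_token: YOUR_COVERALLS_REPO_TOKEN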
###############################################################################
# Steps to take before archiving on Shippable (does nothing on Travis)
before_archive:
# Have shippable archive the environment.yml artifact by putting it in
# the REPO/shippable folder. This is available to download as a tar file for
# each build.
# Since this build was successful, you can share it for users to install from
# with the command `conda env create -f environment.yml` and know they have
# a working build.
# If you want to save this file on Travis, you will need to turn on the
# artifacts addon (or do something else with it). See here for details
# https://docs.travis-ci.com/user/uploading-artifacts/
- if [ "$SHIPPABLE" = "true" ] && [ -f environment.yml ]; then
cp environment.yml shippable/;
fi;
###############################################################################
# Enable archiving of artifacts on Shippable (does nothing on Travis)
archive: true