# Copyright (c) 2017-2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
dist: trusty
language: python
python:
- '2.7'
- '3.6'
services:
- docker
install:
# We use selitvin/petastorm_ci:latest as the docker image for running on travis-ci.
# To rebuild the CI image, see docker/Makefile. The image comes with all petastorm dependencies
# preinstalled; we still rerun the dependency installation on each build to refresh the
# dependencies' versions if needed.
- docker pull selitvin/petastorm_ci:latest
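# (For reference only: rebuilding the image locally would look roughly like running make inside
# the docker/ directory and pushing the result, e.g. "make -C docker && docker push selitvin/petastorm_ci:latest";
# the exact targets live in docker/Makefile and may differ.)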
before_script:
# Run the docker container for an hour; all subsequent docker commands execute inside this container.
- docker run -v `pwd`:/petastorm --name petastorm_ci selitvin/petastorm_ci:latest /bin/sh -c "sleep 3600" &
# Wait for the container to start
- sleep 5
- export RUN="docker exec petastorm_ci bash /run_in_venv.sh ${TRAVIS_PYTHON_VERSION}"
- export PYTEST="pytest --timeout=180 --durations=0 -v --cov=./ --cov-append --color=yes"
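# For illustration (assuming TRAVIS_PYTHON_VERSION=3.6), a step such as "$RUN pip list" expands
# roughly to:
#   docker exec petastorm_ci bash /run_in_venv.sh 3.6 pip list
# i.e. every $RUN-prefixed command below runs inside the long-lived petastorm_ci container, in the
# virtual environment that matches the Python version of the current build matrix entry.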
# Refresh our dependencies' versions, as the ones baked into the CI image may go stale
- $RUN pip install -e /petastorm/[test,tf,torch,docs,opencv]
# Upgrade pip to avoid weird failures, such as failing to install torchvision.
# This will use the requirements from setup.py and install them into the Travis virtual environment.
# [tf] chooses to depend on the CPU version of tensorflow (alternatively, could do [tf_gpu]).
# pyarrow was compiled against a newer version of numpy than we require, so we need to upgrade it
# (or, optionally, install pyarrow from source instead of through binaries).
- $RUN pip install --upgrade numpy
- $RUN pip list
# We run a bunch of linting in before_script to fail fast and not run unit tests in case of lint failure.
- $RUN flake8 . --count --show-source --statistics
- $RUN flake8 . --count --exit-zero --max-complexity=10 --statistics
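# (The second flake8 pass uses --exit-zero, so the complexity report is informational only and
# never fails the build; the first pass is the one that gates on style errors.)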
- $RUN pylint --rcfile=.pylintrc petastorm examples -f parseable -r n
# enable core dumps
- $RUN ulimit -c unlimited -S
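# (Assuming the default core_pattern, a crashing test leaves a "core*" file in the working
# directory, which the after_failure step below locates and feeds to gdb.)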
script:
# Build documentation
- $RUN bash -c "cd /petastorm/docs/autodoc && pwd && make html"
# Run pytest in two passes: first the tests that have to be forked, then the ones that do not.
# `-Y` causes the datasets generated for testing to be persisted between runs in the pytest cache.
- $RUN $PYTEST -m "forked" --forked -Y
--ignore=examples/mnist/tests/test_pytorch_mnist.py
--ignore=petastorm/tests/test_pytorch_utils.py
--ignore=petastorm/tests/test_pytorch_dataloader.py
petastorm examples
- $RUN $PYTEST -m "not forked" -v -Y
--ignore=examples/mnist/tests/test_pytorch_mnist.py
--ignore=petastorm/tests/test_pytorch_utils.py
--ignore=petastorm/tests/test_pytorch_dataloader.py
petastorm examples
# Tensorflow and pytorch tests conflict (segfault), so they are split into separate runs.
- $RUN $PYTEST
    examples/mnist/tests/test_pytorch_mnist.py
    petastorm/tests/test_pytorch_dataloader.py
    petastorm/tests/test_pytorch_utils.py
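# For reference, the forked/non-forked split relies on the standard pytest marker mechanism; a test
# opts into the first pass with something like this (hypothetical) snippet:
#   import pytest
#   @pytest.mark.forked
#   def test_reader_in_subprocess():
#       ...
# `-m "forked"` selects such tests and `--forked` (pytest-forked) runs each of them in its own
# subprocess, so a segfault cannot take down the whole session.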
after_success:
- codecov
after_failure:
# Only upon failure, install gdb to process the core dump
- sudo apt-get install -y gdb
- COREFILE=$(find . -maxdepth 2 -name "core*" | head -n 1)
# NOTE: without debug info, the stack trace won't be very useful.
# If it's possible to run with python2.7-dbg, then the gdb "py-bt" hook will do its magic.
- if [[ -f "$COREFILE" ]]; then gdb -c "$COREFILE" python -ex "thread apply all bt" -ex "set pagination 0" -batch; fi
before_deploy:
# Build a python wheel before deployment
- $RUN python setup.py bdist_wheel
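# For illustration, this leaves an artifact such as (hypothetical version and wheel tags)
#   dist/petastorm-0.x.y-py2.py3-none-any.whl
# which is what the "file: dist/*.whl" glob of the releases provider below picks up.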
deploy:
# Disabling pypi deployment. There is an issue with authentication at the moment
- provider: pypi
  user: uber
  password:
    secure: OEyw3NVW1kuSWSJAW+OBSZUk4uSSnkZnQ4njM1EFooTDZ2axAe69y+Z/PRjuLup7bbdxgq8DUXzK+jUWs07vcPYUKp0ek9Gni9gmv08B62P2plupfw35KCjxBTd3RY1cWpkX425ePucuAH1PojY5J0LU4BCPOmyzdpAniLdmcXwJTM1EsQXCub3v/TBK1dxyFMHUyW9WqRStQI1xF8mxhVupnZ/spN+ixvqTMakBczqEttd8AsoSXQtvEUo5ot082/U8LryvrHEsHH52hdbWlSR8TxbC1fK8LgEeXoeqfhOiYNtdOTB1+cGmdJSvjbHCq4OUfl1sbTbBSxrkLNeFAQztAQqIybp1F5PjjVKAr3mHB7HpnMoUcDQiVZ8srfwBpCGZlsj6BsNxYGidiSXvKyKHO1TnTHxNhXIjPzAxjPHXFm7HH9kCXavGEMwXMn06TNq9uP/e6ekd0qE2l+x7xc9i7bdSpIXC9s1uCNqbObiV+/BNXvtDjAVxmp4d+foIPwLx6Qz42IFzPb96lY2Ifow8I8406F/Y5t003xFwnkkgbfI6Ya8gJaJIP6pYGuWJY/lj6/5nElroeiZg1G3rCRBfxwG0RJ2RVhNpuqAcXrQ7SUQ/bbDKUc5psrE920kpF/V3j5tA83pSyJW49IjNQxRABGviPh7aSVWFBeyh0BE=
  distributions: sdist bdist_wheel
  on:
    tags: true
    python: 3.6
# Push all Python wheels that were created to the GitHub release
- provider: releases
  skip_cleanup: true
  api_key:
    secure: I96u6KqgSj0Cqx2NvLLLgw6rx7+gN4/6wdptfBrP1zxppMERu/iaLYLgDwDKRXzgTIrx5LopFIllf9/kPcUJgDFj/AxESo6aukzcYK3tB2OhEYIAxYcZf8Kt3aV+AHp/TIDdZ9sGbVLxypuxSXqiAc5dJw8S0Njja/9Qgxe4jqNXNLjLxEb3qlCrYJFEA9MxxPgH/QsWZ6M43hR5gNrGWrhSaIHSjMXXkzRjQG+bGApFf+XRx7le2M5vIeaw1K/osJ930QEjDTnp0v5hFvkB9F1buC17rZO8Xy0KAAhB1+SaEWtS4N0lFIVYE1b3Ke1ek82kW1d7UDEAcEL6ccICboKtvhFfgb1MXVQkdRq4HlhwHl8n4FJIztbwXaQW1aISR5x+m6RIFwMuI+U/MjcVVt+CEgo8X4HATb2pdgWnFcS649t7lhrx7HCBNZpUNmfoZ/YyNgPdQf+vreCNxWOpqi4UacH890GhLFUFXtijeL7xGXWIg7ugQmlK27CfM1mMN1Vp9NTRAXA0x7HuqckWKY85lCJMT+3IJMgVjz+BdGZDQ5RJI22FmRDNuWrH/Mio77EK+sOsKm/X3Q8/Sb8uwU+Ft1Vb7aikQ2NIl89uQQdVxH4TlQw6/VQSZODacTUwMz5CkB0RJNrlbZpUpPrHiTVa4dRo7/34qe7iGA7XtfY=
  file_glob: true
  file: dist/*.whl
  on:
    tags: true
    python: 3.6
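# Note: with the "on:" conditions above, both providers deploy only for builds of a git tag running
# the Python 3.6 job; regular branch builds skip the deploy stage entirely.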