Skip to content

Commit 57a88cd

Browse files
mthrok authored and facebook-github-bot committed
Import torchaudio 20200723
Summary: Import torchaudio 20200723 #814 Reviewed By: fmassa Differential Revision: D22666393 fbshipit-source-id: 50df07b5c158fe4e95ada7ea54381b2e26f6aecd
1 parent 4f19eef commit 57a88cd

File tree

111 files changed

+6697
-902
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

111 files changed

+6697
-902
lines changed

.circleci/config.yml

Lines changed: 1342 additions & 0 deletions
Large diffs are not rendered by default.

.circleci/config.yml.in

Lines changed: 631 additions & 0 deletions
Large diffs are not rendered by default.

.circleci/regenerate.py

Lines changed: 180 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,180 @@
1+
#!/usr/bin/env python3
2+
3+
"""
4+
This script should use a very simple, functional programming style.
5+
Avoid Jinja macros in favor of native Python functions.
6+
7+
Don't go overboard on code generation; use Python only to generate
8+
content that can't be easily declared statically using CircleCI's YAML API.
9+
10+
Data declarations (e.g. the nested loops for defining the configuration matrix)
11+
should be at the top of the file for easy updating.
12+
13+
See this comment for design rationale:
14+
https://github.com/pytorch/vision/pull/1321#issuecomment-531033978
15+
"""
16+
17+
import jinja2
18+
import yaml
19+
import os.path
20+
21+
22+
# Python versions for which binary-build and unit-test jobs are generated.
PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
23+
24+
25+
def build_workflows(prefix='', upload=False, filter_branch=None, indentation=6):
    """Render the binary-build workflow list as YAML indented by `indentation`.

    Emits the third-party download job first, then one workflow pair per
    (build type, OS, Python version) combination.
    """
    workflows = list(build_download_job(filter_branch))
    for build_type in ("wheel", "conda"):
        for platform in ("linux", "macos", "windows"):
            for py_version in PYTHON_VERSIONS:
                workflows.extend(
                    build_workflow_pair(
                        build_type, platform, py_version, filter_branch, prefix, upload
                    )
                )
    return indent(indentation, workflows)
34+
35+
36+
def build_download_job(filter_branch):
    """Return the single workflow entry that fetches third-party sources.

    When `filter_branch` is truthy, a branch/tag filter is attached.
    """
    job = {"name": "download_third_parties_nix"}
    if filter_branch:
        job["filters"] = gen_filter_branch_tree(filter_branch)
    return [{"download_third_parties_nix": job}]
44+
45+
46+
def build_workflow_pair(btype, os_type, python_version, filter_branch, prefix='', upload=False):
    """Build the workflow entries for one (btype, os, python) configuration.

    Always emits the base build workflow; when `upload` is set, also emits the
    upload workflow and — for nightly Python-3 Linux/Windows builds — a
    smoke-test workflow.
    """
    w = []
    base_workflow_name = "{prefix}binary_{os_type}_{btype}_py{python_version}".format(
        prefix=prefix,
        os_type=os_type,
        btype=btype,
        python_version=python_version,
    )

    w.append(generate_base_workflow(base_workflow_name, python_version, filter_branch, os_type, btype))

    if upload:
        # Renamed from the misleading `is_py3_linux` — the condition also
        # covers Windows. The Python-2 guard is kept for safety even though
        # PYTHON_VERSIONS currently only lists 3.x versions.
        can_smoke_test = os_type in ['linux', 'windows'] and not python_version.startswith("2.")

        w.append(generate_upload_workflow(base_workflow_name, filter_branch, btype))

        if filter_branch == 'nightly' and can_smoke_test:
            pydistro = 'pip' if btype == 'wheel' else 'conda'
            w.append(generate_smoketest_workflow(pydistro, base_workflow_name, filter_branch, python_version, os_type))

    return w
69+
70+
71+
def generate_base_workflow(base_workflow_name, python_version, filter_branch, os_type, btype):
    """Return the base build workflow entry for one configuration."""
    params = {
        "name": base_workflow_name,
        "python_version": python_version,
    }
    # Linux and macOS builds consume the pre-downloaded third-party sources.
    if os_type in ('linux', 'macos'):
        params['requires'] = ['download_third_parties_nix']
    if filter_branch:
        params["filters"] = gen_filter_branch_tree(filter_branch)
    return {f"binary_{os_type}_{btype}": params}
85+
86+
87+
def gen_filter_branch_tree(*branches):
    """Return a CircleCI filter limiting runs to `branches` and RC tags."""
    # Raw string so the backslash reaches CircleCI unescaped.
    rc_tag_pattern = r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"
    return {
        "branches": {"only": list(branches)},
        "tags": {"only": rc_tag_pattern},
    }
98+
99+
100+
def generate_upload_workflow(base_workflow_name, filter_branch, btype):
    """Return the upload workflow entry that follows a build workflow."""
    job = {
        "name": base_workflow_name + "_upload",
        # Upload credentials live in the org-member CircleCI context.
        "context": "org-member",
        "requires": [base_workflow_name],
    }
    if filter_branch:
        job["filters"] = gen_filter_branch_tree(filter_branch)
    return {f"binary_{btype}_upload": job}
111+
112+
113+
def generate_smoketest_workflow(pydistro, base_workflow_name, filter_branch, python_version, os_type):
    """Return the smoke-test workflow entry gated on the upload workflow."""
    job = {
        "name": f"{base_workflow_name}_smoke_test_{pydistro}",
        # The smoke test installs the just-uploaded package, so it must wait
        # for the corresponding upload workflow.
        "requires": [base_workflow_name + "_upload"],
        "python_version": python_version,
    }
    if filter_branch:
        job["filters"] = gen_filter_branch_tree(filter_branch)
    return {f"smoke_test_{os_type}_{pydistro}": job}
130+
131+
132+
def indent(indentation, data_list):
    """Dump `data_list` to YAML, indenting every line after the first.

    The first line stays flush so the result can be spliced into the Jinja
    template at an already-indented position.
    """
    separator = "\n" + " " * indentation
    return separator.join(yaml.dump(data_list).splitlines())
134+
135+
136+
def unittest_workflows(indentation=6):
    """Render the unit-test workflow list as YAML indented by `indentation`.

    Emits one job per (os, device, python) combination — skipping the
    nonexistent macOS GPU workers — plus a single stylecheck job, prefixed
    with the third-party download job.
    """
    jobs = []
    jobs += build_download_job(None)
    for os_type in ["linux", "windows", "macos"]:
        for device_type in ["cpu", "gpu"]:
            if os_type == "macos" and device_type == "gpu":
                continue

            for i, python_version in enumerate(PYTHON_VERSIONS):
                job = {
                    "name": f"unittest_{os_type}_{device_type}_py{python_version}",
                    "python_version": python_version,
                }

                if device_type == 'gpu':
                    # GPU jobs only run on master and nightly branches.
                    job['filters'] = gen_filter_branch_tree('master', 'nightly')

                if os_type != "windows":
                    job['requires'] = ['download_third_parties_nix']

                jobs.append({f"unittest_{os_type}_{device_type}": job})

                # Emit exactly one stylecheck job, tied to the first Python
                # version of the linux/cpu configuration.
                if i == 0 and os_type == "linux" and device_type == "cpu":
                    jobs.append({
                        # Fix: plain string instead of the original
                        # f-string that had no placeholders.
                        "stylecheck": {
                            "name": f"stylecheck_py{python_version}",
                            "python_version": python_version,
                        }
                    })
    return indent(indentation, jobs)
166+
167+
168+
if __name__ == "__main__":
    # Regenerate .circleci/config.yml from config.yml.in, both of which live
    # in the same directory as this script.
    d = os.path.dirname(__file__)
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(d),
        lstrip_blocks=True,
        autoescape=False,
    )
    # Render before opening the output file: the original opened (and thus
    # truncated) config.yml first, so a template error destroyed the
    # existing, working config.
    rendered = env.get_template('config.yml.in').render(
        build_workflows=build_workflows,
        unittest_workflows=unittest_workflows,
    )
    with open(os.path.join(d, 'config.yml'), 'w') as f:
        f.write(rendered)
Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
# this Dockerfile is for torchaudio smoke test, it will be created periodically via CI system
# if you need to do it locally, follow below steps once you have Docker installed
# assuming you're within the directory where this Dockerfile located
# $ docker build . -t torchaudio/smoketest

# if you want to push to aws ecr, make sure you have the rights to write to ECR, then run
# $ eval $(aws ecr get-login --region us-east-1 --no-include-email)
# $ export MYTAG=localbuild ## you can choose whatever tag you like
# $ docker tag torchaudio/smoketest 308535385114.dkr.ecr.us-east-1.amazonaws.com/torchaudio/smoke_test:${MYTAG}
# $ docker push 308535385114.dkr.ecr.us-east-1.amazonaws.com/torchaudio/smoke_test:${MYTAG}

FROM ubuntu:latest

# Install sox libraries plus a Miniconda-based Python 3 into /usr/local, then
# remove the download tools and caches in the same layer to keep the image small.
RUN apt-get -qq update && apt-get -qq -y install curl bzip2 sox libsox-dev libsox-fmt-all \
    && curl -sSL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o /tmp/miniconda.sh \
    && bash /tmp/miniconda.sh -bfp /usr/local \
    && rm -rf /tmp/miniconda.sh \
    && conda install -y python=3 \
    && conda update conda \
    && apt-get -qq -y remove curl bzip2 \
    && apt-get -qq -y autoremove \
    && apt-get autoclean \
    && rm -rf /var/lib/apt/lists/* /var/log/dpkg.log \
    && conda clean --all --yes

# NOTE(review): Miniconda is installed to /usr/local above, so /opt/conda/bin
# likely does not exist in this image -- confirm whether this entry is needed.
ENV PATH /opt/conda/bin:$PATH

# One conda environment per Python version covered by the smoke test.
RUN conda create -y --name python3.6 python=3.6
RUN conda create -y --name python3.7 python=3.7
RUN conda create -y --name python3.8 python=3.8
# bash is required so that `source` works in the RUN commands below.
SHELL [ "/bin/bash", "-c" ]
RUN echo "source /usr/local/etc/profile.d/conda.sh" >> ~/.bashrc
RUN source /usr/local/etc/profile.d/conda.sh && conda activate python3.6 && conda install -y -c conda-forge sox && conda install -y numpy
RUN source /usr/local/etc/profile.d/conda.sh && conda activate python3.7 && conda install -y -c conda-forge sox && conda install -y numpy
RUN source /usr/local/etc/profile.d/conda.sh && conda activate python3.8 && conda install -y -c conda-forge sox && conda install -y numpy
CMD [ "/bin/bash"]

.circleci/test/test_sort_yaml.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
#!/usr/bin/env python3

"""
To compare new version with previous:

./regenerate.sh
meld <(git show HEAD:./config.yml | ./sort-yaml.py) <(cat config.yml | ./sort-yaml.py)
"""

import sys
import yaml

# Round-trip the YAML document from stdin to stdout with keys sorted, so two
# generated config files can be diffed structurally rather than textually.
document = yaml.load(sys.stdin, Loader=yaml.FullLoader)
sys.stdout.write(yaml.dump(document, sort_keys=True))

.circleci/unittest/linux/README.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
This directory contains:
2+
3+
- docker
4+
Docker image definition and scripts to build and update Docker image for unittest.
5+
- scripts
6+
Scripts used by CircleCI to run unit tests.
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# Exclude everything from the build context except the scripts directory.
*
!scripts
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# Generated/auxiliary files (Dockerfile.tmp is written by the build script).
scripts/build_third_parties.sh
Dockerfile.tmp
Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
FROM ubuntu:18.04 as builder

RUN apt update -q

################################################################################
# Build Kaldi
################################################################################
# Build prerequisites for Kaldi's tools and featbin targets.
RUN apt install -q -y \
    autoconf \
    automake \
    bzip2 \
    g++ \
    gfortran \
    git \
    libatlas-base-dev \
    libtool \
    make \
    python2.7 \
    python3 \
    sox \
    subversion \
    unzip \
    wget \
    zlib1g-dev

# KALDI uses MKL as a default math library, but we are going to copy featbin binaries and dependent
# shared libraries to the final image, so we use ATLAS, which is easy to reinstall in the final image.
RUN git clone --depth 1 https://github.com/kaldi-asr/kaldi.git /opt/kaldi && \
    cd /opt/kaldi/tools && \
    make -j $(nproc) && \
    cd /opt/kaldi/src && \
    ./configure --shared --mathlib=ATLAS --use-cuda=no && \
    make featbin -j $(nproc)

# Copy featbins and dependent libraries
RUN bash /scripts/copy_kaldi_executables.sh /opt/kaldi /kaldi

################################################################################
# Build the final image
################################################################################
# BASE_IMAGE is a placeholder; the build script substitutes it via sed before
# invoking `docker build` (ubuntu:18.04 for CPU, nvidia/cuda:* for GPU).
FROM BASE_IMAGE
RUN apt update && apt install -y \
    g++ \
    gfortran \
    git \
    libatlas3-base \
    wget \
    curl \
    make \
    file \
    sox \
    libsox-dev \
    libsox-fmt-all \
    cmake \
    pkg-config \
    && rm -rf /var/lib/apt/lists/*
# Bring in only the Kaldi featbin executables and their shared libraries,
# extracted in the builder stage, and make them resolvable at runtime.
COPY --from=builder /kaldi /kaldi
ENV PATH="${PATH}:/kaldi/bin" LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/kaldi/lib"
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
#!/usr/bin/env bash
#
# Build and push the Docker image used for torchaudio Linux unit tests.
# Usage: <script> <CUDA_VERSION> where CUDA_VERSION is "cpu", "9.2" or "10.1".

set -euo pipefail

if [ $# -ne 1 ]; then
    printf "Usage %s <CUDA_VERSION>\n\n" "$0"
    exit 1
fi

if [ "$1" = "cpu" ]; then
    base_image="ubuntu:18.04"
    image="pytorch/torchaudio_unittest_base:manylinux"
elif [[ "$1" =~ ^(9\.2|10\.1)$ ]]; then
    # Fix: dots escaped. The original pattern ^(9.2|10.1)$ let "." match any
    # character, so strings like "912" were wrongly accepted as versions.
    base_image="nvidia/cuda:$1-runtime-ubuntu18.04"
    image="pytorch/torchaudio_unittest_base:manylinux-cuda$1"
else
    printf "Unexpected <CUDA_VERSION> string: %s" "$1"
    exit 1
fi

cd "$( dirname "${BASH_SOURCE[0]}" )"

# docker build also accepts reading from STDIN,
# but in that case no context (other files) can be passed, so we write out
# the substituted Dockerfile instead.
sed "s|BASE_IMAGE|${base_image}|g" Dockerfile > Dockerfile.tmp
docker build -t "${image}" -f Dockerfile.tmp .
docker push "${image}"

0 commit comments

Comments
 (0)