# Execute notebooks and convert them to Markdown and HTML
name: Convert Notebooks
on:
  workflow_dispatch:
  push:
    branches:
      - 'main'
      - 'latest'
    paths:
      - 'notebooks/**.ipynb'
      - 'notebooks/**.py'
      - 'requirements.txt'
      - 'README.md'
      - '.ci/*'
      - '.github/workflows/convert_notebooks.yml'
  pull_request:
    paths:
      - '.github/workflows/convert_notebooks.yml'
  schedule:
    - cron: '0 0 * * *'
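
# Cancel any in-progress run of this workflow for the same branch or pull request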
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
permissions:
  contents: read
jobs:
  build:
    strategy:
      fail-fast: false
      # Matrix is unnecessary here, but this allows easy copying of steps from treon.yml
      matrix:
        os: [ubuntu-22.04]
        python: [3.9]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Maximize build space
        run: |
          sudo rm -rf /usr/local/lib/android # will release about 10 GB if you don't need Android
          sudo rm -rf /usr/share/dotnet # will release about 20 GB if you don't need .NET
          sudo rm -rf /opt/ghc
          echo "Available storage:"
          df -h
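      # Add swap space, presumably to avoid out-of-memory failures when executing memory-hungry notebooks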
      - name: Set Swap Space
        uses: pierotofy/set-swap-space@49819abfb41bd9b44fb781159c033dba90353a7c # master
        with:
          swap-size-gb: 10
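      # pandoc and TeX Live are most likely used by the nbconvert-based conversion to reStructuredText below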
      - name: Install required packages for rst conversion
        run: |
          sudo apt-get update && sudo apt-get install texlive texlive-latex-extra pandoc -y
        shell: bash

      #### Installation/preparation ####
      #
      # These steps are shared with treon.yml
      # This should ideally be a reusable workflow
      - name: Checkout repository
        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
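      # Load shared cache keys (PIP_CACHE_KEY, FILES_CACHE_KEY, USER_CACHE_KEY) from .github/workflows/.env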
      - name: Dotenv Action
        id: dotenv
        uses: xom9ikk/dotenv@ac290ca23a42155a0cba1031d23afa46240116a9 # v2.3.0
        with:
          path: ./.github/workflows
      - name: Set up Python
        uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
        with:
          python-version: ${{ matrix.python }}
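      # libsndfile is needed on Linux for audio processing (it backs the Python soundfile package used by audio notebooks)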
      - name: Install required packages
        run: |
          if [ "$RUNNER_OS" == "Linux" ]; then
            sudo apt-get install libsndfile1 -y
          fi
        shell: bash
      - name: Cache OpenVINO Pip Packages
        id: cachepip
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: |
            pipcache
          key: ${{ env.PIP_CACHE_KEY }}-${{ matrix.os }}-${{ matrix.python }}
      # Cache specific files to reduce downloads or prevent network issues
      - name: Cache Files
        id: cachefiles
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: |
            # NOTE: when modifying cache paths, update FILES_CACHE_KEY in .env
            # and change cache paths in both treon.yml and convert_notebooks.yml
            case_00030.zip
            notebooks/ct-segmentation-quantize/kits19_frames_1
            notebooks/pytorch-post-training-quantization-nncf/output/tiny-imagenet-200.zip
            # omz cache location is set to this with test_replace
            notebooks/optical-character-recognition/open_model_zoo_cache
            notebooks/ct-scan-live-inference/kits19_frames_1
            notebooks/pytorch-quantization-aware-training/data/tiny-imagenet-200.zip
          key: ${{ env.FILES_CACHE_KEY }}
      # PaddleGAN stores cache in ppgan directory in CACHE_DIR
      - name: Set CACHE_DIR
        shell: bash
        run: |
          python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))'
          # replace backslashes with forward slashes for Windows paths
          python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))' | sed -e 's/\\/\//g' >> $GITHUB_ENV
      # PaddleHub stores cache in directory pointed to by HUB_HOME environment variable
      - name: Set HUB_HOME
        shell: bash
        run: |
          echo HUB_HOME=${{ env.CACHE_DIR }}/.paddlehub >> $GITHUB_ENV
      # Cache PaddlePaddle cache directories to prevent CI failing due to network/download issues
      - name: Cache PaddlePaddle cache directories (per OS)
        id: cacheusercache
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: |
            ${{ env.HUB_HOME }}
            ${{ env.CACHE_DIR }}/paddle
            ${{ env.CACHE_DIR }}/ppgan
          key: ${{ env.USER_CACHE_KEY }}-${{ runner.os }}
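      # On a pip cache miss, pre-download the OpenVINO wheels into a separate copy of the cache so that
      # only the OpenVINO packages end up in the saved pip cache (see "Make pipcache directory" below)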
      - name: Cache openvino packages
        if: steps.cachepip.outputs.cache-hit != 'true'
        run: |
          python -m pip install --upgrade pip==21.3.*
          mkdir pipcache
          python -m pip install --cache-dir pipcache --no-deps openvino openvino-dev nncf
          cp -r pipcache pipcache_openvino
          python -m pip uninstall -y openvino openvino-dev nncf
      # Download a small dataset to use for testing purposes in monai-kidney training notebook
      - name: Download CT files
        if: steps.cachefiles.outputs.cache-hit != 'true'
        run: |
          curl -O https://storage.openvinotoolkit.org/data/test_data/openvino_notebooks/kits19/case_00030.zip
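      # case_00030 is duplicated as case_00001, presumably so the notebook has more than one case to work with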
      - name: Copy CT files
        run: |
          mkdir notebooks/ct-segmentation-quantize/kits19
          mkdir notebooks/ct-segmentation-quantize/kits19/kits19_frames
          unzip case_00030.zip
          cp -r case_00030 case_00001
          mv case_00030 notebooks/ct-segmentation-quantize/kits19/kits19_frames
          mv case_00001 notebooks/ct-segmentation-quantize/kits19/kits19_frames
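      # Datasets used by more than one notebook (librispeech test-clean, CIFAR-10) are downloaded once here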
      - name: Download shared datasets
        run: |
          mkdir -p notebooks/data/librispeech
          wget -O notebooks/data/librispeech/test-clean.tar.gz http://openslr.elda.org/resources/12/test-clean.tar.gz
          tar -xvf notebooks/data/librispeech/test-clean.tar.gz
          mkdir -p notebooks/data/cifar10
          wget -O notebooks/data/cifar10/cifar-10-python.tar.gz https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
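      # ipykernel registers the 'openvino_env' Jupyter kernel that the notebooks are expected to run under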
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip==21.3.*
          python -m pip install -r .ci/dev-requirements.txt --cache-dir pipcache
          python -m ipykernel install --user --name openvino_env
      # Cache OpenVINO packages. mv works cross-platform
      - name: Make pipcache directory with OpenVINO packages
        if: steps.cachepip.outputs.cache-hit != 'true'
        run: |
          mv pipcache pipcache_full
          mv pipcache_openvino pipcache
      # Create list of installed pip packages that can be downloaded as artifacts
      # to verify the exact environment of a specific test run
      - name: Pip freeze
        run: |
          python -m pip freeze
          python -m pip freeze > pip-freeze-${{ github.sha }}-${{ matrix.os }}-${{ matrix.python }}.txt
      - name: Archive pip freeze
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        with:
          name: pip-freeze-${{ matrix.os }}-${{ matrix.python }}
          path: pip-freeze-${{ github.sha }}-${{ matrix.os }}-${{ matrix.python }}.txt

      #### End installation/preparation
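      # Execute and convert the notebooks; the generated reStructuredText ends up in rst_files/,
      # which is uploaded as an artifact in the next step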
      - name: convert_notebooks
        shell: bash
        run: .ci/convert_notebooks.sh
      - name: Save reStructuredText files
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        with:
          name: rst_files
          path: rst_files