# test-ssh.yml
name: SSH-Test
on:
workflow_dispatch:
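  # Manual trigger only. Presumably this can also be started from the command line,
  # assuming the GitHub CLI is installed, e.g.:
  #   gh workflow run test-ssh.yml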
concurrency:
group: >-
${{ github.workflow }}-
${{ github.ref_type }}-
${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
env:
  # Without this, UTF-8 content may be interpreted as ASCII and cause errors.
LANG: C.UTF-8
IPP_DISABLE_JS: "1"
JUPYTER_PLATFORM_DIRS: "1"
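  # Note on the env block above: IPP_DISABLE_JS appears to skip building ipyparallel's
  # JupyterLab extension, and JUPYTER_PLATFORM_DIRS opts Jupyter into platformdirs-style
  # paths; both readings are assumptions based on common usage, set here to keep CI lean.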
jobs:
test:
runs-on: ${{ matrix.runs_on || 'ubuntu-20.04' }}
timeout-minutes: 50
strategy:
      # Keep running even if one variation of the job fails
fail-fast: false
matrix:
include:
- python: "3.9"
cluster_type: ssh
- python: "3.8"
cluster_type: ssh
runs_on: windows-2019
steps:
- uses: actions/checkout@v3
- name: Cache conda environment
uses: actions/cache@v3
with:
path: |
~/conda
key: conda
- name: Cache node_modules
uses: actions/cache@v3
with:
path: |
node_modules
key: ${{ runner.os }}-yarn-${{ hashFiles('yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Set environment variables
if: ${{ matrix.env }}
env:
MATRIX_ENV: ${{ toJSON(matrix.env) }}
run: |
python3 <<EOF
import json
import os
matrix_env = json.loads(os.environ["MATRIX_ENV"])
with open(os.environ["GITHUB_ENV"], "a") as f:
for key, value in matrix_env.items():
f.write(f"{key}={value}\n")
EOF
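          # This step serializes any matrix.env mapping to JSON and appends each key=value
          # pair to $GITHUB_ENV so later steps see those variables. No entry in the current
          # matrix defines `env`, so the step is skipped on both variants.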
      - name: Docker info
run: |
docker version
docker images
- name: Set up docker-compose for ssh linux launcher
if: ${{ matrix.cluster_type == 'ssh' && !contains(matrix.runs_on, 'windows') }}
run: |
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
cd ci/ssh
docker-compose -f linux_docker-compose.yaml up -d --build
# retrieve id_rsa file for public key authentication
mkdir ~/.ssh/
docker cp ssh_sshd_1:/home/ciuser/.ssh/id_rsa ~/.ssh/id_rsa
cat ~/.ssh/id_rsa
          # check ssh connection and accept the host key
ssh -o "StrictHostKeyChecking no" ciuser@127.0.0.1 -p 2222 -v ls /
          # just to see the IP address of the runner VM
ifconfig
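          # Net effect, as read from the commands above: linux_docker-compose.yaml brings up
          # an sshd container reachable on port 2222, and its id_rsa private key is copied to
          # the runner so the SSH launcher tests can authenticate without a password prompt.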
- name: Set up docker-compose for ssh windows launcher
if: ${{ matrix.cluster_type == 'ssh' && contains(matrix.runs_on, 'windows') }}
env:
SSH_HOST: ciuser@127.0.0.1
SSH_PORT: 2222
CODE_ROOT: c:\src\ipyparallel
run: |
cd ci/ssh
docker-compose -f win_docker-compose.yaml up -d --build
# retrieve id_rsa file for public key authentication
mkdir $env:USERPROFILE/.ssh/
docker run ipyparallel-sshd powershell.exe -Command "type C:\Users\ciuser\.ssh\id_rsa" | out-file -encoding ascii $env:USERPROFILE/.ssh/id_rsa
          # install a newer version of openssh (removes the "GetConsoleMode on STD_INPUT_HANDLE failed with 6" error in pytest)
choco install openssh --pre
mv C:\Windows\System32\OpenSSH C:\Windows\System32\_OpenSSH
          # check ssh connection and accept the host key
ssh -o "StrictHostKeyChecking no" $env:SSH_HOST -p $env:SSH_PORT -v dir c:\
          # copy ipyparallel code to the docker container (compress, scp, then expand)
ssh $env:SSH_HOST -p $env:SSH_PORT mkdir $env:CODE_ROOT
# zip ipyparallel files (excluding files probably not needed)
cd ../..
$exclude = @("__pycache__","node_modules")
$files = Get-ChildItem -Path "." -Exclude $exclude
Compress-Archive -Path $files -DestinationPath ipyparallel.zip -CompressionLevel Fastest
          # copy the archive into the docker container (we need to do it over ssh since docker copy or mount doesn't work in Hyper-V)
scp -P $env:SSH_PORT ipyparallel.zip ${env:SSH_HOST}:${env:CODE_ROOT}
          # extract the ipyparallel archive
ssh $env:SSH_HOST -p $env:SSH_PORT powershell.exe -Command "Expand-Archive -Path $env:CODE_ROOT\ipyparallel.zip -DestinationPath $env:CODE_ROOT"
# pip install ipyparallel files
#ssh $env:SSH_HOST -p $env:SSH_PORT "cd $env:CODE_ROOT && pip install -e ."
ssh $env:SSH_HOST -p $env:SSH_PORT "pip install -e file://c:/src/ipyparallel#egg=ipyparallel[test]"
          # just to see the IP address of the runner VM
ipconfig
echo "hostname:"
hostname
echo "nslookup $env:computername"
nslookup $env:computername
echo "hostname - python code:"
python -c "import socket;print('hostname:',socket.gethostname());print('gethostbyname_ex:',socket.gethostbyname_ex(socket.gethostname()))"
echo "in docker container: hostname - python code:"
docker run ipyparallel-sshd python -c "import socket;print('hostname:','$env:computername');print('gethostbyname_ex:',socket.gethostbyname_ex('$env:computername'))"
echo "in docker container: nslookup $env:computername"
docker run ipyparallel-sshd nslookup $env:computername
echo "in docker container: ping jo"
docker run ipyparallel-sshd ping jo
#echo "print hosts file:"
#type c:\Windows\System32\Drivers\etc\hosts
#echo "in docker container: print hosts file:"
#docker run ipyparallel-sshd cmd.exe /C type c:\Windows\System32\Drivers\etc\hosts
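          # Why the archive/scp/expand dance above: docker cp and mounts are unavailable for
          # this Hyper-V isolated container (per the comment above), so the source tree is
          # compressed, copied over SSH, expanded at $env:CODE_ROOT, and pip-installed in
          # editable mode with the [test] extra inside the container.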
- name: Set up slurm
if: ${{ matrix.cluster_type == 'slurm' }}
run: |
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
cd ci/slurm
docker-compose up -d --build
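          # Note: the matrix above only uses cluster_type "ssh", so this slurm setup is
          # skipped; it appears to be kept for parity with other cluster types (assumption).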
- name: Install Python (conda) ${{ matrix.python }}
if: ${{ matrix.cluster_type == 'mpi' }}
run: |
export MAMBA_ROOT_PREFIX=$HOME/conda
test -d $MAMBA_ROOT_PREFIX || mkdir $MAMBA_ROOT_PREFIX
wget -qO- https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
micromamba activate
micromamba install -y -c conda-forge mpich mpi4py python=${{ matrix.python }}
echo "PATH=$MAMBA_ROOT_PREFIX/bin:$PATH" >> $GITHUB_ENV
- name: Install Python ${{ matrix.python }}
if: ${{ matrix.cluster_type != 'mpi' }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
- name: Install ipyparallel itself
run: |
pip install --upgrade pip
pip install --no-deps .
- name: Install Python dependencies
run: |
pip install --pre --upgrade ipyparallel[test]
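          # The checkout itself was installed with --no-deps above; this pulls in the test
          # dependencies declared by the [test] extra, allowing pre-release versions.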
- name: Install extra Python packages
if: ${{ ! startsWith(matrix.python, '3.11') }}
run: |
pip install distributed joblib
pip install --only-binary :all: matplotlib || echo "no matplotlib"
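          # matplotlib is optional: --only-binary avoids building from source, and the
          # `|| echo` keeps the step green on platforms without a suitable wheel.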
- name: Show environment
run: pip freeze
- name: Run tests in container ${{ matrix.container }}
if: ${{ matrix.container }}
run: echo "EXEC=docker exec -i ${{ matrix.container }}" >> $GITHUB_ENV
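      # EXEC is presumably consumed elsewhere to prefix test commands with `docker exec`;
      # no matrix entry here sets `container`, so this step is skipped in this workflow.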
- name: Run ${{ matrix.cluster_type }} shellcmd tests
if: ${{ matrix.cluster_type }}
run: |
pytest -v --maxfail=2 --cov=ipyparallel ipyparallel/tests/test_shellcmd.py
- name: Run ${{ matrix.cluster_type }} tests
if: ${{ matrix.cluster_type }}
run: |
pytest -v --maxfail=2 --cov=ipyparallel ipyparallel/tests/test_${{ matrix.cluster_type }}.py
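          # For this matrix the target resolves to ipyparallel/tests/test_ssh.py; --maxfail=2
          # stops early on repeated failures and --cov collects coverage data that the
          # commented-out codecov step at the end of the file would upload.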
- name: Output windows test_ssh.py::test_get_output log (on success)
if: success() && contains(matrix.runs_on, 'windows')
run: |
echo "-------------- test_get_output.log --------------------"
if (Test-Path -Path $env:USERPROFILE\test_get_output.log) {
type $env:USERPROFILE\test_get_output.log
}
- name: Output linux test_ssh.py::test_get_output log (on success)
if: success() && !contains(matrix.runs_on, 'windows')
run: |
echo "-------------- test_get_output.log --------------------"
if test -f ~/test_get_output.log; then
cat ~/test_get_output.log
fi
#- name: Submit codecov report
# uses: codecov/codecov-action@v3