// worker.js
const os = require('os');
const fs = require('fs').promises;
const {createWriteStream} = require('fs');
const axios = require('axios');
const execa = require('execa');
const Nile = require('nile');
const log = require('./lib/worker-logger');
const storj = require('./lib/rclone');
const pathing = require('./lib/pathing');
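// Pool of GitHub API credentials. GITHUB_KEYS is expected to be a JSON
// array of [client_id, client_secret] pairs (illustrative shape, inferred
// from the destructuring below), e.g.
//   GITHUB_KEYS='[["abc123","s3cret"],["def456","s3cret2"]]'
// A random pair is picked per request to spread rate-limit quota.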
const keys = JSON.parse(process.env.GITHUB_KEYS);
const getKeyPair = () => keys[Math.floor(Math.random() * keys.length)];
const nile = new Nile('gitbackup.org');
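// Best-effort push of a log line to the nile service, keyed by
// "<hostname>-<pm2 process id>". Failures are swallowed so logging can
// never take down the worker.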
const nileLog = async message => {
  try {
    // 'message' (renamed from 'log', which shadowed the worker logger import)
    await nile.pushChunk(`${os.hostname()}-${process.env.pm_id}`, message);
  } catch(err) {
    console.warn('Log to nile failed');
  }
};
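// GET wrapper around axios that retries automatically when GitHub responds
// 403 (assumed to be a rate limit), sleeping until the advertised
// x-ratelimit-reset time plus random jitter.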
async function getGithubEndpoint(...args) {
  try {
    return await axios.get(...args);
  } catch(error) {
    // if 'Forbidden', assume we hit the rate limit; guard error.response,
    // which is undefined for network-level failures
    if(error.response && error.response.status === 403) {
      // time to reset + random jitter to avoid multiple workers retrying at once
      const timeout = ((Number(error.response.headers['x-ratelimit-reset']) * 1000) - Date.now()) + (Math.random() * 10000);
      log.warn(`Rate limit reached. Waiting ${Math.floor(timeout / 1000)} seconds.`);
      nileLog(`Rate limit reached. Waiting ${Math.floor(timeout / 1000)} seconds.`);
      await new Promise(resolve => setTimeout(resolve, timeout));
      // retry
      return getGithubEndpoint(...args);
    }
    throw error;
  }
}
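// Fetch every repository of a user by walking the paginated
// /users/:username/repos endpoint, 100 repositories per page, until an
// empty page is returned.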
async function getRepos({ username }) {
  const repos = [];
  // pull repository pages
  for(let i = 1; ; i++) {
    const [ client_id, client_secret ] = getKeyPair();
    const {data} = await getGithubEndpoint(`https://api.github.com/users/${username}/repos`, {
      params: {
        page: i,
        per_page: 100,
        client_id,
        client_secret
      }
    });
    // if the page is empty, we have seen every repository
    if(data.length === 0) {
      break;
    }
    repos.push(...data);
  }
  return repos;
}
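// Upload a local file to Storj with up to three attempts. Each attempt is
// cancelled if it exceeds a timeout proportional to the file size
// (assuming a 100 KiB/s transfer floor), clamped between 1 minute and
// 4 hours.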
async function storjUpload(source, target) {
  let err = null;
  for(let retries = 3; retries > 0; retries--) {
    err = null;
    try {
      const stat = await fs.stat(source);
      const rate = 1000.0 / (100 * 1024); // ms per byte at 100 KiB per second
      const minT = 60 * 1000; // give at least 60 seconds to finish
      const maxT = 4 * 60 * 60 * 1000; // no more than 4 hours
      const timeout = Math.min(stat.size * rate + minT, maxT);
      const copy = storj.cp(source, target);
      // cancel the copy if it takes too long; clear the timer afterwards so
      // it cannot fire once the upload has already completed
      const timer = setTimeout(() => {
        copy.cancel();
      }, timeout);
      try {
        await copy;
      } finally {
        clearTimeout(timer);
      }
      break;
    } catch(e) {
      err = e;
    }
  }
  // err is only non-null if every attempt failed
  if (err !== null) {
    log.error('Failed to copy to Storj', err);
    nileLog('Failed to copy to Storj');
    throw new Error('Failed to copy to Storj');
  }
}
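// Size of an object already on Storj, or 0 if it does not exist (used to
// compute storage deltas before re-uploading).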
async function storjSize(path) {
  try {
    const [{size}] = await storj.ls(path);
    if(typeof size === 'number') {
      return size;
    }
  } catch(err) {
    // object does not exist (or the listing failed); treat as size 0
  }
  return 0;
}
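// Error types that distinguish failure scopes: a UserError aborts the whole
// user (e.g. the account no longer exists). RepoError is declared for
// per-repository failures but is not currently thrown in this file.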
class UserError extends Error {
  constructor(message) {
    super(message);
    this.name = this.constructor.name;
  }
}
class RepoError extends Error {
  constructor(message) {
    super(message);
    this.name = this.constructor.name;
  }
}
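// Sync one user: list their repositories, upload the repository list as
// JSON, then for each repository that changed since the last sync create a
// git bundle and a zip snapshot and upload both to Storj. Returns counters
// used for lock bookkeeping and worker throughput stats.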
async function cloneUser({ username, lastSynced }) {
  // get the list of repositories from the GitHub API
  let publicLog = '';
  const repos = await (async () => {
    try {
      // await here so the catch below actually sees API failures
      return await getRepos({ username });
    } catch(error) {
      if(error.response && error.response.status === 404) {
        throw new UserError('Not Found');
      }
      throw error;
    }
  })();
  const reportedRepos = repos.length;
  let storageDelta = 0;
  let totalUpload = 0;
  log.info(username, 'has', repos.length, 'repositories');
  nileLog(`${username} has ${repos.length} repositories`);
  let totalRepos = 0;
  const jsonPath = `${__dirname}/repos/${username}.json`;
  const storjJsonPath = `${pathing.encode(username)}.json`;
  await fs.writeFile(jsonPath, JSON.stringify(repos));
  storageDelta -= await storjSize(storjJsonPath);
  await storjUpload(jsonPath, storjJsonPath);
  storageDelta += (await fs.stat(jsonPath)).size;
  await fs.unlink(jsonPath);
  for(const repo of repos) {
    const lastUpdated = new Date(repo.updated_at);
    log.info(repo.full_name, { lastUpdated, lastSynced, updated_at: repo.updated_at });
    publicLog += `${repo.full_name} ${JSON.stringify({ lastUpdated, lastSynced, updated_at: repo.updated_at })}\n`;
    // skip if the repository hasn't been updated since the last sync
    if(lastUpdated < lastSynced) {
      publicLog += `repository hasn't been updated since last sync. skipping\n`;
      nileLog(`repository hasn't been updated since last sync. skipping`);
      continue;
    }
    // skip if the repository is too big (the API reports size in
    // kilobytes, so this cutoff is roughly 4 GB)
    if(repo.size > 4250000) {
      publicLog += `repository is too big (${repo.size}). skipping\n`;
      nileLog(`repository is too big (${repo.size}). skipping`);
      continue;
    }
    const repoPath = `${__dirname}/repos/${repo.full_name}`;
    const repoBundlePath = `${repoPath}.bundle`;
    const repoZipPath = `${repoPath}.zip`;
    // purge any leftover data from a previous run
    await execa('rm', ['-rf', repoZipPath]);
    await execa('rm', ['-rf', repoBundlePath]);
    await execa('rm', ['-rf', repoPath]);
    // create a bundle
    log.info(repo.full_name, 'cloning');
    publicLog += `cloning\n`;
    nileLog(`${repo.full_name} cloning`);
    try {
      await execa('git', ['clone', '--mirror', repo.git_url, repoPath]);
      await execa('git', ['bundle', 'create', repoBundlePath, '--all'], {
        cwd: repoPath,
      });
    } catch(err) {
      log.info(repo.full_name, 'clone failed');
      publicLog += 'clone failed. skipping\n';
      continue;
    }
    // download a zip of the default branch (not every repository uses
    // 'master'; fall back to it if the field is absent)
    log.info(repo.full_name, 'downloading zip');
    publicLog += 'downloading zip\n';
    nileLog(`${repo.full_name} downloading zip`);
    const {data} = await axios.get(`${repo.html_url}/archive/${repo.default_branch || 'master'}.zip`, {
      responseType: 'stream',
    });
    // wait until the zip is fully written to disk before uploading it
    await new Promise((resolve, reject) => {
      const file = createWriteStream(repoZipPath);
      data.pipe(file);
      file.on('finish', resolve);
      file.on('error', reject);
      data.on('error', reject);
    });
    log.info(repo.full_name, 'mkdir storj parent directory');
    const storjPath = pathing.encode(username);
    const storjBundlePath = `${storjPath}/${repo.name}.bundle`;
    const storjZipPath = `${storjPath}/${repo.name}.zip`;
    // remove the old sizes from the total storage delta
    storageDelta -= await storjSize(storjBundlePath);
    storageDelta -= await storjSize(storjZipPath);
    // try to upload the files
    await storjUpload(repoBundlePath, storjBundlePath);
    await storjUpload(repoZipPath, storjZipPath);
    // update the total storage usage delta
    storageDelta += (await fs.stat(repoBundlePath)).size;
    storageDelta += (await fs.stat(repoZipPath)).size;
    // update the total upload
    totalUpload += (await fs.stat(repoBundlePath)).size;
    totalUpload += (await fs.stat(repoZipPath)).size;
    // If this is us-central-1, mirror it to us2 for testing.
    /*
    if (storjPath.startsWith("us-central-1:")) {
      const storjPath2 = storjPath.replace(/^us-central-1:/, 'us2:');
      const storjBundlePath2 = `${storjPath2}/${repo.name}.bundle`;
      const storjZipPath2 = `${storjPath2}/${repo.name}.zip`;
      // try to upload the files
      await storjUpload(repoBundlePath, storjBundlePath2);
      await storjUpload(repoZipPath, storjZipPath2);
    }
    */
    log.info(repo.full_name, 'cleaning up');
    publicLog += 'cleaning up\n';
    await execa('rm', [ '-rf', repoBundlePath ]);
    await execa('rm', [ '-rf', repoZipPath ]);
    await execa('rm', [ '-rf', repoPath ]);
    totalRepos++;
    log.info(repo.full_name, 'done');
  }
  const logPath = `${__dirname}/repos/${username}.log`;
  const storjLogPath = `${pathing.encode(username)}.log`;
  await fs.writeFile(logPath, publicLog);
  storageDelta -= await storjSize(storjLogPath);
  await storjUpload(logPath, storjLogPath);
  storageDelta += (await fs.stat(logPath)).size;
  // remove the local log file, mirroring the JSON cleanup above
  await fs.unlink(logPath);
  // wait 5 seconds after each user
  await new Promise(resolve => setTimeout(resolve, 5000));
  return {
    totalRepos,
    reportedRepos,
    storageDelta,
    totalUpload
  };
}
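// Main worker loop: acquire a user lock from the coordination server, keep
// the lock alive while syncing, then report completion (or an error
// status) together with throughput statistics.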
(async () => {
  // wait a random amount to avoid instantaneous parallel requests
  await new Promise(resolve => setTimeout(resolve, Math.floor(Math.random() * 60000)));
  const lockClient = axios.create({
    baseURL: process.env.SERVER_URL || 'http://localhost:8000',
    timeout: 10000,
    headers: {
      'X-Worker-Token': process.env.WORKER_TOKEN
    }
  });
  for(;;) {
    // get an already-locked user
    const startTime = Date.now();
    const username = (await lockClient.post('/lock')).data;
    // declared outside the try so the catch below can stop it too
    let updateLock;
    try {
      // refresh the lock every 5 seconds in the background; swallow refresh
      // failures so they don't surface as unhandled rejections
      updateLock = setInterval(() => {
        lockClient.post(`/lock/${username}`).catch(() => {});
      }, 5000);
      // find out when the user was last synced
      const _lastSynced = (await lockClient.get(`/lock/${username}/last_synced`)).data;
      const lastSynced = new Date(_lastSynced);
      // sync the user
      const {
        totalRepos,
        reportedRepos,
        storageDelta,
        totalUpload
      } = await (async () => {
        try {
          return await cloneUser({ username, lastSynced });
        } catch(error) {
          log.info(`Caught sync failure of '${username}', cleaning up`);
          await execa('rm', [ '-rf', `${__dirname}/repos/${username}` ]);
          throw error;
        }
      })();
      await execa('rm', [ '-rf', `${__dirname}/repos/${username}` ]);
      // stop updating the lock
      clearInterval(updateLock);
      // free the lock and submit the total number of repositories
      await lockClient.post(`/lock/${username}/complete`, null, {
        params: {
          totalRepos,
          reportedRepos,
          storageDelta
        }
      });
      const userTime = Date.now() - startTime;
      const worker_id = `${os.hostname()}-${process.env.pm_id}`;
      const users_per_minute = 60000 / userTime;
      const repos_per_minute = users_per_minute * totalRepos;
      const bytes_per_minute = users_per_minute * totalUpload;
      await lockClient.post('/worker/push_stats', null, {
        params: {
          worker_id,
          users_per_minute,
          repos_per_minute,
          bytes_per_minute
        }
      });
    } catch(error) {
      log.info('user error', error);
      // stop refreshing the lock on failure too, so a failed user doesn't
      // leak a background interval
      clearInterval(updateLock);
      // report the message of known user-level failures; anything else is
      // an unexpected error
      const message = error instanceof UserError
        ? error.message
        : 'Uncaught Failure';
      // set the user to 'error' status (sent as query params, matching the
      // other lock endpoints)
      await lockClient.post(`/lock/${username}/error`, null, {
        params: {
          message
        }
      });
    }
  }
})();