index.js
const { promisify } = require("util");
const AWS = require("aws-sdk");
const fs = require("fs");
const path = require("path");
const rimraf = promisify(require("rimraf"));
const tar = require("tar");
const through2 = require("through2");
const amqp = require("amqplib");
const execa = require("execa");
const RABBITMQ_URI = process.env.RABBITMQ_URI || "amqp://localhost";
const DOCKER_CREDENTIALS_PATH = "/gcr/mechmania2017-key.json";
const COMPILER_QUEUE = `compilerQueue`;
const STANCHION_QUEUE = `stanchionQueue`;
const COMPILE_DIR = "/compile";
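// S3 client scoped to the "mechmania" bucket: bot scripts are read from
// scripts/<id> and the resulting build logs are written back to compiled/<id>.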
const s3 = new AWS.S3({
  params: { Bucket: "mechmania" }
});
const getObject = promisify(s3.getObject.bind(s3));
const upload = promisify(s3.upload.bind(s3));
const mkdir = promisify(fs.mkdir);
const chmod = promisify(fs.chmod);
const readdir = promisify(fs.readdir);
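// Worker entry point: log in to GCR with the mounted service-account key,
// connect to RabbitMQ, and consume compile jobs from COMPILER_QUEUE.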
async function main() {
  // Login to docker
  // docker login -u _json_key --password-stdin https://gcr.io
  const dockerLoginProc = execa("docker", [
    "login",
    "-u",
    "_json_key",
    "--password-stdin",
    "https://gcr.io"
  ]);
  fs.createReadStream(DOCKER_CREDENTIALS_PATH).pipe(dockerLoginProc.stdin);
  const { stdout, stderr } = await dockerLoginProc;
  console.log(stdout, stderr);
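  // Connect to RabbitMQ; prefetch(1) keeps this worker to a single
  // in-flight compile job at a time.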
  const conn = await amqp.connect(RABBITMQ_URI);
  const ch = await conn.createChannel();
  await ch.assertQueue(COMPILER_QUEUE, { durable: true });
  await ch.assertQueue(STANCHION_QUEUE, { durable: true });
  ch.prefetch(1);
  process.on("SIGTERM", async () => {
    console.log("Got SIGTERM");
    await ch.close();
    conn.close();
  });
  console.log(`Listening to ${COMPILER_QUEUE}`);
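  // Each message body is a script id; the matching tarball is expected
  // at scripts/<id> in S3.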
  ch.consume(
    COMPILER_QUEUE,
    async message => {
      console.log(`Got message`);
      const id = message.content.toString();
      // clear the COMPILE_DIR
      console.log(`${id} - Cleaning ${COMPILE_DIR}`);
      await rimraf(COMPILE_DIR);
      // Extract and decompress
      console.log(`${id} - Extracting contents of script to ${COMPILE_DIR}`);
      await mkdir(COMPILE_DIR);
      const extractStream = s3
        .getObject({ Key: `scripts/${id}` })
        .createReadStream()
        .pipe(tar.x({ C: COMPILE_DIR }));
      extractStream.on("close", async () => {
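        // Tag the image under the project's GCR registry so it can be
        // pushed once the build succeeds.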
        const image = `gcr.io/mechmania2017/${id}`;
        // Compile the script
        console.log(`${id} - Compiling files at ${COMPILE_DIR}`);
        // TODO: Handle errors
        let stdout = "";
        let stderr = "";
        let success = false;
        try {
          const proc = await execa("docker", [
            "build",
            COMPILE_DIR,
            "-t",
            image
          ]);
          stdout = proc.stdout;
          stderr = proc.stderr;
          success = true;
        } catch (e) {
          stdout = e.stdout;
          stderr = e.stderr;
          success = false;
        }
        console.log(stdout);
        console.warn(stderr);
        const body = `
==================================================
stdout:
${stdout}
===================================================
stderr:
${stderr}
`;
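        // Upload the combined build log to S3 under compiled/<id> so the
        // outcome of the compile can be retrieved later.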
        console.log(`${id} - Uploading build log to s3`);
        const data = await upload({
          Key: `compiled/${id}`,
          Body: body
        });
        console.log(`${id} - Uploaded to s3 (${data.Location})`);
        if (success) {
          // Push to GCR
          console.log(`${id} - Pushing image to GCR`);
          const { stdout: pushStdOut, stderr: pushStdErr } = await execa(
            "docker",
            ["push", image]
          );
          console.log(pushStdOut);
          console.warn(pushStdErr);
          // Notify Stanchion
          console.log(`${id} - Notifying ${STANCHION_QUEUE}`);
          ch.sendToQueue(STANCHION_QUEUE, Buffer.from(id), {
            persistent: true
          });
        }
        ch.ack(message);
      });
    },
    { noAck: false }
  );
}
main().catch(console.trace);