Commit 79adfd7d authored by Nilanjan Daw

Added Executor level state emitters

Added Kafka producers to alert the DM/DA of executor states. Removed the indirect state-gathering module from the DA.
parent 7d5fc388
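
The state emitters this commit introduces boil down to plain kafka-node producer sends issued from the executor itself. A minimal sketch, assuming a broker at 127.0.0.1:9092 and reusing the topic names and payload fields that appear in the diff below (not the exact helper names used in the repo):

// Minimal sketch of the executor-side state emitter.
const kafka = require('kafka-node')

const client = new kafka.KafkaClient({ kafkaHost: '127.0.0.1:9092' }) // assumed broker address
const producer = new kafka.Producer(client)
// In practice, wait for producer's 'ready' event before calling these.

// Announce that this resource has come up and is serving the function.
function emitDeployed(functionHash, portExternal, runtime, resource_id) {
    producer.send([{
        topic: "deployed",
        messages: JSON.stringify({ functionHash, portExternal, runtime, resource_id })
    }], () => { })
}

// Announce that this resource is going away so the DM can drop its metadata.
function emitRemoveWorker(functionHash, portExternal, runtime, resource_id) {
    producer.send([{
        topic: "removeWorker",
        messages: JSON.stringify({ functionHash, portExternal, runtime, resource_id })
    }], () => { })
}

module.exports = { emitDeployed, emitRemoveWorker }
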
@@ -2,4 +2,5 @@ bitnami*
 node_modules
 package-lock.json
 firecracker*
-secrets.json
\ No newline at end of file
+secrets.json
+grunt
\ No newline at end of file
{"id":"10.196.11.241","master_node":"10.129.6.5"} {"id":"192.168.31.51","master_node":"10.129.6.5"}
\ No newline at end of file \ No newline at end of file
@@ -8,7 +8,7 @@ const events = require('events');
 const workerEvent = new events.EventEmitter();
 const parentProcess = require('process');
-function runIsolate(local_repository, functionHash, port) {
+function runIsolate(local_repository, functionHash, port, resource_id) {
     let filename = local_repository + functionHash + ".js"
     return new Promise((resolve, reject) => {
@@ -30,7 +30,7 @@ function runIsolate(local_repository, functionHash, port) {
 }
-function runProcess(local_repository, functionHash, port) {
+function runProcess(local_repository, functionHash, port, resource_id) {
     let filename = local_repository + functionHash + ".js"
     return new Promise((resolve, reject) => {
         let timeStart = Date.now()
@@ -41,7 +41,7 @@ function runProcess(local_repository, functionHash, port) {
             result += data;
             let timeDifference = Math.ceil((Date.now() - timeStart))
             console.log("process time taken: ", timeDifference);
-            workerEvent.emit('start', functionHash, port, "process")
+            // workerEvent.emit('start', functionHash, port, "process")
             resolve(result);
         });
@@ -52,14 +52,14 @@ function runProcess(local_repository, functionHash, port) {
         process.on('close', (code) => {
             console.log(`child process exited with code ${code}`);
-            workerEvent.emit('end', port, "process");
+            // workerEvent.emit('end', port, "process");
         });
     })
 }
-function runContainer(imageName, port) {
+function runContainer(imageName, port, resource_id) {
     console.log(imageName);
     return new Promise((resolve, reject) => {
@@ -80,9 +80,10 @@ function runContainer(imageName, port) {
             if (code != 0)
                 reject("error")
             else {
-                const process = spawn('docker', ["run", "--name", imageName, registry_url + imageName]);
+                const process = spawn('docker', ["run", "--rm", "-p", `${port}:5000`, "--name", imageName, registry_url + imageName,
+                    resource_id, imageName, port, "container"]);
                 let result = "";
-                timeStart = Date.now()
+                // timeStart = Date.now()
                 process.stdout.on('data', (data) => {
                     console.log(`stdout: ${data}`);
                     let timeDifference = Math.ceil((Date.now() - timeStart))
@@ -104,58 +105,29 @@ function runContainer(imageName, port) {
             })
         } else {
-            const process_checkContainer = spawn('docker', ['container', 'inspect', imageName]);
-            process_checkContainer.on('close', (code) => {
             console.log("container starting at port", port);
-            if (code != 0) {
-                const process = spawn('docker', ["run", "-p", `${port}:5000`, "--name", imageName, registry_url + imageName]);
+            const process = spawn('docker', ["run", "--rm", "-p", `${port}:5000`, "--name", imageName,
+                registry_url + imageName, resource_id, imageName, port, "container"]);
             let result = "";
-            timeStart = Date.now()
+            // timeStart = Date.now()
             process.stdout.on('data', (data) => {
                 console.log(`stdout: ${data}`);
                 let timeDifference = Math.ceil((Date.now() - timeStart))
                 console.log("container run time taken: ", timeDifference);
                 // result += data;
                 workerEvent.emit('start', imageName, port, "container")
                 resolve(result);
             });
             process.stderr.on('data', (data) => {
                 console.error(`stderr: ${data}`);
                 reject(data);
             });
             process.on('close', (code) => {
                 workerEvent.emit('end', port, "container");
             })
-            } else {
-                const clean_container = spawn('docker', ['rm', imageName])
-                clean_container.on('close', code => {
-                    const process = spawn('docker', ["run", "-p", `${port}:5000`, "--name", imageName, registry_url + imageName]);
-                    let result = "";
-                    timeStart = Date.now()
-                    process.stdout.on('data', (data) => {
-                        console.log(`stdout: ${data}`);
-                        let timeDifference = Math.ceil((Date.now() - timeStart))
-                        console.log("container run time taken: ", timeDifference);
-                        // result += data;
-                        workerEvent.emit('start', imageName, port, "container")
-                        resolve(result);
-                    });
-                    process.stderr.on('data', (data) => {
-                        console.error(`stderr: ${data}`);
-                        reject(data);
-                    });
-                    process.on('close', (code) => {
-                        workerEvent.emit('end', port, "container");
-                    })
-                })
-            }
-            })
         }
...
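The extra arguments appended to the docker run invocation above are how the resource identity reaches the containerized executor: with the image's ENTRYPOINT set to node <imageName>.js, everything after the image name arrives as process.argv. A small sketch of that mapping, with illustrative values only:

// Sketch: how the extra `docker run` arguments land inside the container.
// Everything after the image name becomes process.argv[2..] of the executor,
// in the order passed by runContainer().
let [, , resource_id, functionHash, portExternal, runtime] = process.argv
// e.g. docker run --rm -p 8000:5000 --name fn123 <registry>/fn123 \
//        <resource_id> fn123 8000 container
console.log({ resource_id, functionHash, portExternal, runtime })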
@@ -4,6 +4,8 @@ const config = require('./config.json')
 const libSupport = require('./lib')
 libSupport.updateConfig()
 const node_id = config.id
+const {spawn } = require('child_process')
+let grunt = spawn('./grunt')
 const execute = require('./execute')
 const fs = require('fs')
 const kafka = require('kafka-node')
@@ -36,21 +38,19 @@ libSupport.makeTopic(node_id).then(() => {
         let topic = message.topic
         message = message.value
         message = JSON.parse(message)
-        if (topic !== 'heartbeat') {
-            let runtime = message.runtime
-            let functionHash = message.functionHash
-            let function_id = message.function_id
-            let port = message.port
-            /**
-             * Download necessary files (function file) and Start resource deployment
-             */
-            if (message.type === "execute") {
-                console.log("function_id", function_id);
-                libSupport.download(host_url + "/repository/" + functionHash + ".js", local_repository + functionHash + ".js").then(() => {
-                    startWorker(local_repository, functionHash, function_id, producer, runtime, port)
-                })
-            }
-        }
+        let runtime = message.runtime
+        let functionHash = message.functionHash
+        let resource_id = message.resource_id
+        let port = message.port
+        /**
+         * Download necessary files (function file) and Start resource deployment
+         */
+        if (message.type === "execute") {
+            console.log("function_id", resource_id);
+            libSupport.download(host_url + "/repository/" + functionHash + ".js", local_repository + functionHash + ".js").then(() => {
+                startWorker(local_repository, functionHash, resource_id, producer, runtime, port)
+            })
+        }
     })
@@ -60,31 +60,33 @@ libSupport.makeTopic(node_id).then(() => {
  * Start a worker executor of the runtime type
  * @param {String} local_repository
  * @param {String} functionHash
- * @param {String} function_id
+ * @param {String} resource_id
  * @param {String} producer
  * @param {String} runtime
  * @param {Number} port
  */
-function startWorker(local_repository, functionHash, function_id, producer, runtime, port) {
+function startWorker(local_repository, functionHash, resource_id, producer, runtime, port) {
     console.log("Using port", port, "for functionHash", functionHash);
     usedPort.set(port, functionHash)
-    fs.writeFileSync('./local_repository/config.json', JSON.stringify({port}));
+    fs.writeFile('./local_repository/config.json', JSON.stringify({port, functionHash, resource_id, runtime}), () => {
        if (runtime === "isolate")
-           execute.runIsolate(local_repository, functionHash, port)
+           execute.runIsolate(local_repository, functionHash, port, resource_id)
        else if (runtime === "process")
-           execute.runProcess(local_repository, functionHash, port)
+           execute.runProcess(local_repository, functionHash, port, resource_id)
        else if (runtime === "container")
-           execute.runContainer(functionHash, port)
+           execute.runContainer(functionHash, port, resource_id)
        else {
            producer.send(
                [{
                    topic: "response",
                    messages: JSON.stringify({ status: "unknown runtime" })
                }], () => { })
            return
        }
+    });
 }
 function heartbeat() {
@@ -92,31 +94,77 @@ function heartbeat() {
         topic: "heartbeat",
         messages: JSON.stringify({"address": node_id, "timestamp": Date.now()})
     }]
-    producer.send(payload, function() {
-    })
+    producer.send(payload, function() {})
 }
 execute.workerEvent.on("start", (functionHash, port, runtime) => {
     console.log("started function Port: ", port, functionHash);
-    producer.send(
-        [{
-            topic: "deployed",
-            messages: JSON.stringify({ functionHash, port, runtime, node_id })
-        }], () => { })
+    // producer.send(
+    //     [{
+    //         topic: "deployed",
+    //         messages: JSON.stringify({ functionHash, port, runtime, node_id })
+    //     }], () => { })
 })
 execute.workerEvent.on('end', (port, runtime) => {
     let functionHash = usedPort.get(port)
     usedPort.delete(port)
-    producer.send(
-        [{
-            topic: "removeWorker",
-            messages: JSON.stringify({ functionHash, port, runtime, node_id })
-        }], () => {
-            console.log("Ending worker for function", functionHash, usedPort);
-        })
+    // producer.send(
+    //     [{
+    //         topic: "removeWorker",
+    //         messages: JSON.stringify({ functionHash, port, runtime, node_id })
+    //     }], () => {
+    //         console.log("Ending worker for function", functionHash, usedPort);
+    //     })
 })
-setInterval(heartbeat, 1000);
\ No newline at end of file
+grunt.stdout.on('data', data => {
+    console.log(data.toString());
+})
+grunt.stderr.on('data', data => {
+    console.log(data.toString());
+})
+setInterval(heartbeat, 1000);
+/**
+ * Channel LOG_COMMON
+   Source: Executor
+   {
+       "node_id"
+       "resource_id"
+       "function_id"
+       "status": true/false
+       "reason": "deployed / exd"
+   }
+   Source: Executor
+   {
+       "node_id"
+       "resource_id"
+       "function_id"
+       "usage": {
+           "cpu"
+           "memory"
+           "network"
+       }
+   }
+   Source: ReverseProxy
+   {
+       "node_id"
+       "resource_id"
+       "function_id"
+       "average_fn_time"
+   }
+   Source: Dispatch Manager
+   {
+       "node_id"
+       "resource_id"
+       "function_id"
+       "coldstart_time"
+   }
+ */
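
The comment block above only specifies the intended LOG_COMMON message shapes; nothing publishes on that channel in this commit. A hypothetical emitter for the first Executor message, assuming the existing kafka-node producer and taking the topic name and fields directly from the comment, might look like:

// Hypothetical emitter for the first Executor message on the LOG_COMMON
// channel described above; not wired up anywhere in this commit.
function emitStatus(producer, node_id, resource_id, function_id, status, reason) {
    producer.send([{
        topic: "LOG_COMMON", // channel name taken from the comment block
        messages: JSON.stringify({ node_id, resource_id, function_id, status, reason })
    }], () => { })
}

// Usage: emitStatus(producer, node_id, resource_id, function_id, true, "deployed")
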
@@ -16,9 +16,11 @@ const libSupport = require('./lib')
  * functionToPort maps the function and its respective port mapping
  * TODO: change this to hold a list of mappings of horizontal scaling
  */
-let functionToPort = new Map(),
+let functionToResource = new Map(), // TODO: make the resource a list for horizontal scale out
     usedPort = new Map(), // TODO: remove after integration with RM
-    rmQueue = new Map()
+    rmQueue = new Map(),
+    db = new Map(),
+    resourceMap = new Map()
 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
@@ -41,7 +43,6 @@ let kafka = require('kafka-node'),
         { autoCommit: true }
     ])
-let db = new Map()
 app.use(morgan('combined'))
 app.use(bodyParser.urlencoded({ extended: true }))
 app.use(bodyParser.json())
@@ -113,8 +114,7 @@ function deployContainer(path, imageName) {
     COPY package.json /app
     RUN npm install
     COPY . /app
-    CMD node ${imageName}.js`
+    ENTRYPOINT ["node", "${imageName}.js"]`
         , function (err) {
             if (err) {
                 console.log("failed", err);
@@ -165,15 +165,20 @@ function deployContainer(path, imageName) {
  */
 app.post('/serverless/execute/:id', (req, res) => {
     let runtime = req.body.runtime
-    if (functionToPort.has(req.params.id + runtime)) {
+    if (functionToResource.has(req.params.id + runtime)) {
         /**
          * Bypass deployment pipeline if resource available
          */
-        let forwardTo = functionToPort.get(req.params.id + runtime)
-        console.log("resource found", forwardTo);
-        libSupport.reverseProxy(req, res, `http://${forwardTo.node_id}:${forwardTo.port}/serverless/function/execute`)
+        let forwardTo = functionToResource.get(req.params.id + runtime)
+        let resource = resourceMap.get(forwardTo.resource_id)
+        console.log("resource found", forwardTo, resource);
+        libSupport.reverseProxy(req, res, `http://${resource.node_id}:${resource.port}/serverless/function/execute`)
     } else {
+        /**
+         * FIXME: Here, every request even for the same function will be queued up potentially launching multiple
+         * resource of the same type
+         */
         requestQueue.push({ req, res })
         /**
         * We store functions for function placement heuristics purposes. This lets us look into the function
@@ -195,20 +200,29 @@ function dispatch() {
         let runtime = req.body.runtime
         let functionHash = req.params.id
-        let function_id = libSupport.makeid(20) // each function resource request is associated with an unique ID
-        console.log("Dispatching function with Id", function_id, runtime);
+        let resource_id = libSupport.makeid(20) // each function resource request is associated with an unique ID
+        console.log("Dispatching function with Id", resource_id, runtime);
         let node_id = getAddress() // Requests the RM for address and other metadata for function placement
+        let port = libSupport.getPort(usedPort) // TODO: will be provided by the RM
+        resourceMap.set(resource_id, {
+            runtime, functionHash, port, node_id
+        })
+        console.log(resourceMap);
         let payload = [{
             topic: node_id,
             messages: JSON.stringify({
                 "type": "execute", // Request sent to Dispatch Daemon via Kafka for actual deployment at the Worker
-                function_id,
+                resource_id,
                 runtime, functionHash,
-                port: libSupport.getPort(usedPort) // TODO: will be provided by the RM
+                port
             }),
             partition: 0
         }]
+        producer.send(payload, () => { })
+        db.set(functionHash + runtime, { req, res })
+        /** uncomment when RM is available
         rmQueue.set(function_id, payload)
         let payloadToRM = [{
             topic: "REQUEST_DISPATCHER_2_ARBITER",
@@ -222,7 +236,7 @@ function dispatch() {
             db.set(functionHash + runtime, { req, res })
         })
+        */
     }
 }
@@ -238,7 +252,7 @@ consumer.on('message', function (message) {
     let topic = message.topic
     message = message.value
     if (topic === "response") {
-        console.log(message);
+        console.log("response", message);
         // message = JSON.parse(message)
         // console.log(message);
@@ -257,38 +271,52 @@ consumer.on('message', function (message) {
             console.log(workerNodes);
         }
     } else if (topic == "deployed") {
-        message = JSON.parse(message)
+        try {
+            message = JSON.parse(message)
+        } catch (e) {
+            // process.exit(0)
+        }
         console.log("deployed", message);
         if (db.has(message.functionHash + message.runtime)) {
             let { req, res } = db.get(message.functionHash + message.runtime)
-            if (parseInt(message.port) != -1)
-                functionToPort.set(message.functionHash + message.runtime, {
-                    port: parseInt(message.port),
-                    node_id: message.node_id
-                })
+            functionToResource.set(message.functionHash + message.runtime, {
+                resource_id: message.resource_id
+            })
+            let resource = resourceMap.get(message.resource_id)
             libSupport.reverseProxy(req, res,
-                `http://${message.node_id}:${message.port}/serverless/function/execute`)
+                `http://${resource.node_id}:${resource.port}/serverless/function/execute`)
                 .then(() => {
                     db.delete(message.functionHash + message.runtime)
                 })
         }
     } else if (topic == "removeWorker") {
         console.log("removing metadata", message);
-        message = JSON.parse(message)
+        try {
+            message = JSON.parse(message)
+        } catch(e) {
+            // process.exit(0)
+        }
         usedPort.delete(message.port)
-        functionToPort.delete(message.functionHash + message.runtime)
+        let resource = functionToResource.get(message.functionHash + message.runtime)
+        functionToResource.delete(message.functionHash + message.runtime)
+        resourceMap.delete(resource.resource_id)
     } else if (topic == "RESPONSE_ARBITER_2_DISPATCHER") {
         message = JSON.parse(message)
         console.log(message);
         let payload = rmQueue.get(message.id)
-        payload[0].topic = getAddress()
-        console.log(payload);
-        producer.send(payload, () => { })
+        if (payload != null) {
+            payload[0].topic = getAddress()
+            console.log(payload);
+            producer.send(payload, () => { })
+        } else {
+            console.log("something went wrong");
+        }
     }
 });
...
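For reference, the warm-request path introduced above now goes through two maps: functionToResource resolves a function/runtime pair to a resource_id, and resourceMap resolves that id to its placement metadata set at dispatch time. A condensed sketch of that lookup (resolveResource is a hypothetical helper, not part of the diff):

// Condensed sketch of the warm-path lookup the Dispatch Manager now performs.
function resolveResource(functionHash, runtime) {
    let entry = functionToResource.get(functionHash + runtime)   // { resource_id }
    if (entry === undefined)
        return null                                              // cold path: queue the request and dispatch
    let resource = resourceMap.get(entry.resource_id)            // { runtime, functionHash, port, node_id }
    return `http://${resource.node_id}:${resource.port}/serverless/function/execute`
}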
@@ -2,13 +2,20 @@
 const express = require('express')
 const bodyParser = require('body-parser')
 const app = express()
-let port = 5000
+let port = 5000, resource_id, functionHash, portExternal, runtime
 let config = null;
 try {
     config = require('./config.json')
     port = config.port
+    resource_id = config.resource_id
+    functionHash = config.functionHash
+    runtime = config.runtime
 } catch (e) {
     port = 5000
+    resource_id = process.argv[2]
+    functionHash = process.argv[3]
+    portExternal = process.argv[4]
+    runtime = process.argv[5]
 }
 let kafka = require('kafka-node'),
@@ -36,11 +43,11 @@ function executor(payload) {
 }
 app.listen(port, () => {
-    console.log(`Server listening on port ${port}!`)
+    console.log(`Resource ${resource_id} Server listening on port ${port}!`)
     producer.send(
         [{
-            topic: "response",
-            messages: "ready"
+            topic: "deployed",
+            messages: JSON.stringify({ functionHash, portExternal, runtime, resource_id })
         }], () => { })
 })
@@ -48,8 +55,15 @@ function shouldDie() {
     if (Date.now() - lastRequest > 5 * 1000) {
         console.log("Idle for too long. Exiting");
-        process.exit(0)
+        producer.send(
+            [{
+                topic: "removeWorker",
+                messages: JSON.stringify({ functionHash, portExternal, runtime, resource_id })
+            }], () => {
+                console.log("Ending worker for function", functionHash, "resource_id", resource_id);
+                process.exit(0)
+            })
     }
 }
...