Commit 1e2d3ca4 authored by Naman Dixit

Merge on pull

parents 85c0cfb7 0198dbc7
@@ -6,3 +6,4 @@ firecracker*
secrets.json
resource_system/bin/**
resource_system/version.linux
+local_experiments/
{
-    "registry_url" :"10.129.6.5:5000/",
+    "registry_url" :"localhost:5000/",
    "master_port": 8080,
    "master_address": "localhost",
-    "kafka_host": "10.129.6.5:9092",
    "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
    "log_channel": "LOG_COMMON",
-    "couchdb_host": "10.129.6.5:5984",
-    "couchdb_db_name": "serverless",
+    "couchdb_host": "localhost:5984",
+    "function_db_name": "serverless",
+    "metrics_db_name": "metrics",
+    "implicit_chain_db_name": "implicit_chain",
+    "network": {
+        "network_bridge": "hybrid_kafka-serverless",
+        "internal": {
+            "kafka_host": "kafka:9092"
+        },
+        "external": {
+            "kafka_host": "localhost:29092"
+        }
+    },
    "topics": {
        "request_dm_2_rm": "request",
        "heartbeat": "heartbeat",
        "deployed": "deployed",
        "remove_worker": "removeWorker",
        "response_rm_2_dm": "RESPONSE_RM_2_DM_DUMMY",
-        "hscale": "hscale"
+        "hscale": "hscale",
+        "log_channel": "LOG_COMMON"
    },
    "autoscalar_metrics": {
        "open_request_threshold": 100
    },
-    "speculative_deployment": true
-}
\ No newline at end of file
+    "speculative_deployment": false,
+    "JIT_deployment": false,
+    "id_size": 20
+}
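For orientation: the new network block splits the Kafka bootstrap address by vantage point. Code attached to the hybrid_kafka-serverless bridge reaches the broker as kafka:9092, while host-side processes use the published localhost:29092. A minimal sketch of the lookup this enables (the helper name and require path are hypothetical, not part of the commit):

const constants = require('./constants.json') // path differs per module in this repo

// Hypothetical helper: pick the Kafka bootstrap address based on where the
// caller runs, mirroring how the dispatcher passes network.internal.kafka_host
// to containers and uses network.external.kafka_host itself.
function kafkaHostFor(context) {
    return (context === "container")
        ? constants.network.internal.kafka_host   // "kafka:9092"
        : constants.network.external.kafka_host   // "localhost:29092"
}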
{"id":"192.168.31.51","master_node":"10.129.6.5"}
\ No newline at end of file
{"id":"192.168.0.105","master_node":"192.168.0.105"}
\ No newline at end of file
@@ -17,7 +17,7 @@ function runIsolate(local_repository, metadata) {
return new Promise((resolve, reject) => {
const worker = new Worker(filename, {
argv: [resource_id, functionHash, port, "isolate"],
argv: [resource_id, functionHash, port, "isolate", constants.network.external.kafka_host],
resourceLimits: {
maxOldGenerationSizeMb: memory
}
@@ -43,7 +43,8 @@ function runProcess(local_repository, metadata) {
return new Promise((resolve, reject) => {
let timeStart = Date.now()
const process = spawn('node', [filename, resource_id, functionHash, port, "process", `--max-old-space-size=${memory}` ]);
const process = spawn('node', [filename, resource_id, functionHash, port, "process",
constants.network.external.kafka_host, `--max-old-space-size=${memory}` ]);
process.stdout.on('data', (data) => {
console.log(`stdout: ${data}`);
let timeDifference = Math.ceil((Date.now() - timeStart))
@@ -91,8 +92,9 @@ function runContainer(metadata) {
if (code != 0)
reject("error")
else {
const process = spawn('docker', ["run", "--rm", "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName,
resource_id, imageName, port, "container"]);
const process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`,
"--name", resource_id, registry_url + imageName,
resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
let result = "";
// timeStart = Date.now()
process.stdout.on('data', (data) => {
@@ -118,8 +120,9 @@ function runContainer(metadata) {
} else {
logger.info("container starting at port", port);
const process = spawn('docker', ["run", "--rm", "-p", `${port}:${port}`, "--name", resource_id,
registry_url + imageName, resource_id, imageName, port, "container"]);
const process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`,
"-p", `${port}:${port}`, "--name", resource_id,
registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
let result = "";
// timeStart = Date.now()
process.stdout.on('data', (data) => {
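Net effect of the two spawn changes above, sketched with illustrative values (resource_id "abc123", imageName "foo", port 3000, plus the defaults from constants.json; none of these literals appear in the commit):

// Hypothetical values, for illustration only.
const args = ["run", "--rm", "--network=hybrid_kafka-serverless",
    "-p", "3000:3000", "--name", "abc123", "localhost:5000/foo",
    "abc123", "foo", "3000", "container", "kafka:9092"]
// The container now joins the Kafka bridge network and receives the internal
// broker address as its final argv entry, i.e. roughly:
//   docker run --rm --network=hybrid_kafka-serverless -p 3000:3000 \
//     --name abc123 localhost:5000/foo abc123 foo 3000 container kafka:9092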
@@ -11,7 +11,7 @@ const fs = require('fs')
const fetch = require('node-fetch');
let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metadataDB = metadataDB + "/" + constants.couchdb_db_name + "/"
metadataDB = metadataDB + "/" + constants.function_db_name + "/"
const kafka = require('kafka-node')
const logger = libSupport.logger
@@ -21,7 +21,7 @@ const host_url = "http://" + constants.master_address + ":" + constants.master_p
let Producer = kafka.Producer,
client = new kafka.KafkaClient({
-        kafkaHost: constants.kafka_host,
+        kafkaHost: constants.network.external.kafka_host,
autoConnect: true
}),
producer = new Producer(client),
@@ -77,9 +77,9 @@ libSupport.makeTopic(node_id).then(() => {
/**
* download and start grunt
*/
libSupport.download(constants.grunt_host, "grunt").then(() => {
libSupport.download(constants.grunt_host, "grunt", false).then(() => {
logger.info("Downloaded grunt binary from repository")
-        fs.chmod('grunt', 0o555, (err) => {
+        fs.chmod('grunt', 0o755, (err) => {
logger.info("grunt made executable. Starting grunt")
let grunt = spawn('./grunt', [node_id])
grunt.stdout.on('data', data => {
......
const http = require('http');
+const fetch = require('node-fetch');
const fs = require('fs');
const process = require('process')
const { spawnSync } = require('child_process');
@@ -30,7 +30,7 @@ function makeTopic(id) {
console.log("Using Primary IP", id, "as topic");
let client = new kafka.KafkaClient({
-        kafkaHost: constants.kafka_host,
+        kafkaHost: constants.network.external.kafka_host,
autoConnect: true
}),
Producer = kafka.Producer,
@@ -50,28 +50,48 @@ function makeTopic(id) {
})
}
-var download = function (url, dest, cb) {
-    return new Promise((resolve, reject) => {
-        console.log(url);
-        if (!fs.existsSync(dest)) {
-            var file = fs.createWriteStream(dest);
-            var request = https.get(url, function (response) {
-                response.pipe(file);
-                file.on('finish', function () {
-                    file.close(cb); // close() is async, call cb after close completes.
-                    resolve();
-                });
-            }).on('error', function (err) { // Handle errors
-                fs.unlink(dest); // Delete the file async. (But we don't check the result)
-                if (cb) cb(err.message);
-                reject(err);
-            });
-        } else {
-            resolve();
-        }
-    })
-};
+// var download = function (url, dest, check = true, cb) {
+//     return new Promise((resolve, reject) => {
+//         console.log(url);
+//         if (!check || !fs.existsSync(dest)) {
+//             var file = fs.createWriteStream(dest);
+//             var request = https.get(url, function (response) {
+//                 response.pipe(file);
+//                 file.on('finish', function () {
+//                     file.close(cb); // close() is async, call cb after close completes.
+//                     resolve();
+//                 });
+//             }).on('error', function (err) { // Handle errors
+//                 fs.unlink(dest); // Delete the file async. (But we don't check the result)
+//                 logger.error("download failed" + err.message);
+//                 if (cb) cb(err.message);
+//                 reject(err);
+//             });
+//         } else {
+//             resolve();
+//         }
+//     })
+// };
+
+const download = (async (url, path, check = true) => {
+    if (!check || !fs.existsSync(path)) {
+        console.log(url);
+        const res = await fetch(url);
+        const fileStream = fs.createWriteStream(path);
+        await new Promise((resolve, reject) => {
+            res.body.pipe(fileStream);
+            res.body.on("error", (err) => {
+                reject(err);
+            });
+            fileStream.on("finish", function () {
+                resolve();
+            });
+        });
+    }
+});
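The rewritten download returns a Promise and replaces the callback with a check flag; passing false forces a re-download even when the file already exists, which is how the dispatcher now fetches the grunt binary. A usage sketch (the chained chmod and error logging are assumptions, not the dispatcher's exact code):

// Force a fresh copy of grunt, then make it executable; mirrors the call site
// libSupport.download(constants.grunt_host, "grunt", false) in the dispatcher.
download(constants.grunt_host, "grunt", false)
    .then(() => fs.promises.chmod("grunt", 0o755))
    .catch(err => console.error("grunt download failed: " + err.message))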
function makeid(length) {
var result = '';
@@ -16,6 +16,7 @@
"kafka-node": "^5.0.0",
"morgan": "^1.9.1",
"mqtt": "^3.0.0",
"node-fetch": "^2.6.0",
"redis": "^2.8.0",
"request": "^2.88.2",
"winston": "^3.2.1"
......
'use strict';
const express = require('express')
const libSupport = require('./lib')
const router = express.Router()
const fs = require('fs')
const { spawn } = require('child_process')
const fetch = require('node-fetch')
const constants = require('../constants.json')
const secrets = require('./secrets.json')
const operator = require('./operator')
let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metadataDB = metadataDB + "/" + constants.function_db_name + "/"
let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"
const logger = libSupport.logger
const registry_url = constants.registry_url
router.post('/deploy', (req, res) => {
// let runtime = req.body.runtime
let files = req.files
const chain_id = libSupport.makeid(constants.id_size)
const file_path = __dirname + "/repository/"
let aliases = {}
let deployHandles = []
createDirectory(file_path).then(() => {
for (const [file_alias, file] of Object.entries(files)) {
let functionHash = file.md5
if (file_alias === 'map') {
file.mv(file_path + 'map' + chain_id + ".json")
continue
}
// aliases[file_alias] = functionHash
deployHandles.push(deploy(file_path, functionHash, file, aliases, file_alias))
}
console.log("aliases", aliases);
Promise.all(deployHandles).then(() => {
console.log("done");
        fs.writeFile(file_path + `aliases${chain_id}.json`, JSON.stringify(aliases, null, 2), function(err) {
            if (err) {
                logger.error("failed to persist aliases", err)
                return res.status(500).json({ status: "error", reason: "could not write aliases file" })
            }
            res.json({
                status: "success",
                function_id: chain_id
            })
        })
}).catch(err => {
        res.status(400).json({
            status: "error",
            reason: err
        })
})
})
})
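A hedged sketch of a matching client for this route, assuming the router is mounted at /serverless (as the execute URLs built later suggest) and using the form-data package on the client side. Each non-map file field becomes an alias; the optional map field carries the chain definition:

const fs = require('fs')
const fetch = require('node-fetch')
const FormData = require('form-data') // assumed client-side dependency

const form = new FormData()
form.append('sum', fs.createReadStream('./sum.js'))     // alias "sum" -> function file
form.append('map', fs.createReadStream('./chain.json')) // optional chain map
fetch('http://localhost:8080/serverless/deploy', { method: 'post', body: form })
    .then(res => res.json())
    .then(json => console.log(json.function_id))        // chain_id on success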
async function deploy(file_path, functionHash, file, aliases, file_alias) {
let runtime = "container", memory = 330
try {
await moveFile(file, file_path, functionHash)
functionHash = libSupport.generateExecutor(file_path, functionHash)
aliases[file_alias] = functionHash
/**
         * Metadata caching via CouchDB.
         * Creates or updates function-related metadata (resource limits etc.)
         * in the function_db_name database ("serverless" by default).
*/
let res = await fetch(metadataDB + functionHash)
let json = await res.json()
console.log(json);
if (json.error === "not_found") {
logger.warn("New function, creating metadata")
await fetch(metadataDB + functionHash, {
method: 'put',
body: JSON.stringify({
memory: memory
}),
headers: { 'Content-Type': 'application/json' },
})
// let json = await res.json()
// console.log(json)
} else {
logger.warn('Repeat deployment, updating metadata')
try {
await fetch(metadataDB + functionHash, {
method: 'put',
body: JSON.stringify({
memory: memory,
_rev: json._rev
}),
headers: { 'Content-Type': 'application/json' },
})
// let json = await res.json()
// console.log(json)
} catch (err) {
console.log(err);
}
}
if (runtime === "container") {
try {
await deployContainer(file_path, functionHash)
console.log("called");
return Promise.resolve(functionHash)
} catch(err) {
return Promise.reject(err)
}
} else {
return Promise.resolve(functionHash)
}
} catch (err) {
logger.error(err)
return Promise.reject(err)
}
}
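For reference, the metadata document written above is keyed by function hash in the function_db_name database; after a repeat deployment a GET would return roughly the following (illustrative _rev value):

// GET http://<couchdb_host>/serverless/<functionHash>   (credentials as above)
// {
//     "_id": "<functionHash>",
//     "_rev": "2-9af3...",    // revision bumps on each repeat deployment
//     "memory": 330
// }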
function moveFile(file, file_path, functionHash) {
    return new Promise((resolve, reject) => {
        file.mv(file_path + functionHash, function (err) {
            if (err)
                reject(err)
            else
                resolve()
})
})
}
async function deployContainer(path, imageName) {
return new Promise((resolve, reject) => {
let buildStart = Date.now()
fs.writeFile('./repository/Dockerfile' + imageName,
`FROM node:latest
WORKDIR /app
COPY ./worker_env/package.json /app
ADD ./worker_env/node_modules /app/node_modules
COPY ${imageName}.js /app
ENTRYPOINT ["node", "${imageName}.js"]`
, function (err) {
if (err) {
logger.error("failed", err);
reject(err);
}
else {
logger.info('Dockerfile created');
const process = spawn('docker', ["build", "-t", registry_url + imageName, path, "-f", path + `Dockerfile${imageName}`]);
process.stdout.on('data', (data) => {
logger.info(`stdout: ${data}`);
});
process.stderr.on('data', (data) => {
logger.error(`stderr: ${data}`);
});
process.on('close', (code) => {
logger.warn(`child process exited with code ${code}`);
let timeDifference = Math.ceil((Date.now() - buildStart))
logger.info("image build time taken: ", timeDifference);
const process_push = spawn('docker', ["push", registry_url + imageName]);
process_push.stdout.on('data', (data) => {
console.log(`stdout: ${data}`);
});
process_push.stderr.on('data', (data) => {
logger.error(`stderr: ${data}`);
});
process_push.on('close', (code) => {
logger.info("image pushed to repository");
resolve();
})
});
}
});
})
}
router.post('/execute/:id', (req, res) => {
let map, aliases
// if (req.body.map)
// map = req.body.map
// else {
if (req.files && req.files.map) {
map = JSON.parse(req.files.map.data.toString());
let mapPlanner = JSON.parse(req.files.map.data.toString());
readMap(`./repository/aliases${req.params.id}.json`, true)
.then(data => {
aliases = data
let payload = JSON.parse(req.body.data)
console.log(payload);
speculative_deployment(aliases, mapPlanner);
orchestrator(res, payload, map, aliases, {})
})
} else {
readMap(`./repository/map${req.params.id}.json`)
.then(data => {
map = data
let mapPlanner = JSON.parse(JSON.stringify(map))
readMap(`./repository/aliases${req.params.id}.json`, true)
.then(data => {
aliases = data
let payload = JSON.parse(req.body.data)
speculative_deployment(aliases, mapPlanner);
orchestrator(res, payload, map, aliases, {})
})
})
}
})
async function orchestrator(res, payload, map, aliases, result) {
if (Object.keys(map).length == 0) {
console.log("time to resolve", result);
res.json(result)
// return resolve(result)
}
else {
for (const [functionName, metadata] of Object.entries(map)) {
// console.log(functionName, metadata, aliases[functionName]);
// console.log(metadata);
if (metadata.type === "function" && metadata.wait_for.length == 0) {
let url = `http://${constants.master_address}:${constants.master_port}/serverless/execute/${aliases[functionName].alias}`
console.log(url);
let data = {
method: 'post',
body: JSON.stringify({
runtime: metadata.runtime,
payload
}),
headers: { 'Content-Type': 'application/json' }
}
delete map[functionName]
aliases[functionName].status = "running"
fetch(url, data).then(res => res.json())
.then(json => {
// console.log(json);
result[functionName] = json
aliases[functionName].status = "done"
let branchMap = null
for (const [_key, metadata] of Object.entries(map)) {
if (metadata.type === "function" || metadata.type === "conditional") {
let index = metadata.wait_for.indexOf(functionName)
if (index >= 0)
metadata.wait_for.splice(index, 1);
}
if (metadata.type === "conditional" && metadata.wait_for.length == 0) {
let conditionResult = checkCondition(metadata.condition.op1, metadata.condition.op2, metadata.condition.op, result)
console.log(conditionResult, "aliases", aliases);
let branchToTake = metadata[conditionResult]
branchMap = map[branchToTake]
delete map[_key]
makeBranchRunnable(branchMap, aliases)
}
}
orchestrator(res, payload, (branchMap == null)? map: branchMap, aliases, result)
})
}
}
}
}
function makeBranchRunnable(branchMap, aliases) {
delete branchMap['type']
for (const [_key, metadata] of Object.entries(branchMap)) {
if (metadata.type === "function" || metadata.type === "conditional") {
let wait_for = []
            for (const dependent of metadata.wait_for) {
                if (aliases[dependent].status !== "done")
                    wait_for.push(dependent)
            }
            metadata.wait_for = wait_for // assign once, after filtering out finished dependencies
}
}
}
function checkCondition(op1, op2, op, result) {
op1 = op1.split(".")
let data = result[op1[0]][op1[1]]
return (operator[op](data, op2))? "success": "fail"
}
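checkCondition splits op1 as "module.field", reads that value from the accumulated result, and dispatches through the operator module, which is not included in this diff. Presumably it maps operator names to predicates; a hypothetical shape:

// Hypothetical shape of ./operator.js (not shown in this commit):
module.exports = {
    eq:  (a, b) => a === b,
    neq: (a, b) => a !== b,
    lt:  (a, b) => a < b,
    gt:  (a, b) => a > b
}
// Then checkCondition("f1.sum", 10, "gt", result) reads result.f1.sum
// and returns "success" if it exceeds 10, "fail" otherwise.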
async function speculative_deployment(aliases, map) {
if (constants.speculative_deployment) {
console.log(aliases);
let getData = []
for (const [mod, metadata] of Object.entries(map)) {
if (constants.JIT_deployment) {
console.log(mod, metadata, aliases[mod].alias);
let url = metricsDB + aliases[mod].alias
console.log(url);
let data = libSupport.fetchData(url)
console.log(data);
getData.push(data)
} else {
let payload = [{
topic: constants.topics.hscale,
messages: JSON.stringify({ "runtime": metadata.runtime, "functionHash": aliases[mod].alias })
}]
notify(payload)
}
}
if (constants.JIT_deployment) {
Promise.all(getData).then((values) => {
let dataMap = new Map()
for (const data of values) {
dataMap[data._id] = data
}
let done = new Map()
let toBeDone = new Set()
// var plannerMap = new Map(map)
do {
for (const [mod, metadata] of Object.entries(map)) {
if (metadata.wait_for.length == 0 && done[mod] === undefined) {
done[mod] = dataMap[aliases[mod].alias][metadata.runtime].coldstart // expecting the first ones to run
// to be hit by coldstarts
// delete plannerMap[mod];
} else if (done[mod] === undefined) {
let flag = true
let maxWait = 0
for (const dependency of metadata.wait_for) {
console.log(dependency);
if (done[dependency] === undefined) {
flag = false
break
} else if (maxWait < done[dependency])
maxWait = done[dependency]
}
if (flag) {
console.log("notifying", mod);
let notifyTime = ((maxWait - dataMap[aliases[mod].alias][metadata.runtime].starttime) > 0) ?
maxWait - dataMap[aliases[mod].alias][metadata.runtime].starttime : 0
console.log(mod, "max wait", maxWait, "notify time:", notifyTime);
let payload = [{
topic: constants.topics.hscale,
messages: JSON.stringify({ "runtime": metadata.runtime, "functionHash": aliases[mod].alias })
}]
setTimeout(notify, notifyTime, payload)
done[mod] = maxWait + dataMap[aliases[mod].alias][metadata.runtime].warmstart
                        if (toBeDone.has(mod))
                            toBeDone.delete(mod) // Set removal; `delete toBeDone[mod]` would be a no-op
// delete plannerMap[mod]
} else {
toBeDone.add(mod)
}
}
console.log(done, toBeDone);
}
} while (toBeDone.size != 0)
})
}
}
}
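To make the JIT path concrete: a module is pre-warmed starttime ms before its slowest dependency is projected to finish, so the worker comes up just as its input arrives. A worked example with assumed metrics (numbers are illustrative, not from the metrics database):

// Assumed metrics for one module:
const maxWait = 300    // slowest dependency's projected finish (ms)
const starttime = 120  // measured worker startup latency (ms)
const warmstart = 50   // warm invocation latency (ms)

const notifyTime = Math.max(maxWait - starttime, 0) // 180: delay before the hscale message is sent
const done = maxWait + warmstart                    // 350: projected finish used for downstream planning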
function readMap(filename, alias = false) {
return new Promise((resolve, reject) => {
fs.readFile(filename, (err, blob) => {
if (err)
reject(err)
else {
const data = JSON.parse(blob)
if (alias) {
for (const [key, functionHash] of Object.entries(data)) {
data[key] = {
alias: functionHash,
status: "waiting"
}
// libSupport.fetchData(metricsDB + functionHash, null)
// .then(metrics => {
// data[key]
// })
}
}
resolve(data)
}
})
})
}
function notify(payload) {
libSupport.producer.send(payload, function () { })
}
function createDirectory(path) {
return new Promise((resolve, reject) => {
if (!fs.existsSync(path)) {
fs.mkdir(path, err => {
                if (err)
                    reject(err)
                else
                    resolve()
})
} else {
resolve();
}
})
}
module.exports = router;
"use strict";
-const express = require('express')
const bodyParser = require('body-parser')
+const express = require('express');
const fileUpload = require('express-fileupload');
-const constants = require('.././constants.json')
-const secrets = require('./secrets.json')
-const fs = require('fs')
+const constants = require('.././constants.json');
+const chainHandler = require('./explicit_chain_handler');
+const secrets = require('./secrets.json');
+const fs = require('fs');
const { spawn } = require('child_process');
-const morgan = require('morgan')
-const heap = require('heap')
+const morgan = require('morgan');
+const heap = require('heap');
const fetch = require('node-fetch');
const swStats = require('swagger-stats');
const apiSpec = require('./swagger.json');
const util = require('util')
/**
* URL to the couchdb database server used to store function metadata
*/
let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metadataDB = metadataDB + "/" + constants.couchdb_db_name + "/"
metadataDB = metadataDB + "/" + constants.function_db_name + "/"
let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"