Commit 4d074b06 authored by Nilanjan Daw

Merge branch 'explicit_function_chaining'

parents 66dc57aa fb27d6a9
 {
-    "registry_url" :"localhost:5000/",
+    "registry_url" :"10.129.6.5:5000/",
     "master_port": 8080,
     "master_address": "localhost",
     "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
-    "couchdb_host": "localhost:5984",
-    "function_db_name": "serverless",
-    "metrics_db_name": "metrics",
-    "implicit_chain_db_name": "implicit_chain",
+    "couchdb_host": "10.129.6.5:5984",
+    "db": {
+        "function_meta": "serverless",
+        "metrics": "metrics",
+        "implicit_chain_meta": "implicit_chain",
+        "explicit_chain_meta": "explicit_chain"
+    },
     "network": {
         "network_bridge": "hybrid_kafka-serverless",
+        "use_bridge": false,
         "internal": {
-            "kafka_host": "kafka:9092"
+            "kafka_host": "10.129.6.5:9092"
         },
         "external": {
-            "kafka_host": "localhost:29092"
+            "kafka_host": "10.129.6.5:9092"
         }
     },
     "topics": {
@@ -28,7 +32,10 @@
     "autoscalar_metrics": {
         "open_request_threshold": 100
     },
-    "speculative_deployment": false,
-    "JIT_deployment": false,
+    "metrics": {
+        "alpha": 0.7
+    },
+    "speculative_deployment": true,
+    "JIT_deployment": true,
     "id_size": 20
 }
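The new use_bridge flag and the internal/external kafka_host split above are consumed further down in this commit: containers launched by runContainer are handed network.internal.kafka_host, while host-side code such as makeTopic connects through network.external.kafka_host. A tiny sketch (not part of the commit) of reading the two endpoints, using only the keys shown in the config above:

    // Sketch only: constants.json as shown above.
    const constants = require('./constants.json')
    const kafkaHostForContainers = constants.network.internal.kafka_host // passed to docker run in runContainer
    const kafkaHostForHost = constants.network.external.kafka_host       // used by makeTopic's KafkaClient on the host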
{
    "registry_url" :"10.129.6.5:5000/",
    "master_port": 8080,
    "master_address": "10.129.6.5",
    "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
    "couchdb_host": "10.129.6.5:5984",
    "db": {
        "function_meta": "serverless",
        "metrics": "metrics",
        "implicit_chain_meta": "implicit_chain",
        "explicit_chain_meta": "explicit_chain"
    },
    "network": {
        "network_bridge": "hybrid_kafka-serverless",
        "internal": {
            "kafka_host": "kafka:9092"
        },
        "external": {
            "kafka_host": "10.129.6.5:9092"
        }
    },
    "topics": {
        "request_dm_2_rm": "request",
        "heartbeat": "heartbeat",
        "deployed": "deployed",
        "remove_worker": "removeWorker",
        "response_rm_2_dm": "RESPONSE_RM_2_DM_DUMMY",
        "hscale": "hscale",
        "log_channel": "LOG_COMMON"
    },
    "autoscalar_metrics": {
        "open_request_threshold": 100
    },
    "metrics": {
        "alpha": 0.7
    },
    "speculative_deployment": false,
    "JIT_deployment": true,
    "id_size": 20
}
@@ -92,9 +92,15 @@ function runContainer(metadata) {
                 if (code != 0)
                     reject("error")
                 else {
-                    const process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`,
-                        "--name", resource_id, registry_url + imageName,
-                        resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+                    let process = null;
+                    if (constants.network.use_bridge)
+                        process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`,
+                            "--name", resource_id, registry_url + imageName,
+                            resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+                    else
+                        process = spawn('docker', ["run", "--rm", "-p", `${port}:${port}`,
+                            "--name", resource_id, registry_url + imageName,
+                            resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
                     let result = "";
                     // timeStart = Date.now()
                     process.stdout.on('data', (data) => {
@@ -119,10 +125,15 @@ function runContainer(metadata) {
             })
         } else {
             logger.info("container starting at port", port);
-            const process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`,
-                "-p", `${port}:${port}`, "--name", resource_id,
-                registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+            let process = null;
+            if (constants.network.use_bridge)
+                process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`,
+                    "-p", `${port}:${port}`, "--name", resource_id,
+                    registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+            else
+                process = spawn('docker', ["run", "--rm",
+                    "-p", `${port}:${port}`, "--name", resource_id,
+                    registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
             let result = "";
             // timeStart = Date.now()
             process.stdout.on('data', (data) => {
...
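The two spawn branches above differ only in whether --network=<bridge> is passed. A minimal sketch (not part of the commit) of building the docker argument list once, assuming the same variables from runContainer (resource_id, imageName, port, registry_url, constants) are in scope:

    // Sketch only: same scope assumptions as runContainer in the diff above.
    let dockerArgs = ["run", "--rm"]
    if (constants.network.use_bridge)
        dockerArgs.push(`--network=${constants.network.network_bridge}`)
    dockerArgs.push("-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName,
        resource_id, imageName, port, "container", constants.network.internal.kafka_host)
    let process = spawn('docker', dockerArgs)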
@@ -158,7 +158,7 @@ function heartbeat() {
         topic: "heartbeat",
         messages: JSON.stringify({"address": node_id, "timestamp": Date.now()})
     }]
-    producer.send(payload, function() {})
+    producer.send(payload, function(cb) {})
 }
...
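kafka-node invokes the send callback with (err, data), so the cb parameter added above is really the error argument. A small sketch of a heartbeat publish that logs failures instead of swallowing them, using the same payload and node_id as heartbeat() above:

    // Sketch only: surfaces Kafka publish errors rather than ignoring them.
    producer.send(payload, (err) => {
        if (err)
            console.error("heartbeat publish failed for node", node_id, err)
    })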
@@ -27,7 +27,7 @@ function updateConfig() {
 }
 function makeTopic(id) {
-    console.log("Using Primary IP", id, "as topic");
+    console.log("Using Primary IP", id, "as topic", "publishing to:", constants.network.external.kafka_host);
     let client = new kafka.KafkaClient({
         kafkaHost: constants.network.external.kafka_host,
...
@@ -10,18 +10,11 @@ const { spawn } = require('child_process');
 const morgan = require('morgan');
 const heap = require('heap');
 const fetch = require('node-fetch');
-const swStats = require('swagger-stats');
-const apiSpec = require('./swagger.json');
+// const swStats = require('swagger-stats');
+// const apiSpec = require('./swagger.json');
 const util = require('util')
-/**
- * URL to the couchdb database server used to store function metadata
- */
-let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metadataDB = metadataDB + "/" + constants.function_db_name + "/"
-let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"
+const sharedMeta = require('./shared_meta')
 const app = express()
 const libSupport = require('./lib')
@@ -30,13 +23,15 @@ let date = new Date();
 let log_channel = constants.topics.log_channel
 let usedPort = new Map(), // TODO: remove after integration with RM
-    db = new Map(), // queue holding request to be dispatched
-    resourceMap = new Map(), // map between resource_id and resource details like node_id, port, associated function etc
-    functionToResource = new Map(), // a function to resource map. Each map contains a minheap of
+    db = sharedMeta.db, // queue holding request to be dispatched
+    resourceMap = sharedMeta.resourceMap, // map between resource_id and resource details like node_id, port, associated function etc
+    functionToResource = sharedMeta.functionToResource, // a function to resource map. Each map contains a minheap of
                                     // resources associated with the function
-    workerNodes = new Map(), // list of worker nodes currently known to the DM
-    functionBranchTree = new Map() // a tree to store function branch predictions
+    workerNodes = sharedMeta.workerNodes, // list of worker nodes currently known to the DM
+    functionBranchTree = sharedMeta.functionBranchTree, // a tree to store function branch predictions
+    metricsDB = sharedMeta.metricsDB,
+    metadataDB = sharedMeta.metadataDB
 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
@@ -64,11 +59,10 @@ app.use(morgan('combined', {
 app.use(express.json());
 app.use(express.urlencoded({ extended: true }));
 const file_path = __dirname + "/repository/"
 app.use('/repository', express.static(file_path)); // file server hosting deployed functions
 app.use(fileUpload())
-app.use(swStats.getMiddleware({ swaggerSpec: apiSpec })); // statistics middleware
-app.use('/serverless/chain', chainHandler); // chain router (explicit_chain_handler.js) for handling explicit chains
+// app.use(swStats.getMiddleware({ swaggerSpec: apiSpec })); // statistics middleware
+app.use('/serverless/chain', chainHandler.router); // chain router (explicit_chain_handler.js) for handling explicit chains
 let requestQueue = []
 const WINDOW_SIZE = 10
@@ -225,7 +219,7 @@ app.post('/serverless/execute/:id', (req, res) => {
     res.timestamp = Date.now()
     if (functionToResource.has(id)) {
         res.start = 'warmstart'
-        libSupport.reverseProxy(req, res, functionToResource, resourceMap, functionBranchTree)
+        libSupport.reverseProxy(req, res)
     } else {
         res.start = 'coldstart'
         /**
@@ -334,17 +328,14 @@ function postDeploy(message) {
         "reason": "deployment",
         "status": true,
         starttime: (Date.now() - resource.deploy_request_time)
-    }, message.resource_id, resourceMap)
+    }, message.resource_id)
     if (db.has(id)) {
         let sendQueue = db.get(id)
         logger.info("forwarding request via reverse proxy to: " + JSON.stringify(resource));
         while (sendQueue && sendQueue.length != 0) {
             let { req, res } = sendQueue.shift()
-            libSupport.reverseProxy(req, res, functionToResource, resourceMap, functionBranchTree)
-                .then(() => {
-                })
+            libSupport.reverseProxy(req, res)
         }
         db.delete(id)
     }
@@ -410,7 +401,7 @@ consumer.on('message', function (message) {
                 "reason": "terminate",
                 "total_request": message.total_request,
                 "status": true
-            }, message.resource_id, resourceMap)
+            }, message.resource_id)
                 .then(() => {
                     resourceMap.delete(message.resource_id)
                     if (resourceArray.length == 0)
@@ -428,7 +419,11 @@ consumer.on('message', function (message) {
             functionHash = message.functionHash
         logger.info(`Generated new resource ID: ${resource_id} for runtime: ${runtime}`);
         console.log("Resource Status: ", functionToResource);
+        if (!functionToResource.has(functionHash + runtime) && !db.has(functionHash + runtime)) {
+            console.log("adding db");
+            db.set(functionHash + runtime, [])
+        }
         /**
          * Request RM for resource
          */
@@ -451,6 +446,7 @@ consumer.on('message', function (message) {
             }),
             partition: 0
         }]
         producer.send(payloadToRM, () => {
             // db.set(functionHash + runtime, { req, res })
             console.log("sent rm");
@@ -521,7 +517,7 @@ function autoscalar() {
  */
 async function speculative_deployment(req, runtime) {
     if (constants.speculative_deployment && req.headers['x-resource-id'] === undefined) {
-        console.log(functionBranchTree, req.params.id);
+        // console.log(functionBranchTree, req.params.id);
         if (functionBranchTree.has(req.params.id)) {
             let branchInfo = functionBranchTree.get(req.params.id)
@@ -558,7 +554,7 @@ async function speculative_deployment(req, runtime) {
     }
 }
 setInterval(libSupport.metrics.broadcastMetrics, 5000)
-setInterval(libSupport.viterbi, 1000, functionBranchTree)
+// setInterval(libSupport.viterbi, 1000)
 setInterval(autoscalar, 1000);
 setInterval(dispatch, 1000);
 app.listen(port, () => logger.info(`Server listening on port ${port}!`))
\ No newline at end of file
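With the shared maps moved into shared_meta, the call sites above now pass only (req, res) to reverseProxy. A hypothetical sketch of how the lib.js side could read the shared state instead of taking it as parameters (lib.js itself is not shown in this diff, so anything beyond sharedMeta's exports is an assumption):

    // Hypothetical sketch, not taken from the commit.
    const sharedMeta = require('./shared_meta')

    function reverseProxy(req, res) {
        // shared state is read from the module instead of being threaded through as arguments
        const { functionToResource, resourceMap, functionBranchTree } = sharedMeta
        // ... pick a warm resource for the requested function, resolve its node/port via
        // resourceMap, forward the request, and update functionBranchTree as before
    }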
This diff is collapsed.
@@ -5,11 +5,12 @@ const secrets = require('./secrets.json')
 const fetch = require('node-fetch');
 const util = require('util')
 const prom = require('prom-client');
+const sharedMeta = require('./shared_meta');
 const Registry = prom.Registry;
 const register = new Registry();
-const alpha = 0.99
+const alpha = constants.metrics.alpha
 let log_channel = constants.topics.log_channel,
     metrics = { }
@@ -29,8 +30,7 @@ register.registerMetric(coldstartMetric);
 register.registerMetric(starttimeMetric);
 register.registerMetric(requestMetric);
-let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"
+let metricsDB = sharedMeta.metricsDB
 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
     client = new kafka.KafkaClient({
@@ -129,6 +129,7 @@ async function broadcastMetrics() {
                 warmstart: metric.longterm.warmstart,
                 starttime: metric.longterm.starttime
             }
             let payload = {
                 method: 'put',
                 body: JSON.stringify(dbData),
@@ -136,6 +137,7 @@ async function broadcastMetrics() {
             }
             await fetch(metricsDB + functionHash, payload)
             metric.timestamp = Date.now()
         }
 }
...
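The smoothing factor moves from a hard-coded 0.99 to constants.metrics.alpha (0.7 in the new config). Assuming alpha is used as an exponentially weighted moving-average weight over the collected metrics (the update code itself is elided in the hunks above), the general form is:

    // Generic EWMA update; whether alpha weights the new sample or the running value
    // in the actual metrics code is not visible in this diff.
    function smooth(running, observed, alpha = constants.metrics.alpha) {
        return alpha * observed + (1 - alpha) * running
    }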
const secrets = require('./secrets.json')
const constants = require('.././constants.json')

let db = new Map(), // queue holding request to be dispatched
    resourceMap = new Map(), // map between resource_id and resource details like node_id, port, associated function etc
    functionToResource = new Map(), // a function to resource map. Each map contains a minheap of
                                    // resources associated with the function
    workerNodes = new Map(), // list of worker nodes currently known to the DM
    functionBranchTree = new Map(), // a tree to store function branch predictions
    conditionProbabilityExplicit = new Map() // tree holding conditional probabilities for explicit chains

/**
 * URL to the couchdb database server used to store data
 */
let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metadataDB = metadataDB + "/" + constants.db.function_meta + "/"
let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metricsDB = metricsDB + "/" + constants.db.metrics + "/"
let implicitChainDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
implicitChainDB = implicitChainDB + "/" + constants.db.implicit_chain_meta + "/"
let explicitChainDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
explicitChainDB = explicitChainDB + "/" + constants.db.explicit_chain_meta + "/"

module.exports = {
    db, functionBranchTree, functionToResource, workerNodes, resourceMap,
    conditionProbabilityExplicit,
    metadataDB, metricsDB, implicitChainDB, explicitChainDB
}
\ No newline at end of file
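The four database URLs above repeat the same credential prefix; a small helper along these lines (a sketch, not part of the commit) would keep the construction in one place:

    // Sketch only: same secrets.json and constants.json as shared_meta.js above.
    const couchBase = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
    const dbURL = name => `${couchBase}/${name}/`

    let metadataDB = dbURL(constants.db.function_meta),
        metricsDB = dbURL(constants.db.metrics),
        implicitChainDB = dbURL(constants.db.implicit_chain_meta),
        explicitChainDB = dbURL(constants.db.explicit_chain_meta)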