Commit 5846c061 authored by nilanjandaw

Merge branch 'master' of https://git.cse.iitb.ac.in/synerg/xanadu

parents b1271413 124cdfe7
*repository/
bitnami*
node_modules
package-lock.json
@@ -2,5 +2,5 @@
"mqtt_url": "10.129.6.5",
"registry_url" :"10.129.6.5:5000/",
"master_port": 8080,
"master_address": "10.129.6.5"
"master_address": "localhost"
}
\ No newline at end of file
{
"id": "tpt8hqn7ok"
}
\ No newline at end of file
const mqtt = require('mqtt')
const constants = require(".././constants.json")
console.log(constants.mqtt_url);
const client = mqtt.connect('mqtt://' + constants.mqtt_url)
const node_id = require("./config.json").id
const libSupport = require('./lib')
const execute = require('./execute')
const fs = require('fs')
const node_id = libSupport.makeid(10)
const local_repository = __dirname + "/local_repository/"
const host_url = "http://" + constants.master_address + ":" + constants.master_port
client.on('connect', function () {
client.subscribe(node_id, function (err) {
if (!err) {
console.log("node listening to id", node_id);
}
})
})
client.on('message', function (topic, message) {
let kafka = require('kafka-node'),
Producer = kafka.Producer,
KeyedMessage = kafka.KeyedMessage,
client = new kafka.KafkaClient({
kafkaHost: '10.129.6.5:9092',
autoConnect: true
}),
producer = new Producer(client),
Consumer = kafka.Consumer,
consumer = new Consumer(client,
[
{ topic: node_id, partition: 0, offset: 0}
],
[
{ autoCommit: true }
])
consumer.on('message', function (message) {
console.log(message);
let topic = message.topic
message = message.value
message = JSON.parse(message)
if (topic !== 'heartbeat') {
let runtime = message.runtime
@@ -34,30 +42,43 @@ client.on('message', function (topic, message) {
if (runtime === "isolate")
execute.runIsolate(local_repository + functionHash).then(result => {
client.publish("response", JSON.stringify({
producer.send([{
topic: "response",
messages: JSON.stringify({
status: "success",
result,
function_id
}))
function_id})
}], () =>{})
})
else if (runtime === "process")
execute.runProcess(local_repository + functionHash).then(result => {
client.publish("response", JSON.stringify({
producer.send(
[{
topic: "response",
messages: JSON.stringify({
status: "success",
result,
function_id
}))
function_id})
}], () =>{})
})
else if (runtime === "container")
execute.runContainer(functionHash).then(result => {
client.publish("response", JSON.stringify({
producer.send(
[{
topic: "response",
messages: JSON.stringify({
status: "success",
result,
function_id
}))
function_id})
}], () =>{})
})
else {
client.publish("response", JSON.stringify({ status: "unknown runtime" }))
producer.send(
[{
topic: "response",
messages: JSON.stringify({ status: "unknown runtime" })
}], () =>{})
return
}
})
@@ -65,30 +86,43 @@ client.on('message', function (topic, message) {
if (runtime === "isolate")
execute.runIsolate(local_repository + functionHash).then(result => {
client.publish("response", JSON.stringify({
producer.send(
[{
topic: "response",
messages: JSON.stringify({
status: "success",
result,
function_id
}))
function_id})
}], () =>{})
})
else if (runtime === "process")
execute.runProcess(local_repository + functionHash).then(result => {
client.publish("response", JSON.stringify({
producer.send(
[{
topic: "response",
messages: JSON.stringify({
status: "success",
result,
function_id
}))
function_id})
}], () =>{})
})
else if (runtime === "container")
execute.runContainer(functionHash).then(result => {
client.publish("response", JSON.stringify({
producer.send(
[{
topic: "response",
messages: JSON.stringify({
status: "success",
result,
function_id
}))
function_id})
}], () =>{})
})
else {
client.publish("response", JSON.stringify({ status: "unknown runtime" }))
producer.send(
[{
topic: "response",
messages: JSON.stringify({ status: "unknown runtime" })
}], () =>{})
return
}
}
@@ -99,7 +133,13 @@ client.on('message', function (topic, message) {
})
function heartbeat() {
client.publish("heartbeat", JSON.stringify({"address": node_id}))
let payload = [{
topic: "heartbeat",
messages: JSON.stringify({"address": node_id})
}]
producer.send(payload, function() {
})
}
setInterval(heartbeat, 1000);
\ No newline at end of file
*
!.gitignore
\ No newline at end of file
@@ -14,6 +14,7 @@
"express": "^4.17.1",
"express-fileupload": "^1.1.6",
"isolated-vm": "^3.0.0",
"kafka-node": "^5.0.0",
"morgan": "^1.9.1",
"mqtt": "^3.0.0",
"redis": "^2.8.0"
@@ -26,8 +26,7 @@ let kafka = require('kafka-node'),
{ topic: 'response', partition: 0, offset: 0}, { topic: 'heartbeat' }
],
[
{ autoCommit: false },
{ fromOffset: true}
{ autoCommit: true }
])
let db = new Map()
@@ -161,11 +160,16 @@ function dispatch() {
let function_id = libSupport.makeid(20)
console.log("Dispatching function with Id", function_id, runtime);
let node_id = getAddress()
client.publish(node_id, JSON.stringify({
let payload = [{
topic: node_id,
messages: JSON.stringify({
"type": "execute",
function_id,
runtime, functionHash
}))
}),
partition: 0
}]
producer.send(payload, () => {})
db.set(function_id, res)
}
@@ -204,7 +208,25 @@ app.listen(port, () => console.log(`Server listening on port ${port}!`))
// })
consumer.on('message', function (message) {
// console.log(message);
let topic = message.topic
message = message.value
if (topic === "response") {
message = JSON.parse(message)
console.log(message);
let res = db.get(message.function_id)
res.json({
"status": "success",
"reply": message.result
})
db.delete(message.function_id)
} else if (topic === "heartbeat") {
message = JSON.parse(message)
if (workerNodes.indexOf(message.address) === -1) {
workerNodes.push(message.address)
console.log(workerNodes);
}
}
});
setInterval(dispatch, 1000);
setInterval(dispatch, 2000);
@@ -14,6 +14,7 @@
"express": "^4.17.1",
"express-fileupload": "^1.1.6",
"isolated-vm": "^3.0.0",
"kafka-node": "^5.0.0",
"morgan": "^1.9.1",
"mqtt": "^3.0.0",
"nano": "^8.1.0",
*
!.gitignore
\ No newline at end of file
## Hybrid execution environment for Serverless Workloads
Execution environments for FaaS platforms are typically container oriented, but this creates problems: containers are on the one hand not totally secure, and on the other hand not capable of high performance. This project looks into creating a hybrid execution environment to cater to different workload needs.
### System Requirements
- Node.js (10.x and above)
- g++
- build-essential
### How to execute?
After Node.js has been installed:
- install the dependencies: execute `npm install` from within the project folder
- run the server as `npm start` or `node index.js`
### How to interact?
The platform works via an HTTP API based interface, which is divided into two parts:
- Deploy: The deploy interface is used to upload the function file and store it on the server, and also to set up containers and VM images. <br>
An example CURL command:
```
curl -X POST \
http://localhost:8080/serverless/deploy \
-H 'Content-Type: application/x-www-form-urlencoded' \
-H 'cache-control: no-cache' \
-H 'content-type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW' \
-F runtime=container \
-F serverless=@/home/nilanjan/Desktop/serverless/hybrid/test/script.js
```
<br> The POST request contains two parameters: 1. <em>runtime</em>, which specifies the runtime to use, viz. isolate, process, container or virtual machine, and 2. <em>serverless</em>, which sends the serverless function as a file via multipart/form-data.
<br> On successful deployment the API returns a function key which is to be used for function execution.
- Execute: To execute the submitted function, we use the Execute API. <br>
An example CURL command:
```
curl -X POST \
http://localhost:8080/serverless/execute/761eec785d64451203293427bea5c7ad \
-H 'cache-control: no-cache' \
-H 'content-type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW' \
-F runtime=process
```
<br> The API takes the key returned by the deploy API as a route value, and a runtime parameter specifying the runtime to be used.
\ No newline at end of file
# Project Xanadu
## A Hybrid execution environment for Serverless Workloads
Execution environments for FaaS platforms are typically container oriented, but this creates problems: containers are on the one hand not totally secure, and on the other hand not capable of high performance. This project looks into creating a hybrid execution environment to cater to different workload needs.
## Architecture
Xanadu is divided into two extremely loosely coupled modules, the **Dispatch Module (DM)** and the **Resource Manager (RM)**. The RM looks after resource provisioning and consolidation at the host level, while the DM handles user requests and executes them at the requisite isolation level using resources provided by the RM. \
A loose architecture diagram of Xanadu is given below.
![Xanadu Architecture](design_documents/hybrid_serverless.png)
## Dispatch Module (DM)
The DM is divided into two submodules: the **Dispatcher** and the **Dispatch Daemon**. The Dispatcher runs on the Master node, while the Dispatch Daemon runs on each Worker node. When a request arrives at the Dispatcher, it queries the RM for resources and, on receiving them, requests the Dispatch Daemon to execute the function on the specified worker node.
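As a rough sketch of this hand-off (condensed from the dispatcher and dispatch daemon code in this commit; the concrete IDs, broker address, and result value below are placeholders):

```javascript
// Sketch only: how the Dispatcher asks a worker to run a function and how the
// Dispatch Daemon reports the result back, both over Kafka via kafka-node.
// Topic names and message fields mirror the code in this commit.
const kafka = require('kafka-node')
const client = new kafka.KafkaClient({ kafkaHost: '10.129.6.5:9092', autoConnect: true })
const producer = new kafka.Producer(client)

const node_id = "tpt8hqn7ok"                            // worker ID from dispatch_daemon/config.json
const function_id = "example_function_id"               // per-request handle generated by the Dispatcher
const functionHash = "761eec785d64451203293427bea5c7ad" // key returned by the deploy API
const runtime = "process"                               // "isolate" | "process" | "container"

producer.on('ready', () => {
    // Dispatcher side: publish an execute request on the worker's own node_id topic.
    producer.send([{
        topic: node_id,
        messages: JSON.stringify({ "type": "execute", function_id, runtime, functionHash }),
        partition: 0
    }], () => {})

    // Dispatch Daemon side: after running the function, publish the result on the
    // shared "response" topic; the Dispatcher matches it to the request via function_id.
    producer.send([{
        topic: "response",
        messages: JSON.stringify({ status: "success", result: "<function output>", function_id })
    }], () => {})
})
```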
### Directory Structure
```bash
.
├── constants.json
├── dispatch_daemon
│   ├── config.json
│   ├── execute.js
│   ├── index.js
│   ├── isolate.js
│   ├── lib.js
│   ├── local_repository
│   ├── package.json
│   └── package-lock.json
├── dispatcher
│   ├── index.js
│   ├── isolate.js
│   ├── lib.js
│   ├── package.json
│   ├── package-lock.json
│   └── repository
└── package-lock.json
```
### System Requirements
- Node.js (10.x and above)
- g++
- build-essential
- Docker
- Java
- Apache Kafka (configured to allow automatic creation and deletion of topics; see the sketch below)
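The last requirement refers to the broker settings `auto.create.topics.enable` and `delete.topic.enable`. A minimal sketch of enabling them, assuming the broker configuration lives at `/opt/kafka/config/server.properties` (adjust the path to your installation):

```bash
# Assumed installation path; edit server.properties wherever your broker keeps it.
echo "auto.create.topics.enable=true" >> /opt/kafka/config/server.properties
echo "delete.topic.enable=true" >> /opt/kafka/config/server.properties
# Restart the broker afterwards so the settings take effect.
```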
### Starting the server
After Node.js has been installed:
- Install the dependencies: execute `npm install` from within the project folder
- Modify the constants.json file as required.
- For Worker nodes, modify the config.json in dispatch_daemon to give each node a unique ID (see the example after this list).
- Run the Master and Worker server as `npm start` or `node index.js`
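As an example, a dispatch_daemon/config.json with the shape used in this commit can be created as follows; the ID value is a placeholder and only needs to be distinct per worker:

```bash
# Placeholder worker ID; give every worker node its own distinct value.
cat > dispatch_daemon/config.json << 'EOF'
{
    "id": "tpt8hqn7ok"
}
EOF
```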
### Interaction API
The platform works via an HTTP API based interface, which is divided into two parts:
- Deploy: The deploy interface is used to upload the function file and store it on the server, and also to set up containers and VM images.\
An example CURL command:
```bash
curl -X POST \
http://localhost:8080/serverless/deploy \
-H 'Content-Type: application/x-www-form-urlencoded' \
-H 'cache-control: no-cache' \
-H 'content-type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW' \
-F runtime=container \
-F serverless=@/home/nilanjan/Desktop/serverless/hybrid/test/script.js
```
The POST request contains two parameters: 1. *runtime*, which specifies the runtime to use, viz. isolate, process, container or virtual machine, and 2. *serverless*, which sends the serverless function as a file via multipart/form-data.\
On successful deployment the API returns a function key which is to be used for function execution.
- Execute: To execute the submitted function, we use the Execute API.\
An example CURL command:
```bash
curl -X POST \
http://localhost:8080/serverless/execute/761eec785d64451203293427bea5c7ad \
-H 'cache-control: no-cache' \
-H 'content-type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW' \
-F runtime=process
```
The API takes the key returned by the deploy API as a route value, and a runtime parameter specifying the runtime to be used.
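On success, the reply body follows the shape produced by the dispatcher code in this commit; a rough sketch of the round trip, reusing the key from the deploy example above:

```bash
# Execute a previously deployed function and inspect the reply. The "reply" value
# depends on the deployed function; the body shape follows the dispatcher code.
curl -s -X POST \
  http://localhost:8080/serverless/execute/761eec785d64451203293427bea5c7ad \
  -F runtime=process
# Example output:
# {"status":"success","reply":"<result of the function run>"}
```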