SYNERG / xanadu · Commits
Commit 4d074b06, authored Apr 20, 2020 by Nilanjan Daw

Merge branch 'explicit_function_chaining'

Parents: 66dc57aa, fb27d6a9

Showing 10 changed files with 467 additions and 243 deletions
Changed files:

dispatch_system/constants.json                                 +16   -9
dispatch_system/constants_server.json                          +40   -0
dispatch_system/dispatch_daemon/execute.js                     +18   -7
dispatch_system/dispatch_daemon/index.js                        +1   -1
dispatch_system/dispatch_daemon/lib.js                          +1   -1
dispatch_system/dispatch_manager/explicit_chain_handler.js    +166  -66
dispatch_system/dispatch_manager/index.js                      +25  -29
dispatch_system/dispatch_manager/lib.js                       +163 -127
dispatch_system/dispatch_manager/metrics.js                     +5   -3
dispatch_system/dispatch_manager/shared_meta.js                +32   -0
dispatch_system/constants.json  (view file @ 4d074b06)

 {
-    "registry_url": "localhost:5000/",
+    "registry_url": "10.129.6.5:5000/",
     "master_port": 8080,
     "master_address": "localhost",
     "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
-    "couchdb_host": "localhost:5984",
-    "function_db_name": "serverless",
-    "metrics_db_name": "metrics",
-    "implicit_chain_db_name": "implicit_chain",
+    "couchdb_host": "10.129.6.5:5984",
+    "db": {
+        "function_meta": "serverless",
+        "metrics": "metrics",
+        "implicit_chain_meta": "implicit_chain",
+        "explicit_chain_meta": "explicit_chain"
+    },
     "network": {
         "network_bridge": "hybrid_kafka-serverless",
+        "use_bridge": false,
         "internal": {
-            "kafka_host": "kafka:9092"
+            "kafka_host": "10.129.6.5:9092"
         },
         "external": {
-            "kafka_host": "localhost:29092"
+            "kafka_host": "10.129.6.5:9092"
         }
     },
     "topics": {
...
@@ -28,7 +32,10 @@
     "autoscalar_metrics": {
         "open_request_threshold": 100
     },
-    "speculative_deployment": false,
-    "JIT_deployment": false,
+    "metrics": {
+        "alpha": 0.7
+    },
+    "speculative_deployment": true,
+    "JIT_deployment": true,
     "id_size": 20
 }
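The flat *_db_name keys are folded into a nested "db" object here; later files in this commit build their CouchDB URLs from these nested keys (see shared_meta.js at the end of this diff, which also prepends credentials from secrets.json). A minimal sketch of how such a restructured config is consumed; the couchUrl helper is hypothetical, only the constants file itself comes from the commit:

    // sketch: deriving a database URL from the restructured constants
    const constants = require('./constants.json')

    function couchUrl(dbKey) {
        // e.g. couchUrl('function_meta') -> "http://10.129.6.5:5984/serverless/"
        return `http://${constants.couchdb_host}/${constants.db[dbKey]}/`
    }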
dispatch_system/constants_server.json  (new file, 0 → 100644, view file @ 4d074b06)

{
    "registry_url": "10.129.6.5:5000/",
    "master_port": 8080,
    "master_address": "10.129.6.5",
    "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
    "couchdb_host": "10.129.6.5:5984",
    "db": {
        "function_meta": "serverless",
        "metrics": "metrics",
        "implicit_chain_meta": "implicit_chain",
        "explicit_chain_meta": "explicit_chain"
    },
    "network": {
        "network_bridge": "hybrid_kafka-serverless",
        "internal": {
            "kafka_host": "kafka:9092"
        },
        "external": {
            "kafka_host": "10.129.6.5:9092"
        }
    },
    "topics": {
        "request_dm_2_rm": "request",
        "heartbeat": "heartbeat",
        "deployed": "deployed",
        "remove_worker": "removeWorker",
        "response_rm_2_dm": "RESPONSE_RM_2_DM_DUMMY",
        "hscale": "hscale",
        "log_channel": "LOG_COMMON"
    },
    "autoscalar_metrics": {
        "open_request_threshold": 100
    },
    "metrics": {
        "alpha": 0.7
    },
    "speculative_deployment": false,
    "JIT_deployment": true,
    "id_size": 20
}
dispatch_system/dispatch_daemon/execute.js  (view file @ 4d074b06)

@@ -92,9 +92,15 @@ function runContainer(metadata) {
             if (code != 0)
                 reject("error")
             else {
-                const process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+                let process = null;
+                if (constants.network.use_bridge)
+                    process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+                else
+                    process = spawn('docker', ["run", "--rm", "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
                 let result = "";
                 // timeStart = Date.now()
                 process.stdout.on('data', (data) => {
...
@@ -119,10 +125,15 @@ function runContainer(metadata) {
             })
         }
         else {
             logger.info("container starting at port", port);
-            const process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+            let process = null;
+            if (constants.network.use_bridge)
+                process = spawn('docker', ["run", "--rm", `--network=${constants.network.network_bridge}`, "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
+            else
+                process = spawn('docker', ["run", "--rm", "-p", `${port}:${port}`, "--name", resource_id, registry_url + imageName, resource_id, imageName, port, "container", constants.network.internal.kafka_host]);
            let result = "";
            // timeStart = Date.now()
            process.stdout.on('data', (data) => {
...
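For readers unpacking the spawn argument arrays above: with the new use_bridge flag enabled, the spawned command is equivalent to a docker run of roughly the following shape (a sketch assembled from the surrounding variables; the trailing arguments are handed to the container's entrypoint). With use_bridge false, the --network flag is simply omitted.

    // Equivalent CLI, sketched as a comment (values substituted at runtime):
    //   docker run --rm --network=<network_bridge> -p <port>:<port> --name <resource_id> \
    //     <registry_url><imageName> <resource_id> <imageName> <port> container <internal.kafka_host>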
dispatch_system/dispatch_daemon/index.js  (view file @ 4d074b06)

@@ -158,7 +158,7 @@ function heartbeat() {
         topic: "heartbeat",
         messages: JSON.stringify({ "address": node_id, "timestamp": Date.now() })
     }]
-    producer.send(payload, function () {})
+    producer.send(payload, function (cb) {})
 }
...
dispatch_system/dispatch_daemon/lib.js  (view file @ 4d074b06)

@@ -27,7 +27,7 @@ function updateConfig() {
 }

 function makeTopic(id) {
-    console.log("Using Primary IP", id, "as topic");
+    console.log("Using Primary IP", id, "as topic", "publishing to: ", constants.network.external.kafka_host);

     let client = new kafka.KafkaClient({
         kafkaHost: constants.network.external.kafka_host,
...
dispatch_system/dispatch_manager/explicit_chain_handler.js  (view file @ 4d074b06)

@@ -6,17 +6,20 @@ const fs = require('fs')
 const { spawn } = require('child_process')
 const fetch = require('node-fetch')
 const constants = require('../constants.json')
 const secrets = require('./secrets.json')
 const operator = require('./operator')
-let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metadataDB = metadataDB + "/" + constants.function_db_name + "/"
-let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"
+const sharedMeta = require('./shared_meta')

 const logger = libSupport.logger
 const registry_url = constants.registry_url

+let functionToResource = sharedMeta.functionToResource,
+    db = sharedMeta.db,
+    conditionProbabilityExplicit = sharedMeta.conditionProbabilityExplicit,
+    metricsDB = sharedMeta.metricsDB,
+    metadataDB = sharedMeta.metadataDB,
+    explicitChainDB = sharedMeta.explicitChainDB

 router.post('/deploy', (req, res) => {
...
@@ -190,50 +193,57 @@ async function deployContainer(path, imageName) {
 router.post('/execute/:id', (req, res) => {
     let map, aliases
     // if (req.body.map)
     //     map = req.body.map
     // else {
-    if (req.files && req.files.map) {
-        map = JSON.parse(req.files.map.data.toString());
-        let mapPlanner = JSON.parse(req.files.map.data.toString());
-        readMap(`./repository/aliases${req.params.id}.json`, true)
-            .then(data => {
-                aliases = data
-                let payload = JSON.parse(req.body.data)
-                console.log(payload);
-                speculative_deployment(aliases, mapPlanner);
-                orchestrator(res, payload, map, aliases, {})
-            })
-    }
-    else {
-        readMap(`./repository/map${req.params.id}.json`)
-            .then(data => {
-                map = data
-                let mapPlanner = JSON.parse(JSON.stringify(map))
-                readMap(`./repository/aliases${req.params.id}.json`, true)
-                    .then(data => {
-                        aliases = data
-                        let payload = JSON.parse(req.body.data)
-                        speculative_deployment(aliases, mapPlanner);
-                        orchestrator(res, payload, map, aliases, {})
-                    })
-            })
-    }
+    let chain_id = req.params.id
+    libSupport.fetchData(explicitChainDB + chain_id)
+        .then(chainData => {
+            console.log(chainData);
+            if (chainData.error !== "not_found")
+                conditionProbabilityExplicit[chain_id] = chainData
+            if (req.files && req.files.map) {
+                map = JSON.parse(req.files.map.data.toString());
+                let mapPlanner = JSON.parse(req.files.map.data.toString());
+                readMap(`./repository/aliases${chain_id}.json`, true)
+                    .then(data => {
+                        aliases = data
+                        let payload = JSON.parse(req.body.data)
+                        console.log(payload);
+                        speculative_deployment(chain_id, aliases, mapPlanner, 0);
+                        orchestrator(chain_id, res, payload, map, aliases, {})
+                    })
+            }
+            else {
+                readMap(`./repository/map${chain_id}.json`)
+                    .then(data => {
+                        map = data
+                        let mapPlanner = JSON.parse(JSON.stringify(map))
+                        readMap(`./repository/aliases${chain_id}.json`, true)
+                            .then(data => {
+                                aliases = data
+                                let payload = JSON.parse(req.body.data)
+                                speculative_deployment(chain_id, aliases, mapPlanner, 0);
+                                orchestrator(chain_id, res, payload, map, aliases, {})
+                            })
+                    })
+            }
+        })
 })

-async function orchestrator(res, payload, map, aliases, result) {
+async function orchestrator(chain_id, res, payload, map, aliases, result) {
     if (Object.keys(map).length == 0) {
         console.log("time to resolve", result);
         res.json(result)
+        let payload = {
+            method: 'put',
+            body: JSON.stringify(conditionProbabilityExplicit[chain_id]),
+            headers: { 'Content-Type': 'application/json' }
+        }
+        libSupport.fetchData(explicitChainDB + chain_id, payload)
         // return resolve(result)
     }
     else {
         for (const [functionName, metadata] of Object.entries(map)) {
             // console.log(functionName, metadata, aliases[functionName]);
             // console.log(metadata);
             if (metadata.type === "function" && metadata.wait_for.length == 0) {
                 let url = `http://${constants.master_address}:${constants.master_port}/serverless/execute/${aliases[functionName].alias}`
...
@@ -244,12 +254,12 @@ async function orchestrator(res, payload, map, aliases, result) {
                         runtime: metadata.runtime,
                         payload
                     }),
-                    headers: { 'Content-Type': 'application/json' }
+                    headers: { 'Content-Type': 'application/json', 'x-chain-type': 'explicit' }
                 }
                 delete map[functionName]
                 aliases[functionName].status = "running"
-                fetch(url, data).then(res => res.json())
+                libSupport.fetchData(url, data)
                     .then(json => {
                         // console.log(json);
                         result[functionName] = json
...
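The new 'x-chain-type': 'explicit' header is how the orchestrator marks its own internally generated invocations; reverseProxy in dispatch_system/dispatch_manager/lib.js (further down in this commit) skips the implicit-chain predictor when it sees it. A minimal sketch of what such an invocation looks like from the caller's side; the host, port, alias and runtime values are illustrative, only the header and body shape come from the diff:

    const fetch = require('node-fetch')

    // explicit-chain invocation: carries the marker header, so the proxy
    // will not feed it to branchChainPredictor()
    fetch('http://localhost:8080/serverless/execute/<alias>', {
        method: 'post',
        body: JSON.stringify({ runtime: 'python3.7', payload: {} }),
        headers: { 'Content-Type': 'application/json', 'x-chain-type': 'explicit' }
    })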
@@ -267,7 +277,21 @@ async function orchestrator(res, payload, map, aliases, result) {
             if (metadata.type === "conditional" && metadata.wait_for.length == 0) {
                 let conditionResult = checkCondition(metadata.condition.op1, metadata.condition.op2, metadata.condition.op, result)
                 console.log(conditionResult, "aliases", aliases);
+                if (conditionProbabilityExplicit[chain_id] === undefined)
+                    conditionProbabilityExplicit[chain_id] = {}
+                if (conditionProbabilityExplicit[chain_id][_key] === undefined)
+                    conditionProbabilityExplicit[chain_id][_key] = {
+                        request_count: 0,
+                        probability: 0
+                    }
+                let oldProbability = conditionProbabilityExplicit[chain_id][_key].probability
+                let updateProbability = (conditionResult === 'success') ? 1.0 : 0.0
+                conditionProbabilityExplicit[chain_id][_key].probability = oldProbability * conditionProbabilityExplicit[chain_id][_key].request_count + updateProbability
+                conditionProbabilityExplicit[chain_id][_key].request_count++
+                conditionProbabilityExplicit[chain_id][_key].probability /= conditionProbabilityExplicit[chain_id][_key].request_count
+                console.log(conditionResult, "probability table", conditionProbabilityExplicit);
                 let branchToTake = metadata[conditionResult]
                 branchMap = map[branchToTake]
                 delete map[_key]
...
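For reference, the probability bookkeeping added above is an incremental running mean over branch outcomes (1 when the condition resolved to 'success', 0 otherwise). A standalone sketch of the same update with hypothetical names:

    // p_new = (p_old * n + x) / (n + 1), where n is the number of requests seen so far
    function updateBranchProbability(entry, conditionResult) {
        const x = (conditionResult === 'success') ? 1.0 : 0.0
        entry.probability = (entry.probability * entry.request_count + x) / (entry.request_count + 1)
        entry.request_count += 1
        return entry
    }

    // e.g. updateBranchProbability({ request_count: 0, probability: 0 }, 'success')
    //      // -> { request_count: 1, probability: 1 }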
@@ -275,7 +299,7 @@ async function orchestrator(res, payload, map, aliases, result) {
                     }
                 }
-                orchestrator(res, payload, (branchMap == null) ? map : branchMap, aliases, result)
+                orchestrator(chain_id, res, payload, (branchMap == null) ? map : branchMap, aliases, result)
             })
         }
     }
...
@@ -302,68 +326,131 @@ function checkCondition(op1, op2, op, result) {
     return (operator[op](data, op2)) ? "success" : "fail"
 }

-async function speculative_deployment(aliases, map) {
-    if (constants.speculative_deployment) {
-        console.log(aliases);
+async function speculative_deployment(chain_id, aliases, map, offset, done, toBeDone, ignoreSet) {
+    console.log("offset: ", offset, "ignoreSet", ignoreSet);
+    if (constants.speculative_deployment) {
         let getData = []
         for (const [mod, metadata] of Object.entries(map)) {
-            if (metadata.type !== 'function')
+            if (metadata.type !== 'function') {
+                if (metadata.type === 'conditional' && !constants.JIT_deployment) {
+                    let probability
+                    try {
+                        probability = conditionProbabilityExplicit[chain_id][mod].probability
+                    } catch (error) {
+                        console.log("branch probability not present, random branch taken");
+                        probability = Math.random()
+                    }
+                    let branch = (probability >= 0.5) ? metadata['success'] : metadata['fail']
+                    let branchMap = JSON.parse(JSON.stringify(map[branch]))
+                    delete branchMap['type']
+                    console.log("success probability", probability, "taking branch: ", branch);
+                    speculative_deployment(chain_id, aliases, branchMap)
+                }
                 continue
+            }
             if (constants.JIT_deployment) {
-                console.log(mod, metadata, aliases[mod].alias);
+                // console.log(mod, metadata, aliases[mod].alias);
                 let url = metricsDB + aliases[mod].alias
                 console.log(url);
                 let data = libSupport.fetchData(url)
                 console.log(data);
                 getData.push(data)
             } else {
                 let payload = [{
                     topic: constants.topics.hscale,
                     messages: JSON.stringify({ "runtime": metadata.runtime, "functionHash": aliases[mod].alias })
                 }]
-                notify(payload)
+                notify(metadata.runtime, aliases[mod].alias)
             }
         }
         if (constants.JIT_deployment) {
             Promise.all(getData).then((values) => {
                 let dataMap = new Map()
                 for (const data of values) {
                     if (values.error === "not_found")
                         dataMap[data._id] = 0
                     dataMap[data._id] = data
                 }
-                let done = new Map()
-                let toBeDone = new Set()
+                if (done === undefined) {
+                    console.log("new map");
+                    done = new Map()
+                }
+                if (toBeDone === undefined) {
+                    toBeDone = new Set()
+                }
                 // var plannerMap = new Map(map)
                 do {
                     for (const [mod, metadata] of Object.entries(map)) {
                         if (metadata.type !== 'function' && metadata.type !== 'conditional') {
                             continue
                         }
                         if (metadata.wait_for.length == 0 && done[mod] === undefined) {
-                            done[mod] = dataMap[aliases[mod].alias][metadata.runtime].coldstart // expecting the first ones to run
-                                                                                                // to be hit by coldstarts
+                            /**
+                             * expecting the first ones to run to be hit by coldstarts
+                             */
+                            try {
+                                done[mod] = dataMap[aliases[mod].alias][metadata.runtime].coldstart
+                            } catch (e) {
+                                done[mod] = 0
+                            }
                             // delete plannerMap[mod];
                         } else if (done[mod] === undefined) {
-                            let flag = true
+                            let flag = true, redundantFlag = false
                             let maxWait = 0
                             for (const dependency of metadata.wait_for) {
                                 console.log(dependency);
                                 if (done[dependency] === undefined) {
                                     flag = false
                                     break
                                 }
-                                else if (maxWait < done[dependency])
+                                else if (maxWait < done[dependency] &&
+                                    (ignoreSet === undefined || !ignoreSet.has(dependency)))
                                     maxWait = done[dependency]
+                                else if (ignoreSet !== undefined && ignoreSet.has(dependency)) {
+                                    redundantFlag = true
+                                    console.log("ignoring redundant dependency", dependency);
+                                }
                             }
+                            // if (redundantFlag)
+                            //     maxWait += offset;
+                            maxWait += offset
                             if (flag) {
                                 console.log("notifying", mod);
-                                let notifyTime = ((maxWait - dataMap[aliases[mod].alias][metadata.runtime].starttime) > 0) ?
-                                    maxWait - dataMap[aliases[mod].alias][metadata.runtime].starttime : 0
-                                console.log(mod, "max wait", maxWait, "notify time:", notifyTime);
-                                let payload = [{
-                                    topic: constants.topics.hscale,
-                                    messages: JSON.stringify({ "runtime": metadata.runtime, "functionHash": aliases[mod].alias })
-                                }]
-                                setTimeout(notify, notifyTime, payload)
-                                done[mod] = maxWait + dataMap[aliases[mod].alias][metadata.runtime].warmstart
+                                if (metadata.type === 'conditional') {
+                                    console.log("setting notification for conditional", mod);
+                                    let probability
+                                    try {
+                                        probability = conditionProbabilityExplicit[chain_id][mod].probability
+                                    } catch (error) {
+                                        console.log("branch probability not present, random branch taken");
+                                        probability = Math.random()
+                                    }
+                                    let branch = (probability >= 0.5) ? metadata['success'] : metadata['fail']
+                                    let branchMap = JSON.parse(JSON.stringify(map[branch]))
+                                    delete branchMap['type']
+                                    console.log("success probability", probability, "taking branch: ", branch);
+                                    if (ignoreSet === undefined)
+                                        ignoreSet = new Set(metadata.wait_for)
+                                    else
+                                        ignoreSet = new Set(ignoreSet, new Set(metadata.wait_for))
+                                    speculative_deployment(chain_id, aliases, branchMap, maxWait, done, toBeDone, ignoreSet)
+                                    done[mod] = maxWait - offset
+                                } else {
+                                    console.log("notification set", mod);
+                                    let starttime
+                                    try {
+                                        starttime = dataMap[aliases[mod].alias][metadata.runtime].starttime
+                                    } catch (e) {
+                                        starttime = 0
+                                    }
+                                    let notifyTime = ((maxWait - starttime) > 0) ? maxWait - starttime : 0
+                                    // notifyTime += offset
+                                    console.log(mod, "max wait", maxWait, "notify time:", notifyTime, "offset added", offset);
+                                    setTimeout(notify, notifyTime, metadata.runtime, aliases[mod].alias)
+                                    try {
+                                        done[mod] = maxWait + dataMap[aliases[mod].alias][metadata.runtime].warmstart - offset
+                                    } catch (e) {
+                                        done[mod] = maxWait - offset
+                                    }
+                                }
                                 if (toBeDone.has(mod))
                                     delete toBeDone[mod]
                                 // delete plannerMap[mod]
...
@@ -371,7 +458,7 @@ async function speculative_deployment(aliases, map) {
                                 toBeDone.add(mod)
                             }
                         }
-                        console.log(done, toBeDone);
+                        console.log("done", done);
                     }
                 } while (toBeDone.size != 0)
             })
...
@@ -405,8 +492,18 @@ function readMap(filename, alias = false) {
     })
 }

-function notify(payload) {
-    libSupport.producer.send(payload, function () { })
+function notify(runtime, functionHash) {
+    // console.log("check map: ", functionToResource.has(functionHash + runtime));
+    if (!functionToResource.has(functionHash + runtime) && !db.has(functionHash + runtime)) {
+        let payload = [{
+            topic: constants.topics.hscale,
+            messages: JSON.stringify({ runtime, functionHash })
+        }]
+        libSupport.producer.send(payload, function () { })
+    } else {
+        console.log("resource already present: skipping speculation");
+    }
 }

 function createDirectory(path) {
...
@@ -423,4 +520,7 @@ function createDirectory(path) {
     })
 }

-module.exports = router;
+module.exports = {
+    router
+}
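A small but easy-to-miss consequence of the export change above: the chain handler no longer exports the Express router directly, so consumers must pick it off the exported object. The manager's index.js (next file) is updated to match; roughly:

    const chainHandler = require('./explicit_chain_handler')
    // before this commit: app.use('/serverless/chain', chainHandler)
    app.use('/serverless/chain', chainHandler.router)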
dispatch_system/dispatch_manager/index.js  (view file @ 4d074b06)

@@ -10,18 +10,11 @@ const { spawn } = require('child_process');
 const morgan = require('morgan');
 const heap = require('heap');
 const fetch = require('node-fetch');
-const swStats = require('swagger-stats');
-const apiSpec = require('./swagger.json');
+// const swStats = require('swagger-stats');
+// const apiSpec = require('./swagger.json');
 const util = require('util')
+const sharedMeta = require('./shared_meta')

-/**
- * URL to the couchdb database server used to store function metadata
- */
-let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metadataDB = metadataDB + "/" + constants.function_db_name + "/"
-let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"

 const app = express()
 const libSupport = require('./lib')
...
@@ -30,13 +23,15 @@ let date = new Date();
 let log_channel = constants.topics.log_channel

 let usedPort = new Map(), // TODO: remove after integration with RM
-    db = new Map(), // queue holding request to be dispatched
-    resourceMap = new Map(), // map between resource_id and resource details like node_id, port, associated function etc
-    functionToResource = new Map(), // a function to resource map. Each map contains a minheap of
+    db = sharedMeta.db, // queue holding request to be dispatched
+    resourceMap = sharedMeta.resourceMap, // map between resource_id and resource details like node_id, port, associated function etc
+    functionToResource = sharedMeta.functionToResource, // a function to resource map. Each map contains a minheap of
                                     // resources associated with the function
-    workerNodes = new Map(), // list of worker nodes currently known to the DM
-    functionBranchTree = new Map() // a tree to store function branch predictions
+    workerNodes = sharedMeta.workerNodes, // list of worker nodes currently known to the DM
+    functionBranchTree = sharedMeta.functionBranchTree, // a tree to store function branch predictions
+    metricsDB = sharedMeta.metricsDB,
+    metadataDB = sharedMeta.metadataDB

 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
...
@@ -64,11 +59,10 @@ app.use(morgan('combined', {
 app.use(express.json());
 app.use(express.urlencoded({ extended: true }));
 const file_path = __dirname + "/repository/"
 app.use('/repository', express.static(file_path)); // file server hosting deployed functions
 app.use(fileUpload())
-app.use(swStats.getMiddleware({ swaggerSpec: apiSpec })); // statistics middleware
-app.use('/serverless/chain', chainHandler); // chain router (explicit_chain_handler.js) for handling explicit chains
+// app.use(swStats.getMiddleware({ swaggerSpec: apiSpec })); // statistics middleware
+app.use('/serverless/chain', chainHandler.router); // chain router (explicit_chain_handler.js) for handling explicit chains

 let requestQueue = []
 const WINDOW_SIZE = 10
...
@@ -225,7 +219,7 @@ app.post('/serverless/execute/:id', (req, res) => {
     res.timestamp = Date.now()
     if (functionToResource.has(id)) {
         res.start = 'warmstart'
-        libSupport.reverseProxy(req, res, functionToResource, resourceMap, functionBranchTree)
+        libSupport.reverseProxy(req, res)
     } else {
         res.start = 'coldstart'
         /**
...
@@ -334,17 +328,14 @@ function postDeploy(message) {
         "reason": "deployment",
         "status": true,
         starttime: (Date.now() - resource.deploy_request_time)
-    }, message.resource_id, resourceMap)
+    }, message.resource_id)
     if (db.has(id)) {
         let sendQueue = db.get(id)
         logger.info("forwarding request via reverse proxy to: " + JSON.stringify(resource));
         while (sendQueue && sendQueue.length != 0) {
             let { req, res } = sendQueue.shift()
-            libSupport.reverseProxy(req, res, functionToResource, resourceMap, functionBranchTree)
-                .then(() => {
-                })
+            libSupport.reverseProxy(req, res)
         }
         db.delete(id)
     }
...
@@ -410,7 +401,7 @@ consumer.on('message', function (message) {
         "reason": "terminate",
         "total_request": message.total_request,
         "status": true
-    }, message.resource_id, resourceMap)
+    }, message.resource_id)
         .then(() => {
             resourceMap.delete(message.resource_id)
             if (resourceArray.length == 0)
...
@@ -428,7 +419,11 @@ consumer.on('message', function (message) {
         functionHash = message.functionHash
     logger.info(`Generated new resource ID: ${resource_id} for runtime: ${runtime}`);
+    console.log("Resource Status: ", functionToResource);
+    if (!functionToResource.has(functionHash + runtime) && !db.has(functionHash + runtime)) {
+        console.log("adding db");
+        db.set(functionHash + runtime, [])
+    }
     /**
      * Request RM for resource
      */
...
@@ -451,6 +446,7 @@ consumer.on('message', function (message) {
         }),
         partition: 0
     }]
     producer.send(payloadToRM, () => {
+        // db.set(functionHash + runtime, { req, res })
         console.log("sent rm");
...
@@ -521,7 +517,7 @@ function autoscalar() {
  */
 async function speculative_deployment(req, runtime) {
     if (constants.speculative_deployment && req.headers['x-resource-id'] === undefined) {
-        console.log(functionBranchTree, req.params.id);
+        // console.log(functionBranchTree, req.params.id);
         if (functionBranchTree.has(req.params.id)) {
             let branchInfo = functionBranchTree.get(req.params.id)
...
@@ -558,7 +554,7 @@ async function speculative_deployment(req, runtime) {
     }
 }
 setInterval(libSupport.metrics.broadcastMetrics, 5000)
-setInterval(libSupport.viterbi, 1000, functionBranchTree)
+// setInterval(libSupport.viterbi, 1000)
 setInterval(autoscalar, 1000);
 setInterval(dispatch, 1000);
 app.listen(port, () => logger.info(`Server listening on port ${port}!`))
\ No newline at end of file
dispatch_system/dispatch_manager/lib.js  (view file @ 4d074b06)

@@ -6,10 +6,18 @@ const winston = require('winston')
 const constants = require('.././constants.json')
 const secrets = require('./secrets.json')
 const metrics = require('./metrics')
+const sharedMeta = require('./shared_meta')
 const { createLogger, format, transports } = winston;
 const heap = require('heap')

+let db = sharedMeta.db, // queue holding request to be dispatched
+    resourceMap = sharedMeta.resourceMap, // map between resource_id and resource details like node_id, port, associated function etc
+    functionToResource = sharedMeta.functionToResource, // a function to resource map. Each map contains a minheap of
+                                    // resources associated with the function
+    workerNodes = sharedMeta.workerNodes, // list of worker nodes currently known to the DM
+    functionBranchTree = sharedMeta.functionBranchTree // Holds the function path's and related probability distribution

 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
     client = new kafka.KafkaClient({
...
@@ -18,9 +26,7 @@ let kafka = require('kafka-node'),
     }),
     producer = new Producer(client)

-let implicitChainDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-implicitChainDB = implicitChainDB + "/" + constants.implicit_chain_db_names + "/"
+let implicitChainDB = sharedMeta.implicitChainDB

 /**
  * Generates unique IDs of arbitrary length
  * @param {Length of the ID} length
...
@@ -62,59 +68,73 @@ function generateExecutor(functionPath, functionHash) {
  * Reverse proxy to take user requests and forward them to appropriate workers using a loadbalacer
  * @param {JSON} req the user request to be forwarded to the worker
  * @param {JSON} res Object to use to return the response to the user
- * @param {Map} functionToResource Function to resource Map
- * @param {Map} resourceMap Map from resource ID to resource metadata
- * @param {Map} functionBranchTree Holds the function path's and related probability distribution
  */
-function reverseProxy(req, res, functionToResource, resourceMap, functionBranchTree) {
-    branchChainPredictor(req, resourceMap, functionToResource, functionBranchTree)
-    return new Promise((resolve, reject) => {
-        let runtime = req.body.runtime
-        let id = req.params.id + runtime
-        /**
-         * Bypass deployment pipeline if resource available
-         */
-        let functionHeap = functionToResource.get(id)
-        // loadbalancing by choosing worker with lowest load
-        let forwardTo = functionHeap[0]
-        let resource = resourceMap.get(forwardTo.resource_id)
-        // logger.info(`Choosing resource ${JSON.stringify(forwardTo.resource_id)}` +
-        //     "\n forwarding via reverse proxy to: " + JSON.stringify(resource));
-        let url = `http://${resource.node_id}:${resource.port}/serverless/function/execute`
-        // logger.info("Request received at reverseproxy. Forwarding to: " + url);
-        forwardTo.open_request_count += 1
-        heap.heapify(functionHeap, compare) // maintain loadbalancer by heapifying the Map
-        // logger.info(functionHeap);
-        var options = {
-            method: 'POST',
-            uri: url,
-            body: req.body,
-            json: true // Automatically stringifies the body to JSON
-        };
-        rp(options)
-            .then(function (parsedBody) {
-                let serviceTime = Date.now() - res.timestamp
-                res.json(parsedBody)
-                forwardTo.open_request_count -= 1
-                heap.heapify(functionHeap, compare)
-                metrics.collectMetrics({ type: res.start, value: serviceTime, functionHash: req.params.id, runtime })
-                resolve()
-            })
-            .catch(function (err) {
-                forwardTo.open_request_count -= 1
-                heap.heapify(functionHeap, compare)
-                logger.error("error" + err);
-                res.json(err.message).status(err.statusCode)
-                resolve()
-            });
-    })
+function reverseProxy(req, res) {
+    if (req.headers['x-chain-type'] !== 'explicit')
+        branchChainPredictor(req)
+    let runtime = req.body.runtime
+    let id = req.params.id + runtime
+    /**
+     * Bypass deployment pipeline if resource available
+     */
+    let functionHeap = functionToResource.get(id)
+    // loadbalancing by choosing worker with lowest load
+    let forwardTo = functionHeap[0]
+    let resource = resourceMap.get(forwardTo.resource_id)
+    // logger.info(`Choosing resource ${JSON.stringify(forwardTo.resource_id)}` +
+    //     "\n forwarding via reverse proxy to: " + JSON.stringify(resource));
+    let url = `http://${resource.node_id}:${resource.port}/serverless/function/execute`
+    // logger.info("Request received at reverseproxy. Forwarding to: " + url);
+    forwardTo.open_request_count += 1
+    heap.heapify(functionHeap, compare) // maintain loadbalancer by heapifying the Map
+    // logger.info(functionHeap);
+    var options = {
+        method: 'POST',
+        uri: url,
+        body: req.body,
+        json: true // Automatically stringifies the body to JSON
+    };
+    rp(options)
+        .then(function (parsedBody) {
+            let serviceTime = Date.now() - res.timestamp
+            res.json(parsedBody)
+            forwardTo.open_request_count -= 1
+            heap.heapify(functionHeap, compare)
+            let functionHash = req.params.id
+            let functionData = functionBranchTree.get(functionHash)
+            if (functionData && functionData.req_count % 5 == 0) {
+                if (functionData.parent)
+                    viterbi(functionHash, functionData)
+                else {
+                    functionData.branches = Array.from(functionData.branches.entries())
+                    let payload = {
+                        method: 'put',
+                        body: JSON.stringify(functionBranchTree.get(functionHash)),
+                        headers: { 'Content-Type': 'application/json' }
+                    }
+                    fetchData(implicitChainDB + functionHash, payload)
+                        .then((updateStatus) => {
+                            console.log(updateStatus);
+                            if (updateStatus.error === undefined)
+                                functionData._rev = updateStatus.rev
+                        })
+                    functionData.branches = new Map(functionData.branches)
+                }
+            }
+            metrics.collectMetrics({ type: res.start, value: serviceTime, functionHash: req.params.id, runtime })
+        })
+        .catch(function (err) {
+            forwardTo.open_request_count -= 1
+            heap.heapify(functionHeap, compare)
+            logger.error("error" + err);
+            res.json(err.message).status(err.statusCode)
+        });
 }

 function getPort(usedPort) {
...
@@ -161,10 +181,20 @@ function compare(a, b) {
     return a.open_request_count - b.open_request_count
 }

-function branchChainPredictor(req, resourceMap, functionToResource, functionBranchTree) {
+async function branchChainPredictor(req) {
     // console.log(req.headers['x-resource-id']);
+    if (!functionBranchTree.has(req.params.id)) {
+        let data = await fetchData(implicitChainDB + req.params.id)
+        if (data.error === "not_found")
+            console.log("no data", req.params.id);
+        else {
+            data.branches = new Map(data.branches)
+            functionBranchTree.set(req.params.id, data)
+        }
+    }
     if (req.headers['x-resource-id'] === undefined) {
         let functionHash = req.params.id
         if (functionBranchTree.has(functionHash)) {
             let branchInfo = functionBranchTree.get(functionHash)
...
@@ -215,81 +245,87 @@ function branchChainPredictor(req, resourceMap, functionToResource, functionBran
     // console.log("branch tree", functionBranchTree);
 }

-function viterbi(functionBranchTree) {
-    functionBranchTree.forEach((metadata, node) => {
-        if (metadata.parent && metadata.req_count % 5 == 0) {
-            let path = []
-            let parents = [[node, { prob: 1, metadata }]]
-            path.push({ node, probability: 1 })
-            let siblings = new Map()
-            while (parents.length > 0) {
-                // console.log("parent_group", parents);
-                for (const parent of parents) {
-                    // console.log("=========begin==========\n",parent, "\n=============end============");
-                    // console.log(parent[1].metadata);
-                    if (parent[1].metadata === undefined)
-                        continue
-                    let forwardBranches = parent[1].metadata.branches
-                    // console.log(forwardBranches);
-                    let parentProbability = parent[1].prob
-                    forwardBranches.forEach((branchProb, subNode) => {
-                        let probability = 0
-                        if (siblings.has(subNode))
-                            probability = siblings.get(subNode)
-                        probability += branchProb * parentProbability
-                        // console.log("prob", probability);
-                        siblings.set(subNode, probability)
-                    })
-                    // console.log("siblings", siblings);
-                }
-                parents = []
-                let maxSibling, maxProb = 0
-                siblings.forEach((prob, sibling) => {
-                    if (prob > maxProb) {
-                        maxSibling = sibling
-                        maxProb = prob
-                    }
-                })
-                parentIDs = Array.from(siblings.keys());
-                for (const id of parentIDs) {
-                    let metadata = functionBranchTree.get(id)
-                    parents.push([id, { prob: siblings.get(id), metadata }])
-                }
-                if (maxSibling !== undefined)
-                    path.push({ node: maxSibling, probability: maxProb })
-                siblings = new Map()
-            }
-            // if (path.length > 0)
-            //     console.log("path", path);
-            metadata.mle_path = path
-            if (path.length > 1) {
-                let payload = {
-                    method: 'put',
-                    body: JSON.stringify(path),
-                    headers: { 'Content-Type': 'application/json' }
-                }
-                fetch(implicitChainDB + functionHash, payload)
-            }
-        }
-    });
-}
+async function viterbi(node, metadata) {
+    console.log("function branch tree", functionBranchTree.get(node));
+    let path = []
+    let parents = [[node, { prob: 1, metadata }]]
+    path.push({ node, probability: 1 })
+    let siblings = new Map()
+    while (parents.length > 0) {
+        // console.log("parent_group", parents);
+        for (const parent of parents) {
+            // console.log("=========begin==========\n",parent, "\n=============end============");
+            // console.log(parent[1].metadata);
+            if (parent[1].metadata === undefined)
+                continue
+            let forwardBranches = parent[1].metadata.branches
+            // console.log(forwardBranches);
+            let parentProbability = parent[1].prob
+            forwardBranches.forEach((branchProb, subNode) => {
+                let probability = 0
+                if (siblings.has(subNode))
+                    probability = siblings.get(subNode)
+                probability += branchProb * parentProbability
+                // console.log("prob", probability);
+                siblings.set(subNode, probability)
+            })
+            // console.log("siblings", siblings);
+        }
+        parents = []
+        let maxSibling, maxProb = 0
+        siblings.forEach((prob, sibling) => {
+            if (prob > maxProb) {
+                maxSibling = sibling
+                maxProb = prob
+            }
+        })
+        parentIDs = Array.from(siblings.keys());
+        for (const id of parentIDs) {
+            let metadata = functionBranchTree.get(id)
+            parents.push([id, { prob: siblings.get(id), metadata }])
+        }
+        if (maxSibling !== undefined)
+            path.push({ node: maxSibling, probability: maxProb })
+        siblings = new Map()
+    }
+    if (path.length > 1)
+        console.log("path", path);
+    metadata.mle_path = path
+    if (path.length > 1) {
+        metadata.branches = Array.from(metadata.branches.entries())
+        let payload = {
+            method: 'put',
+            body: JSON.stringify(functionBranchTree.get(node)),
+            headers: { 'Content-Type': 'application/json' }
+        }
+        fetchData(implicitChainDB + node, payload)
+            .then((updateStatus) => {
+                console.log(updateStatus);
+                if (updateStatus.error === undefined)
+                    metadata._rev = updateStatus.rev
+            })
+        metadata.branches = new Map(metadata.branches)
+    }
 }

-function logBroadcast(message, resource_id, resourceMap) {
+function logBroadcast(message, resource_id) {
     return new Promise((resolve, reject) => {
         try {
...
@@ -329,6 +365,6 @@ async function fetchData(url, data = null) {
 module.exports = {
     makeid, generateExecutor, reverseProxy, getPort, logger, compare, viterbi,
-    logBroadcast, fetchData, metrics,
+    logBroadcast, fetchData, metrics, producer
 }
\ No newline at end of file
dispatch_system/dispatch_manager/metrics.js  (view file @ 4d074b06)

@@ -5,11 +5,12 @@ const secrets = require('./secrets.json')
 const fetch = require('node-fetch');
 const util = require('util')
 const prom = require('prom-client');
+const sharedMeta = require('./shared_meta');
 const Registry = prom.Registry;
 const register = new Registry();
-const alpha = 0.99
+const alpha = constants.metrics.alpha
 let log_channel = constants.topics.log_channel,
     metrics = { }
...
@@ -29,8 +30,7 @@ register.registerMetric(coldstartMetric);
 register.registerMetric(starttimeMetric);
 register.registerMetric(requestMetric);

-let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
-metricsDB = metricsDB + "/" + constants.metrics_db_name + "/"
+let metricsDB = sharedMeta.metricsDB

 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
     client = new kafka.KafkaClient({
...
@@ -129,6 +129,7 @@ async function broadcastMetrics() {
                 warmstart: metric.longterm.warmstart,
                 starttime: metric.longterm.starttime
             }
             let payload = {
                 method: 'put',
                 body: JSON.stringify(dbData),
...
@@ -136,6 +137,7 @@ async function broadcastMetrics() {
         }
         await fetch(metricsDB + functionHash, payload)
+        metric.timestamp = Date.now()
     }
 }
...
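The hard-coded alpha = 0.99 becomes configurable via constants.metrics.alpha (set to 0.7 in this commit). metrics.js is not shown in full here; assuming alpha acts as an exponential-smoothing factor for the long-term coldstart/warmstart/starttime figures (an assumption suggested by the surrounding "longterm" fields, not confirmed by this diff), the usual update would be:

    // Assumed exponential smoothing: a larger alpha weights history more heavily.
    function smooth(longtermValue, newSample, alpha) {
        return alpha * longtermValue + (1 - alpha) * newSample
    }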
dispatch_system/dispatch_manager/shared_meta.js  (new file, 0 → 100644, view file @ 4d074b06)

const secrets = require('./secrets.json')
const constants = require('.././constants.json')

let db = new Map(), // queue holding request to be dispatched
    resourceMap = new Map(), // map between resource_id and resource details like node_id, port, associated function etc
    functionToResource = new Map(), // a function to resource map. Each map contains a minheap of
                                    // resources associated with the function
    workerNodes = new Map(), // list of worker nodes currently known to the DM
    functionBranchTree = new Map(), // a tree to store function branch predictions
    conditionProbabilityExplicit = new Map() // tree holding conditional probabilities for explicit chains

/**
 * URL to the couchdb database server used to store data
 */
let metadataDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metadataDB = metadataDB + "/" + constants.db.function_meta + "/"

let metricsDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
metricsDB = metricsDB + "/" + constants.db.metrics + "/"

let implicitChainDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
implicitChainDB = implicitChainDB + "/" + constants.db.implicit_chain_meta + "/"

let explicitChainDB = `http://${secrets.couchdb_username}:${secrets.couchdb_password}@${constants.couchdb_host}`
explicitChainDB = explicitChainDB + "/" + constants.db.explicit_chain_meta + "/"

module.exports = {
    db, functionBranchTree, functionToResource, workerNodes, resourceMap,
    conditionProbabilityExplicit, metadataDB, metricsDB, implicitChainDB, explicitChainDB
}
\ No newline at end of file
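Design note on this new module: because Node.js caches a module after its first require, every file that does require('./shared_meta') receives the same Map instances and the same pre-built database URLs. That is what lets this commit drop the functionToResource/resourceMap/functionBranchTree parameters previously threaded through reverseProxy, logBroadcast and branchChainPredictor. A minimal illustration, with hypothetical file names and entries:

    // a.js and b.js both observe the same shared state
    // a.js
    const sharedMeta = require('./shared_meta')
    sharedMeta.workerNodes.set('worker-1', { port: 8081 })   // illustrative entry

    // b.js
    const sharedMeta = require('./shared_meta')
    console.log(sharedMeta.workerNodes.get('worker-1'))      // -> { port: 8081 }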