SYNERG / xanadu · Commits

Commit f5d6e780, authored Apr 28, 2020 by Nilanjan Daw

    Merge branch 'explicit_function_chaining'

Parents: 4d074b06, a6391a8c

13 changed files with 384 additions and 240 deletions (+384, -240)
dispatch_system/constants.json                                                            +3    -2
dispatch_system/constants_local.json                                                      +41   -0
dispatch_system/constants_server.json                                                     +6    -5
dispatch_system/dispatch_manager/explicit_chain_handler.js                                +121  -142
dispatch_system/dispatch_manager/index.js                                                 +91   -25
dispatch_system/dispatch_manager/lib.js                                                   +95   -66
local_experiments/Speculation Off, Speculation On and Speculation + JIT Deployment.png    +0    -0
local_experiments/Worker Idle Time before initial Request.png                             +0    -0
local_experiments/chainlength_vs_overhead.png                                             +0    -0
local_experiments/function_chain_explicit.drawio                                          +1    -0
local_experiments/function_chain_explicit.png                                             +0    -0
local_experiments/speculation_vs_runtime.png                                              +0    -0
local_experiments/speculative_explicit.txt                                                +26   -0
dispatch_system/constants.json  (view file @ f5d6e780)

 {
     "registry_url": "10.129.6.5:5000/",
     "master_port": 8080,
     "master_address": "localhost",
     "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
...
@@ -37,5 +37,6 @@
     },
     "speculative_deployment": true,
     "JIT_deployment": true,
+    "aggressivity": 0.8,
     "id_size": 20
 }
\ No newline at end of file
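The only visible addition here is the new "aggressivity" knob. Elsewhere in this commit it acts as a multiplier that caps how much of a predicted chain is deployed speculatively (deployDepth = mle_path.length * constants.aggressivity in index.js, maxCount = plan.length * constants.aggressivity in explicit_chain_handler.js). A minimal sketch of that use, assuming the config is loaded with a plain require:

    // Sketch only: how the new "aggressivity" value bounds speculation depth.
    const constants = require('./dispatch_system/constants.json')

    function speculationBudget(planLength) {
        // with the default 0.8, at most ~80% of the planned nodes are pre-deployed
        return planLength * constants.aggressivity
    }

    console.log(speculationBudget(5))   // 4 for aggressivity = 0.8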
dispatch_system/constants_local.json  (new file, 0 → 100644, view file @ f5d6e780)

{
    "registry_url": "localhost:5000/",
    "master_port": 8080,
    "master_address": "localhost",
    "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
    "couchdb_host": "localhost:5984",
    "db": {
        "function_meta": "serverless",
        "metrics": "metrics",
        "implicit_chain_meta": "implicit_chain",
        "explicit_chain_meta": "explicit_chain"
    },
    "network": {
        "network_bridge": "hybrid_kafka-serverless",
        "use_bridge": true,
        "internal": {
            "kafka_host": "kafka:9092"
        },
        "external": {
            "kafka_host": "localhost:29092"
        }
    },
    "topics": {
        "request_dm_2_rm": "request",
        "heartbeat": "heartbeat",
        "deployed": "deployed",
        "remove_worker": "removeWorker",
        "response_rm_2_dm": "RESPONSE_RM_2_DM_DUMMY",
        "hscale": "hscale",
        "log_channel": "LOG_COMMON"
    },
    "autoscalar_metrics": {
        "open_request_threshold": 100
    },
    "metrics": {
        "alpha": 0.7
    },
    "speculative_deployment": true,
    "JIT_deployment": true,
    "id_size": 20
}
\ No newline at end of file
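constants_local.json keeps two Kafka endpoints, an in-network one ("kafka:9092") and a host-visible one ("localhost:29092"), selected by the use_bridge flag. How that selection is actually made is not part of this diff; the sketch below only illustrates the intended layout:

    // Illustration (not code from this repo): picking a Kafka bootstrap address
    // from the constants_local.json layout introduced here.
    const config = require('./dispatch_system/constants_local.json')

    const kafkaHost = config.network.use_bridge
        ? config.network.internal.kafka_host    // "kafka:9092" inside the docker network
        : config.network.external.kafka_host    // "localhost:29092" from the host machine
    console.log("connecting to", kafkaHost)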
dispatch_system/constants_server.json  (view file @ f5d6e780)

 {
     "registry_url": "10.129.6.5:5000/",
     "master_port": 8080,
-    "master_address": "10.129.6.5",
+    "master_address": "localhost",
     "grunt_host": "https://www.namandixit.net/lovecraftian_nightmares/grunt",
     "couchdb_host": "10.129.6.5:5984",
     "db": {
...
@@ -12,8 +12,9 @@
     },
     "network": {
         "network_bridge": "hybrid_kafka-serverless",
+        "use_bridge": false,
         "internal": {
-            "kafka_host": "kafka:9092"
+            "kafka_host": "10.129.6.5:9092"
         },
         "external": {
             "kafka_host": "10.129.6.5:9092"
...
@@ -34,7 +35,7 @@
     "metrics": {
         "alpha": 0.7
     },
-    "speculative_deployment": false,
+    "speculative_deployment": true,
     "JIT_deployment": true,
     "id_size": 20
 }
\ No newline at end of file
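Together with the local config, the server config now exposes the three deployment modes that the new plots under local_experiments/ compare. As a rough guide (the exact flag combinations are an assumption), they map onto the two booleans like this:

    // Assumed mapping of experiment modes to config flags.
    const modes = {
        "speculation off":   { speculative_deployment: false, JIT_deployment: false },
        "speculation on":    { speculative_deployment: true,  JIT_deployment: false },
        "speculation + JIT": { speculative_deployment: true,  JIT_deployment: true  }
    }
    console.log(modes)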
dispatch_system/dispatch_manager/explicit_chain_handler.js  (view file @ f5d6e780)

@@ -8,6 +8,7 @@ const fetch = require('node-fetch')
 const constants = require('../constants.json')
 const operator = require('./operator')
 const sharedMeta = require('./shared_meta')
+const util = require('util')
 
 const logger = libSupport.logger
@@ -197,30 +198,36 @@ router.post('/execute/:id', (req, res) => {
     libSupport.fetchData(explicitChainDB + chain_id)
         .then(chainData => {
             console.log(chainData);
+            let path = { path: [], onPath: true, dependency: {}, level: 0 }
             if (chainData.error !== "not_found")
                 conditionProbabilityExplicit[chain_id] = chainData
             if (req.files && req.files.map) {
                 map = JSON.parse(req.files.map.data.toString());
-                let mapPlanner = JSON.parse(req.files.map.data.toString());
                 readMap(`./repository/aliases${chain_id}.json`, true)
                     .then(data => {
                         aliases = data
                         let payload = JSON.parse(req.body.data)
-                        speculative_deployment(chain_id, aliases, mapPlanner, 0);
-                        orchestrator(chain_id, res, payload, map, aliases, {})
+                        if (chainData.error != "not_found")
+                            speculative_deployment(chain_id, aliases, chainData);
+                        orchestrator(chain_id, res, payload, map, aliases, {}, path)
                     })
             } else {
                 readMap(`./repository/map${chain_id}.json`)
                     .then(data => {
                         map = data
-                        let mapPlanner = JSON.parse(JSON.stringify(map))
                         readMap(`./repository/aliases${chain_id}.json`, true)
                             .then(data => {
                                 aliases = data
                                 let payload = JSON.parse(req.body.data)
-                                speculative_deployment(chain_id, aliases, mapPlanner, 0);
-                                orchestrator(chain_id, res, payload, map, aliases, {})
+                                if (chainData.error != "not_found")
+                                    speculative_deployment(chain_id, aliases, chainData);
+                                orchestrator(chain_id, res, payload, map, aliases, {}, path)
                             })
                     })
             }
@@ -228,17 +235,41 @@ router.post('/execute/:id', (req, res) => {
 })
 
-async function orchestrator(chain_id, res, payload, map, aliases, result) {
+/**
+ * Orchestrator function to execute a chain and return the result to the invoker
+ * @param {string} chain_id ID of the chain to be executed
+ * @param {JSON} res response object
+ * @param {JSON} payload data to be passed to the chain
+ * @param {JSON} map holds the chain map
+ * @param {JSON} aliases internal alias to function chain mapping
+ * @param {JSON} result result obtained after chain executes
+ */
+async function orchestrator(chain_id, res, payload, map, aliases, result, path) {
+    /**
+     * Adding dependencies on MLE path to a map
+     * for fast lookup during speculation
+     */
+    for (const [functionName, metadata] of Object.entries(map)) {
+        if (metadata.type === "function" || metadata.type === "conditional") {
+            if (path.dependency[functionName] === undefined)
+                path.dependency[functionName] = JSON.parse(JSON.stringify(metadata.wait_for))
+        }
+    }
     if (Object.keys(map).length == 0) {
         console.log("time to resolve", result);
         res.json(result)
+        if (path.onPath)
+            conditionProbabilityExplicit[chain_id]["path"] = path
         let payload = {
             method: 'put',
             body: JSON.stringify(conditionProbabilityExplicit[chain_id]),
             headers: { 'Content-Type': 'application/json' }
         }
         libSupport.fetchData(explicitChainDB + chain_id, payload)
+        console.log("detected path", util.inspect(path, false, null, true /* enable colors */));
         // return resolve(result)
     }
@@ -258,22 +289,27 @@ async function orchestrator(chain_id, res, payload, map, aliases, result) {
             }
             delete map[functionName]
             aliases[functionName].status = "running"
+            if (typeof path.path[path.level] === 'undefined') {
+                path.path[path.level] = []
+            }
+            path.path[path.level].push({ functionName, type: "function", runtime: metadata.runtime })
             libSupport.fetchData(url, data)
                 .then(json => {
                     // console.log(json);
                     result[functionName] = json
                     aliases[functionName].status = "done"
-                    let branchMap = null
+                    let branchMap = null, flag = false
                     for (const [_key, metadata] of Object.entries(map)) {
                         if (metadata.type === "function" || metadata.type === "conditional") {
                             let index = metadata.wait_for.indexOf(functionName)
                             if (index >= 0)
                                 metadata.wait_for.splice(index, 1);
+                            if (metadata.wait_for.length == 0)
+                                flag = true // something is runnable
                         }
                         if (metadata.type === "conditional" && metadata.wait_for.length == 0) {
                             let conditionResult = checkCondition(metadata.condition.op1, metadata.condition.op2, metadata.condition.op, result)
@@ -296,16 +332,34 @@ async function orchestrator(chain_id, res, payload, map, aliases, result) {
                             branchMap = map[branchToTake]
                             delete map[_key]
                             makeBranchRunnable(branchMap, aliases)
+                            if ((conditionResult === 'success') && conditionProbabilityExplicit[chain_id][_key].probability < 0.5 || (conditionResult !== 'success') && conditionProbabilityExplicit[chain_id][_key].probability > 0.5) {
+                                path.onPath = false
+                                console.log("out of path");
+                            }
+                            path.level++
+                            if (typeof path.path[path.level] === 'undefined') {
+                                path.path[path.level] = []
+                            }
+                            path.path[path.level].push({ functionName: _key, type: "condition" })
                         }
                     }
-                    orchestrator(chain_id, res, payload, (branchMap == null)? map: branchMap, aliases, result)
+                    if (flag)
+                        path.level++
+                    orchestrator(chain_id, res, payload, (branchMap == null)? map: branchMap, aliases, result, path)
                 })
         }
     }
 }
 
+/**
+ * Make the branch runnable by removing redundant dependencies in the map
+ * which have already executed
+ * @param {JSON} branchMap sub map of the chain holding the branch to be executed
+ * @param {JSON} aliases internal alias to function chain mapping
+ */
 function makeBranchRunnable(branchMap, aliases) {
     delete branchMap['type']
     for (const [_key, metadata] of Object.entries(branchMap)) {
@@ -326,142 +380,67 @@ function checkCondition(op1, op2, op, result) {
     return (operator[op](data, op2))? "success" : "fail"
 }
 
-async function speculative_deployment(chain_id, aliases, map, offset, done, toBeDone, ignoreSet) {
-    console.log("offset: ", offset, "ignoreSet", ignoreSet);
-    if (constants.speculative_deployment) {
-        let getData = []
-        for (const [mod, metadata] of Object.entries(map)) {
-            if (metadata.type !== 'function') {
-                if (metadata.type === 'conditional' && !constants.JIT_deployment) {
-                    let probability
-                    try {
-                        probability = conditionProbabilityExplicit[chain_id][mod].probability
-                    } catch (error) {
-                        console.log("branch probability not present, random branch taken");
-                        probability = Math.random()
-                    }
-                    let branch = (probability >= 0.5) ? metadata['success'] : metadata['fail']
-                    let branchMap = JSON.parse(JSON.stringify(map[branch]))
-                    delete branchMap['type']
-                    console.log("success probability", probability, "taking branch: ", branch);
-                    speculative_deployment(chain_id, aliases, branchMap)
-                }
-                continue
-            }
-            if (constants.JIT_deployment) {
-                // console.log(mod, metadata, aliases[mod].alias);
-                let url = metricsDB + aliases[mod].alias
-                let data = libSupport.fetchData(url)
-                getData.push(data)
-            } else {
-                notify(metadata.runtime, aliases[mod].alias)
-            }
-        }
-        if (constants.JIT_deployment) {
-            Promise.all(getData).then((values) => {
-                let dataMap = new Map()
-                for (const data of values) {
-                    if (values.error === "not_found")
-                        dataMap[data._id] = 0
-                    dataMap[data._id] = data
-                }
-                if (done === undefined) {
-                    console.log("new map");
-                    done = new Map()
-                }
-                if (toBeDone === undefined) {
-                    toBeDone = new Set()
-                }
-                // var plannerMap = new Map(map)
-                do {
-                    for (const [mod, metadata] of Object.entries(map)) {
-                        if (metadata.type !== 'function' && metadata.type !== 'conditional') {
-                            continue
-                        }
-                        if (metadata.wait_for.length == 0 && done[mod] === undefined) {
-                            /**
-                             * expecting the first ones to run to be hit by coldstarts
-                             */
-                            try {
-                                done[mod] = dataMap[aliases[mod].alias][metadata.runtime].coldstart
-                            } catch (e) {
-                                done[mod] = 0
-                            }
-                            // delete plannerMap[mod];
-                        } else if (done[mod] === undefined) {
-                            let flag = true, redundantFlag = false
-                            let maxWait = 0
-                            for (const dependency of metadata.wait_for) {
-                                if (done[dependency] === undefined) {
-                                    flag = false
-                                    break
-                                } else if (maxWait < done[dependency] && (ignoreSet === undefined || !ignoreSet.has(dependency)))
-                                    maxWait = done[dependency]
-                                else if (ignoreSet !== undefined && ignoreSet.has(dependency)) {
-                                    redundantFlag = true
-                                    console.log("ignoring redundant dependency", dependency);
-                                }
-                            }
-                            // if (redundantFlag)
-                            //     maxWait += offset;
-                            maxWait += offset
-                            if (flag) {
-                                if (metadata.type === 'conditional') {
-                                    console.log("setting notification for conditional", mod);
-                                    let probability
-                                    try {
-                                        probability = conditionProbabilityExplicit[chain_id][mod].probability
-                                    } catch (error) {
-                                        console.log("branch probability not present, random branch taken");
-                                        probability = Math.random()
-                                    }
-                                    let branch = (probability >= 0.5)? metadata['success']: metadata['fail']
-                                    let branchMap = JSON.parse(JSON.stringify(map[branch]))
-                                    delete branchMap['type']
-                                    console.log("success probability", probability, "taking branch: ", branch);
-                                    if (ignoreSet === undefined)
-                                        ignoreSet = new Set(metadata.wait_for)
-                                    else
-                                        ignoreSet = new Set(ignoreSet, new Set(metadata.wait_for))
-                                    speculative_deployment(chain_id, aliases, branchMap, maxWait, done, toBeDone, ignoreSet)
-                                    done[mod] = maxWait - offset
-                                } else {
-                                    console.log("notification set", mod);
-                                    let starttime
-                                    try {
-                                        starttime = dataMap[aliases[mod].alias][metadata.runtime].starttime
-                                    } catch (e) {
-                                        starttime = 0
-                                    }
-                                    let notifyTime = ((maxWait - starttime) > 0) ? maxWait - starttime : 0
-                                    // notifyTime += offset
-                                    console.log(mod, "max wait", maxWait, "notify time: ", notifyTime, "offset added", offset);
-                                    setTimeout(notify, notifyTime, metadata.runtime, aliases[mod].alias)
-                                    try {
-                                        done[mod] = maxWait + dataMap[aliases[mod].alias][metadata.runtime].warmstart - offset
-                                    } catch (e) {
-                                        done[mod] = maxWait - offset
-                                    }
-                                }
-                                if (toBeDone.has(mod))
-                                    delete toBeDone[mod]
-                                // delete plannerMap[mod]
-                            } else {
-                                toBeDone.add(mod)
-                            }
-                            console.log("done", done);
-                        }
-                    }
-                } while (toBeDone.size != 0)
-            })
-        }
-    }
-}
+async function speculative_deployment(chain_id, aliases, chainData) {
+    // console.log("chainData", util.inspect(chainData, false, null, true /* enable colors */));
+    let plan = []
+    if (constants.speculative_deployment) {
+        let path = chainData.path.path
+        for (let i = 0; i < path.length; i++) {
+            for (const node of path[i]) {
+                if (node.type === "function") {
+                    node.id = aliases[node.functionName].alias
+                    node.invokeTime = 0
+                    // console.log(node);
+                    plan.push(node)
+                }
+            }
+        }
+        if (constants.JIT_deployment) {
+            let conditionalDelay = 0, metricsData = new Map(), delayMap = new Map()
+            let data = await libSupport.fetchData(metricsDB + "_bulk_get", {
+                method: 'post',
+                body: JSON.stringify({
+                    docs: plan
+                }),
+                headers: { 'Content-Type': 'application/json' },
+            })
+            data = data.results
+            for (let i = 0; i < path.length; i++) {
+                let id = data[i].id
+                metricsData[id] = data[i].docs[0].ok
+            }
+            // console.log(metricsData);
+            for (let i = 0; i < path.length; i++) {
+                for (const node of path[i]) {
+                    let maxDelay = conditionalDelay
+                    for (const dependency of chainData.path.dependency[node.functionName]) {
+                        if (delayMap.get(dependency) > maxDelay)
+                            maxDelay = delayMap.get(dependency)
+                    }
+                    if (maxDelay == 0) {
+                        maxDelay += (node.type === "function")? metricsData[node.id][node.runtime].coldstart : 0
+                        delayMap.set(node.functionName, maxDelay)
+                    } else {
+                        if (node.type === "function")
+                            node.invokeTime = maxDelay - metricsData[node.id][node.runtime].starttime
+                        maxDelay += (node.type === "function")? metricsData[node.id][node.runtime].warmstart : 0
+                        delayMap.set(node.functionName, maxDelay)
+                    }
+                    if (node.type === "condition")
+                        conditionalDelay = maxDelay
+                }
+            }
+            console.log("delay map", delayMap);
+            console.log("notifcation plan", plan);
+        }
+        let counter = 0, maxCount = plan.length * constants.aggressivity
+        for (const node of plan) {
+            if (counter > maxCount)
+                break
+            console.log("notification set for ", node.functionName);
+            setTimeout(notify, node.invokeTime, node.runtime, node.id)
+            counter++
+        }
+    }
+}
@@ -522,5 +501,5 @@ function createDirectory(path) {
 
 module.exports = {
-    router
+    router, notify
 }
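The handler now threads a path record through orchestrator() and, when the run stays on the most-likely branches, persists it next to the chain's conditional probabilities in the explicit_chain database. A hypothetical example of what that record could look like for a three-level chain (all names invented, shape inferred from this diff):

    // Hypothetical path record for a chain func1 -> cond1 -> func2.
    const path = {
        onPath: true,                    // the observed run matched the predicted branches
        level: 2,                        // last execution level that was filled in
        dependency: {                    // wait_for lists copied from the chain map
            func1: [],
            cond1: ["func1"],
            func2: ["cond1"]
        },
        path: [                          // one array of executed nodes per level
            [{ functionName: "func1", type: "function", runtime: "container" }],
            [{ functionName: "cond1", type: "condition" }],
            [{ functionName: "func2", type: "function", runtime: "container" }]
        ]
    }
    console.log(JSON.stringify(path, null, 2))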
dispatch_system/dispatch_manager/index.js  (view file @ f5d6e780)

@@ -31,7 +31,8 @@ let usedPort = new Map(), // TODO: remove after integration with RM
     functionBranchTree = sharedMeta.functionBranchTree, // a tree to store function branch predictions
     metricsDB = sharedMeta.metricsDB,
-    metadataDB = sharedMeta.metadataDB
+    metadataDB = sharedMeta.metadataDB,
+    implicitChainDB = sharedMeta.implicitChainDB
 
 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
@@ -514,39 +515,105 @@ function autoscalar() {
  *
  * FIXME: Currently supports homogenous runtime chain i.e takes runtime as a param.
  * Change it to also profile runtime
+ * FIXME: Hardcoded container as a runtime. Make dynamic.
  */
 async function speculative_deployment(req, runtime) {
     if (constants.speculative_deployment && req.headers['x-resource-id'] === undefined) {
         // console.log(functionBranchTree, req.params.id);
+        if (!functionBranchTree.has(req.params.id)) {
+            let data = await libSupport.fetchData(implicitChainDB + req.params.id)
+            if (data.error !== "not_found") {
+                data.branches = new Map(data.branches)
+                functionBranchTree.set(req.params.id, data)
+            }
+        }
+        console.log(util.inspect(functionBranchTree, false, null, true /* enable colors */));
         if (functionBranchTree.has(req.params.id)) {
             let branchInfo = functionBranchTree.get(req.params.id)
             console.log("mle_path", branchInfo.mle_path);
             if (branchInfo.mle_path && branchInfo.mle_path.length > 1) {
-                for (let node of branchInfo.mle_path)
-                    node.id = node.node
-                let metrics = await libSupport.fetchData(metricsDB + "_bulk_get", {
-                    method: 'post',
-                    body: JSON.stringify({
-                        docs: branchInfo.mle_path
-                    }),
-                    headers: { 'Content-Type': 'application/json' },
-                })
-                console.log(util.inspect(metrics, false, null, true /* enable colors */))
-                for (let node of branchInfo.mle_path) {
-                    // console.log(functionToResource);
-                    if (!functionToResource.has(node.node + runtime) && !db.has(node.node + runtime)) {
-                        console.log("Deploying according to MLE path: ", node.node);
-                        let payload = [{
-                            topic: constants.topics.hscale,
-                            messages: JSON.stringify({ "runtime": "container", "functionHash": node.node })
-                        }]
-                        producer.send(payload, function () { })
-                        db.set(node.node + runtime, [])
-                    }
-                }
+                /**
+                 * calculating the depth upto which speculative deployment will work
+                 */
+                let deployDepth = branchInfo.mle_path.length * constants.aggressivity
+                if (constants.JIT_deployment) {
+                    /**
+                     * Perform Speculation with JIT
+                     */
+                    for (let node of branchInfo.mle_path)
+                        node.id = node.node
+                    let metricsPromise = libSupport.fetchData(metricsDB + "_bulk_get", {
+                        method: 'post',
+                        body: JSON.stringify({
+                            docs: branchInfo.mle_path
+                        }),
+                        headers: { 'Content-Type': 'application/json' },
+                    })
+                    let chainDataPromise = libSupport.fetchData(implicitChainDB + "_bulk_get", {
+                        method: 'post',
+                        body: JSON.stringify({
+                            docs: branchInfo.mle_path
+                        }),
+                        headers: { 'Content-Type': 'application/json' },
+                    })
+                    /**
+                     * Get the branch chain and the metrics data related to the MLE path
+                     */
+                    Promise.all([metricsPromise, chainDataPromise])
+                        .then(data => {
+                            let metrics = new Map(), chainData = new Map()
+                            let currentDelay = 0
+                            data[0] = data[0].results, data[1] = data[1].results
+                            for (let i = 0; i < deployDepth; i++) {
+                                let id = data[0][i].id
+                                metrics[id] = data[0][i].docs[0].ok
+                                id = data[1][i].id
+                                chainData[id] = data[1][i].docs[0].ok
+                                if (chainData[id])
+                                    chainData[id].branches = new Map(chainData[id].branches)
+                            }
+                            currentDelay = metrics[branchInfo.mle_path[0].id].container.starttime
+                            for (let i = 1; i < deployDepth; i++) {
+                                let parent = chainData[branchInfo.mle_path[i - 1].id]
+                                let self = branchInfo.mle_path[i].id
+                                console.log(self);
+                                currentDelay += parent.branches.get(self)[1]
+                                let invokeTime = currentDelay - metrics[self].container.starttime
+                                invokeTime = (invokeTime < 0)? 0 : invokeTime
+                                console.log(self, "current delay", currentDelay, "invoke time: ", currentDelay - metrics[self].container.starttime);
+                                setTimeout(chainHandler.notify, invokeTime, "container", self)
+                            }
+                        })
+                } else {
+                    /**
+                     * Perform Speculation without JIT
+                     */
+                    let depthCounter = 0
+                    for (let node of branchInfo.mle_path) {
+                        // console.log(functionToResource);
+                        if (depthCounter > deployDepth)
+                            break
+                        if (!functionToResource.has(node.node + runtime) && !db.has(node.node + runtime)) {
+                            console.log("Deploying according to MLE path: ", node.node);
+                            let payload = [{
+                                topic: constants.topics.hscale,
+                                messages: JSON.stringify({ "runtime": "container", "functionHash": node.node })
+                            }]
+                            producer.send(payload, function () { })
+                            db.set(node.node + runtime, [])
+                        }
+                        depthCounter++
+                    }
+                }
             }
         }
     }
@@ -554,7 +621,6 @@ async function speculative_deployment(req, runtime) {
     }
 }
 setInterval(libSupport.metrics.broadcastMetrics, 5000)
-// setInterval(libSupport.viterbi, 1000)
 setInterval(autoscalar, 1000);
 setInterval(dispatch, 1000);
 app.listen(port, () => logger.info(`Server listening on port ${port}!`))
\ No newline at end of file
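For implicit chains, speculative_deployment() in index.js now schedules just-in-time warm-ups along the MLE path: each branch entry carries a [probability, callDelay] pair, the delays are accumulated along the path, and a notification fires starttime milliseconds before the call is expected to arrive. A compressed sketch of that timing rule (variable names are illustrative, not the repo's API):

    // Sketch of the JIT scheduling rule added in index.js.
    function planInvocations(mlePath, metrics, chainData) {
        // start counting from the first function's container start-up time
        let currentDelay = metrics[mlePath[0].id].container.starttime
        const plan = []
        for (let i = 1; i < mlePath.length; i++) {
            const parent = chainData[mlePath[i - 1].id]
            const self = mlePath[i].id
            currentDelay += parent.branches.get(self)[1]   // smoothed parent->child call delay
            const invokeTime = Math.max(0, currentDelay - metrics[self].container.starttime)
            plan.push({ functionHash: self, invokeTime })  // i.e. setTimeout(notify, invokeTime, ...)
        }
        return plan
    }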
dispatch_system/dispatch_manager/lib.js  (view file @ f5d6e780)

+'use strict';
 const crypto = require('crypto');
 const fs = require('fs')
 const rp = require('request-promise');
@@ -7,6 +8,7 @@ const constants = require('.././constants.json')
 const secrets = require('./secrets.json')
 const metrics = require('./metrics')
 const sharedMeta = require('./shared_meta')
+const util = require('util')
 
 const { createLogger, format, transports } = winston;
 const heap = require('heap')
@@ -14,9 +16,9 @@ const heap = require('heap')
 let db = sharedMeta.db, // queue holding request to be dispatched
     resourceMap = sharedMeta.resourceMap, // map between resource_id and resource details like node_id, port, associated function etc
     functionToResource = sharedMeta.functionToResource, // a function to resource map. Each map contains a minheap of
                                                         // resources associated with the function
-    workerNodes = sharedMeta.workerNodes, // list of worker nodes currently known to the DM
-    functionBranchTree = sharedMeta.functionBranchTree // Holds the function path's and related probability distribution
+    functionBranchTree = sharedMeta.functionBranchTree, // Holds the function path's and related probability distribution
+    timelineQueue = new Map() // a temporary map holding request timestamps to be used for calulcating implicit chain invocation delays
 
 let kafka = require('kafka-node'),
     Producer = kafka.Producer,
@@ -49,13 +51,13 @@ function makeid(length) {
  * @param {string Function Hash value} functionHash
  */
 function generateExecutor(functionPath, functionHash) {
-    input = fs.readFileSync('./repository/worker_env/env.js')
-    functionFile = fs.readFileSync(functionPath + functionHash)
-    searchSize = "(resolve, reject) => {".length
-    insertIndex = input.indexOf("(resolve, reject) => {") + searchSize
-    output = input.slice(0, insertIndex) + functionFile + input.slice(insertIndex)
+    let input = fs.readFileSync('./repository/worker_env/env.js')
+    let functionFile = fs.readFileSync(functionPath + functionHash)
+    let searchSize = "(resolve, reject) => {".length
+    let insertIndex = input.indexOf("(resolve, reject) => {") + searchSize
+    let output = input.slice(0, insertIndex) + functionFile + input.slice(insertIndex)
     let hash = crypto.createHash('md5').update(output).digest("hex");
     console.log(hash);
@@ -69,7 +71,7 @@ function generateExecutor(functionPath, functionHash) {
  * @param {JSON} req the user request to be forwarded to the worker
  * @param {JSON} res Object to use to return the response to the user
  */
-function reverseProxy(req, res) {
+async function reverseProxy(req, res) {
     if (req.headers['x-chain-type'] !== 'explicit')
         branchChainPredictor(req)
 
     let runtime = req.body.runtime
@@ -88,8 +90,6 @@ function reverseProxy(req, res) {
     // logger.info("Request received at reverseproxy. Forwarding to: " + url);
     forwardTo.open_request_count += 1
     heap.heapify(functionHeap, compare) // maintain loadbalancer by heapifying the Map
-    // logger.info(functionHeap);
-
     var options = {
         method: 'POST',
         uri: url,
@@ -97,51 +97,56 @@ function reverseProxy(req, res) {
         json: true // Automatically stringifies the body to JSON
     };
-    rp(options)
-        .then(function (parsedBody) {
-            let serviceTime = Date.now() - res.timestamp
-            res.json(parsedBody)
-            forwardTo.open_request_count -= 1
-            heap.heapify(functionHeap, compare)
-            let functionHash = req.params.id
-            let functionData = functionBranchTree.get(functionHash)
-            if (functionData && functionData.req_count % 5 == 0) {
-                if (functionData.parent)
-                    viterbi(functionHash, functionData)
-                else {
-                    functionData.branches = Array.from(functionData.branches.entries())
-                    let payload = {
-                        method: 'put',
-                        body: JSON.stringify(functionBranchTree.get(functionHash)),
-                        headers: { 'Content-Type': 'application/json' }
-                    }
-                    fetchData(implicitChainDB + functionHash, payload)
-                        .then((updateStatus) => {
-                            console.log(updateStatus);
-                            if (updateStatus.error === undefined)
-                                functionData._rev = updateStatus.rev
-                        })
-                    functionData.branches = new Map(functionData.branches)
-                }
-            }
-            metrics.collectMetrics({ type: res.start, value: serviceTime, functionHash: req.params.id, runtime })
-        })
-        .catch(function (err) {
-            forwardTo.open_request_count -= 1
-            heap.heapify(functionHeap, compare)
-            logger.error("error" + err);
-            res.json(err.message).status(err.statusCode)
-        });
+    try {
+        let parsedBody = await rp(options)
+        let serviceTime = Date.now() - res.timestamp
+        res.json(parsedBody)
+        forwardTo.open_request_count -= 1
+        heap.heapify(functionHeap, compare)
+        let functionHash = req.params.id
+        let functionData = functionBranchTree.get(functionHash)
+        if (functionData && functionData.req_count % 5 == 0) {
+            if (functionData.parent)
+                viterbi(functionHash, functionData)
+            else {
+                let head = await fetch(implicitChainDB + functionHash, {
+                    method: "head"
+                })
+                functionData._rev = head.headers.get("etag").substring(1, head.headers.get("etag").length - 1)
+                functionData.branches = Array.from(functionData.branches.entries())
+                let payload = {
+                    method: 'put',
+                    body: JSON.stringify(functionBranchTree.get(functionHash)),
+                    headers: { 'Content-Type': 'application/json' }
+                }
+                fetchData(implicitChainDB + functionHash, payload)
+                    .then((updateStatus) => {
+                        console.log(updateStatus);
+                        if (updateStatus.error === undefined)
+                            functionData._rev = updateStatus.rev
+                    })
+                functionData.branches = new Map(functionData.branches)
+            }
+        }
+        metrics.collectMetrics({ type: res.start, value: serviceTime, functionHash: req.params.id, runtime })
+    }
+    catch (err) {
+        res.json(err.message).status(err.statusCode)
+        forwardTo.open_request_count -= 1
+        heap.heapify(functionHeap, compare)
+        logger.error("error" + err)
+    }
 }
 
 function getPort(usedPort) {
     let port = -1, ctr = 0
     do {
-        min = Math.ceil(30000);
-        max = Math.floor(60000);
+        let min = Math.ceil(30000);
+        let max = Math.floor(60000);
         port = Math.floor(Math.random() * (max - min + 1)) + min;
         ctr += 1;
         if (ctr > 30000) {
@@ -183,16 +188,24 @@ function compare(a, b) {
 async function branchChainPredictor(req) {
     // console.log(req.headers['x-resource-id']);
+    let destinationTimestamp = Date.now()
     if (!functionBranchTree.has(req.params.id)) {
         let data = await fetchData(implicitChainDB + req.params.id)
-        if (data.error === "not_found")
-            console.log("no data", req.params.id);
-        else {
+        if (data.error !== "not_found") {
             data.branches = new Map(data.branches)
             functionBranchTree.set(req.params.id, data)
         }
     }
 
+    if (functionBranchTree.has(req.params.id) && functionBranchTree.get(req.params.id).branches.size > 0) {
+        // console.log(timelineQueue.has(req.params.id), timelineQueue.get(req.params.id));
+        if (!timelineQueue.has(req.params.id)) {
+            timelineQueue.set(req.params.id, [])
+        }
+        timelineQueue.get(req.params.id).push(destinationTimestamp)
+    }
     if (req.headers['x-resource-id'] === undefined) {
         let functionHash = req.params.id
@@ -213,40 +226,51 @@ async function branchChainPredictor(req) {
     } else {
         let resource_id = req.headers['x-resource-id']
         let resource = resourceMap.get(resource_id)
-        let forwardBranch = req.params.id
+        let forwardBranch = req.params.id, callDelay = 0
+        if (timelineQueue.has(resource.functionHash)) {
+            let sourceTimestamp = timelineQueue.get(resource.functionHash).shift()
+            callDelay = destinationTimestamp - sourceTimestamp
+            // console.log("callDelay", callDelay);
+        }
         if (!functionBranchTree.has(resource.functionHash)) {
             let data = {
                 req_count: 1,
                 parent: false,
                 branches: new Map()
             }
-            data.branches.set(forwardBranch, 1)
+            data.branches.set(forwardBranch, [1, callDelay])
             functionBranchTree.set(resource.functionHash, data)
         } else {
            let branchInfo = functionBranchTree.get(resource.functionHash)
            if (!branchInfo.parent)
                branchInfo.req_count++
            if (branchInfo.branches.has(forwardBranch)) {
-                let branchProb = branchInfo.branches.get(forwardBranch)
+                callDelay = constants.metrics.alpha * branchInfo.branches.get(forwardBranch)[1] + callDelay * (1 - constants.metrics.alpha)
+                console.log("call delay", callDelay);
+                let branchProb = branchInfo.branches.get(forwardBranch)[0]
                 branchProb = (branchProb * (branchInfo.req_count - 1) + 1.0)
-                branchInfo.branches.set(forwardBranch, branchProb)
+                branchInfo.branches.set(forwardBranch, [branchProb, callDelay])
            } else {
-                branchInfo.branches.set(forwardBranch, 1.0)
+                branchInfo.branches.set(forwardBranch, [1.0, callDelay])
            }
            for (let [branch, prob] of branchInfo.branches.entries()) {
                if (branch !== forwardBranch)
-                    prob *= (branchInfo.req_count - 1)
-                prob /= branchInfo.req_count
+                    prob[0] *= (branchInfo.req_count - 1)
+                prob[0] /= branchInfo.req_count
                branchInfo.branches.set(branch, prob)
            }
         }
     }
+    // console.log("timelineQueue", timelineQueue);
-    // console.log("branch tree", functionBranchTree);
+    // console.log("branch tree", util.inspect(functionBranchTree, false, null, true /* enable colors */));
 }
 
 async function viterbi(node, metadata) {
+    console.log("function branch tree", functionBranchTree.get(node));
     let path = []
     let parents = [[node, {
         prob: 1,
@@ -272,7 +296,7 @@ async function viterbi(node, metadata) {
             let probability = 0
             if (siblings.has(subNode))
                 probability = siblings.get(subNode)
-            probability += branchProb * parentProbability
+            probability += branchProb[0] * parentProbability
             // console.log("prob", probability);
             siblings.set(subNode, probability)
@@ -288,7 +312,7 @@ async function viterbi(node, metadata) {
             maxProb = prob
         }
     })
-    parentIDs = Array.from(siblings.keys());
+    let parentIDs = Array.from(siblings.keys());
    for (const id of parentIDs) {
        let metadata = functionBranchTree.get(id)
        parents.push([
@@ -305,10 +329,15 @@ async function viterbi(node, metadata) {
    if (path.length > 1)
        console.log("path", path);
-    metadata.mle_path = path
    if (path.length > 1) {
+        metadata.mle_path = path
        metadata.branches = Array.from(metadata.branches.entries())
+        let head = await fetch(implicitChainDB + node, { method: "head" })
+        metadata._rev = head.headers.get("etag").substring(1, head.headers.get("etag").length - 1)
        let payload = {
            method: 'put',
            body: JSON.stringify(functionBranchTree.get(node)),
...
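In lib.js, branchChainPredictor() now stores a per-branch pair [probability, callDelay], where the delay between a parent invocation and the forwarded call is smoothed with an exponentially weighted moving average driven by constants.metrics.alpha. A worked example with made-up numbers:

    // EWMA update used for the new callDelay field (alpha = constants.metrics.alpha).
    const alpha = 0.7
    let storedDelay = 120        // previously smoothed delay, ms
    const observedDelay = 200    // destinationTimestamp - sourceTimestamp for this request

    storedDelay = alpha * storedDelay + (1 - alpha) * observedDelay
    console.log(storedDelay)     // 0.7 * 120 + 0.3 * 200 = 144 ms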
local_experiments/Speculation Off, Speculation On and Speculation + JIT Deployment.png  (new file, 0 → 100644, view file @ f5d6e780)  17.2 KB
local_experiments/Worker Idle Time before initial Request.png  (new file, 0 → 100644, view file @ f5d6e780)  20.7 KB
local_experiments/chainlength_vs_overhead.png  (new file, 0 → 100644, view file @ f5d6e780)  20.1 KB
local_experiments/function_chain_explicit.drawio  (new file, 0 → 100644, view file @ f5d6e780)
<mxfile host="Electron" modified="2020-04-02T13:40:21.628Z" agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/12.6.5 Chrome/80.0.3987.86 Electron/8.0.0 Safari/537.36" etag="YQW2D_Ds8ta8mPFvP2tW" version="12.6.5" type="device"><diagram id="_vj1HQFHM5RY5pbnoe9b" name="Page-1">3Vldk5owFP01PupAIuI+rh9r21VnZ+207r44qWQhWyROiAL99Y0SBAx1dfyA7YuTe5LA5dxz7w1Yg91FOGBo6Yyohd0a0KywBns1ANqttvjdAFEMGDqMAZsRK4b0FJiQP1iCmkRXxMJ+biGn1OVkmQfn1PPwnOcwxBgN8sveqJu/6xLZWAEmc+Sq6E9icUc+FjBT/AsmtpPcWW/dxTMLlCyWT+I7yKJBBoL9GuwySnk8WoRd7G64S3iJ9z38Y3bnGMMeP2bD6Pm9P/v2Cuj9eGY5zwMT3JN6S/rGo+SBGV15Ft7s0WqwQxl3qE095A4pXQpQF+A75jySoUIrTgXk8IUrZ3FI+DQzfpGX2ox7YdaIEsPjLJpmje2ehpGY6batlezzOaO/d2ERhHZUTiRNPl2xOT5ARKItxGzMD6yT6sVWTjaS8QGmCyw8FAsYdhEn67yKkBSjvVu32/pEiXAZaDJvQFOqRqaNnqgouUTsqNyVRl0MMm6k0FYLxbrwHzszMAoex7+i+WQ4NYPvENSbt9GFfpwu6lpD00FeHBB8JI+t9YQZEWRg9oFm9gJ6XREZZ2omF+0DoT3k5Bq5K3mnB12JtriOqLDC6AQO4XiyRNsHD0SNz0cV+cu47L6RcKOOU5JyjRnH4cE0krPGXja0pR1kSrKEnEw1bmvnc/jqj/nKBsbX0bD3w2KROYvmSRXIEib0M5GmR70Nc+dnTAXqIjhS0s1L18Wz4qOWr1vGp3XZ0nTNsBllha2w6xi3CdsZB5SGZhr5YJsV7EPHRh+W2YeA2ofAJ+tDUC+5D6mE/RdNB1a96RzyOivognNCtQQNWnlF3/BgVdgB2jftAFLVp72MZFLhJTt3Yl5cpNA31UQppNUsMy+aal4UNPpq5YWh7b1xwJITQ/1QU7Wj0WXaxUXSwjilf5R12L0rL6K7sve5Y1q9Umeopa7gVbRapW7/CADNkkudqZJYUP0uTqLCWAGvR/eLK5IozPRPg/h7b/rPC+z/BQ==</diagram></mxfile>
\ No newline at end of file
local_experiments/function_chain_explicit.png  (new file, 0 → 100644, view file @ f5d6e780)  32.1 KB
local_experiments/speculation_vs_runtime.png  (new file, 0 → 100644, view file @ f5d6e780)  14.4 KB
local_experiments/speculative_explicit.txt  (new file, 0 → 100644, view file @ f5d6e780)
vanilla 26.69 24.16 23.74 25.39 24.07
speculative 22.07 21.94 22.07 21.94 22.14
jit 23.3 23.25 23.29 23.15 23.79
63 107 68 75 72
61 111 60 79 80
61 79 79 67 75
75 78 63 72 74
47 49 48 49 46
53 51 54 48 51
214 154 144 136 173
125 170 147 188 162
5048 5026 5031 4971 5618
5837 5890 5041 5794 5126
10194 10164 9944 10140 10132
15119 15460 15023 15183 15146
94 78 81 80 110
98 87 72 80 81
81 76 86 145 75
77 74 96 118 74
67 63 68 68 64
181 167 181 89 757
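Reading the first three rows of speculative_explicit.txt as five end-to-end timings per mode (an interpretation, since the file carries no labels beyond the row names), the averages come out as follows:

    // Quick averaging of the labelled rows above.
    const runs = {
        vanilla:     [26.69, 24.16, 23.74, 25.39, 24.07],
        speculative: [22.07, 21.94, 22.07, 21.94, 22.14],
        jit:         [23.30, 23.25, 23.29, 23.15, 23.79]
    }
    for (const [name, values] of Object.entries(runs)) {
        const mean = values.reduce((a, b) => a + b, 0) / values.length
        console.log(name, mean.toFixed(2))   // vanilla 24.81, speculative 22.03, jit 23.36
    }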