added registration examples

anubhav
Nadesh Seen 2 years ago
parent 02accd6fd6
commit 70ca4fedfe

@ -27,6 +27,7 @@ responses = []
queue = []
list_of_func_ids = []
dag_responses = []
function_responses = []
x = 10
@ -113,21 +114,20 @@ def submit_dag_metadata(dag_metadata):
def execute_action(action_name):
def execute_action(action_name,request):
script_file = './actions.sh'
subprocess.call(['bash', script_file])
preprocess("action_url.txt")
url = action_url_mappings[action_name]
# print(request.json)
# json_data = json.loads(request.json)
reply = requests.post(url = url,json = request.json,verify=False)
return reply.json()
reply = requests.post(url = url,json = request,verify=False)
function_responses.append(reply.json())
def execute_dag(dag_name):
print("------------------------------------DAG START-----------------------------------------------")
def execute_dag(dag_name,request):
# print("------------------------------------DAG START-----------------------------------------------")
unique_id = uuid.uuid4()
print("DAG UNIQUE ID----------",unique_id)
dag_metadata={}
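The hunk above threads the parsed request body through as an explicit argument: execute_action() (and execute_dag() below) no longer reads request.json itself, and the action reply is appended to the shared function_responses list rather than returned. A minimal sketch of the new calling convention, using the module name from the routes file; the payload value is an assumed example:

# Hedged sketch: callers now pass the parsed JSON body (a plain dict), not the
# Flask request object, and the reply lands in function_responses.
import orchestrator

payload = {"number": 16}                          # assumed example input
orchestrator.execute_action("odd-even-action", payload)
print(orchestrator.function_responses[-1])        # reply.json() appended by execute_action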
@ -156,7 +156,7 @@ def execute_dag(dag_name):
for dag_item in dag_data:
if(flag==0): # To indicate the first action in the DAG
queue.append(dag_item["node_id"])
action_properties_mapping[dag_item["node_id"]]["arguments"] = request.json
action_properties_mapping[dag_item["node_id"]]["arguments"] = request
while(len(queue)!=0):
flag=flag+1
action = queue.pop(0)
@ -254,15 +254,12 @@ def execute_dag(dag_name):
dag_metadata["function_activation_ids"] = list_of_func_ids
# print("DAG SPEC AFTER WORKFLOW EXECUTION--------\n")
# print(action_properties_mapping)
# print('\n')
submit_dag_metadata(dag_metadata)
print("DAG ID---->FUNC IDS",dag_metadata)
print('\n')
# print('INTERMEDIATE OUTPUTS FROM ALL ACTIONS-----\n')
# get_redis_contents(redis_instace)
# print('\n')
redis_instace.flushdb()
print("Cleaned up in-memory intermediate outputs successfully\n")

@ -0,0 +1,21 @@
import requests
import sys
import json
def server():
url = "http://10.129.28.219:5001/register/function/image-blur"
files = [
('pythonfile', open(sys.argv[1],'rb')),
('dockerfile', open(sys.argv[2],'rb')),
('requirements.txt', open(sys.argv[3],'rb'))
]
reply = requests.post(url = url,files = files,verify=False)
print(reply.json())
def main():
server()
if __name__=="__main__":
main()
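The new registration client above works but leaves the three uploaded files open. A hedged variant using context managers; the endpoint and multipart field names are copied verbatim from the script, while the function name and the script name in the usage comment are hypothetical:

import sys
import requests

def register_function(python_file, dockerfile, requirements):
    # Endpoint and field names taken from the new script above; the rest of
    # this variant is an assumption.
    url = "http://10.129.28.219:5001/register/function/image-blur"
    with open(python_file, 'rb') as py, \
         open(dockerfile, 'rb') as docker, \
         open(requirements, 'rb') as reqs:
        files = [
            ('pythonfile', py),
            ('dockerfile', docker),
            ('requirements.txt', reqs),
        ]
        reply = requests.post(url=url, files=files, verify=False)
    return reply.json()

if __name__ == "__main__":
    # e.g. python3 register_image_blur.py image_blur.py Dockerfile requirements.txt
    print(register_function(sys.argv[1], sys.argv[2], sys.argv[3]))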

@ -0,0 +1,21 @@
import requests
import sys
import json
def server():
url = "http://10.129.28.219:5001/register/trigger/"
input_json_file = open(sys.argv[1])
params = json.load(input_json_file)
reply = requests.post(url = url,json = params,verify=False)
print(reply.json())
def main():
server()
if __name__=="__main__":
main()
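The trigger-registration client reads its payload from a JSON file passed on the command line. An equivalent one-off call that inlines the DAG-trigger payload added later in this commit, with the endpoint copied from the script above:

import requests

params = {
    "trigger_name": "mydagtrigger",
    "type": "dag",
    "dags": ["toonify"],
    "functions": ""
}
reply = requests.post(url="http://10.129.28.219:5001/register/trigger/",
                      json=params, verify=False)
print(reply.json())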

@ -271,7 +271,6 @@ def view_dag_metadata(dag_id):
# EXAMPLE URL: http://10.129.28.219:5001/run/action/odd-even-action
# http://10.129.28.219:5001/run/action/decode-function
# @app.route('/run/action/<action_name>/', methods=['POST'])
def execute_action(action_name):
try:
res = orchestrator.execute_action(action_name)
@ -286,56 +285,50 @@ def execute_action(action_name):
# EXAMPLE URL: http://10.129.28.219:5001/run/dag/odd-even-test/{"number":16}
@app.route('/run/<trigger_name>', methods=['GET', 'POST'])
def orchestrate_dag(trigger_name):
try:
with app.app_context():
triggers = validate_trigger.get_trigger_json(trigger_name)
# print(triggers)
if(len(triggers)==0): #could not fetch registered trigger
if(len(triggers)==0):
return {"response": "the given trigger is not registered in DAGit trigger store"}
else:
thread_list = []
result_queue = queue.Queue()
if(triggers[0]['type']=='dag'):
dags = triggers[0]['dags']
arguments = request.json
try:
for dag in dags:
thread_list.append(threading.Thread(target=orchestrator.execute_dag, args=[dag]))
thread_list.append(threading.Thread(target=orchestrator.execute_dag, args=[dag,arguments]))
for thread in thread_list:
thread.start()
for thread in thread_list:
thread.join()
print(orchestrator.dag_responses)
print(orchestrator.x)
# results = []
# while not result_queue.empty():
# result = result_queue.get()
# results.append(result)
return {"response":orchestrator.dag_responses}
# res = orchestrator.execute_dag(dag)
# return {"response":res,"status":200}
return {"response":orchestrator.dag_responses,"status":200}
except Exception as e:
print(e)
return {"response":"failed","status":400}
# thread_list.append(threading.Thread(target=orchestrator.execute_dag, args=[dag]))
# for thread in thread_list:
# thread.start()
# for thread in thread_list:
# thread.join()
# return {"response": dags}
else:
try:
functions = triggers[0]['functions']
arguments = request.json
for function in functions:
thread_list.append(threading.Thread(target=orchestrator.execute_action, args=[function]))
thread_list.append(threading.Thread(target=orchestrator.execute_action, args=[function,arguments]))
for thread in thread_list:
thread.start()
for thread in thread_list:
thread.join()
# return {"response": function}
return {"response":orchestrator.function_responses,"status":200}
except Exception as e:
print(e)
return {"response":"failed","status":400}
# res = orchestrator.execute_dag(dag_name)
# data = {"status": 200,"dag_output":res}
# return data
except Exception as e:
print(e)
data = {"status": 404 ,"message":"failed"}

@ -0,0 +1,6 @@
{
"trigger_name": "mydagtrigger",
"type":"dag",
"dags": ["toonify"],
"functions":""
}

@ -0,0 +1,6 @@
{
"trigger_name": "myfunctiontrigger-1",
"type":"function",
"dags":"",
"functions": ["decode-function"]
}
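Once registered, either trigger is fired through the /run/<trigger_name> route shown earlier. A sketch of invoking the DAG trigger; the JSON body is a placeholder, since the real input depends on what the first action of the toonify DAG expects:

import requests

url = "http://10.129.28.219:5001/run/mydagtrigger"
reply = requests.post(url, json={"number": 16}, verify=False)   # placeholder body
print(reply.json())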