v1 9:19 pm 2nd march commit

anubhav
commit 0b3d51c30b

@ -0,0 +1,2 @@
#!/bin/bash
# Save "<action> <url>" pairs from `wsk -i api list`, skipping the two header lines
wsk -i api list | awk -F " " '{if(NR>2)print $1,$4}' > action_url.txt

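For reference, `action_url.txt` then holds one "<action> <url>" pair per line; `preprocess()` in the gateway below strips the `/guest/` prefix from the action column. A hypothetical sample (host, port and API key are illustrative only):

```
/guest/odd-even-action https://10.129.28.219:9090/api/.../odd-even-check
/guest/increment-action https://10.129.28.219:9090/api/.../increment-by-2
```
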
@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

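A typical invocation, mirroring the usage files later in this commit (the image tag is hypothetical):

```
sudo ./buildAndPush.sh 10.129.28.219:5000/decode-image
```
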
@ -0,0 +1,12 @@
#!/bin/bash
# Usage: ./build_image.sh <function_dir_name> <docker_image_name>
function_dir_name=$1
docker_image_name=$2
cd $function_dir_name
chmod -R 777 ./
./buildAndPush.sh $docker_image_name

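This wrapper is what the gateway's `/register/function/<function_name>` endpoint invokes (as `build_image.sh`, via sudo) with the function's staging directory and image tag; a hypothetical invocation:

```
sudo ./build_image.sh /home/faasapp/Desktop/anubhav/function_modules/decode-function 10.129.28.219:5000/decode-function-image
```
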
@ -0,0 +1,511 @@
#!/usr/bin/env python3
import sys
import requests
import uuid
import re
import subprocess
import threading
import redis
import pickle
import json
import os
import time
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from flask import Flask, request, jsonify, send_file
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import pymongo
import shutil
app = Flask(__name__)
action_url_mappings = {} # Stores action -> url mappings
action_properties_mapping = {} # Stores each action name and its corresponding properties
responses = [] # Collects replies from parallel branches
queue = [] # FIFO list of actions that are ready to execute
list_of_func_ids = []
# def combine_json_files(files_list): #2 json files cant have same the same key. will lead to ambiguity
# combined_data = {}
# for file in files_list:
# with open(file, "r") as f:
# data = json.load(f)
# combined_data.update(data)
# return combined_data
def preprocess(filename):
with open(filename) as f:
lines = f.readlines()
action_url_list = []
for line in lines:
line = line.replace("\n", "")
line = line.replace("/guest/","")
action_url_list.append(line)
for item in action_url_list:
action_name = item.split(' ')[0]
url = item.split(' ')[1]
action_url_mappings[action_name] = url
def execute_thread(action,redis,url,json):
reply = requests.post(url = url,json=json,verify=False)
list_of_func_ids.append(reply.json()["activation_id"])
redis.set(action+"-output",pickle.dumps(reply.json()))
responses.append(reply.json())
def handle_parallel(queue,redis,action_properties_mapping,parallel_action_list):
thread_list = []
output_list = [] # List to store the output of actions whose outputs are required by downstream operations
for action in parallel_action_list:
action_names = action_properties_mapping[action]["outputs_from"]
next_action = action_properties_mapping[action]["next"]
if(next_action!=""):
if next_action not in queue:
queue.append(next_action)
if(len(action_names)==1): # if only output of one action is required
key = action_names[0]+"-output"
output = pickle.loads(redis.get(key))
action_properties_mapping[action]["arguments"] = output
else:
for item in action_names:
key = item+"-output"
output = pickle.loads(redis.get(key))
output_list.append(output)
action_properties_mapping[action]["arguments"] = output_list
url = action_url_mappings[action]
thread_list.append(threading.Thread(target=execute_thread, args=[action,redis,url,action_properties_mapping[action]["arguments"]]))
for thread in thread_list:
thread.start()
for thread in thread_list:
thread.join()
action_properties_mapping[next_action]["arguments"] = responses
return responses
def create_redis_instance():
r = redis.Redis(host="10.129.28.219", port=6379, db=2)
return r
def get_redis_contents(r):
keys = r.keys()
for key in keys:
value = pickle.loads(r.get(key))
if value is not None:
print(f"{key.decode('utf-8')}: {json.dumps(value, indent=4)}")
def connect_mongo():
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dags"]
return mycol
def get_dag_json(dag_name):
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dags"]
query = {"name":dag_name}
projection = {"_id": 0, "name": 1,"dag":1}
document = mycol.find(query, projection)
data = list(document)
return data
def submit_dag_metadata(dag_metadata):
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dag_metadata"]
try:
cursor = mycol.insert_one(dag_metadata)
print("OBJECT ID GENERATED",cursor.inserted_id)
data = {"message":"success"}
return json.dumps(data)
except Exception as err:
data = {"message":"failed","reason":err}
return json.dumps(data)
@app.route("/")
def home():
data = {"message": "Hello,welcome to create and manage serverless workflows.","author":"Anubhav Jana"}
return jsonify(data)
@app.route('/list/actions', methods=['GET'])
def list_actions():
list_of_actions = []
stream = os.popen('wsk -i action list')
actions = stream.read().strip().split(' ')
for action in actions:
if action=='' or action=='private' or action=='blackbox':
continue
else:
list_of_actions.append(action.split('/')[2])
data = {"list of available actions":list_of_actions}
return jsonify(data)
@app.route('/register/trigger/',methods=['POST'])
def register_trigger():
trigger_json = request.json
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["trigger_store"]
mycol = mydb["triggers"]
try:
cursor = mycol.insert_one(trigger_json)
print("OBJECT ID GENERATED",cursor.inserted_id)
if(trigger_json["type"]=="dag"):
targets = trigger_json["dags"]
elif(trigger_json["type"]=="function"):
targets = trigger_json["functions"]
data = {"message":"success","trigger_name":trigger_json["trigger_name"],"trigger":trigger_json["trigger"],"trigger_type":trigger_json["type"],"trigger_target":targets}
return json.dumps(data)
except Exception as e:
print("Error--->",e)
data = {"message":"fail","reason":e}
return json.dumps(data)
@app.route('/register/function/<function_name>',methods=['POST'])
def register_function(function_name):
list_of_file_keys = []
document = {}
function_dir = '/home/faasapp/Desktop/anubhav/function_modules' # Library of functions
new_dir = function_name
destination = os.path.join(function_dir, new_dir)
# Create the directory
os.makedirs(destination, exist_ok=True)
files = request.files
for filekey in files:
if filekey!='description':
list_of_file_keys.append(filekey)
for key in list_of_file_keys:
file = request.files[key]
filename = file.filename
# Save, copy, remove
file.save(file.filename)
shutil.copy(filename, destination)
os.remove(filename)
image_build_script = 'buildAndPush.sh'
shutil.copy(image_build_script, destination)
# Prepare data
document["function_name"] = function_name
document["image_build_script"] = 'buildAndPush.sh'
document["python_script"] = (request.files[list_of_file_keys[0]]).filename
document["dockerfile"] = (request.files[list_of_file_keys[1]]).filename
document["requirements.txt"] =(request.files[list_of_file_keys[2]]).filename
docker_image_name = "10.129.28.219:5000/"+function_name+"-image"
api_name = "/"+function_name+"-api"
path_name = "/"+function_name+"-path"
password = '1234'
# build docker image
cmd = ["sudo", "-S", "/home/faasapp/Desktop/anubhav/controlplane/build_image.sh",destination,docker_image_name]
# open subprocess with Popen
process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
# pass password to standard input
process.stdin.write(password + "\n")
process.stdin.flush()
# wait for process to complete and get output
output, errors = process.communicate()
# create action, register action with api, populate its mapping
subprocess.call(['./create_action.sh',destination,docker_image_name,function_name])
subprocess.call(['./register.sh',api_name,path_name,function_name])
subprocess.call(['bash', './actions.sh'])
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["function_store"]
mycol = mydb["functions"]
try:
cursor = mycol.insert_one(document)
print("OBJECT ID GENERATED",cursor.inserted_id)
data = {"message":"success"}
return json.dumps(data)
except Exception as e:
print("Error--->",e)
data = {"message":"fail","reason":e}
return json.dumps(data)
# data = {"message":"success"}
# return json.dumps(data)
@app.route('/register/dag',methods=['POST'])
def register_dag():
dag_json = request.json
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dags"]
try:
cursor = mycol.insert_one(dag_json)
print("OBJECT ID GENERATED",cursor.inserted_id)
data = {"message":"success"}
return json.dumps(data)
except Exception as e:
print("Error--->",e)
data = {"message":"fail","reason":e}
return json.dumps(data)
@app.route('/view/dag/<dag_name>',methods=['GET'])
def view_dag(dag_name):
dag_info_map = {}
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dags"]
document = mycol.find({"name":dag_name})
data = list(document)
dag_info_list = []
for items in data:
dag_info_list = items["dag"]
dag_info_map["DAG_Name--->>"] = items["name"]
dag_info_map["Number_of_nodes-->"] = len(dag_info_list)
dag_info_map["Starting_Node-->"] = dag_info_list[0]["node_id"]
for dag_items in dag_info_list:
node_info_map = {}
if(len(dag_items["properties"]["outputs_from"])==0):
node_info_map["get_outputs_from-->"] = "Starting action->No outputs consumed"
else:
node_info_map["get_outputs_from-->"] = dag_items["properties"]["outputs_from"]
node_info_map["primitive_type"] = dag_items["properties"]["primitive"]
if(dag_items["properties"]["primitive"]=="condition"):
node_info_map["next_node_id_if_condition_true"] = dag_items["properties"]["branch_1"]
node_info_map["next_node_id_if_condition_false"] = dag_items["properties"]["branch_2"]
else:
if(dag_items["properties"]["next"]!=""):
node_info_map["next_node_id-->"] = dag_items["properties"]["next"]
else:
node_info_map["next_node_id-->"] = "Ending node_id of a path"
dag_info_map[dag_items["node_id"]] = node_info_map
response = {"dag_data":dag_info_map}
formatted_json = json.dumps(response, indent=4)
return formatted_json
@app.route('/view/dags',methods=['GET'])
def view_dags():
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dags"]
document = mycol.find()
data = list(document)
# Serialize the documents to JSON (ObjectId values become strings)
json_data = json.dumps(data, default=str)
data = json.loads(json_data)
# Format the JSON string with indentation, wrapped under a top-level "dag" key
formatted_json = json.dumps({"dag": data}, indent=4)
return formatted_json
# EXAMPLE URL: http://10.129.28.219:5001/view/activation/8d7df93e8f2940b8bdf93e8f2910b80f
@app.route('/view/activation/<activation_id>', methods=['GET', 'POST'])
def list_activations(activation_id):
# activation_id = '74a7b6c707d14973a7b6c707d1a97392'
cmd = ['wsk', '-i', 'activation', 'get', activation_id]
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
json_res = result.stdout.decode().split('\n')[1:] # Ignore first line of output
res = json.loads('\n'.join(json_res))
d={}
d["action_name"] = res["name"]
d["duration"] = res["duration"]
d["status"] = res["response"]["status"]
d["result"] = res["response"]["result"]
return json.dumps(d)
# EXAMPLE URL: http://10.129.28.219:5001/view/76cc8a53-0a63-47bb-a5b5-9e6744f67c61
@app.route('/view/<dag_id>',methods=['GET'])
def view_dag_metadata(dag_id):
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["dag_store"]
mycol = mydb["dag_metadata"]
query = {"dag_id":dag_id}
projection = {"_id": 0,"dag_id":1,"dag_name":1,"function_activation_ids":1}
document = mycol.find(query, projection)
data = list(document)
response = {"dag_metadata":data}
return json.dumps(response)
# EXAMPLE URL: http://10.129.28.219:5001/run/action/odd-even-action/{"number":16}
@app.route('/run/action/<action_name>/<param_json>', methods=['GET', 'POST'])
def execute_action(action_name,param_json):
script_file = './actions.sh'
subprocess.call(['bash', script_file])
preprocess("action_url.txt")
url = action_url_mappings[action_name]
json_data = json.loads(param_json)
reply = requests.post(url = url,json = json_data,verify=False)
return reply.json()
# EXAMPLE URL: http://10.129.28.219:5001/run/dag/odd-even-test/{"number":16}
@app.route('/run/dag/<dag_name>/<param_json>', methods=['GET', 'POST'])
def execute_dag(dag_name,param_json):
print("------------------------------------DAG START-----------------------------------------------")
unique_id = uuid.uuid4()
print("DAG UNIQUE ID----------",unique_id)
dag_metadata={}
dag_metadata["dag_id"] = str(unique_id)
dag_metadata["dag_name"] = dag_name
# list_of_func_ids = []
######### Updates the list of action->url mapping ###################
script_file = './actions.sh'
subprocess.call(['bash', script_file])
#####################################################################
preprocess("action_url.txt")
### Create in-memory redis storage ###
redis_instance = create_redis_instance()
#######################################
action_properties_mapping = {} #Stores the action name and its corresponding properties
dag_res = json.loads(json.dumps(get_dag_json(dag_name)))
dag_data = dag_res[0]["dag"]
for dag_item in dag_data:
action_properties_mapping[dag_item["node_id"]] = dag_item["properties"]
flag = 0
for dag_item in dag_data:
if(flag==0): # To indicate the first action in the DAG
queue.append(dag_item["node_id"])
action_properties_mapping[dag_item["node_id"]]["arguments"] = json.loads(param_json)
while(len(queue)!=0):
flag=flag+1
action = queue.pop(0)
print("ACTION DEQUEUED FROM QUEUE : --->",action)
##########################################################
# HANDLE THE ACTION #
##########################################################
if isinstance(action, str):
# if(isinstance(action_properties_mapping[action]['arguments'],list)):
# pass
json_data = action_properties_mapping[action]["arguments"]
url = action_url_mappings[action]
reply = requests.post(url = url,json=json_data,verify=False)
list_of_func_ids.append(reply.json()["activation_id"])
# print("Line 292------------",reply.json()["activation_id"])
redis_instance.set(action+"-output",pickle.dumps(reply.json()))
action_type = action_properties_mapping[action]["primitive"]
if(action_type=="condition"):
branching_action = action_properties_mapping[action]["branch_1"]
alternate_action = action_properties_mapping[action]["branch_2"]
result=reply.json()["result"]
condition_op = action_properties_mapping[action]["condition"]["operator"]
if(condition_op=="equals"):
if(isinstance(action_properties_mapping[action]["condition"]["target"], str)):
target = action_properties_mapping[action]["condition"]["target"]
else:
target=int(action_properties_mapping[action]["condition"]["target"])
if(result==target):
output_list = [] # List to store the output of actions whose outputs are required by downstream operations
queue.append(branching_action)
action_names = action_properties_mapping[branching_action]["outputs_from"] # Get the list of actions whose output will be used
if(len(action_names)==1): # if only output of one action is required
key = action_names[0]+"-output"
output = pickle.loads(redis_instance.get(key))
action_properties_mapping[branching_action]["arguments"] = output
else:
for item in action_names:
key = item+"-output"
output = pickle.loads(redis_instance.get(key))
output_list.append(output)
action_properties_mapping[branching_action]["arguments"] = output_list
else:
output_list = [] # List to store the output of actions whose outputs are required by downstream operations
queue.append(alternate_action)
action_names = action_properties_mapping[alternate_action]["outputs_from"] # Get the list of actions whose output will be used
if(len(action_names)==1): # if only output of one action is required
key = action_names[0]+"-output"
output = pickle.loads(redis_instance.get(key))
action_properties_mapping[alternate_action]["arguments"] = output
else:
for item in action_names:
key = item+"-output"
output = pickle.loads(redis_instance.get(key))
output_list.append(output)
action_properties_mapping[alternate_action]["arguments"] = output_list
if(condition_op=="greater_than"):
pass
if(condition_op=="greater_than_equals"):
pass
if(condition_op=="less_than"):
pass
if(condition_op=="less_than_equals"):
pass
elif(action_type=="serial"):
next_action = action_properties_mapping[action]["next"]
if(next_action!=""):
output_list = [] # List to store the output of actions whose outputs are required by downstream operations
queue.append(next_action)
action_names = action_properties_mapping[next_action]["outputs_from"] # Get the list of actions whose output will be used
if(len(action_names)==1): # if only output of one action is required
key = action_names[0]+"-output"
output = pickle.loads(redis_instance.get(key))
action_properties_mapping[next_action]["arguments"] = output
else:
for item in action_names:
key = item+"-output"
output = pickle.loads(redis_instance.get(key))
output_list.append(output)
action_properties_mapping[next_action]["arguments"] = output_list
elif(action_type=="parallel"):
parallel_action_list = action_properties_mapping[action]["next"]
queue.append(parallel_action_list)
else:
reply = handle_parallel(queue,redis_instance,action_properties_mapping,action)
dag_metadata["function_activation_ids"] = list_of_func_ids
print("DAG SPEC AFTER WORKFLOW EXECUTION--------\n")
print(action_properties_mapping)
print('\n')
submit_dag_metadata(dag_metadata)
print("DAG ID---->FUNC IDS",dag_metadata)
print('\n')
print('INTERMEDIATE OUTPUTS FROM ALL ACTIONS-----\n')
get_redis_contents(redis_instance)
print('\n')
redis_instance.flushdb()
print("Cleaned up in-memory intermediate outputs successfully\n")
print("------------------------DAG END-----------------------------------")
if(isinstance(reply,list)):
return({"dag_id": dag_metadata["dag_id"],
"result": reply
})
else:
return({
"dag_id": dag_metadata["dag_id"],
"result": reply.json()
})
# return({
# "result": "success"
# })
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)

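For orientation, a hedged sketch of the DAG document shape that `register_dag`, `view_dag` and `execute_dag` all assume; the field names come from the code above, while the node ids and values are hypothetical:

```
{
  "name": "odd-even-test",
  "dag": [
    {
      "node_id": "odd-even-action",
      "properties": {
        "primitive": "condition",
        "condition": {"operator": "equals", "target": "even"},
        "branch_1": "even-print-action",
        "branch_2": "odd-print-action",
        "next": "",
        "outputs_from": []
      }
    },
    {
      "node_id": "even-print-action",
      "properties": {
        "primitive": "serial",
        "next": "",
        "outputs_from": ["odd-even-action"]
      }
    }
  ]
}
```
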
@ -0,0 +1,12 @@
#!/bin/bash
# Usage: ./create_action.sh <function_dir_name> <docker_image_name> <function_name>
function_dir_name=$1
docker_image_name=$2
function_name=$3
cd $function_dir_name
chmod -R 777 ./
wsk -i action create $function_name --docker $docker_image_name --web=true --timeout=300000

@ -0,0 +1,19 @@
import requests
import sys
import json
def server():
url = "http://10.129.28.219:5001/register/dag"
input_json_file = open(sys.argv[1])
params = json.load(input_json_file)
reply = requests.post(url = url,json = params,verify=False)
print(reply.json())
def main():
server()
if __name__=="__main__":
main()

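The client takes the DAG spec file as its only argument (script and spec filenames hypothetical):

```
python3 register_dag.py odd-even-test.json
```
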
(Two binary image files added in this commit; contents not shown. Sizes: 214 KiB and 140 KiB.)

@ -0,0 +1,29 @@
#!/bin/bash
# ./register.sh /decode-function /decode decode-action [SAMPLE USE]
api_name=$1
path_name=$2
action_name=$3
wsk -i api create $api_name $path_name post $action_name --response-type json
# ./register.sh /increment /increment-by-2 increment-action --response-type=json
# ./register.sh /multiply /multiply-by-2 multiply-action --response-type=json
# ./register.sh /prime /prime-check prime-check-action --response-type=json
# ./register.sh /even /even-print even-print-action --response-type=json
# ./register.sh /odd /odd-print odd-print-action --response-type=json
# ./register.sh /odd-even /odd-even-check odd-even-action --response-type=json
# ./register.sh /dummy /dummy3 dummy3-action --response-type=json

@ -0,0 +1,48 @@
import requests
import sys
import json
import pymongo
def get_trigger():
myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
mydb = myclient["trigger_store"]
mycol = mydb["triggers"]
# query = {"dag_id":dag_id}
projection = {"_id": 0,"trigger_name":1,"type":1,"trigger":1,"dags":1,"functions":1}
document = mycol.find()
data = list(document)
print(data)
json_data = json.dumps(data, default=str)
data = json.loads(json_data)
# Format the JSON string with indentation, wrapped under a top-level "trigger_data" key
formatted_json = json.dumps({"trigger_data": data}, indent=4)
return formatted_json
def main():
res = json.loads(get_trigger())
print(res)
# def server():
# # server_ip = "10.129.28.219"
# # server_port = "5001"
# url = "http://10.129.28.219:5001/register/trigger/myfirsttrigger"
# # data = {"trigger_name":"myfirsttrigger", "dags":['odd-even-test']}
# # json_data = json.dumps(data)
# input_json_file = open(sys.argv[1])
# params = json.load(input_json_file)
# reply = requests.post(url = url,json = params,verify=False)
# print(reply.json())
# def main():
# server()
if __name__=="__main__":
main()

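Based on the fields `register_trigger` reads (`trigger_name`, `type`, `trigger`, and `dags` or `functions`), a stored trigger document presumably looks like this sketch; the values are hypothetical, reusing names from the commented-out client above:

```
{
  "trigger_name": "myfirsttrigger",
  "type": "dag",
  "trigger": "/myfirsttrigger",
  "dags": ["odd-even-test"]
}
```
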
@ -0,0 +1,28 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD assemble.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,126 @@
#!/usr/bin/env python3
import os
import time
import json
import sys
import paramiko
import time
import pysftp
import logging
def main():
import time as time1
start = time1.time()
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
sftp = pysftp.Connection(
host="10.129.28.219",
username="faasapp",
password="1234",
cnopts=cnopts
)
logging.info("connection established successfully")
except Exception:
logging.error('failed to establish connection to targeted server')
contour_directory = "contoured-images"
is_contour_dir = os.path.isdir(contour_directory)
if(is_contour_dir == False):
os.mkdir(contour_directory)
edge_detect__directory = "edge-detected-images"
is_edgedetect_dir = os.path.isdir(edge_detect__directory)
if(is_edgedetect_dir == False):
os.mkdir(edge_detect__directory)
remote_download_path_contour = "/home/faasapp/Desktop/anubhav/contour-finding/"+contour_directory
remote_download_path_edge_detection = "/home/faasapp/Desktop/anubhav/edge-detection/"+edge_detect__directory
remote_upload_path_contour = "/home/faasapp/Desktop/anubhav/assemble_images/"+contour_directory
remote_upload_path_edge_detect = "/home/faasapp/Desktop/anubhav/assemble_images/"+edge_detect__directory
try:
sftp.chdir(remote_download_path_contour) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_download_path_contour) # Create remote_path
sftp.chdir(remote_download_path_contour)
try:
sftp.chdir(remote_download_path_edge_detection) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_download_path_edge_detection) # Create remote_path
sftp.chdir(remote_download_path_edge_detection)
try:
sftp.chdir(remote_upload_path_contour) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_upload_path_contour) # Create remote_path
sftp.chdir(remote_upload_path_contour)
try:
sftp.chdir(remote_upload_path_edge_detect) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_upload_path_edge_detect) # Create remote_path
sftp.chdir(remote_upload_path_edge_detect)
current_path = os.getcwd()
sftp.get_d(remote_download_path_contour,preserve_mtime=True,localdir=contour_directory)
sftp.put_d(current_path+"/"+contour_directory,preserve_mtime=True,remotepath=remote_upload_path_contour)
sftp.get_d(remote_download_path_edge_detection,preserve_mtime=True,localdir=edge_detect__directory)
sftp.put_d(current_path+"/"+edge_detect__directory,preserve_mtime=True,remotepath=remote_upload_path_edge_detect)
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
contour_mappings = params["value"][0]["image_contour_mappings"]
contour_exec_time = params["value"][0]["contour_execution_time"]
edge_detection_exec_time = params["value"][1]["edge_detection_execution_time"]
decode_execution_time = params["value"][0]["decode_execution_time"]
decode_images_sizes = params["value"][1]["decoded_images_size"]
contour_image_sizes = params["value"][0]["contour_detected_images_size"]
edge_detect_image_sizes = params["value"][1]["edge_detected_images_size"]
sorted_by_decode_image_sizes = sorted(decode_images_sizes.items(), key=lambda x:x[1], reverse=True)
sorted_contour_image_sizes = sorted(contour_image_sizes.items(), key=lambda x:x[1], reverse=True)
sorted_by_edge_detect_image_sizes = sorted(edge_detect_image_sizes.items(), key=lambda x:x[1], reverse=True)
highest_decode_image_size = sorted_by_decode_image_sizes[0][0]
highest_contour_images = sorted_contour_image_sizes[0][0]
highest_edge_detected_images = sorted_by_edge_detect_image_sizes[0][0]
# edge_detection_output = params["value"][1]["edge_detection_output"]
# contour_detection_output = params["value"][0]["contour_images"]
sorted_images_by_no_of_contours = sorted(contour_mappings.items(), key=lambda x:x[1], reverse=True)
highest_number_of_contour_line_image = sorted_images_by_no_of_contours[0][0]
end = time1.time()
assemble_exec_time = end-start
print(json.dumps({ "assemble_activation_id": str(activation_id),
"contour_exec_time": contour_exec_time,
"assemble_exec_time": assemble_exec_time,
"edge_detect_time": edge_detection_exec_time,
"decode_time": decode_execution_time,
"contour_lines_image_mappings": contour_mappings,
"image_with_highest_number_of_contour_lines": highest_number_of_contour_line_image,
"decode_image_sizes": decode_images_sizes,
"contour_image_sizes": contour_image_sizes,
"edge_detected_image_sizes": edge_detect_image_sizes,
"highest_size_decode_image": highest_decode_image_size,
"highest_contour_image" : highest_contour_images,
"highest_edge_detected_image": highest_edge_detected_images
}))
if __name__ == "__main__":
main()

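`assemble.py` expects `params["value"]` to carry the JSON outputs of the contour action (index 0) and the edge-detection action (index 1). A hedged sketch of the fields it reads, with hypothetical sizes and counts:

```
{
  "value": [
    {
      "image_contour_mappings": {"images/Image0.jpg": 412},
      "contour_execution_time": 3.1,
      "decode_execution_time": 7.8,
      "contour_detected_images_size": {"contoured-images/contour0.jpg": 52341}
    },
    {
      "edge_detection_execution_time": 2.4,
      "decoded_images_size": {"images/Image0.jpg": 81220},
      "edge_detected_images_size": {"edge-detected-images/detected-edges-0.jpg": 40120}
    }
  ]
}
```
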
@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,31 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN pip install opencv-python
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD contour.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,107 @@
#!/usr/bin/env python3
import os
from io import BytesIO
import cv2
import time
import numpy as np
import subprocess
import logging
import json
import sys
import paramiko
import pysftp
def main():
import time as time1
start = time1.time()
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
sftp = pysftp.Connection(
host="10.129.28.219",
username="faasapp",
password="1234",
cnopts=cnopts
)
logging.info("connection established successfully")
except Exception:
logging.error('failed to establish connection to targeted server')
contour_directory = "contoured-images"
is_contour_dir = os.path.isdir(contour_directory)
if(is_contour_dir == False):
os.mkdir(contour_directory)
images_dir = "images"
is_images_dir = os.path.isdir(images_dir)
if(is_images_dir == False):
os.mkdir(images_dir)
remote_download_path = "/home/faasapp/Desktop/anubhav/sprocket-decode/"+images_dir
remote_upload_path = "/home/faasapp/Desktop/anubhav/contour-finding/"+contour_directory
try:
sftp.chdir(remote_download_path) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_download_path) # Create remote_path
sftp.chdir(remote_download_path)
try:
sftp.chdir(remote_upload_path) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_upload_path) # Create remote_path
sftp.chdir(remote_upload_path)
sftp.get_d(remote_download_path,preserve_mtime=True,localdir=images_dir)
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
decode_activation_id = params["activation_id"]
parts = params["parts"]
image_contour_mappings = {}
contour_detected_images = {}
for i in range(0,parts):
img_name = images_dir+'/Image' + str(i) + '.jpg'
img = cv2.imread(img_name)
img = cv2.resize(img,None,fx=0.9,fy=0.9)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
ret, binary = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU)
contours, hierarchy = cv2.findContours(binary, mode=cv2.RETR_TREE, method=cv2.CHAIN_APPROX_NONE)
contour_list_for_each_image=[]
cv2.drawContours(img, contours, -1, (0, 255, 0), thickness=2, lineType=cv2.LINE_AA)
for contour in contours:
approx = cv2.approxPolyDP(contour, 0.01* cv2.arcLength(contour, True), True)
contour_list_for_each_image.append(len(approx))
image_contour_mappings[img_name] = sum(contour_list_for_each_image)
filename = 'contour' + str(i) +'.jpg'
# Saving the image
cv2.imwrite(contour_directory+"/"+filename, img)
contour_img = cv2.imread(contour_directory+"/"+filename)
# contour_height, contour_width = contour_img.shape[:2]
contour_detected_size = os.stat(contour_directory+"/"+filename).st_size
contour_detected_images[contour_directory+"/"+filename] = contour_detected_size
current_path = os.getcwd()
sftp.put_d(current_path+"/"+contour_directory,preserve_mtime=True,remotepath=remote_upload_path)
contour_images = os.listdir(current_path+"/"+contour_directory)
end = time1.time()
exec_time = end-start
decode_execution_time = params["exec_time_decode"]
print(json.dumps({ "contour_images": contour_images,
"image_contour_mappings": image_contour_mappings,
"contour_detect_activation_id": str(activation_id),
"number_of_images_processed": parts,
"contour_execution_time": exec_time,
"decode_execution_time": decode_execution_time,
"contour_detected_images_size": contour_detected_images
}))
if __name__ == "__main__":
main()

@ -0,0 +1,5 @@
opencv-python
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,31 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN pip install opencv-python
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD edge_detect.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,104 @@
#!/usr/bin/env python3
import os
import pickle
from io import BytesIO
import cv2
import time
import numpy as np
import subprocess
import logging
import json
import sys
import paramiko
import pysftp
def main():
import time as time1
start = time1.time()
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
sftp = pysftp.Connection(
host="10.129.28.219",
username="faasapp",
password="1234",
cnopts=cnopts
)
logging.info("connection established successfully")
except Exception:
logging.error('failed to establish connection to targeted server')
edge_detect__directory = "edge-detected-images"
is_edgedetect_dir = os.path.isdir(edge_detect__directory)
if(is_edgedetect_dir == False):
os.mkdir(edge_detect__directory)
images_dir = "images"
is_images_dir = os.path.isdir(images_dir)
if(is_images_dir == False):
os.mkdir(images_dir)
remote_download_path = "/home/faasapp/Desktop/anubhav/sprocket-decode/"+images_dir
remote_upload_path = "/home/faasapp/Desktop/anubhav/edge-detection/"+edge_detect__directory
try:
sftp.chdir(remote_download_path) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_download_path) # Create remote_path
sftp.chdir(remote_download_path)
try:
sftp.chdir(remote_upload_path) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_upload_path) # Create remote_path
sftp.chdir(remote_upload_path)
sftp.get_d(remote_download_path,preserve_mtime=True,localdir=images_dir)
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
decode_activation_id = params["activation_id"]
decoded_images_sizes = {}
edge_detected_images = {}
parts = params["parts"]
for i in range(0,parts):
img_name = images_dir+'/Image' + str(i) + '.jpg'
img = cv2.imread(img_name)
# height, width = img.shape[:2]
size = os.stat(img_name).st_size
decoded_images_sizes[img_name] = size
image= cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
canny_output = cv2.Canny(image, 80, 150)
filename = 'detected-edges-' + str(i) +'.jpg'
# Saving the image
cv2.imwrite(edge_detect__directory+"/"+filename, canny_output)
edge_img = cv2.imread(edge_detect__directory+"/"+filename)
# edge_height, edge_width = edge_img.shape[:2]
edge_detected_size = os.stat(edge_detect__directory+"/"+filename).st_size
edge_detected_images[edge_detect__directory+"/"+filename] = edge_detected_size
current_path = os.getcwd()
sftp.put_d(current_path+"/"+edge_detect__directory,preserve_mtime=True,remotepath=remote_upload_path)
detected_edge_images = os.listdir(current_path+"/"+edge_detect__directory)
end = time1.time()
exec_time = end-start
decode_execution_time = params["exec_time_decode"]
print(json.dumps({ "edge_detection_output": detected_edge_images,
"edge_detect_activation_id": str(activation_id),
"number_of_images_processed": parts,
"edge_detection_execution_time": exec_time,
"decode_execution_time": decode_execution_time,
"edge_detected_images_size": edge_detected_images,
"decoded_images_size": decoded_images_sizes
}))
if __name__ == "__main__":
main()

@ -0,0 +1,6 @@
opencv-python
redis
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,30 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD even_print.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,34 @@
Blackbox Actions
================
1. Download and install the OpenWhisk CLI
2. Install OpenWhisk Docker action skeleton.
3. Add user code
4. Build image
5. Push image
6. Test out action with CLI
The script `buildAndPush.sh` is provided for your convenience. The following command sequence
runs the included example Docker action container using OpenWhisk.
```
# install dockerSkeleton with example
wsk sdk install docker
# change working directory
cd dockerSkeleton
# build/push, argument is your docker hub user name and a valid docker image name
./buildAndPush.sh <dockerhub username>/whiskexample
# create docker action
wsk action create dockerSkeletonExample --docker <dockerhub username>/whiskexample
# invoke created action
wsk action invoke dockerSkeletonExample --blocking
```
The executable file must be located in the `/action` folder.
The name of the executable must be `/action/exec` and can be any file with executable permissions.
The sample docker action runs `example.c` by copying and building the source inside the container
as `/action/exec` (see `Dockerfile` lines 7 and 14).

@ -0,0 +1,2 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/even-print-image
wsk -i action create even-print-action --docker 10.129.28.219:5000/even-print-image --web=true --timeout=300000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,23 @@
#!/usr/bin/env python3
import os
import json
import sys
def main():
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
number = params["number"]
print(json.dumps({ "activation_id": str(activation_id),
"number": number,
"result": "The number is even",
}))
return({ "activation_id": str(activation_id),
"number": number,
"result": "The number is even",
})
if __name__ == "__main__":
main()

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,30 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD increment.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,34 @@
Blackbox Actions
================
1. Download and install the OpenWhisk CLI
2. Install OpenWhisk Docker action skeleton.
3. Add user code
4. Build image
5. Push image
6. Test out action with CLI
The script `buildAndPush.sh` is provided for your convenience. The following command sequence
runs the included example Docker action container using OpenWhisk.
```
# install dockerSkeleton with example
wsk sdk install docker
# change working directory
cd dockerSkeleton
# build/push, argument is your docker hub user name and a valid docker image name
./buildAndPush.sh <dockerhub username>/whiskexample
# create docker action
wsk action create dockerSkeletonExample --docker <dockerhub username>/whiskexample
# invoke created action
wsk action invoke dockerSkeletonExample --blocking
```
The executable file must be located in the `/action` folder.
The name of the executable must be `/action/exec` and can be any file with executable permissions.
The sample docker action runs `example.c` by copying and building the source inside the container
as `/action/exec` (see `Dockerfile` lines 7 and 14).

@ -0,0 +1,6 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/increment-image
wsk -i action create increment-action --docker 10.129.28.219:5000/increment-image --web=true --timeout=300000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,24 @@
#!/usr/bin/env python3
import os
import json
import sys
def main():
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
number = params["number"]
number = number + 2
print(json.dumps({ "activation_id": str(activation_id),
"number": number,
"result": "The number is incremented by 2",
}))
return({ "activation_id": str(activation_id),
"number": number,
"result": "The number is incremented by 2",
})
if __name__ == "__main__":
main()

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,30 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD mul.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,34 @@
Blackbox Actions
================
1. Download and install the OpenWhisk CLI
2. Install OpenWhisk Docker action skeleton.
3. Add user code
4. Build image
5. Push image
6. Test out action with CLI
The script `buildAndPush.sh` is provided for your convenience. The following command sequence
runs the included example Docker action container using OpenWhisk.
```
# install dockerSkeleton with example
wsk sdk install docker
# change working directory
cd dockerSkeleton
# build/push, argument is your docker hub user name and a valid docker image name
./buildAndPush.sh <dockerhub username>/whiskexample
# create docker action
wsk action create dockerSkeletonExample --docker <dockerhub username>/whiskexample
# invoke created action
wsk action invoke dockerSkeletonExample --blocking
```
The executable file must be located in the `/action` folder.
The name of the executable must be `/action/exec` and can be any file with executable permissions.
The sample docker action runs `example.c` by copying and building the source inside the container
as `/action/exec` (see `Dockerfile` lines 7 and 14).

@ -0,0 +1,6 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/multiply-image
wsk -i action create multiply-action --docker 10.129.28.219:5000/multiply-image --web=true --timeout=300000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,24 @@
#!/usr/bin/env python3
import os
import json
import sys
def main():
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
number = params["number"]
number = number * 2
print(json.dumps({ "activation_id": str(activation_id),
"number": number,
"result": "The number is multiplied by 2",
}))
return({ "activation_id": str(activation_id),
"number": number,
"result": "The number is multiplied by 2",
})
if __name__ == "__main__":
main()

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,30 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD odd_even_check.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,30 @@
#!/usr/bin/env python3
import os
import time
import json
import sys
def main():
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
number = params["number"]
if(number%2==0):
result="even"
else:
result="odd"
print(json.dumps({"activation_id": str(activation_id),
"number": number,
"result": result,
}))
return({ "activation_id": str(activation_id),
"number": number,
"result": result,
})
if __name__ == "__main__":
main()

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,30 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD odd_print.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,27 @@
#!/usr/bin/env python3
import os
import time
import json
import sys
import logging
def main():
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
number = params["number"]
# result1 = params["result"]
print(json.dumps({ "activation_id": str(activation_id),
"number": number,
"result": "The number is odd",
}))
return({ "activation_id": str(activation_id),
"number": number,
"result": "The number is odd",
})
if __name__ == "__main__":
main()

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,27 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD prime_check.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,34 @@
Blackbox Actions
================
1. Download and install the OpenWhisk CLI
2. Install OpenWhisk Docker action skeleton.
3. Add user code
4. Build image
5. Push image
6. Test out action with CLI
The script `buildAndPush.sh` is provided for your convenience. The following command sequence
runs the included example Docker action container using OpenWhisk.
```
# install dockerSkeleton with example
wsk sdk install docker
# change working directory
cd dockerSkeleton
# build/push, argument is your docker hub user name and a valid docker image name
./buildAndPush.sh <dockerhub username>/whiskexample
# create docker action
wsk action create dockerSkeletonExample --docker <dockerhub username>/whiskexample
# invoke created action
wsk action invoke dockerSkeletonExample --blocking
```
The executable file must be located in the `/action` folder.
The name of the executable must be `/action/exec` and can be any file with executable permissions.
The sample docker action runs `example.c` by copying and building the source inside the container
as `/action/exec` (see `Dockerfile` lines 7 and 14).

@ -0,0 +1,4 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/prime-check-image
wsk -i action create prime-check-action --docker 10.129.28.219:5000/prime-check-image --web=true --timeout=300000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,32 @@
#!/usr/bin/env python3
import os
import json
import sys
def main():
activation_id = os.environ.get('__OW_ACTIVATION_ID')
params = json.loads(sys.argv[1])
number = params["number"]
flag=0
if(number<2): # 0 and 1 are not prime
flag=1
for i in range(2,number//2+1): # include number//2 so e.g. 4 and 6 are caught
if(number%i==0):
flag=1
break
if(flag==0):
result="The number is prime"
else:
result = "The number is not prime"
print(json.dumps({ "activation_id": str(activation_id),
"number": number,
"result": result,
}))
return({ "activation_id": str(activation_id),
"number": number,
"result": result,
})
if __name__ == "__main__":
main()

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,27 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN apk add ffmpeg
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD decode.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
echo "Docker build failed"
exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
echo "Docker push failed"
exit 1
fi

@ -0,0 +1,131 @@
#!/usr/bin/env python3
import os
import time
import redis
import pickle
from shutil import which
import subprocess
import logging
import json
import sys
import ffmpeg
import pysftp
from urllib.request import urlopen,urlretrieve
logging.basicConfig(level=logging.INFO)
def main():
import time as time1
start = time1.time()
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
try:
sftp = pysftp.Connection(
host="10.129.28.219",
username="faasapp",
password="1234",
cnopts=cnopts
)
logging.info("connection established successfully")
except Exception:
logging.error('failed to establish connection to targeted server')
images_dir = "images"
try:
remoteArtifactPath="/home/faasapp/Desktop/anubhav/sprocket-decode/"+images_dir
filesInRemoteArtifacts = sftp.listdir(path=remoteArtifactPath)
for file in filesInRemoteArtifacts:
sftp.remove(remoteArtifactPath+"/"+file)
except:
is_images_dir = os.path.isdir(images_dir)
if(is_images_dir == False):
os.mkdir(images_dir)
r = redis.Redis(host="10.129.28.219", port=6379, db=2)
r.flushdb() #flush previous content if any
activation_id = os.environ.get('__OW_ACTIVATION_ID')
#dwn_link = 'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4'
params = json.loads(sys.argv[1])
dwn_link = params["filename"]
# Set how many spots you want to extract a video from.
parts = params["parts"]
file_name = 'decode_video.mp4'
urlretrieve(dwn_link, file_name)
sftp.put(file_name,preserve_mtime=True,remotepath="/home/faasapp/Desktop/anubhav/sprocket-decode/input.mp4")
probe = ffmpeg.probe(file_name)
video_stream = next((stream for stream in probe['streams'] if stream['codec_type'] == 'video'), None)
duration = float(probe['streams'][0]['duration']) // 2 # only sample frames from the first half of the video
width = int(video_stream['width'])
#width = probe['streams'][0]['width']
intervals = duration // parts
intervals = int(intervals)
interval_list = [(i * intervals, (i + 1) * intervals) for i in range(parts)]
result = []
for i in range(0,parts):
if os.path.exists(images_dir+'/Image' + str(i) + '.jpg'):
os.remove(images_dir+'/Image' + str(i) + '.jpg')
i = 0
for item in interval_list:
out = (
ffmpeg
.input(file_name, ss=item[1])
.filter('scale', width, -1)
.output(images_dir+'/Image' + str(i) + '.jpg', vframes=1)
.run(capture_stdout=False)
)
img = open(images_dir+'/Image' + str(i) + '.jpg',"rb").read()
pickled_object = pickle.dumps(img)
decode_output = "decode-output-image"+activation_id+"-"+str(i)
r.set(decode_output,pickled_object)
result.append('Image'+str(i)+'.jpg')
i += 1
remote_path = "/home/faasapp/Desktop/anubhav/sprocket-decode/"+images_dir
try:
sftp.chdir(remote_path) # Test if remote_path exists
except IOError:
sftp.mkdir(remote_path) # Create remote_path
sftp.chdir(remote_path)
current_path = os.getcwd()
sftp.put_d(current_path+"/"+images_dir,preserve_mtime=True,remotepath=remote_path)
end = time1.time()
exec_time = end-start
print(json.dumps({"decode_output":result,
"activation_id": str(activation_id),
"parts": parts,
"file_link":dwn_link,
"exec_time_decode":exec_time
}))
if __name__ == "__main__":
main()

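The JSON that `decode.py` prints becomes the input params of the downstream contour and edge-detection actions, which read `activation_id`, `parts`, and `exec_time_decode` from it. A hedged sample with hypothetical values:

```
{
  "decode_output": ["Image0.jpg", "Image1.jpg"],
  "activation_id": "74a7b6c707d14973a7b6c707d1a97392",
  "parts": 2,
  "file_link": "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4",
  "exec_time_decode": 7.8
}
```
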
@ -0,0 +1,4 @@
{
"filename": "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4",
"parts": 10
}

@ -0,0 +1,6 @@
redis
ffmpeg-python
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,33 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN apk add ffmpeg
RUN pip install opencv-python
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD encode.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,3 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/encode-image-1
wsk -i action create encode-action --docker 10.129.28.219:5000/encode-image-1
wsk -i action update encode-action --docker 10.129.28.219:5000/encode-image-1 encode.py --timeout 400000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t "$IMAGE_NAME" .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi

docker push "$IMAGE_NAME"
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,126 @@
#!/usr/bin/env python3
import ffmpeg
import cv2
import time
from io import BytesIO
import os
import sys
import redis
import pickle
import json
from PIL import Image
import pysftp
import logging
logging.basicConfig(level=logging.INFO)
def main():
    print("Inside encode\n")
    start = time.time()
    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None
    try:
        sftp = pysftp.Connection(
            host="10.129.28.219",
            username="faasapp",
            password="1234",
            cnopts=cnopts
        )
        logging.info("connection established successfully")
    except Exception:
        logging.error("failed to establish connection to targeted server")
        sys.exit(1)

    filtered_dir = "filtered-images"
    if not os.path.isdir(filtered_dir):
        os.mkdir(filtered_dir)

    remote_path = "/home/faasapp/Desktop/anubhav/sprocket-filter/" + filtered_dir
    remote_upload_path = "/home/faasapp/Desktop/anubhav/sprocket-encode/" + filtered_dir
    try:
        sftp.chdir(remote_path)          # test if remote_path exists
    except IOError:
        sftp.mkdir(remote_path)          # create remote_path
        sftp.chdir(remote_path)
    try:
        sftp.chdir(remote_upload_path)   # test if remote_upload_path exists
    except IOError:
        sftp.mkdir(remote_upload_path)   # create remote_upload_path
        sftp.chdir(remote_upload_path)

    current_path = os.getcwd()
    # Pull the filtered frames produced by the filter action, then mirror
    # them to this action's upload directory on the remote server
    sftp.get_d(remote_path, preserve_mtime=True, localdir=filtered_dir)
    sftp.put_d(current_path + "/" + filtered_dir, preserve_mtime=True, remotepath=remote_upload_path)

    path = current_path + "/" + filtered_dir + "/"
    output_path = "output.avi"
    images = [path + i for i in os.listdir(path)]
    # Sort frames by their numeric index; a plain lexicographic sort would
    # put e.g. filtered-Image10 before filtered-Image2
    images.sort(key=lambda p: int(''.join(ch for ch in os.path.basename(p) if ch.isdigit())))

    # cv2_fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    cv2_fourcc = cv2.VideoWriter_fourcc(*'MJPG')
    frame = cv2.imread(images[0])
    height, width = frame.shape[:2]
    video = cv2.VideoWriter(output_path, cv2_fourcc, 3, (width, height), 1)
    for i in range(len(images)):
        video.write(cv2.imread(images[i]))
        print('frame', i + 1, 'of', len(images))
    video.release()

    output_video_size = os.stat(output_path).st_size
    upload_path = "/home/faasapp/Desktop/anubhav/sprocket-decode/output.avi"
    sftp.put(output_path, preserve_mtime=True, remotepath=upload_path)

    # r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    decode_execution_time = params["exec_time_decode"]
    filter_execution_time = params["exec_time_filter"]
    parts = params["parts"]

    end = time.time()
    exec_time = end - start
    total_time = decode_execution_time + filter_execution_time + exec_time
    print(json.dumps({"encode_output": output_path,
                      "number_of_images_processed": parts,
                      "activation_id": str(activation_id),
                      "exec_time_filter": filter_execution_time,
                      "exec_time_decode": decode_execution_time,
                      "exec_time_encode": exec_time,
                      "workflow_execution_time": total_time,
                      "output_video_size_in_bytes": output_video_size
                      }))

if __name__ == "__main__":
    main()

@ -0,0 +1,11 @@
opencv-python
redis
ffmpeg-python
zlib-state
pilgram
Pillow==6.2.2
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,29 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN apk add ffmpeg
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD filter.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,34 @@
Blackbox Actions
================
1. Download and install the OpenWhisk CLI.
2. Install the OpenWhisk Docker action skeleton.
3. Add user code.
4. Build the image.
5. Push the image.
6. Test out the action with the CLI.
The script `buildAndPush.sh` is provided for your convenience. The following command sequence
runs the included example Docker action container using OpenWhisk.
```
# install dockerSkeleton with example
wsk sdk install docker

# change working directory
cd dockerSkeleton

# build/push; the argument is your Docker Hub user name and a valid docker image name
./buildAndPush.sh <dockerhub username>/whiskexample

# create docker action
wsk action create dockerSkeletonExample --docker <dockerhub username>/whiskexample

# invoke created action
wsk action invoke dockerSkeletonExample --blocking
```
The executable file must be located in the `/action` folder.
The name of the executable must be `/action/exec` and can be any file with executable permissions.
The sample docker action runs `example.c` by copying and building the source inside the container
as `/action/exec` (see `Dockerfile` lines 7 and 14).
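For a Python blackbox action, a minimal `/action/exec` could look like the sketch below (an illustration, not part of the skeleton): the action proxy passes the activation parameters as a JSON string in `argv[1]` and expects a JSON object printed to stdout.

```python
#!/usr/bin/env python3
# Minimal sketch of a Python /action/exec: parameters arrive as a JSON
# string in argv[1]; the result must be a JSON object on stdout.
import sys
import json

params = json.loads(sys.argv[1]) if len(sys.argv) > 1 else {}
print(json.dumps({"echo": params}))
```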

@ -0,0 +1,2 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/filter-image-1
wsk -i action create filter-action --docker 10.129.28.219:5000/filter-image-1

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t "$IMAGE_NAME" .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi

docker push "$IMAGE_NAME"
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,96 @@
#!/usr/bin/env python3
import os
import time
from io import BytesIO
import redis
import pickle
import subprocess
import json
import sys
import ffmpeg
from PIL import Image
import pysftp
import pilgram
import logging
from urllib.request import urlopen,urlretrieve
logging.basicConfig(level=logging.INFO)
def main():
    start = time.time()
    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None
    try:
        sftp = pysftp.Connection(
            host="10.129.28.219",
            username="faasapp",
            password="1234",
            cnopts=cnopts
        )
        logging.info("connection established successfully")
    except Exception:
        logging.error("failed to establish connection to targeted server")
        sys.exit(1)

    filtered_dir = "filtered-images"
    if not os.path.isdir(filtered_dir):
        os.mkdir(filtered_dir)

    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    decode_activation_id = params["activation_id"]
    decode_execution_time = params["exec_time_decode"]
    parts = params["parts"]

    filtered_result = []
    for i in range(0, parts):
        # Fetch the decoded frame from redis
        decode_output = "decode-output-image" + decode_activation_id + "-" + str(i)
        load_image = pickle.loads(r.get(decode_output))

        # Apply the lo-fi filter and save the result locally
        im = Image.open(BytesIO(load_image))
        filterimg = filtered_dir + '/filtered-Image' + str(i) + '.jpg'
        pilgram.lofi(im).save(filterimg)

        # Store the filtered frame back into redis for downstream actions
        filtered_image = open(filterimg, "rb").read()
        pickled_object = pickle.dumps(filtered_image)
        filter_output = "filter-output-image" + activation_id + "-" + str(i)
        r.set(filter_output, pickled_object)
        filtered_result.append(filterimg)

    current_path = os.getcwd()
    # Push filtered images to the remote faasapp server using sftp
    remote_path = "/home/faasapp/Desktop/anubhav/sprocket-filter/" + filtered_dir
    try:
        sftp.chdir(remote_path)   # test if remote_path exists
    except IOError:
        sftp.mkdir(remote_path)   # create remote_path
        sftp.chdir(remote_path)
    sftp.put_d(current_path + "/" + filtered_dir, preserve_mtime=True, remotepath=remote_path)

    end = time.time()
    exec_time = end - start
    print(json.dumps({"filter_output": filtered_result,
                      "activation_id": str(activation_id),
                      "parts": parts,
                      "exec_time_filter": exec_time,
                      "exec_time_decode": decode_execution_time
                      }))

if __name__ == "__main__":
    main()

Binary files not shown: ten image files added in this commit (59, 76, 94, 64, 46, 29, 34, 28, 33 and 41 KiB).

@ -0,0 +1,9 @@
ffmpeg-python
redis
pilgram
zlib-state
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9
Pillow==6.2.2

@ -0,0 +1,28 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD test.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t "$IMAGE_NAME" .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi

docker push "$IMAGE_NAME"
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,4 @@
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9

@ -0,0 +1,26 @@
#!/usr/bin/env python3
import os
import json
import sys
def main():
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    number = params["number"]
    res = number + 2
    result = {"activation_id": str(activation_id),
              "number": number,
              "result": res,
              "message": "Hello yayy"}
    print(json.dumps(result))
    return result

if __name__ == "__main__":
    main()
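For reference, invoking this action with `{"number": 16}` yields a result of 18. A local smoke test (a sketch; outside OpenWhisk the activation id is unset):

```python
# Sketch: run test.py locally the way the action proxy would.
import json
import subprocess

out = subprocess.run(
    ["python3", "test.py", json.dumps({"number": 16})],
    capture_output=True, text=True, check=True,
).stdout
print(json.loads(out))  # {'activation_id': 'None', 'number': 16, 'result': 18, 'message': 'Hello yayy'}
```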

@ -0,0 +1,454 @@
<h1> DAGit </h1>
<h3>Currently being developed by Anubhav Jana, IITB</h3>
<h4>This serverless FaaS platform supports individual function registrations, DAG registrations, and trigger registrations associated with DAGs/functions. The platform also supports various DAG primitives, which are described in this document for reference.</h4>
<h2> Guide: Register a Function </h2>
<h4> This section will guide you through registering a function. The following prerequisites must be fulfilled before you register a function </h4>
* Dockerfile - based on which the image will be built to run your function
* Python file - the application logic for the action/function (in this example, "test.py")
* requirements.txt - list all dependent pip packages in this file. If you have no library dependencies, submit an empty requirements.txt
<h5> You must have the above 3 files before you register the function </h5>
<h4> Following is the sample code <u>register_function.py</u> to register a function. This will create a new function named "testaction" and register it in our function store. The url endpoint is: /register/function/function_name</h4>
<h4> register_function.py </h4>
```python
import requests
import sys
import json

def server():
    url = "http://10.129.28.219:5001/register/function/testaction"
    files = [
        ('pythonfile', open(sys.argv[1], 'rb')),
        ('dockerfile', open(sys.argv[2], 'rb')),
        ('requirements.txt', open(sys.argv[3], 'rb'))
    ]
    reply = requests.post(url=url, files=files, verify=False)
    print(reply.json())

def main():
    server()

if __name__=="__main__":
    main()
```
* <b>Usage:</b> python3 register_function.py test.py Dockerfile requirements.txt
<h2> Guide: Register a DAG </h2>
<h4> This section will guide you through registering a DAG. The following prerequisites must be fulfilled before you register a DAG </h4>
* dag.json - a JSON specification file defining the DAG. The accepted DAG format and a sample example are provided later in this readme.
<h4> Following is the sample code <u>dag_register.py</u> to register a DAG. This will register a new DAG in our DAG store. The url endpoint is: /register/dag</h4>
<h4> dag_register.py </h4>
```python
import requests
import sys
import json

def server():
    url = "http://10.129.28.219:5001/register/dag"
    input_json_file = open(sys.argv[1])
    params = json.load(input_json_file)
    reply = requests.post(url=url, json=params, verify=False)
    print(reply.json())

def main():
    server()

if __name__=="__main__":
    main()
```
* <b>Usage:</b> python3 dag_register.py dag.json
<h2> Guide: Register a Trigger </h2>
<h4> This section will guide you through registering a trigger. The following prerequisites must be fulfilled before you register a trigger </h4>
* trigger.json - a JSON specification file defining the trigger. The accepted trigger format and a sample example are provided in this readme.
<h4>Accepted Trigger Format</h4>
The trigger specification is a JSON file with the fields listed below.
<h4> Trigger Fields </h4>
* "trigger_name" : Name of the trigger. Type accepted is string
* "type": Type specifies whether the trigger is for function or dag. Accepted values are "dag" and "function"
* "trigger": Specifies the endpoint route
* "dags": If "type" field is specified as "dag","dags" will accept a list of dags to trigger (type = list). Else keep it as ""
* "functions": If "type" field is specified as "function","functions" will accept a list of functions to trigger (type = list). Else keep it as ""
<h3>Example format of trigger.json</h3>
```json
{
    "trigger_name": "mydagtrigger",
    "type": "dag",
    "trigger": "/run/<dag_name>/<param_json>",
    "dags": ["odd-even-test", "dummy-dag"],
    "functions": ""
}
```
```json
{
    "trigger_name": "myfunctiontrigger",
    "type": "function",
    "trigger": "/run/action/<action_name>/<param_json>",
    "dags": "",
    "functions": ["odd-even-action"]
}
```
<h4> Following is the sample code <u>trigger_register.py</u> to register a trigger. This will register a new trigger in our trigger store. The url endpoint is: /register/trigger</h4>
<h4> trigger_register.py </h4>
```python
import requests
import sys
import json

def server():
    url = "http://10.129.28.219:5001/register/trigger/"
    input_json_file = open(sys.argv[1])
    params = json.load(input_json_file)
    reply = requests.post(url=url, json=params, verify=False)
    print(reply.json())

def main():
    server()

if __name__=="__main__":
    main()
```
* <b>Usage:</b> python3 trigger_register.py trigger.json
<h2> List of available endpoints </h2>
* http://10.129.28.219:5001/register/function/<function_name>
* http://10.129.28.219:5001/run/dummy-dag/{"number":16}
* http://10.129.28.219:5001/run/odd-even-test/{"number":16}
* http://10.129.28.219:5001/action/odd-even-action/{"number":16}
* http://10.129.28.219:5001/view/<dag_id>
* http://10.129.28.219:5001/view/activation/<function_activation_id>
* http://10.129.28.219:5001/view/dags
* http://10.129.28.219:5001/view/dag/odd-even-test-2
* http://10.129.28.219:5001/register/dag
* http://10.129.28.219:5001/list/actions
* http://10.129.28.219:5001/view/dag/primitives
* http://10.129.28.219:5001/
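For example, a registered DAG can be triggered programmatically by calling its run endpoint (a minimal sketch, assuming the odd-even-test DAG from this document and a POST request, as used by the registration scripts above):

```python
# Sketch: trigger the odd-even-test DAG with {"number": 16} via its run endpoint.
import requests

url = 'http://10.129.28.219:5001/run/odd-even-test/{"number":16}'
reply = requests.post(url=url, verify=False)
print(reply.json())
```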
<h3>Supported DAG Primitives</h3>
<!-- ![SUPPORTED DAG PRIMITIVES](/home/faasapp/Desktop/anubhav/controlplane/images/dag_primitives.png) -->
<img src="/home/faasapp/Desktop/anubhav/controlplane/images/dag_primitives.png" alt="dag_primitive" style="width:700px;height:500px;">
<h2>Accepted DAG Format</h2>
The DAG specification captures both control dependencies and data dependencies.
<h4> DAG Fields </h4>
* "name" : Name of the DAG
* "node_id": Name of the function/action
* "node_id_label": Name you want to give to the node
* "primitive": Type of primitive the action supports - condition,parallel,serial(sequential)
* "condition": If primitive type is "condition", then you should provide the following fields "source", "operator" and "target", else you should leave it as ""
* "source": Specify any one of the response keys of the current node_id. For e.g. if one of the keys in response json is "result", and you want to provide a condition that if result=="even", then specify "source" as "result" and "target" as "even"
* "operator": Mathematical operations like "equals", "greater_than" , "less_than", "greater_than_equals", "less_than_equals" are accepted.
* "target": Specify the target value. It can accept both integer and string.
* "next": Specify the name of next node_id to be executed. If primitive = "parallel", "next" will take list of node_ids, else it will accept a single node_id in "<node id>" format. If this is the last node_id(ending node of the workflow), keep it as "".
* "branch_1": Specify node_id if primitive == condition else keep "". This is the target branch which will execute if condition is true
* "branch_2": Specify node_id if primitive == condition else keep "". This is the alternate branch which will execute if condition is false
* "arguments": Keep it blank for each node_id. It will get populated with json when the DAG is instantiated with the trigger
* "outputs_from": Specify the list of node_id/node_ids whose output current node_id needs to consume. This is for data dependancy.
```
{
    "name": "<string>",
    "dag": [
        {
            "node_id": "<string>",
            "properties":
            {
                "node_id_label": "<string>",
                "primitive": "<condition | parallel | serial>",
                "condition":
                {
                    "source": "<key from the node_id result json>",
                    "operator": "<equals | greater_than | less_than | ...>",
                    "target": "<string | integer>"
                },
                "next": "<next node_id to be executed; a list of node_ids if primitive = parallel, a single node_id if primitive = serial>",
                "branch_1": "<node_id if primitive == condition, else ''>",
                "branch_2": "<node_id if primitive == condition, else ''>",
                "arguments": {},
                "outputs_from": "<list>"
            }
        },
        ...
    ]
}
```
<h2>Sample Example Usage</h2>
<!-- ![Odd-Even-Test](/home/faasapp/Desktop/anubhav/controlplane/images/odd-even-test-dag.png) -->
<img src="/home/faasapp/Desktop/anubhav/controlplane/images/odd-even-test-dag.png" alt="odd-even-action" style="width:700px;height:500px;">
```json
{
    "name": "odd-even-test",
    "dag": [
        {
            "node_id": "odd-even-action",
            "properties":
            {
                "label": "Odd Even Action",
                "primitive": "condition",
                "condition":
                {
                    "source": "result",
                    "operator": "equals",
                    "target": "even"
                },
                "next": "",
                "branch_1": "even-print-action",
                "branch_2": "odd-print-action",
                "arguments": {},
                "outputs_from": []
            }
        },
        {
            "node_id": "even-print-action",
            "properties":
            {
                "label": "Even Print Action",
                "primitive": "parallel",
                "condition": {},
                "next": ["increment-action", "multiply-action"],
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["odd-even-action"]
            }
        },
        {
            "node_id": "increment-action",
            "properties":
            {
                "label": "INCREMENT ACTION",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["even-print-action"]
            }
        },
        {
            "node_id": "multiply-action",
            "properties":
            {
                "label": "MULTIPLY ACTION",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["even-print-action"]
            }
        },
        {
            "node_id": "odd-print-action",
            "properties":
            {
                "label": "Odd Print Action",
                "primitive": "serial",
                "condition": {},
                "next": "prime-check-action",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["odd-even-action"]
            }
        },
        {
            "node_id": "prime-check-action",
            "properties":
            {
                "label": "Prime Check Action",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["odd-print-action"]
            }
        }
    ]
}
```
<h2>Handle output from multiple actions</h2>
<h4>Suppose you want to merge the outputs of two actions action_1 and action_2 in your action_3. Then action_3 must include the following lines to process the incoming inputs from action_1 and action_2. This applies to the merging primitive as well as to any action handling output from multiple actions.</h4>
* "key_action_1" refers to a key from the action_1 response which you want to use in action_3
* "key_action_2" refers to a key from the action_2 response which you want to use in action_3
```python
params = json.loads(sys.argv[1])
op_1 = params["__ow_body"][0]["key_action_1"]
op_2 = params["__ow_body"][1]["key_action_2"]
```
Use op_1 and op_2 in your downstream processing.
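Putting it together, a merging action_3 might look like the sketch below. The keys "key_action_1" and "key_action_2" are hypothetical placeholders for whatever keys your upstream actions actually emit:

```python
#!/usr/bin/env python3
# Sketch of an action_3 that merges the outputs of two upstream actions.
# "key_action_1" and "key_action_2" are placeholder response keys.
import os
import sys
import json

def main():
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    op_1 = params["__ow_body"][0]["key_action_1"]   # output of action_1
    op_2 = params["__ow_body"][1]["key_action_2"]   # output of action_2
    print(json.dumps({"activation_id": str(activation_id),
                      "merged_result": [op_1, op_2]}))

if __name__ == "__main__":
    main()
```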
<h2>MongoDB - DAG Store</h2>
DAG Registration stores the given dag specification to a mongodb database named <span style="text-decoration: underline; color: red;">dag_store</span> and collection named <span style="text-decoration: underline; color: red;">dags</span>
<h5> Using mongo </h5>
* sudo service mongodb start: Starts the mongodb service
* mongo: Starts the mongo shell
* show dbs: Lists all databases
* use dag_store: Creates (if not present) and switches to this database
* db: View the current database
## Sample usage ##
```
use dag_store
db.dag_metadata.deleteOne({"_id" : ObjectId("63f523cac540a53983362751")})
db.dags.deleteOne({"_id" : ObjectId("63f523cac540a53983362751")})
db.dags.find()
db.dag_metadata.find()
db.dags.find({}, {"_id": 1, "name": 1})
db.dags.find({"name": "odd-even-test-2"})

use function_store
db.functions.find()
db.functions.deleteOne({"_id" : ObjectId("63fb33dd52f32fb6cb755517")})

use trigger_store
db.triggers.find()
db.triggers.deleteOne({"_id" : ObjectId("6400b57040aa62f477087c07")})
```
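The same stores can be inspected from Python (a sketch, assuming pymongo is installed and mongod is reachable on localhost):

```python
# Sketch: query the DAG store with pymongo instead of the mongo shell.
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017/")
dag_store = client["dag_store"]

# List all registered DAGs by id and name
for dag in dag_store["dags"].find({}, {"_id": 1, "name": 1}):
    print(dag)

# Fetch one DAG by name
print(dag_store["dags"].find_one({"name": "odd-even-test-2"}))
```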