Added image processing functions: blur, denoise, thresholding, resize, rotate

anubhav
Nadesh Seen 2 years ago
parent 0b3d51c30b
commit 21e5405260

@ -18,6 +18,8 @@ requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import pymongo
import shutil
import trigger_gateway
app = Flask(__name__)
@ -27,14 +29,9 @@ responses = []
queue = []
list_of_func_ids = []
def hello():
    print("Hello")
# def combine_json_files(files_list):  # 2 JSON files can't have the same key; duplicates would be ambiguous
#     combined_data = {}
#     for file in files_list:
#         with open(file, "r") as f:
#             data = json.load(f)
#         combined_data.update(data)
#     return combined_data
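# A minimal collision-safe variant is sketched below (an assumption, not part of
# this commit): it raises on duplicate keys instead of silently overwriting them,
# which is the ambiguity the note above is worried about.
# def combine_json_files_safe(files_list):
#     combined_data = {}
#     for file in files_list:
#         with open(file, "r") as f:
#             data = json.load(f)
#         duplicates = combined_data.keys() & data.keys()
#         if duplicates:
#             raise ValueError("duplicate keys across files: " + str(duplicates))
#         combined_data.update(data)
#     return combined_data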
def preprocess(filename):
    with open(filename) as f:
@ -57,8 +54,6 @@ def execute_thread(action,redis,url,json):
    responses.append(reply.json())
def handle_parallel(queue,redis,action_properties_mapping,parallel_action_list):
    thread_list = []
    output_list = []  # List to store the output of actions whose outputs are required by downstream operations
@ -123,7 +118,7 @@ def submit_dag_metadata(dag_metadata):
    mycol = mydb["dag_metadata"]
    try:
        cursor = mycol.insert_one(dag_metadata)
        print("OBJECT ID GENERATED", cursor.inserted_id)
        # print("OBJECT ID GENERATED", cursor.inserted_id)
        data = {"message": "success"}
        return json.dumps(data)
    except Exception as err:
except Exception as err:
@ -135,7 +130,7 @@ def home():
data = {"message": "Hello,welcome to create and manage serverless workflows.","author":"Anubhav Jana"}
return jsonify(data)
@app.route('/list/actions', methods=['GET'])
@app.route('/view/functions', methods=['GET'])
def list_actions():
    list_of_actions = []
    stream = os.popen('wsk -i action list')
@ -216,8 +211,15 @@ def register_function(function_name):
    # wait for the process to complete and get its output
    output, errors = process.communicate()
    # create action, register action with api, populate its mapping
    print("OUTPUT---------", output)
    print("ERRORS---------", errors)
    # if(errors):
    #     print("There is an error building the docker file")
    #     data = {"message": "fail", "reason": "docker build failed"}
    #     return json.dumps(data)
    # else:
    # create action, register action with api, populate its mapping
    subprocess.call(['./create_action.sh', destination, docker_image_name, function_name])
    subprocess.call(['./register.sh', api_name, path_name, function_name])
    subprocess.call(['bash', './actions.sh'])
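    # A hedged sketch of the error handling the commented block above gestures at
    # (an assumption, not the committed behavior): rely on the build's exit code
    # rather than parsing its stdout/stderr text.
    # if process.returncode != 0:
    #     data = {"message": "fail", "reason": "docker build failed"}
    #     return json.dumps(data)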
@ -235,11 +237,11 @@ def register_function(function_name):
data = {"message":"fail","reason":e}
return json.dumps(data)
# data = {"message":"success"}
# return json.dumps(data)
# data = {"message":"success"}
# return json.dumps(data)
@app.route('/register/dag',methods=['POST'])
@app.route('/register/dag/',methods=['POST'])
def register_dag():
    dag_json = request.json
    myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
@ -270,7 +272,7 @@ def view_dag(dag_name):
dag_info_map["Number_of_nodes-->"] = len(dag_info_list)
dag_info_map["Starting_Node-->"] = dag_info_list[0]["node_id"]
for dag_items in dag_info_list:
node_info_map = {}
if(len(dag_items["properties"]["outputs_from"])==0):
@ -305,6 +307,38 @@ def view_dags():
    # Format the JSON string with indentation
    formatted_json = json.dumps(data, indent=4)
    return formatted_json
@app.route('/view/triggers', methods=['GET'])
def view_triggers():
    myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
    mydb = myclient["trigger_store"]
    mycol = mydb["triggers"]
    document = mycol.find()
    data = list(document)
    # Serialize the data to JSON
    json_data = json.dumps(data, default=str)
    json_string = '{"trigger":'+str(json_data)+'}'
    data = json.loads(json_string)
    # Format the JSON string with indentation
    formatted_json = json.dumps(data, indent=4)
    return formatted_json

@app.route('/view/trigger/<trigger_name>', methods=['GET'])
def view_trigger(trigger_name):
    print(request.url)
    myclient = pymongo.MongoClient("mongodb://127.0.0.1:27017")
    mydb = myclient["trigger_store"]
    mycol = mydb["triggers"]
    query = {"trigger_name": trigger_name}
    projection = {"_id": 0, "trigger_name": 1, "type": 1, "trigger": 1, "dags": 1, "functions": 1}
    document = mycol.find(query, projection)
    data = list(document)
    # print(data)
    json_data = json.dumps(data, default=str)
    json_string = '{"trigger":'+str(json_data)+'}'
    data = json.loads(json_string)
    formatted_json = json.dumps(data, indent=4)
    return formatted_json
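# An alternative sketch (an assumption, not the committed code): the hand-built
# JSON string above can be avoided by constructing the response as a dict, e.g.
#   data = {"trigger": list(document)}
#   return json.dumps(data, indent=4, default=str)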
# EXAMPLE URL: http://10.129.28.219:5001/view/activation/8d7df93e8f2940b8bdf93e8f2910b80f
@app.route('/view/activation/<activation_id>', methods=['GET', 'POST'])
@ -319,7 +353,11 @@ def list_activations(activation_id):
d["duration"] = res["duration"]
d["status"] = res["response"]["status"]
d["result"] = res["response"]["result"]
return json.dumps(d)
return({"action_name":res["name"],
"duration": res["duration"],
"status": res["response"]["status"],
"result":res["response"]["result"]
})
# EXAMPLE URL: http://10.129.28.219:5001/view/76cc8a53-0a63-47bb-a5b5-9e6744f67c61
@app.route('/view/<dag_id>',methods=['GET'])
@ -334,22 +372,24 @@ def view_dag_metadata(dag_id):
response = {"dag_metadata":data}
return json.dumps(response)
# EXAMPLE URL: http://10.129.28.219:5001/run/action/odd-even-action/{"number":16}
@app.route('/run/action/<action_name>/<param_json>', methods=['GET', 'POST'])
def execute_action(action_name,param_json):
# EXAMPLE URL: http://10.129.28.219:5001/run/action/odd-even-action
# http://10.129.28.219:5001/run/action/decode-function
@app.route('/run/action/<action_name>/', methods=['POST'])
def execute_action(action_name):
    script_file = './actions.sh'
    subprocess.call(['bash', script_file])
    preprocess("action_url.txt")
    url = action_url_mappings[action_name]
    json_data = json.loads(param_json)
    reply = requests.post(url=url, json=json_data, verify=False)
    # json_data = json.loads(request.json)
    reply = requests.post(url=url, json=request.json, verify=False)
    return reply.json()
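# Hedged usage sketch for the changed route (client code is an assumption, not
# part of this commit): parameters now travel in the POST body instead of the
# URL path segment.
#   import requests
#   reply = requests.post("http://10.129.28.219:5001/run/action/odd-even-action/",
#                         json={"number": 16}, verify=False)
#   print(reply.json())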
# EXAMPLE URL: http://10.129.28.219:5001/run/dag/odd-even-test/{"number":16}
@app.route('/run/dag/<dag_name>/<param_json>', methods=['GET', 'POST'])
def execute_dag(dag_name,param_json):
@app.route('/run/dag/<dag_name>/', methods=['GET', 'POST'])
def execute_dag(dag_name):
print("------------------------------------DAG START-----------------------------------------------")
unique_id = uuid.uuid4()
print("DAG UNIQUE ID----------",unique_id)
@ -379,7 +419,7 @@ def execute_dag(dag_name,param_json):
    for dag_item in dag_data:
        if(flag==0):  # To indicate the first action in the DAG
            queue.append(dag_item["node_id"])
            action_properties_mapping[dag_item["node_id"]]["arguments"] = json.loads(param_json)
            action_properties_mapping[dag_item["node_id"]]["arguments"] = request.json
    while(len(queue)!=0):
        flag = flag+1
        action = queue.pop(0)
@ -477,18 +517,18 @@ def execute_dag(dag_name,param_json):
dag_metadata["function_activation_ids"] = list_of_func_ids
print("DAG SPEC AFTER WORKFLOW EXECUTION--------\n")
print(action_properties_mapping)
print('\n')
# print("DAG SPEC AFTER WORKFLOW EXECUTION--------\n")
# print(action_properties_mapping)
# print('\n')
submit_dag_metadata(dag_metadata)
print("DAG ID---->FUNC IDS",dag_metadata)
print('\n')
print('INTERMEDIATE OUTPUTS FROM ALL ACTIONS-----\n')
get_redis_contents(redis_instace)
# print("DAG ID---->FUNC IDS",dag_metadata)
print('\n')
# print('INTERMEDIATE OUTPUTS FROM ALL ACTIONS-----\n')
# get_redis_contents(redis_instace)
# print('\n')
redis_instace.flushdb()
print("Cleaned up in-memory intermediate outputs successfully\n")
print("------------------------DAG END-----------------------------------")
if(isinstance(reply,list)):
return({"dag_id": dag_metadata["dag_id"],
"result": reply

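The /run/dag route changed the same way as /run/action: the DAG's initial arguments now arrive in the POST body instead of a <param_json> path segment. A hedged invocation sketch (client code is an assumption, not part of this commit):

    import requests
    reply = requests.post("http://10.129.28.219:5001/run/dag/odd-even-test/",
                          json={"number": 16}, verify=False)
    print(reply.json())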
@ -2,6 +2,7 @@
# ./register.sh /decode-function /decode decode-action [SAMPLE USE]
api_name=$1
path_name=$2
action_name=$3
@ -23,6 +24,8 @@ wsk -i api create $api_name $path_name post $action_name --response-type json
# ./register.sh /dummy /dummy3 dummy3-action --response-type=json
# ./register.sh /image-blur-api /image-blur-path image-blur --response-type=json

@ -0,0 +1,27 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
# Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk add ffmpeg
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
# Ensure source assets are not drawn from the cache
# after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD decode.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,2 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/decode-function-image
wsk -i action create decode --docker 10.129.28.219:5000/decode-function-image --web=true --timeout=300000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,130 @@
#!/usr/bin/env python3
import os
import time
import redis
import pickle
import subprocess
import logging
import json
import sys
import ffmpeg
import boto3
from botocore.exceptions import ClientError
from urllib.request import urlopen,urlretrieve
logging.basicConfig(level=logging.INFO)
def main():
    images_dir = "decoded-images"
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    r.flushdb()  # flush previous content, if any
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    # dwn_link = 'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4'
    params = json.loads(sys.argv[1])
    dwn_link = params["filename"]
    # Set how many frames to extract from the video.
    parts = params["parts"]
    file_name = 'decode_video.mp4'
    urlretrieve(dwn_link, file_name)
    if not os.path.isdir(images_dir):
        os.mkdir(images_dir)
    probe = ffmpeg.probe(file_name)
    video_stream = next((stream for stream in probe['streams'] if stream['codec_type'] == 'video'), None)
    duration = float(probe['streams'][0]['duration']) // 2  # renamed from `time` to avoid shadowing the time module
    width = int(video_stream['width'])
    intervals = int(duration // parts)
    interval_list = [(i * intervals, (i + 1) * intervals) for i in range(parts)]
    result = []
    for i in range(0, parts):
        if os.path.exists(images_dir+'/Image' + str(i) + '.jpg'):
            os.remove(images_dir+'/Image' + str(i) + '.jpg')
    i = 0
    for item in interval_list:
        # Grab one frame at the end of each interval, scaled to the source width
        out = (
            ffmpeg
            .input(file_name, ss=item[1])
            .filter('scale', width, -1)
            .output(images_dir+'/Image' + str(i) + '.jpg', vframes=1)
            .run(capture_stdout=False)
        )
        img = open(images_dir+'/Image' + str(i) + '.jpg', "rb").read()
        pickled_object = pickle.dumps(img)
        decode_output = "decode-output-image"+activation_id+"-"+str(i)
        r.set(decode_output, pickled_object)
        result.append('Image'+str(i)+'.jpg')
        i += 1
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')
    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)
    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)
    try:
        image_height = int(params["height"])
        image_width = int(params["width"])
        print(json.dumps({"image_url_links": url_list,
                          "activation_id": str(activation_id),
                          "parts": parts,
                          "height": image_height,
                          "width": image_width,
                          "file_link": dwn_link
                          }))
        return({"image_url_links": url_list,
                "activation_id": str(activation_id),
                "parts": parts,
                "height": image_height,
                "width": image_width,
                "file_link": dwn_link
                })
    except Exception as e:
        print(json.dumps({"image_url_links": url_list,
                          "activation_id": str(activation_id),
                          "parts": parts,
                          "file_link": dwn_link
                          }))
        return({"image_url_links": url_list,
                "activation_id": str(activation_id),
                "parts": parts,
                "file_link": dwn_link
                })

if __name__ == "__main__":
    main()

@ -0,0 +1,4 @@
{
"filename": "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4",
"parts": 10
}
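This payload is what decode.py reads from sys.argv[1] ("filename" and "parts"). A hedged sketch of posting it through the control plane's run-action route (the URL mirrors the example comments above; the client code is an assumption, not part of this commit):

    import json, requests
    with open("decode.json") as f:
        payload = json.load(f)
    reply = requests.post("http://10.129.28.219:5001/run/action/decode-function/",
                          json=payload, verify=False)
    print(reply.json())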

@ -0,0 +1,25 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD blur.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,92 @@
#!/usr/bin/env python3
import requests
import os
import boto3
import redis
import pickle
import json
import cv2
import sys
def main():
    images_dir = "blurred-images"
    if not os.path.isdir(images_dir):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    blurred_result = []
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0, parts):
            if os.path.exists(images_dir+'/blurred_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/blurred_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)
            img = cv2.imread(image_name)
            blurred_image = cv2.GaussianBlur(img, (15, 15), 0)
            output_image = images_dir+'/blurred_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, blurred_image)
            blurred_result.append('blurred_image_'+str(i)+'.jpg')
    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0, parts):
            if os.path.exists(images_dir+'/blurred_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/blurred_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            img = cv2.imread(image_name)
            blurred_image = cv2.GaussianBlur(img, (15, 15), 0)
            output_image = images_dir+'/blurred_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, blurred_image)
            blurred_result.append('blurred_image_'+str(i)+'.jpg')
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')
    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)
    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in blurred_result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)
    print(json.dumps({"blurred_image_url_links": url_list,
                      "activation_id": str(activation_id),
                      "parts": parts
                      }))
    return({"output_image_url_links": url_list,
            "activation_id": str(activation_id),
            "parts": parts
            })

if __name__ == "__main__":
    main()
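The except branch above is the standalone path: without an upstream decode activation, blur.py expects "image_url_links" in its params. A hedged standalone invocation sketch (the route name comes from the register command below; the image URL is illustrative, and the client code is an assumption):

    import requests
    params = {"image_url_links": [
        "https://dagit-store.s3.ap-south-1.amazonaws.com/decoded-images/Image0.jpg",
    ]}
    reply = requests.post("http://10.129.28.219:5001/run/action/image-blur/",
                          json=params, verify=False)
    print(reply.json())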

@ -0,0 +1,3 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-blur-image
wsk -i action create image-blur --docker 10.129.28.219:5000/image-blur-image --web=true --timeout=300000
./register.sh /image-blur-api /image-blur-path image-blur --response-type=json

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,4 @@
requests
boto3
redis
opencv-python

@ -0,0 +1,25 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD denoise.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,3 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-denoise-image
./register.sh /image-denoise-api /image-denoise-path image-denoise --response-type=json
wsk -i action create image-denoise --docker 10.129.28.219:5000/image-denoise-image --web=true --timeout=300000

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,96 @@
#!/usr/bin/env python3
import requests
import os
import boto3
import redis
import pickle
import json
import cv2
import sys
def main():
    images_dir = "denoised-images"
    if not os.path.isdir(images_dir):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    denoised_result = []
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0, parts):
            if os.path.exists(images_dir+'/denoised_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/denoised_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)
            img = cv2.imread(image_name)
            denoised_image = cv2.bilateralFilter(img, 20, 100, 100)
            output_image = images_dir+'/denoised_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, denoised_image)
            denoised_result.append('denoised_image_'+str(i)+'.jpg')
    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0, parts):
            if os.path.exists(images_dir+'/denoised_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/denoised_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            img = cv2.imread(image_name)
            denoised_image = cv2.bilateralFilter(img, 20, 100, 100)
            output_image = images_dir+'/denoised_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, denoised_image)
            denoised_result.append('denoised_image_'+str(i)+'.jpg')
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')
    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)
    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in denoised_result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)
    print(json.dumps({"denoised_image_url_links": url_list,
                      "activation_id": str(activation_id),
                      "parts": parts
                      }))
    return({"denoised_image_url_links": url_list,
            "activation_id": str(activation_id),
            "parts": parts
            })

if __name__ == "__main__":
    main()

@ -0,0 +1,5 @@
requests
boto3
redis
opencv-python

@ -0,0 +1,25 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD resize.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,3 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-resize-image
wsk -i action create image-resize --docker 10.129.28.219:5000/image-resize-image --web=true --timeout=300000
./register.sh /image-resize-api /image-resize-path image-resize --response-type=json

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,5 @@
requests
boto3
redis
opencv-python

@ -0,0 +1,98 @@
#!/usr/bin/env python3
import requests
import os
import boto3
import redis
import pickle
import json
import cv2
import sys
def main():
    images_dir = "resized-images"
    if not os.path.isdir(images_dir):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    resized_result = []
    image_resize_height = params["height"]
    image_resize_width = params["width"]
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0, parts):
            if os.path.exists(images_dir+'/resized_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/resized_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)
            img = cv2.imread(image_name)
            # cv2.resize takes dsize as (width, height), so width goes first
            resized_image = cv2.resize(img, (image_resize_width, image_resize_height))
            output_image = images_dir+'/resized_image_'+str(i)+'.jpg'
            # Save the output image
            cv2.imwrite(output_image, resized_image)
            resized_result.append('resized_image_'+str(i)+'.jpg')
    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0, parts):
            if os.path.exists(images_dir+'/resized_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/resized_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            img = cv2.imread(image_name)
            resized_image = cv2.resize(img, (image_resize_width, image_resize_height))
            output_image = images_dir+'/resized_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, resized_image)
            resized_result.append('resized_image_'+str(i)+'.jpg')
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')
    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)
    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in resized_result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)
    print(json.dumps({"resized_image_url_links": url_list,
                      "activation_id": str(activation_id),
                      "parts": parts
                      }))
    return({"resized_image_url_links": url_list,
            "activation_id": str(activation_id),
            "parts": parts
            })

if __name__ == "__main__":
    main()

@ -0,0 +1,25 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
&& apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD rotate.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,3 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-rotate-image
wsk -i action create image-rotate --docker 10.129.28.219:5000/image-rotate-image --web=true --timeout=300000
./register.sh /image-rotate-api /image-rotate-path image-rotate --response-type=json

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,5 @@
requests
boto3
redis
opencv-python

@ -0,0 +1,97 @@
#!/usr/bin/env python3
import requests
import os
import boto3
import redis
import pickle
import json
import cv2
import sys
# Rotate an image by 90 degrees clockwise
def main():
    images_dir = "rotated-images"
    if not os.path.isdir(images_dir):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    rotated_result = []
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0, parts):
            if os.path.exists(images_dir+'/rotated_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/rotated_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)
            img = cv2.imread(image_name)
            # Rotate the image by 90 degrees
            rotated = cv2.rotate(img, cv2.ROTATE_90_CLOCKWISE)
            output_image = images_dir+'/rotated_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, rotated)
            rotated_result.append('rotated_image_'+str(i)+'.jpg')
    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0, parts):
            if os.path.exists(images_dir+'/rotated_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/rotated_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            img = cv2.imread(image_name)
            # Rotate the image by 90 degrees
            rotated = cv2.rotate(img, cv2.ROTATE_90_CLOCKWISE)
            output_image = images_dir+'/rotated_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, rotated)
            rotated_result.append('rotated_image_'+str(i)+'.jpg')
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')
    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)
    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in rotated_result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)
    print(json.dumps({"rotated_image_url_links": url_list,
                      "activation_id": str(activation_id),
                      "parts": parts
                      }))
    return({"rotated_image_url_links": url_list,
            "activation_id": str(activation_id),
            "parts": parts
            })

if __name__ == "__main__":
    main()

@ -0,0 +1,24 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton
ENV FLASK_PROXY_PORT 8080
## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base && apk add jpeg-dev zlib-dev libjpeg \
&& pip install --upgrade pip
RUN cd /action; pip install -r requirements.txt
RUN pip install opencv-python
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD threshold.py /action/exec
# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]

@ -0,0 +1,3 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-threshold-image
wsk -i action create image-thresholding --docker 10.129.28.219:5000/image-threshold-image --web=true --timeout=300000
./register.sh /image-thresholding-api /image-thresholding-path image-thresholding --response-type=json

@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"
IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"
# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi

@ -0,0 +1,5 @@
requests
boto3
redis
opencv-python

@ -0,0 +1,97 @@
#!/usr/bin/env python3
import os
import requests
import boto3
import redis
import pickle
import json
import cv2
import sys
# Image thresholding is a simple, yet effective, way of partitioning an image
# into a foreground and background. This image analysis technique is a type of
# image segmentation that isolates objects by converting grayscale images into
# binary images.
def main():
    images_dir = "thresholded-images"
    if not os.path.isdir(images_dir):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    thresholded_result = []
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0, parts):
            if os.path.exists(images_dir+'/thresholded_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/thresholded_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)
            img = cv2.imread(image_name)
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            _, thresh = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY)
            output_image = images_dir+'/thresholded_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, thresh)
            thresholded_result.append('thresholded_image_'+str(i)+'.jpg')
    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0, parts):
            if os.path.exists(images_dir+'/thresholded_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/thresholded_image_'+str(i)+'.jpg')
        for i in range(0, parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            img = cv2.imread(image_name)
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            _, thresh = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY)
            output_image = images_dir+'/thresholded_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, thresh)
            thresholded_result.append('thresholded_image_'+str(i)+'.jpg')
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')
    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)
    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in thresholded_result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)
    print(json.dumps({"thresholded_image_url_links": url_list,
                      "activation_id": str(activation_id),
                      "parts": parts
                      }))
    return({"thresholded_image_url_links": url_list,
            "activation_id": str(activation_id),
            "parts": parts
            })

if __name__ == "__main__":
    main()

@ -182,7 +182,7 @@ if __name__=="__main__":
<!-- ![SUPPORTED DAG PRIMITIVES](/home/faasapp/Desktop/anubhav/controlplane/images/dag_primitives.png) -->
<img src="/home/faasapp/Desktop/anubhav/controlplane/images/dag_primitives.png" alt="dag_primitive" style="width:700px;height:500px;">
<img src="./controlplane/images/dag_primitives.png" alt="dag_primitive" style="width:700px;height:500px;">
<h2>Accepted DAG Format</h2>
@ -266,7 +266,7 @@ DAG specification includes both control dependancy as well as the control depend
<h2>Sample Example Usage</h2>
<!-- ![Odd-Even-Test](/home/faasapp/Desktop/anubhav/controlplane/images/odd-even-test-dag.png) -->
<img src="/home/faasapp/Desktop/anubhav/controlplane/images/odd-even-test-dag.png" alt="odd-even-action" style="width:700px;height:500px;">
<img src="./controlplane/images/odd-even-test-dag.png" alt="odd-even-action" style="width:700px;height:500px;">
{
@ -404,51 +404,4 @@ op_2 = params["__ow_body"][1]["key_action_2"]
Use these op_1 and op_2 to process
##############################################
<h2>MongoDB - DAG Store</h2>
DAG registration stores the given DAG specification in a MongoDB database named <span style="text-decoration: underline; color: red;">dag_store</span>, in a collection named <span style="text-decoration: underline; color: red;">dags</span>
<h5> Using mongo </h5>
* sudo service mongodb start: Starts the mongodb service
* mongo: Starts the mongo shell
* show dbs: Lists all databases
* use dag_store: Creates (if not present) and switches to this database
* db: View the current database
## Sample usage ##
use dag_store
db.dag_metadata.deleteOne({"_id" : ObjectId("63f523cac540a53983362751")})
db.dags.deleteOne({"_id" : ObjectId("63f523cac540a53983362751")})
db.dags.find()
db.dag_metadata.find()
db.dags.find({},{"_id": 1,"name":1})
db.dags.find("name":"odd-even-test-2")
use function_store
db.functions.find()
db.functions.deleteOne({"_id" : ObjectId("63fb33dd52f32fb6cb755517")})
use trigger_store
db.triggers.find()
db.triggers.deleteOne({"_id" : ObjectId("6400b57040aa62f477087c07")})
##############################################
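The same inspection and clean-up can be scripted with pymongo. A minimal sketch, assuming mongod is on the default localhost port and mirroring the shell commands above (the ObjectId is illustrative):

    import pymongo
    from bson.objectid import ObjectId

    client = pymongo.MongoClient("mongodb://127.0.0.1:27017")
    dag_store = client["dag_store"]
    # List registered DAGs by id and name
    print(list(dag_store["dags"].find({}, {"_id": 1, "name": 1})))
    # Delete one metadata record by its ObjectId
    dag_store["dag_metadata"].delete_one({"_id": ObjectId("63f523cac540a53983362751")})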