commit 6632d78875
@ -0,0 +1,53 @@
openwhisk/
dockerSkeleton/
composer/
function_modules/sprocket-decode/*.mp4
function_modules/sprocket-decode/*.avi
note.txt
controlplane/dag1.png
controlplane/*.json
controlplane/action_url.txt
controlplane/createDAG.py
function_registration/
function_modules/dummy_*

function_modules/numpy-action

function_modules/sprocket-decode/images
function_modules/sprocket-decode/README.md
function_modules/sprocket-decode/build.txt
function_modules/sprocket-encode/filtered-images
function_modules/sprocket-encode/README.md

function_modules/assemble_images/contoured-images/*
function_modules/assemble_images/edge-detected-images/*

function_modules/assemble_images/contoured-images/
function_modules/assemble_images/edge-detected-images/

function_modules/assemble_images/README.md
function_modules/assemble_images/build.txt
function_modules/contour-finding/contoured-images
function_modules/contour-finding/build.txt
function_modules/contour-finding/README.md

function_modules/edge-detection/edge-detected-images
function_modules/edge-detection/build.txt
function_modules/edge-detection/README.md

function_modules/odd_even_check/build.txt
function_modules/odd_even_check/README.md
function_modules/odd_print/README.md
function_modules/odd_print/build.txt

function_modules/performance-testing

redis-input.json

flask_test.py
@ -0,0 +1,3 @@
AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
AWS_SECRET_ACCESS_KEY="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"
AWS_REGION="ap-south-1"
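The credentials above are consumed through environment variables (os.getenv('AWS_ACCESS_KEY_ID') and friends) by the boto3 clients elsewhere in this commit, and are also baked into the function Dockerfiles as ENV lines. A minimal sketch for loading this file into the environment of a local test run, assuming it is saved as .env (the filename is an assumption):

#!/usr/bin/env python3
# Hedged sketch: export the KEY="value" pairs above so the scripts in this commit,
# which read them via os.getenv(...), can see them. ".env" is a hypothetical filename.
import os

with open(".env") as f:
    for line in f:
        if "=" in line:
            key, value = line.strip().split("=", 1)
            os.environ[key] = value.strip('"')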
Binary file not shown.
@ -0,0 +1,127 @@
{
    "name": "odd-even-test",
    "dag": [
        {
            "node_id": "odd-even-action",
            "properties": {
                "label": "Odd Even Action",
                "primitive": "condition",
                "condition": {"source": "result", "operator": "equals", "target": "even"},
                "next": "",
                "branch_1": "even-print-action",
                "branch_2": "odd-print-action",
                "arguments": {},
                "outputs_from": []
            }
        },
        {
            "node_id": "even-print-action",
            "properties": {
                "label": "Even Print Action",
                "primitive": "parallel",
                "condition": {},
                "next": ["increment-action", "multiply-action"],
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["odd-even-action"]
            }
        },
        {
            "node_id": "increment-action",
            "properties": {
                "label": "INCREMENT ACTION",
                "primitive": "serial",
                "condition": {},
                "next": "dummy4-action",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["even-print-action"]
            }
        },
        {
            "node_id": "multiply-action",
            "properties": {
                "label": "MULTIPLY ACTION",
                "primitive": "serial",
                "condition": {},
                "next": "dummy4-action",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["even-print-action"]
            }
        },
        {
            "node_id": "dummy4-action",
            "properties": {
                "label": "Dummy 4",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["increment-action", "multiply-action"]
            }
        },
        {
            "node_id": "odd-print-action",
            "properties": {
                "label": "Odd Print Action",
                "primitive": "serial",
                "condition": {},
                "next": "prime-check-action",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["odd-even-action"]
            }
        },
        {
            "node_id": "prime-check-action",
            "properties": {
                "label": "Prime Check Action",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["odd-print-action"]
            }
        }
    ]
}
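A DAG specification like the one above is stored in MongoDB by the control plane's /register/dag/ route and later fetched by name in orchestrator.get_dag_json(). A hedged client sketch for registering it follows; the host and port mirror the example URLs that appear later in this commit, and the local filename is hypothetical.

#!/usr/bin/env python3
# Hedged sketch: register a DAG spec with DAGit's /register/dag/ route.
import json
import requests

with open("odd-even-test.json") as f:   # hypothetical local copy of the spec above
    dag_spec = json.load(f)

reply = requests.post("http://10.129.28.219:5001/register/dag/", json=dag_spec, verify=False)
print(reply.json())   # expected: {"message": "success"} once the document is in dag_store.dags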
@ -0,0 +1,51 @@
{
    "name": "dummy-dag",
    "dag": [
        {
            "node_id": "dummy1-action",
            "properties": {
                "label": "Dummy 1 Action",
                "primitive": "serial",
                "condition": {},
                "next": "dummy2-action",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": []
            }
        },
        {
            "node_id": "dummy2-action",
            "properties": {
                "label": "Dummy 2 Action",
                "primitive": "serial",
                "condition": {},
                "next": "dummy3-action",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["dummy1-action"]
            }
        },
        {
            "node_id": "dummy3-action",
            "properties": {
                "label": "Dummy 3 Action",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["dummy1-action", "dummy2-action"]
            }
        }
    ]
}
@ -0,0 +1,49 @@
{
    "name": "toonify",
    "dag": [
        {
            "node_id": "decode-function",
            "properties": {
                "label": "Decode Function",
                "primitive": "serial",
                "condition": {},
                "next": "image-bilateral-filter",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": []
            }
        },
        {
            "node_id": "image-bilateral-filter",
            "properties": {
                "label": "Cartoon effect Action",
                "primitive": "serial",
                "condition": {},
                "next": "encode-function",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["decode-function"]
            }
        },
        {
            "node_id": "encode-function",
            "properties": {
                "label": "Combine Images to Video",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["image-bilateral-filter"]
            }
        }
    ]
}
@ -0,0 +1,49 @@
{
    "name": "FaceBlurring",
    "dag": [
        {
            "node_id": "decode-function",
            "properties": {
                "label": "Decode Function",
                "primitive": "serial",
                "condition": {},
                "next": "face-detection",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": []
            }
        },
        {
            "node_id": "face-detection",
            "properties": {
                "label": "Detect Face",
                "primitive": "serial",
                "condition": {},
                "next": "image-blur",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["decode-function"]
            }
        },
        {
            "node_id": "image-blur",
            "properties": {
                "label": "Blur Faces",
                "primitive": "serial",
                "condition": {},
                "next": "",
                "branch_1": "",
                "branch_2": "",
                "arguments": {},
                "outputs_from": ["face-detection"]
            }
        }
    ]
}
@ -0,0 +1,78 @@
{
  "$schema": "http://json-schema.org/anubhavjana/schema#",
  "type": "object",
  "properties": {
    "dag": {
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "id": {
            "type": "string",
            "pattern": "^(odd-even-action|even-print-action|odd-print-action|prime-check-action)$"
          },
          "properties": {
            "type": "object",
            "properties": {
              "label": {
                "type": "string",
                "pattern": "^(Odd Even Action|Even Print Action|Odd Print Action|Prime Check Action)$"
              },
              "type": {
                "type": "string",
                "pattern": "^(conditional|serial|parallel)$"
              },
              "condition": {
                "type": "object",
                "properties": {
                  "source": {"type": "string", "enum": ["result"]},
                  "operator": {"type": "string", "enum": ["equals"]},
                  "target": {"type": "string", "enum": ["even"]}
                },
                "required": ["source", "operator", "target"],
                "additionalProperties": false
              },
              "next": {"type": "string", "pattern": "^(prime-check-action|)$"},
              "branch_1": {"type": "string", "pattern": "^(even-print-action|)$"},
              "branch_2": {"type": "string", "pattern": "^(odd-print-action|)$"},
              "arguments": {
                "type": "object",
                "properties": {
                  "number": {"type": "integer", "enum": [17]}
                },
                "additionalProperties": false
              }
            },
            "required": ["label", "type", "condition", "next", "branch_1", "branch_2", "arguments"],
            "additionalProperties": false
          }
        },
        "required": ["id", "properties"],
        "additionalProperties": false
      }
    }
  },
  "required": ["dag"],
  "additionalProperties": false
}
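This schema can be exercised with the third-party jsonschema package, which is not part of this commit's requirements and is therefore an assumption. Note that the DAG documents above key their nodes as node_id/primitive while the schema expects id/type, so validating them unchanged would be reported as a violation.

#!/usr/bin/env python3
# Hedged sketch: validate a DAG document against the schema above using the
# jsonschema library (an assumption; filenames below are hypothetical).
import json
from jsonschema import validate, ValidationError

with open("dag_schema.json") as f:
    schema = json.load(f)
with open("odd-even-test.json") as f:
    dag_doc = json.load(f)

try:
    validate(instance=dag_doc, schema=schema)
    print("DAG document conforms to the schema")
except ValidationError as err:
    print("Schema violation:", err.message)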
@ -0,0 +1,281 @@
#!/usr/bin/env python3

import sys
import requests
import uuid
import re
import subprocess
import threading
import queue
import redis
from flask import current_app
import pickle
import json
import os
import time
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from flask import Flask, request, jsonify, send_file
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import pymongo


# app = Flask(__name__)

action_url_mappings = {}  # Store action->url mappings
action_properties_mapping = {}  # Stores the action name and its corresponding properties
responses = []
queue = []
list_of_func_ids = []
dag_responses = []

x = 10


def preprocess(filename):
    with open(filename) as f:
        lines = f.readlines()
    action_url_list = []
    for line in lines:
        line = line.replace("\n", "")
        line = line.replace("/guest/", "")
        action_url_list.append(line)
    for item in action_url_list:
        action_name = item.split(' ')[0]
        url = item.split(' ')[1]
        action_url_mappings[action_name] = url


def execute_thread(action, redis, url, json):
    reply = requests.post(url=url, json=json, verify=False)
    list_of_func_ids.append(reply.json()["activation_id"])
    redis.set(action + "-output", pickle.dumps(reply.json()))
    responses.append(reply.json())


def handle_parallel(queue, redis, action_properties_mapping, parallel_action_list):
    thread_list = []
    output_list = []  # List to store the output of actions whose outputs are required by downstream operations

    for action in parallel_action_list:
        action_names = action_properties_mapping[action]["outputs_from"]
        next_action = action_properties_mapping[action]["next"]
        if(next_action != ""):
            if next_action not in queue:
                queue.append(next_action)
        if(len(action_names) == 1):  # if only the output of one action is required
            key = action_names[0] + "-output"
            output = pickle.loads(redis.get(key))
            action_properties_mapping[action]["arguments"] = output
        else:
            for item in action_names:
                key = item + "-output"
                output = pickle.loads(redis.get(key))
                output_list.append(output)
            action_properties_mapping[action]["arguments"] = output_list

        url = action_url_mappings[action]
        thread_list.append(threading.Thread(target=execute_thread, args=[action, redis, url, action_properties_mapping[action]["arguments"]]))
    for thread in thread_list:
        thread.start()
    for thread in thread_list:
        thread.join()
    action_properties_mapping[next_action]["arguments"] = responses
    return responses


def create_redis_instance():
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    return r


def get_dag_json(dag_name):
    myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
    mydb = myclient["dag_store"]
    mycol = mydb["dags"]
    query = {"name": dag_name}
    projection = {"_id": 0, "name": 1, "dag": 1}
    document = mycol.find(query, projection)
    data = list(document)
    return data


def submit_dag_metadata(dag_metadata):
    myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
    mydb = myclient["dag_store"]
    mycol = mydb["dag_metadata"]
    try:
        cursor = mycol.insert_one(dag_metadata)
        # print("OBJECT ID GENERATED", cursor.inserted_id)
        data = {"message": "success"}
        return json.dumps(data)
    except Exception as err:
        data = {"message": "failed", "reason": err}
        return json.dumps(data)


def execute_action(action_name):
    script_file = './actions.sh'
    subprocess.call(['bash', script_file])
    preprocess("action_url.txt")
    url = action_url_mappings[action_name]
    # print(request.json)
    # json_data = json.loads(request.json)
    reply = requests.post(url=url, json=request.json, verify=False)
    return reply.json()


def execute_dag(dag_name):

    print("------------------------------------DAG START-----------------------------------------------")
    unique_id = uuid.uuid4()
    print("DAG UNIQUE ID----------", unique_id)
    dag_metadata = {}
    dag_metadata["dag_id"] = str(unique_id)
    dag_metadata["dag_name"] = dag_name
    list_of_func_ids = []
    ######### Updates the list of action->url mapping ###################
    script_file = './actions.sh'
    subprocess.call(['bash', script_file])
    #####################################################################
    preprocess("action_url.txt")

    ### Create in-memory redis storage ###
    redis_instance = create_redis_instance()
    #######################################

    action_properties_mapping = {}  # Stores the action name and its corresponding properties

    dag_res = json.loads(json.dumps(get_dag_json(dag_name)))
    dag_data = dag_res[0]["dag"]
    for dag_item in dag_data:
        action_properties_mapping[dag_item["node_id"]] = dag_item["properties"]

    flag = 0
    for dag_item in dag_data:
        if(flag == 0):  # To indicate the first action in the DAG
            queue.append(dag_item["node_id"])
            action_properties_mapping[dag_item["node_id"]]["arguments"] = request.json
        while(len(queue) != 0):
            flag = flag + 1
            action = queue.pop(0)
            print("ACTION DEQUEUED FROM QUEUE : --->", action)
            ##########################################################
            #                 HANDLE THE ACTION                      #
            ##########################################################
            if isinstance(action, str):
                # if(isinstance(action_properties_mapping[action]['arguments'], list)):
                #     pass
                json_data = action_properties_mapping[action]["arguments"]
                url = action_url_mappings[action]
                reply = requests.post(url=url, json=json_data, verify=False)
                list_of_func_ids.append(reply.json()["activation_id"])
                # print("Line 292------------", reply.json()["activation_id"])
                redis_instance.set(action + "-output", pickle.dumps(reply.json()))
                action_type = action_properties_mapping[action]["primitive"]

                if(action_type == "condition"):
                    branching_action = action_properties_mapping[action]["branch_1"]
                    alternate_action = action_properties_mapping[action]["branch_2"]
                    result = reply.json()["result"]
                    condition_op = action_properties_mapping[action]["condition"]["operator"]
                    if(condition_op == "equals"):
                        if(isinstance(action_properties_mapping[action]["condition"]["target"], str)):
                            target = action_properties_mapping[action]["condition"]["target"]
                        else:
                            target = int(action_properties_mapping[action]["condition"]["target"])

                        if(result == target):
                            output_list = []  # List to store the output of actions whose outputs are required by downstream operations
                            queue.append(branching_action)
                            action_names = action_properties_mapping[branching_action]["outputs_from"]  # Get the list of actions whose output will be used
                            if(len(action_names) == 1):  # if only the output of one action is required
                                key = action_names[0] + "-output"
                                output = pickle.loads(redis_instance.get(key))
                                action_properties_mapping[branching_action]["arguments"] = output
                            else:
                                for item in action_names:
                                    key = item + "-output"
                                    output = pickle.loads(redis_instance.get(key))
                                    output_list.append(output)
                                action_properties_mapping[branching_action]["arguments"] = output_list

                        else:
                            output_list = []  # List to store the output of actions whose outputs are required by downstream operations
                            queue.append(alternate_action)
                            action_names = action_properties_mapping[alternate_action]["outputs_from"]  # Get the list of actions whose output will be used
                            if(len(action_names) == 1):  # if only the output of one action is required
                                key = action_names[0] + "-output"
                                output = pickle.loads(redis_instance.get(key))
                                action_properties_mapping[alternate_action]["arguments"] = output
                            else:
                                for item in action_names:
                                    key = item + "-output"
                                    output = pickle.loads(redis_instance.get(key))
                                    output_list.append(output)
                                action_properties_mapping[alternate_action]["arguments"] = output_list

                    if(condition_op == "greater_than"):
                        pass
                    if(condition_op == "greater_than_equals"):
                        pass
                    if(condition_op == "less_than"):
                        pass
                    if(condition_op == "less_than_equals"):
                        pass
                elif(action_type == "serial"):
                    next_action = action_properties_mapping[action]["next"]
                    if(next_action != ""):
                        output_list = []  # List to store the output of actions whose outputs are required by downstream operations
                        queue.append(next_action)
                        action_names = action_properties_mapping[next_action]["outputs_from"]  # Get the list of actions whose output will be used
                        if(len(action_names) == 1):  # if only the output of one action is required
                            key = action_names[0] + "-output"
                            output = pickle.loads(redis_instance.get(key))
                            action_properties_mapping[next_action]["arguments"] = output
                        else:
                            for item in action_names:
                                key = item + "-output"
                                output = pickle.loads(redis_instance.get(key))
                                output_list.append(output)
                            action_properties_mapping[next_action]["arguments"] = output_list

                elif(action_type == "parallel"):
                    parallel_action_list = action_properties_mapping[action]["next"]
                    queue.append(parallel_action_list)

            else:
                reply = handle_parallel(queue, redis_instance, action_properties_mapping, action)

    dag_metadata["function_activation_ids"] = list_of_func_ids
    # print("DAG SPEC AFTER WORKFLOW EXECUTION--------\n")
    # print(action_properties_mapping)
    # print('\n')
    submit_dag_metadata(dag_metadata)
    print("DAG ID---->FUNC IDS", dag_metadata)
    print('\n')
    # print('INTERMEDIATE OUTPUTS FROM ALL ACTIONS-----\n')
    # get_redis_contents(redis_instance)
    # print('\n')
    redis_instance.flushdb()
    print("Cleaned up in-memory intermediate outputs successfully\n")

    if(isinstance(reply, list)):
        res = {"dag_id": dag_metadata["dag_id"],
               "result": reply
               }
    else:
        res = {
            "dag_id": dag_metadata["dag_id"],
            "result": reply.json()
        }

    dag_responses.append(res)
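As a usage sketch for execute_dag(): the Flask control plane defined later in this commit exposes it through /run/<trigger_name>, and the posted JSON becomes the arguments of the DAG's first node. The trigger name below is hypothetical; the host and port follow the example URLs in the source.

#!/usr/bin/env python3
# Hedged sketch: fire a registered trigger so execute_dag() above runs a DAG.
import requests

payload = {"number": 16}   # becomes request.json, i.e. the first node's arguments
reply = requests.post("http://10.129.28.219:5001/run/odd-even-trigger", json=payload, verify=False)
print(reply.json())        # {"response": [... per-DAG dag_id and result ...]}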
@ -0,0 +1,51 @@
import os
import boto3
from botocore.exceptions import ClientError

aws_access_key_id = os.environ.get('AWS_ACCESS_KEY_ID')
aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
aws_region = os.getenv('AWS_REGION')
print(aws_access_key_id, aws_secret_access_key)

# s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)

# upload_file_path = "dag_register.py"
# bucket_name = 'dagit-store'
# key_name = upload_file_path
# folder_path = 'images'
# folder_name = "images"
# try:
#     s3.upload_file(upload_file_path, bucket_name, key_name)
#     s3.put_object_acl(Bucket=bucket_name, Key=key_name, ACL='public-read')
#     object_url = "https://dagit-store.s3.ap-south-1.amazonaws.com/" + key_name
#     print("Uploaded....\n")
#     print(object_url)
# except ClientError as e:
#     print(e)

# loop through files in folder
# for subdir, dirs, files in os.walk(folder_path):
#     for file in files:
#         # get full path of file
#         file_path = os.path.join(subdir, file)
#         # get S3 object key
#         object_key = os.path.relpath(file_path, folder_path)
#         # upload file to S3
#         # s3.Object(bucket_name, object_key).upload_file(file_path)
#         # s3.upload_file(file_path, bucket_name, object_key)
#         s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
#         s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
#         print("Uploaded....\n")

# try:
#     response = s3.generate_presigned_url('get_object',
#                                          Params={'Bucket': bucket_name,
#                                                  'Key': key_name},
#                                          ExpiresIn=3600)
#     print(response)
# except ClientError as e:
#     print(e)
@ -1,48 +1,346 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import sys
|
||||
import subprocess
|
||||
import threading
|
||||
import queue
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from flask import Flask, request,jsonify,send_file
|
||||
import pymongo
import shutil
|
||||
|
||||
def get_trigger():
|
||||
import orchestrator
|
||||
import validate_trigger
|
||||
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
action_url_mappings = {} #Store action->url mappings
|
||||
action_properties_mapping = {} #Stores the action name and its corresponding properties
|
||||
responses = []
|
||||
list_of_func_ids = []
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def home():
|
||||
data = {"message": "Hello,welcome to DAGit","author":"Anubhav Jana"}
|
||||
return jsonify(data)
|
||||
|
||||
@app.route('/view/functions', methods=['GET'])
|
||||
def list_actions():
|
||||
list_of_actions = []
|
||||
stream = os.popen(' wsk -i action list')
|
||||
actions = stream.read().strip().split(' ')
|
||||
try:
|
||||
for action in actions:
|
||||
if action=='' or action=='private' or action=='blackbox':
|
||||
continue
|
||||
else:
|
||||
list_of_actions.append(action.split('/')[2])
|
||||
data = {"status": 200,"DAGit functions":list_of_actions}
|
||||
return data
|
||||
except Exception as e:
|
||||
data = {"status": 404, "failure reason": e}
|
||||
|
||||
@app.route('/register/trigger/',methods=['POST'])
|
||||
def register_trigger():
|
||||
trigger_json = request.json
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["trigger_store"]
|
||||
mycol = mydb["triggers"]
|
||||
# query = {"dag_id":dag_id}
|
||||
projection = {"_id": 0,"trigger_name":1,"type":1,"trigger":1,"dags":1,"functions":1}
|
||||
try:
|
||||
cursor = mycol.insert_one(trigger_json)
|
||||
print("OBJECT ID GENERATED",cursor.inserted_id)
|
||||
if(trigger_json["type"]=="dag"):
|
||||
targets = trigger_json["dags"]
|
||||
elif(trigger_json["type"]=="function"):
|
||||
targets = trigger_json["functions"]
|
||||
data = {"status":"success","trigger_name":trigger_json["trigger_name"],"trigger_type":trigger_json["type"],"trigger_target":targets}
|
||||
return json.dumps(data)
|
||||
except Exception as e:
|
||||
data = {"status":"fail","reason":e}
|
||||
return json.dumps(data)
|
||||
|
||||
|
||||
@app.route('/register/function/<function_name>',methods=['POST'])
|
||||
def register_function(function_name):
|
||||
list_of_file_keys = []
|
||||
document = {}
|
||||
function_dir = '/home/faasapp/Desktop/anubhav/function_modules' # Library of functions
|
||||
new_dir = function_name
|
||||
destination = os.path.join(function_dir, new_dir)
|
||||
# Create the directory
|
||||
os.makedirs(destination, exist_ok=True)
|
||||
files = request.files
|
||||
for filekey in files:
|
||||
if filekey!='description':
|
||||
list_of_file_keys.append(filekey)
|
||||
for key in list_of_file_keys:
|
||||
file = request.files[key]
|
||||
filename = file.filename
|
||||
# Save, copy, remove
|
||||
file.save(file.filename)
|
||||
shutil.copy(filename, destination)
|
||||
os.remove(filename)
|
||||
image_build_script = 'buildAndPush.sh'
|
||||
shutil.copy(image_build_script, destination)
|
||||
|
||||
# Prepare data
|
||||
document["function_name"] = function_name
|
||||
document["image_build_script"] = 'buildAndPush.sh'
|
||||
document["python_script"] = (request.files[list_of_file_keys[0]]).filename
|
||||
document["dockerfile"] = (request.files[list_of_file_keys[1]]).filename
|
||||
document["requirements.txt"] =(request.files[list_of_file_keys[2]]).filename
|
||||
|
||||
docker_image_name = "10.129.28.219:5000/"+function_name+"-image"
|
||||
api_name = "/"+function_name+"-api"
|
||||
path_name = "/"+function_name+"-path"
|
||||
password = '1234'
|
||||
# build docker image
|
||||
cmd = ["sudo", "-S", "/home/faasapp/Desktop/anubhav/controlplane/build_image.sh",destination,docker_image_name]
|
||||
# open subprocess with Popen
|
||||
process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
|
||||
|
||||
# pass password to standard input
|
||||
process.stdin.write(password + "\n")
|
||||
process.stdin.flush()
|
||||
|
||||
# wait for process to complete and get output
|
||||
output, errors = process.communicate()
|
||||
print("OUTPUT---------",output)
|
||||
print("ERRORS---------",errors)
|
||||
# if(errors):
|
||||
# print("There is error building docker file")
|
||||
# data = {"message":"fail","reason":"docker build failed"}
|
||||
# return json.dumps(data)
|
||||
# else:
|
||||
|
||||
# create action, register action with api, populate its mapping
|
||||
subprocess.call(['./create_action.sh',destination,docker_image_name,function_name])
|
||||
subprocess.call(['./register.sh',api_name,path_name,function_name])
|
||||
subprocess.call(['bash', './actions.sh'])
|
||||
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["function_store"]
|
||||
mycol = mydb["functions"]
|
||||
try:
|
||||
cursor = mycol.insert_one(document)
|
||||
print("OBJECT ID GENERATED",cursor.inserted_id)
|
||||
data = {"message":"success"}
|
||||
return json.dumps(data)
|
||||
except Exception as e:
|
||||
print("Error--->",e)
|
||||
data = {"message":"fail","reason":e}
|
||||
return json.dumps(data)
|
||||
|
||||
# data = {"message":"success"}
|
||||
# return json.dumps(data)
|
||||
|
||||
|
||||
@app.route('/register/dag/',methods=['POST'])
|
||||
def register_dag():
|
||||
dag_json = request.json
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["dag_store"]
|
||||
mycol = mydb["dags"]
|
||||
try:
|
||||
cursor = mycol.insert_one(dag_json)
|
||||
print("OBJECT ID GENERATED",cursor.inserted_id)
|
||||
data = {"message":"success"}
|
||||
return json.dumps(data)
|
||||
except Exception as e:
|
||||
print("Error--->",e)
|
||||
data = {"message":"fail","reason":e}
|
||||
return json.dumps(data)
|
||||
|
||||
@app.route('/view/dag/<dag_name>',methods=['GET'])
|
||||
def view_dag(dag_name):
|
||||
dag_info_map = {}
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["dag_store"]
|
||||
mycol = mydb["dags"]
|
||||
document = mycol.find({"name":dag_name})
|
||||
data = list(document)
|
||||
dag_info_list = []
|
||||
for items in data:
|
||||
dag_info_list = items["dag"]
|
||||
dag_info_map["dag_name"] = items["name"]
|
||||
|
||||
dag_info_map["number_of_nodes"] = len(dag_info_list)
|
||||
dag_info_map["starting_node"] = dag_info_list[0]["node_id"]
|
||||
|
||||
for dag_items in dag_info_list:
|
||||
node_info_map = {}
|
||||
if(len(dag_items["properties"]["outputs_from"])==0):
|
||||
node_info_map["get_outputs_from"] = "Starting action - >No outputs consumed"
|
||||
else:
|
||||
node_info_map["get_outputs_from"] = dag_items["properties"]["outputs_from"]
|
||||
node_info_map["primitive"] = dag_items["properties"]["primitive"]
|
||||
if(dag_items["properties"]["primitive"]=="condition"):
|
||||
node_info_map["next_node_id_if_condition_true"] = dag_items["properties"]["branch_1"]
|
||||
node_info_map["next_node_id_if_condition_false"] = dag_items["properties"]["branch_2"]
|
||||
else:
|
||||
if(dag_items["properties"]["next"]!=""):
|
||||
node_info_map["next_function"] = dag_items["properties"]["next"]
|
||||
else:
|
||||
node_info_map["next_function"] = "Ending node_id of a path"
|
||||
dag_info_map[dag_items["node_id"]] = node_info_map
|
||||
response = {"dag_data":dag_info_map}
|
||||
# formatted_json = json.dumps(response, indent=20)
|
||||
return response
|
||||
|
||||
@app.route('/view/dags',methods=['GET'])
|
||||
def view_dags():
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["dag_store"]
|
||||
mycol = mydb["dags"]
|
||||
document = mycol.find()
|
||||
data = list(document)
|
||||
print(data)
|
||||
# Serialize the data to JSON
|
||||
json_data = json.dumps(data, default=str)
|
||||
json_string ='{"trigger_data":'+str(json_data)+'}'
|
||||
json_string ='{"dag":'+str(json_data)+'}'
|
||||
data = json.loads(json_string)
|
||||
# Format the JSON string with indentation
|
||||
formatted_json = json.dumps(data, indent=4)
|
||||
return formatted_json
|
||||
|
||||
@app.route('/view/triggers',methods=['GET'])
|
||||
def view_triggers():
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["trigger_store"]
|
||||
mycol = mydb["triggers"]
|
||||
document = mycol.find()
|
||||
data = list(document)
|
||||
# Serialize the data to JSON
|
||||
json_data = json.dumps(data, default=str)
|
||||
json_string ='{"trigger":'+str(json_data)+'}'
|
||||
data = json.loads(json_string)
|
||||
# Format the JSON string with indentation
|
||||
# formatted_json = json.dumps(data, indent=4)
|
||||
return data
|
||||
|
||||
@app.route('/view/trigger/<trigger_name>',methods=['GET'])
|
||||
def view_trigger(trigger_name):
|
||||
print(request.url)
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["trigger_store"]
|
||||
mycol = mydb["triggers"]
|
||||
query = {"trigger_name":trigger_name}
|
||||
projection = {"_id": 0,"trigger_name":1,"type":1,"trigger":1,"dags":1,"functions":1}
|
||||
document = mycol.find(query,projection)
|
||||
data = list(document)
|
||||
# print(data)
|
||||
json_data = json.dumps(data, default=str)
|
||||
json_string ='{"trigger":'+str(json_data)+'}'
|
||||
data = json.loads(json_string)
|
||||
formatted_json = json.dumps(data, indent=4)
|
||||
return formatted_json
|
||||
|
||||
def main():
|
||||
res = json.loads(get_trigger())
|
||||
print(res)
|
||||
# EXAMPLE URL: http://10.129.28.219:5001/view/activation/8d7df93e8f2940b8bdf93e8f2910b80f
|
||||
@app.route('/view/activation/<activation_id>', methods=['GET', 'POST'])
|
||||
def list_activations(activation_id):
|
||||
# activation_id = '74a7b6c707d14973a7b6c707d1a97392'
|
||||
cmd = ['wsk', '-i', 'activation', 'get', activation_id]
|
||||
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
json_res = result.stdout.decode().split('\n')[1:] # Ignore first line of output
|
||||
res = json.loads('\n'.join(json_res))
|
||||
d={}
|
||||
d["action_name"] = res["name"]
|
||||
d["duration"] = res["duration"]
|
||||
d["status"] = res["response"]["status"]
|
||||
d["result"] = res["response"]["result"]
|
||||
return({"action_name":res["name"],
|
||||
"duration": res["duration"],
|
||||
"status": res["response"]["status"],
|
||||
"result":res["response"]["result"]
|
||||
})
|
||||
|
||||
# EXAMPLE URL: http://10.129.28.219:5001/view/dag/76cc8a53-0a63-47bb-a5b5-9e6744f67c61
|
||||
@app.route('/view/<dag_id>',methods=['GET'])
|
||||
def view_dag_metadata(dag_id):
|
||||
myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
|
||||
mydb = myclient["dag_store"]
|
||||
mycol = mydb["dag_metadata"]
|
||||
query = {"dag_id":dag_id}
|
||||
projection = {"_id": 0,"dag_id":1,"dag_name":1,"function_activation_ids":1}
|
||||
document = mycol.find(query, projection)
|
||||
data = list(document)
|
||||
response = {"dag_metadata":data}
|
||||
return json.dumps(response)
|
||||
|
||||
# EXAMPLE URL: http://10.129.28.219:5001/run/action/odd-even-action
|
||||
# http://10.129.28.219:5001/run/action/decode-function
|
||||
|
||||
# def server():
|
||||
# # server_ip = "10.129.28.219"
|
||||
# # server_port = "5001"
|
||||
# url = "http://10.129.28.219:5001/register/trigger/myfirsttrigger"
|
||||
# # data = {"trigger_name":"myfirsttrigger", "dags":['odd-even-test']}
|
||||
# # json_data = json.dumps(data)
|
||||
# input_json_file = open(sys.argv[1])
|
||||
# params = json.load(input_json_file)
|
||||
# reply = requests.post(url = url,json = params,verify=False)
|
||||
# print(reply.json())
|
||||
|
||||
# @app.route('/run/action/<action_name>/', methods=['POST'])
|
||||
def execute_action(action_name):
|
||||
try:
|
||||
res = orchestrator.execute_action(action_name)
|
||||
data = {"status": 200,"dag_output":res}
|
||||
return data
|
||||
except Exception as e:
|
||||
data = {"status": 404 ,"failure_reason":e}
|
||||
return data
|
||||
|
||||
|
||||
|
||||
# EXAMPLE URL: http://10.129.28.219:5001/run/dag/odd-even-test/{"number":16}
|
||||
@app.route('/run/<trigger_name>', methods=['GET', 'POST'])
|
||||
def orchestrate_dag(trigger_name):
|
||||
try:
|
||||
triggers = validate_trigger.get_trigger_json(trigger_name)
|
||||
# print(triggers)
|
||||
if(len(triggers)==0): #could not fetch registered trigger
|
||||
return {"response": "the given trigger is not registered in DAGit trigger store"}
|
||||
else:
|
||||
thread_list = []
|
||||
result_queue = queue.Queue()
|
||||
if(triggers[0]['type']=='dag'):
|
||||
dags = triggers[0]['dags']
|
||||
try:
|
||||
|
||||
# def main():
|
||||
# server()
|
||||
for dag in dags:
|
||||
thread_list.append(threading.Thread(target=orchestrator.execute_dag, args=[dag]))
|
||||
for thread in thread_list:
|
||||
thread.start()
|
||||
for thread in thread_list:
|
||||
thread.join()
|
||||
print(orchestrator.dag_responses)
|
||||
print(orchestrator.x)
|
||||
# results = []
|
||||
# while not result_queue.empty():
|
||||
# result = result_queue.get()
|
||||
# results.append(result)
|
||||
return {"response":orchestrator.dag_responses}
|
||||
# res = orchestrator.execute_dag(dag)
|
||||
# return {"response":res,"status":200}
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return {"response":"failed","status":400}
|
||||
# thread_list.append(threading.Thread(target=orchestrator.execute_dag, args=[dag]))
|
||||
# for thread in thread_list:
|
||||
# thread.start()
|
||||
# for thread in thread_list:
|
||||
# thread.join()
|
||||
# return {"response": dags}
|
||||
else:
|
||||
functions = triggers[0]['functions']
|
||||
for function in functions:
|
||||
thread_list.append(threading.Thread(target=orchestrator.execute_action, args=[function]))
|
||||
for thread in thread_list:
|
||||
thread.start()
|
||||
for thread in thread_list:
|
||||
thread.join()
|
||||
|
||||
if __name__=="__main__":
|
||||
main()
|
||||
# return {"response": function}
|
||||
|
||||
# res = orchestrator.execute_dag(dag_name)
|
||||
# data = {"status": 200,"dag_output":res}
|
||||
# return data
|
||||
except Exception as e:
|
||||
print(e)
|
||||
data = {"status": 404 ,"message":"failed"}
|
||||
return data
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(host='0.0.0.0', port=5001)
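A few of the routes above can be smoke-tested with a small client; the sketch below assumes the deployment address used in the example URLs and a previously registered trigger name, both of which are assumptions.

#!/usr/bin/env python3
# Hedged sketch: exercise some DAGit control-plane routes defined above.
import requests

BASE = "http://10.129.28.219:5001"   # assumed address, taken from the example URLs above

print(requests.get(BASE + "/view/functions").json())   # registered OpenWhisk actions
print(requests.get(BASE + "/view/dags").json())        # DAG specs stored in MongoDB
# Fire a previously registered trigger (the name is hypothetical):
print(requests.post(BASE + "/run/mytrigger", json={"number": 16}).json())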
@ -0,0 +1,15 @@
#!/usr/bin/env python3

import pymongo


def get_trigger_json(trigger_name):
    myclient = pymongo.MongoClient("mongodb://127.0.0.1/27017")
    mydb = myclient["trigger_store"]
    mycol = mydb["triggers"]
    query = {"trigger_name": trigger_name}
    projection = {"_id": 0, "trigger_name": 1, "type": 1, "dags": 1, "functions": 1}
    document = mycol.find(query, projection)
    data = list(document)
    return data
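For context, the documents this helper returns are the ones inserted by the /register/trigger/ route; a representative shape, inferred from the fields projected above and with all values hypothetical, is:

# Hedged sketch: example document in trigger_store.triggers (values are hypothetical).
example_trigger = {
    "trigger_name": "mytrigger",
    "type": "dag",                 # or "function"
    "trigger": "http",             # free-form trigger descriptor stored as-is
    "dags": ["odd-even-test"],     # used when type == "dag"
    "functions": []                # used when type == "function"
}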
@ -1,2 +1,7 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/decode-function-image
wsk -i action create decode --docker 10.129.28.219:5000/decode-function-image --web=true --timeout=300000
wsk -i action create decode-function --docker 10.129.28.219:5000/decode-function-image --web=true --timeout=420000 -m 4096
wsk -i action update decode-function --docker 10.129.28.219:5000/decode-function-image decode.py --timeout 300000

./register.sh /decode-function /decode decode-function

// "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4",
@ -1,4 +1,4 @@
{
    "filename": "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4",
    "parts": 10
    "filename": "https://dagit-store.s3.ap-south-1.amazonaws.com/Sci-Fi+Short+Film+%E2%80%9CTears+of+Steel_+_+DUST.mp4",
    "parts": 20
}
@ -1,3 +1,4 @@
boto3
redis
ffmpeg-python
requests
@ -1,104 +1,155 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import redis
|
||||
import requests
|
||||
import boto3
|
||||
import pickle
|
||||
from io import BytesIO
|
||||
import cv2
|
||||
import time
|
||||
import numpy as np
|
||||
import subprocess
|
||||
import logging
|
||||
import json
|
||||
import sys
|
||||
import paramiko
|
||||
import pysftp
|
||||
|
||||
def main():
|
||||
import time as time1
|
||||
start = time1.time()
|
||||
cnopts = pysftp.CnOpts()
|
||||
cnopts.hostkeys = None
|
||||
try:
|
||||
|
||||
sftp = pysftp.Connection(
|
||||
host="10.129.28.219",
|
||||
username="faasapp",
|
||||
password="1234",
|
||||
cnopts=cnopts
|
||||
)
|
||||
logging.info("connection established successfully")
|
||||
except:
|
||||
logging.info('failed to establish connection to targeted server')
|
||||
|
||||
edge_detect__directory = "edge-detected-images"
|
||||
is_edgedetect_dir = os.path.isdir(edge_detect__directory)
|
||||
if(is_edgedetect_dir == False):
|
||||
os.mkdir(edge_detect__directory)
|
||||
|
||||
images_dir = "images"
|
||||
images_dir = "edge-detected-images"
|
||||
is_images_dir = os.path.isdir(images_dir)
|
||||
if(is_images_dir == False):
|
||||
os.mkdir(images_dir)
|
||||
r = redis.Redis(host="10.129.28.219", port=6379, db=2)
|
||||
activation_id = os.environ.get('__OW_ACTIVATION_ID')
|
||||
params = json.loads(sys.argv[1])
|
||||
# edge_detected_images = {}
|
||||
edge_detected_result = []
|
||||
try:
|
||||
decode_activation_id = params["activation_id"]
|
||||
parts = params["parts"]
|
||||
for i in range(0,parts):
|
||||
if os.path.exists(images_dir+'/edge_detected_image_'+str(i)+'.jpg'):
|
||||
os.remove(images_dir+'/edge_detected_image_'+str(i)+'.jpg')
|
||||
for i in range(0,parts):
|
||||
decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
|
||||
load_image = pickle.loads(r.get(decode_output))
|
||||
image_name = 'Image'+str(i)+'.jpg'
|
||||
with open(image_name, 'wb') as f:
|
||||
f.write(load_image)
|
||||
|
||||
img = cv2.imread(image_name)
|
||||
# height, width = img.shape[:2]
|
||||
# size = os.stat(img_name).st_size
|
||||
# decoded_images_sizes[img_name] = size
|
||||
image= cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
||||
canny_output = cv2.Canny(image, 80, 150)
|
||||
output_image = images_dir+'/edge_detected_image_'+str(i)+'.jpg'
|
||||
cv2.imwrite(output_image, canny_output)
|
||||
edge_detected_result.append('edge_detected_image_'+str(i)+'.jpg')
|
||||
except Exception as e: #If not running as a part of DAG workflow and implemented as a single standalone function
|
||||
image_url_list = params["image_url_links"]
|
||||
parts = len(image_url_list)
|
||||
for i in range(0,parts):
|
||||
if os.path.exists(images_dir+'/edge_detected_image_'+str(i)+'.jpg'):
|
||||
os.remove(images_dir+'/edge_detected_image_'+str(i)+'.jpg')
|
||||
for i in range(0,parts):
|
||||
response = requests.get(image_url_list[i])
|
||||
image_name = 'Image'+str(i)+'.jpg'
|
||||
with open(image_name, "wb") as f:
|
||||
f.write(response.content)
|
||||
img = cv2.imread(image_name)
|
||||
image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
||||
canny_output = cv2.Canny(image, 80, 150)
|
||||
output_image = images_dir+'/edge_detected_image_'+str(i)+'.jpg'
|
||||
cv2.imwrite(output_image, canny_output)
|
||||
edge_detected_result.append('edge_detected_image_'+str(i)+'.jpg')
|
||||
|
||||
|
||||
|
||||
remote_download_path = "/home/faasapp/Desktop/anubhav/sprocket-decode/"+images_dir
|
||||
|
||||
aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
|
||||
aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
|
||||
aws_region = os.getenv('AWS_REGION')
|
||||
|
||||
|
||||
remote_upload_path = "/home/faasapp/Desktop/anubhav/edge-detection/"+edge_detect__directory
|
||||
s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id,aws_secret_access_key=aws_secret_access_key,region_name=aws_region)
|
||||
|
||||
try:
|
||||
sftp.chdir(remote_download_path) # Test if remote_path exists
|
||||
except IOError:
|
||||
sftp.mkdir(remote_download_path) # Create remote_path
|
||||
sftp.chdir(remote_download_path)
|
||||
|
||||
bucket_name = 'dagit-store'
|
||||
folder_path = images_dir
|
||||
folder_name = images_dir
|
||||
for subdir, dirs, files in os.walk(folder_path):
|
||||
for file in files:
|
||||
file_path = os.path.join(subdir, file)
|
||||
s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
|
||||
s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
|
||||
url_list=[]
|
||||
for image in edge_detected_result:
|
||||
url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
|
||||
url_list.append(url)
|
||||
|
||||
print(json.dumps({"edge_detected_image_url_links":url_list,
|
||||
"activation_id": str(activation_id),
|
||||
"number_of_images": parts
|
||||
|
||||
}))
|
||||
|
||||
try:
|
||||
sftp.chdir(remote_upload_path) # Test if remote_path exists
|
||||
except IOError:
|
||||
sftp.mkdir(remote_upload_path) # Create remote_path
|
||||
sftp.chdir(remote_upload_path)
|
||||
return({"edge_detected_image_url_links":url_list,
|
||||
"activation_id": str(activation_id),
|
||||
"number_of_images": parts
|
||||
|
||||
})
|
||||
|
||||
sftp.get_d(remote_download_path,preserve_mtime=True,localdir=images_dir)
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
|
||||
|
||||
|
||||
activation_id = os.environ.get('__OW_ACTIVATION_ID')
|
||||
params = json.loads(sys.argv[1])
|
||||
|
||||
decode_activation_id = params["activation_id"]
|
||||
decoded_images_sizes = {}
|
||||
edge_detected_images = {}
|
||||
parts = params["parts"]
|
||||
for i in range(0,parts):
|
||||
img_name = images_dir+'/Image' + str(i) + '.jpg'
|
||||
img = cv2.imread(img_name)
|
||||
# height, width = img.shape[:2]
|
||||
size = os.stat(img_name).st_size
|
||||
decoded_images_sizes[img_name] = size
|
||||
image= cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
||||
canny_output = cv2.Canny(image, 80, 150)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
filename = 'detected-edges-' + str(i) +'.jpg'
|
||||
# Saving the image
|
||||
cv2.imwrite(edge_detect__directory+"/"+filename, canny_output)
|
||||
|
||||
|
||||
edge_img = cv2.imread(edge_detect__directory+"/"+filename)
|
||||
# edge_height, edge_width = edge_img.shape[:2]
|
||||
|
||||
|
||||
# decode_activation_id = params["activation_id"]
|
||||
# decoded_images_sizes = {}
|
||||
# edge_detected_images = {}
|
||||
# parts = params["parts"]
|
||||
# for i in range(0,parts):
|
||||
# img_name = images_dir+'/Image' + str(i) + '.jpg'
|
||||
# img = cv2.imread(img_name)
|
||||
# # height, width = img.shape[:2]
|
||||
# size = os.stat(img_name).st_size
|
||||
# decoded_images_sizes[img_name] = size
|
||||
# image= cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
||||
# canny_output = cv2.Canny(image, 80, 150)
|
||||
|
||||
edge_detected_size = os.stat(edge_detect__directory+"/"+filename).st_size
|
||||
edge_detected_images[edge_detect__directory+"/"+filename] = edge_detected_size
|
||||
|
||||
current_path = os.getcwd()
|
||||
sftp.put_d(current_path+"/"+edge_detect__directory,preserve_mtime=True,remotepath=remote_upload_path)
|
||||
detected_edge_images = os.listdir(current_path+"/"+edge_detect__directory)
|
||||
end = time1.time()
|
||||
exec_time = end-start
|
||||
decode_execution_time = params["exec_time_decode"]
|
||||
print(json.dumps({ "edge_detection_output": detected_edge_images,
|
||||
"edge_detect_activation_id": str(activation_id),
|
||||
"number_of_images_processed": parts,
|
||||
"edge_detection_execution_time": exec_time,
|
||||
"decode_execution_time": decode_execution_time,
|
||||
"edge_detected_images_size": edge_detected_images,
|
||||
"decoded_images_size": decoded_images_sizes
|
||||
}))
|
||||
# filename = 'detected-edges-' + str(i) +'.jpg'
|
||||
# # Saving the image
|
||||
# cv2.imwrite(edge_detect__directory+"/"+filename, canny_output)
|
||||
|
||||
# edge_img = cv2.imread(edge_detect__directory+"/"+filename)
|
||||
# # edge_height, edge_width = edge_img.shape[:2]
|
||||
|
||||
# edge_detected_size = os.stat(edge_detect__directory+"/"+filename).st_size
|
||||
# edge_detected_images[edge_detect__directory+"/"+filename] = edge_detected_size
|
||||
|
||||
# current_path = os.getcwd()
|
||||
# sftp.put_d(current_path+"/"+edge_detect__directory,preserve_mtime=True,remotepath=remote_upload_path)
|
||||
# detected_edge_images = os.listdir(current_path+"/"+edge_detect__directory)
|
||||
# print(json.dumps({ "edge_detection_output": detected_edge_images,
|
||||
# "edge_detect_activation_id": str(activation_id),
|
||||
# "number_of_images_processed": parts,
|
||||
# "edge_detection_execution_time": exec_time,
|
||||
# "decode_execution_time": decode_execution_time,
|
||||
# "edge_detected_images_size": edge_detected_images,
|
||||
# "decoded_images_size": decoded_images_sizes
|
||||
# }))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -1,6 +1,4 @@
opencv-python
redis
paramiko==2.11.0
pycparser==2.21
PyNaCl==1.5.0
pysftp==0.2.9
requests
boto3
@ -0,0 +1,25 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton

ENV FLASK_PROXY_PORT 8080

## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
    && apk add jpeg-dev zlib-dev libjpeg \
    && pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD blur.py /action/exec

# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]
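The ADD blur.py /action/exec line is what turns this image into an OpenWhisk docker action: the skeleton's action proxy invokes /action/exec with the invocation parameters passed as a single JSON string argument, which is why the Python entry points in this commit read json.loads(sys.argv[1]). A local smoke test of that convention (the payload and script name here are only illustrative):

#!/usr/bin/env python3
# Hedged sketch: emulate how parameters reach the scripts in this commit,
# i.e. as one JSON string in sys.argv[1]. Payload values are hypothetical.
import json
import subprocess

params = {"image_url_links": ["https://example.com/frame0.jpg"]}
subprocess.run(["python3", "blur.py", json.dumps(params)], check=False)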
@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
import requests
|
||||
import os
|
||||
import boto3
|
||||
import redis
|
||||
import pickle
|
||||
import json
|
||||
import cv2
|
||||
import sys
|
||||
|
||||
def main():
|
||||
images_dir = "bilateral-images"
|
||||
is_images_dir = os.path.isdir(images_dir)
|
||||
if(is_images_dir == False):
|
||||
os.mkdir(images_dir)
|
||||
r = redis.Redis(host="10.129.28.219", port=6379, db=2)
|
||||
activation_id = os.environ.get('__OW_ACTIVATION_ID')
|
||||
params = json.loads(sys.argv[1])
|
||||
bilateral_result = []
|
||||
try:
|
||||
decode_activation_id = params["activation_id"]
|
||||
parts = params["parts"]
|
||||
for i in range(0,parts):
|
||||
if os.path.exists(images_dir+'/bilateral_filtered_image_'+str(i)+'.jpg'):
|
||||
os.remove(images_dir+'/bilateral_filtered_image_'+str(i)+'.jpg')
|
||||
for i in range(0,parts):
|
||||
decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
|
||||
load_image = pickle.loads(r.get(decode_output))
|
||||
image_name = 'Image'+str(i)+'.jpg'
|
||||
with open(image_name, 'wb') as f:
|
||||
f.write(load_image)
|
||||
|
||||
originalmage = cv2.imread(image_name)
|
||||
|
||||
ReSized1 = cv2.resize(originalmage, (720, 640))
|
||||
grayScaleImage = cv2.cvtColor(originalmage, cv2.COLOR_BGR2GRAY)
|
||||
ReSized2 = cv2.resize(grayScaleImage, (720, 640))
|
||||
#applying median blur to smoothen an image
|
||||
smoothGrayScale = cv2.medianBlur(grayScaleImage, 5)
|
||||
ReSized3 = cv2.resize(smoothGrayScale, (720, 640))
|
||||
#retrieving the edges for cartoon effect
|
||||
getEdge = cv2.adaptiveThreshold(smoothGrayScale, 255,cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 9, 9)
|
||||
ReSized4 = cv2.resize(getEdge, (720, 640))
|
||||
#applying bilateral filter to remove noise and keep edge sharp as required
|
||||
colorImage = cv2.bilateralFilter(originalmage, 9, 300, 300)
|
||||
ReSized5 = cv2.resize(colorImage, (720, 640))
|
||||
#masking edged image with our "BEAUTIFY" image
|
||||
cartoonImage = cv2.bitwise_and(colorImage, colorImage, mask=getEdge)
|
||||
cartoon_image = cv2.resize(cartoonImage, (720, 640))
|
||||
|
||||
|
||||
output_image = images_dir+'/bilateral_filtered_image_'+str(i)+'.jpg'
|
||||
cv2.imwrite(output_image, cartoon_image)
|
||||
|
||||
img = open(output_image,"rb").read()
|
||||
pickled_object = pickle.dumps(img)
|
||||
bilateral_output = "bilateral-output-image"+activation_id+"-"+str(i)
|
||||
r.set(bilateral_output,pickled_object)
|
||||
|
||||
bilateral_result.append('bilateral_filtered_image_'+str(i)+'.jpg')
|
||||
|
||||
|
||||
except Exception as e: #If not running as a part of DAG workflow and implemented as a single standalone function
|
||||
image_url_list = params["image_url_links"]
|
||||
parts = len(image_url_list)
|
||||
for i in range(0,parts):
|
||||
if os.path.exists(images_dir+'/bilateral_filtered_image_'+str(i)+'.jpg'):
|
||||
os.remove(images_dir+'/bilateral_filtered_image_'+str(i)+'.jpg')
|
||||
for i in range(0,parts):
|
||||
response = requests.get(image_url_list[i])
|
||||
image_name = 'Image'+str(i)+'.jpg'
|
||||
with open(image_name, "wb") as f:
|
||||
f.write(response.content)
|
||||
|
||||
|
||||
originalmage = cv2.imread(image_name)
|
||||
|
||||
ReSized1 = cv2.resize(originalmage, (720, 640))
|
||||
grayScaleImage = cv2.cvtColor(originalmage, cv2.COLOR_BGR2GRAY)
|
||||
ReSized2 = cv2.resize(grayScaleImage, (720, 640))
|
||||
#applying median blur to smoothen an image
|
||||
smoothGrayScale = cv2.medianBlur(grayScaleImage, 5)
|
||||
ReSized3 = cv2.resize(smoothGrayScale, (720, 640))
|
||||
#retrieving the edges for cartoon effect
|
||||
getEdge = cv2.adaptiveThreshold(smoothGrayScale, 255,cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 9, 9)
|
||||
ReSized4 = cv2.resize(getEdge, (720, 640))
|
||||
#applying bilateral filter to remove noise and keep edge sharp as required
|
||||
colorImage = cv2.bilateralFilter(originalmage, 9, 300, 300)
|
||||
ReSized5 = cv2.resize(colorImage, (720, 640))
|
||||
#masking edged image with our "BEAUTIFY" image
|
||||
cartoonImage = cv2.bitwise_and(colorImage, colorImage, mask=getEdge)
|
||||
cartoon_image = cv2.resize(cartoonImage, (720, 640))
|
||||
|
||||
|
||||
output_image = images_dir+'/bilateral_filtered_image_'+str(i)+'.jpg'
|
||||
cv2.imwrite(output_image, cartoon_image)
|
||||
bilateral_result.append('bilateral_filtered_image_'+str(i)+'.jpg')
|
||||
|
||||
|
||||
aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
|
||||
aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
|
||||
aws_region = os.getenv('AWS_REGION')
|
||||
|
||||
|
||||
s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id,aws_secret_access_key=aws_secret_access_key,region_name=aws_region)
|
||||
|
||||
|
||||
bucket_name = 'dagit-store'
|
||||
folder_path = images_dir
|
||||
folder_name = images_dir
|
||||
for subdir, dirs, files in os.walk(folder_path):
|
||||
for file in files:
|
||||
file_path = os.path.join(subdir, file)
|
||||
s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
|
||||
s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
|
||||
url_list=[]
|
||||
for image in bilateral_result:
|
||||
url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
|
||||
url_list.append(url)
|
||||
|
||||
print(json.dumps({"bilateral_filtered_image_links":url_list,
|
||||
"activation_id": str(activation_id),
|
||||
"parts": parts
|
||||
|
||||
}))
|
||||
|
||||
return({"bilateral_filtered_image_links":url_list,
|
||||
"activation_id": str(activation_id),
|
||||
"parts": parts
|
||||
|
||||
})
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -0,0 +1,5 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-processing
wsk -i action create image-bilateral-filter --docker 10.129.28.219:5000/image-processing bilateral.py --web=true --timeout=420000 -m 4096
wsk -i action update image-bilateral-filter --docker 10.129.28.219:5000/image-processing bilateral.py --timeout 300000

./register.sh /image-bilateral-api /image-bilateral-path image-bilateral-filter --response-type=json
@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"

IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"

# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit
fi
@ -0,0 +1,4 @@
requests
boto3
redis
opencv-python
@ -0,0 +1,29 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton

ENV FLASK_PROXY_PORT 8080

## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
    && apk add jpeg-dev zlib-dev libjpeg \
    && pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
RUN pip install matplotlib

# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD face_detect.py /action/exec
ADD haarcascade_car.xml /action
ADD haarcascade_frontalface_default.xml /action

# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]
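
Before pushing, the image can be smoke-tested by running the action script directly inside the container. A rough sketch, not the project's documented workflow: the tag matches the one used in build.txt, the test URL is a placeholder, and the standalone code path still needs outbound network access for the image download and the S3 upload.

docker build -t 10.129.28.219:5000/image-denoise-image .
# /action/exec is face_detect.py; it reads its JSON parameters from argv[1]
docker run --rm -w /action 10.129.28.219:5000/image-denoise-image \
    python3 exec '{"image_url_links": ["https://<public-image-url>.jpg"]}'
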
@ -0,0 +1,5 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-denoise-image
wsk -i action create face-detection --docker 10.129.28.219:5000/image-denoise-image --web=true --timeout=300000
./register.sh /image-face-api /image-face-path face-detection --response-type=json

wsk -i action update face-detection --docker 10.129.28.219:5000/image-denoise-image face_detect.py --timeout 300000
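
Because face_detect.py reads its parameters from sys.argv[1], it can also be run outside OpenWhisk for quick testing. A sketch, assuming AWS credentials are exported for the S3 upload step; the image URL is a placeholder.

export AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_REGION=ap-south-1
python3 face_detect.py '{"image_url_links": ["https://<public-image-url>.jpg"]}'
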
@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"

IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"

# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi
@ -0,0 +1,167 @@
#!/usr/bin/env python3
import requests
import os
import boto3
import redis
import pickle
import json
import cv2
import sys

def main():

    images_dir = "face-detected-images"
    is_images_dir = os.path.isdir(images_dir)
    if(is_images_dir == False):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    face_detected_result = []
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0,parts):
            if os.path.exists(images_dir+'/face_detected_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/face_detected_image_'+str(i)+'.jpg')
        for i in range(0,parts):
            # Fetch the frame that the upstream decode action stored in redis
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)

            img = cv2.imread(image_name)
            # Load Haar cascade for face detection
            face_cascade = cv2.CascadeClassifier('../haarcascade_frontalface_default.xml')

            # Convert to grayscale
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

            # Detect faces
            faces = face_cascade.detectMultiScale(gray, 1.3, 5)

            # Draw bounding boxes around faces
            for (x,y,w,h) in faces:
                cv2.rectangle(img,(x,y),(x+w,y+h),(0,0,255),2)
                # face = img[y:y+h, x:x+w]
                # # Apply a Gaussian blur to the face ROI
                # blurred_face = cv2.GaussianBlur(face, (23, 23), 30)
                # # Replace the face ROI with the blurred face
                # img[y:y+h, x:x+w] = blurred_face
            output_image = images_dir+'/face_detected_image_'+str(i)+'.jpg'
            # face_blurred_image = images_dir+'/face_blurred_image_'+str(i)+'.jpg'

            cv2.imwrite(output_image, img)
            # cv2.imwrite(face_blurred_image, blurred_face)

            # Store the annotated image in redis for any downstream DAG node
            imag = open(output_image,"rb").read()
            pickled_object = pickle.dumps(imag)
            face_detected_output = "face-detected-image"+activation_id+"-"+str(i)
            print(pickled_object)
            r.set(face_detected_output, pickled_object)

            face_detected_result.append('face_detected_image_'+str(i)+'.jpg')

        # Upload every annotated image to the public S3 bucket and build the result URLs
        aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
        aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
        aws_region = os.getenv('AWS_REGION')

        s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)

        bucket_name = 'dagit-store'
        folder_path = images_dir
        folder_name = images_dir
        for subdir, dirs, files in os.walk(folder_path):
            for file in files:
                file_path = os.path.join(subdir, file)
                s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
                s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
        url_list = []
        for image in face_detected_result:
            url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
            url_list.append(url)

        print(json.dumps({"face_detected_image_url_links": url_list,
                          "activation_id": str(activation_id),
                          "parts": parts
                          }))

        return({"face_detected_image_url_links": url_list,
                "activation_id": str(activation_id),
                "parts": parts
                })

    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a single standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0,parts):
            if os.path.exists(images_dir+'/face_detected_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/face_detected_image_'+str(i)+'.jpg')
        for i in range(0,parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            img = cv2.imread(image_name)
            # Load Haar cascade for face detection
            face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')

            # Convert to grayscale
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

            # Detect faces
            faces = face_cascade.detectMultiScale(gray, 1.3, 5)

            # Draw bounding boxes around faces
            for (x,y,w,h) in faces:
                cv2.rectangle(img,(x,y),(x+w,y+h),(0,0,255),2)
            output_image = images_dir+'/face_detected_image_'+str(i)+'.jpg'
            cv2.imwrite(output_image, img)

            face_detected_result.append('face_detected_image_'+str(i)+'.jpg')

        # Upload every annotated image to the public S3 bucket and build the result URLs
        aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
        aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
        aws_region = os.getenv('AWS_REGION')

        s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)

        bucket_name = 'dagit-store'
        folder_path = images_dir
        folder_name = images_dir
        for subdir, dirs, files in os.walk(folder_path):
            for file in files:
                file_path = os.path.join(subdir, file)
                s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
                s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
        url_list = []
        for image in face_detected_result:
            url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
            url_list.append(url)

        print(json.dumps({"face_detected_image_url_links": url_list,
                          "activation_id": str(activation_id),
                          "parts": parts
                          }))

        return({"face_detected_image_url_links": url_list,
                "activation_id": str(activation_id),
                "parts": parts
                })

if __name__ == "__main__":
    main()
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,5 @@
requests
boto3
redis
opencv-python

@ -0,0 +1,26 @@
# Dockerfile for Python whisk docker action
FROM openwhisk/dockerskeleton

ENV FLASK_PROXY_PORT 8080

## Install our action's Python dependencies
ADD requirements.txt /action/requirements.txt
ENV AWS_ACCESS_KEY_ID="AKIAYFB773UVZSOAVZN4"
ENV AWS_SECRET_ACCESS_KEY="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"
ENV AWS_REGION="ap-south-1"
RUN apk --update add python py-pip openssl ca-certificates py-openssl wget
RUN apk --update add --virtual build-dependencies libffi-dev openssl-dev python-dev py-pip build-base \
    && apk add jpeg-dev zlib-dev libjpeg \
    && pip install --upgrade pip
RUN cd /action; pip install --no-cache-dir -r requirements.txt
RUN pip install opencv-python
RUN pip install matplotlib
# Ensure source assets are not drawn from the cache after this date
ENV REFRESHED_AT 2016-09-05T13:59:39Z
# Add all source assets
ADD . /action
# Rename our executable Python action
ADD img_hist.py /action/exec

# Leave CMD as is for Openwhisk
CMD ["/bin/bash", "-c", "cd actionProxy && python3 -u actionproxy.py"]
@ -0,0 +1,4 @@
sudo ./buildAndPush.sh 10.129.28.219:5000/image-processing
./register.sh /image-hist-api /image-hist-path image-histogram --response-type=json
wsk -i action create image-histogram --docker 10.129.28.219:5000/image-denoise-image --web=true --timeout=300000
wsk -i action update image-histogram --docker 10.129.28.219:5000/image-denoise-image img_hist.py --timeout 300000
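
After register.sh runs, the gateway route can be looked up and called over HTTP. A sketch only, assuming the route accepts POST and follows the same image_url_links convention as the standalone code path; the request URL is taken from the wsk output rather than guessed here.

wsk -i api list        # shows the managed URL for /image-hist-api/image-hist-path
curl -k -X POST "<url-from-wsk-api-list>" \
    -H "Content-Type: application/json" \
    -d '{"image_url_links": ["https://<public-image-url>.jpg"]}'
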
@ -0,0 +1,24 @@
#!/bin/bash
#
# This script will build the docker image and push it to dockerhub.
#
# Usage: buildAndPush.sh imageName
#
# Dockerhub image names look like "username/appname" and must be all lower case.
# For example, "janesmith/calculator"

IMAGE_NAME=$1
echo "Using $IMAGE_NAME as the image name"

# Make the docker image
docker build -t $IMAGE_NAME .
if [ $? -ne 0 ]; then
    echo "Docker build failed"
    exit 1
fi
docker push $IMAGE_NAME
if [ $? -ne 0 ]; then
    echo "Docker push failed"
    exit 1
fi
@ -0,0 +1,116 @@
#!/usr/bin/env python3
import requests
import os
import boto3
import redis
import pickle
import json
import cv2
import sys
import matplotlib.pyplot as plt

def main():
    images_dir = "histogram-images"

    is_images_dir = os.path.isdir(images_dir)
    if(is_images_dir == False):
        os.mkdir(images_dir)
    r = redis.Redis(host="10.129.28.219", port=6379, db=2)
    activation_id = os.environ.get('__OW_ACTIVATION_ID')
    params = json.loads(sys.argv[1])
    histogram_result = []
    try:
        decode_activation_id = params["activation_id"]
        parts = params["parts"]
        for i in range(0,parts):
            if os.path.exists(images_dir+'/histogram_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/histogram_image_'+str(i)+'.jpg')
        for i in range(0,parts):
            # Fetch the frame that the upstream decode action stored in redis
            decode_output = "decode-output-image"+decode_activation_id+"-"+str(i)
            load_image = pickle.loads(r.get(decode_output))
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, 'wb') as f:
                f.write(load_image)

            # Load image
            img = cv2.imread(image_name, 0)  # 0 for grayscale

            # Calculate histogram
            hist = cv2.calcHist([img], [0], None, [256], [0, 256])
            bins = range(256)
            output_image = images_dir+'/histogram_image_'+str(i)+'.jpg'
            # Plot histogram
            plt.hist(img.ravel(), bins, [0, 256])
            plt.title('Histogram')
            plt.xlabel('Intensity')
            plt.ylabel('Pixel Count')
            plt.savefig(output_image)
            plt.clf()  # start a fresh figure so histograms do not accumulate across images
            histogram_result.append('histogram_image_'+str(i)+'.jpg')
    except Exception as e:  # If not running as part of a DAG workflow, i.e. invoked as a single standalone function
        image_url_list = params["image_url_links"]
        parts = len(image_url_list)
        for i in range(0,parts):
            if os.path.exists(images_dir+'/histogram_image_'+str(i)+'.jpg'):
                os.remove(images_dir+'/histogram_image_'+str(i)+'.jpg')
        for i in range(0,parts):
            response = requests.get(image_url_list[i])
            image_name = 'Image'+str(i)+'.jpg'
            with open(image_name, "wb") as f:
                f.write(response.content)
            # Load image
            img = cv2.imread(image_name, 0)  # 0 for grayscale
            # Calculate histogram
            hist = cv2.calcHist([img], [0], None, [256], [0, 256])
            bins = range(256)
            output_image = images_dir+'/histogram_image_'+str(i)+'.jpg'
            # Plot histogram
            plt.hist(img.ravel(), bins, [0, 256])
            plt.title('Histogram')
            plt.xlabel('Intensity')
            plt.ylabel('Pixel Count')
            plt.savefig(output_image)
            plt.clf()  # start a fresh figure so histograms do not accumulate across images
            histogram_result.append('histogram_image_'+str(i)+'.jpg')

    # Upload the generated histogram plots to the public S3 bucket and build the result URLs
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    aws_region = os.getenv('AWS_REGION')

    s3 = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region)

    bucket_name = 'dagit-store'
    folder_path = images_dir
    folder_name = images_dir
    for subdir, dirs, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(subdir, file)
            s3.upload_file(file_path, bucket_name, f'{folder_name}/{file_path.split("/")[-1]}')
            s3.put_object_acl(Bucket=bucket_name, Key=f'{folder_name}/{file_path.split("/")[-1]}', ACL='public-read')
    url_list = []
    for image in histogram_result:
        url = "https://dagit-store.s3.ap-south-1.amazonaws.com/"+images_dir+"/"+image
        url_list.append(url)

    print(json.dumps({"histogram_image_url_links": url_list,
                      "activation_id": str(activation_id),
                      "parts": parts
                      }))

    return({"histogram_image_url_links": url_list,
            "activation_id": str(activation_id),
            "parts": parts
            })

if __name__ == "__main__":
    main()
@ -0,0 +1,5 @@
requests
boto3
redis
opencv-python
