Commit 7bac27b1 by BoxuanXu

add multi thread

1 parent 591a5745
Showing with 55 additions and 14 deletions
......@@ -7,11 +7,17 @@ import subprocess
from libpywrap import *
import requests
import threading
from converter import Run_Converter
from flask import Flask,request
from jenkinsapi.jenkins import Jenkins
import Queue
__website__ = "www.seetatech.com"
__author__ = "seetatech"
__editor__ = "xuboxuan"
......@@ -28,6 +34,9 @@ download_path="."
#used for jenkins
host = 'http://192.168.1.242:8080/'
#init post info queue
Info_Queue = Queue.Queue()
#initialize the logging
logging.basicConfig(
level=logging.INFO,
......@@ -92,7 +101,8 @@ def get_path_from_db(modelid,seetanet_model):
#Drive the converter with the params and the model graph
Run_Converter(params_name,graph_name,seetanet_model)
#Run_Converter(params_name,graph_name,seetanet_model)
Run_Converter("model-0015.params","model-symbol.json",seetanet_model)
return params_name,graph_name
......@@ -107,16 +117,18 @@ def upload_filetoFastDFS(params_name, graph_name,seetanet_model):
return None
return stmodel_fid
def get_info_from_queue(arg):
while 1:
if not Info_Queue.empty():
Info = Info_Queue.get()
#get parameters and drive the conversion function
@app.route('/',methods=['POST'])
def Dirver_Convert():
#get parameter modelid from post stream
modelid=request.form['modelid']
output_layer=request.form['output_layer']
logging.info("We get modelid :%s from post stream,Start conversion:" % modelid)
logging.info("Begin Convert Info is : modelid = %s, output_layer = %s ,Post_Host = %s" % (Info["modelid"],Info["output_layer"],Info["post_url"]))
try:
modelid = Info["modelid"]
output_layer = Info["output_layer"]
Post_Host = Info["post_url"]
seetanet_model = "model_" + str(modelid) + ".data"
params_name,graph_name = get_path_from_db(modelid,seetanet_model)
......@@ -129,7 +141,7 @@ def Dirver_Convert():
if stmodel_fid is None:
logging.info("upload filed")
return "false"
return_flag = "false"
else:
#remove params file and graph file
......@@ -140,7 +152,7 @@ def Dirver_Convert():
except subprocess.CalledProcessError as err:
logging.info("shell command error!")
return "false"
return_flag = "false"
logging.info("convert successfully!")
......@@ -156,13 +168,42 @@ def Dirver_Convert():
params = eval(params)
job.invoke(block=True,build_params=params)
return "true"
return_flag = "true"
finally:
curl_atlas_exe.close()
curl_atlas.close()
db_atlas.close()
#curl_atlas_exe.close()
#curl_atlas.close()
#db_atlas.close()
logging.info('return %s',return_flag)
post_return = { "modelid": modelid, "return_flag" : return_flag }
requests.post(post_url, data=post_return)
#get parameters and drive the conversion function
@app.route('/convert',methods=['POST'])
def Dirver_Convert():
#get parameter modelid from post stream
return_flag = "queue"
print("haha")
try:
modelid=request.form['modelid']
output_layer=request.form['output_layer']
post_url=request.form['post_url']
logging.info("New Post Connect: modelid : %s , post_url : %s, Queue size : %d" % (modelid,post_url,Info_Queue.qsize()))
Post_Info = { "modelid": modelid, "output_layer" : output_layer,"post_url" : post_url}
Info_Queue.put(Post_Info)
finally:
return return_flag
if __name__ == '__main__':
try:
t = threading.Thread(target=get_info_from_queue,args=(1,))
t.start()
app.run(host='0.0.0.0')
finally:
curl_atlas_exe.close()
curl_atlas.close()
db_atlas.close()
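
For reference, a minimal sketch (not part of the commit) of the producer/consumer pattern this change introduces: the Flask route only enqueues the POSTed conversion request, and a background thread drains the queue, runs the conversion, and reports the result back to post_url. The helper convert_model() and the names worker/enqueue_request are illustrative placeholders for the real get_path_from_db/Run_Converter/upload_filetoFastDFS steps; the sketch assumes Python 2's Queue module to match the diff (the module is named queue on Python 3).

# Producer/consumer sketch of the queue + worker-thread pattern added here.
import threading
import Queue

import requests
from flask import Flask, request

app = Flask(__name__)
Info_Queue = Queue.Queue()

def convert_model(info):
    # Placeholder for the real conversion pipeline (DB lookup, Run_Converter,
    # FastDFS upload); returns "true" on success, "false" on failure.
    return "true"

def worker():
    while True:
        # Blocking get() sleeps until an item arrives, instead of busy-polling
        # Info_Queue.empty() in a tight loop.
        info = Info_Queue.get()
        return_flag = convert_model(info)
        # Report the result back to the caller-supplied callback URL.
        requests.post(info["post_url"],
                      data={"modelid": info["modelid"], "return_flag": return_flag})

@app.route('/convert', methods=['POST'])
def enqueue_request():
    # The HTTP handler only enqueues the request and returns immediately.
    Info_Queue.put({"modelid": request.form['modelid'],
                    "output_layer": request.form['output_layer'],
                    "post_url": request.form['post_url']})
    return "queue"

if __name__ == '__main__':
    t = threading.Thread(target=worker)
    t.setDaemon(True)  # do not keep the process alive once Flask exits
    t.start()
    app.run(host='0.0.0.0')

Using a blocking Queue.get() (rather than checking Info_Queue.empty() in a while loop) keeps the worker thread idle while the queue is empty and removes the race between the empty() check and the get().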