add lock for task update

Change-Id: I53aa58d7d8ae75f9e229fe2c5d7d75b3b9afbed1
This commit is contained in:
Kun Huang 2015-10-29 10:55:59 +08:00
parent c0f8f6711e
commit c2e118b367
3 changed files with 21 additions and 6 deletions

View File

@ -3,3 +3,4 @@ oslo.db
python-novaclient
sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2
psutil>=1.1.1,<2.0.0
tooz>=1.19.0 # Apache-2.0

View File

@ -8,6 +8,8 @@ import sys
from scalpels.db import api as db_api
from copy import deepcopy as copy
import signal
from tooz import coordination
import time
"""
python <path-to-dir>/agent.py <uuid> mysql
@ -37,9 +39,17 @@ if __name__ == "__main__":
# this kill is to script process
worker_p = psutil.Process(worker.pid)
worker_p.send_signal(signal.SIGINT)
task = db_api.task_get(task_uuid)
results = copy(task.results)
ret = db_api.result_create(out)
results.append(ret.uuid)
# TODO make this behaviour safe under concurrent updates
db_api.task_update(task_uuid, results=results)
# TODO a file lock is fine on localhost; a distributed lock backend
# (e.g. redis) is needed instead for multi-host deployments
co = coordination.get_coordinator("file:///tmp", b"localhost")
co.start()
lock = co.get_lock("task_update_lock")
with lock:
task = db_api.task_get(task_uuid)
results = copy(task.results)
ret = db_api.result_create(out)
results.append(ret.uuid)
db_api.task_update(task_uuid, results=results)
time.sleep(2)
co.stop()

View File

@ -16,10 +16,14 @@ sca start -a rpc -a rabbit -a traffic
echo "running load"
source /opt/stack/new/devstack/openrc admin admin
sca load --storm
sleep 10
echo stop those agents
sca stop
echo waiting agent write data into db before report
sleep 20
echo report data
sca report