mv scripts/agent.py to cmd/

Change-Id: Ifd91bc6966f485a3fe28971362f8c284f4b1b1f1
This commit is contained in:
Kun Huang 2015-11-11 13:44:46 +08:00
parent b1e3372293
commit eba48346bc
4 changed files with 64 additions and 68 deletions

View File

@ -10,7 +10,7 @@ def run_agent(task_uuid, ag):
"""
python <path-to-dir>/agent.py <uuid> mysql
"""
cmd = "python %s/agent.py %s %s" % (data_dir, task_uuid, ag)
cmd = "sca-tracer %s %s" % (task_uuid, ag)
ag = subprocess.Popen(cmd.split())
return ag.pid

View File

@ -5,11 +5,9 @@
from scalpels.agents.server import server
def main():
    """Start the scalpels agent server and block until it is interrupted.

    A Ctrl-C (KeyboardInterrupt) during start-up or while waiting triggers
    an orderly ``server.stop()``.
    """
    try:
        # TODO handle stop later
        server.start()
        server.wait()
    except KeyboardInterrupt:
        # Operator requested shutdown; stop the server cleanly.
        server.stop()


if __name__ == "__main__":
    main()

View File

@ -2,8 +2,67 @@
#-*- coding:utf-8 -*-
# Author: Kun Huang <academicgareth@gmail.com>
import subprocess
import psutil
import sys
from scalpels.db import api as db_api
from copy import deepcopy as copy
import signal
from tooz import coordination
import time
from scalpels.agents import base
"""
example:
sca-tracer <uuid> mysql
TODO:
add help (-h) message
config key-word arguments for each tracer
"""
def read_from_ag(ag):
    """Return the shell command used to launch the tracer named *ag*.

    The command template comes from the client-side tracer map and is
    filled in with the configured data directory.
    """
    # wrong impl. here, need read from config or db instead
    from scalpels.client.utils import tracers_map as agents_map
    template = agents_map.get(ag)
    directory = db_api.setup_config_get()["data_dir"].rstrip("/")
    return template % directory
def main():
    """Entry point for ``sca-tracer <task-uuid> <agent>``.

    Spawns the tracer command for the given agent, collects its timestamped
    stdout until EOF or Ctrl-C, stops the tracer with SIGINT, parses the
    captured output, and stores the resulting records on the task row
    identified by ``task_uuid``.
    """
    # BUG fix: a leftover ``raise NotImplementedError()`` at the top of this
    # function made everything below unreachable dead code; removed so the
    # console-script entry point actually runs the tracer.
    task_uuid, ag = sys.argv[1], sys.argv[2]
    cmd = read_from_ag(ag)
    worker = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
    out = []  # list of (timestamp, stripped-line) samples from the tracer
    try:
        while True:
            t = worker.stdout.readline()
            if not len(t):
                # EOF: the tracer process exited on its own.
                break
            _t = (time.time(), t.strip())
            out.append(_t)
    except KeyboardInterrupt:
        # Operator pressed Ctrl-C; fall through and stop the tracer below.
        pass
    # psutil is much more professional... I have to use it instead
    # this kill is to script process
    worker_p = psutil.Process(worker.pid)
    worker_p.send_signal(signal.SIGINT)
    # Pick the parser matching the agent name, e.g. base.parse_mysql.
    parse_func = getattr(base, "parse_%s" % ag)
    # TODO file lock is okay in localhost, here need redis for distributed
    # lock instead
    co = coordination.get_coordinator("file:///tmp", b"localhost")
    co.start()
    try:
        lock = co.get_lock("task_update_lock")
        with lock:
            # Persist each parsed result and record its uuid on the task.
            task = db_api.task_get(task_uuid)
            results = copy(task.results)
            for ret in parse_func(out):
                ret = db_api.result_create(**ret)
                results.append(ret.uuid)
            db_api.task_update(task_uuid, results=results)
        time.sleep(2)
    finally:
        # FIX: always release the coordinator, even if parsing or the db
        # update raises, so the file-based lock backend is not leaked.
        co.stop()


if __name__ == "__main__":
    main()

View File

@ -1,61 +0,0 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# Author: Kun Huang <academicgareth@gmail.com>
import subprocess
import psutil
import sys
from scalpels.db import api as db_api
from copy import deepcopy as copy
import signal
from tooz import coordination
import time
from scalpels.agents import base
"""
python <path-to-dir>/agent.py <uuid> mysql
"""
def read_from_ag(ag):
    """Build the launch command line for the tracer called *ag*."""
    # wrong impl. here, need read from config or db instead
    from scalpels.client.utils import tracers_map as agents_map
    conf_dir = db_api.setup_config_get()["data_dir"].rstrip("/")
    cmd_template = agents_map.get(ag)
    return cmd_template % conf_dir
if __name__ == "__main__":
task_uuid, ag = sys.argv[1], sys.argv[2]
cmd = read_from_ag(ag)
worker = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
out = []
try:
while True:
t = worker.stdout.readline()
if not len(t):
break
_t = (time.time(), t.strip())
out.append(_t)
except KeyboardInterrupt:
pass
# psutil is much more professional... I have to use it instead
# this kill is to script process
worker_p = psutil.Process(worker.pid)
worker_p.send_signal(signal.SIGINT)
parse_func = getattr(base, "parse_%s" % ag)
# TODO file lock is okay in localhost, here need redis for distributed
# lock istead
co = coordination.get_coordinator("file:///tmp", b"localhost")
co.start()
lock = co.get_lock("task_update_lock")
with lock:
task = db_api.task_get(task_uuid)
results = copy(task.results)
for ret in parse_func(out):
ret = db_api.result_create(**ret)
results.append(ret.uuid)
db_api.task_update(task_uuid, results=results)
time.sleep(2)
co.stop()