Initial conductor implementation

parent aa2917f79c
commit 2185c2298a
conductor/bin/app.py (new file, 3 lines)
@@ -0,0 +1,3 @@
#!/usr/bin/env python

from conductor import app
conductor/conductor/__init__.py (new empty file)
conductor/conductor/app.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import glob
import json
import time
import sys

import tornado.ioloop


import rabbitmq
from workflow import Workflow
import cloud_formation
import windows_agent
from commands.dispatcher import CommandDispatcher
from config import Config

config = Config(sys.argv[1] if len(sys.argv) > 1 else None)

rmqclient = rabbitmq.RabbitMqClient(
    virtual_host=config.get_setting('rabbitmq', 'vhost', '/'),
    login=config.get_setting('rabbitmq', 'login', 'guest'),
    password=config.get_setting('rabbitmq', 'password', 'guest'),
    host=config.get_setting('rabbitmq', 'host', 'localhost'))


def schedule(callback, *args, **kwargs):
    tornado.ioloop.IOLoop.instance().add_timeout(time.time() + 0.1,
        lambda args=args, kwargs=kwargs: callback(*args, **kwargs))


def task_received(task):
    command_dispatcher = CommandDispatcher(task['name'], rmqclient)
    workflows = []
    for path in glob.glob("data/workflows/*.xml"):
        print "loading", path
        workflow = Workflow(path, task, command_dispatcher, config)
        workflows.append(workflow)

    def loop(callback):
        for workflow in workflows:
            workflow.execute()
        if not command_dispatcher.execute_pending(lambda: schedule(loop, callback)):
            callback()

    def shutdown():
        command_dispatcher.close()
        rmqclient.send('task-results', json.dumps(task))
        print "Done!!!!!!!!!!"

    loop(shutdown)


def message_received(body):
    task_received(json.loads(body))


def start():
    rmqclient.subscribe("tasks", message_received)


rmqclient.start(start)
tornado.ioloop.IOLoop.instance().start()
conductor/conductor/cloud_formation.py (new file, 10 lines)
@@ -0,0 +1,10 @@
import xml_code_engine


def update_cf_stack(engine, context, body, template, mappings, arguments, **kwargs):
    command_dispatcher = context['/commandDispatcher']

    callback = lambda result: engine.evaluate_content(body.find('success'), context)
    command_dispatcher.execute(name='cf', template=template, mappings=mappings, arguments=arguments, callback=callback)


xml_code_engine.XmlCodeEngine.register_function(update_cf_stack, "update-cf-stack")
conductor/conductor/commands/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
import command
conductor/conductor/commands/cloud_formation.py (new file, 75 lines)
@@ -0,0 +1,75 @@
import json
import os
import uuid

import conductor.helpers
from command import CommandBase
from subprocess import call


class HeatExecutor(CommandBase):
    def __init__(self, stack):
        self._pending_list = []
        self._stack = stack

    def execute(self, template, mappings, arguments, callback):
        with open('data/templates/cf/%s.template' % template) as template_file:
            template_data = template_file.read()

        template_data = conductor.helpers.transform_json(json.loads(template_data), mappings)

        self._pending_list.append({
            'template': template_data,
            'arguments': arguments,
            'callback': callback
        })

    def has_pending_commands(self):
        return len(self._pending_list) > 0

    def execute_pending(self, callback):
        if not self._pending_list:
            return False

        template = {}
        arguments = {}
        for t in self._pending_list:
            template = conductor.helpers.merge_dicts(template, t['template'], max_levels=2)
            arguments = conductor.helpers.merge_dicts(arguments, t['arguments'], max_levels=1)

        print 'Executing heat template', json.dumps(template), 'with arguments', arguments, 'on stack', self._stack

        if not os.path.exists("tmp"):
            os.mkdir("tmp")
        file_name = "tmp/" + str(uuid.uuid4())
        print "Saving template to", file_name
        with open(file_name, "w") as f:
            f.write(json.dumps(template))

        arguments_str = ";".join(['%s=%s' % (key, value) for (key, value) in arguments.items()])
        call([
            "./heat_run", "stack-create",
            "-f" + file_name,
            "-P" + arguments_str,
            self._stack
        ])

        callbacks = []
        for t in self._pending_list:
            if t['callback']:
                callbacks.append(t['callback'])

        self._pending_list = []

        for cb in callbacks:
            cb(True)

        callback()

        return True
conductor/conductor/commands/command.py (new file, 13 lines)
@@ -0,0 +1,13 @@

class CommandBase(object):
    def execute(self, **kwargs):
        pass

    def execute_pending(self, callback):
        return False

    def has_pending_commands(self):
        return False

    def close(self):
        pass
conductor/conductor/commands/dispatcher.py (new file, 44 lines)
@@ -0,0 +1,44 @@
import command
import cloud_formation
import windows_agent


class CommandDispatcher(command.CommandBase):
    def __init__(self, environment_name, rmqclient):
        self._command_map = {
            'cf': cloud_formation.HeatExecutor(environment_name),
            'agent': windows_agent.WindowsAgentExecutor(environment_name, rmqclient)
        }

    def execute(self, name, **kwargs):
        self._command_map[name].execute(**kwargs)

    def execute_pending(self, callback):
        result = 0
        count = [0]

        def on_result():
            count[0] -= 1
            if not count[0]:
                callback()

        for command in self._command_map.values():
            count[0] += 1
            result += 1
            if not command.execute_pending(on_result):
                count[0] -= 1
                result -= 1

        return result > 0

    def has_pending_commands(self):
        result = False
        for command in self._command_map.values():
            result |= command.has_pending_commands()

        return result

    def close(self):
        for t in self._command_map.values():
            t.close()
conductor/conductor/commands/windows_agent.py (new file, 58 lines)
@@ -0,0 +1,58 @@
import json

import conductor.helpers
from command import CommandBase


class WindowsAgentExecutor(CommandBase):
    def __init__(self, stack, rmqclient):
        self._pending_list = []
        self._stack = stack
        self._rmqclient = rmqclient
        self._callback = None
        rmqclient.subscribe('-execution-results', self._on_message)
        print "--------------------"

    def execute(self, template, mappings, host, callback):
        with open('data/templates/agent/%s.template' % template) as template_file:
            template_data = template_file.read()

        template_data = json.dumps(conductor.helpers.transform_json(json.loads(template_data), mappings))

        self._pending_list.append({
            'template': template_data,
            'host': ('%s-%s' % (self._stack, host)).lower().replace(' ', '-'),
            'callback': callback
        })

    def _on_message(self, body):
        if self._pending_list:
            item = self._pending_list.pop()
            item['callback'](json.loads(body))
        if self._callback and not self._pending_list:
            cb = self._callback
            self._callback = None
            cb()

    def has_pending_commands(self):
        return len(self._pending_list) > 0

    def execute_pending(self, callback):
        if not self._pending_list:
            return False

        self._callback = callback

        for t in self._pending_list:
            self._rmqclient.send(queue=t['host'], data=t['template'])
            print 'Sending RMQ message %s to %s' % (t['template'], t['host'])

        callbacks = []
        for t in self._pending_list:
            if t['callback']:
                callbacks.append(t['callback'])

        return True

    def close(self):
        self._rmqclient.unsubscribe('-execution-results')
conductor/conductor/config.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from ConfigParser import SafeConfigParser


class Config(object):
    CONFIG_PATH = './etc/app.config'

    def __init__(self, filename=None):
        self.config = SafeConfigParser()
        self.config.read(filename or self.CONFIG_PATH)

    def get_setting(self, section, name, default=None):
        if not self.config.has_option(section, name):
            return default
        return self.config.get(section, name)

    def __getitem__(self, item):
        parts = item.rsplit('.', 1)
        return self.get_setting(parts[0] if len(parts) == 2 else 'DEFAULT', parts[-1])
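A minimal usage sketch for Config (editor's illustration, not part of the commit; it assumes the package is importable as "conductor" and is run next to the etc/app.config added later in this commit):

    from conductor.config import Config

    config = Config('./etc/app.config')
    print config.get_setting('rabbitmq', 'host', 'localhost')   # section lookup with a default
    print config.get_setting('rabbitmq', 'vhost', '/')          # 'keero' from the sample config
    print config['rabbitmq.login']                               # dotted form handled by __getitem__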
conductor/conductor/function_context.py (new file, 50 lines)
@@ -0,0 +1,50 @@
class Context(object):
    def __init__(self, parent=None):
        self._parent = parent
        self._data = None

    def _get_data(self):
        if self._data is None:
            self._data = {} if self._parent is None else self._parent._get_data().copy()
        return self._data

    def __getitem__(self, item):
        context, path = self._parseContext(item)
        return context._get_data().get(path)

    def __setitem__(self, key, value):
        context, path = self._parseContext(key)
        context._get_data()[path] = value

    def _parseContext(self, path):
        context = self
        index = 0
        for c in path:
            if c == ':' and context._parent is not None:
                context = context._parent
            elif c == '/':
                while context._parent is not None:
                    context = context._parent
            else:
                break

            index += 1

        return context, path[index:]

    def assign_from(self, context, copy=False):
        self._parent = context._parent
        self._data = context._data
        if copy and self._data is not None:
            self._data = self._data.copy()

    @property
    def parent(self):
        return self._parent

    def __str__(self):
        if self._data is not None:
            return str(self._data)
        if self._parent:
            return str(self._parent)
        return str({})
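How the Context prefixes resolve (editor's illustration, not part of the commit; it assumes the package is importable as "conductor"): a leading ':' steps to the parent context, a leading '/' jumps to the root context, and anything else is looked up in the current one.

    from conductor.function_context import Context

    root = Context()
    root['/config'] = 'root-config'        # stored at the root

    child = Context(parent=root)
    child['local'] = 'child-value'         # stored in the child (parent data is copied lazily)

    print child['local']                   # 'child-value'
    print child[':config']                 # 'root-config', one level up
    print child['/config']                 # 'root-config', resolved at the root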
conductor/conductor/helpers.py (new file, 36 lines)
@@ -0,0 +1,36 @@
import types


def transform_json(json, mappings):
    if isinstance(json, types.ListType):
        result = []
        for t in json:
            result.append(transform_json(t, mappings))
        return result

    if isinstance(json, types.DictionaryType):
        result = {}
        for key, value in json.items():
            result[transform_json(key, mappings)] = transform_json(value, mappings)
        return result

    if isinstance(json, types.StringTypes) and json.startswith('$'):
        value = mappings.get(json[1:])
        if value is not None:
            return value

    return json


def merge_dicts(dict1, dict2, max_levels=0):
    result = {}
    for key, value in dict1.items():
        result[key] = value
        if key in dict2:
            other_value = dict2[key]
            if max_levels == 1 or not isinstance(other_value, types.DictionaryType):
                result[key] = other_value
            else:
                result[key] = merge_dicts(value, other_value, 0 if max_levels == 0 else max_levels - 1)
    for key, value in dict2.items():
        if key not in result:
            result[key] = value
    return result
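What the two helpers do (editor's illustration, not part of the commit; it assumes the package is importable as "conductor"): transform_json substitutes '$'-prefixed strings from the mappings dictionary, and merge_dicts merges nested dictionaries down to max_levels, which is how HeatExecutor combines pending templates and arguments.

    from conductor.helpers import transform_json, merge_dicts

    mappings = {'instanceName': 'dc01'}
    print transform_json({'Ref': '$instanceName'}, mappings)
    # {'Ref': 'dc01'}

    print merge_dicts({'Resources': {'A': 1}}, {'Resources': {'B': 2}}, max_levels=2)
    # {'Resources': {'A': 1, 'B': 2}}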
conductor/conductor/rabbitmq.py (new file, 62 lines)
@@ -0,0 +1,62 @@
import uuid
import pika
from pika.adapters import TornadoConnection
import time

try:
    import tornado.ioloop
    IOLoop = tornado.ioloop.IOLoop
except ImportError:
    IOLoop = None


class RabbitMqClient(object):
    def __init__(self, host='localhost', login='guest', password='guest', virtual_host='/'):
        credentials = pika.PlainCredentials(login, password)
        self._connection_parameters = pika.ConnectionParameters(
            credentials=credentials, host=host, virtual_host=virtual_host)
        self._subscriptions = {}

    def _create_connection(self):
        self.connection = TornadoConnection(
            parameters=self._connection_parameters,
            on_open_callback=self._on_connected)

    def _on_connected(self, connection):
        self._channel = connection.channel(self._on_channel_open)

    def _on_channel_open(self, channel):
        self._channel = channel
        if self._started_callback:
            self._started_callback()

    def _on_queue_declared(self, frame, queue, callback, ctag):
        def invoke_callback(ch, method_frame, header_frame, body):
            callback(body)

        self._channel.basic_consume(invoke_callback, queue=queue, no_ack=True, consumer_tag=ctag)

    def subscribe(self, queue, callback):
        ctag = str(uuid.uuid4())
        self._subscriptions[queue] = ctag

        self._channel.queue_declare(queue=queue, durable=True,
            callback=lambda frame, ctag=ctag: self._on_queue_declared(frame, queue, callback, ctag))

    def unsubscribe(self, queue):
        self._channel.basic_cancel(consumer_tag=self._subscriptions[queue])
        del self._subscriptions[queue]

    def start(self, callback=None):
        if IOLoop is None: raise ImportError("Tornado not installed")
        self._started_callback = callback
        ioloop = IOLoop.instance()
        self.timeout_id = ioloop.add_timeout(time.time() + 0.1, self._create_connection)

    def send(self, queue, data, exchange=""):
        self._channel.queue_declare(queue=queue, durable=True,
            callback=lambda frame: self._channel.basic_publish(exchange=exchange, routing_key=queue, body=data))
conductor/conductor/windows_agent.py (new file, 13 lines)
@@ -0,0 +1,13 @@
import xml_code_engine


def send_command(engine, context, body, template, mappings, host, **kwargs):
    command_dispatcher = context['/commandDispatcher']

    def callback(result):
        print "Received ", result
        engine.evaluate_content(body.find('success'), context)

    command_dispatcher.execute(name='agent', template=template, mappings=mappings, host=host, callback=callback)


xml_code_engine.XmlCodeEngine.register_function(send_command, "send-command")
conductor/conductor/workflow.py (new file, 142 lines)
@@ -0,0 +1,142 @@
import jsonpath
import types
import re

import xml_code_engine
import function_context


class Workflow(object):
    def __init__(self, filename, data, command_dispatcher, config):
        self._data = data
        self._engine = xml_code_engine.XmlCodeEngine()
        with open(filename) as xml:
            self._engine.load(xml)
        self._command_dispatcher = command_dispatcher
        self._config = config

    def execute(self):
        while True:
            context = function_context.Context()
            context['/dataSource'] = self._data
            context['/commandDispatcher'] = self._command_dispatcher
            context['/config'] = self._config
            if not self._engine.execute(context):
                break

    @staticmethod
    def _get_path(obj, path, create_non_existing=False):
        current = obj
        for part in path:
            if isinstance(current, types.ListType):
                current = current[int(part)]
            elif isinstance(current, types.DictionaryType):
                if part not in current:
                    if create_non_existing:
                        current[part] = {}
                    else:
                        return None
                current = current[part]
            else:
                raise ValueError()

        return current

    @staticmethod
    def _set_path(obj, path, value):
        current = Workflow._get_path(obj, path[:-1], True)
        if isinstance(current, types.ListType):
            current[int(path[-1])] = value
        elif isinstance(current, types.DictionaryType):
            current[path[-1]] = value
        else:
            raise ValueError()

    @staticmethod
    def _select_func(path, context, **kwargs):
        if path.startswith('##'):
            config = context['/config']
            return config[path[2:]]
        elif path.startswith('#'):
            return context[path[1:]]

        position = context['dataSource_currentPosition'] or []
        data = context['dataSource']

        index = 0
        for c in path:
            if c == ':':
                if len(position) > 0:
                    position = position[:-1]
            elif c == '/':
                position = []
            else:
                break

            index += 1

        return Workflow._get_path(data, position + path[index:].split('.'))

    @staticmethod
    def _set_func(path, context, body, engine, **kwargs):
        body_data = engine.evaluate_content(body, context)

        if path[0] == '#':
            context[path[1:]] = body_data
            return

        position = context['dataSource_currentPosition'] or []
        data = context['dataSource']

        index = 0
        for c in path:
            if c == ':':
                if len(position) > 0:
                    position = position[:-1]
            elif c == '/':
                position = []
            else:
                break

            index += 1

        new_position = position + path[index:].split('.')
        if Workflow._get_path(data, new_position) != body_data:
            Workflow._set_path(data, new_position, body_data)
            context['/hasSideEffects'] = True

    @staticmethod
    def _rule_func(match, context, body, engine, limit=0, **kwargs):
        position = context['dataSource_currentPosition'] or []
        data = context['dataSource_currentObj']
        if data is None:
            data = context['dataSource']
        match = re.sub(r'@\.([\w.]+)', r"Workflow._get_path(@, '\1'.split('.'))", match)
        selected = jsonpath.jsonpath(data, match, 'IPATH') or []

        index = 0
        for found_match in selected:
            if 0 < int(limit) <= index:
                break
            index += 1
            new_position = position + found_match
            context['dataSource_currentPosition'] = new_position
            context['dataSource_currentObj'] = Workflow._get_path(context['dataSource'], new_position)
            for element in body:
                engine.evaluate(element, context)
                if element.tag == 'rule' and context['/hasSideEffects']:
                    break

    @staticmethod
    def _workflow_func(context, body, engine, **kwargs):
        context['/hasSideEffects'] = False
        for element in body:
            engine.evaluate(element, context)
            if element.tag == 'rule' and context['/hasSideEffects']:
                return True
        return False


xml_code_engine.XmlCodeEngine.register_function(Workflow._rule_func, 'rule')
xml_code_engine.XmlCodeEngine.register_function(Workflow._workflow_func, 'workflow')
xml_code_engine.XmlCodeEngine.register_function(Workflow._set_func, 'set')
xml_code_engine.XmlCodeEngine.register_function(Workflow._select_func, 'select')
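A sketch of how the 'select' paths used in the workflow XML resolve against the data document (editor's illustration, not part of the commit; it reuses Workflow._get_path directly, and the hand-built position list is an assumption about what jsonpath's IPATH mode yields): a plain path is read relative to the rule's current position, each leading ':' pops one level off that position, and '/' starts again from the root.

    from conductor.workflow import Workflow

    data = {'services': {'activeDirectory': [
        {'domain': 'acme.loc', 'units': [{'name': 'dc01'}]}]}}
    position = ['services', 'activeDirectory', '0', 'units', '0']   # a matched unit

    print Workflow._get_path(data, position + ['name'])             # 'dc01'     (path "name")
    print Workflow._get_path(data, position[:-2] + ['domain'])      # 'acme.loc' (path "::domain")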
conductor/conductor/xml_code_engine.py (new file, 141 lines)
@@ -0,0 +1,141 @@
#from lxml import etree
import xml.etree.ElementTree as etree
import function_context


class XmlCodeEngine(object):
    _functionMap = {}

    def __init__(self):
        self._document = None

    def load(self, file_obj):
        self._document = etree.parse(file_obj)

    @staticmethod
    def register_function(func, name):
        XmlCodeEngine._functionMap[name] = func

    def _execute_function(self, name, element, parent_context):
        if name == 'parameter':
            return None

        if name not in self._functionMap:
            raise KeyError('Unknown function %s' % name)

        definition = self._functionMap[name]
        context = function_context.Context(parent_context)
        args = {'engine': self, 'body': element, 'context': context}

        for key, value in element.items():
            args[key] = value

        for parameter in element.findall('parameter'):
            args[parameter.get('name')] = self.evaluate_content(parameter, context)

        return definition(**args)

    def evaluate(self, element, parent_context):
        return self._execute_function(element.tag, element, parent_context)

    def evaluate_content(self, element, context):
        parts = [element.text or '']
        do_strip = False
        for sub_element in element:
            if sub_element.tag == 'parameter':
                continue
            do_strip = True
            parts.append(self._execute_function(sub_element.tag, sub_element, context))
            parts.append(sub_element.tail or '')

        result = []

        for t in parts:
            if not isinstance(t, (str, unicode)):
                result.append(t)
        if len(result) == 0:
            return_value = ''.join(parts)
            if do_strip:
                return_value = return_value.strip()
        elif len(result) == 1:
            return_value = result[0]
        else:
            return_value = result

        return return_value

    def execute(self, parent_context=None):
        root = self._document.getroot()
        return self.evaluate(root, parent_context)


def _dict_func(engine, body, context, **kwargs):
    result = {}
    for item in body:
        key = item.get('name')
        value = engine.evaluate_content(item, context)
        result[key] = value
    return result


def _array_func(engine, body, context, **kwargs):
    result = []
    for item in body:
        result.append(engine.evaluate(item, context))
    return result


def _text_func(engine, body, context, **kwargs):
    return str(engine.evaluate_content(body, context))


def _int_func(engine, body, context, **kwargs):
    return int(engine.evaluate_content(body, context))


def _function_func(engine, body, context, **kwargs):
    return lambda: engine.evaluate_content(body, context)


def _null_func(**kwargs):
    return None


def _true_func(**kwargs):
    return True


def _false_func(**kwargs):
    return False


XmlCodeEngine.register_function(_dict_func, "map")
XmlCodeEngine.register_function(_array_func, "list")
XmlCodeEngine.register_function(_text_func, "text")
XmlCodeEngine.register_function(_int_func, "int")
XmlCodeEngine.register_function(_function_func, "function")
XmlCodeEngine.register_function(_null_func, "null")
XmlCodeEngine.register_function(_true_func, "true")
XmlCodeEngine.register_function(_false_func, "false")


def xprint(context, body, **kwargs):
    print "------------------------ start ------------------------"
    for arg in kwargs:
        print "%s = %s" % (arg, kwargs[arg])
    print 'context = ', context
    print 'body = %s (%s)' % (body, body.text)
    print "------------------------- end -------------------------"

XmlCodeEngine.register_function(xprint, "print")


# parser = XmlCodeEngine()
# file = open('test2.xml')
# parser.load(file)
#
#
#
#
# context = functioncontext.Context()
# context['test'] = 'xxx'
#
# parser.execute(context)
#
# print etree.xpath('/')
# root = parser._document.getroot()
# print root.items()
# print root.text.lstrip() + "|"
# for x in root:
#     print x, type(x), x.tail
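A small end-to-end sketch of the engine (editor's illustration, not part of the commit; the 'echo' tag and the import path are assumptions): each XML tag is dispatched to a registered Python function, with the element's attributes and <parameter> children passed as keyword arguments.

    import StringIO

    from conductor import xml_code_engine, function_context


    def _echo_func(context, body, engine, message=None, **kwargs):
        # Receives the element's 'message' attribute as a keyword argument.
        print "echo:", message
        return True

    xml_code_engine.XmlCodeEngine.register_function(_echo_func, 'echo')

    engine = xml_code_engine.XmlCodeEngine()
    engine.load(StringIO.StringIO('<echo message="hello"/>'))
    engine.execute(function_context.Context())   # prints "echo: hello"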
conductor/data/templates/agent/CreatePrimaryDC.template (new file, 21 lines)
@@ -0,0 +1,21 @@
{
    "Scripts": [
"RnVuY3Rpb24gU2V0LUxvY2FsVXNlclBhc3N3b3JkIHsNCiAgICBwYXJhbSAoDQogICAgICAgIFtTdHJpbmddICRVc2VyTmFtZSwNCiAgICAgICAgW1N0cmluZ10gJFBhc3N3b3JkLA0KICAgICAgICBbU3dpdGNoXSAkRm9yY2UNCiAgICApDQogICAgDQogICAgdHJhcCB7IFN0b3AtRXhlY3V0aW9uICRfIH0NCiAgICANCiAgICBpZiAoKEdldC1XbWlPYmplY3QgV2luMzJfVXNlckFjY291bnQgLUZpbHRlciAiTG9jYWxBY2NvdW50ID0gJ1RydWUnIEFORCBOYW1lPSckVXNlck5hbWUnIikgLWVxICRudWxsKSB7DQogICAgICAgIHRocm93ICJVbmFibGUgdG8gZmluZCBsb2NhbCB1c2VyIGFjY291bnQgJyRVc2VyTmFtZSciDQogICAgfQ0KICAgIA0KICAgIGlmICgkRm9yY2UpIHsNCiAgICAgICAgV3JpdGUtTG9nICJDaGFuZ2luZyBwYXNzd29yZCBmb3IgdXNlciAnJFVzZXJOYW1lJyB0byAnKioqKionIiAjIDopDQogICAgICAgIChbQURTSV0gIldpbk5UOi8vLi8kVXNlck5hbWUiKS5TZXRQYXNzd29yZCgkUGFzc3dvcmQpDQogICAgfQ0KICAgIGVsc2Ugew0KICAgICAgICBXcml0ZS1Mb2dXYXJuaW5nICJZb3UgYXJlIHRyeWluZyB0byBjaGFuZ2UgcGFzc3dvcmQgZm9yIHVzZXIgJyRVc2VyTmFtZScuIFRvIGRvIHRoaXMgcGxlYXNlIHJ1biB0aGUgY29tbWFuZCBhZ2FpbiB3aXRoIC1Gb3JjZSBwYXJhbWV0ZXIuIg0KICAgICAgICAkVXNlckFjY291bnQNCiAgICB9DQp9DQoNCg0KDQpGdW5jdGlvbiBJbnN0YWxsLVJvbGVQcmltYXJ5RG9tYWluQ29udHJvbGxlcg0Kew0KPCMNCi5TWU5PUFNJUw0KQ29uZmlndXJlIG5vZGUncyBuZXR3b3JrIGFkYXB0ZXJzLg0KQ3JlYXRlIGZpcnN0IGRvbWFpbiBjb250cm9sbGVyIGluIHRoZSBmb3Jlc3QuDQoNCi5FWEFNUExFDQpQUz4gSW5zdGFsbC1Sb2xlUHJpbWFyeURvbWFpbkNvbnRyb2xsZXIgLURvbWFpbk5hbWUgYWNtZS5sb2NhbCAtU2FmZU1vZGVQYXNzd29yZCAiUEBzc3cwcmQiDQoNCkluc3RhbGwgRE5TIGFuZCBBRERTLCBjcmVhdGUgZm9yZXN0IGFuZCBkb21haW4gJ2FjbWUubG9jYWwnLg0KU2V0IERDIHJlY292ZXJ5IG1vZGUgcGFzc3dvcmQgdG8gJ1BAc3N3MHJkJy4NCiM+DQoJDQoJcGFyYW0NCgkoDQoJCVtTdHJpbmddDQoJCSMgTmV3IGRvbWFpbiBuYW1lLg0KCQkkRG9tYWluTmFtZSwNCgkJDQoJCVtTdHJpbmddDQoJCSMgRG9tYWluIGNvbnRyb2xsZXIgcmVjb3ZlcnkgbW9kZSBwYXNzd29yZC4NCgkJJFNhZmVNb2RlUGFzc3dvcmQNCgkpDQoNCgl0cmFwIHsgU3RvcC1FeGVjdXRpb24gJF8gfQ0KDQogICAgICAgICMgQWRkIHJlcXVpcmVkIHdpbmRvd3MgZmVhdHVyZXMNCglBZGQtV2luZG93c0ZlYXR1cmVXcmFwcGVyIGANCgkJLU5hbWUgIkROUyIsIkFELURvbWFpbi1TZXJ2aWNlcyIsIlJTQVQtREZTLU1nbXQtQ29uIiBgDQoJCS1JbmNsdWRlTWFuYWdlbWVudFRvb2xzIGANCiAgICAgICAgLU5vdGlmeVJlc3RhcnQNCg0KDQoJV3JpdGUtTG9nICJDcmVhdGluZyBmaXJzdCBkb21haW4gY29udHJvbGxlciAuLi4iDQoJCQ0KCSRTTUFQID0gQ29udmVydFRvLVNlY3VyZVN0cmluZyAtU3RyaW5nICRTYWZlTW9kZVBhc3N3b3JkIC1Bc1BsYWluVGV4dCAtRm9yY2UNCgkJDQoJSW5zdGFsbC1BRERTRm9yZXN0IGANCgkJLURvbWFpbk5hbWUgJERvbWFpbk5hbWUgYA0KCQktU2FmZU1vZGVBZG1pbmlzdHJhdG9yUGFzc3dvcmQgJFNNQVAgYA0KCQktRG9tYWluTW9kZSBEZWZhdWx0IGANCgkJLUZvcmVzdE1vZGUgRGVmYXVsdCBgDQoJCS1Ob1JlYm9vdE9uQ29tcGxldGlvbiBgDQoJCS1Gb3JjZSBgDQoJCS1FcnJvckFjdGlvbiBTdG9wIHwgT3V0LU51bGwNCg0KCVdyaXRlLUhvc3QgIldhaXRpbmcgZm9yIHJlYm9vdCAuLi4iCQkNCiMJU3RvcC1FeGVjdXRpb24gLUV4aXRDb2RlIDMwMTAgLUV4aXRTdHJpbmcgIkNvbXB1dGVyIG11c3QgYmUgcmVzdGFydGVkIHRvIGZpbmlzaCBkb21haW4gY29udHJvbGxlciBwcm9tb3Rpb24uIg0KIwlXcml0ZS1Mb2cgIlJlc3RhcmluZyBjb21wdXRlciAuLi4iDQojCVJlc3RhcnQtQ29tcHV0ZXIgLUZvcmNlDQp9DQo="
    ],
    "Commands": [
        {
            "Name": "Import-Module",
            "Arguments": {
                "Name": "CoreFunctions"
            }
        },
        {
            "Name": "Install-RolePrimaryDomainController",
            "Arguments": {
                "DomainName": "$dc_name",
                "SafeModePassword": "$recovery_password"
            }
        }
    ],
    "RebootOnCompletion": 1
}
conductor/data/templates/agent/SetPassword.template (new file, 22 lines)
@@ -0,0 +1,22 @@
{
    "Scripts": [
"RnVuY3Rpb24gU2V0LUxvY2FsVXNlclBhc3N3b3JkIHsNCiAgICBwYXJhbSAoDQogICAgICAgIFtTdHJpbmddICRVc2VyTmFtZSwNCiAgICAgICAgW1N0cmluZ10gJFBhc3N3b3JkLA0KICAgICAgICBbU3dpdGNoXSAkRm9yY2UNCiAgICApDQogICAgDQogICAgdHJhcCB7IFN0b3AtRXhlY3V0aW9uICRfIH0NCiAgICANCiAgICBpZiAoKEdldC1XbWlPYmplY3QgV2luMzJfVXNlckFjY291bnQgLUZpbHRlciAiTG9jYWxBY2NvdW50ID0gJ1RydWUnIEFORCBOYW1lPSckVXNlck5hbWUnIikgLWVxICRudWxsKSB7DQogICAgICAgIHRocm93ICJVbmFibGUgdG8gZmluZCBsb2NhbCB1c2VyIGFjY291bnQgJyRVc2VyTmFtZSciDQogICAgfQ0KICAgIA0KICAgIGlmICgkRm9yY2UpIHsNCiAgICAgICAgV3JpdGUtTG9nICJDaGFuZ2luZyBwYXNzd29yZCBmb3IgdXNlciAnJFVzZXJOYW1lJyB0byAnKioqKionIiAjIDopDQogICAgICAgIChbQURTSV0gIldpbk5UOi8vLi8kVXNlck5hbWUiKS5TZXRQYXNzd29yZCgkUGFzc3dvcmQpDQogICAgfQ0KICAgIGVsc2Ugew0KICAgICAgICBXcml0ZS1Mb2dXYXJuaW5nICJZb3UgYXJlIHRyeWluZyB0byBjaGFuZ2UgcGFzc3dvcmQgZm9yIHVzZXIgJyRVc2VyTmFtZScuIFRvIGRvIHRoaXMgcGxlYXNlIHJ1biB0aGUgY29tbWFuZCBhZ2FpbiB3aXRoIC1Gb3JjZSBwYXJhbWV0ZXIuIg0KICAgICAgICAkVXNlckFjY291bnQNCiAgICB9DQp9DQoNCg0KDQpGdW5jdGlvbiBJbnN0YWxsLVJvbGVQcmltYXJ5RG9tYWluQ29udHJvbGxlcg0Kew0KPCMNCi5TWU5PUFNJUw0KQ29uZmlndXJlIG5vZGUncyBuZXR3b3JrIGFkYXB0ZXJzLg0KQ3JlYXRlIGZpcnN0IGRvbWFpbiBjb250cm9sbGVyIGluIHRoZSBmb3Jlc3QuDQoNCi5FWEFNUExFDQpQUz4gSW5zdGFsbC1Sb2xlUHJpbWFyeURvbWFpbkNvbnRyb2xsZXIgLURvbWFpbk5hbWUgYWNtZS5sb2NhbCAtU2FmZU1vZGVQYXNzd29yZCAiUEBzc3cwcmQiDQoNCkluc3RhbGwgRE5TIGFuZCBBRERTLCBjcmVhdGUgZm9yZXN0IGFuZCBkb21haW4gJ2FjbWUubG9jYWwnLg0KU2V0IERDIHJlY292ZXJ5IG1vZGUgcGFzc3dvcmQgdG8gJ1BAc3N3MHJkJy4NCiM+DQoJDQoJcGFyYW0NCgkoDQoJCVtTdHJpbmddDQoJCSMgTmV3IGRvbWFpbiBuYW1lLg0KCQkkRG9tYWluTmFtZSwNCgkJDQoJCVtTdHJpbmddDQoJCSMgRG9tYWluIGNvbnRyb2xsZXIgcmVjb3ZlcnkgbW9kZSBwYXNzd29yZC4NCgkJJFNhZmVNb2RlUGFzc3dvcmQNCgkpDQoNCgl0cmFwIHsgU3RvcC1FeGVjdXRpb24gJF8gfQ0KDQogICAgICAgICMgQWRkIHJlcXVpcmVkIHdpbmRvd3MgZmVhdHVyZXMNCglBZGQtV2luZG93c0ZlYXR1cmVXcmFwcGVyIGANCgkJLU5hbWUgIkROUyIsIkFELURvbWFpbi1TZXJ2aWNlcyIsIlJTQVQtREZTLU1nbXQtQ29uIiBgDQoJCS1JbmNsdWRlTWFuYWdlbWVudFRvb2xzIGANCiAgICAgICAgLU5vdGlmeVJlc3RhcnQNCg0KDQoJV3JpdGUtTG9nICJDcmVhdGluZyBmaXJzdCBkb21haW4gY29udHJvbGxlciAuLi4iDQoJCQ0KCSRTTUFQID0gQ29udmVydFRvLVNlY3VyZVN0cmluZyAtU3RyaW5nICRTYWZlTW9kZVBhc3N3b3JkIC1Bc1BsYWluVGV4dCAtRm9yY2UNCgkJDQoJSW5zdGFsbC1BRERTRm9yZXN0IGANCgkJLURvbWFpbk5hbWUgJERvbWFpbk5hbWUgYA0KCQktU2FmZU1vZGVBZG1pbmlzdHJhdG9yUGFzc3dvcmQgJFNNQVAgYA0KCQktRG9tYWluTW9kZSBEZWZhdWx0IGANCgkJLUZvcmVzdE1vZGUgRGVmYXVsdCBgDQoJCS1Ob1JlYm9vdE9uQ29tcGxldGlvbiBgDQoJCS1Gb3JjZSBgDQoJCS1FcnJvckFjdGlvbiBTdG9wIHwgT3V0LU51bGwNCg0KCVdyaXRlLUhvc3QgIldhaXRpbmcgZm9yIHJlYm9vdCAuLi4iCQkNCiMJU3RvcC1FeGVjdXRpb24gLUV4aXRDb2RlIDMwMTAgLUV4aXRTdHJpbmcgIkNvbXB1dGVyIG11c3QgYmUgcmVzdGFydGVkIHRvIGZpbmlzaCBkb21haW4gY29udHJvbGxlciBwcm9tb3Rpb24uIg0KIwlXcml0ZS1Mb2cgIlJlc3RhcmluZyBjb21wdXRlciAuLi4iDQojCVJlc3RhcnQtQ29tcHV0ZXIgLUZvcmNlDQp9DQo="
    ],
    "Commands": [
        {
            "Name": "Import-Module",
            "Arguments": {
                "Name": "CoreFunctions"
            }
        },
        {
            "Name": "Set-LocalUserPassword",
            "Arguments": {
                "UserName": "Administrator",
                "Password": "$adm_password",
                "Force": true
            }
        }
    ],
    "RebootOnCompletion": 0
}
conductor/data/templates/cf/Windows.template (new file, 61 lines)
@@ -0,0 +1,61 @@
{
    "AWSTemplateFormatVersion" : "2010-09-09",

    "Description" : "",

    "Parameters" : {
        "KeyName" : {
            "Description" : "Name of an existing Amazon EC2 key pair for RDP access",
            "Type" : "String",
            "Default" : "keero_key"
        },
        "InstanceType" : {
            "Description" : "Amazon EC2 instance type",
            "Type" : "String",
            "Default" : "m1.medium",
            "AllowedValues" : [ "m1.small", "m1.medium", "m1.large" ]
        },
        "ImageName" : {
            "Description" : "Image name",
            "Type" : "String",
            "Default" : "ws-2012-full-agent",
            "AllowedValues" : [ "ws-2012-full", "ws-2012-core", "ws-2012-full-agent" ]
        }
    },

    "Resources" : {
        "IAMUser" : {
            "Type" : "AWS::IAM::User",
            "Properties" : {
                "Path": "/",
                "Policies": [{
                    "PolicyName": "root",
                    "PolicyDocument": { "Statement":[{
                        "Effect": "Allow",
                        "Action": "CloudFormation:DescribeStackResource",
                        "Resource": "*"
                    }]}
                }]
            }
        },

        "IAMUserAccessKey" : {
            "Type" : "AWS::IAM::AccessKey",
            "Properties" : {
                "UserName" : {"Ref": "IAMUser"}
            }
        },

        "$instanceName": {
            "Type" : "AWS::EC2::Instance",
            "Properties": {
                "InstanceType" : { "Ref" : "InstanceType" },
                "ImageId" : { "Ref" : "ImageName" },
                "KeyName" : { "Ref" : "KeyName" }
            }
        }
    },

    "Outputs" : {
    }
}
conductor/data/workflows/testDC.xml (new file, 69 lines)
@@ -0,0 +1,69 @@
<workflow>
    <rule match="$.services.activeDirectory[*].units[?(@.state.instanceName is None)]">
        <update-cf-stack template="Windows">
            <parameter name="mappings">
                <map>
                    <mapping name="instanceName">
                        <select path="name"/>
                    </mapping>
                </map>
            </parameter>
            <parameter name="arguments">
                <map>
                    <argument name="KeyName">keero-linux-keys</argument>
                    <argument name="InstanceType">m1.medium</argument>
                    <argument name="ImageName">ws-2012-full-agent</argument>
                </map>
            </parameter>

            <success>
                <set path="state.instanceName"><select path="name"/></set>
            </success>
        </update-cf-stack>
    </rule>
    <rule match="$.services.activeDirectory[*].units[?(@.state.instanceName is not None and @.adminPassword is not None)]">
        <send-command template="SetPassword">
            <parameter name="host">
                <select path="name"/>
            </parameter>
            <parameter name="mappings">
                <map>
                    <mapping name="adm_password">
                        <select path="adminPassword"/>
                    </mapping>
                </map>
            </parameter>

            <success>
                <set path="adminPassword"><null/></set>
            </success>

        </send-command>
    </rule>
    <rule match="$.services.activeDirectory[*].units[?(@.state.instanceName is not None and not @.state.isInRole and @.isMaster)]">
        <send-command template="CreatePrimaryDC">
            <parameter name="host">
                <select path="name"/>
            </parameter>
            <parameter name="mappings">
                <map>
                    <mapping name="dc_name">
                        <select path="::domain"/>
                    </mapping>
                    <mapping name="recovery_password">
                        <select path="recoveryPassword"/>
                    </mapping>
                </map>
            </parameter>
            <success>
                <set path="recoveryPassword"><null/></set>
                <set path="state.isInRole"><true/></set>
                <set path="::state.WeHavePrimaryDC"><true/></set>
            </success>

        </send-command>
    </rule>
    <rule match="$.services.activeDirectory[?(@state.WeHavePrimaryDC == True)].units[?(@.state.instanceName is not None and not @.state.isInRole and @.isMaster = False)]">

    </rule>
</workflow>
conductor/etc/app.config (new file, 5 lines)
@@ -0,0 +1,5 @@
[rabbitmq]
host = localhost
vhost = keero
login = keero
password = keero
conductor/test.json (new file, 23 lines)
@@ -0,0 +1,23 @@
{
    "name": "MyDataCenter",
    "services": {
        "activeDirectory": [
            {
                "id": "1234567890",
                "domain": "acme.loc",
                "units": [
                    {
                        "name": "dc01",
                        "isMaster": true,
                        "recoveryPassword": "2SuperP@ssw0rd2"
                    },
                    {
                        "name": "dc02",
                        "isMaster": false,
                        "adminPassword": "SuperP@ssw0rd"
                    }
                ]
            }
        ]
    }
}
conductor/tools/install_venv.py (new file, 154 lines)
@@ -0,0 +1,154 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2010 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Installation script for Glance's development virtualenv
"""

import os
import subprocess
import sys


ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
VENV = os.path.join(ROOT, '.venv')
PIP_REQUIRES = os.path.join(ROOT, 'tools', 'pip-requires')
TEST_REQUIRES = os.path.join(ROOT, 'tools', 'test-requires')


def die(message, *args):
    print >> sys.stderr, message % args
    sys.exit(1)


def run_command(cmd, redirect_output=True, check_exit_code=True):
    """
    Runs a command in an out-of-process shell, returning the
    output of that command. Working directory is ROOT.
    """
    if redirect_output:
        stdout = subprocess.PIPE
    else:
        stdout = None

    proc = subprocess.Popen(cmd, cwd=ROOT, stdout=stdout)
    output = proc.communicate()[0]
    if check_exit_code and proc.returncode != 0:
        die('Command "%s" failed.\n%s', ' '.join(cmd), output)
    return output


HAS_EASY_INSTALL = bool(run_command(['which', 'easy_install'],
                                    check_exit_code=False).strip())
HAS_VIRTUALENV = bool(run_command(['which', 'virtualenv'],
                                  check_exit_code=False).strip())


def check_dependencies():
    """Make sure virtualenv is in the path."""

    if not HAS_VIRTUALENV:
        print 'not found.'
        # Try installing it via easy_install...
        if HAS_EASY_INSTALL:
            print 'Installing virtualenv via easy_install...',
            if not run_command(['which', 'easy_install']):
                die('ERROR: virtualenv not found.\n\n'
                    'Balancer development requires virtualenv, please install'
                    ' it using your favorite package management tool')
            print 'done.'
    print 'done.'


def create_virtualenv(venv=VENV):
    """
    Creates the virtual environment and installs PIP only into the
    virtual environment
    """
    print 'Creating venv...',
    run_command(['virtualenv', '-q', '--no-site-packages', VENV])
    print 'done.'
    print 'Installing pip in virtualenv...',
    if not run_command(['tools/with_venv.sh', 'easy_install',
                        'pip>1.0']).strip():
        die("Failed to install pip.")
    print 'done.'


def pip_install(*args):
    run_command(['tools/with_venv.sh',
                 'pip', 'install', '--upgrade'] + list(args),
                redirect_output=False)


def install_dependencies(venv=VENV):
    print 'Installing dependencies with pip (this can take a while)...'

    pip_install('pip')

    pip_install('-r', PIP_REQUIRES)
    pip_install('-r', TEST_REQUIRES)

    # Tell the virtual env how to "import glance"
    py_ver = _detect_python_version(venv)
    pthfile = os.path.join(venv, "lib", py_ver,
                           "site-packages", "balancer.pth")
    f = open(pthfile, 'w')
    f.write("%s\n" % ROOT)


def _detect_python_version(venv):
    lib_dir = os.path.join(venv, "lib")
    for pathname in os.listdir(lib_dir):
        if pathname.startswith('python'):
            return pathname
    raise Exception('Unable to detect Python version')


def print_help():
    help = """
Glance development environment setup is complete.

Glance development uses virtualenv to track and manage Python dependencies
while in development and testing.

To activate the Glance virtualenv for the extent of your current shell session
you can run:

$ source .venv/bin/activate

Or, if you prefer, you can run commands in the virtualenv on a case by case
basis by running:

$ tools/with_venv.sh <your command>

Also, make test will automatically use the virtualenv.
"""
    print help


def main(argv):
    check_dependencies()
    create_virtualenv()
    install_dependencies()
    print_help()


if __name__ == '__main__':
    main(sys.argv)
conductor/tools/pip-requires (new file, 3 lines)
@@ -0,0 +1,3 @@
pika
tornado
jsonpath
conductor/tools/test-requires (new file, 8 lines)
@@ -0,0 +1,8 @@
unittest2
mock==0.8.0
nose
nose-exclude
nosexcover
#openstack.nose_plugin
pep8==1.0.1
sphinx>=1.1.2
conductor/tools/with_venv.sh (new file, 4 lines)
@@ -0,0 +1,4 @@
#!/bin/bash
TOOLS=`dirname $0`
VENV=$TOOLS/../.venv
source $VENV/bin/activate && $@