Remove oahu mentions ... and fix up README

Change-Id: I9c077b6e5ed10be76ef6b702d10a30f33aff6fa2
commit ddda9be458 (parent c30f3db329)
Author: Sandy Walsh
Date: 2015-03-05 12:12:48 -08:00
4 changed files with 9 additions and 154 deletions

README

@@ -9,16 +9,13 @@ Prerequisites:
 make sure you have a working python dev environment (2.7+ ideally)
 including virtualenv.
-Install rabbitmq.
-If using the oahu pipeline engine, install mongodb.
-If using the winchester pipeline engine, install MySQL.
+Install rabbitmq and mysql-server.
 TL;DR:
 handle the prerequisites above.
-git clone https://github.com/StackTach/sandbox.git
+git clone https://github.com/stackforge/stacktach-sandbox
 cd sandbox
-If running winchester:
 create a mysql database to use
 set the database url appropriately in winchester.yaml
 ./build.sh
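The two manual database steps above, spelled out; a minimal sketch assuming a local MySQL server (the database name and the winchester.yaml key layout are assumptions, not taken from the repo):

    # create a mysql database for winchester to use (name is an assumption)
    mysql -u root -p -e "CREATE DATABASE winchester;"
    # then set the database url in winchester.yaml, e.g. a
    # SQLAlchemy-style URL (exact key layout assumed):
    #   url: 'mysql://root:password@localhost/winchester'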
@@ -34,17 +31,12 @@ Using Vagrant for fast local provisioning:
 * cd stacktach-sandbox
 * ./build.sh
-Note:
-This uses sandbox defaults including the use of the Winchester
-pipeline.
 Tweaks:
 You can create a `local.sh` to override the defaults:
 SOURCE_DIR=git    # where the StackTach repos are cloned
 VENV_DIR=.venv    # name of the .venv
-PIPELINE_ENGINE=oahu    # Name of pipeline processing library to run.
 The `build.sh` script will clone each of the StackTach projects
 into the `$SOURCE_DIR` directory (so you can work on them in a running env).
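For example, a `local.sh` that relocates the clones and the virtualenv might look like this (values are illustrative; only the two variable names come from the README, and the sourcing mechanism is assumed):

    # local.sh -- presumably sourced by build.sh to override the defaults
    SOURCE_DIR=/opt/stacktach/git     # where the StackTach repos are cloned
    VENV_DIR=/opt/stacktach/.venv     # name of the virtualenv directory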
@@ -60,8 +52,11 @@ with the `yagi.conf` configuration file. This will read events from
 the rabbit queue and save them to local files. The working directory
 and archive directory for `shoebox` are specified in `yagi.conf`.
-The sandbox environment configures `shoebox` to upload archive files
-to Swift automatically. This requires you create a credentials file
+The sandbox environment configures `shoebox` to archive notifications
+to local .json files and tarball them up after the directory reaches
+20GB.
+To have shoebox upload to Swift, create a credentials file
 in the `.../sandbox/` directory (like in
 `.../git/sandbox/etc/sample_rax_credentials.conf`). Call it
 `swift_credentials.conf` or alter the `shoebox.conf` file accordingly. If
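Putting the credentials file in place is a copy-and-edit; a minimal sketch using only the paths the README already cites:

    cd sandbox    # the sandbox checkout root
    cp git/sandbox/etc/sample_rax_credentials.conf swift_credentials.conf
    # edit swift_credentials.conf with your Swift credentials; the field
    # names follow the sample file and are not reproduced here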

oahu_config.py (deleted)

@@ -1,108 +0,0 @@
-import random
-
-import oahu.config
-from oahu import debugging
-from oahu import mongodb_driver as driver
-from oahu import trigger_definition
-from oahu import pipeline_callback
-from oahu import criteria
-
-
-# We determine which operation this is by the first of these we see.
-OPERATIONS = [
-    'compute.instance.shutdown.start',
-    'compute.instance.delete.start',
-    'compute.instance.snapshot.start',
-    'compute.instance.create.start',
-    'compute.instance.reboot.start',
-    'compute.instance.rebuild.start',
-    'compute.instance.resize.start',
-    'compute.instance.finish_resize.start',
-    'compute.instance.resize.confirm.start',
-    'compute.instance.resize.prep.start',
-    'compute.instance.power_off.start',
-    'compute.instance.rescue.start',
-    'compute.instance.unrescue.start',
-]
-
-
-class RequestIdCallback(pipeline_callback.PipelineCallback):
-    def on_trigger(self, stream, scratchpad):
-        if not len(stream.events):
-            return
-
-        # Try to guess the operation by the first known event_type ...
-        operation = None
-        for e in stream.events:
-            if e['event_type'] in OPERATIONS:
-                operation = e['event_type']
-                break
-        scratchpad['operation'] = operation
-
-        # How long did this operation take?
-        first = stream.events[0]
-        last = stream.events[-1]
-        delta = last['timestamp'] - first['timestamp']
-        scratchpad['request_id'] = first['_context_request_id']
-        scratchpad['delta'] = delta
-
-    def commit(self, stream, scratchpad):
-        print "Req: %s %s time delta = %s" % (scratchpad['request_id'],
-                                              scratchpad['operation'],
-                                              scratchpad['delta'])
-
-
-class EodExistsCallback(pipeline_callback.PipelineCallback):
-    def on_trigger(self, stream, scratchpad):
-        print "EOD-Exists:", stream
-        #for event in stream.events:
-        #    print event['timestamp'], event['event_type']
-        if random.choice([True, False]):
-            raise Exception("Trigger Exception %d" % random.randrange(100))
-
-    def commit(self, stream, scratchpad):
-        if random.choice([True, False]):
-            raise Exception("Commit Exception %d" % random.randrange(100))
-
-
-class Config(oahu.config.Config):
-    def get_driver(self):
-        self.request_id_callback = RequestIdCallback()
-        self.eod_exists_callback = EodExistsCallback()
-
-        # Trigger names have to be consistent across all workers
-        # (yagi and daemons).
-        by_request = trigger_definition.TriggerDefinition("request-id",
-                        ["_context_request_id", ],  # Match criteria
-                        criteria.Inactive(60),  # Trigger criteria
-                        [self.request_id_callback, ],  # Pipeline processing
-                        debug=True)
-
-        # This trigger requires a Trait called "when_date" which is
-        # the date-only portion of the "when" trait. We will create
-        # streams based on uuid for a given day. The distiller will
-        # create this trait for us.
-        instance_usage = trigger_definition.TriggerDefinition("eod-exists",
-                            ["payload/instance_id", "audit_bucket"],
-                            criteria.EndOfDayExists(
-                                'compute.instance.exists'),
-                            [self.eod_exists_callback, ],
-                            debug=True,
-                            dumper=debugging.DetailedDumper())
-
-        triggers = [by_request, instance_usage]
-        return driver.MongoDBDriver(triggers)
-
-    def get_distiller_config(self):
-        return ""
-
-    def get_ready_chunk_size(self):  # Check for Ready streams.
-        return 100
-
-    def get_trigger_chunk_size(self):  # Find streams ready to Trigger.
-        return 1000
-
-    def get_completed_chunk_size(self):  # Cleanup completed streams.
-        return -1

screen session config for the oahu pipeline (deleted)

@@ -1,22 +0,0 @@
-sessionname tach
-hardstatus alwayslastline '%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})'
-screen -t quincy bash
-stuff "cd git/quincy/quincy; gunicorn --log-file=- 'api:get_api(config_location=\"../../../quincy.conf\")'\r"
-#stuff "cd git/quincy/quincy; gunicorn --log-file=- 'api:get_api()'\r"
-screen -t bash bash
-stuff "klugman streams\r"
-screen -t yagi1 bash
-stuff "yagi-event --config yagi.conf\r"
-screen -t yagi2 bash
-stuff "yagi-event --config yagi.conf\r"
-screen -t trigger bash
-stuff "pipeline trigger \".|oahu_config:Config\" --polling_rate=20\r"
-screen -t ready1 bash
-stuff "pipeline ready \".|oahu_config:Config\" --polling_rate=20\r"
-screen -t ready2 bash
-stuff "pipeline ready \".|oahu_config:Config\" --polling_rate=20\r"
-screen -t completed bash
-stuff "pipeline completed \".|oahu_config:Config\" --polling_rate=20\r"
-screen -t gen bash
-stuff "cd git/notigen/bin; python event_pump.py ../templates 2 0\r"

yagi consumer config for the oahu handler (deleted)

@@ -1,10 +0,0 @@
-[consumer:monitor.info]
-#apps = yagi.handler.shoebox_handler.ShoeboxHandler
-apps = oahu.yagi_handler.OahuHandler
-exchange = monitor
-exchange_type = topic
-routing_key = monitor.info
-durable = True
-max_messages = 100
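For reference, the shoebox-based consumer that survives this cleanup would presumably just re-enable the handler commented out above; a sketch, with every value copied from the deleted section and only the `apps` line changed:

    [consumer:monitor.info]
    apps = yagi.handler.shoebox_handler.ShoeboxHandler
    exchange = monitor
    exchange_type = topic
    routing_key = monitor.info
    durable = True
    max_messages = 100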