Migrate to flask
This commit migrates the valence code to Flask and also removes RabbitMQ. Change-Id: I70234515960e7e2106c5208ced8defc760a4531e
This commit is contained in:
parent cac939f819
commit da241b473f
README.rst (34 lines changed)
@@ -16,45 +16,35 @@ Download and Installation

 The following steps capture how to install valence. All installation steps require super user permissions.

-********************
+*******************************************
 Valence installation
-********************
+*******************************************

 1. Install software dependencies

-``$ sudo apt-get install git python-pip rabbitmq-server libyaml-0-2 python-dev``
+``$ sudo apt-get install git python-pip``

-2. Configure RabbitMq Server
+2. Clone the Valence code from git repo.

-``$ sudo rabbitmqctl add_user valence valence #use this username/pwd in valence.conf``
+``$ git clone https://git.openstack.org/openstack/rsc``

-``$ sudo rabbitmqctl set_user_tags valence administrator``
+3. Install all necessary software pre-requisites using the pip requirements file.

-``$ sudo rabbitmqctl set_permissions valence ".*" ".*" ".*"``
+``$ pip install -r requirements.txt``

-3. Clone the Valence code from git repo and change the directory to root Valence folder.
-
-4. Install all necessary software pre-requisites using the pip requirements file.
-
-``$ sudo -E pip install -r requirements.txt``
-
 5. Execute the 'install_valence.sh' file the Valence root directory.

-``$ ./install_valence.sh``
+``$ sudo bash install_valence.sh``

 6. Check the values in valence.conf located at /etc/valence/valence.conf

 ``set the ip/credentials of podm for which this Valence will interact``

-``set the rabbitmq user/password to the one given above(Step 2)``
+7. Check the PYTHON_HOME and other variables in /etc/init/valence.conf

-7. Check the values in /etc/init/valence-api.conf, /etc/init/valence-controller.conf
+8. Start valence service

-8. Start api and controller services
+``$ sudo service valence start``

-``$ sudo service valence-api start``
-
-``$ sudo service valence-controller start``
-
 9. Logs are located at /var/logs/valence/
@@ -1,14 +0,0 @@
-description "Valence Controller server"
-
-start on runlevel [2345]
-stop on runlevel [!2345]
-
-env PYTHON_HOME=PYHOME
-
-exec start-stop-daemon --start --verbose --chuid ${CHUID} \
-  --name valence-controller \
-  --exec /usr/local/bin/valence-controller -- \
-  --log-file=/var/log/valence/valence-controller.log
-
-respawn
@@ -1,4 +1,4 @@
-description "Valence API server"
+description "Valence server"

 start on runlevel [2345]
 stop on runlevel [!2345]
@@ -7,9 +7,8 @@ env PYTHON_HOME=PYHOME

 # change the chuid to match yours
 exec start-stop-daemon --start --verbose --chuid ${CHUID} \
---name valence-api \
---exec /usr/local/bin/valence-api -- \
---log-file=/var/log/valence/valence-api.log
+--name valence \
+--exec $PYTHON_HOME/valence -- \

 respawn
@@ -1,37 +1,20 @@
 [DEFAULT]
-# Show more verbose log output (sets INFO log level output)
-verbose = True
+#LOG Levels - debug, info, warning, error, critical
+log_level= debug

-# Show debugging output in logs (sets DEBUG log level output)
-debug = False
+#Server log settings
+debug=True

-auth_strategy=noauth
-
 # Log to this file. Make sure the user running rsc has
 # permissions to write to this file!
+log_file=/var/log/valence/valence.log

-log_dir=/var/log/valence
-rpc_response_timeout = 300
-
-[api]
-#address to bind the server to
+#address and port the server binds too
 bind_host = 0.0.0.0

-# Port the bind the server to
 bind_port = 8181

-[oslo_messaging_rabbit]
-rabbit_host = localhost
-rabbit_port = 5672
-rabbit_userid = valence
-rabbit_password = valence
-
 [podm]
-#url=http://10.223.197.204
 url=http://<ip address>
-user=<user>
-password=<password>
+user=<podm user>
+password=<podm admin>
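The Flask code further down imports this file through a ``valence.config`` module (``from valence import config as cfg``) whose implementation is not shown in this commit. A hypothetical sketch of what such a module could look like, with every attribute name inferred from how ``app.py`` uses it:

# Hypothetical sketch only; valence/config.py is not part of this diff.
# Attribute names (PROJECT_NAME, log_file, log_level, log_format) are
# inferred from setup_app() below, not confirmed by the commit.
import configparser
import logging

PROJECT_NAME = 'valence'

_parser = configparser.ConfigParser()
_parser.read('/etc/valence/valence.conf')
_defaults = _parser['DEFAULT']

log_file = _defaults.get('log_file', '/var/log/valence/valence.log')
log_level = getattr(logging, _defaults.get('log_level', 'info').upper(), logging.INFO)
log_format = '%(asctime)s %(name)s %(levelname)s %(message)s'

podm_url = _parser.get('podm', 'url', fallback='http://localhost')
podm_user = _parser.get('podm', 'user', fallback='admin')
podm_password = _parser.get('podm', 'password', fallback='admin')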
@@ -4,36 +4,30 @@
 #author :Intel Corporation
 #date :17-10-2016
 #version :0.1
-#usage :bash install_valence.sh
+#usage :sudo -E bash install_valence.sh
+#notes :Run this script as sudo user and not as root.
+# This script is needed still valence is packaged in to .deb/.rpm
 #==============================================================================

 install_log=install_valence.log
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

 cd $DIR
 echo "Current directory: $DIR" >> $install_log
 if [ "$USER" != 'root' ]; then
 echo "You must be root to install."
 exit
 fi
-PYHOME=$(python -c "import site; print site.getsitepackages()[0]")
+PYHOME="/usr/local/bin"
 echo "Detected PYTHON HOME: $PYHOME" >> $install_log

 # Copy the config files
-cp $DIR/doc/source/init/valence-api.conf /tmp/valence-api.conf
-sed -i s/\${CHUID}/$USER/ /tmp/valence-api.conf
-#Use alternate sed delimiter because path will
-#have /
-sed -i "s#PYHOME#$PYHOME#" /tmp/valence-api.conf
-mv /tmp/valence-api.conf /etc/init/valence-api.conf
-echo "Setting up valence-api config" >> $install_log
-
-cp $DIR/doc/source/init/valence-controller.conf /tmp/valence-controller.conf
-sed -i s/\${CHUID}/$USER/ /tmp/valence-controller.conf
-#Use alternate sed delimiter because path will
-#have /
-sed -i "s#PYHOME#$PYHOME#" /tmp/valence-controller.conf
-mv /tmp/valence-controller.conf /etc/init/valence-controller.conf
-echo "Setting up valence-controller config" >> $install_log
+echo "Setting up valence config" >> $install_log
+sed s/\${CHUID}/$USER/ $DIR/doc/source/init/valence.conf > /tmp/valence.conf
+#Use alternate sed delimiter because path will have /
+sed -i "s#PYHOME#$PYHOME#" /tmp/valence.conf
+mv /tmp/valence.conf /etc/init/valence.conf

 # create conf directory for valence
 mkdir /etc/valence
@@ -52,5 +46,4 @@ if [ $? -ne 0 ]; then
 fi

 echo "Installation Completed"
-echo "To start api : sudo service valence-api start"
-echo "To start controller : sudo service valence-controller start"
+echo "To start valence : sudo service valence start"
@@ -1,41 +1,14 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
 pbr>=1.6
-Babel>=2.3.4
-Paste>=2.0.3
-PasteDeploy>=1.5.2
-PyYAML>=3.11
-WebOb>=1.6.1
-amqp<=2.0
-anyjson>=0.3.3
-argparse>=1.2.1
-contextlib2>=0.5.3
-eventlet>=0.19.0
-greenlet>=0.4.10
-kombu>=3.0.35
-logutils>=0.3.3
-monotonic>=1.1
-netaddr>=0.7.18
-netifaces>=0.10.4
-oslo.concurrency>=3.10.0
-oslo.config>=3.11.0
-oslo.context>=2.5.0
-oslo.i18n>=3.7.0
-oslo.log>=3.10.0
-oslo.messaging>=5.4.0
-oslo.middleware>=3.13.0
-oslo.reports>=1.11.0
-oslo.serialization>=2.9.0
-oslo.service>=1.12.0
-oslo.utils>=3.13.0
-oslo.versionedobjects>=1.12.0
-pecan>=1.1.1
-requests>=2.10.0
-six>=1.10.0
-stevedore>=1.15.0
-waitress>=0.9.0
-wrapt>=1.10.8
-wsgiref>=0.1.2
+aniso8601==1.2.0
+click==6.6
+Flask==0.11.1
+Flask-Cors==3.0.2
+Flask-RESTful==0.3.5
+itsdangerous==0.24
+Jinja2==2.8
+MarkupSafe==0.23
+python-dateutil==2.5.3
+pytz==2016.7
+requests==2.11.1
+six==1.10.0
+Werkzeug==0.11.11
@@ -52,8 +52,4 @@ source-dir = releasenotes/source

 [entry_points]
 console_scripts =
-    valence-api = valence.cmd.api:main
-    valence-controller = valence.cmd.controller:main
-
-oslo.config.opts =
-    valence = valence.api.config:list_opts
+    valence = valence.run:main
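The single remaining console script points at ``valence.run:main``, a module that does not appear in this excerpt. A hedged sketch of what such an entry point could look like for the Flask stack introduced below (the ``bind_host``/``bind_port`` attributes on ``valence.config`` are assumptions, matched to the keys in valence.conf above):

# Hypothetical sketch of valence/run.py; not part of this diff.
from valence.api import route      # importing route.py registers every endpoint
from valence import config as cfg  # assumed to expose bind_host / bind_port


def main():
    # route.app is the singleton Flask app built by valence.api.app.get_app()
    route.app.run(host=getattr(cfg, 'bind_host', '0.0.0.0'),
                  port=int(getattr(cfg, 'bind_port', 8181)))


if __name__ == '__main__':
    main()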
@@ -10,51 +10,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from oslo_config import cfg
-from oslo_service import service
-from pecan import configuration
-from pecan import make_app
-from valence.api import hooks
+from flask import Flask
+import logging
+from logging.handlers import RotatingFileHandler
+from valence import config as cfg

+_app = None

-def setup_app(*args, **kwargs):
-    config = {
-        'server': {
-            'host': cfg.CONF.api.bind_port,
-            'port': cfg.CONF.api.bind_host
-        },
-        'app': {
-            'root': 'valence.api.controllers.root.RootController',
-            'modules': ['valence.api'],
-            'errors': {
-                400: '/error',
-                '__force_dict__': True
-            }
-        }
-    }
-    pecan_config = configuration.conf_from_dict(config)
-
-    app_hooks = [hooks.CORSHook()]
-
-    app = make_app(
-        pecan_config.app.root,
-        hooks=app_hooks,
-        force_canonical=False,
-        logging=getattr(config, 'logging', {})
-    )
+def setup_app():
+    """Return Flask application"""
+    app = Flask(cfg.PROJECT_NAME)
+    app.url_map.strict_slashes = False
+
+    # Configure logging
+    handler = RotatingFileHandler(cfg.log_file, maxBytes=10000, backupCount=1)
+    handler.setLevel(cfg.log_level)
+    formatter = logging.Formatter(cfg.log_format)
+    handler.setFormatter(formatter)
+    app.logger.setLevel(cfg.log_level)
+    app.logger.addHandler(handler)
     return app


-_launcher = None
-
-
-def serve(api_service, conf, workers=1):
-    global _launcher
-    if _launcher:
-        raise RuntimeError('serve() can only be called once')
-
-    _launcher = service.launch(conf, api_service, workers=workers)
-
-
-def wait():
-    _launcher.wait()
+def get_app():
+    global _app
+    if not _app:
+        _app = setup_app()
+    return _app
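A minimal usage sketch for the factory above. It assumes ``valence.config`` provides the ``PROJECT_NAME``, ``log_file``, ``log_level`` and ``log_format`` attributes referenced in ``setup_app()``; that module is not part of this hunk.

# Minimal sketch: the factory builds one shared Flask app on first use.
from valence.api import app as flaskapp

app = flaskapp.get_app()            # first call builds the app
assert flaskapp.get_app() is app    # later calls return the same object

# Routes are attached elsewhere (see valence/api/route.py below); until that
# module is imported the app only knows Flask's default 404 handling.
with app.test_client() as client:
    print(client.get('/no-such-url').status_code)   # -> 404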
@@ -1,66 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from oslo_config import cfg
-from oslo_log import log as logging
-import sys
-from valence.common import rpc
-
-LOG = logging.getLogger(__name__)
-
-common_opts = [
-    cfg.StrOpt('auth_strategy', default='noauth',
-               help=("The type of authentication to use")),
-    cfg.BoolOpt('allow_pagination', default=False,
-                help=("Allow the usage of the pagination")),
-    cfg.BoolOpt('allow_sorting', default=False,
-                help=("Allow the usage of the sorting")),
-    cfg.StrOpt('pagination_max_limit', default="-1",
-               help=("The maximum number of items returned in a single "
-                     "response, value was 'infinite' or negative integer "
-                     "means no limit")),
-]
-
-api_opts = [
-    cfg.StrOpt('bind_host', default='0.0.0.0',
-               help=("The host IP to bind to")),
-    cfg.IntOpt('bind_port', default=8181,
-               help=("The port to bind to")),
-    cfg.IntOpt('api_workers', default=2,
-               help=("number of api workers"))
-]
-
-
-def init(args, **kwargs):
-    # Register the configuration options
-    api_conf_group = cfg.OptGroup(name='api', title='Valence API options')
-    cfg.CONF.register_group(api_conf_group)
-    cfg.CONF.register_opts(api_opts, group=api_conf_group)
-    cfg.CONF.register_opts(common_opts)
-    logging.register_options(cfg.CONF)
-
-    cfg.CONF(args=args, project='valence',
-             **kwargs)
-
-    rpc.init(cfg.CONF)
-
-
-def setup_logging():
-    """Sets up the logging options for a log with supplied name."""
-    product_name = "valence"
-    logging.setup(cfg.CONF, product_name)
-    LOG.info("Logging enabled!")
-    LOG.debug("command line: %s", " ".join(sys.argv))
-
-
-def list_opts():
-    yield None, common_opts
@@ -1,44 +0,0 @@
-# Copyright (c) 2016 Intel, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_config import cfg
-from oslo_log import log as logging
-from pecan import expose
-from pecan import request
-from valence.controller import api as controller_api
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-class FlavorController(object):
-
-    def __init__(self, *args, **kwargs):
-        super(FlavorController, self).__init__(*args, **kwargs)
-
-    # HTTP GET /flavor/
-    @expose(generic=True, template='json')
-    def index(self):
-        LOG.debug("GET /flavor")
-        rpcapi = controller_api.API(context=request.context)
-        res = rpcapi.flavor_options()
-        return res
-
-    # HTTP POST /flavor/
-    @index.when(method='POST', template='json')
-    def index_POST(self, **kw):
-        LOG.debug("POST /flavor")
-        rpcapi = controller_api.API(context=request.context)
-        res = rpcapi.flavor_generate(criteria=kw['criteria'])
-        return res
@@ -1,81 +0,0 @@
-# Copyright (c) 2016 Intel, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_config import cfg
-from oslo_log import log as logging
-import pecan
-from pecan import expose
-from pecan import request
-from pecan.rest import RestController
-from valence.controller import api as controller_api
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-class NodeDetailController(RestController):
-    def __init__(self, nodeid):
-        self.nodeid = nodeid
-
-    # HTTP GET /nodes/
-    @expose()
-    def delete(self):
-        LOG.debug("DELETE /nodes")
-        rpcapi = controller_api.API(context=request.context)
-        res = rpcapi.delete_composednode(nodeid=self.nodeid)
-        LOG.info(str(res))
-        return res
-
-    @expose()
-    def storages(self):
-        pecan.abort(501, "/nodes/node id/storages")
-
-
-class NodesController(RestController):
-
-    def __init__(self, *args, **kwargs):
-        super(NodesController, self).__init__(*args, **kwargs)
-
-    # HTTP GET /nodes/
-    @expose(template='json')
-    def get_all(self, **kwargs):
-        LOG.debug("GET /nodes")
-        rpcapi = controller_api.API(context=request.context)
-        res = rpcapi.list_nodes(filters=kwargs)
-        return res
-
-    # HTTP GET /nodes/
-    @expose(template='json')
-    def post(self, **kwargs):
-        LOG.debug("POST /nodes")
-        rpcapi = controller_api.API(context=request.context)
-        res = rpcapi.compose_nodes(criteria=kwargs)
-        return res
-
-    @expose(template='json')
-    def get(self, nodeid):
-        LOG.debug("GET /nodes" + nodeid)
-        rpcapi = controller_api.API(context=request.context)
-        node = rpcapi.get_nodebyid(nodeid=nodeid)
-        if not node:
-            pecan.abort(404)
-        return node
-
-    @expose()
-    def _lookup(self, nodeid, *remainder):
-        # node = get_student_by_primary_key(primary_key)
-        if nodeid:
-            return NodeDetailController(nodeid), remainder
-        else:
-            pecan.abort(404)
@@ -1,44 +0,0 @@
-# Copyright (c) 2016 Intel, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_config import cfg
-from oslo_log import log as logging
-import pecan
-from pecan import expose
-from pecan import request
-from valence.controller import api as controller_api
-
-CONF = cfg.CONF
-LOG = logging.getLogger(__name__)
-
-
-class StoragesController(object):
-
-    def __init__(self, *args, **kwargs):
-        super(StoragesController, self).__init__(*args, **kwargs)
-
-    # HTTP GET /storages/
-    @expose(generic=True, template='json')
-    def index(self):
-        LOG.debug("GET /storages")
-        rpcapi = controller_api.API(context=request.context)
-        LOG.debug(rpcapi)
-        pecan.abort(501, "GET /storages is Not yet implemented")
-
-    @expose(template='json')
-    def get(self, storageid):
-        LOG.debug("GET /storages" + storageid)
-        rpcapi = controller_api.API(context=request.context)
-        LOG.debug(rpcapi)
-        pecan.abort(501, "GET /storages/storage is Not yet implemented")
@@ -1,14 +0,0 @@
-from pecan.hooks import PecanHook
-
-
-class CORSHook(PecanHook):
-
-    def after(self, state):
-        state.response.headers['Access-Control-Allow-Origin'] = '*'
-        state.response.headers['Access-Control-Allow-Methods'] = (
-            'GET, POST, DELETE, PUT, LIST, OPTIONS')
-        state.response.headers['Access-Control-Allow-Headers'] = (
-            'origin, authorization, content-type, accept')
-        if not state.response.headers['Content-Length']:
-            state.response.headers['Content-Length'] = (
-                str(len(state.response.body)))
@@ -13,19 +13,16 @@
 # under the License.


-import pecan
-from valence.api.controllers import base
-from valence.api.controllers import types
+from flask import request
+from valence.api import base
+from valence.api import types


 def build_url(resource, resource_args, bookmark=False, base_url=None):
     if base_url is None:
-        base_url = pecan.request.host_url
+        base_url = request.root_url
+    base_url = base_url.rstrip("//")
     template = '%(url)s/%(res)s' if bookmark else '%(url)s/v1/%(res)s'
-    # FIXME(lucasagomes): I'm getting a 404 when doing a GET on
-    # a nested resource that the URL ends with a '/'.
-    # https://groups.google.com/forum/#!topic/pecan-dev/QfSeviLg5qs
     template += '%(args)s' if resource_args.startswith('?') else '/%(args)s'
     return template % {'url': base_url, 'res': resource, 'args': resource_args}
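Tracing the new build_url by hand makes the ``rstrip("//")`` addition easier to see. A minimal sketch, assuming ``request.root_url`` is ``http://localhost:8181/`` (the default bind address in valence.conf):

# Illustrative trace of build_url for a bookmark link to "nodes" with empty args.
base_url = "http://localhost:8181/".rstrip("//")   # -> "http://localhost:8181"
template = '%(url)s/%(res)s'                        # bookmark=True branch
template += '/%(args)s'                             # resource_args does not start with '?'
print(template % {'url': base_url, 'res': 'nodes', 'args': ''})
# -> http://localhost:8181/nodes/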
@@ -13,13 +13,14 @@
 # under the License.


-from pecan import expose
-from pecan import request
-from pecan import route
-from valence.api.controllers import base
-from valence.api.controllers import link
-from valence.api.controllers import types
-from valence.api.controllers.v1 import controller as v1controller
+from flask import abort
+from flask import request
+from flask_restful import Resource
+import json
+from valence.api import base
+from valence.api import link
+from valence.api import types
+from valence.redfish import redfish as rfs


 class Version(base.APIBase):
@@ -32,18 +33,26 @@ class Version(base.APIBase):
         'links': {
             'validate': types.List(types.Custom(link.Link)).validate
         },
+        'min_version': {
+            'validate': types.Text.validate
+        },
+        'status': {
+            'validate': types.Text.validate
+        },
     }

     @staticmethod
-    def convert(id):
+    def convert(id, min_version, current=False):
         version = Version()
         version.id = id
-        version.links = [link.Link.make_link('self', request.host_url,
+        version.status = "CURRENT" if current else "DEPRECTED"
+        version.min_version = min_version
+        version.links = [link.Link.make_link('self', request.url_root,
                                              id, '', bookmark=True)]
         return version


-class Root(base.APIBase):
+class RootBase(base.APIBase):

     fields = {
         'id': {
@@ -62,17 +71,34 @@ class Root(base.APIBase):

     @staticmethod
     def convert():
-        root = Root()
+        root = RootBase()
         root.name = "OpenStack Valence API"
-        root.description = ("Valence is an OpenStack project")
-        root.versions = [Version.convert('v1')]
-        root.default_version = Version.convert('v1')
+        root.description = "Valence is an OpenStack project"
+        root.versions = [Version.convert('v1', '1.0', True)]
+        root.default_version = Version.convert('v1', '1.0', True)
         return root


-class RootController(object):
-    @expose('json')
-    def index(self):
-        return Root.convert()
-
-
-route(RootController, 'v1', v1controller.V1Controller())
+class Root(Resource):
+
+    def get(self):
+        obj = RootBase.convert()
+        return json.dumps(obj, default=lambda o: o.as_dict())
+
+
+class PODMProxy(Resource):
+    """Passthrough Proxy for PODM.
+
+    This function byepasses valence processing
+    and calls PODM directly. This function may be temperory
+
+    """
+    def get(self, url):
+        op = url.split("/")[0]
+        filterext = ["Chassis", "Services", "Managers", "Systems",
+                     "EventService", "Nodes", "EthernetSwitches"]
+        if op in filterext:
+            resp = rfs.send_request(url)
+            return resp.json()
+        else:
+            abort(404)
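The proxy only forwards requests whose first path segment is one of the whitelisted Redfish collections; everything else is rejected. A small stand-alone illustration of that check:

# Illustrative trace of the PODMProxy whitelist above.
filterext = ["Chassis", "Services", "Managers", "Systems",
             "EventService", "Nodes", "EthernetSwitches"]

for url in ("Systems/1", "Chassis", "admin/secrets"):
    op = url.split("/")[0]
    print(url, "->", "proxied" if op in filterext else "404")
# Systems/1 -> proxied
# Chassis -> proxied
# admin/secrets -> 404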
valence/api/route.py (new file, 64 lines)
@@ -0,0 +1,64 @@
+# Copyright (c) 2016 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from flask_cors import CORS
+from flask_restful import Api
+from valence.api import app as flaskapp
+from valence.api.root import PODMProxy
+from valence.api.root import Root
+from valence.api.v1.flavor import Flavors as v1Flavors
+from valence.api.v1.nodes import Nodes as v1Nodes
+from valence.api.v1.nodes import NodesList as v1NodesList
+from valence.api.v1.nodes import NodesStorage as v1NodesStorage
+from valence.api.v1.storages import Storages as v1Storages
+from valence.api.v1.storages import StoragesList as v1StoragesList
+from valence.api.v1.systems import Systems as v1Systems
+from valence.api.v1.systems import SystemsList as v1SystemsList
+from valence.api.v1.version import V1
+
+app = flaskapp.get_app()
+cors = CORS(app)
+api = Api(app)
+
+"""API V1.0 Operations"""
+
+
+# API Root operation
+api.add_resource(Root, '/', endpoint='root')
+
+# V1 Root operations
+api.add_resource(V1, '/v1', endpoint='v1')
+
+# Node(s) operations
+api.add_resource(v1NodesList, '/v1/nodes', endpoint='nodes')
+api.add_resource(v1Nodes, '/v1/nodes/<string:nodeid>', endpoint='node')
+api.add_resource(v1NodesStorage,
+                 '/v1/nodes/<string:nodeid>/storages',
+                 endpoint='nodes_storages')
+
+# System(s) operations
+api.add_resource(v1SystemsList, '/v1/systems', endpoint='systems')
+api.add_resource(v1Systems, '/v1/systems/<string:systemid>', endpoint='system')
+
+# Flavor(s) operations
+api.add_resource(v1Flavors, '/v1/flavor', endpoint='flavor')
+
+
+# Storage(s) operations
+api.add_resource(v1StoragesList, '/v1/storages', endpoint='storages')
+api.add_resource(v1Storages,
+                 '/v1/storages/<string:storageid>', endpoint='storage')
+
+# Proxy to PODM
+api.add_resource(PODMProxy, '/<path:url>', endpoint='podmproxy')
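Usage sketch: importing ``valence.api.route`` wires every resource onto the shared Flask app, so the API can be served by any WSGI runner. Only the root document is exercised below because it needs no PODM connectivity; the shell lines assume the default bind address from valence.conf.

# Minimal sketch, assuming the valence package and its config are installed.
from valence.api import route

client = route.app.test_client()
resp = client.get('/')                      # dispatched to the Root resource
print(resp.status_code, resp.get_data(as_text=True))

# Roughly equivalent from a shell once the service is running:
#   curl http://localhost:8181/
#   curl http://localhost:8181/v1/nodes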
@@ -11,9 +11,7 @@
 # under the License.

 import logging
-from oslo_utils import strutils
 import six
-from valence.common import exceptions as exception

 LOG = logging.getLogger(__name__)

@@ -27,7 +25,7 @@ class Text(object):
             return None

         if not isinstance(value, six.string_types):
-            raise exception.InvalidValue(value=value, type=cls.type_name)
+            raise ValueError("An invalid value was provided")

         return value

@@ -41,12 +39,15 @@ class String(object):
             return None

         try:
-            strutils.check_string_length(value, min_length=min_length,
-                                         max_length=max_length)
+            strlen = len(value)
+            if strlen < min_length:
+                raise TypeError('String length is less than' + min_length)
+            if max_length and strlen > max_length:
+                raise TypeError('String length is greater than' + max_length)
         except TypeError:
-            raise exception.InvalidValue(value=value, type=cls.type_name)
+            raise ValueError("An invalid value was provided")
         except ValueError as e:
-            raise exception.InvalidValue(message=str(e))
+            raise ValueError(str(e))

         return value

@@ -64,12 +65,12 @@ class Integer(object):
             value = int(value)
         except Exception:
             LOG.exception('Failed to convert value to int')
-            raise exception.InvalidValue(value=value, type=cls.type_name)
+            raise ValueError("Failed to convert value to int")

         if minimum is not None and value < minimum:
             message = _("Integer '%(value)s' is smaller than "
                         "'%(min)d'.") % {'value': value, 'min': minimum}
-            raise exception.InvalidValue(message=message)
+            raise ValueError(message)

         return value

@@ -84,10 +85,10 @@ class Bool(object):

         if not isinstance(value, bool):
             try:
-                value = strutils.bool_from_string(value, strict=True)
+                value = value.lower() in ("yes", "true", "t", "1")
             except Exception:
                 LOG.exception('Failed to convert value to bool')
-                raise exception.InvalidValue(value=value, type=cls.type_name)
+                raise ValueError("Failed to convert value to bool")

         return value

@@ -107,7 +108,7 @@ class Custom(object):
             value = self.user_class(**value)
         except Exception:
             LOG.exception('Failed to validate received value')
-            raise exception.InvalidValue(value=value, type=self.type_name)
+            raise ValueError("Failed to validate received value")

         return value

@@ -123,10 +124,10 @@ class List(object):
             return None

         if not isinstance(value, list):
-            raise exception.InvalidValue(value=value, type=self.type_name)
+            raise ValueError("Failed to validate received value")

         try:
             return [self.type.validate(v) for v in value]
         except Exception:
             LOG.exception('Failed to validate received value')
-            raise exception.InvalidValue(value=value, type=self.type_name)
+            raise ValueError("Failed to validate received value")
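A quick illustration of the validators after the switch from the oslo helpers to plain ValueError-based checks, assuming the ``validate()`` classmethod signatures shown above are otherwise unchanged:

# Illustration only; exercises the new ValueError behaviour.
from valence.api import types

print(types.Text.validate("fast"))     # -> "fast"
print(types.Integer.validate("42"))    # -> 42
print(types.Bool.validate("yes"))      # -> True

try:
    types.Text.validate(123)           # not a string
except ValueError as err:
    print(err)                         # An invalid value was provided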
@@ -12,26 +12,20 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from oslo_log import log as logging
+from flask import request
+from flask_restful import Resource
+import logging
 from valence.flavor import flavor

 LOG = logging.getLogger(__name__)


-class Handler(object):
-    """Valence Flavor RPC handler.
-
-    These are the backend operations. They are executed by the backend ervice.
-    API calls via AMQP (within the ReST API) trigger the handlers to be called.
-
-    """
-
-    def __init__(self):
-        super(Handler, self).__init__()
-
-    def flavor_options(self, context):
-        return flavor.get_available_criteria()
-
-    def flavor_generate(self, context, criteria):
-        LOG.debug("Getting flavor options")
-        return flavor.create_flavors(criteria)
+class Flavors(Resource):
+
+    def get(self):
+        LOG.debug("GET /flavor")
+        return flavor.get_available_criteria()
+
+    def post(self):
+        LOG.debug("POST /flavor")
+        return flavor.create_flavors(request.get_json())
valence/api/v1/nodes.py (new file, 51 lines)
@@ -0,0 +1,51 @@
+# Copyright (c) 2016 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from flask import request
+from flask_restful import abort
+from flask_restful import Resource
+import logging
+from valence.redfish import redfish as rfs
+
+LOG = logging.getLogger(__name__)
+
+
+class NodesList(Resource):
+
+    def get(self):
+        LOG.debug("GET /nodes")
+        return rfs.nodes_list(request.args)
+
+    def post(self):
+        LOG.debug("POST /nodes/")
+        return rfs.compose_node(request.get_json())
+
+
+class Nodes(Resource):
+
+    def get(self, nodeid):
+        LOG.debug("GET /nodes/" + nodeid)
+        return rfs.get_nodebyid(nodeid)
+
+    def delete(self, nodeid):
+        LOG.debug("DELETE /nodes/" + nodeid)
+        return rfs.delete_composednode(nodeid)
+
+
+class NodesStorage(Resource):
+
+    def get(self, nodeid):
+        LOG.debug("GET /nodes/%s/storage" % nodeid)
+        return abort(501)
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 # Copyright (c) 2016 Intel, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -13,21 +12,22 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from oslo_config import cfg
-
-
-# Configurations
-podm_opts = [
-    cfg.StrOpt('url',
-               default='http://localhost:80',
-               help=("The complete url string of PODM")),
-    cfg.StrOpt('user',
-               default='admin',
-               help=("User for the PODM")),
-    cfg.StrOpt('password',
-               default='admin',
-               help=("Passoword for PODM"))]
-
-podm_conf_group = cfg.OptGroup(name='podm', title='RSC PODM options')
-cfg.CONF.register_group(podm_conf_group)
-cfg.CONF.register_opts(podm_opts, group=podm_conf_group)
+from flask_restful import abort
+from flask_restful import Resource
+import logging
+
+LOG = logging.getLogger(__name__)
+
+
+class StoragesList(Resource):
+
+    def get(self):
+        LOG.debug("GET /storages")
+        return abort(501)
+
+
+class Storages(Resource):
+
+    def get(self, storageid):
+        LOG.debug("GET /storages" + storageid)
+        return abort(501)
@@ -1,3 +1,5 @@
+# Copyright (c) 2016 Intel, Inc.
+#
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
 # a copy of the License at
@@ -10,28 +12,24 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-# Server Specific Configurations
-server = {
-    'port': '8080',
-    'host': '0.0.0.0'
-}
-
-# Pecan Application Configurations
-app = {
-    'root': 'valence.controllers.root.RootController',
-    'modules': ['valence'],
-    'static_root': '%(confdir)s/../../public',
-    'template_path': '%(confdir)s/../templates',
-    'debug': True,
-    'errors': {
-        '404': '/error/404',
-        '__force_dict__': True
-    }
-}
-
-# Custom Configurations must be in Python dictionary format::
-#
-# foo = {'bar':'baz'}
-#
-# All configurations are accessible at::
-# pecan.conf
+from flask import request
+from flask_restful import Resource
+import logging
+from valence.redfish import redfish as rfs
+
+LOG = logging.getLogger(__name__)
+
+
+class SystemsList(Resource):
+
+    def get(self):
+        LOG.debug("GET /systems")
+        return rfs.systems_list(request.args)
+
+
+class Systems(Resource):
+
+    def get(self, systemid):
+        LOG.debug("GET /systems/" + systemid)
+        return rfs.get_systembyid(systemid)
@@ -12,16 +12,13 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from pecan import abort
-from pecan import expose
-from pecan import request
-from pecan import route
-from valence.api.controllers import base
-from valence.api.controllers import link
-from valence.api.controllers import types
-from valence.api.controllers.v1 import flavor as v1flavor
-from valence.api.controllers.v1 import nodes as v1nodes
-from valence.common.redfish import api as rfsapi
+from flask import request
+from flask_restful import Resource
+import json
+from valence.api import base
+from valence.api import link
+from valence.api import types


 class MediaType(base.APIBase):
@@ -37,7 +34,7 @@ class MediaType(base.APIBase):
     }


-class V1(base.APIBase):
+class V1Base(base.APIBase):
     """The representation of the version 1 of the API."""

     fields = {
@@ -50,16 +47,23 @@ class V1(base.APIBase):
         'links': {
             'validate': types.List(types.Custom(link.Link)).validate
         },
-        'services': {
+        'nodes': {
+            'validate': types.List(types.Custom(link.Link)).validate
+        },
+        'storages': {
+            'validate': types.List(types.Custom(link.Link)).validate
+        },
+        'flavors': {
             'validate': types.List(types.Custom(link.Link)).validate
         },
     }

     @staticmethod
     def convert():
-        v1 = V1()
+        v1 = V1Base()
         v1.id = "v1"
-        v1.links = [link.Link.make_link('self', request.host_url,
+        v1_base_url = request.url_root.rstrip('//')
+        v1.links = [link.Link.make_link('self', request.url_root,
                                         'v1', '', bookmark=True),
                     link.Link.make_link('describedby',
                                         'http://docs.openstack.org',
@@ -68,37 +72,29 @@ class V1(base.APIBase):
                                         bookmark=True, type='text/html')]
         v1.media_types = [MediaType(base='application/json',
                           type='application/vnd.openstack.valence.v1+json')]
-        v1.services = [link.Link.make_link('self', request.host_url,
-                                           'services', ''),
-                       link.Link.make_link('bookmark',
-                                           request.host_url,
-                                           'services', '',
-                                           bookmark=True)]
+        v1.nodes = [link.Link.make_link('self', v1_base_url + '/nodes',
+                                        'nodes', ''),
+                    link.Link.make_link('bookmark',
+                                        v1_base_url + '/nodes',
+                                        'nodes', '',
+                                        bookmark=True)]
+        v1.storages = [link.Link.make_link('self', v1_base_url,
+                                           'storages', ''),
+                       link.Link.make_link('bookmark',
+                                           v1_base_url,
+                                           'storages', '',
+                                           bookmark=True)]
+        v1.flavors = [link.Link.make_link('self', v1_base_url,
+                                          'flavors', ''),
+                      link.Link.make_link('bookmark',
+                                          v1_base_url,
+                                          'flavors', '',
+                                          bookmark=True)]
         return v1


-class V1Controller(object):
-    @expose('json')
-    def index(self):
-        return V1.convert()
-
-    @expose('json')
-    def _default(self, *args):
-        """Passthrough Proxy for PODM.
-
-        This function byepasses valence controller handlers
-        and calls PODM directly.
-
-        """
-        ext = args[0]
-        filterext = ["Chassis", "Services", "Managers", "Systems",
-                     "EventService", "Nodes", "EthernetSwitches"]
-        if ext in filterext:
-            urlext = '/'.join(args)
-            resp = rfsapi.send_request(urlext)
-            return resp.json()
-        else:
-            abort(404)
-
-
-route(V1Controller, 'flavor', v1flavor.FlavorController())
-route(V1Controller, 'nodes', v1nodes.NodesController())
+class V1(Resource):
+
+    def get(self):
+        vobj = V1Base.convert()
+        return json.dumps(vobj, default=lambda o: o.as_dict())
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-
-# copyright (c) 2016 Intel, Inc.
-#
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import sys
-
-from oslo_config import cfg
-from oslo_log import log as logging
-from oslo_service import wsgi
-
-from valence.api import app
-from valence.api import config as api_config
-
-CONF = cfg.CONF
-LOG = logging.getLogger('valence.api')
-
-
-def main():
-    api_config.init(sys.argv[1:])
-    api_config.setup_logging()
-    application = app.setup_app()
-    host = CONF.api.bind_host
-    port = CONF.api.bind_port
-    workers = 1
-
-    LOG.info(("Server on http://%(host)s:%(port)s with %(workers)s"),
-             {'host': host, 'port': port, 'workers': workers})
-
-    service = wsgi.Server(CONF, "valence", application, host, port)
-
-    app.serve(service, CONF, workers)
-
-    LOG.info("Configuration:")
-    app.wait()
-
-
-if __name__ == '__main__':
-    main()
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2016 Intel, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Starter script for the Valence controller service."""
-
-import os
-from oslo_config import cfg
-from oslo_log import log as logging
-from oslo_service import service
-import sys
-import uuid
-from valence.common import rpc_service
-from valence.controller import config as controller_config
-from valence.controller.handlers import flavor_controller
-from valence.controller.handlers import node_controller
-
-LOG = logging.getLogger(__name__)
-
-
-def main():
-    controller_config.init(sys.argv[1:])
-    controller_config.setup_logging()
-    LOG.info(('Starting valence-controller in PID %s'), os.getpid())
-    LOG.debug("Configuration:")
-    controller_id = uuid.uuid4()
-    endpoints = [
-        flavor_controller.Handler(),
-        node_controller.Handler()
-    ]
-
-    server = rpc_service.Service.create(cfg.CONF.controller.topic,
-                                        controller_id, endpoints,
-                                        binary='valence-controller')
-    launcher = service.launch(cfg.CONF, server)
-    launcher.wait()
-
-if __name__ == '__main__':
-    main()
@@ -1,75 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from oslo_context import context as oslo_ctx
-
-
-class ContextBase(oslo_ctx.RequestContext):
-    def __init__(self, auth_token=None, user_id=None, tenant_id=None,
-                 is_admin=False, request_id=None, overwrite=True,
-                 user_name=None, tenant_name=None, auth_url=None,
-                 region=None, password=None, domain='default',
-                 project_name=None, **kwargs):
-        super(ContextBase, self).__init__(
-            auth_token=auth_token,
-            user=user_id or kwargs.get('user', None),
-            tenant=tenant_id or kwargs.get('tenant', None),
-            domain=kwargs.get('domain', None),
-            user_domain=kwargs.get('user_domain', None),
-            project_domain=kwargs.get('project_domain', None),
-            is_admin=is_admin,
-            read_only=kwargs.get('read_only', False),
-            show_deleted=kwargs.get('show_deleted', False),
-            request_id=request_id,
-            resource_uuid=kwargs.get('resource_uuid', None),
-            overwrite=overwrite)
-        self.user_name = user_name
-        self.tenant_name = tenant_name
-        self.tenant_id = tenant_id
-        self.auth_url = auth_url
-        self.password = password
-        self.default_name = domain
-        self.region_name = region
-        self.project_name = project_name
-
-    def to_dict(self):
-        ctx_dict = super(ContextBase, self).to_dict()
-        # ctx_dict.update({
-        #     to do : dict update
-        # })
-        return ctx_dict
-
-    @classmethod
-    def from_dict(cls, ctx):
-        return cls(**ctx)
-
-
-class Context(ContextBase):
-    def __init__(self, **kwargs):
-        super(Context, self).__init__(**kwargs)
-        self._session = None
-
-    @property
-    def session(self):
-        return self._session
-
-
-def get_admin_context(read_only=True):
-    return ContextBase(user_id=None,
-                       project_id=None,
-                       is_admin=True,
-                       overwrite=False,
-                       read_only=read_only)
-
-
-def get_current():
-    return oslo_ctx.get_current()
@@ -1,79 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-RSC base exception handling.
-"""
-import six
-
-from oslo_utils import excutils
-
-
-class RSCException(Exception):
-    """Base RSC Exception."""
-
-    message = "An unknown exception occurred."
-
-    def __init__(self, **kwargs):
-        try:
-            super(RSCException, self).__init__(self.message % kwargs)
-            self.msg = self.message % kwargs
-        except Exception:
-            with excutils.save_and_reraise_exception() as ctxt:
-                if not self.use_fatal_exceptions():
-                    ctxt.reraise = False
-                    # at least get the core message out if something happened
-                    super(RSCException, self).__init__(self.message)
-
-    if six.PY2:
-        def __unicode__(self):
-            return unicode(self.msg)
-
-    def use_fatal_exceptions(self):
-        return False
-
-
-class BadRequest(RSCException):
-    message = 'Bad %(resource)s request'
-
-
-class NotImplemented(RSCException):
-    message = ("Not yet implemented in RSC %(func_name)s: ")
-
-
-class NotFound(RSCException):
-    message = ("URL not Found")
-
-
-class Conflict(RSCException):
-    pass
-
-
-class ServiceUnavailable(RSCException):
-    message = "The service is unavailable"
-
-
-class ConnectionRefused(RSCException):
-    message = "Connection to the service endpoint is refused"
-
-
-class TimeOut(RSCException):
-    message = "Timeout when connecting to OpenStack Service"
-
-
-class InternalError(RSCException):
-    message = "Error when performing operation"
-
-
-class InvalidInputError(RSCException):
-    message = ("An invalid value was provided for %(opt_name)s: "
-               "%(opt_value)s")
@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2016 Intel, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import json
-from oslo_config import cfg
-from oslo_log import log as logging
-import requests
-from requests.auth import HTTPBasicAuth
-
-
-LOG = logging.getLogger(__name__)
-cfg.CONF.import_group('undercloud', 'valence.controller.config')
-
-
-def _send_request(url, method, headers, requestbody=None):
-    defaultheaders = {'Content-Type': 'application/json'}
-    auth = HTTPBasicAuth(cfg.CONF.undercloud.os_user,
-                         cfg.CONF.undercloud.os_password)
-    headers = defaultheaders.update(headers)
-    LOG.debug(url)
-    resp = requests.request(method,
-                            url,
-                            headers=defaultheaders,
-                            data=requestbody,
-                            auth=auth)
-    LOG.debug(resp.status_code)
-    return resp.json()
-
-
-def _get_servicecatalogue_endpoint(keystonejson, servicename):
-    """Fetch particular endpoint from Keystone.
-
-    This function is to get the particular endpoint from the
-    list of endpoints returned fro keystone.
-
-    """
-
-    for d in keystonejson["access"]["serviceCatalog"]:
-        if(d["name"] == servicename):
-            return d["endpoints"][0]["publicURL"]
-
-
-def _get_token_and_url(nameofendpoint):
-    """Fetch token from the endpoint
-
-    This function get new token and associated endpoint.
-    name of endpoint carries the name of the service whose
-    endpoint need to be found.
-
-    """
-
-    url = cfg.CONF.undercloud.os_admin_url + "/tokens"
-    data = {"auth":
-            {"tenantName": cfg.CONF.undercloud.os_tenant,
-             "passwordCredentials":
-                 {"username": cfg.CONF.undercloud.os_user,
-                  "password": cfg.CONF.undercloud.os_password}}}
-    rdata = _send_request(url, "POST", {}, json.dumps(data))
-    tokenid = rdata["access"]["token"]["id"]
-    endpoint = _get_servicecatalogue_endpoint(rdata, nameofendpoint)
-    LOG.debug("Token,Endpoint %s: %s from keystone for %s"
-              % (tokenid, endpoint, nameofendpoint))
-    return (tokenid, endpoint)
-
-
-# put this function in utils.py later
-def _get_imageid(jsondata, imgname):
-    # write a generic funciton for this and _get_servicecatalogue_endpoint
-    for d in jsondata["images"]:
-        if(d["name"] == imgname):
-            return d["id"]
-
-
-def get_undercloud_images():
-    tokenid, endpoint = _get_token_and_url("glance")
-    resp = _send_request(endpoint + "/v2/images",
-                         "GET",
-                         {'X-Auth-Token': tokenid})
-    imagemap = {"deploy_ramdisk": _get_imageid(resp, "bm-deploy-ramdisk"),
-                "deploy_kernel": _get_imageid(resp, "bm-deploy-kernel"),
-                "image_source": _get_imageid(resp, "overcloud-full"),
-                "ramdisk": _get_imageid(resp, "overcloud-full-initrd"),
|
|
||||||
"kernel": _get_imageid(resp, "overcloud-full-vmlinuz")}
|
|
||||||
return imagemap
|
|
@ -1,138 +0,0 @@
|
|||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
|
|
||||||
# import oslo_messaging as messaging
|
|
||||||
# from oslo_serialization import jsonutils
|
|
||||||
# from valence.common import valencecontext
|
|
||||||
from oslo_config import cfg
|
|
||||||
import oslo_messaging as messaging
|
|
||||||
from oslo_serialization import jsonutils
|
|
||||||
from valence.common import context as valence_ctx
|
|
||||||
import valence.common.exceptions
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
'init',
|
|
||||||
'cleanup',
|
|
||||||
'set_defaults',
|
|
||||||
'add_extra_exmods',
|
|
||||||
'clear_extra_exmods',
|
|
||||||
'get_allowed_exmods',
|
|
||||||
'RequestContextSerializer',
|
|
||||||
'get_client',
|
|
||||||
'get_server',
|
|
||||||
'get_notifier',
|
|
||||||
]
|
|
||||||
|
|
||||||
CONF = cfg.CONF
|
|
||||||
TRANSPORT = None
|
|
||||||
NOTIFIER = None
|
|
||||||
|
|
||||||
ALLOWED_EXMODS = [
|
|
||||||
valence.common.exceptions.__name__,
|
|
||||||
]
|
|
||||||
EXTRA_EXMODS = []
|
|
||||||
|
|
||||||
|
|
||||||
def init(conf):
|
|
||||||
global TRANSPORT, NOTIFIER
|
|
||||||
exmods = get_allowed_exmods()
|
|
||||||
TRANSPORT = messaging.get_transport(conf,
|
|
||||||
allowed_remote_exmods=exmods)
|
|
||||||
serializer = RequestContextSerializer(JsonPayloadSerializer())
|
|
||||||
NOTIFIER = messaging.Notifier(TRANSPORT, serializer=serializer)
|
|
||||||
|
|
||||||
|
|
||||||
def cleanup():
|
|
||||||
global TRANSPORT, NOTIFIER
|
|
||||||
assert TRANSPORT is not None
|
|
||||||
assert NOTIFIER is not None
|
|
||||||
TRANSPORT.cleanup()
|
|
||||||
TRANSPORT = NOTIFIER = None
|
|
||||||
|
|
||||||
|
|
||||||
def set_defaults(control_exchange):
|
|
||||||
messaging.set_transport_defaults(control_exchange)
|
|
||||||
|
|
||||||
|
|
||||||
def add_extra_exmods(*args):
|
|
||||||
EXTRA_EXMODS.extend(args)
|
|
||||||
|
|
||||||
|
|
||||||
def clear_extra_exmods():
|
|
||||||
del EXTRA_EXMODS[:]
|
|
||||||
|
|
||||||
|
|
||||||
def get_allowed_exmods():
|
|
||||||
return ALLOWED_EXMODS + EXTRA_EXMODS
|
|
||||||
|
|
||||||
|
|
||||||
class JsonPayloadSerializer(messaging.NoOpSerializer):
|
|
||||||
@staticmethod
|
|
||||||
def serialize_entity(context, entity):
|
|
||||||
return jsonutils.to_primitive(entity, convert_instances=True)
|
|
||||||
|
|
||||||
|
|
||||||
class RequestContextSerializer(messaging.Serializer):
|
|
||||||
|
|
||||||
def __init__(self, base):
|
|
||||||
self._base = base
|
|
||||||
|
|
||||||
def serialize_entity(self, context, entity):
|
|
||||||
if not self._base:
|
|
||||||
return entity
|
|
||||||
return self._base.serialize_entity(context, entity)
|
|
||||||
|
|
||||||
def deserialize_entity(self, context, entity):
|
|
||||||
if not self._base:
|
|
||||||
return entity
|
|
||||||
return self._base.deserialize_entity(context, entity)
|
|
||||||
|
|
||||||
def serialize_context(self, context):
|
|
||||||
if isinstance(context, dict):
|
|
||||||
return context
|
|
||||||
else:
|
|
||||||
return context.to_dict()
|
|
||||||
|
|
||||||
def deserialize_context(self, context):
|
|
||||||
return valence_ctx.Context.from_dict(context)
|
|
||||||
|
|
||||||
|
|
||||||
def get_transport_url(url_str=None):
|
|
||||||
return messaging.TransportURL.parse(CONF, url_str)
|
|
||||||
|
|
||||||
|
|
||||||
def get_client(target, version_cap=None, serializer=None):
|
|
||||||
assert TRANSPORT is not None
|
|
||||||
serializer = RequestContextSerializer(serializer)
|
|
||||||
return messaging.RPCClient(TRANSPORT,
|
|
||||||
target,
|
|
||||||
version_cap=version_cap,
|
|
||||||
serializer=serializer)
|
|
||||||
|
|
||||||
|
|
||||||
def get_server(target, endpoints, serializer=None):
|
|
||||||
assert TRANSPORT is not None
|
|
||||||
serializer = RequestContextSerializer(serializer)
|
|
||||||
return messaging.get_rpc_server(TRANSPORT,
|
|
||||||
target,
|
|
||||||
endpoints,
|
|
||||||
executor='eventlet',
|
|
||||||
serializer=serializer)
|
|
||||||
|
|
||||||
|
|
||||||
def get_notifier(service, host=None, publisher_id=None):
|
|
||||||
assert NOTIFIER is not None
|
|
||||||
if not publisher_id:
|
|
||||||
publisher_id = "%s.%s" % (service, host or CONF.host)
|
|
||||||
return NOTIFIER.prepare(publisher_id=publisher_id)
|
|
@ -1,89 +0,0 @@
|
|||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
"""Common RPC service and API tools for Valence."""
|
|
||||||
|
|
||||||
import eventlet
|
|
||||||
from oslo_config import cfg
|
|
||||||
import oslo_messaging as messaging
|
|
||||||
from oslo_service import service
|
|
||||||
|
|
||||||
from valence.common import rpc
|
|
||||||
from valence.objects import base as objects_base
|
|
||||||
|
|
||||||
eventlet.monkey_patch()
|
|
||||||
|
|
||||||
periodic_opts = [
|
|
||||||
cfg.IntOpt('periodic_interval_max',
|
|
||||||
default=60,
|
|
||||||
help='Max interval size between periodic tasks execution in '
|
|
||||||
'seconds.'),
|
|
||||||
]
|
|
||||||
|
|
||||||
CONF = cfg.CONF
|
|
||||||
CONF.register_opts(periodic_opts)
|
|
||||||
|
|
||||||
|
|
||||||
class Service(service.Service):
|
|
||||||
|
|
||||||
def __init__(self, topic, server, handlers, binary):
|
|
||||||
super(Service, self).__init__()
|
|
||||||
serializer = rpc.RequestContextSerializer(
|
|
||||||
objects_base.ValenceObjectSerializer())
|
|
||||||
transport = messaging.get_transport(cfg.CONF)
|
|
||||||
# TODO(asalkeld) add support for version='x.y'
|
|
||||||
target = messaging.Target(topic=topic, server=server)
|
|
||||||
self._server = messaging.get_rpc_server(transport, target, handlers,
|
|
||||||
serializer=serializer)
|
|
||||||
self.binary = binary
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
# servicegroup.setup(CONF, self.binary, self.tg)
|
|
||||||
self._server.start()
|
|
||||||
|
|
||||||
def stop(self):
|
|
||||||
if self._server:
|
|
||||||
self._server.stop()
|
|
||||||
self._server.wait()
|
|
||||||
super(Service, self).stop()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create(cls, topic, server, handlers, binary):
|
|
||||||
service_obj = cls(topic, server, handlers, binary)
|
|
||||||
return service_obj
|
|
||||||
|
|
||||||
|
|
||||||
class API(object):
|
|
||||||
def __init__(self, transport=None, context=None, topic=None, server=None,
|
|
||||||
timeout=None):
|
|
||||||
serializer = rpc.RequestContextSerializer(
|
|
||||||
objects_base.ValenceObjectSerializer())
|
|
||||||
if transport is None:
|
|
||||||
exmods = rpc.get_allowed_exmods()
|
|
||||||
transport = messaging.get_transport(cfg.CONF,
|
|
||||||
allowed_remote_exmods=exmods)
|
|
||||||
self._context = context
|
|
||||||
if topic is None:
|
|
||||||
topic = ''
|
|
||||||
target = messaging.Target(topic=topic, server=server)
|
|
||||||
self._client = messaging.RPCClient(transport, target,
|
|
||||||
serializer=serializer,
|
|
||||||
timeout=timeout)
|
|
||||||
|
|
||||||
def _call(self, method, *args, **kwargs):
|
|
||||||
return self._client.call(self._context, method, *args, **kwargs)
|
|
||||||
|
|
||||||
def _cast(self, method, *args, **kwargs):
|
|
||||||
self._client.cast(self._context, method, *args, **kwargs)
|
|
||||||
|
|
||||||
def echo(self, message):
|
|
||||||
self._cast('echo', message=message)
|
|
68
valence/config.py
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
# Copyright 2016 Intel Corporation
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""This Module reads the configuration from .conf file
|
||||||
|
and sets default values if the expected values are not set.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from six.moves import configparser
|
||||||
|
|
||||||
|
|
||||||
|
def get_option(section, key, default, type=str):
|
||||||
|
"""Function to support default values
|
||||||
|
|
||||||
|
Though the configparser fallback feature could be used,
|
||||||
|
Python 2.7 doesn't support it.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if config.has_option(section, key):
|
||||||
|
return type(config.get(section, key))
|
||||||
|
else:
|
||||||
|
return type(default)
|
||||||
|
|
||||||
|
|
||||||
|
PROJECT_NAME = 'valence'
|
||||||
|
|
||||||
|
config_file = "/etc/%s/%s.conf" % (PROJECT_NAME, PROJECT_NAME)
|
||||||
|
config = configparser.ConfigParser()
|
||||||
|
config.read(config_file)
|
||||||
|
|
||||||
|
# Log settings
|
||||||
|
log_level_map = {'debug': logging.DEBUG,
|
||||||
|
'info': logging.INFO,
|
||||||
|
'warning': logging.WARNING,
|
||||||
|
'error': logging.ERROR,
|
||||||
|
'critical': logging.CRITICAL,
|
||||||
|
'notset': logging.NOTSET}
|
||||||
|
|
||||||
|
log_default_loc = "/var/log/%s/%s.log" % (PROJECT_NAME, PROJECT_NAME)
|
||||||
|
log_default_format = "%(asctime)s %(name)-4s %(levelname)-4s %(message)s"
|
||||||
|
log_level_name = get_option("DEFAULT", "log_level", 'error')
|
||||||
|
|
||||||
|
log_file = get_option("DEFAULT", "log_file", log_default_loc)
|
||||||
|
log_level = log_level_map.get(log_level_name.lower())
|
||||||
|
log_format = get_option("DEFAULT", "log_format", log_default_format)
|
||||||
|
|
||||||
|
# Server Settings
|
||||||
|
bind_port = get_option("DEFAULT", "bind_port", 8181, int)
|
||||||
|
bind_host = get_option("DEFAULT", "bind_host", "0.0.0.0")
|
||||||
|
debug = get_option("DEFAULT", "debug", False, bool)
|
||||||
|
|
||||||
|
# PODM Settings
|
||||||
|
podm_url = get_option("podm", "url", "http://127.0.0.1")
|
||||||
|
podm_user = get_option("podm", "user", "admin")
|
||||||
|
podm_password = get_option("podm", "password", "admin")
|
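As a point of reference, the keys read above imply a valence.conf along these lines (a minimal sketch only; the [podm] values are placeholders, and the [DEFAULT] values simply restate the defaults in this module):

    [DEFAULT]
    bind_host = 0.0.0.0
    bind_port = 8181
    log_level = error
    log_file = /var/log/valence/valence.log

    [podm]
    url = https://10.0.0.1:8443
    user = admin
    password = admin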
@ -1,67 +0,0 @@
|
|||||||
# Copyright (c) 2016 Intel, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
"""controller API for interfacing with Other modules"""
|
|
||||||
from oslo_config import cfg
|
|
||||||
from oslo_log import log as logging
|
|
||||||
from valence.common import rpc_service
|
|
||||||
|
|
||||||
|
|
||||||
# The Backend API class serves as a AMQP client for communicating
|
|
||||||
# on a topic exchange specific to the controllers. This allows the ReST
|
|
||||||
# API to trigger operations on the controllers
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class API(rpc_service.API):
|
|
||||||
def __init__(self, transport=None, context=None, topic=None):
|
|
||||||
if topic is None:
|
|
||||||
cfg.CONF.import_opt('topic', 'valence.controller.config',
|
|
||||||
group='controller')
|
|
||||||
super(API, self).__init__(transport, context,
|
|
||||||
topic=cfg.CONF.controller.topic)
|
|
||||||
|
|
||||||
# Flavor Operations
|
|
||||||
|
|
||||||
def flavor_options(self):
|
|
||||||
return self._call('flavor_options')
|
|
||||||
|
|
||||||
def flavor_generate(self, criteria):
|
|
||||||
return self._call('flavor_generate', criteria=criteria)
|
|
||||||
|
|
||||||
# Node(s) Operations
|
|
||||||
def list_nodes(self, filters):
|
|
||||||
return self._call('list_nodes', filters=filters)
|
|
||||||
|
|
||||||
def get_nodebyid(self, nodeid):
|
|
||||||
return self._call('get_nodebyid', nodeid=nodeid)
|
|
||||||
|
|
||||||
def delete_composednode(self, nodeid):
|
|
||||||
return self._call('delete_composednode', nodeid=nodeid)
|
|
||||||
|
|
||||||
def update_node(self, nodeid):
|
|
||||||
return self._call('update_node')
|
|
||||||
|
|
||||||
def compose_nodes(self, criteria):
|
|
||||||
return self._call('compose_nodes', criteria=criteria)
|
|
||||||
|
|
||||||
def list_node_storages(self, data):
|
|
||||||
return self._call('list_node_storages')
|
|
||||||
|
|
||||||
def map_node_storage(self, data):
|
|
||||||
return self._call('map_node_storage')
|
|
||||||
|
|
||||||
def delete_node_storage(self, data):
|
|
||||||
return self._call('delete_node_storage')
|
|
@ -1,65 +0,0 @@
|
|||||||
# Copyright (c) 2016 Intel, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
"""Config options for Valence controller Service"""
|
|
||||||
|
|
||||||
from oslo_config import cfg
|
|
||||||
from oslo_log import log as logging
|
|
||||||
import sys
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
CONTROLLER_OPTS = [
|
|
||||||
cfg.StrOpt('topic',
|
|
||||||
default='valence-controller',
|
|
||||||
help='The queue to add controller tasks to.')
|
|
||||||
]
|
|
||||||
|
|
||||||
OS_INTERFACE_OPTS = [
|
|
||||||
cfg.StrOpt('os_admin_url',
|
|
||||||
help='Admin URL of Openstack'),
|
|
||||||
cfg.StrOpt('os_tenant',
|
|
||||||
default='admin',
|
|
||||||
help='Tenant for Openstack'),
|
|
||||||
cfg.StrOpt('os_user',
|
|
||||||
default='admin',
|
|
||||||
help='User for openstack'),
|
|
||||||
cfg.StrOpt('os_password',
|
|
||||||
default='addmin',
|
|
||||||
help='Password for openstack')
|
|
||||||
]
|
|
||||||
|
|
||||||
controller_conf_group = cfg.OptGroup(name='controller',
|
|
||||||
title='Valence controller options')
|
|
||||||
cfg.CONF.register_group(controller_conf_group)
|
|
||||||
cfg.CONF.register_opts(CONTROLLER_OPTS, group=controller_conf_group)
|
|
||||||
|
|
||||||
os_conf_group = cfg.OptGroup(name='undercloud',
|
|
||||||
title='Valence Openstack interface options')
|
|
||||||
cfg.CONF.register_group(os_conf_group)
|
|
||||||
cfg.CONF.register_opts(OS_INTERFACE_OPTS, group=os_conf_group)
|
|
||||||
|
|
||||||
|
|
||||||
def init(args, **kwargs):
|
|
||||||
# Register the configuration options
|
|
||||||
logging.register_options(cfg.CONF)
|
|
||||||
cfg.CONF(args=args, project='valence', **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_logging():
|
|
||||||
"""Sets up the logging options for a log with supplied name."""
|
|
||||||
domain = "valence"
|
|
||||||
logging.setup(cfg.CONF, domain)
|
|
||||||
LOG.info("Logging enabled!")
|
|
||||||
LOG.debug("command line: %s", " ".join(sys.argv))
|
|
@ -1,57 +0,0 @@
|
|||||||
# Copyright (c) 2016 Intel, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
from oslo_log import log as logging
|
|
||||||
from valence.common.redfish import api as rfsapi
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Handler(object):
|
|
||||||
"""Valence Node RPC handler.
|
|
||||||
|
|
||||||
These are the backend operations. They are executed by the backend ervice.
|
|
||||||
API calls via AMQP (within the ReST API) trigger the handlers to be called.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super(Handler, self).__init__()
|
|
||||||
|
|
||||||
def list_nodes(self, context, filters):
|
|
||||||
LOG.info(str(filters))
|
|
||||||
return rfsapi.nodes_list(None, filters)
|
|
||||||
|
|
||||||
def get_nodebyid(self, context, nodeid):
|
|
||||||
return rfsapi.get_nodebyid(nodeid)
|
|
||||||
|
|
||||||
def delete_composednode(self, context, nodeid):
|
|
||||||
return rfsapi.delete_composednode(nodeid)
|
|
||||||
|
|
||||||
def update_node(self, context, nodeid):
|
|
||||||
return {"node": "Update node attributes"}
|
|
||||||
|
|
||||||
def compose_nodes(self, context, criteria):
|
|
||||||
"""Chassis details could also be fetched and inserted"""
|
|
||||||
node_criteria = criteria["filter"] if "filter" in criteria else {}
|
|
||||||
return rfsapi.compose_node(node_criteria)
|
|
||||||
|
|
||||||
def list_node_storages(self, context, data):
|
|
||||||
return {"node": "List the storages attached to the node"}
|
|
||||||
|
|
||||||
def map_node_storage(self, context, data):
|
|
||||||
return {"node": "Map storages to a node"}
|
|
||||||
|
|
||||||
def delete_node_storage(self, context, data):
|
|
||||||
return {"node": "Deleted storages mapped to a node"}
|
|
@ -13,13 +13,12 @@
|
|||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
# from valence.flavor.plugins import *
|
import logging
|
||||||
import os
|
import os
|
||||||
from oslo_log import log as logging
|
from valence.redfish import redfish as rfs
|
||||||
from valence.common.redfish import api as rfs
|
|
||||||
|
|
||||||
FLAVOR_PLUGIN_PATH = os.path.dirname(os.path.abspath(__file__)) + '/plugins'
|
FLAVOR_PLUGIN_PATH = os.path.dirname(os.path.abspath(__file__)) + '/plugins'
|
||||||
logger = logging.getLogger()
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def get_available_criteria():
|
def get_available_criteria():
|
||||||
@ -28,27 +27,29 @@ def get_available_criteria():
|
|||||||
if os.path.isfile(os.path.join(FLAVOR_PLUGIN_PATH, f))
|
if os.path.isfile(os.path.join(FLAVOR_PLUGIN_PATH, f))
|
||||||
and not f.startswith('__') and f.endswith('.py')]
|
and not f.startswith('__') and f.endswith('.py')]
|
||||||
resp = []
|
resp = []
|
||||||
for p in pluginfiles:
|
for filename in pluginfiles:
|
||||||
module = import_module("valence.flavor.plugins." + p)
|
module = import_module("valence.flavor.plugins." + filename)
|
||||||
myclass = getattr(module, p + 'Generator')
|
myclass = getattr(module, filename + 'Generator')
|
||||||
inst = myclass([])
|
inst = myclass([])
|
||||||
resp.append({'name': p, 'description': inst.description()})
|
resp.append({'name': filename, 'description': inst.description()})
|
||||||
return {'criteria': resp}
|
return {'criteria': resp}
|
||||||
|
|
||||||
|
|
||||||
def create_flavors(criteria):
|
def create_flavors(data):
|
||||||
"""criteria : comma seperated generator names
|
"""criteria : comma seperated generator names
|
||||||
|
|
||||||
(These should be the same as their file names.)
|
(These should be the same as their file names.)
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
criteria = data["criteria"]
|
||||||
respjson = []
|
respjson = []
|
||||||
lst_nodes = rfs.nodes_list()
|
lst_systems = rfs.systems_list()
|
||||||
for g in criteria.split(","):
|
for criteria_name in criteria.split(","):
|
||||||
if g:
|
if criteria_name:
|
||||||
logger.info("Calling generator : %s ." % g)
|
LOG.info("Calling generator : %s ." % criteria_name)
|
||||||
module = __import__("valence.flavor.plugins." + g, fromlist=["*"])
|
module = __import__("valence.flavor.plugins." + criteria_name,
|
||||||
classobj = getattr(module, g + "Generator")
|
fromlist=["*"])
|
||||||
inst = classobj(lst_nodes)
|
classobj = getattr(module, criteria_name + "Generator")
|
||||||
|
inst = classobj(lst_systems)
|
||||||
respjson.append(inst.generate())
|
respjson.append(inst.generate())
|
||||||
return respjson
|
return respjson
|
||||||
|
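For reference, a minimal sketch of driving these helpers (assuming they live in valence.flavor; the plugin names come from the files under valence/flavor/plugins/ touched in this change):

    from valence.flavor import create_flavors, get_available_criteria

    # Lists every plugin found in valence/flavor/plugins/ with its description.
    print(get_available_criteria())

    # Comma separated generator names, matching the plugin file names.
    flavors = create_flavors({"criteria": "default,assettag"})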
@ -12,7 +12,7 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
from oslo_log import log as logging
|
import logging
|
||||||
import re
|
import re
|
||||||
from valence.flavor.generatorbase import generatorbase
|
from valence.flavor.generatorbase import generatorbase
|
||||||
|
|
||||||
@ -29,7 +29,7 @@ class assettagGenerator(generatorbase):
|
|||||||
def generate(self):
|
def generate(self):
|
||||||
LOG.info("Default Generator")
|
LOG.info("Default Generator")
|
||||||
for node in self.nodes:
|
for node in self.nodes:
|
||||||
LOG.info("Node ID " + node['nodeid'])
|
LOG.info("Node ID " + node['id'])
|
||||||
location = node['location']
|
location = node['location']
|
||||||
location = location.split('Sled')[0]
|
location = location.split('Sled')[0]
|
||||||
location_lst = re.split("(\d+)", location)
|
location_lst = re.split("(\d+)", location)
|
||||||
|
@ -12,10 +12,10 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
from oslo_log import log as logging
|
import logging
|
||||||
from valence.flavor.generatorbase import generatorbase
|
from valence.flavor.generatorbase import generatorbase
|
||||||
|
|
||||||
LOG = logging.getLogger()
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class defaultGenerator(generatorbase):
|
class defaultGenerator(generatorbase):
|
||||||
@ -29,14 +29,15 @@ class defaultGenerator(generatorbase):
|
|||||||
def generate(self):
|
def generate(self):
|
||||||
LOG.info("Default Generator")
|
LOG.info("Default Generator")
|
||||||
for node in self.nodes:
|
for node in self.nodes:
|
||||||
LOG.info("Node ID " + node['nodeid'])
|
LOG.debug("Node ID " + node['id'])
|
||||||
location = node['location']
|
location = node['location']
|
||||||
|
LOG.debug(location)
|
||||||
location_lst = location.split("_")
|
location_lst = location.split("_")
|
||||||
location_lst = list(filter(None, location_lst))
|
location_lst = list(filter(None, location_lst))
|
||||||
extraspecs = (
|
extraspecs = ({l[0]: l[1]
|
||||||
{l[0]: l[1] for l in (l.split(":") for l in location_lst)})
|
for l in (l.split(":") for l in location_lst)})
|
||||||
name = self.prepend_name + location
|
name = self.prepend_name + node['id']
|
||||||
return {
|
return [
|
||||||
self._flavor_template("L_" + name,
|
self._flavor_template("L_" + name,
|
||||||
node['ram'],
|
node['ram'],
|
||||||
node['cpu']["count"],
|
node['cpu']["count"],
|
||||||
@ -52,4 +53,4 @@ class defaultGenerator(generatorbase):
|
|||||||
int(node['cpu']["count"]) / 4,
|
int(node['cpu']["count"]) / 4,
|
||||||
int(node['storage']) / 4,
|
int(node['storage']) / 4,
|
||||||
extraspecs)
|
extraspecs)
|
||||||
}
|
]
|
||||||
|
@ -12,10 +12,10 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
from oslo_log import log as logging
|
import logging
|
||||||
from valence.flavor.generatorbase import generatorbase
|
from valence.flavor.generatorbase import generatorbase
|
||||||
|
|
||||||
logger = logging.getLogger()
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class exampleGenerator(generatorbase):
|
class exampleGenerator(generatorbase):
|
||||||
@ -23,5 +23,5 @@ class exampleGenerator(generatorbase):
|
|||||||
generatorbase.__init__(self, nodes)
|
generatorbase.__init__(self, nodes)
|
||||||
|
|
||||||
def generate(self):
|
def generate(self):
|
||||||
logger.info("Example Flavor Generate")
|
LOG.info("Example Flavor Generate")
|
||||||
return {"Error": "Example Flavor Generator- Not Yet Implemented"}
|
return {"Info": "Example Flavor Generator- Not Yet Implemented"}
|
||||||
|
@ -1,63 +0,0 @@
|
|||||||
# Copyright (c) 2016 Intel, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
"""Valence common internal object model"""
|
|
||||||
|
|
||||||
from oslo_versionedobjects import base as ovoo_base
|
|
||||||
from oslo_versionedobjects import fields as ovoo_fields
|
|
||||||
|
|
||||||
|
|
||||||
remotable_classmethod = ovoo_base.remotable_classmethod
|
|
||||||
remotable = ovoo_base.remotable
|
|
||||||
|
|
||||||
|
|
||||||
class ValenceObjectRegistry(ovoo_base.VersionedObjectRegistry):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ValenceObject(ovoo_base.VersionedObject):
|
|
||||||
"""Base class and object factory.
|
|
||||||
|
|
||||||
This forms the base of all objects that can be remoted or instantiated
|
|
||||||
via RPC. Simply defining a class that inherits from this base class
|
|
||||||
will make it remotely instantiatable. Objects should implement the
|
|
||||||
necessary "get" classmethod routines as well as "save" object methods
|
|
||||||
as appropriate.
|
|
||||||
"""
|
|
||||||
OBJ_PROJECT_NAMESPACE = 'Valence'
|
|
||||||
|
|
||||||
def as_dict(self):
|
|
||||||
return {k: getattr(self, k)
|
|
||||||
for k in self.fields
|
|
||||||
if self.obj_attr_is_set(k)}
|
|
||||||
|
|
||||||
|
|
||||||
class ValenceObjectDictCompat(ovoo_base.VersionedObjectDictCompat):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ValencePersistentObject(object):
|
|
||||||
"""Mixin class for Persistent objects.
|
|
||||||
|
|
||||||
This adds the fields that we use in common for all persistent objects.
|
|
||||||
"""
|
|
||||||
fields = {
|
|
||||||
'created_at': ovoo_fields.DateTimeField(nullable=True),
|
|
||||||
'updated_at': ovoo_fields.DateTimeField(nullable=True),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class ValenceObjectSerializer(ovoo_base.VersionedObjectSerializer):
|
|
||||||
# Base class to use for object hydration
|
|
||||||
OBJ_BASE_CLASS = ValenceObject
|
|
@ -14,14 +14,14 @@
|
|||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
import json
|
import json
|
||||||
from oslo_config import cfg
|
import logging
|
||||||
from oslo_log import log as logging
|
|
||||||
import requests
|
import requests
|
||||||
from requests.auth import HTTPBasicAuth
|
from requests.auth import HTTPBasicAuth
|
||||||
from valence.common.redfish import tree
|
from valence import config as cfg
|
||||||
|
from valence.redfish import tree
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
LOG = logging.getLogger(__name__)
|
||||||
cfg.CONF.import_group('podm', 'valence.common.redfish.config')
|
|
||||||
|
|
||||||
|
|
||||||
def get_rfs_url(serviceext):
|
def get_rfs_url(serviceext):
|
||||||
@ -29,17 +29,18 @@ def get_rfs_url(serviceext):
|
|||||||
INDEX = ''
|
INDEX = ''
|
||||||
# '/index.json'
|
# '/index.json'
|
||||||
if REDFISH_BASE_EXT in serviceext:
|
if REDFISH_BASE_EXT in serviceext:
|
||||||
return cfg.CONF.podm.url + serviceext + INDEX
|
return cfg.podm_url + serviceext + INDEX
|
||||||
else:
|
else:
|
||||||
return cfg.CONF.podm.url + REDFISH_BASE_EXT + serviceext + INDEX
|
return cfg.podm_url + REDFISH_BASE_EXT + serviceext + INDEX
|
||||||
|
|
||||||
|
|
||||||
def send_request(resource, method="GET", **kwargs):
|
def send_request(resource, method="GET", **kwargs):
|
||||||
# The verify=false param in the request should be removed eventually
|
# The verify=false param in the request should be removed eventually
|
||||||
url = get_rfs_url(resource)
|
url = get_rfs_url(resource)
|
||||||
httpuser = cfg.CONF.podm.user
|
httpuser = cfg.podm_user
|
||||||
httppwd = cfg.CONF.podm.password
|
httppwd = cfg.podm_password
|
||||||
resp = None
|
resp = None
|
||||||
|
LOG.debug(url)
|
||||||
try:
|
try:
|
||||||
resp = requests.request(method, url, verify=False,
|
resp = requests.request(method, url, verify=False,
|
||||||
auth=HTTPBasicAuth(httpuser, httppwd),
|
auth=HTTPBasicAuth(httpuser, httppwd),
|
||||||
@ -92,57 +93,6 @@ def generic_filter(jsonContent, filterConditions):
|
|||||||
return is_filter_passed
|
return is_filter_passed
|
||||||
|
|
||||||
|
|
||||||
def get_details(source):
|
|
||||||
returnJSONObj = []
|
|
||||||
members = source['Members']
|
|
||||||
for member in members:
|
|
||||||
resource = member['@odata.id']
|
|
||||||
resp = send_request(resource)
|
|
||||||
memberJson = resp.json()
|
|
||||||
memberJsonObj = json.loads(memberJson)
|
|
||||||
returnJSONObj[resource] = memberJsonObj
|
|
||||||
return returnJSONObj
|
|
||||||
|
|
||||||
|
|
||||||
def systemdetails():
|
|
||||||
returnJSONObj = []
|
|
||||||
parsed = send_request('Systems')
|
|
||||||
members = parsed['Members']
|
|
||||||
for member in members:
|
|
||||||
resource = member['@odata.id']
|
|
||||||
resp = send_request(resource)
|
|
||||||
memberJsonContent = resp.json()
|
|
||||||
memberJSONObj = json.loads(memberJsonContent)
|
|
||||||
returnJSONObj[resource] = memberJSONObj
|
|
||||||
return(json.dumps(returnJSONObj))
|
|
||||||
|
|
||||||
|
|
||||||
def nodedetails():
|
|
||||||
returnJSONObj = []
|
|
||||||
parsed = send_request('Nodes')
|
|
||||||
members = parsed['Members']
|
|
||||||
for member in members:
|
|
||||||
resource = member['@odata.id']
|
|
||||||
resp = send_request(resource)
|
|
||||||
memberJSONObj = resp.json()
|
|
||||||
returnJSONObj[resource] = memberJSONObj
|
|
||||||
return(json.dumps(returnJSONObj))
|
|
||||||
|
|
||||||
|
|
||||||
def podsdetails():
|
|
||||||
jsonContent = send_request('Chassis')
|
|
||||||
pods = filter_chassis(jsonContent, 'Pod')
|
|
||||||
podsDetails = get_details(pods)
|
|
||||||
return json.dumps(podsDetails)
|
|
||||||
|
|
||||||
|
|
||||||
def racksdetails():
|
|
||||||
jsonContent = send_request('Chassis')
|
|
||||||
racks = filter_chassis(jsonContent, 'Rack')
|
|
||||||
racksDetails = get_details(racks)
|
|
||||||
return json.dumps(racksDetails)
|
|
||||||
|
|
||||||
|
|
||||||
def racks():
|
def racks():
|
||||||
jsonContent = send_request('Chassis')
|
jsonContent = send_request('Chassis')
|
||||||
racks = filter_chassis(jsonContent, 'Rack')
|
racks = filter_chassis(jsonContent, 'Rack')
|
||||||
@ -165,36 +115,39 @@ def urls2list(url):
|
|||||||
return []
|
return []
|
||||||
|
|
||||||
|
|
||||||
def extract_val(data, path):
|
def extract_val(data, path, defaultval=None):
|
||||||
# function to select the value at a particular path
|
# function to select the value at a particular path
|
||||||
patharr = path.split("/")
|
patharr = path.split("/")
|
||||||
for p in patharr:
|
for p in patharr:
|
||||||
data = data[p]
|
data = data[p]
|
||||||
|
data = (data if data else defaultval)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
def node_cpu_details(nodeurl):
|
def node_cpu_details(nodeurl):
|
||||||
cpucnt = 0
|
cpucnt = 0
|
||||||
cpuarch = ""
|
cpuarch = ""
|
||||||
|
cpumodel = ""
|
||||||
cpulist = urls2list(nodeurl + '/Processors')
|
cpulist = urls2list(nodeurl + '/Processors')
|
||||||
for lnk in cpulist:
|
for lnk in cpulist:
|
||||||
LOG.info("Processing CPU %s" % lnk)
|
LOG.info("Processing CPU %s" % lnk)
|
||||||
resp = send_request(lnk)
|
resp = send_request(lnk)
|
||||||
respdata = resp.json()
|
respdata = resp.json()
|
||||||
cpucnt += extract_val(respdata, "TotalCores")
|
# Check if CPU data is populated. It also may have NULL values
|
||||||
cpuarch = extract_val(respdata, "InstructionSet")
|
cpucnt += extract_val(respdata, "TotalCores", 0)
|
||||||
cpumodel = extract_val(respdata, "Model")
|
cpuarch = extract_val(respdata, "InstructionSet", "")
|
||||||
|
cpumodel = extract_val(respdata, "Model", "")
|
||||||
LOG.debug(" Cpu details %s: %d: %s: %s "
|
LOG.debug(" Cpu details %s: %d: %s: %s "
|
||||||
% (nodeurl, cpucnt, cpuarch, cpumodel))
|
% (nodeurl, cpucnt, cpuarch, cpumodel))
|
||||||
return {"count": str(cpucnt), "arch": cpuarch, "model": cpumodel}
|
return {"cores": str(cpucnt), "arch": cpuarch, "model": cpumodel}
|
||||||
|
|
||||||
|
|
||||||
def node_ram_details(nodeurl):
|
def node_ram_details(nodeurl):
|
||||||
# this extracts the RAM and returns as dictionary
|
# this extracts the RAM and returns as dictionary
|
||||||
resp = send_request(nodeurl)
|
resp = send_request(nodeurl)
|
||||||
respjson = resp.json()
|
respjson = resp.json()
|
||||||
ram = extract_val(respjson, "MemorySummary/TotalSystemMemoryGiB")
|
ram = extract_val(respjson, "MemorySummary/TotalSystemMemoryGiB", "0")
|
||||||
return str(ram) if ram else "0"
|
return str(ram)
|
||||||
|
|
||||||
|
|
||||||
def node_nw_details(nodeurl):
|
def node_nw_details(nodeurl):
|
||||||
@ -214,6 +167,8 @@ def node_storage_details(nodeurl):
|
|||||||
resp = send_request(lnk)
|
resp = send_request(lnk)
|
||||||
respbody = resp.json()
|
respbody = resp.json()
|
||||||
hdds = extract_val(respbody, "Devices")
|
hdds = extract_val(respbody, "Devices")
|
||||||
|
if not hdds:
|
||||||
|
continue
|
||||||
for sd in hdds:
|
for sd in hdds:
|
||||||
if "CapacityBytes" in sd:
|
if "CapacityBytes" in sd:
|
||||||
if sd["CapacityBytes"] is not None:
|
if sd["CapacityBytes"] is not None:
|
||||||
@ -223,21 +178,17 @@ def node_storage_details(nodeurl):
|
|||||||
return str(storagecnt / 1073741824).split(".")[0]
|
return str(storagecnt / 1073741824).split(".")[0]
|
||||||
|
|
||||||
|
|
||||||
def systems_list(count=None, filters={}):
|
def systems_list(filters={}):
|
||||||
# comment the count value which is set to 2 now..
|
|
||||||
# list of nodes with hardware details needed for flavor creation
|
# list of nodes with hardware details needed for flavor creation
|
||||||
# count = 2
|
|
||||||
lst_systems = []
|
lst_systems = []
|
||||||
systemurllist = urls2list("Systems")
|
systemurllist = urls2list("Systems")
|
||||||
podmtree = build_hierarchy_tree()
|
podmtree = build_hierarchy_tree()
|
||||||
|
LOG.info(systemurllist)
|
||||||
for lnk in systemurllist[:count]:
|
for lnk in systemurllist:
|
||||||
filterPassed = True
|
filterPassed = True
|
||||||
resp = send_request(lnk)
|
resp = send_request(lnk)
|
||||||
system = resp.json()
|
system = resp.json()
|
||||||
|
|
||||||
# this below code need to be changed when proper query mechanism
|
|
||||||
# is implemented
|
|
||||||
if any(filters):
|
if any(filters):
|
||||||
filterPassed = generic_filter(system, filters)
|
filterPassed = generic_filter(system, filters)
|
||||||
if not filterPassed:
|
if not filterPassed:
|
||||||
@ -250,10 +201,10 @@ def systems_list(count=None, filters={}):
|
|||||||
ram = node_ram_details(lnk)
|
ram = node_ram_details(lnk)
|
||||||
nw = node_nw_details(lnk)
|
nw = node_nw_details(lnk)
|
||||||
storage = node_storage_details(lnk)
|
storage = node_storage_details(lnk)
|
||||||
node = {"nodeid": systemid, "cpu": cpu,
|
system = {"id": systemid, "cpu": cpu,
|
||||||
"ram": ram, "storage": storage,
|
"ram": ram, "storage": storage,
|
||||||
"nw": nw, "location": systemlocation,
|
"nw": nw, "location": systemlocation,
|
||||||
"uuid": systemuuid}
|
"uuid": systemuuid}
|
||||||
|
|
||||||
# filter based on RAM, CPU, NETWORK..etc
|
# filter based on RAM, CPU, NETWORK..etc
|
||||||
if 'ram' in filters:
|
if 'ram' in filters:
|
||||||
@ -274,8 +225,7 @@ def systems_list(count=None, filters={}):
|
|||||||
else False)
|
else False)
|
||||||
|
|
||||||
if filterPassed:
|
if filterPassed:
|
||||||
lst_systems.append(node)
|
lst_systems.append(system)
|
||||||
# LOG.info(str(node))
|
|
||||||
return lst_systems
|
return lst_systems
|
||||||
|
|
||||||
|
|
||||||
@ -315,9 +265,12 @@ def get_chassis_list():
|
|||||||
return lst_chassis
|
return lst_chassis
|
||||||
|
|
||||||
|
|
||||||
|
def get_systembyid(systemid):
|
||||||
|
return systems_list({"Id": systemid})
|
||||||
|
|
||||||
|
|
||||||
def get_nodebyid(nodeid):
|
def get_nodebyid(nodeid):
|
||||||
resp = send_request("Nodes/" + nodeid)
|
return nodes_list({"Id": nodeid})
|
||||||
return resp.json()
|
|
||||||
|
|
||||||
|
|
||||||
def build_hierarchy_tree():
|
def build_hierarchy_tree():
|
||||||
@ -338,18 +291,16 @@ def build_hierarchy_tree():
|
|||||||
return podmtree
|
return podmtree
|
||||||
|
|
||||||
|
|
||||||
def compose_node(criteria={}):
|
def compose_node(data):
|
||||||
composeurl = "Nodes/Actions/Allocate"
|
composeurl = "Nodes/Actions/Allocate"
|
||||||
headers = {'Content-type': 'application/json'}
|
headers = {'Content-type': 'application/json'}
|
||||||
|
criteria = data["criteria"]
|
||||||
if not criteria:
|
if not criteria:
|
||||||
resp = send_request(composeurl, "POST", headers=headers)
|
resp = send_request(composeurl, "POST", headers=headers)
|
||||||
else:
|
else:
|
||||||
resp = send_request(composeurl, "POST", json=criteria, headers=headers)
|
resp = send_request(composeurl, "POST", json=criteria, headers=headers)
|
||||||
LOG.info(resp.headers)
|
|
||||||
LOG.info(resp.text)
|
|
||||||
LOG.info(resp.status_code)
|
|
||||||
composednode = resp.headers['Location']
|
|
||||||
|
|
||||||
|
composednode = resp.headers['Location']
|
||||||
return {"node": composednode}
|
return {"node": composednode}
|
||||||
|
|
||||||
|
|
||||||
@ -359,10 +310,9 @@ def delete_composednode(nodeid):
|
|||||||
return resp
|
return resp
|
||||||
|
|
||||||
|
|
||||||
def nodes_list(count=None, filters={}):
|
def nodes_list(filters={}):
|
||||||
# comment the count value which is set to 2 now..
|
|
||||||
# list of nodes with hardware details needed for flavor creation
|
# list of nodes with hardware details needed for flavor creation
|
||||||
# count = 2
|
LOG.debug(filters)
|
||||||
lst_nodes = []
|
lst_nodes = []
|
||||||
nodeurllist = urls2list("Nodes")
|
nodeurllist = urls2list("Nodes")
|
||||||
# podmtree = build_hierarchy_tree()
|
# podmtree = build_hierarchy_tree()
|
||||||
@ -376,10 +326,9 @@ def nodes_list(count=None, filters={}):
|
|||||||
else:
|
else:
|
||||||
node = resp.json()
|
node = resp.json()
|
||||||
|
|
||||||
# this below code need to be changed when proper query mechanism
|
|
||||||
# is implemented
|
|
||||||
if any(filters):
|
if any(filters):
|
||||||
filterPassed = generic_filter(node, filters)
|
filterPassed = generic_filter(node, filters)
|
||||||
|
LOG.info("FILTER PASSED" + str(filterPassed))
|
||||||
if not filterPassed:
|
if not filterPassed:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -392,25 +341,41 @@ def nodes_list(count=None, filters={}):
|
|||||||
cpu = {}
|
cpu = {}
|
||||||
ram = 0
|
ram = 0
|
||||||
nw = 0
|
nw = 0
|
||||||
localstorage = node_storage_details(nodesystemurl)
|
storage = node_storage_details(nodesystemurl)
|
||||||
if "Processors" in node:
|
cpu = node_cpu_details(lnk)
|
||||||
cpu = {"count": node["Processors"]["Count"],
|
|
||||||
"model": node["Processors"]["Model"]}
|
|
||||||
|
|
||||||
if "Memory" in node:
|
if "Memory" in node:
|
||||||
ram = node["Memory"]["TotalSystemMemoryGiB"]
|
ram = node["Memory"]["TotalSystemMemoryGiB"]
|
||||||
|
|
||||||
if "EthernetInterfaces" in node["Links"] and node[
|
if ("EthernetInterfaces" in node["Links"] and
|
||||||
"Links"]["EthernetInterfaces"]:
|
node["Links"]["EthernetInterfaces"]):
|
||||||
nw = len(node["Links"]["EthernetInterfaces"])
|
nw = len(node["Links"]["EthernetInterfaces"])
|
||||||
|
|
||||||
bmcip = "127.0.0.1" # system['Oem']['Dell_G5MC']['BmcIp']
|
bmcip = "127.0.0.1" # system['Oem']['Dell_G5MC']['BmcIp']
|
||||||
bmcmac = "00:00:00:00:00" # system['Oem']['Dell_G5MC']['BmcMac']
|
bmcmac = "00:00:00:00:00" # system['Oem']['Dell_G5MC']['BmcMac']
|
||||||
node = {"nodeid": nodeid, "cpu": cpu,
|
node = {"id": nodeid, "cpu": cpu,
|
||||||
"ram": ram, "storage": localstorage,
|
"ram": ram, "storage": storage,
|
||||||
"nw": nw, "location": nodelocation,
|
"nw": nw, "location": nodelocation,
|
||||||
"uuid": nodeuuid, "bmcip": bmcip, "bmcmac": bmcmac}
|
"uuid": nodeuuid, "bmcip": bmcip, "bmcmac": bmcmac}
|
||||||
|
|
||||||
|
# filter based on RAM, CPU, NETWORK..etc
|
||||||
|
if 'ram' in filters:
|
||||||
|
filterPassed = (True
|
||||||
|
if int(ram) >= int(filters['ram'])
|
||||||
|
else False)
|
||||||
|
|
||||||
|
# filter based on RAM, CPU, NETWORK..etc
|
||||||
|
if 'nw' in filters:
|
||||||
|
filterPassed = (True
|
||||||
|
if int(nw) >= int(filters['nw'])
|
||||||
|
else False)
|
||||||
|
|
||||||
|
# filter based on RAM, CPU, NETWORK..etc
|
||||||
|
if 'storage' in filters:
|
||||||
|
filterPassed = (True
|
||||||
|
if int(storage) >= int(filters['storage'])
|
||||||
|
else False)
|
||||||
|
|
||||||
if filterPassed:
|
if filterPassed:
|
||||||
lst_nodes.append(node)
|
lst_nodes.append(node)
|
||||||
# LOG.info(str(node))
|
return lst_nodes
|
||||||
return lst_nodes
|
|
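A rough illustration of the reworked listing helpers (the id value is made up): the whole filter dict is first checked by generic_filter(), and the 'ram', 'nw' and 'storage' keys are additionally treated as integer lower bounds.

    from valence.redfish import redfish as rfs

    all_nodes = rfs.nodes_list()            # no filters: every composed node
    one_node = rfs.nodes_list({"Id": "1"})  # same pattern get_nodebyid() uses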
29
valence/run.py
Executable file
@ -0,0 +1,29 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
# copyright (c) 2016 Intel, Inc.
|
||||||
|
#
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from valence.api.route import app as application
|
||||||
|
from valence import config as cfg
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
application.run(host=cfg.bind_host, port=cfg.bind_port, debug=cfg.debug)
|
||||||
|
LOG.info(("Valence Server on http://%(host)s:%(port)s"),
|
||||||
|
{'host': cfg.bind_host, 'port': cfg.bind_port})
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
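Once the service is up, a quick smoke test could look like the following (assuming the default bind_port of 8181 from valence/config.py; adjust host and port if overridden):

    import requests

    resp = requests.get("http://127.0.0.1:8181/v1")
    print(resp.status_code)  # the functional tests below expect 200 for / and /v1
    print(resp.text)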
@ -1,7 +1,5 @@
|
|||||||
import os
|
|
||||||
from pecan import set_config
|
|
||||||
from pecan.testing import load_test_app
|
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
|
from valence.api.route import app
|
||||||
|
|
||||||
__all__ = ['FunctionalTest']
|
__all__ = ['FunctionalTest']
|
||||||
|
|
||||||
@ -15,10 +13,8 @@ class FunctionalTest(TestCase):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.app = load_test_app(os.path.join(
|
self.app = app.test_client()
|
||||||
os.path.dirname(__file__),
|
self.app.testing = True
|
||||||
'config.py'
|
|
||||||
))
|
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
set_config({}, overwrite=True)
|
pass
|
||||||
|
@ -1,22 +1,12 @@
|
|||||||
from valence.tests import FunctionalTest
|
from valence.tests import FunctionalTest
|
||||||
# from unittest import TestCase
|
|
||||||
# from webtest import TestApp
|
|
||||||
|
|
||||||
|
|
||||||
class TestRootController(FunctionalTest):
|
class TestRootController(FunctionalTest):
|
||||||
|
|
||||||
def test_get(self):
|
def test_root_get(self):
|
||||||
response = self.app.get('/')
|
response = self.app.get('/')
|
||||||
assert response.status_int == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
def test_search(self):
|
def test_v1_get(self):
|
||||||
response = self.app.post('/', params={'q': 'RestController'})
|
response = self.app.get('/v1')
|
||||||
assert response.status_int == 302
|
assert response.status_code == 200
|
||||||
assert response.headers['Location'] == (
|
|
||||||
'http://pecan.readthedocs.org/en/latest/search.html'
|
|
||||||
'?q=RestController'
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_get_not_found(self):
|
|
||||||
response = self.app.get('/a/bogus/url', expect_errors=True)
|
|
||||||
assert response.status_int == 404
|
|
||||||
|
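Further cases can reuse the Flask test client set up in FunctionalTest in the same way; for example (the bogus URL is purely illustrative):

    from valence.tests import FunctionalTest


    class TestNotFound(FunctionalTest):

        def test_bogus_url_returns_404(self):
            response = self.app.get('/a/bogus/url')
            assert response.status_code == 404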