diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5d13da1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,19 @@ +*.pyc +*.retry +*.tox/ +.idea/* +.venv +.stestr/ + +dist +build/* +venus.egg-info/ +venus/hacking/__pycache__/ + +doc/build/* +doc/source/_static/*.sample +api-ref/build/* +releasenotes/build* + +AUTHORS +Authors diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..d55e885 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,20 @@ +The source repository for this project can be found at: + + https://opendev.org/openstack/venus + +Pull requests submitted through GitHub are not monitored. + +To start contributing to OpenStack, follow the steps in the contribution guide +to set up and use Gerrit: + + https://docs.openstack.org/contributors/code-and-documentation/quick-start.html + +Bugs should be filed on Storyboard: + + https://storyboard.openstack.org/#!/project/openstack/venus + +For more specific information about contributing to this repository, see the +Venus contributor guide: + + https://docs.openstack.org/venus/latest/contributor/contributing.html + diff --git a/HACKING.rst b/HACKING.rst new file mode 100644 index 0000000..7092ade --- /dev/null +++ b/HACKING.rst @@ -0,0 +1,58 @@ +Venus Style Commandments +========================= + +- Step 1: Read the OpenStack Style Commandments + http://docs.openstack.org/developer/hacking/ +- Step 2: Read on + +Venus Specific Commandments +---------------------------- +- [N314] Check for vi editor configuration in source files. +- [N319] Validate that debug level logs are not translated. +- [N322] Ensure default arguments are not mutable. +- [N323] Add check for explicit import of _() to ensure proper translation. +- [N325] str() and unicode() cannot be used on an exception. Remove or use six.text_type(). +- [N328] LOG.info messages require translations `_LI()`. +- [N329] LOG.exception and LOG.error messages require translations `_LE()`. 
+- [N330] LOG.warning messages require translations `_LW()`. +- [N333] Ensure that oslo namespaces are used for namespaced libraries. +- [N336] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs. +- [C301] timeutils.utcnow() from oslo_utils should be used instead of datetime.now(). +- [C302] six.text_type should be used instead of unicode. +- [C303] Ensure that there are no 'print()' statements in code that is being committed. +- [C304] Enforce no use of LOG.audit messages. LOG.info should be used instead. +- [C305] Prevent use of deprecated contextlib.nested. +- [C306] timeutils.strtime() must not be used (deprecated). +- [C307] LOG.warn is deprecated. Enforce use of LOG.warning. +- [C308] timeutils.isotime() must not be used (deprecated). +- [C309] Unit tests should not perform logging. +- [C310] Check for improper use of logging format arguments. + +General +------- +- Use 'raise' instead of 'raise e' to preserve original traceback or exception being reraised:: + + except Exception as e: + ... + raise e # BAD + + except Exception: + ... + raise # OKAY + + + +Creating Unit Tests +------------------- +For every new feature, unit tests should be created that both test and +(implicitly) document the usage of said feature. If submitting a patch for a +bug that had no unit test, a new passing unit test should be added. If a +submitted bug fix does have a unit test, be sure to add a new one that fails +without the patch and passes with the patch. + +Venus is transitioning to use mock, rather than mox, and so new tests should +use mock only. + +For more information on creating unit tests and utilizing the testing +infrastructure in OpenStack Venus, please read the Venus testing +`README.rst `_. 
diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..68c771a --- /dev/null +++ b/LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..c978a52 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,6 @@ +include AUTHORS +include ChangeLog +exclude .gitignore +exclude .gitreview + +global-exclude *.pyc diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..ed9ddaf --- /dev/null +++ b/README.rst @@ -0,0 +1,23 @@ +====== +VENUS +====== + +# TODO(brinzhang): Describe the readme for the Venus project. + +You have come across a log management service for an open cloud computing service. +It has identified itself as `Venus`. It was abstracted from the Cinder project. + +* Wiki: https://github.com/hahaps/openstack-project-generator +* Developer docs: https://github.com/hahaps/openstack-project-generator + +Getting Started +--------------- + +If you'd like to run from the master branch, you can clone the git repo: + + git clone https://github.com/hahaps/openstack-project-generator + +For developer information please see +`HACKING.rst `_ + +You can raise bugs here https://github.com/hahaps/openstack-project-generator diff --git a/api-ref/source/conf.py b/api-ref/source/conf.py new file mode 100644 index 0000000..541cbc5 --- /dev/null +++ b/api-ref/source/conf.py @@ -0,0 +1,63 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +extensions = [ + 'openstackdocstheme', + 'os_api_ref', +] + +# -- General configuration ---------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +copyright = u'2016-present, OpenStack Foundation' + +# openstackdocstheme options +openstackdocs_repo_name = 'openstack/venus' +openstackdocs_bug_project = 'venus' +openstackdocs_bug_tag = 'api-ref' + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'native' + +# -- Options for HTML output -------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. +html_theme = 'openstackdocs' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "sidebar_mode": "toc", +} + +# -- Options for LaTeX output ------------------------------------------------- + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). +latex_documents = [ + ('index', 'Venus.tex', u'OpenStack Log API Documentation', + u'OpenStack Foundation', 'manual'), +] diff --git a/api-ref/source/index.rst b/api-ref/source/index.rst new file mode 100644 index 0000000..6822a0c --- /dev/null +++ b/api-ref/source/index.rst @@ -0,0 +1,11 @@ +=========================== +OpenStack Log APIs +=========================== + +This is a reference for the OpenStack Log API which is provided by +the Venus project. + +.. 
toctree:: + :maxdepth: 1 + + v2/index diff --git a/babel.cfg b/babel.cfg new file mode 100644 index 0000000..efceab8 --- /dev/null +++ b/babel.cfg @@ -0,0 +1 @@ +[python: **.py] diff --git a/doc/README.rst b/doc/README.rst new file mode 100644 index 0000000..b387bd0 --- /dev/null +++ b/doc/README.rst @@ -0,0 +1,17 @@ +======================= +Venus Development Docs +======================= + +Files under this directory tree are used for generating the documentation +for the Venus source code. + +Developer documentation is built to: +https://docs.openstack.org/venus/latest/ + +Building Documentation +====================== + +Doc builds are performed using tox with the ``docs`` target:: + + % cd .. + % tox -e docs diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000..37137ab --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,12 @@ +# The order of packages is significant, because pip processes them in the order +# of appearance. Changing the order has an impact on the overall integration +# process, which may cause wedges in the gate later. + +sphinx>=2.0.0,!=2.1.0 # BSD +sphinxcontrib-httpdomain>=1.3.0 # BSD +sphinxcontrib-pecanwsme>=0.2 # Apache-2.0 +sphinxcontrib-seqdiag>=0.8.4 # BSD +sphinxcontrib-svg2pdfconverter>=0.1.0 # BSD +reno>=3.1.0 # Apache-2.0 +os-api-ref>=1.4.0 # Apache-2.0 +openstackdocstheme>=2.2.1 # Apache-2.0 diff --git a/doc/source/.gitreview b/doc/source/.gitreview new file mode 100644 index 0000000..0b37a52 --- /dev/null +++ b/doc/source/.gitreview @@ -0,0 +1,4 @@ +[gerrit] +host=review.opendev.org +port=29418 +project=inspur/venus.git diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 0000000..e22750b --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +sys.path.insert(0, os.path.abspath('../..')) +# -- General configuration ---------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + 'sphinx.ext.autodoc', + 'openstackdocstheme', + 'oslo_config.sphinxconfiggen', + 'oslo_config.sphinxext', + 'oslo_policy.sphinxext', + 'oslo_policy.sphinxpolicygen', + 'sphinxcontrib.rsvgconverter', +] + +# autodoc generation is a bit aggressive and a nuisance when doing heavy +# text edit cycles. +# execute "export SPHINX_DEBUG=1" in your terminal to disable + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +config_generator_config_file = '../../tools/config/venus-config-generator.conf' +sample_config_basename = '_static/venus' + +policy_generator_config_file = [ + ('../../tools/config/venus-policy-generator.conf', + '_static/venus'), +] + +# If true, '()' will be appended to :func: etc. cross-reference text. +add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +add_module_names = True + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = 'native' diff --git a/doc/source/index.rst b/doc/source/index.rst new file mode 100644 index 0000000..8196b93 --- /dev/null +++ b/doc/source/index.rst @@ -0,0 +1,18 @@ +Log management service (Venus) +============================== + +Venus is an OpenStack project that aims to provide a one-stop solution +to log collection, cleaning, indexing, analysis, alarm, visualization, +report generation and other needs, which involves helping operator or +maintainer to quickly solve retrieve problems, grasp the operational +health of the platform, and improve the level of platform management. + +Which can include OpenStack logs, operating system logs, cloud +platform service logs, and virtualized application related logs. + + +Overview +-------- + +.. toctree:: + :maxdepth: 1 diff --git a/etc/venus/README-venus.conf.sample b/etc/venus/README-venus.conf.sample new file mode 100644 index 0000000..c8f8b15 --- /dev/null +++ b/etc/venus/README-venus.conf.sample @@ -0,0 +1,5 @@ +The venus.conf sample file is no longer generated and +maintained in Trunk. 
To generate your own version of +venus.conf, use the following command: + +tox -egenconfig diff --git a/etc/venus/api-httpd.conf b/etc/venus/api-httpd.conf new file mode 100644 index 0000000..4b8ae0b --- /dev/null +++ b/etc/venus/api-httpd.conf @@ -0,0 +1,16 @@ +Listen 10010 +LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\" %D(us)" venus_combined + +<VirtualHost *:10010> + WSGIDaemonProcess osapi_venus processes=2 threads=1 user=venus display-name=%{GROUP} + WSGIProcessGroup osapi_venus + WSGIScriptAlias / /var/www/cgi-bin/venus/osapi_venus + WSGIApplicationGroup %{GLOBAL} + WSGIPassAuthorization On + <IfVersion >= 2.4> + ErrorLogFormat "%{cu}t %M" + </IfVersion> + ErrorLog /var/log/apache2/venus_error.log + CustomLog /var/log/apache2/venus.log venus_combined +</VirtualHost> + diff --git a/etc/venus/api-paste.ini b/etc/venus/api-paste.ini new file mode 100644 index 0000000..6c509ca --- /dev/null +++ b/etc/venus/api-paste.ini @@ -0,0 +1,53 @@ +############# +# OpenStack # +############# + +[composite:osapi_venus] +use = call:venus.api:root_app_factory +/: apiversions +/v1: openstack_venus_api_v1 + +[composite:openstack_venus_api_v1] +use = call:venus.api.middleware.auth:pipeline_factory +noauth = request_id faultwrap sizelimit osprofiler noauth apiv1 +keystone = request_id faultwrap sizelimit osprofiler authtoken keystonecontext forwardunionfilter apiv1 +keystone_nolimit = request_id faultwrap sizelimit osprofiler authtoken keystonecontext forwardunionfilter apiv1 + +[filter:request_id] +paste.filter_factory = oslo_middleware.request_id:RequestId.factory + +[filter:faultwrap] +paste.filter_factory = venus.api.middleware.fault:FaultWrapper.factory + +[filter:osprofiler] +paste.filter_factory = osprofiler.web:WsgiMiddleware.factory +hmac_keys = SECRET_KEY +enabled = yes + +[filter:noauth] +paste.filter_factory = venus.api.middleware.auth:NoAuthMiddleware.factory + +[filter:sizelimit] +paste.filter_factory = venus.api.middleware.sizelimit:RequestBodySizeLimiter.factory + +[app:apiv1] +paste.app_factory = 
venus.api.v1.router:APIRouter.factory + +[pipeline:apiversions] +pipeline = faultwrap osvenusversionapp + +[app:osvenusversionapp] +paste.app_factory = venus.api.versions:Versions.factory + +########## +# Shared # +########## + +[filter:keystonecontext] +paste.filter_factory = venus.api.middleware.auth:VenusKeystoneContext.factory + +[filter:authtoken] +paste.filter_factory = keystonemiddleware.auth_token:filter_factory + +[filter:forwardunionfilter] +paste.filter_factory = venus.api.middleware.env:ForwardUnionFilter.factory diff --git a/etc/venus/logging_sample.conf b/etc/venus/logging_sample.conf new file mode 100644 index 0000000..4bcd597 --- /dev/null +++ b/etc/venus/logging_sample.conf @@ -0,0 +1,93 @@ +[loggers] +keys = root, venus, taskflow, venus_flow_utils + +[handlers] +keys = stderr, stdout, watchedfile, syslog, tasks, null + +[formatters] +keys = context, default + +[logger_root] +level = WARNING +handlers = null + +[logger_venus] +level = INFO +handlers = stderr +qualname = venus + +# Both of these are used for tracking what venus and taskflow is doing with +# regard to flows and tasks (and the activity there-in). +[logger_venus_flow_utils] +level = INFO +handlers = tasks,stderr +qualname = venus.flow_utils + +[logger_taskflow] +level = INFO +handlers = tasks +qualname = taskflow + +[logger_amqplib] +level = WARNING +handlers = stderr +qualname = amqplib + +[logger_sqlalchemy] +level = WARNING +handlers = stderr +qualname = sqlalchemy +# "level = INFO" logs SQL queries. +# "level = DEBUG" logs SQL queries and results. +# "level = WARNING" logs neither. (Recommended for production systems.) 
+ +[logger_boto] +level = WARNING +handlers = stderr +qualname = boto + +[logger_suds] +level = INFO +handlers = stderr +qualname = suds + +[logger_eventletwsgi] +level = WARNING +handlers = stderr +qualname = eventlet.wsgi.server + +[handler_stderr] +class = StreamHandler +args = (sys.stderr,) +formatter = context + +[handler_stdout] +class = StreamHandler +args = (sys.stdout,) +formatter = context + +[handler_watchedfile] +class = handlers.WatchedFileHandler +args = ('venus.log',) +formatter = context + +[handler_tasks] +class = handlers.WatchedFileHandler +args = ('tasks.log',) +formatter = context + +[handler_syslog] +class = handlers.SysLogHandler +args = ('/dev/log', handlers.SysLogHandler.LOG_USER) +formatter = context + +[handler_null] +class = logging.NullHandler +formatter = default +args = () + +[formatter_context] +class = oslo_log.formatters.ContextFormatter + +[formatter_default] +format = %(message)s diff --git a/etc/venus/policy.json b/etc/venus/policy.json new file mode 100644 index 0000000..92d7c8e --- /dev/null +++ b/etc/venus/policy.json @@ -0,0 +1,6 @@ +{ + "context_is_admin": "role:admin", + "admin_or_owner": "is_admin:True or project_id:%(project_id)s", + "admin_api": "is_admin:True", + "default": "rule:admin_api" +} diff --git a/etc/venus/rootwrap.conf b/etc/venus/rootwrap.conf new file mode 100644 index 0000000..b4cb741 --- /dev/null +++ b/etc/venus/rootwrap.conf @@ -0,0 +1,27 @@ +# Configuration for venus-rootwrap +# This file should be owned by (and only-writeable by) the root user + +[DEFAULT] +# List of directories to load filter definitions from (separated by ','). +# These directories MUST all be only writeable by root ! +filters_path=/etc/venus/rootwrap.d,/usr/share/venus/rootwrap + +# List of directories to search executables in, in case filters do not +# explicitly specify a full path (separated by ',') +# If not specified, defaults to system PATH environment variable. +# These directories MUST all be only writeable by root ! 
+exec_dirs=/sbin,/usr/sbin,/bin,/usr/bin,/usr/local/bin,/usr/local/sbin + +# Enable logging to syslog +# Default value is False +use_syslog=False + +# Which syslog facility to use. +# Valid values include auth, authpriv, syslog, local0, local1... +# Default value is 'syslog' +syslog_log_facility=syslog + +# Which messages to log. +# INFO means log all usage +# ERROR means only log unsuccessful attempts +syslog_log_level=ERROR diff --git a/etc/venus/rootwrap.d/venus.filters b/etc/venus/rootwrap.d/venus.filters new file mode 100644 index 0000000..309b1e5 --- /dev/null +++ b/etc/venus/rootwrap.d/venus.filters @@ -0,0 +1,4 @@ +# venus-rootwrap command filters for venus nodes +# This file should be owned by (and only-writeable by) the root user + +[Filters] diff --git a/etc/venus/venus.conf b/etc/venus/venus.conf new file mode 100644 index 0000000..57e3f70 --- /dev/null +++ b/etc/venus/venus.conf @@ -0,0 +1,43 @@ +[keystone_authtoken] +memcached_servers = 100.2.30.241:11211,100.2.30.242:11211,100.2.30.243:11211 +signing_dir = /var/cache/venus +cafile = /opt/stack/data/ca-bundle.pem +project_domain_name = default +project_name = service +user_domain_name = default +password = dTa74mdF29CyGLQvH8RCKAhFPlRd1zHtp2Ai4NGw +username = venus +auth_uri = http://100.2.28.240:5000 +auth_url = http://100.2.28.240:35357 +project_domain_id = default +user_domain_id = default +auth_type = password + +[DEFAULT] +transport_url = rabbit://openstack:R8axM8sde8Dq5tV1PcDHmDRPLsA9fBLpXrGQccfE@100.2.30.243:5672 +my_ip = 100.2.30.243 +periodic_interval = 60 +rootwrap_config = /etc/venus/rootwrap.conf +api_paste_config = /etc/venus/api-paste.ini +log_dir = /var/log/kolla/venus/ +debug = True +auth_strategy = keystone +os_region_name = RegionOne +osapi_venus_listen = 100.2.30.243 +osapi_venus_listen_port = 8686 + +[database] +connection = mysql+pymysql://root:Irpzw6tic9ezyUEh4c0JnT0kK7U1oKqbRPRIfkwW@100.2.28.72:3306/venus?charset=utf8 + +[influxdb] +username = admin 
+password = BjMQnWqcRp1S9JAk3eYHP2aLvgEhQUlgdsMBnE3l +hostname = 100.2.30.24 +port = 8086 +dbname = telegraf +alert_dbname = alert + +[elasticsearch] +url = http://100.2.28.30:9200 +username = admin +password = DlR7Y4vcPPbwbOCHYO8f8zG9VtwnLrd1t5R1A3B9 diff --git a/releasenotes/notes/.placeholder b/releasenotes/notes/.placeholder new file mode 100644 index 0000000..e69de29 diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py new file mode 100644 index 0000000..2ab504c --- /dev/null +++ b/releasenotes/source/conf.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'reno.sphinxext', + 'openstackdocstheme', +] + +# openstackdocstheme options +openstackdocs_repo_name = 'openstack/venus' +openstackdocs_use_storyboard = True + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +copyright = u'2020, Venus developers' +author = u'venus developers' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = "" +# The full version, including alpha/beta/rc tags. +release = "" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'native' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'openstackdocs' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: https://alabaster.readthedocs.io/en/latest/installation.html#sidebars +# html_sidebars = {} + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'VenusReleaseNotesdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'VenusReleaseNotes.tex', + u'Venus Release Notes Documentation', + u'Venus developers', 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'VenusReleaseNotes', u'Venus Release Notes Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'VenusReleaseNotes', u'Venus Release Notes Documentation', + author, 'VenusReleaseNotes', 'One line description of project.', + 'Miscellaneous'), +] + +# -- Options for Internationalization output ------------------------------ +locale_dirs = ['locale/'] diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst new file mode 100644 index 0000000..6c9666e --- /dev/null +++ b/releasenotes/source/index.rst @@ -0,0 +1,8 @@ +====================== + Venus Release Notes +====================== + +.. toctree:: + :maxdepth: 1 + + unreleased diff --git a/releasenotes/source/unreleased.rst b/releasenotes/source/unreleased.rst new file mode 100644 index 0000000..cd22aab --- /dev/null +++ b/releasenotes/source/unreleased.rst @@ -0,0 +1,5 @@ +============================== + Current Series Release Notes +============================== + +.. release-notes:: diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..a6f6260 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,29 @@ +# The order of packages is significant, because pip processes them in the order +# of appearance. Changing the order has an impact on the overall integration +# process, which may cause wedges in the gate later. 
+ +pbr>=1.6 +anyjson>=0.3.3 +keystonemiddleware!=2.4.0,>=2.0.0 +oslo.config>=2.3.0 # Apache-2.0 +oslo.concurrency>=2.3.0 # Apache-2.0 +oslo.context>=0.2.0 # Apache-2.0 +oslo.db>=2.4.1 # Apache-2.0 +oslo.log>=1.8.0 # Apache-2.0 +oslo.messaging!=1.17.0,!=1.17.1,!=2.6.0,!=2.6.1,!=2.7.0,!=2.8.0,!=2.8.1,!=2.9.0,!=3.1.0,>=1.16.0 # Apache-2.0 +oslo.middleware!=3.0.0,!=3.1.0,!=3.2.0,>=2.8.0 # Apache-2.0 +oslo.policy>=0.5.0 # Apache-2.0 +oslo.reports>=0.1.0 # Apache-2.0 +oslo.rootwrap>=2.0.0 # Apache-2.0 +oslo.serialization>=1.4.0 # Apache-2.0 +oslo.service>=0.7.0 # Apache-2.0 +oslo.utils!=2.6.0,>=2.0.0 # Apache-2.0 +oslo.versionedobjects>=0.9.0 +oslo.i18n>=1.5.0 # Apache-2.0 +osprofiler>=0.3.0 # Apache-2.0 +openstacksdk>=0.46.0 # Apache-2.0 +six>=1.9.0 +SQLAlchemy<1.1.0,>=0.9.9 +sqlalchemy-migrate>=0.9.6 +PyMySQL>=0.7.11 +elasticsearch>=5.0.0,<6.0.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..d09ca4b --- /dev/null +++ b/setup.cfg @@ -0,0 +1,29 @@ +[metadata] +name = venus +summary = OpenStack Log Management as a Service +description-file = + README.rst +author = Brin Zhang +author-email = zhangbailin@inspur.com +python-requires = >=3.6 +classifier = + Environment :: OpenStack + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python + Programming Language :: Python :: Implementation :: CPython + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + +[files] +packages = + venus + +[entry_points] +venus.database.migration_backend = + sqlalchemy = venus.db.sqlalchemy.migration diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..5f49080 --- /dev/null +++ b/setup.py @@ -0,0 +1,28 @@ +# Copyright 2020 Inspur +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT +import setuptools + +# In python < 2.7.4, a lazy loading of package `pbr` will break +# setuptools if some other modules registered functions in `atexit`. +# solution from: http://bugs.python.org/issue15881#msg170215 +try: + import multiprocessing # noqa +except ImportError: + pass + +setuptools.setup( + setup_requires=['pbr>=1.8'], + pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..3c954c9 --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,19 @@ +# The order of packages is significant, because pip processes them in the order +# of appearance. Changing the order has an impact on the overall integration +# process, which may cause wedges in the gate later. 
+ +hacking>=3.0.1,<3.1.0 # Apache-2.0 +bandit>=1.6.0 # Apache-2.0 +coverage>=3.6,!=4.4 # Apache-2.0 +fixtures>=3.0.0 # Apache-2.0/BSD +ddt>=1.2.1 # MIT +oslotest>=3.2.0 # Apache-2.0 +stestr>=2.2.0 # Apache-2.0 +testresources>=2.0.0 # Apache-2.0/BSD +testscenarios>=0.4 # Apache-2.0/BSD +testtools>=2.4.0 # MIT +tempest>=17.1.0 # Apache-2.0 +doc8>=0.6.0 # Apache-2.0 +Pygments>=2.2.0 # BSD license +os-resource-classes>=0.5.0 # Apache-2.0 +cursive>=0.2.1 # Apache-2.0 diff --git a/tools/config/venus-config-generator.conf b/tools/config/venus-config-generator.conf new file mode 100644 index 0000000..9d57a9b --- /dev/null +++ b/tools/config/venus-config-generator.conf @@ -0,0 +1,12 @@ +[DEFAULT] +output_file = etc/venus/venus.conf.sample +wrap_width = 62 +namespace = venus +namespace = oslo.db +namespace = oslo.messaging +namespace = oslo.policy +namespace = oslo.log +namespace = oslo.service.service +namespace = oslo.service.periodic_task +namespace = oslo.service.sslutils +namespace = keystonemiddleware.auth_token diff --git a/tools/config/venus-policy-generator.conf b/tools/config/venus-policy-generator.conf new file mode 100644 index 0000000..451411b --- /dev/null +++ b/tools/config/venus-policy-generator.conf @@ -0,0 +1,3 @@ +[DEFAULT] +output_file = etc/venus/policy.yaml.sample +namespace = venus.api diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..3af3dae --- /dev/null +++ b/tox.ini @@ -0,0 +1,76 @@ +[tox] +minversion = 1.6 +skipsdist = True +envlist = pep8 + +[testenv] +basepython = python3 +# Note the hash seed is set to 0 until venus can be tested with a +# random hash seed successfully. 
+setenv = VIRTUAL_ENV={envdir} + PYTHONHASHSEED=0 +usedevelop = True +install_command = pip install {opts} {packages} + +deps = + -r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +[testenv:releasenotes] +deps = -r{toxinidir}/doc/requirements.txt +commands = sphinx-build -a -E -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html + +[testenv:pep8] +commands = + flake8 {posargs} . venus/common + # Check that .po and .pot files are valid: + doc8 --ignore D001 doc/source/ CONTRIBUTING.rst HACKING.rst README.rst + +[doc8] +ignore-path = .venv,.git,.tox,*venus/locale*,*lib/python*,*venus.egg*,api-ref/build,doc/build,doc/source/contributor/api + +[testenv:docs] +deps = -r{toxinidir}/doc/requirements.txt +commands = + rm -rf doc/build/html + sphinx-build -W -b html doc/source doc/build/html + +[flake8] +# Following checks are ignored on purpose. +# +# E251 unexpected spaces around keyword / parameter equals +# reason: no improvement in readability +# +# Due to the upgrade to hacking 0.9.2 the following checking are +# ignored on purpose for the moment and should be re-enabled. +# +# H405 +# Due to the upgrade to hacking 0.10.0 the following checking are +# ignored on purpose for the moment and should be cleaned up and re-enabled.
+# +# H105 Don't use author tags +# +filename = *.py,app.wsgi +show-source = True +ignore = E123,E125,H405,W503,W504,E251,H105,W605 +builtins = _ +enable-extensions = H106,H203,H904 +exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes + +[flake8:local-plugins] +extension = + M302 = checks:assert_equal_not_none + M310 = checks:use_timeutils_utcnow + M316 = checks:assert_true_isinstance + M322 = checks:no_mutable_default_args + M336 = checks:dict_constructor_with_list_copy + M338 = checks:assert_equal_in + M339 = checks:no_xrange + M340 = checks:check_explicit_underscore_import + M352 = checks:no_log_warn + N366 = checks:import_stock_mock +paths = ./venus/hacking + +[hacking] +local-check-factory = venus.hacking.checks.factory +import_exceptions = venus.i18n diff --git a/venus/__init__.py b/venus/__init__.py new file mode 100644 index 0000000..71a65f4 --- /dev/null +++ b/venus/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +:mod:`venus` -- Cloud IaaS Platform +=================================== + +.. automodule:: venus + :platform: Unix + :synopsis: Infrastructure-as-a-Service Cloud platform. +.. 
moduleauthor:: Li Xipeng +""" diff --git a/venus/api/__init__.py b/venus/api/__init__.py new file mode 100644 index 0000000..a975328 --- /dev/null +++ b/venus/api/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from oslo_config import cfg +from oslo_log import log as logging +import paste.urlmap + + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) + + +def root_app_factory(loader, global_conf, **local_conf): + if not CONF.enable_v1_api: + del local_conf['/v1'] + return paste.urlmap.urlmap_factory(loader, global_conf, **local_conf) diff --git a/venus/api/common.py b/venus/api/common.py new file mode 100644 index 0000000..d46ad28 --- /dev/null +++ b/venus/api/common.py @@ -0,0 +1,429 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import os +import re + +import enum +from oslo_config import cfg +from oslo_log import log as logging +from six.moves import urllib +import webob + +from venus.api.openstack import wsgi +from venus.api import xmlutil +from venus.i18n import _ +from venus import utils + + +api_common_opts = [ + cfg.IntOpt('osapi_max_limit', + default=1000, + help='The maximum number of items that a collection ' + 'resource returns in a single response'), + cfg.StrOpt('osapi_venus_base_URL', + default=None, + help='Base URL that will be presented to users in links ' + 'to the OpenStack Venus API', + deprecated_name='osapi_compute_link_prefix'), +] + +CONF = cfg.CONF +CONF.register_opts(api_common_opts) + +LOG = logging.getLogger(__name__) + +XML_NS_V1 = 'https://www.openstack.org/mediawiki/Venus/1.0/content' + +METADATA_TYPES = enum.Enum('METADATA_TYPES', 'user image') + + +# Regex that matches alphanumeric characters, periods, hyphens, +# colons and underscores: +# ^ assert position at start of the string +# [\w\.\-\:\_] match expression +# $ assert position at end of the string +VALID_KEY_NAME_REGEX = re.compile(r"^[\w\.\-\:\_]+$", re.UNICODE) + + +def validate_key_names(key_names_list): + """Validate each item of the list to match key name regex.""" + for key_name in key_names_list: + if not VALID_KEY_NAME_REGEX.match(key_name): + return False + return True + + +def get_pagination_params(params, max_limit=None): + """Return marker, limit, offset tuple from request. + + :param params: `wsgi.Request`'s GET dictionary, possibly containing + 'marker', 'limit', and 'offset' variables. 'marker' is the + id of the last element the client has seen, 'limit' is the + maximum number of items to return and 'offset' is the number + of items to skip from the marker or from the first element. + If 'limit' is not specified, or > max_limit, we default to + max_limit. Negative values for either offset or limit will + cause exc.HTTPBadRequest() exceptions to be raised. 
If no + offset is present we'll default to 0 and if no marker is + present we'll default to None. + :max_limit: Max value 'limit' return value can take + :returns: Tuple (marker, limit, offset) + """ + max_limit = max_limit or CONF.osapi_max_limit + limit = _get_limit_param(params, max_limit) + marker = _get_marker_param(params) + offset = _get_offset_param(params) + return marker, limit, offset + + +def _get_limit_param(params, max_limit=None): + """Extract integer limit from request's dictionary or fail. + + Defaults to max_limit if not present and returns max_limit if present + 'limit' is greater than max_limit. + """ + max_limit = max_limit or CONF.osapi_max_limit + try: + limit = int(params.pop('limit', max_limit)) + except ValueError: + msg = _('limit param must be an integer') + raise webob.exc.HTTPBadRequest(explanation=msg) + if limit < 0: + msg = _('limit param must be positive') + raise webob.exc.HTTPBadRequest(explanation=msg) + limit = min(limit, max_limit) + return limit + + +def _get_marker_param(params): + """Extract marker id from request's dictionary (defaults to None).""" + return params.pop('marker', None) + + +def _get_offset_param(params): + """Extract offset id from request's dictionary (defaults to 0) or fail.""" + try: + offset = int(params.pop('offset', 0)) + except ValueError: + msg = _('offset param must be an integer') + raise webob.exc.HTTPBadRequest(explanation=msg) + + if offset < 0: + msg = _('offset param must be positive') + raise webob.exc.HTTPBadRequest(explanation=msg) + + return offset + + +def limited(items, request, max_limit=None): + """Return a slice of items according to requested offset and limit. + + :param items: A sliceable entity + :param request: ``wsgi.Request`` possibly containing 'offset' and 'limit' + GET variables. 'offset' is where to start in the list, + and 'limit' is the maximum number of items to return. If + 'limit' is not specified, 0, or > max_limit, we default + to max_limit. 
Negative values for either offset or limit + will cause exc.HTTPBadRequest() exceptions to be raised. + :kwarg max_limit: The maximum number of items to return from 'items' + """ + max_limit = max_limit or CONF.osapi_max_limit + marker, limit, offset = get_pagination_params(request.GET.copy(), + max_limit) + range_end = offset + (limit or max_limit) + return items[offset:range_end] + + +def limited_by_marker(items, request, max_limit=None): + """Return a slice of items according to the requested marker and limit.""" + max_limit = max_limit or CONF.osapi_max_limit + marker, limit, __ = get_pagination_params(request.GET.copy(), max_limit) + + start_index = 0 + if marker: + start_index = -1 + for i, item in enumerate(items): + if 'flavorid' in item: + if item['flavorid'] == marker: + start_index = i + 1 + break + elif item['id'] == marker or item.get('uuid') == marker: + start_index = i + 1 + break + if start_index < 0: + msg = _('marker [%s] not found') % marker + raise webob.exc.HTTPBadRequest(explanation=msg) + range_end = start_index + limit + return items[start_index:range_end] + + +def get_sort_params(params, default_key='created_at', default_dir='desc'): + """Retrieves sort keys/directions parameters. + + Processes the parameters to create a list of sort keys and sort directions + that correspond to either the 'sort' parameter or the 'sort_key' and + 'sort_dir' parameter values. The value of the 'sort' parameter is a comma- + separated list of sort keys, each key is optionally appended with + ':'. + + Note that the 'sort_key' and 'sort_dir' parameters are deprecated in kilo + and an exception is raised if they are supplied with the 'sort' parameter. + + The sort parameters are removed from the request parameters by this + function. 
+ + :param params: webob.multidict of request parameters (from + venus.api.openstack.wsgi.Request.params) + :param default_key: default sort key value, added to the list if no + sort keys are supplied + :param default_dir: default sort dir value, added to the list if the + corresponding key does not have a direction + specified + :returns: list of sort keys, list of sort dirs + :raise webob.exc.HTTPBadRequest: If both 'sort' and either 'sort_key' or + 'sort_dir' are supplied parameters + """ + if 'sort' in params and ('sort_key' in params or 'sort_dir' in params): + msg = _("The 'sort_key' and 'sort_dir' parameters are deprecated and " + "cannot be used with the 'sort' parameter.") + raise webob.exc.HTTPBadRequest(explanation=msg) + sort_keys = [] + sort_dirs = [] + if 'sort' in params: + for sort in params.pop('sort').strip().split(','): + sort_key, _sep, sort_dir = sort.partition(':') + if not sort_dir: + sort_dir = default_dir + sort_keys.append(sort_key.strip()) + sort_dirs.append(sort_dir.strip()) + else: + sort_key = params.pop('sort_key', default_key) + sort_dir = params.pop('sort_dir', default_dir) + sort_keys.append(sort_key.strip()) + sort_dirs.append(sort_dir.strip()) + return sort_keys, sort_dirs + + +def get_request_url(request): + url = request.application_url + headers = request.headers + forwarded = headers.get('X-Forwarded-Host') + if forwarded: + url_parts = list(urllib.parse.urlsplit(url)) + url_parts[1] = re.split(',\s?', forwarded)[-1] + url = urllib.parse.urlunsplit(url_parts).rstrip('/') + return url + + +def remove_version_from_href(href): + """Removes the first api version from the href. 
+ + Given: 'http://www.venus.com/v1.1/123' + Returns: 'http://www.venus.com/123' + + Given: 'http://www.venus.com/v1.1' + Returns: 'http://www.venus.com' + + """ + parsed_url = urllib.parse.urlsplit(href) + url_parts = parsed_url.path.split('/', 2) + + # NOTE: this should match vX.X or vX + expression = re.compile(r'^v([0-9]+|[0-9]+\.[0-9]+)(/.*|$)') + if expression.match(url_parts[1]): + del url_parts[1] + + new_path = '/'.join(url_parts) + + if new_path == parsed_url.path: + msg = 'href %s does not contain version' % href + LOG.debug(msg) + raise ValueError(msg) + + parsed_url = list(parsed_url) + parsed_url[2] = new_path + return urllib.parse.urlunsplit(parsed_url) + + +class ViewBuilder(object): + """Model API responses as dictionaries.""" + + _collection_name = None + + def _get_links(self, request, identifier): + return [{"rel": "self", + "href": self._get_href_link(request, identifier), }, + {"rel": "bookmark", + "href": self._get_bookmark_link(request, identifier), }] + + def _get_next_link(self, request, identifier, collection_name): + """Return href string with proper limit and marker params.""" + params = request.params.copy() + params["marker"] = identifier + prefix = self._update_link_prefix(get_request_url(request), + CONF.osapi_venus_base_URL) + url = os.path.join(prefix, + request.environ["venus.context"].project_id, + collection_name) + return "%s?%s" % (url, urllib.parse.urlencode(params)) + + def _get_href_link(self, request, identifier): + """Return an href string pointing to this object.""" + prefix = self._update_link_prefix(get_request_url(request), + CONF.osapi_venus_base_URL) + return os.path.join(prefix, + request.environ["venus.context"].project_id, + self._collection_name, + str(identifier)) + + def _get_bookmark_link(self, request, identifier): + """Create a URL that refers to a specific resource.""" + base_url = remove_version_from_href(get_request_url(request)) + base_url = self._update_link_prefix(base_url, + 
CONF.osapi_venus_base_URL) + return os.path.join(base_url, + request.environ["venus.context"].project_id, + self._collection_name, + str(identifier)) + + def _get_collection_links(self, request, items, collection_name, + item_count=None, id_key="uuid"): + """Retrieve 'next' link, if applicable. + + The next link is included if we are returning as many items as we can, + given the restrictions of limit optional request parameter and + osapi_max_limit configuration parameter as long as we are returning + some elements. + + So we return next link if: + + 1) 'limit' param is specified and equal to the number of items. + 2) 'limit' param is NOT specified and the number of items is + equal to CONF.osapi_max_limit. + + :param request: API request + :param items: List of collection items + :param collection_name: Name of collection, used to generate the + next link for a pagination query + :param item_count: Length of the list of the original collection + items + :param id_key: Attribute key used to retrieve the unique ID, used + to generate the next link marker for a pagination query + :returns links + """ + item_count = item_count or len(items) + limit = _get_limit_param(request.GET.copy()) + if len(items) and limit <= item_count: + return self._generate_next_link(items, id_key, request, + collection_name) + + return [] + + def _generate_next_link(self, items, id_key, request, + collection_name): + links = [] + last_item = items[-1] + if id_key in last_item: + last_item_id = last_item[id_key] + else: + last_item_id = last_item["id"] + links.append({ + "rel": "next", + "href": self._get_next_link(request, last_item_id, + collection_name), + }) + return links + + def _update_link_prefix(self, orig_url, prefix): + if not prefix: + return orig_url + url_parts = list(urllib.parse.urlsplit(orig_url)) + prefix_parts = list(urllib.parse.urlsplit(prefix)) + url_parts[0:2] = prefix_parts[0:2] + url_parts[2] = prefix_parts[2] + url_parts[2] + + return 
urllib.parse.urlunsplit(url_parts).rstrip('/') + + +class MetadataDeserializer(wsgi.MetadataXMLDeserializer): + def deserialize(self, text): + dom = utils.safe_minidom_parse_string(text) + metadata_node = self.find_first_child_named(dom, "metadata") + metadata = self.extract_metadata(metadata_node) + return {'body': {'metadata': metadata}} + + +class MetaItemDeserializer(wsgi.MetadataXMLDeserializer): + def deserialize(self, text): + dom = utils.safe_minidom_parse_string(text) + metadata_item = self.extract_metadata(dom) + return {'body': {'meta': metadata_item}} + + +class MetadataXMLDeserializer(wsgi.XMLDeserializer): + + def extract_metadata(self, metadata_node): + """Marshal the metadata attribute of a parsed request.""" + if metadata_node is None: + return {} + metadata = {} + for meta_node in self.find_children_named(metadata_node, "meta"): + key = meta_node.getAttribute("key") + metadata[key] = self.extract_text(meta_node) + return metadata + + def _extract_metadata_container(self, datastring): + dom = utils.safe_minidom_parse_string(datastring) + metadata_node = self.find_first_child_named(dom, "metadata") + metadata = self.extract_metadata(metadata_node) + return {'body': {'metadata': metadata}} + + def create(self, datastring): + return self._extract_metadata_container(datastring) + + def update_all(self, datastring): + return self._extract_metadata_container(datastring) + + def update(self, datastring): + dom = utils.safe_minidom_parse_string(datastring) + metadata_item = self.extract_metadata(dom) + return {'body': {'meta': metadata_item}} + + +metadata_nsmap = {None: xmlutil.XMLNS_V11} + + +class MetaItemTemplate(xmlutil.TemplateBuilder): + def construct(self): + sel = xmlutil.Selector('meta', xmlutil.get_items, 0) + root = xmlutil.TemplateElement('meta', selector=sel) + root.set('key', 0) + root.text = 1 + return xmlutil.MasterTemplate(root, 1, nsmap=metadata_nsmap) + + +class MetadataTemplateElement(xmlutil.TemplateElement): + def will_render(self, 
datum): + return True + + +class MetadataTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = MetadataTemplateElement('metadata', selector='metadata') + elem = xmlutil.SubTemplateElement(root, 'meta', + selector=xmlutil.get_items) + elem.set('key', 0) + elem.text = 1 + return xmlutil.MasterTemplate(root, 1, nsmap=metadata_nsmap) diff --git a/venus/api/contrib/__init__.py b/venus/api/contrib/__init__.py new file mode 100644 index 0000000..df299f5 --- /dev/null +++ b/venus/api/contrib/__init__.py @@ -0,0 +1,37 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Contrib contains extensions that are shipped with venus. + +It can't be called 'extensions' because that causes namespacing problems. 
+ +""" + +from oslo_config import cfg +from oslo_log import log as logging + +from venus.api import extensions + + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) + + +def standard_extensions(ext_mgr): + extensions.load_standard_extensions(ext_mgr, LOG, __path__, __package__) + + +def select_extensions(ext_mgr): + extensions.load_standard_extensions(ext_mgr, LOG, __path__, __package__, + CONF.osapi_venus_ext_list) diff --git a/venus/api/extensions.py b/venus/api/extensions.py new file mode 100644 index 0000000..71c7273 --- /dev/null +++ b/venus/api/extensions.py @@ -0,0 +1,395 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import importutils +import webob.dec +import webob.exc + +from venus.api.openstack import wsgi +from venus.api import xmlutil +from venus import exception +from venus.i18n import _LE, _LI, _LW + + +CONF = cfg.CONF + +LOG = logging.getLogger(__name__) + + +class ExtensionDescriptor(object): + """Base class that defines the contract for extensions. + + Note that you don't have to derive from this class to have a valid + extension; it is purely a convenience. 
+ + """ + + # The name of the extension, e.g., 'Fox In Socks' + name = None + + # The alias for the extension, e.g., 'FOXNSOX' + alias = None + + # Description comes from the docstring for the class + + # The XML namespace for the extension, e.g., + # 'http://www.fox.in.socks/api/ext/pie/v1.0' + namespace = None + + # The timestamp when the extension was last updated, e.g., + # '2011-01-22T13:25:27-06:00' + updated = None + + def __init__(self, ext_mgr): + """Register extension with the extension manager.""" + + ext_mgr.register(self) + self.ext_mgr = ext_mgr + + def get_resources(self): + """List of extensions.ResourceExtension extension objects. + + Resources define new nouns, and are accessible through URLs. + + """ + resources = [] + return resources + + def get_controller_extensions(self): + """List of extensions.ControllerExtension extension objects. + + Controller extensions are used to extend existing controllers. + """ + controller_exts = [] + return controller_exts + + @classmethod + def nsmap(cls): + """Synthesize a namespace map from extension.""" + + # Start with a base nsmap + nsmap = ext_nsmap.copy() + + # Add the namespace for the extension + nsmap[cls.alias] = cls.namespace + + return nsmap + + @classmethod + def xmlname(cls, name): + """Synthesize element and attribute names.""" + + return '{%s}%s' % (cls.namespace, name) + + +def make_ext(elem): + elem.set('name') + elem.set('namespace') + elem.set('alias') + elem.set('updated') + + desc = xmlutil.SubTemplateElement(elem, 'description') + desc.text = 'description' + + xmlutil.make_links(elem, 'links') + + +ext_nsmap = {None: xmlutil.XMLNS_COMMON_V10, 'atom': xmlutil.XMLNS_ATOM} + + +class ExtensionTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('extension', selector='extension') + make_ext(root) + return xmlutil.MasterTemplate(root, 1, nsmap=ext_nsmap) + + +class ExtensionsTemplate(xmlutil.TemplateBuilder): + def construct(self): + root =
xmlutil.TemplateElement('extensions') + elem = xmlutil.SubTemplateElement(root, 'extension', + selector='extensions') + make_ext(elem) + return xmlutil.MasterTemplate(root, 1, nsmap=ext_nsmap) + + +class ExtensionsResource(wsgi.Resource): + + def __init__(self, extension_manager): + self.extension_manager = extension_manager + super(ExtensionsResource, self).__init__(None) + + def _translate(self, ext): + ext_data = {} + ext_data['name'] = ext.name + ext_data['alias'] = ext.alias + ext_data['description'] = ext.__doc__ + ext_data['namespace'] = ext.namespace + ext_data['updated'] = ext.updated + ext_data['links'] = [] # TODO(dprince): implement extension links + return ext_data + + @wsgi.serializers(xml=ExtensionsTemplate) + def index(self, req): + extensions = [] + for _alias, ext in self.extension_manager.extensions.items(): + extensions.append(self._translate(ext)) + return dict(extensions=extensions) + + @wsgi.serializers(xml=ExtensionTemplate) + def show(self, req, id): + try: + # NOTE(dprince): the extensions alias is used as the 'id' for show + ext = self.extension_manager.extensions[id] + except KeyError: + raise webob.exc.HTTPNotFound() + + return dict(extension=self._translate(ext)) + + def delete(self, req, id): + raise webob.exc.HTTPNotFound() + + def create(self, req): + raise webob.exc.HTTPNotFound() + + +class ExtensionManager(object): + """Load extensions from the configured extension path. + + See venus/tests/api/extensions/foxinsocks/extension.py for an + example extension implementation. 
+ + """ + + def __init__(self): + LOG.info(_LI('Initializing extension manager.')) + + self.cls_list = CONF.osapi_venus_extension + self.extensions = {} + self._load_extensions() + + def is_loaded(self, alias): + return alias in self.extensions + + def register(self, ext): + # Do nothing if the extension doesn't check out + if not self._check_extension(ext): + return + + alias = ext.alias + LOG.info(_LI('Loaded extension: %s'), alias) + + if alias in self.extensions: + raise exception.Error("Found duplicate extension: %s" % alias) + self.extensions[alias] = ext + + def get_resources(self): + """Returns a list of ResourceExtension objects.""" + + resources = [] + resources.append(ResourceExtension('extensions', + ExtensionsResource(self))) + + for ext in self.extensions.values(): + try: + resources.extend(ext.get_resources()) + except AttributeError: + # NOTE(dprince): Extension aren't required to have resource + # extensions + pass + return resources + + def get_controller_extensions(self): + """Returns a list of ControllerExtension objects.""" + controller_exts = [] + for ext in self.extensions.values(): + try: + get_ext_method = ext.get_controller_extensions + except AttributeError: + # NOTE(Vek): Extensions aren't required to have + # controller extensions + continue + controller_exts.extend(get_ext_method()) + return controller_exts + + def _check_extension(self, extension): + """Checks for required methods in extension objects.""" + try: + LOG.debug('Ext name: %s', extension.name) + LOG.debug('Ext alias: %s', extension.alias) + LOG.debug('Ext description: %s', + ' '.join(extension.__doc__.strip().split())) + LOG.debug('Ext namespace: %s', extension.namespace) + LOG.debug('Ext updated: %s', extension.updated) + except AttributeError: + LOG.exception(_LE("Exception loading extension.")) + return False + + return True + + def load_extension(self, ext_factory): + """Execute an extension factory. + + Loads an extension. 
class ControllerExtension(object):
    """Extend core controllers of venus OpenStack API.

    Plain value object holding the (extension, collection, controller)
    triple used to hook an extension controller into a core resource.
    """

    def __init__(self, extension, collection, controller):
        self.extension = extension
        self.collection = collection
        self.controller = controller


class ResourceExtension(object):
    """Add top level resources to the OpenStack API in venus."""

    def __init__(self, collection, controller, parent=None,
                 collection_actions=None, member_actions=None,
                 custom_routes_fn=None):
        self.collection = collection
        self.controller = controller
        self.parent = parent
        # Normalize falsy action maps to fresh dicts so callers can always
        # iterate them (and mutable defaults are never shared).
        self.collection_actions = collection_actions or {}
        self.member_actions = member_actions or {}
        self.custom_routes_fn = custom_routes_fn
+ our_dir = path[0] + for dirpath, dirnames, filenames in os.walk(our_dir): + # Compute the relative package name from the dirpath + relpath = os.path.relpath(dirpath, our_dir) + if relpath == '.': + relpkg = '' + else: + relpkg = '.%s' % '.'.join(relpath.split(os.sep)) + + # Now, consider each file in turn, only considering .py files + for fname in filenames: + root, ext = os.path.splitext(fname) + + # Skip __init__ and anything that's not .py + if ext != '.py' or root == '__init__': + continue + + # Try loading it + classname = "%s%s" % (root[0].upper(), root[1:]) + classpath = ("%s%s.%s.%s" % + (package, relpkg, root, classname)) + + if ext_list is not None and classname not in ext_list: + logger.debug("Skipping extension: %s" % classpath) + continue + + try: + ext_mgr.load_extension(classpath) + except Exception as exc: + logger.warning(_LW('Failed to load extension %(classpath)s: ' + '%(exc)s'), + {'classpath': classpath, 'exc': exc}) + + # Now, let's consider any subdirectories we may have... + subdirs = [] + for dname in dirnames: + # Skip it if it does not have __init__.py + if not os.path.exists(os.path.join(dirpath, dname, + '__init__.py')): + continue + + # If it has extension(), delegate... + ext_name = ("%s%s.%s.extension" % + (package, relpkg, dname)) + try: + ext = importutils.import_class(ext_name) + except ImportError: + # extension() doesn't exist on it, so we'll explore + # the directory for ourselves + subdirs.append(dname) + else: + try: + ext(ext_mgr) + except Exception as exc: + logger.warning(_LW('Failed to load extension ' + '%(ext_name)s: %(exc)s'), + {'ext_name': ext_name, 'exc': exc}) + + # Update the list of directories we'll explore... 
def extension_authorizer(api_name, extension_name):
    """Return a hard authorizer for the named API extension.

    The returned callable builds the policy action string and target;
    the enforcement call itself is currently disabled (commented out),
    so the authorizer always succeeds and returns None.
    """

    def authorize(context, target=None, action=None):
        if target is None:
            # Default target: scope the check to the caller's own project.
            target = {'project_id': context.project_id,
                      'user_id': context.user_id}
        if action is None:
            act = '%s_extension:%s' % (api_name, extension_name)
        else:
            act = '%s_extension:%s:%s' % (api_name, extension_name, action)
        # venus.policy.enforce(context, act, target)

    return authorize


def soft_extension_authorizer(api_name, extension_name):
    """Return an authorizer reporting True/False instead of raising."""
    hard_authorize = extension_authorizer(api_name, extension_name)

    def authorize(context):
        try:
            hard_authorize(context)
        except exception.NotAuthorized:
            return False
        return True

    return authorize
+ +""" + + +import os + +from oslo_config import cfg +from oslo_log import log as logging +from oslo_middleware import request_id +from oslo_serialization import jsonutils +import webob.dec +import webob.exc + +from venus.api.openstack import wsgi +from venus import context +from venus.i18n import _ +from venus.wsgi import common as base_wsgi + + +use_forwarded_for_opt = cfg.BoolOpt( + 'use_forwarded_for', + default=False, + help='Treat X-Forwarded-For as the canonical remote address. ' + 'Only enable this if you have a sanitizing proxy.') + +CONF = cfg.CONF +CONF.register_opt(use_forwarded_for_opt) + +LOG = logging.getLogger(__name__) + + +def pipeline_factory(loader, global_conf, **local_conf): + """A paste pipeline replica that keys off of auth_strategy.""" + pipeline = local_conf[CONF.auth_strategy] + if not CONF.api_rate_limit: + limit_name = CONF.auth_strategy + '_nolimit' + pipeline = local_conf.get(limit_name, pipeline) + pipeline = pipeline.split() + filters = [loader.get_filter(n) for n in pipeline[:-1]] + app = loader.get_app(pipeline[-1]) + filters.reverse() + for filter in filters: + app = filter(app) + return app + + +class InjectContext(base_wsgi.Middleware): + """Add a 'venus.context' to WSGI environ.""" + + def __init__(self, context, *args, **kwargs): + self.context = context + super(InjectContext, self).__init__(*args, **kwargs) + + @webob.dec.wsgify(RequestClass=base_wsgi.Request) + def __call__(self, req): + req.environ['venus.context'] = self.context + return self.application + + +class VenusKeystoneContext(base_wsgi.Middleware): + """Make a request context from keystone headers.""" + + @webob.dec.wsgify(RequestClass=base_wsgi.Request) + def __call__(self, req): + user_id = req.headers.get('X_USER') + user_id = req.headers.get('X_USER_ID', user_id) + if user_id is None: + LOG.debug("Neither X_USER_ID nor X_USER found in request") + return webob.exc.HTTPUnauthorized() + # get the roles + roles = [r.strip() for r in req.headers.get('X_ROLE', 
'').split(',')] + if 'X_TENANT_ID' in req.headers: + # This is the new header since Keystone went to ID/Name + project_id = req.headers['X_TENANT_ID'] + else: + # This is for legacy compatibility + project_id = req.headers['X_TENANT'] + + project_name = req.headers.get('X_TENANT_NAME') + + req_id = req.environ.get(request_id.ENV_REQUEST_ID) + + # Get the auth token + auth_token = req.headers.get('X_AUTH_TOKEN', + req.headers.get('X_STORAGE_TOKEN')) + + # Build a context, including the auth_token... + remote_address = req.remote_addr + + service_catalog = None + if req.headers.get('X_SERVICE_CATALOG') is not None: + try: + catalog_header = req.headers.get('X_SERVICE_CATALOG') + service_catalog = jsonutils.loads(catalog_header) + except ValueError: + raise webob.exc.HTTPInternalServerError( + explanation=_('Invalid service catalog json.')) + + if CONF.use_forwarded_for: + remote_address = req.headers.get('X-Forwarded-For', remote_address) + ctx = context.RequestContext(user_id, + project_id, + project_name=project_name, + roles=roles, + auth_token=auth_token, + remote_address=remote_address, + service_catalog=service_catalog, + request_id=req_id) + + req.environ['venus.context'] = ctx + return self.application + + +class NoAuthMiddleware(base_wsgi.Middleware): + """Return a fake token if one isn't specified.""" + + @webob.dec.wsgify(RequestClass=wsgi.Request) + def __call__(self, req): + if 'X-Auth-Token' not in req.headers: + user_id = req.headers.get('X-Auth-User', 'admin') + project_id = req.headers.get('X-Auth-Project-Id', 'admin') + os_url = os.path.join(req.url, project_id) + res = webob.Response() + # NOTE(vish): This is expecting and returning Auth(1.1), whereas + # keystone uses 2.0 auth. We should probably allow + # 2.0 auth here as well. 
+ res.headers['X-Auth-Token'] = '%s:%s' % (user_id, project_id) + res.headers['X-Server-Management-Url'] = os_url + res.content_type = 'text/plain' + res.status = '204' + return res + + token = req.headers['X-Auth-Token'] + user_id, _sep, project_id = token.partition(':') + project_id = project_id or user_id + remote_address = getattr(req, 'remote_address', '127.0.0.1') + if CONF.use_forwarded_for: + remote_address = req.headers.get('X-Forwarded-For', remote_address) + ctx = context.RequestContext(user_id, + project_id, + is_admin=True, + remote_address=remote_address) + + req.environ['venus.context'] = ctx + return self.application diff --git a/venus/api/middleware/env.py b/venus/api/middleware/env.py new file mode 100644 index 0000000..72b21cb --- /dev/null +++ b/venus/api/middleware/env.py @@ -0,0 +1,164 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from oslo_log import log as logging + +import six + +import webob.dec + +import webob.exc + +from venus import exception + +from venus.wsgi import common as base_wsgi + +from six.moves import http_client + +from oslo_serialization import jsonutils + +import functools + +LOG = logging.getLogger(__name__) + +JSON_ENCODE_CONTENT_TYPES = set(['application/json', + 'application/json-home']) + + +def middleware_exceptions(method): + @functools.wraps(method) + def _inner(self, request): + try: + return method(self, request) + except Exception as e: + LOG.exception(six.text_type(e)) + return render_exception(e, + request=request) + + return _inner + + +class ForwardUnionFilter(base_wsgi.Middleware): + def process_request(self, req): + if (req.headers.get('FORWARD_UNION') == 'ALL'): + return self.union(req) + else: + return self.forward(req) + + @webob.dec.wsgify(RequestClass=base_wsgi.Request) + @middleware_exceptions + def __call__(self, req): + forward_union = req.headers.get('FORWARD_UNION') + if (forward_union is None or forward_union == ''): + response = req.get_response(self.application) + return self.process_response(response) + + else: + response = self.process_request(req) + return response + + def forward(self, req): + return None + + def union(self, req): + return None + + +def render_exception(error, context=None, request=None, user_locale=None): + + if (hasattr(error, 'code')): + if (error.code is None or error.code == ''): + error = exception.VenusException + else: + if '401' in str(error): + error = exception.AuthFail + else: + error = exception.VenusException + + body = {'error': { + 'code': error.code, + 'title': http_client.responses[error.code], + 'message': error.message, + }} + + headers = [] + + return render_response( + status=(error.code, http_client.responses[error.code]), + body=body, + headers=headers) + + +def render_response(body=None, status=None, headers=None, method=None): + if headers is None: + headers = [] + else: + headers = 
list(headers) + headers.append(('Vary', 'X-Auth-Token')) + + if body is None: + body = b'' + status = status or (http_client.NO_CONTENT, + http_client.responses[http_client.NO_CONTENT]) + else: + content_types = [v for h, v in headers if h == 'Content-Type'] + if content_types: + content_type = content_types[0] + else: + content_type = None + + if content_type is None or content_type in JSON_ENCODE_CONTENT_TYPES: + body = jsonutils.dump_as_bytes(body, cls=SmarterEncoder) + if content_type is None: + headers.append(('Content-Type', 'application/json')) + status = status or (http_client.OK, + http_client.responses[http_client.OK]) + + def _convert_to_str(headers): + str_headers = [] + for header in headers: + str_header = [] + for value in header: + if not isinstance(value, str): + str_header.append(str(value)) + else: + str_header.append(value) + # convert the list to the immutable tuple to build the headers. + # header's key/value will be guaranteed to be str type. + str_headers.append(tuple(str_header)) + return str_headers + + headers = _convert_to_str(headers) + + resp = webob.Response(body=body, + status='%d %s' % status, + headerlist=headers) + + if method and method.upper() == 'HEAD': + + stored_headers = resp.headers.copy() + resp.body = b'' + for header, value in stored_headers.items(): + resp.headers[header] = value + + return resp + + +class SmarterEncoder(jsonutils.json.JSONEncoder): + """Help for JSON encoding dict-like objects.""" + + def default(self, obj): + if not isinstance(obj, dict) and hasattr(obj, 'iteritems'): + return dict(obj.iteritems()) + return super(SmarterEncoder, self).default(obj) diff --git a/venus/api/middleware/fault.py b/venus/api/middleware/fault.py new file mode 100644 index 0000000..61e08f3 --- /dev/null +++ b/venus/api/middleware/fault.py @@ -0,0 +1,76 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from oslo_log import log as logging +import six +import webob.dec +import webob.exc + +from venus.api.openstack import wsgi +from venus import exception +from venus.i18n import _, _LI +from venus import utils +from venus.wsgi import common as base_wsgi + + +LOG = logging.getLogger(__name__) + + +class FaultWrapper(base_wsgi.Middleware): + """Calls down the middleware stack, making exceptions into faults.""" + + _status_to_type = {} + + @staticmethod + def status_to_type(status): + if not FaultWrapper._status_to_type: + for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError): + FaultWrapper._status_to_type[clazz.code] = clazz + return FaultWrapper._status_to_type.get( + status, webob.exc.HTTPInternalServerError)() + + def _error(self, inner, req): + safe = getattr(inner, 'safe', False) + headers = getattr(inner, 'headers', None) + status = getattr(inner, 'code', 500) + if status is None: + status = 500 + + msg_dict = dict(url=req.url, status=status) + LOG.info(_LI("%(url)s returned with HTTP %(status)d"), msg_dict) + outer = self.status_to_type(status) + if headers: + outer.headers = headers + # NOTE(johannes): We leave the explanation empty here on + # purpose. It could possibly have sensitive information + # that should not be returned back to the user. 
See + # bugs 868360 and 874472 + # NOTE(eglynn): However, it would be over-conservative and + # inconsistent with the EC2 API to hide every exception, + # including those that are safe to expose, see bug 1021373 + if safe: + msg = (inner.msg if isinstance(inner, exception.VenusException) + else six.text_type(inner)) + params = {'exception': inner.__class__.__name__, + 'explanation': msg} + outer.explanation = _('%(exception)s: %(explanation)s') % params + return wsgi.Fault(outer) + + @webob.dec.wsgify(RequestClass=wsgi.Request) + def __call__(self, req): + try: + return req.get_response(self.application) + except Exception as ex: + LOG.exception(ex) + return self._error(ex, req) diff --git a/venus/api/middleware/sizelimit.py b/venus/api/middleware/sizelimit.py new file mode 100644 index 0000000..f497cc1 --- /dev/null +++ b/venus/api/middleware/sizelimit.py @@ -0,0 +1,39 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Request Body limiting middleware. +Compatibility shim for Kilo, while operators migrate to oslo.middleware. 
# Default request size is 112k
max_request_body_size_opt = cfg.IntOpt('osapi_max_request_body_size',
                                       default=114688,
                                       help='Max size for body of a request')

CONF = cfg.CONF
CONF.register_opt(max_request_body_size_opt)


@versionutils.deprecated(as_of=versionutils.deprecated.KILO,
                         in_favor_of='oslo_middleware.RequestBodySizeLimiter')
class RequestBodySizeLimiter(sizelimit.RequestBodySizeLimiter):
    """Reject requests whose body exceeds osapi_max_request_body_size.

    Deprecated compatibility shim over oslo_middleware.sizelimit.
    """
    # NOTE(review): the original docstring ("Add a 'venus.context' to WSGI
    # environ.") was copy-pasted from the auth middleware; this class only
    # limits request body size.
    pass
+""" + +from oslo_log import log as logging +import routes + +from venus.api.openstack import wsgi +from venus.i18n import _, _LW +from venus.wsgi import common as base_wsgi + + +LOG = logging.getLogger(__name__) + + +class APIMapper(routes.Mapper): + def routematch(self, url=None, environ=None): + if url == "": + result = self._match("", environ) + return result[0], result[1] + return routes.Mapper.routematch(self, url, environ) + + def connect(self, *args, **kwargs): + # NOTE(inhye): Default the format part of a route to only accept json + # and xml so it doesn't eat all characters after a '.' + # in the url. + kwargs.setdefault('requirements', {}) + if not kwargs['requirements'].get('format'): + kwargs['requirements']['format'] = 'json|xml' + return routes.Mapper.connect(self, *args, **kwargs) + + +class ProjectMapper(APIMapper): + def resource(self, member_name, collection_name, **kwargs): + if 'parent_resource' not in kwargs: + kwargs['path_prefix'] = '{project_id}/' + else: + parent_resource = kwargs['parent_resource'] + p_collection = parent_resource['collection_name'] + p_member = parent_resource['member_name'] + kwargs['path_prefix'] = '{project_id}/%s/:%s_id' % (p_collection, + p_member) + routes.Mapper.resource(self, + member_name, + collection_name, + **kwargs) + + +class APIRouter(base_wsgi.Router): + """Routes requests on the API to the appropriate controller and method.""" + ExtensionManager = None # override in subclasses + + @classmethod + def factory(cls, global_config, **local_config): + """Simple paste factory, :class:`venus.wsgi.Router` doesn't have.""" + return cls() + + def __init__(self, ext_mgr=None): + if ext_mgr is None: + if self.ExtensionManager: + ext_mgr = self.ExtensionManager() + else: + raise Exception(_("Must specify an ExtensionManager class")) + + mapper = ProjectMapper() + self.resources = {} + self._setup_routes(mapper, ext_mgr) + self._setup_ext_routes(mapper, ext_mgr) + self._setup_extensions(ext_mgr) + super(APIRouter, 
self).__init__(mapper) + + def _setup_ext_routes(self, mapper, ext_mgr): + for resource in ext_mgr.get_resources(): + LOG.debug('Extended resource: %s', + resource.collection) + + wsgi_resource = wsgi.Resource(resource.controller) + self.resources[resource.collection] = wsgi_resource + kargs = dict( + controller=wsgi_resource, + collection=resource.collection_actions, + member=resource.member_actions) + + if resource.parent: + kargs['parent_resource'] = resource.parent + + mapper.resource(resource.collection, resource.collection, **kargs) + + if resource.custom_routes_fn: + resource.custom_routes_fn(mapper, wsgi_resource) + + def _setup_extensions(self, ext_mgr): + for extension in ext_mgr.get_controller_extensions(): + collection = extension.collection + controller = extension.controller + + if collection not in self.resources: + LOG.warning(_LW('Extension %(ext_name)s: Cannot extend ' + 'resource %(collection)s: No such resource'), + {'ext_name': extension.extension.name, + 'collection': collection}) + continue + + LOG.debug('Extension %(ext_name)s extending resource: ' + '%(collection)s', + {'ext_name': extension.extension.name, + 'collection': collection}) + + resource = self.resources[collection] + resource.register_actions(controller) + resource.register_extensions(controller) + + def _setup_routes(self, mapper, ext_mgr): + raise NotImplementedError diff --git a/venus/api/openstack/wsgi.py b/venus/api/openstack/wsgi.py new file mode 100644 index 0000000..21c2e19 --- /dev/null +++ b/venus/api/openstack/wsgi.py @@ -0,0 +1,1387 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
def wrap_check_policy(func):
    """Decorator: check the 'venus:<method name>' policy before calling."""

    @functools.wraps(func)
    def wrapped(self, req, *args, **kwargs):
        context = req.environ['venus.context']
        check_policy(context, func.__name__, None)
        return func(self, req, *args, **kwargs)
    return wrapped


def check_policy(context, action, target_obj=None):
    """Build a policy target from the context and check 'venus:<action>'.

    :param context: request context carrying project_id / user_id
    :param action: short action name; prefixed with 'venus:' for the check
    :param target_obj: optional VenusObject or dict merged into the target
    """
    target = {
        'project_id': context.project_id,
        'user_id': context.user_id,
    }

    if isinstance(target_obj, objects_base.VenusObject):
        # BUG FIX: the primitive payload key is 'versioned_object.data';
        # the original 'versioned_object.date' always yielded None, so the
        # object's fields were silently dropped from the policy target.
        target.update(
            target_obj.obj_to_primitive()['versioned_object.data'] or {})
    else:
        target.update(target_obj or {})
    _action = 'venus:%s' % action
    # venus.policy.enforce(context, _action, target)
+ + :returns: a dict of id_attribute to the resource from the cached + resources, an empty map if an empty collection was cached, + or None if nothing has been cached yet under this name + """ + if not name: + name = self.path + if name not in self._resource_cache: + # Nothing has been cached for this key yet + return None + return self._resource_cache[name] + + def cached_resource_by_id(self, resource_id, name=None): + """Get a resource by ID cached under the given resource name. + + Allow an API extension to get a previously stored object + within the same API request. This is basically a convenience method + to lookup by ID on the dictionary of all cached resources. + + Note that the object data will be slightly stale. + + :returns: the cached resource or None if the item is not in the cache + """ + resources = self.cached_resource(name) + if not resources: + # Nothing has been cached yet for this key yet + return None + return resources.get(resource_id) + + def cache_db_items(self, key, items, item_key='id'): + """Get cached database items. + + Allow API methods to store objects from a DB query to be + used by API extensions within the same API request. + + An instance of this class only lives for the lifetime of a + single API request, so there's no need to implement full + cache management. + """ + self.cache_resource(items, item_key, key) + + def get_db_items(self, key): + """Get database items. + + Allow an API extension to get previously stored objects within + the same API request. + + Note that the object data will be slightly stale. + """ + return self.cached_resource(key) + + def get_db_item(self, key, item_key): + """Get database item. + + Allow an API extension to get a previously stored object + within the same API request. + + Note that the object data will be slightly stale. 
+ """ + return self.get_db_items(key).get(item_key) + + def cache_db_packages(self, packages): + # NOTE(mgagne) Cache it twice for backward compatibility reasons + self.cache_db_items('packages', packages, 'id') + self.cache_db_items(self.path, packages, 'id') + + def cache_db_package(self, package): + # NOTE(mgagne) Cache it twice for backward compatibility reasons + self.cache_db_items('packages', [package], 'id') + self.cache_db_items(self.path, [package], 'id') + + def get_db_packages(self): + return (self.get_db_items('packages') or + self.get_db_items(self.path)) + + def get_db_package(self, package_id): + return (self.get_db_item('packages', package_id) or + self.get_db_item(self.path, package_id)) + + def best_match_content_type(self): + """Determine the requested response content-type.""" + if 'venus.best_content_type' not in self.environ: + # Calculate the best MIME type + content_type = None + + # Check URL path suffix + parts = self.path.rsplit('.', 1) + if len(parts) > 1: + possible_type = 'application/' + parts[1] + if possible_type in SUPPORTED_CONTENT_TYPES: + content_type = possible_type + + if not content_type: + content_type = self.accept.best_match(SUPPORTED_CONTENT_TYPES) + + self.environ['venus.best_content_type'] = ( + content_type or 'application/json') + + return self.environ['venus.best_content_type'] + + def get_content_type(self): + """Determine content type of the request body. + + Does not do any body introspection, only checks header + """ + if "Content-Type" not in self.headers: + return None + + allowed_types = SUPPORTED_CONTENT_TYPES + content_type = self.content_type + + if content_type not in allowed_types: + raise exception.InvalidContentType(content_type=content_type) + + return content_type + + def best_match_language(self): + """Determines best available locale from the Accept-Language header. + + :returns: the best language match or None if the 'Accept-Language' + header was not available in the request. 
class ActionDispatcher(object):
    """Maps method name to local methods through action name."""

    def dispatch(self, *args, **kwargs):
        """Invoke the method named by the 'action' kwarg.

        Falls back to ``self.default`` when no matching method exists.
        """
        action = kwargs.pop('action', 'default')
        handler = getattr(self, str(action), self.default)
        return handler(*args, **kwargs)

    def default(self, data):
        # Subclasses must supply the fallback behaviour.
        raise NotImplementedError()


class TextDeserializer(ActionDispatcher):
    """Default request body deserialization."""

    def deserialize(self, datastring, action='default'):
        return self.dispatch(datastring, action=action)

    def default(self, datastring):
        # Unknown body types deserialize to an empty dict.
        return {}


class JSONDeserializer(TextDeserializer):
    """Deserialize JSON request bodies."""

    def _from_json(self, datastring):
        try:
            return jsonutils.loads(datastring)
        except ValueError:
            # Translate the parse failure into the API-level exception.
            msg = _("cannot understand JSON")
            raise exception.MalformedRequestBody(reason=msg)

    def default(self, datastring):
        return {'body': self._from_json(datastring)}
+ """ + if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: + return node.childNodes[0].nodeValue + elif node.nodeName in listnames: + return [self._from_xml_node(n, listnames) for n in node.childNodes] + else: + result = dict() + for attr in node.attributes.keys(): + result[attr] = node.attributes[attr].nodeValue + for child in node.childNodes: + if child.nodeType != node.TEXT_NODE: + result[child.nodeName] = self._from_xml_node(child, + listnames) + return result + + def find_first_child_named_in_namespace(self, parent, namespace, name): + """Search a nodes children for the first child with a given name.""" + for node in parent.childNodes: + if (node.localName == name and + node.namespaceURI and + node.namespaceURI == namespace): + return node + return None + + def find_first_child_named(self, parent, name): + """Search a nodes children for the first child with a given name.""" + for node in parent.childNodes: + if node.nodeName == name: + return node + return None + + def find_children_named(self, parent, name): + """Return all of a nodes children who have the given name.""" + for node in parent.childNodes: + if node.nodeName == name: + yield node + + def extract_text(self, node): + """Get the text field contained by the given node.""" + text = [] + # Cannot assume entire text will be in a single child node because SAX + # parsers may split contiguous character data into multiple chunks + for child in node.childNodes: + if child.nodeType == child.TEXT_NODE: + text.append(child.nodeValue) + return ''.join(text) + + def default(self, datastring): + return {'body': self._from_xml(datastring)} + + +class MetadataXMLDeserializer(XMLDeserializer): + + def extract_metadata(self, metadata_node): + """Marshal the metadata attribute of a parsed request.""" + metadata = {} + if metadata_node is not None: + for meta_node in self.find_children_named(metadata_node, "meta"): + key = meta_node.getAttribute("key") + metadata[key] = self.extract_text(meta_node) + 
return metadata + + +class DictSerializer(ActionDispatcher): + """Default request body serialization.""" + + def serialize(self, data, action='default'): + return self.dispatch(data, action=action) + + def default(self, data): + return "" + + +class JSONDictSerializer(DictSerializer): + """Default JSON request body serialization.""" + + def default(self, data): + return jsonutils.dumps(data) + + +class XMLDictSerializer(DictSerializer): + + def __init__(self, metadata=None, xmlns=None): + """Initialize XMLDictSerializer. + + :param metadata: information needed to deserialize xml into + a dictionary. + :param xmlns: XML namespace to include with serialized xml + """ + super(XMLDictSerializer, self).__init__() + self.metadata = metadata or {} + self.xmlns = xmlns + + def default(self, data): + # We expect data to contain a single key which is the XML root. + root_key = data.keys()[0] + doc = minidom.Document() + node = self._to_xml_node(doc, self.metadata, root_key, data[root_key]) + + return self.to_xml_string(node) + + def to_xml_string(self, node, has_atom=False): + self._add_xmlns(node, has_atom) + return node.toxml('UTF-8') + + # NOTE (ameade): the has_atom should be removed after all of the + # xml serializers and view builders have been updated to the current + # spec that required all responses include the xmlns:atom, the has_atom + # flag is to prevent current tests from breaking + def _add_xmlns(self, node, has_atom=False): + if self.xmlns is not None: + node.setAttribute('xmlns', self.xmlns) + if has_atom: + node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom") + + def _to_xml_node(self, doc, metadata, nodename, data): + """Recursive method to convert data members to XML nodes.""" + result = doc.createElement(nodename) + + # Set the xml namespace if one is specified + # TODO(justinsb): We could also use prefixes on the keys + xmlns = metadata.get('xmlns', None) + if xmlns: + result.setAttribute('xmlns', xmlns) + + # TODO(bcwaldon): accomplish this 
without a type-check + if isinstance(data, list): + collections = metadata.get('list_collections', {}) + if nodename in collections: + metadata = collections[nodename] + for item in data: + node = doc.createElement(metadata['item_name']) + node.setAttribute(metadata['item_key'], str(item)) + result.appendChild(node) + return result + singular = metadata.get('plurals', {}).get(nodename, None) + if singular is None: + if nodename.endswith('s'): + singular = nodename[:-1] + else: + singular = 'item' + for item in data: + node = self._to_xml_node(doc, metadata, singular, item) + result.appendChild(node) + # TODO(bcwaldon): accomplish this without a type-check + elif isinstance(data, dict): + collections = metadata.get('dict_collections', {}) + if nodename in collections: + metadata = collections[nodename] + for k, v in data.items(): + node = doc.createElement(metadata['item_name']) + node.setAttribute(metadata['item_key'], str(k)) + text = doc.createTextNode(str(v)) + node.appendChild(text) + result.appendChild(node) + return result + attrs = metadata.get('attributes', {}).get(nodename, {}) + for k, v in data.items(): + if k in attrs: + result.setAttribute(k, str(v)) + else: + node = self._to_xml_node(doc, metadata, k, v) + result.appendChild(node) + else: + # Type is atom + node = doc.createTextNode(str(data)) + result.appendChild(node) + return result + + def _create_link_nodes(self, xml_doc, links): + link_nodes = [] + for link in links: + link_node = xml_doc.createElement('atom:link') + link_node.setAttribute('rel', link['rel']) + link_node.setAttribute('href', link['href']) + if 'type' in link: + link_node.setAttribute('type', link['type']) + link_nodes.append(link_node) + return link_nodes + + def _to_xml(self, root): + """Convert the xml object to an xml string.""" + return etree.tostring(root, encoding='UTF-8', xml_declaration=True) + + +def serializers(**serializers): + """Attaches serializers to a method. 
+ + This decorator associates a dictionary of serializers with a + method. Note that the function attributes are directly + manipulated; the method is not wrapped. + """ + + def decorator(func): + if not hasattr(func, 'wsgi_serializers'): + func.wsgi_serializers = {} + func.wsgi_serializers.update(serializers) + return func + return decorator + + +def deserializers(**deserializers): + """Attaches deserializers to a method. + + This decorator associates a dictionary of deserializers with a + method. Note that the function attributes are directly + manipulated; the method is not wrapped. + """ + + def decorator(func): + if not hasattr(func, 'wsgi_deserializers'): + func.wsgi_deserializers = {} + func.wsgi_deserializers.update(deserializers) + return func + return decorator + + +def response(code): + """Attaches response code to a method. + + This decorator associates a response code with a method. Note + that the function attributes are directly manipulated; the method + is not wrapped. + """ + + def decorator(func): + func.wsgi_code = code + return func + return decorator + + +class ResponseObject(object): + """Bundles a response object with appropriate serializers. + + Object that app methods may return in order to bind alternate + serializers with a response object to be serialized. Its use is + optional. + """ + + def __init__(self, obj, code=None, **serializers): + """Binds serializers with an object. + + Takes keyword arguments akin to the @serializer() decorator + for specifying serializers. Serializers specified will be + given preference over default serializers or method-specific + serializers on return. 
+ """ + + self.obj = obj + self.serializers = serializers + self._default_code = 200 + self._code = code + self._headers = {} + self.serializer = None + self.media_type = None + + def __getitem__(self, key): + """Retrieves a header with the given name.""" + + return self._headers[key.lower()] + + def __setitem__(self, key, value): + """Sets a header with the given name to the given value.""" + + self._headers[key.lower()] = value + + def __delitem__(self, key): + """Deletes the header with the given name.""" + + del self._headers[key.lower()] + + def _bind_method_serializers(self, meth_serializers): + """Binds method serializers with the response object. + + Binds the method serializers with the response object. + Serializers specified to the constructor will take precedence + over serializers specified to this method. + + :param meth_serializers: A dictionary with keys mapping to + response types and values containing + serializer objects. + """ + + # We can't use update because that would be the wrong + # precedence + for mtype, serializer in meth_serializers.items(): + self.serializers.setdefault(mtype, serializer) + + def get_serializer(self, content_type, default_serializers=None): + """Returns the serializer for the wrapped object. + + Returns the serializer for the wrapped object subject to the + indicated content type. If no serializer matching the content + type is attached, an appropriate serializer drawn from the + default serializers will be used. If no appropriate + serializer is available, raises InvalidContentType. 
+ """ + + default_serializers = default_serializers or {} + + try: + mtype = _MEDIA_TYPE_MAP.get(content_type, content_type) + if mtype in self.serializers: + return mtype, self.serializers[mtype] + else: + return mtype, default_serializers[mtype] + except (KeyError, TypeError): + raise exception.InvalidContentType(content_type=content_type) + + def preserialize(self, content_type, default_serializers=None): + """Prepares the serializer that will be used to serialize. + + Determines the serializer that will be used and prepares an + instance of it for later call. This allows the serializer to + be accessed by extensions for, e.g., template extension. + """ + + mtype, serializer = self.get_serializer(content_type, + default_serializers) + self.media_type = mtype + self.serializer = serializer() + + def attach(self, **kwargs): + """Attach slave templates to serializers.""" + + if self.media_type in kwargs: + self.serializer.attach(kwargs[self.media_type]) + + def serialize(self, request, content_type, default_serializers=None): + """Serializes the wrapped object. + + Utility method for serializing the wrapped object. Returns a + webob.Response object. 
+ """ + + if self.serializer: + serializer = self.serializer + else: + _mtype, _serializer = self.get_serializer(content_type, + default_serializers) + serializer = _serializer() + + response = webob.Response() + response.status_int = self.code + for hdr, value in self._headers.items(): + response.headers[hdr] = value + response.headers['Content-Type'] = content_type + if self.obj is not None: + response.body = serializer.serialize(self.obj) + + return response + + @property + def code(self): + """Retrieve the response status.""" + + return self._code or self._default_code + + @property + def headers(self): + """Retrieve the headers.""" + + return self._headers.copy() + + +def action_peek_json(body): + """Determine action to invoke.""" + + try: + decoded = jsonutils.loads(body) + except ValueError: + msg = _("cannot understand JSON") + raise exception.MalformedRequestBody(reason=msg) + + # Make sure there's exactly one key... + if len(decoded) != 1: + msg = _("too many body keys") + raise exception.MalformedRequestBody(reason=msg) + + # Return the action and the decoded body... + return decoded.keys()[0] + + +def action_peek_xml(body): + """Determine action to invoke.""" + + dom = utils.safe_minidom_parse_string(body) + action_node = dom.childNodes[0] + + return action_node.tagName + + +class ResourceExceptionHandler(object): + """Context manager to handle Resource exceptions. + + Used when processing exceptions generated by API implementation + methods (or their extensions). Converts most exceptions to Fault + exceptions, with the appropriate logging. 
+ """ + + def __enter__(self): + return None + + def __exit__(self, ex_type, ex_value, ex_traceback): + if not ex_value: + return True + + if isinstance(ex_value, exception.NotAuthorized): + raise Fault(webob.exc.HTTPForbidden(explanation=ex_value.msg)) + elif isinstance(ex_value, exception.Invalid): + raise Fault(exception.ConvertedException( + code=ex_value.code, explanation=ex_value.msg)) + elif isinstance(ex_value, TypeError): + exc_info = (ex_type, ex_value, ex_traceback) + LOG.error(_LE( + 'Exception handling resource: %s'), + ex_value, exc_info=exc_info) + raise Fault(webob.exc.HTTPBadRequest()) + elif isinstance(ex_value, Fault): + LOG.info(_LI("Fault thrown: %s"), ex_value) + raise ex_value + elif isinstance(ex_value, webob.exc.HTTPException): + LOG.info(_LI("HTTP exception thrown: %s"), ex_value) + raise Fault(ex_value) + + # We didn't handle the exception + return False + + +class Resource(wsgi.Application): + """WSGI app that handles (de)serialization and controller dispatch. + + WSGI app that reads routing information supplied by RoutesMiddleware + and calls the requested action method upon its controller. All + controller action methods must accept a 'req' argument, which is the + incoming wsgi.Request. If the operation is a PUT or POST, the controller + method must also accept a 'body' argument (the deserialized request body). + They may raise a webob.exc exception or return a dict, which will be + serialized by requested content type. + + Exceptions derived from webob.exc.HTTPException will be automatically + wrapped in Fault() to provide API friendly error responses. + """ + + def __init__(self, controller, action_peek=None, **deserializers): + """Initialize Resource. 
+ + :param controller: object that implement methods created by routes lib + :param action_peek: dictionary of routines for peeking into an action + request body to determine the desired action + """ + + self.controller = controller + + default_deserializers = dict(xml=XMLDeserializer, + json=JSONDeserializer) + default_deserializers.update(deserializers) + + self.default_deserializers = default_deserializers + self.default_serializers = dict(xml=XMLDictSerializer, + json=JSONDictSerializer) + + self.action_peek = dict(xml=action_peek_xml, + json=action_peek_json) + self.action_peek.update(action_peek or {}) + + # Copy over the actions dictionary + self.wsgi_actions = {} + if controller: + self.register_actions(controller) + + # Save a mapping of extensions + self.wsgi_extensions = {} + self.wsgi_action_extensions = {} + + def register_actions(self, controller): + """Rvenusters controller actions with this resource.""" + + actions = getattr(controller, 'wsgi_actions', {}) + for key, method_name in actions.items(): + self.wsgi_actions[key] = getattr(controller, method_name) + + def register_extensions(self, controller): + """Rvenusters controller extensions with this resource.""" + + extensions = getattr(controller, 'wsgi_extensions', []) + for method_name, action_name in extensions: + # Look up the extending method + extension = getattr(controller, method_name) + + if action_name: + # Extending an action... 
+ if action_name not in self.wsgi_action_extensions: + self.wsgi_action_extensions[action_name] = [] + self.wsgi_action_extensions[action_name].append(extension) + else: + # Extending a regular method + if method_name not in self.wsgi_extensions: + self.wsgi_extensions[method_name] = [] + self.wsgi_extensions[method_name].append(extension) + + def get_action_args(self, request_environment): + """Parse dictionary created by routes library.""" + + # NOTE(Vek): Check for get_action_args() override in the + # controller + if hasattr(self.controller, 'get_action_args'): + return self.controller.get_action_args(request_environment) + + try: + args = request_environment['wsgiorg.routing_args'][1].copy() + except (KeyError, IndexError, AttributeError): + return {} + + try: + del args['controller'] + except KeyError: + pass + + try: + del args['format'] + except KeyError: + pass + + return args + + def get_body(self, request): + + if len(request.body) == 0: + LOG.debug("Empty body provided in request") + return None, '' + + try: + content_type = request.get_content_type() + except exception.InvalidContentType: + LOG.debug("Unrecognized Content-Type provided in request") + return None, '' + + if not content_type: + LOG.debug("No Content-Type provided in request") + return None, '' + + return content_type, request.body + + def deserialize(self, meth, content_type, body): + meth_deserializers = getattr(meth, 'wsgi_deserializers', {}) + try: + mtype = _MEDIA_TYPE_MAP.get(content_type, content_type) + if mtype in meth_deserializers: + deserializer = meth_deserializers[mtype] + else: + deserializer = self.default_deserializers[mtype] + except (KeyError, TypeError): + raise exception.InvalidContentType(content_type=content_type) + + return deserializer().deserialize(body) + + def pre_process_extensions(self, extensions, request, action_args): + # List of callables for post-processing extensions + post = [] + + for ext in extensions: + if inspect.isgeneratorfunction(ext): + 
response = None + + # If it's a generator function, the part before the + # yield is the preprocessing stage + try: + with ResourceExceptionHandler(): + gen = ext(req=request, **action_args) + response = next(gen) + except Fault as ex: + response = ex + + # We had a response... + if response: + return response, [] + + # No response, queue up generator for post-processing + post.append(gen) + else: + # Regular functions only perform post-processing + post.append(ext) + + # Run post-processing in the reverse order + return None, reversed(post) + + def post_process_extensions(self, extensions, resp_obj, request, + action_args): + for ext in extensions: + response = None + if inspect.isgenerator(ext): + # If it's a generator, run the second half of + # processing + try: + with ResourceExceptionHandler(): + response = ext.send(resp_obj) + except StopIteration: + # Normal exit of generator + continue + except Fault as ex: + response = ex + else: + # Regular functions get post-processing... + try: + with ResourceExceptionHandler(): + response = ext(req=request, resp_obj=resp_obj, + **action_args) + except Fault as ex: + response = ex + + # We had a response... + if response: + return response + + return None + + @webob.dec.wsgify(RequestClass=Request) + def __call__(self, request): + """WSGI method that controls (de)serialization and method dispatch.""" + + LOG.info(_LI("%(method)s %(url)s"), + {"method": request.method, + "url": request.url}) + + # Identify the action, its arguments, and the requested + # content type + action_args = self.get_action_args(request.environ) + action = action_args.pop('action', None) + content_type, body = self.get_body(request) + accept = request.best_match_content_type() + + # NOTE(Vek): Splitting the function up this way allows for + # auditing by external tools that wrap the existing + # function. If we try to audit __call__(), we can + # run into troubles due to the @webob.dec.wsgify() + # decorator. 
+ return self._process_stack(request, action, action_args, + content_type, body, accept) + + def _process_stack(self, request, action, action_args, + content_type, body, accept): + """Implement the processing stack.""" + + # Get the implementing method + try: + meth, extensions = self.get_method(request, action, + content_type, body) + except (AttributeError, TypeError): + return Fault(webob.exc.HTTPNotFound()) + except KeyError as ex: + msg = _("There is no such action: %s") % ex.args[0] + return Fault(webob.exc.HTTPBadRequest(explanation=msg)) + except exception.MalformedRequestBody: + msg = _("Malformed request body") + return Fault(webob.exc.HTTPBadRequest(explanation=msg)) + + # Now, deserialize the request body... + try: + if content_type: + contents = self.deserialize(meth, content_type, body) + else: + contents = {} + except exception.InvalidContentType: + msg = _("Unsupported Content-Type") + return Fault(webob.exc.HTTPBadRequest(explanation=msg)) + except exception.MalformedRequestBody: + msg = _("Malformed request body") + return Fault(webob.exc.HTTPBadRequest(explanation=msg)) + + # Update the action args + action_args.update(contents) + + project_id = action_args.pop("project_id", None) + context = request.environ.get('venus.context') + if (context and project_id and (project_id != context.project_id)): + msg = _("Malformed request url") + return Fault(webob.exc.HTTPBadRequest(explanation=msg)) + + # Run pre-processing extensions + response, post = self.pre_process_extensions(extensions, + request, action_args) + + if not response: + try: + with ResourceExceptionHandler(): + action_result = self.dispatch(meth, request, action_args) + except Fault as ex: + response = ex + + if not response: + # No exceptions; convert action_result into a + # ResponseObject + resp_obj = None + if type(action_result) is dict or action_result is None: + resp_obj = ResponseObject(action_result) + elif isinstance(action_result, ResponseObject): + resp_obj = action_result + 
else: + response = action_result + + # Run post-processing extensions + if resp_obj: + _set_request_id_header(request, resp_obj) + # Do a preserialize to set up the response object + serializers = getattr(meth, 'wsgi_serializers', {}) + resp_obj._bind_method_serializers(serializers) + if hasattr(meth, 'wsgi_code'): + resp_obj._default_code = meth.wsgi_code + resp_obj.preserialize(accept, self.default_serializers) + + # Process post-processing extensions + response = self.post_process_extensions(post, resp_obj, + request, action_args) + + if resp_obj and not response: + response = resp_obj.serialize(request, accept, + self.default_serializers) + + try: + msg_dict = dict(url=request.url, status=response.status_int) + msg = _LI("%(url)s returned with HTTP %(status)d") + except AttributeError as e: + msg_dict = dict(url=request.url, e=e) + msg = _LI("%(url)s returned a fault: %(e)s") + + LOG.info(msg, msg_dict) + + return response + + def get_method(self, request, action, content_type, body): + """Look up the action-specific method and its extensions.""" + + # Look up the method + try: + if not self.controller: + meth = getattr(self, action) + else: + meth = getattr(self.controller, action) + except AttributeError as e: + with excutils.save_and_reraise_exception(e) as ctxt: + if (not self.wsgi_actions or action not in ['action', + 'create', + 'delete', + 'update']): + LOG.exception(_LE('Get method error.')) + else: + ctxt.reraise = False + else: + return meth, self.wsgi_extensions.get(action, []) + + if action == 'action': + # OK, it's an action; figure out which action... 
+ mtype = _MEDIA_TYPE_MAP.get(content_type) + action_name = self.action_peek[mtype](body) + LOG.debug("Action body: %s", body) + else: + action_name = action + + # Look up the action method + return (self.wsgi_actions[action_name], + self.wsgi_action_extensions.get(action_name, [])) + + def dispatch(self, method, request, action_args): + """Dispatch a call to the action-specific method.""" + + return method(req=request, **action_args) + + +def action(name): + """Mark a function as an action. + + The given name will be taken as the action key in the body. + + This is also overloaded to allow extensions to provide + non-extending definitions of create and delete operations. + """ + + def decorator(func): + func.wsgi_action = name + return func + return decorator + + +def extends(*args, **kwargs): + """Indicate a function extends an operation. + + Can be used as either:: + + @extends + def index(...): + pass + + or as:: + + @extends(action='resize') + def _action_resize(...): + pass + """ + + def decorator(func): + # Store enough information to find what we're extending + func.wsgi_extends = (func.__name__, kwargs.get('action')) + return func + + # If we have positional arguments, call the decorator + if args: + return decorator(*args) + + # OK, return the decorator instead + return decorator + + +class ControllerMetaclass(type): + """Controller metaclass. + + This metaclass automates the task of assembling a dictionary + mapping action keys to method names. 
+ """ + + def __new__(mcs, name, bases, cls_dict): + """Adds the wsgi_actions dictionary to the class.""" + + # Find all actions + actions = {} + extensions = [] + # start with wsgi actions from base classes + for base in bases: + actions.update(getattr(base, 'wsgi_actions', {})) + for key, value in cls_dict.items(): + if not callable(value): + continue + if getattr(value, 'wsgi_action', None): + actions[value.wsgi_action] = key + elif getattr(value, 'wsgi_extends', None): + extensions.append(value.wsgi_extends) + + # Add the actions and extensions to the class dict + cls_dict['wsgi_actions'] = actions + cls_dict['wsgi_extensions'] = extensions + + return super(ControllerMetaclass, mcs).__new__(mcs, name, bases, + cls_dict) + + +@six.add_metaclass(ControllerMetaclass) +class Controller(object): + """Default controller.""" + + _view_builder_class = None + + def __init__(self, view_builder=None): + """Initialize controller with a view builder instance.""" + if view_builder: + self._view_builder = view_builder + elif self._view_builder_class: + self._view_builder = self._view_builder_class() + else: + self._view_builder = None + + @staticmethod + def is_valid_body(body, entity_name): + if not (body and entity_name in body): + return False + + def is_dict(d): + try: + d.get(None) + return True + except AttributeError: + return False + + if not is_dict(body[entity_name]): + return False + + return True + + @staticmethod + def assert_valid_body(body, entity_name): + # NOTE: After v1 api is deprecated need to merge 'is_valid_body' and + # 'assert_valid_body' in to one method. Right now it is not + # possible to modify 'is_valid_body' to raise exception because + # in case of V1 api when 'is_valid_body' return False, + # 'HTTPUnprocessableEntity' exception is getting raised and in + # V2 api 'HTTPBadRequest' exception is getting raised. 
+ if not Controller.is_valid_body(body, entity_name): + raise webob.exc.HTTPBadRequest( + explanation=_("Missing required element '%s' in " + "request body.") % entity_name) + + @staticmethod + def validate_name_and_description(body): + name = body.get('name') + if name is not None: + if isinstance(name, six.string_types): + body['name'] = name.strip() + try: + utils.check_string_length(body['name'], 'Name', + min_length=0, max_length=255) + except exception.InvalidInput as error: + raise webob.exc.HTTPBadRequest(explanation=error.msg) + + description = body.get('description') + if description is not None: + try: + utils.check_string_length(description, 'Description', + min_length=0, max_length=255) + except exception.InvalidInput as error: + raise webob.exc.HTTPBadRequest(explanation=error.msg) + + @staticmethod + def validate_string_length(value, entity_name, min_length=0, + max_length=None, remove_whitespaces=False): + """Check the length of specified string. + + :param value: the value of the string + :param entity_name: the name of the string + :param min_length: the min_length of the string + :param max_length: the max_length of the string + :param remove_whitespaces: True if trimming whitespaces is needed + else False + """ + if isinstance(value, six.string_types) and remove_whitespaces: + value = value.strip() + try: + utils.check_string_length(value, entity_name, + min_length=min_length, + max_length=max_length) + except exception.InvalidInput as error: + raise webob.exc.HTTPBadRequest(explanation=error.msg) + + @staticmethod + def validate_integer(value, name, min_value=None, max_value=None): + """Make sure that value is a valid integer, potentially within range. 
+ + :param value: the value of the integer + :param name: the name of the integer + :param min_length: the min_length of the integer + :param max_length: the max_length of the integer + :returns: integer + """ + try: + value = int(value) + except (TypeError, ValueError, UnicodeEncodeError): + raise webob.exc.HTTPBadRequest(explanation=( + _('%s must be an integer.') % name)) + + if min_value is not None and value < min_value: + raise webob.exc.HTTPBadRequest( + explanation=(_('%(value_name)s must be >= %(min_value)d') % + {'value_name': name, 'min_value': min_value})) + if max_value is not None and value > max_value: + raise webob.exc.HTTPBadRequest( + explanation=(_('%(value_name)s must be <= %(max_value)d') % + {'value_name': name, 'max_value': max_value})) + + return value + + +class Fault(webob.exc.HTTPException): + """Wrap webob.exc.HTTPException to provide API friendly response.""" + + _fault_names = {400: "badRequest", + 401: "unauthorized", + 403: "forbidden", + 404: "itemNotFound", + 405: "badMethod", + 409: "conflictingRequest", + 413: "overLimit", + 415: "badMediaType", + 501: "notImplemented", + 503: "serviceUnavailable"} + + def __init__(self, exception): + """Create a Fault for the given webob.exc.exception.""" + self.wrapped_exc = exception + self.status_int = exception.status_int + + @webob.dec.wsgify(RequestClass=Request) + def __call__(self, req): + """Generate a WSGI response based on the exception passed to ctor.""" + # Replace the body with fault details. 
+ locale = req.best_match_language() + code = self.wrapped_exc.status_int + fault_name = self._fault_names.get(code, "computeFault") + explanation = self.wrapped_exc.explanation + fault_data = { + fault_name: { + 'code': code, + 'message': i18n.translate(explanation, locale)}} + if code == 413: + retry = self.wrapped_exc.headers.get('Retry-After', None) + if retry: + fault_data[fault_name]['retryAfter'] = retry + + # 'code' is an attribute on the fault tag itself + metadata = {'attributes': {fault_name: 'code'}} + + xml_serializer = XMLDictSerializer(metadata, XML_NS_V1) + + content_type = req.best_match_content_type() + serializer = { + 'application/xml': xml_serializer, + 'application/json': JSONDictSerializer(), + }[content_type] + + self.wrapped_exc.body = serializer.serialize(fault_data) + self.wrapped_exc.content_type = content_type + _set_request_id_header(req, self.wrapped_exc.headers) + + return self.wrapped_exc + + def __str__(self): + return self.wrapped_exc.__str__() + + +def _set_request_id_header(req, headers): + context = req.environ.get('venus.context') + if context: + headers['x-compute-request-id'] = context.request_id + + +class OverLimitFault(webob.exc.HTTPException): + """Rate-limited request response.""" + + def __init__(self, message, details, retry_time): + """Initialize new `OverLimitFault` with relevant information.""" + hdrs = OverLimitFault._retry_after(retry_time) + self.wrapped_exc = webob.exc.HTTPRequestEntityTooLarge(headers=hdrs) + self.content = { + "overLimitFault": { + "code": self.wrapped_exc.status_int, + "message": message, + "details": details, + }, + } + + @staticmethod + def _retry_after(retry_time): + delay = int(math.ceil(retry_time - time.time())) + retry_after = delay if delay > 0 else 0 + headers = {'Retry-After': '%d' % retry_after} + return headers + + @webob.dec.wsgify(RequestClass=Request) + def __call__(self, request): + """Serializes the wrapped exception conforming to our error format.""" + content_type = 
request.best_match_content_type() + metadata = {"attributes": {"overLimitFault": "code"}} + + def translate(msg): + locale = request.best_match_language() + return i18n.translate(msg, locale) + + self.content['overLimitFault']['message'] = \ + translate(self.content['overLimitFault']['message']) + self.content['overLimitFault']['details'] = \ + translate(self.content['overLimitFault']['details']) + + xml_serializer = XMLDictSerializer(metadata, XML_NS_V1) + serializer = { + 'application/xml': xml_serializer, + 'application/json': JSONDictSerializer(), + }[content_type] + + content = serializer.serialize(self.content) + self.wrapped_exc.body = content + + return self.wrapped_exc diff --git a/venus/api/schemas/atom-link.rng b/venus/api/schemas/atom-link.rng new file mode 100644 index 0000000..8d0e8eb --- /dev/null +++ b/venus/api/schemas/atom-link.rng @@ -0,0 +1,141 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + [^:]* + + + + + + .+/.+ + + + + + + [A-Za-z]{1,8}(-[A-Za-z0-9]{1,8})* + + + + + + + + + + + + xml:base + xml:lang + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/venus/api/schemas/v1.1/extension.rng b/venus/api/schemas/v1.1/extension.rng new file mode 100644 index 0000000..4bf9c74 --- /dev/null +++ b/venus/api/schemas/v1.1/extension.rng @@ -0,0 +1,11 @@ + + + + + + + + + + diff --git a/venus/api/schemas/v1.1/extensions.rng b/venus/api/schemas/v1.1/extensions.rng new file mode 100644 index 0000000..088d9f5 --- /dev/null +++ b/venus/api/schemas/v1.1/extensions.rng @@ -0,0 +1,6 @@ + + + + + diff --git a/venus/api/urlmap.py b/venus/api/urlmap.py new file mode 100644 index 0000000..7bbe4d6 --- /dev/null +++ b/venus/api/urlmap.py @@ -0,0 +1,302 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import paste.urlmap +import re + +from oslo_log import log as logging + +try: + from urllib.request import parse_http_list # pylint: disable=E0611 +except ImportError: + from urllib2 import parse_http_list # Python 2 + +from venus.api.openstack import wsgi + + +_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"' +_option_header_piece_re = re.compile( + r';\s*([^\s;=]+|%s)\s*' + r'(?:=\s*([^;]+|%s))?\s*' % + (_quoted_string_re, _quoted_string_re)) + +LOG = logging.getLogger(__name__) + + +def unquote_header_value(value): + """Unquotes a header value. + + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + return value + + +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + :param value: a string with a list header. 
+ :return: :class:`list` + """ + result = [] + for item in parse_http_list(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +def parse_options_header(value): + """Parse 'Content-Type'-like header into a tuple. + + Parse a ``Content-Type`` like header into a tuple with the content + type and the options: + + >>> parse_options_header('Content-Type: text/html; mimetype=text/html') + ('Content-Type:', {'mimetype': 'text/html'}) + + :param value: the header to parse. + :return: (str, options) + """ + def _tokenize(string): + for match in _option_header_piece_re.finditer(string): + key, value = match.groups() + key = unquote_header_value(key) + if value is not None: + value = unquote_header_value(value) + yield key, value + + if not value: + return '', {} + + parts = _tokenize(';' + value) + name = next(parts)[0] + extra = dict(parts) + return name, extra + + +class Accept(object): + def __init__(self, value): + self._content_types = [parse_options_header(v) for v in + parse_list_header(value)] + + def best_match(self, supported_content_types): + # FIXME: Should we have a more sophisticated matching algorithm that + # takes into account the version as well? 
class URLMap(paste.urlmap.URLMap):
    """URL dispatcher that also negotiates MIME type and API version."""

    def _match(self, host, port, path_info):
        """Find longest match for a given URL path."""
        for (domain, app_url), app in self.applications:
            # Skip entries pinned to a different virtual host.
            if domain and domain != host and domain != host + ':' + port:
                continue
            if path_info == app_url or path_info.startswith(app_url + '/'):
                return app, app_url
        return None, None

    def _set_script_name(self, app, app_url):
        """Wrap *app* so SCRIPT_NAME includes the mount point."""
        def wrap(environ, start_response):
            environ['SCRIPT_NAME'] += app_url
            return app(environ, start_response)
        return wrap

    def _munge_path(self, app, path_info, app_url):
        """Wrap *app*, shifting the mount prefix out of PATH_INFO."""
        def wrap(environ, start_response):
            environ['SCRIPT_NAME'] += app_url
            environ['PATH_INFO'] = path_info[len(app_url):]
            return app(environ, start_response)
        return wrap

    def _path_strategy(self, host, port, path_info):
        """Check path suffix for MIME type and path prefix for API version."""
        mime_type = app = app_url = None

        # A path suffix (e.g. ".json") selects the response MIME type.
        suffix_parts = path_info.rsplit('.', 1)
        if len(suffix_parts) > 1:
            candidate = 'application/' + suffix_parts[1]
            if candidate in wsgi.SUPPORTED_CONTENT_TYPES:
                mime_type = candidate

        # A path prefix (e.g. "/v1.1") selects the API version.
        if len(path_info.split('/')) > 1:
            found_app, found_url = self._match(host, port, path_info)
            # Don't use prefix if it ends up matching default
            if found_app and found_url:
                app_url = found_url
                app = self._munge_path(found_app, path_info, app_url)

        return mime_type, app, app_url

    def _content_type_strategy(self, host, port, environ):
        """Check Content-Type header for API version."""
        params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
        if 'version' not in params:
            return None
        app, app_url = self._match(host, port, '/v' + params['version'])
        return self._set_script_name(app, app_url) if app else None

    def _accept_strategy(self, host, port, environ, supported_content_types):
        """Check Accept header for best matching MIME type and API version."""
        accept = Accept(environ.get('HTTP_ACCEPT', ''))
        mime_type, params = accept.best_match(supported_content_types)

        app = None
        if 'version' in params:
            app, app_url = self._match(host, port, '/v' + params['version'])
            if app:
                app = self._set_script_name(app, app_url)

        return mime_type, app

    def __call__(self, environ, start_response):
        host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            port = '80' if environ['wsgi.url_scheme'] == 'http' else '443'

        path_info = self.normalize_url(environ['PATH_INFO'], False)[1]

        # The MIME type for the response is determined in one of two ways:
        # 1) URL path suffix (eg /servers/detail.json)
        # 2) Accept header (eg application/json;q=0.8, application/xml;q=0.2)
        #
        # The API version is determined in one of three ways:
        # 1) URL path prefix (eg /v1.1/tenant/servers/detail)
        # 2) Content-Type header (eg application/json;version=1.1)
        # 3) Accept header (eg application/json;q=0.8;version=1.1)
        supported_content_types = list(wsgi.SUPPORTED_CONTENT_TYPES)

        mime_type, app, app_url = self._path_strategy(host, port, path_info)

        # Accept application/atom+xml for the index query of each API
        # version mount point as well as the root index
        if path_info == '/' or (app_url and app_url + '/' == path_info):
            supported_content_types.append('application/atom+xml')

        if not app:
            app = self._content_type_strategy(host, port, environ)

        if not mime_type or not app:
            fallback_type, fallback_app = self._accept_strategy(
                host, port, environ, supported_content_types)
            if fallback_type and not mime_type:
                mime_type = fallback_type
            if fallback_app and not app:
                app = fallback_app

        if not mime_type:
            mime_type = 'application/json'

        if not app:
            # Didn't match a particular version, probably matches default
            app, app_url = self._match(host, port, path_info)
            if app:
                app = self._munge_path(app, path_info, app_url)

        if app:
            environ['venus.best_content_type'] = mime_type
            return app(environ, start_response)

        environ['paste.urlmap_object'] = self
        return self.not_found_application(environ, start_response)
class SearchController(wsgi.Controller):
    """Expose the log-search backend through the v1 REST API.

    Every action simply forwards the request's query parameters to the
    matching SearchCore/ESSearchObj call and returns its result.
    """

    def __init__(self, ext_mgr):
        self.ext_mgr = ext_mgr
        self.search_api = SearchCore()
        self.search_lib = ESSearchObj()
        super(SearchController, self).__init__()

    @wsgi.wrap_check_policy
    def search_params(self, req):
        """Delegate to SearchCore.params with the request's filters."""
        get = req.params.get
        return self.search_api.params(get("type", None),
                                      get("module_name", None),
                                      get("index_type", None))

    @wsgi.wrap_check_policy
    def search_logs(self, req):
        """Delegate to SearchCore.logs with filter and paging parameters."""
        get = req.params.get
        return self.search_api.logs(get("host_name", None),
                                    get("module_name", None),
                                    get("program_name", None),
                                    get("level", None),
                                    get("user_id", None),
                                    get("project_id", None),
                                    get("query", None),
                                    get("index_type", None),
                                    get("start_time", None),
                                    get("end_time", None),
                                    get("page_num", None),
                                    get("page_size", None))

    @wsgi.wrap_check_policy
    def search_analyse_logs(self, req):
        """Delegate to SearchCore.analyse_logs for the given time range."""
        get = req.params.get
        return self.search_api.analyse_logs(get("group_name", None),
                                            get("host_name", None),
                                            get("module_name", None),
                                            get("program_name", None),
                                            get("level", None),
                                            get("start_time", None),
                                            get("end_time", None))

    @wsgi.wrap_check_policy
    def search_typical_logs(self, req):
        """Delegate to SearchCore.typical_logs for the given time range."""
        get = req.params.get
        return self.search_api.typical_logs(get("type", None),
                                            get("start_time", None),
                                            get("end_time", None))

    @wsgi.wrap_check_policy
    def instance_call_chain(self, req):
        """Delegate to SearchCore.instance_call_chain."""
        get = req.params.get
        return self.search_api.instance_call_chain(get("request_id", None),
                                                   get("uuid", None))

    @wsgi.wrap_check_policy
    def search_global_id(self, req):
        """Fetch log records sharing one global trace id."""
        return self.search_lib.get_global_log(
            req.params.get("global_id", None))


def create_resource(ext_mgr):
    """Build the WSGI resource wrapping SearchController."""
    return wsgi.Resource(SearchController(ext_mgr))
class APIRouter(venus.api.openstack.APIRouter):
    """Routes requests on the API to the appropriate controller and method."""
    ExtensionManager = extensions.ExtensionManager

    def _setup_routes(self, mapper, ext_mgr):
        """Register the v1 resources and wire URL patterns to actions.

        :param mapper: routes.Mapper the routes are registered on.
        :param ext_mgr: extension manager handed to each resource factory.
        """
        # Register resources
        versions_resource = versions.create_resource()
        config_resource = custom_config.create_resource(ext_mgr)
        search_resource = search.create_resource(ext_mgr)

        # Register routers
        mapper.redirect("", "/")

        mapper.connect("versions", "/",
                       controller=versions_resource,
                       action='show')

        mapper.connect("get_custom_config", "/custom_config",
                       controller=config_resource,
                       action='get_config',
                       conditions={'method': ['GET']})

        # BUG FIX: this route was also named "get_custom_config", which
        # collided with the GET route above in the mapper's named-route
        # registry; the POST route gets its own unique name.
        mapper.connect("set_custom_config", "/custom_config",
                       controller=config_resource,
                       action='set_config',
                       conditions={'method': ['POST']})

        mapper.connect("search_params", "/search/params",
                       controller=search_resource,
                       action='search_params',
                       conditions={'method': ['GET']})

        mapper.connect("search_logs", "/search/logs",
                       controller=search_resource,
                       action='search_logs',
                       conditions={'method': ['GET']})

        mapper.connect("search_analyse_logs", "/search/analyse/logs",
                       controller=search_resource,
                       action='search_analyse_logs',
                       conditions={'method': ['GET']})

        mapper.connect("search_typical_logs", "/search/typical/logs",
                       controller=search_resource,
                       action='search_typical_logs',
                       conditions={'method': ['GET']})

        mapper.connect("instance_call_chain", "/search/instance/callchain",
                       controller=search_resource,
                       action='instance_call_chain',
                       conditions={'method': ['GET']})

        mapper.connect("search_log_by_global_id", "/search/trace_log",
                       controller=search_resource,
                       action='search_global_id',
                       conditions={'method': ['GET']})
If Venus is " + "operating behind a proxy, you will want to change " + "this to represent the proxy's URL."), +] + +CONF = cfg.CONF +CONF.register_opts(versions_opts) + + +def get_view_builder(req): + base_url = CONF.public_endpoint or req.application_url + return ViewBuilder(base_url) + + +class ViewBuilder(object): + def __init__(self, base_url): + """Initialize ViewBuilder. + + :param base_url: url of the root wsgi application + """ + self.base_url = base_url + + def build_choices(self, VERSIONS, req): + version_objs = [] + for version in VERSIONS: + version = VERSIONS[version] + version_objs.append({ + "id": version['id'], + "status": version['status'], + "links": [{"rel": "self", + "href": self.generate_href(version['id'], + req.path), }, ], + "media-types": version['media-types'], }) + + return dict(choices=version_objs) + + def build_versions(self, versions): + version_objs = [] + for version in sorted(versions.keys()): + version = versions[version] + version_objs.append({ + "id": version['id'], + "status": version['status'], + "updated": version['updated'], + "links": self._build_links(version), }) + + return dict(versions=version_objs) + + def build_version(self, version): + reval = copy.deepcopy(version) + reval['links'].insert(0, { + "rel": "self", + "href": self.base_url.rstrip('/') + '/', }) + return dict(version=reval) + + def _build_links(self, version_data): + """Generate a container of links that refer to the provided version.""" + href = self.generate_href(version_data['id']) + + links = [{'rel': 'self', + 'href': href, }, ] + + return links + + def generate_href(self, version, path=None): + """Create an url that refers to a specific version_number.""" + if version.find('v1.') == 0: + version_number = 'v1' + else: + raise Exception("Error version of %s" % version) + + if path: + path = path.strip('/') + return os.path.join(self.base_url, version_number, path) + else: + return os.path.join(self.base_url, version_number) + '/' diff --git 
a/venus/api/versions.py b/venus/api/versions.py new file mode 100644 index 0000000..d54743d --- /dev/null +++ b/venus/api/versions.py @@ -0,0 +1,241 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import datetime + +from lxml import etree +from oslo_config import cfg + +from venus.api.openstack import wsgi +from venus.api.v1.views import versions as views_versions +from venus.api import xmlutil + + +CONF = cfg.CONF + + +_KNOWN_VERSIONS = { + "v1.0": { + "id": "v1.0", + "status": "SUPPORTED", + "updated": "2014-06-28T12:20:21Z", + "links": [ + { + "rel": "describedby", + "type": "text/html", + "href": "http://docs.openstack.org/", + }, + ], + "media-types": [ + { + "base": "application/xml", + "type": "application/vnd.openstack.venus+xml;version=1", + }, + { + "base": "application/json", + "type": "application/vnd.openstack.venus+json;version=1", + } + ], + } +} + + +def get_supported_versions(): + versions = {} + + if CONF.enable_v1_api: + versions['v1.0'] = _KNOWN_VERSIONS['v1.0'] + + return versions + + +class MediaTypesTemplateElement(xmlutil.TemplateElement): + def will_render(self, datum): + return 'media-types' in datum + + +def make_version(elem): + elem.set('id') + elem.set('status') + elem.set('updated') + + mts = MediaTypesTemplateElement('media-types') + elem.append(mts) + + mt = xmlutil.SubTemplateElement(mts, 'media-type', selector='media-types') + mt.set('base') + mt.set('type') + + xmlutil.make_links(elem, 'links') + + 
class AtomSerializer(wsgi.XMLDictSerializer):
    """Render version documents as Atom feeds."""

    NSMAP = {None: xmlutil.XMLNS_ATOM}

    def __init__(self, metadata=None, xmlns=None):
        self.metadata = metadata or {}
        # Fall back to the generic Atom namespace when none is given.
        self.xmlns = xmlns if xmlns else wsgi.XML_NS_ATOM

    def _get_most_recent_update(self, versions):
        """Return the newest 'updated' timestamp among *versions*."""
        fmt = '%Y-%m-%dT%H:%M:%SZ'
        parsed = [datetime.datetime.strptime(version['updated'], fmt)
                  for version in versions]
        newest = max(parsed) if parsed else None
        return newest.strftime(fmt)

    def _get_base_url(self, link_href):
        """Strip the last path segment, keeping a trailing '/'."""
        # Make sure no trailing /
        trimmed = link_href.rstrip('/')
        return trimmed.rsplit('/', 1)[0] + '/'

    def _create_feed(self, versions, feed_title, feed_id):
        """Build the <feed> element holding one <entry> per version."""
        feed = etree.Element('feed', nsmap=self.NSMAP)

        title = etree.SubElement(feed, 'title')
        title.set('type', 'text')
        title.text = feed_title

        # The feed is as fresh as its most recently updated version.
        etree.SubElement(feed, 'updated').text = \
            self._get_most_recent_update(versions)
        etree.SubElement(feed, 'id').text = feed_id

        self_link = etree.SubElement(feed, 'link')
        self_link.set('rel', 'self')
        self_link.set('href', feed_id)

        author = etree.SubElement(feed, 'author')
        etree.SubElement(author, 'name').text = 'Rackspace'
        etree.SubElement(author, 'uri').text = 'http://www.rackspace.com/'

        for version in versions:
            feed.append(self._create_version_entry(version))

        return feed

    def _create_version_entry(self, version):
        """Build one <entry> element describing *version*."""
        entry = etree.Element('entry')
        etree.SubElement(entry, 'id').text = version['links'][0]['href']

        title = etree.SubElement(entry, 'title')
        title.set('type', 'text')
        title.text = 'Version %s' % version['id']

        etree.SubElement(entry, 'updated').text = version['updated']

        for link in version['links']:
            link_elem = etree.SubElement(entry, 'link')
            link_elem.set('rel', link['rel'])
            link_elem.set('href', link['href'])
            if 'type' in link:
                link_elem.set('type', link['type'])

        content = etree.SubElement(entry, 'content')
        content.set('type', 'text')
        content.text = 'Version %s %s (%s)' % (version['id'],
                                               version['status'],
                                               version['updated'])
        return entry
multi(self, req): + """Return multiple choices.""" + builder = views_versions.get_view_builder(req) + return builder.build_choices(get_supported_versions(), req) + + def get_action_args(self, request_environment): + """Parse dictionary created by routes library.""" + args = {} + if request_environment['PATH_INFO'] == '/': + args['action'] = 'index' + else: + args['action'] = 'multi' + + return args + + +class VenusVersion(object): + @wsgi.serializers(xml=VersionTemplate, + atom=VersionAtomSerializer) + def show(self, req): + builder = views_versions.get_view_builder(req) + if 'v1' in builder.base_url: + return builder.build_version(_KNOWN_VERSIONS['v1.0']) + + +def create_resource(): + return wsgi.Resource(VenusVersion()) diff --git a/venus/api/xmlutil.py b/venus/api/xmlutil.py new file mode 100644 index 0000000..a604cdd --- /dev/null +++ b/venus/api/xmlutil.py @@ -0,0 +1,959 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
class Selector(object):
    """Selects datum to operate on from an object."""

    def __init__(self, *chain):
        """Initialize the selector.

        Each argument is a subsequent index into the object.
        """
        self.chain = chain

    def __repr__(self):
        """Return a representation of the selector."""
        return "Selector" + repr(self.chain)

    def __call__(self, obj, do_raise=False):
        """Walk the chain and return the selected datum.

        :param obj: the object to select from.
        :param do_raise: when False (the default) a missing index
                         yields None; when True it raises KeyError.
        """
        current = obj
        for step in self.chain:
            # Callable steps transform the value; other steps index it.
            if callable(step):
                current = step(current)
                continue
            try:
                current = current[step]
            except (KeyError, IndexError):
                if not do_raise:
                    return None
                # Normalize IndexError to KeyError, for consistency.
                raise KeyError(step)
        return current
+ :param subselector: An optional callable taking an object and + optional boolean do_raise indicator and + returning the object bound to the element. + This is used to further refine the datum + object returned by selector in the event + that it is a list of objects. + """ + + # Convert selector into a Selector + if selector is None: + selector = Selector() + elif not callable(selector): + selector = Selector(selector) + + # Convert subselector into a Selector + if subselector is not None and not callable(subselector): + subselector = Selector(subselector) + + self.tag = tag + self.selector = selector + self.subselector = subselector + self.attrib = {} + self._text = None + self._children = [] + self._childmap = {} + + # Run the incoming attributes through set() so that they + # become selectorized + if not attrib: + attrib = {} + attrib.update(extra) + for k, v in attrib.items(): + self.set(k, v) + + def __repr__(self): + """Return a representation of the template element.""" + + return ('<%s.%s %r at %#x>' % + (self.__class__.__module__, self.__class__.__name__, + self.tag, id(self))) + + def __len__(self): + """Return the number of child elements.""" + + return len(self._children) + + def __contains__(self, key): + """Determine whether a child node named by key exists.""" + + return key in self._childmap + + def __getitem__(self, idx): + """Retrieve a child node by index or name.""" + + if isinstance(idx, six.string_types): + # Allow access by node name + return self._childmap[idx] + else: + return self._children[idx] + + def append(self, elem): + """Append a child to the element.""" + + # Unwrap templates... + elem = elem.unwrap() + + # Avoid duplications + if elem.tag in self._childmap: + raise KeyError(elem.tag) + + self._children.append(elem) + self._childmap[elem.tag] = elem + + def extend(self, elems): + """Append children to the element.""" + + # Pre-evaluate the elements + elemmap = {} + elemlist = [] + for elem in elems: + # Unwrap templates... 
+ elem = elem.unwrap() + + # Avoid duplications + if elem.tag in self._childmap or elem.tag in elemmap: + raise KeyError(elem.tag) + + elemmap[elem.tag] = elem + elemlist.append(elem) + + # Update the children + self._children.extend(elemlist) + self._childmap.update(elemmap) + + def insert(self, idx, elem): + """Insert a child element at the given index.""" + + # Unwrap templates... + elem = elem.unwrap() + + # Avoid duplications + if elem.tag in self._childmap: + raise KeyError(elem.tag) + + self._children.insert(idx, elem) + self._childmap[elem.tag] = elem + + def remove(self, elem): + """Remove a child element.""" + + # Unwrap templates... + elem = elem.unwrap() + + # Check if element exists + if elem.tag not in self._childmap or self._childmap[elem.tag] != elem: + raise ValueError(_('element is not a child')) + + self._children.remove(elem) + del self._childmap[elem.tag] + + def get(self, key): + """Get an attribute. + + Returns a callable which performs datum selection. + + :param key: The name of the attribute to get. + """ + + return self.attrib[key] + + def set(self, key, value=None): + """Set an attribute. + + :param key: The name of the attribute to set. + + :param value: A callable taking an object and optional boolean + do_raise indicator and returning the datum bound + to the attribute. If None, a Selector() will be + constructed from the key. If a string, a + Selector() will be constructed from the string. 
+ """ + + # Convert value to a selector + if value is None: + value = Selector(key) + elif not callable(value): + value = Selector(value) + + self.attrib[key] = value + + def keys(self): + """Return the attribute names.""" + + return self.attrib.keys() + + def items(self): + """Return the attribute names and values.""" + + return self.attrib.items() + + def unwrap(self): + """Unwraps a template to return a template element.""" + + # We are a template element + return self + + def wrap(self): + """Wraps a template element to return a template.""" + + # Wrap in a basic Template + return Template(self) + + def apply(self, elem, obj): + """Apply text and attributes to an etree.Element. + + Applies the text and attribute instructions in the template + element to an etree.Element instance. + + :param elem: An etree.Element instance. + :param obj: The base object associated with this template + element. + """ + + # Start with the text... + if self.text is not None: + elem.text = six.text_type(self.text(obj)) + + # Now set up all the attributes... + for key, value in self.attrib.items(): + try: + elem.set(key, six.text_type(value(obj, True))) + except KeyError: + # Attribute has no value, so don't include it + pass + + def getAttrib(self, obj): + """Get attribute.""" + tmpattrib = {} + # Now set up all the attributes... + for key, value in self.attrib.items(): + try: + tmpattrib[key] = value(obj) + except KeyError: + # Attribute has no value, so don't include it + pass + return tmpattrib + + @staticmethod + def _splitTagName(name): + return _split_pattern.findall(name) + + def _render(self, parent, datum, patches, nsmap): + """Internal rendering. + + Renders the template node into an etree.Element object. + Returns the etree.Element object. + + :param parent: The parent etree.Element instance. + :param datum: The datum associated with this template element. + :param patches: A list of other template elements that must + also be applied. 
+ :param nsmap: An optional namespace dictionary to be + associated with the etree.Element instance. + """ + + # Allocate a node + if callable(self.tag): + tagname = self.tag(datum) + else: + tagname = self.tag + + # If the datum is None + if datum is not None: + tmpattrib = self.getAttrib(datum) + else: + tmpattrib = {} + + tagnameList = self._splitTagName(tagname) + insertIndex = 0 + + # If parent is not none and has same tagname + if parent is not None: + for i in range(0, len(tagnameList)): + tmpInsertPos = parent.find(tagnameList[i]) + if tmpInsertPos is None: + break + elif not operator.eq(parent.attrib, tmpattrib): + break + parent = tmpInsertPos + insertIndex = i + 1 + + if insertIndex >= len(tagnameList): + insertIndex = insertIndex - 1 + + # Create root elem + elem = etree.Element(tagnameList[insertIndex], nsmap=nsmap) + rootelem = elem + subelem = elem + + # Create subelem + for i in range((insertIndex + 1), len(tagnameList)): + subelem = etree.SubElement(elem, tagnameList[i]) + elem = subelem + + # If we have a parent, append the node to the parent + if parent is not None: + # If we can merge this element, then insert + if insertIndex > 0: + parent.insert(len(list(parent)), rootelem) + else: + parent.append(rootelem) + + # If the datum is None, do nothing else + if datum is None: + return rootelem + + # Apply this template element to the element + self.apply(subelem, datum) + + # Additionally, apply the patches + for patch in patches: + patch.apply(subelem, datum) + + # We have fully rendered the element; return it + return rootelem + + def render(self, parent, obj, patches=None, nsmap=None): + """Render an object. + + Renders an object against this template node. Returns a list + of two-item tuples, where the first item is an etree.Element + instance and the second item is the datum associated with that + instance. + + :param parent: The parent for the etree.Element instances. + :param obj: The object to render this template element + against. 
+ :param patches: A list of other template elements to apply + when rendering this template element. + :param nsmap: An optional namespace dictionary to attach to + the etree.Element instances. + """ + + patches = patches or [] + # First, get the datum we're rendering + data = None if obj is None else self.selector(obj) + + # Check if we should render at all + if not self.will_render(data): + return [] + elif data is None: + return [(self._render(parent, None, patches, nsmap), None)] + + # Make the data into a list if it isn't already + if not isinstance(data, list): + data = [data] + elif parent is None: + raise ValueError(_('root element selecting a list')) + + # Render all the elements + elems = [] + for datum in data: + if self.subselector is not None: + datum = self.subselector(datum) + elems.append((self._render(parent, datum, patches, nsmap), datum)) + + # Return all the elements rendered, as well as the + # corresponding datum for the next step down the tree + return elems + + def will_render(self, datum): + """Hook method. + + An overridable hook method to determine whether this template + element will be rendered at all. By default, returns False + (inhibiting rendering) if the datum is None. + + :param datum: The datum associated with this template element. + """ + + # Don't render if datum is None + return datum is not None + + def _text_get(self): + """Template element text. + + Either None or a callable taking an object and optional + boolean do_raise indicator and returning the datum bound to + the text of the template element. + """ + + return self._text + + def _text_set(self, value): + # Convert value to a selector + if value is not None and not callable(value): + value = Selector(value) + + self._text = value + + def _text_del(self): + self._text = None + + text = property(_text_get, _text_set, _text_del) + + def tree(self): + """Return string representation of the template tree. 
+ + Returns a representation of the template rooted at this + element as a string, suitable for inclusion in debug logs. + """ + + # Build the inner contents of the tag... + contents = [self.tag, '!selector=%r' % self.selector] + + # Add the text... + if self.text is not None: + contents.append('!text=%r' % self.text) + + # Add all the other attributes + for key, value in self.attrib.items(): + contents.append('%s=%r' % (key, value)) + + # If there are no children, return it as a closed tag + if len(self) == 0: + return '<%s/>' % ' '.join([str(i) for i in contents]) + + # OK, recurse to our children + children = [c.tree() for c in self] + + # Return the result + return ('<%s>%s' % + (' '.join(contents), ''.join(children), self.tag)) + + +def SubTemplateElement(parent, tag, attrib=None, selector=None, + subselector=None, **extra): + """Create a template element as a child of another. + + Corresponds to the etree.SubElement interface. Parameters are as + for TemplateElement, with the addition of the parent. + """ + + # Convert attributes + attrib = attrib or {} + attrib.update(extra) + + # Get a TemplateElement + elem = TemplateElement(tag, attrib=attrib, selector=selector, + subselector=subselector) + + # Append the parent safely + if parent is not None: + parent.append(elem) + + return elem + + +class Template(object): + """Represent a template.""" + + def __init__(self, root, nsmap=None): + """Initialize a template. + + :param root: The root element of the template. + :param nsmap: An optional namespace dictionary to be + associated with the root element of the + template. + """ + + self.root = root.unwrap() if root is not None else None + self.nsmap = nsmap or {} + self.serialize_options = dict(encoding='UTF-8', xml_declaration=True) + + def _serialize(self, parent, obj, siblings, nsmap=None): + """Internal serialization. + + Recursive routine to build a tree of etree.Element instances + from an object based on the template. 
Returns the first + etree.Element instance rendered, or None. + + :param parent: The parent etree.Element instance. Can be + None. + :param obj: The object to render. + :param siblings: The TemplateElement instances against which + to render the object. + :param nsmap: An optional namespace dictionary to be + associated with the etree.Element instance + rendered. + """ + + # First step, render the element + elems = siblings[0].render(parent, obj, siblings[1:], nsmap) + + # Now, traverse all child elements + seen = set() + for idx, sibling in enumerate(siblings): + for child in sibling: + # Have we handled this child already? + if child.tag in seen: + continue + seen.add(child.tag) + + # Determine the child's siblings + nieces = [child] + for sib in siblings[idx + 1:]: + if child.tag in sib: + nieces.append(sib[child.tag]) + + # Now call this function for all data elements recursively + for elem, datum in elems: + self._serialize(elem, datum, nieces) + + # Return the first element; at the top level, this will be the + # root element + if elems: + return elems[0][0] + + def serialize(self, obj, *args, **kwargs): + """Serialize an object. + + Serializes an object against the template. Returns a string + with the serialized XML. Positional and keyword arguments are + passed to etree.tostring(). + + :param obj: The object to serialize. + """ + + elem = self.make_tree(obj) + if elem is None: + return '' + + for k, v in self.serialize_options.items(): + kwargs.setdefault(k, v) + + # Serialize it into XML + return etree.tostring(elem, *args, **kwargs) + + def make_tree(self, obj): + """Create a tree. + + Serializes an object against the template. Returns an Element + node with appropriate children. + + :param obj: The object to serialize. 
# If the template is empty, return None
+ :param nsmap: An optional namespace dictionary to be + associated with the root element of the + template. + """ + + super(MasterTemplate, self).__init__(root, nsmap) + self.version = version + self.slaves = [] + + def __repr__(self): + """Return string representation of the template.""" + + return ("<%s.%s object version %s at %#x>" % + (self.__class__.__module__, self.__class__.__name__, + self.version, id(self))) + + def _siblings(self): + """Hook method for computing root siblings. + + An overridable hook method to return the siblings of the root + element. This is the root element plus the root elements of + all the slave templates. + """ + + return [self.root] + [slave.root for slave in self.slaves] + + def _nsmap(self): + """Hook method for computing the namespace dictionary. + + An overridable hook method to return the namespace dictionary. + The namespace dictionary is computed by taking the master + template's namespace dictionary and updating it from all the + slave templates. + """ + + nsmap = self.nsmap.copy() + for slave in self.slaves: + nsmap.update(slave._nsmap()) + return nsmap + + def attach(self, *slaves): + """Attach one or more slave templates. + + Attaches one or more slave templates to the master template. + Slave templates must have a root element with the same tag as + the master template. The slave template's apply() method will + be called to determine if the slave should be applied to this + master; if it returns False, that slave will be skipped. + (This allows filtering of slaves based on the version of the + master template.) 
+ """ + + slave_list = [] + for slave in slaves: + slave = slave.wrap() + + # Make sure we have a tree match + if slave.root.tag != self.root.tag: + msg = (_("Template tree mismatch; adding slave %(slavetag)s " + "to master %(mastertag)s") % + {'slavetag': slave.root.tag, + 'mastertag': self.root.tag}) + raise ValueError(msg) + + # Make sure slave applies to this template + if not slave.apply(self): + continue + + slave_list.append(slave) + + # Add the slaves + self.slaves.extend(slave_list) + + def copy(self): + """Return a copy of this master template.""" + + # Return a copy of the MasterTemplate + tmp = self.__class__(self.root, self.version, self.nsmap) + tmp.slaves = self.slaves[:] + return tmp + + +class SlaveTemplate(Template): + """Represent a slave template. + + Slave templates are versioned derivatives of templates. Each + slave has a minimum version and optional maximum version of the + master template to which they can be attached. + """ + + def __init__(self, root, min_vers, max_vers=None, nsmap=None): + """Initialize a slave template. + + :param root: The root element of the template. + :param min_vers: The minimum permissible version of the master + template for this slave template to apply. + :param max_vers: An optional upper bound for the master + template version. + :param nsmap: An optional namespace dictionary to be + associated with the root element of the + template. + """ + + super(SlaveTemplate, self).__init__(root, nsmap) + self.min_vers = min_vers + self.max_vers = max_vers + + def __repr__(self): + """Return string representation of the template.""" + + return ("<%s.%s object versions %s-%s at %#x>" % + (self.__class__.__module__, self.__class__.__name__, + self.min_vers, self.max_vers, id(self))) + + def apply(self, master): + """Hook method for determining slave applicability. + + An overridable hook method used to determine if this template + is applicable as a slave to a given master template. 
This + version requires the master template to have a version number + between min_vers and max_vers. + + :param master: The master template to test. + """ + + # Does the master meet our minimum version requirement? + if master.version < self.min_vers: + return False + + # How about our maximum version requirement? + if self.max_vers is not None and master.version > self.max_vers: + return False + + return True + + +class TemplateBuilder(object): + """Template builder. + + This class exists to allow templates to be lazily built without + having to build them each time they are needed. It must be + subclassed, and the subclass must implement the construct() + method, which must return a Template (or subclass) instance. The + constructor will always return the template returned by + construct(), or, if it has a copy() method, a copy of that + template. + """ + + _tmpl = None + + def __new__(cls, copy=True): + """Construct and return a template. + + :param copy: If True (the default), a copy of the template + will be constructed and returned, if possible. + """ + + # Do we need to construct the template? + if cls._tmpl is None: + tmp = super(TemplateBuilder, cls).__new__(cls) + + # Construct the template + cls._tmpl = tmp.construct() + + # If the template has a copy attribute, return the result of + # calling it + if copy and hasattr(cls._tmpl, 'copy'): + return cls._tmpl.copy() + + # Return the template + return cls._tmpl + + def construct(self): + """Construct a template. + + Called to construct a template instance, which it must return. + Only called once. + """ + + raise NotImplementedError(_("subclasses must implement construct()!")) + + +def make_links(parent, selector=None): + """Attach an Atom element to the parent.""" + + elem = SubTemplateElement(parent, '{%s}link' % XMLNS_ATOM, + selector=selector) + elem.set('rel') + elem.set('type') + elem.set('href') + + # Just for completeness... 
+ return elem + + +def make_flat_dict(name, selector=None, subselector=None, ns=None): + """Utility for simple XML templates. + + Simple templates are templates that traditionally used + XMLDictSerializer with no metadata. + + Returns a template element where the top-level element has the + given tag name, and where sub-elements have tag names derived + from the object's keys and text derived from the object's values. + + This only works for flat dictionary objects, not dictionaries + containing nested lists or dictionaries. + """ + + # Set up the names we need... + if ns is None: + elemname = name + else: + elemname = '{%s}%s' % (ns, name) + + if selector is None: + selector = name + + # Build the root element + root = TemplateElement(elemname, selector=selector, + subselector=subselector) + + # Return the template + return root diff --git a/venus/cmd/__init__.py b/venus/cmd/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/cmd/api.py b/venus/cmd/api.py new file mode 100644 index 0000000..8248919 --- /dev/null +++ b/venus/cmd/api.py @@ -0,0 +1,58 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Starter script for Venus OS API.""" + +import eventlet +import os +import sys + +from oslo_config import cfg +from oslo_log import log as logging +from oslo_reports import guru_meditation_report as gmr + +from venus.common import config # noqa +from venus import i18n +from venus import objects +from venus import service +from venus import utils +from venus import version + +eventlet.monkey_patch() +i18n.enable_lazy() + +CONF = cfg.CONF + + +def main(): + objects.register_all() + CONF(sys.argv[1:], project='venus', + version=version.version_string()) + logdir = CONF.log_dir + is_exits = os.path.exists(logdir) + if not is_exits: + os.makedirs(logdir) + logging.setup(CONF, "venus") + utils.monkey_patch() + + gmr.TextGuruMeditation.setup_autorun(version) + + launcher = service.process_launcher() + server = service.WSGIService('osapi_venus') + launcher.launch_service(server, workers=server.workers) + launcher.wait() + + +if __name__ == "__main__": + main() diff --git a/venus/cmd/manage.py b/venus/cmd/manage.py new file mode 100644 index 0000000..d8696a4 --- /dev/null +++ b/venus/cmd/manage.py @@ -0,0 +1,376 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" + CLI interface for venus management. 
+""" + +from __future__ import print_function + +import os +import sys + +from oslo_config import cfg +from oslo_db.sqlalchemy import migration +from oslo_log import log as logging + +from venus.common import config # noqa +from venus import context +from venus import db +from venus.db import migration as db_migration +from venus.db.sqlalchemy import api as db_api +from venus import i18n +from venus.i18n import _ +from venus import objects +from venus.task import timer +from venus import version + +i18n.enable_lazy() + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) + + +# Decorators for actions +def args(*args, **kwargs): + def _decorator(func): + func.__dict__.setdefault('args', []).insert(0, (args, kwargs)) + return func + + return _decorator + + +class ShellCommands(object): + def bpython(self): + """Runs a bpython shell. + + Falls back to Ipython/python shell if unavailable + """ + self.run('bpython') + + def ipython(self): + """Runs an Ipython shell. + + Falls back to Python shell if unavailable + """ + self.run('ipython') + + def python(self): + """Runs a python shell. + + Falls back to Python shell if unavailable + """ + self.run('python') + + @args('--shell', dest="shell", + metavar='', + help='Python shell') + def run(self, shell=None): + """Runs a Python interactive interpreter.""" + if not shell: + shell = 'bpython' + + if shell == 'bpython': + try: + import bpython + bpython.embed() + except ImportError: + shell = 'ipython' + if shell == 'ipython': + try: + from IPython import embed + embed() + except ImportError: + try: + # Ipython < 0.11 + # Explicitly pass an empty list as arguments, because + # otherwise IPython would use sys.argv from this script. + import IPython + + shell = IPython.Shell.IPShell(argv=[]) + shell.mainloop() + except ImportError: + # no IPython module + shell = 'python' + + if shell == 'python': + import code + try: + # Try activating rlcompleter, because it's handy. 
+ import readline + except ImportError: + pass + else: + # We don't have to wrap the following import in a 'try', + # because we already know 'readline' was imported successfully. + import rlcompleter # noqa + readline.parse_and_bind("tab:complete") + code.interact() + + # @args('--path', required=True, help='Script path') + # def script(self, path): + + +def _db_error(caught_exception): + print('%s' % caught_exception) + print(_("The above error may show that the database has not " + "been created.\nPlease create a database using " + "'venus-manage db sync' before running this command.")) + exit(1) + + +class DbCommands(object): + """Class for managing the database.""" + + def __init__(self): + pass + + @args('version', nargs='?', default=None, + help='Database version') + def sync(self, version=None): + """Sync the database up to the most recent version.""" + return db_migration.db_sync(version) + + def version(self): + """Print the current database version.""" + print(migration.db_version(db_api.get_engine(), + db_migration.MIGRATE_REPO_PATH, + db_migration.INIT_VERSION)) + + @args('age_in_days', type=int, + help='Purge deleted rows older than age in days') + def purge(self, age_in_days): + """Purge deleted rows older than a given age from venus tables.""" + age_in_days = int(age_in_days) + if age_in_days <= 0: + print(_("Must supply a positive, non-zero value for age")) + exit(1) + ctxt = context.get_admin_context() + db.purge_deleted_rows(ctxt, age_in_days) + + +class VersionCommands(object): + """Class for exposing the codebase version.""" + + def __init__(self): + pass + + def list(self): + print(version.version_string()) + + def __call__(self): + self.list() + + +class ConfigCommands(object): + """Class for exposing the flags defined by flag_file(s).""" + + def __init__(self): + pass + + @args('param', nargs='?', default=None, + help='Configuration parameter to display (default: %(default)s)') + def list(self, param=None): + """List parameters configured 
for venus. + + Lists all parameters configured for venus unless an optional argument + is specified. If the parameter is specified we only print the + requested parameter. If the parameter is not found an appropriate + error is produced by .get*(). + """ + param = param and param.strip() + if param: + print('%s = %s' % (param, CONF.get(param))) + else: + for key, value in CONF.items(): + print('%s = %s' % (key, value)) + + +class GetLogCommands(object): + """Get logging information.""" + + def errors(self): + """Get all of the errors from the log files.""" + error_found = 0 + if CONF.log_dir: + logs = [x for x in os.listdir(CONF.log_dir) if x.endswith('.log')] + for file in logs: + log_file = os.path.join(CONF.log_dir, file) + lines = [line.strip() for line in open(log_file, "r")] + lines.reverse() + print_name = 0 + for index, line in enumerate(lines): + if line.find(" ERROR ") > 0: + error_found += 1 + if print_name == 0: + print(log_file + ":-") + print_name = 1 + print(_("Line %(dis)d : %(line)s") % + {'dis': len(lines) - index, 'line': line}) + if error_found == 0: + print(_("No errors in logfiles!")) + + @args('num_entries', nargs='?', type=int, default=10, + help='Number of entries to list (default: %(default)d)') + def syslog(self, num_entries=10): + """Get of the venus syslog events.""" + entries = int(num_entries) + count = 0 + log_file = '' + if os.path.exists('/var/log/syslog'): + log_file = '/var/log/syslog' + elif os.path.exists('/var/log/messages'): + log_file = '/var/log/messages' + else: + print(_("Unable to find system log file!")) + sys.exit(1) + lines = [line.strip() for line in open(log_file, "r")] + lines.reverse() + print(_("Last %s venus syslog entries:-") % (entries)) + for line in lines: + if line.find("venus") > 0: + count += 1 + print(_("%s") % (line)) + if count == entries: + break + + if count == 0: + print(_("No venus entries in syslog!")) + + +class TaskCommands(object): + """Class for managing the database.""" + + def 
__init__(self): + pass + + def start(self): + timer.init_advanced_timer() + + +CATEGORIES = { + 'config': ConfigCommands, + 'db': DbCommands, + 'logs': GetLogCommands, + 'shell': ShellCommands, + 'version': VersionCommands, + 'task': TaskCommands +} + + +def methods_of(obj): + """Return non-private methods from an object. + + Get all callable methods of an object that don't start with underscore + :return: a list of tuples of the form (method_name, method) + """ + result = [] + for i in dir(obj): + if callable(getattr(obj, i)) and not i.startswith('_'): + result.append((i, getattr(obj, i))) + return result + + +def add_command_parsers(subparsers): + for category in CATEGORIES: + command_object = CATEGORIES[category]() + + parser = subparsers.add_parser(category) + parser.set_defaults(command_object=command_object) + + category_subparsers = parser.add_subparsers(dest='action') + + for (action, action_fn) in methods_of(command_object): + parser = category_subparsers.add_parser(action) + + action_kwargs = [] + for args, kwargs in getattr(action_fn, 'args', []): + parser.add_argument(*args, **kwargs) + + parser.set_defaults(action_fn=action_fn) + parser.set_defaults(action_kwargs=action_kwargs) + + +category_opt = cfg.SubCommandOpt('category', + title='Command categories', + handler=add_command_parsers) + + +def get_arg_string(args): + arg = None + if args[0] == '-': + # (Note)zhiteng: args starts with FLAGS.oparser.prefix_chars + # is optional args. Notice that cfg module takes care of + # actual ArgParser so prefix_chars is always '-'. 
+ if args[1] == '-': + # This is long optional arg + arg = args[2:] + else: + arg = args[1:] + else: + arg = args + + return arg + + +def fetch_func_args(func): + fn_args = [] + for args, kwargs in getattr(func, 'args', []): + arg = get_arg_string(args[0]) + fn_args.append(getattr(CONF.category, arg)) + + return fn_args + + +def main(): + objects.register_all() + """Parse options and call the appropriate class/method.""" + CONF.register_cli_opt(category_opt) + script_name = sys.argv[0] + if len(sys.argv) < 2: + print(_("\nOpenStack Venus version: %(version)s\n") % + {'version': version.version_string()}) + print(script_name + " category action []") + print(_("Available categories:")) + for category in CATEGORIES: + print(_("\t%s") % category) + sys.exit(2) + + try: + CONF(sys.argv[1:], project='venus', + version=version.version_string()) + logdir = CONF.log_dir + is_exits = os.path.exists(logdir) + if not is_exits: + os.makedirs(logdir) + logging.setup(CONF, "venus") + except cfg.ConfigDirNotFoundError as details: + print(_("Invalid directory: %s") % details) + sys.exit(2) + except cfg.ConfigFilesNotFoundError: + cfgfile = CONF.config_file[-1] if CONF.config_file else None + if cfgfile and not os.access(cfgfile, os.R_OK): + st = os.stat(cfgfile) + print(_("Could not read %s. 
Re-running with sudo") % cfgfile) + try: + os.execvp('sudo', ['sudo', '-u', '#%s' % st.st_uid] + sys.argv) + except Exception: + print(_('sudo failed, continuing as if nothing happened')) + + print(_('Please re-run venus-manage as root.')) + sys.exit(2) + + fn = CONF.category.action_fn + fn_args = fetch_func_args(fn) + fn(*fn_args) diff --git a/venus/common/__init__.py b/venus/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/common/config.py b/venus/common/config.py new file mode 100644 index 0000000..b381d4a --- /dev/null +++ b/venus/common/config.py @@ -0,0 +1,118 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import socket + +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import netutils + +from venus.i18n import _ + +CONF = cfg.CONF +logging.register_options(CONF) + +core_opts = [ + cfg.StrOpt('api_paste_config', + default="api-paste.ini", + help='File name for the paste.' 
+ 'deploy config for venus-api'), + cfg.StrOpt('state_path', + default='/var/lib/venus', + deprecated_name='pybasedir', + help="Top-level directory for " + "maintaining venus's state"), +] + +debug_opts = [ +] + +CONF.register_cli_opts(core_opts) +CONF.register_cli_opts(debug_opts) + +global_opts = [ + cfg.StrOpt('my_ip', + default=netutils.get_my_ipv4(), + help='IP address of this host'), + cfg.StrOpt('venusmanager_topic', + default='venus-venusmanager', + help='The topic that venusmanager nodes listen on'), + cfg.BoolOpt('enable_v1_api', + default=True, + help=_("DEPRECATED: Deploy v1 of the Venus API.")), + cfg.BoolOpt('api_rate_limit', + default=True, + help='Enables or disables rate limit of the API.'), + cfg.ListOpt('osapi_venus_ext_list', + default=[], + help='Specify list of extensions to load when using ' + 'osapi_venus_extension option with venus.api.' + 'contrib.select_extensions'), + cfg.MultiStrOpt('osapi_venus_extension', + default=['venus.api.contrib.standard_extensions'], + help='osapi venus extension to load'), + cfg.StrOpt('venusmanager_manager', + default='venus.venusmanager.' + 'manager.VenusmanagerManager', + help='Full class name for ' + 'the Manager for venusmanager'), + cfg.StrOpt('host', + default=socket.gethostname(), + help='Name of this node. This can be an opaque ' + 'identifier. 
It is not necessarily a host ' + 'name, FQDN, or IP address.'), + cfg.StrOpt('rootwrap_config', + default='/etc/venus/rootwrap.conf', + help='Path to the rootwrap configuration file to ' + 'use for running commands as root'), + cfg.BoolOpt('monkey_patch', + default=False, + help='Enable monkey patching'), + cfg.ListOpt('monkey_patch_modules', + default=[], + help='List of modules/decorators to monkey patch'), + cfg.StrOpt('venusmanager_api_class', + default='venus.venusmanager.api.API', + help='The full class name of the ' + 'venusmanager API class to use'), + cfg.StrOpt('auth_strategy', + default='keystone', + choices=['noauth', 'keystone', 'deprecated'], + help='The strategy to use for auth. Supports ' + 'noauth, keystone, ' + 'and deprecated.'), + cfg.StrOpt('os_privileged_user_name', + default=None, + help='OpenStack privileged account username. Used for ' + 'requests to other services (such as Nova) that ' + 'require an account with special rights.'), + cfg.StrOpt('os_privileged_user_password', + default=None, + help='Password associated with the OpenStack ' + 'privileged account.', + secret=True), + cfg.StrOpt('os_privileged_user_tenant', + default=None, + help='Tenant name associated with the OpenStack ' + 'privileged account.'), + cfg.StrOpt('os_privileged_user_auth_url', + default=None, + help='Auth URL associated with the OpenStack ' + 'privileged account.'), + cfg.StrOpt('os_region_name', + default='RegionOne', + help='os region name'), +] + +CONF.register_opts(global_opts) diff --git a/venus/common/utils.py b/venus/common/utils.py new file mode 100644 index 0000000..91e26a7 --- /dev/null +++ b/venus/common/utils.py @@ -0,0 +1,43 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
def request_es(url, method, data=None):
    """Issue an HTTP request against an Elasticsearch endpoint.

    :param url: full URL of the ES endpoint.
    :param method: "GET", "DELETE" or "POST"; anything else is rejected.
    :param data: JSON-serializable body, used only for POST.
    :returns: tuple of (HTTP status, stripped response body), or (0, None)
              for unsupported methods and on any transport error.
    """
    pool = urllib3.PoolManager(timeout=30.0)
    try:
        if method in ("GET", "DELETE"):
            # No body for read/delete requests.
            response = pool.request(method, url=url)
        elif method == "POST":
            response = pool.request(method, url=url, body=json.dumps(data))
        else:
            # Unsupported verb: signal failure without raising.
            return 0, None

        return response.status, response.data.strip()

    except Exception as err:
        LOG.error(_LE("request es, catch exception:%s"), six.text_type(err))
        return 0, None
+ +from oslo_config import cfg + +CONF = cfg.CONF diff --git a/venus/context.py b/venus/context.py new file mode 100644 index 0000000..de3743d --- /dev/null +++ b/venus/context.py @@ -0,0 +1,205 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""RequestContext: context for requests that persist through all of venus.""" + +import copy +import six + +from oslo_config import cfg +from oslo_context import context +from oslo_log import log as logging +from oslo_utils import timeutils + +from venus.i18n import _, _LW + +context_opts = [ + cfg.StrOpt('venus_internal_tenant_project_id', + default=None, + help='ID of the project which will be used as the Venus ' + 'internal tenant.'), + cfg.StrOpt('venus_internal_tenant_user_id', + default=None, + help='ID of the user to be used' + ' in venusmanager operations as the ' + 'Venus internal tenant.'), +] + +CONF = cfg.CONF +CONF.register_opts(context_opts) + +LOG = logging.getLogger(__name__) + + +class RequestContext(context.RequestContext): + """Security context and request information. + + Represents the user taking a given action within the system. + + """ + + def __init__(self, user_id, project_id, is_admin=None, read_deleted="no", + roles=None, project_name=None, remote_address=None, + timestamp=None, request_id=None, auth_token=None, + overwrite=True, quota_class=None, service_catalog=None, + domain=None, user_domain=None, project_domain=None, + **kwargs): + """Initialize RequestContext. 
+ + :param read_deleted: 'no' indicates deleted records are hidden, 'yes' + indicates deleted records are visible, 'only' indicates that + *only* deleted records are visible. + + :param overwrite: Set to False to ensure that the greenthread local + copy of the index is not overwritten. + + :param kwargs: Extra arguments that might be present, but we ignore + because they possibly came in from older rpc messages. + """ + + super(RequestContext, self).__init__(auth_token=auth_token, + user=user_id, + tenant=project_id, + domain=domain, + user_domain=user_domain, + project_domain=project_domain, + is_admin=is_admin, + request_id=request_id) + self.roles = roles or [] + self.project_name = project_name + self.read_deleted = read_deleted + self.remote_address = remote_address + if not timestamp: + timestamp = timeutils.utcnow() + elif isinstance(timestamp, six.string_types): + timestamp = timeutils.parse_isotime(timestamp) + self.timestamp = timestamp + self.quota_class = quota_class + + if service_catalog: + # Only include required parts of service_catalog + self.service_catalog = [s for s in service_catalog + if s.get('type') in + ('identity', 'compute', 'object-store')] + else: + # if list is empty or none + self.service_catalog = [] + + # # We need to have RequestContext attributes defined + # # when policy.check_is_admin invokes request logging + # # to make it loggable. 
+ # if self.is_admin is None: + # self.is_admin = policy.check_is_admin(self.roles, self) + # elif self.is_admin and 'admin' not in self.roles: + # self.roles.append('admin') + + def _get_read_deleted(self): + return self._read_deleted + + def _set_read_deleted(self, read_deleted): + if read_deleted not in ('no', 'yes', 'only'): + raise ValueError(_("read_deleted can only be one of 'no', " + "'yes' or 'only', not %r") % read_deleted) + self._read_deleted = read_deleted + + def _del_read_deleted(self): + del self._read_deleted + + read_deleted = property(_get_read_deleted, _set_read_deleted, + _del_read_deleted) + + def to_dict(self): + result = super(RequestContext, self).to_dict() + result['user_id'] = self.user_id + result['project_id'] = self.project_id + result['project_name'] = self.project_name + result['domain'] = self.domain + result['read_deleted'] = self.read_deleted + result['roles'] = self.roles + result['remote_address'] = self.remote_address + result['timestamp'] = self.timestamp.isoformat() + result['quota_class'] = self.quota_class + result['service_catalog'] = self.service_catalog + result['request_id'] = self.request_id + return result + + @classmethod + def from_dict(cls, values): + return cls(**values) + + def elevated(self, read_deleted=None, overwrite=False): + """Return a version of this context with admin flag set.""" + context = self.deepcopy() + context.is_admin = True + + if 'admin' not in context.roles: + context.roles.append('admin') + + if read_deleted is not None: + context.read_deleted = read_deleted + + return context + + def deepcopy(self): + return copy.deepcopy(self) + + # NOTE(sirp): the openstack/common version of RequestContext uses + # tenant/user whereas the Venus version uses project_id/user_id. + # NOTE(adrienverge): The Venus version of RequestContext now uses + # tenant/user internally, so it is compatible with context-aware code from + # openstack/common. We still need this shim for the rest of Venus's + # code. 
+ @property + def project_id(self): + return self.tenant + + @project_id.setter + def project_id(self, value): + self.tenant = value + + @property + def user_id(self): + return self.user + + @user_id.setter + def user_id(self, value): + self.user = value + + +def get_admin_context(read_deleted="no"): + return RequestContext(user_id=None, + project_id=None, + is_admin=True, + read_deleted=read_deleted, + overwrite=False) + + +def get_internal_tenant_context(): + """Build and return the Venus internal tenant context object + + This request context will only work for internal Venus operations. It will + not be able to make requests to remote services. To do so it will need to + use the keystone client to get an auth_token. + """ + project_id = CONF.venus_internal_tenant_project_id + user_id = CONF.venus_internal_tenant_user_id + + if project_id and user_id: + return RequestContext(user_id=user_id, + project_id=project_id, + is_admin=True) + else: + LOG.warning(_LW('Unable to get internal tenant context: Missing ' + 'required config parameters.')) + return None diff --git a/venus/db/__init__.py b/venus/db/__init__.py new file mode 100644 index 0000000..564b630 --- /dev/null +++ b/venus/db/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2010 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+""" +DB abstraction for Venus +""" diff --git a/venus/db/base.py b/venus/db/base.py new file mode 100644 index 0000000..d4d288c --- /dev/null +++ b/venus/db/base.py @@ -0,0 +1,39 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Base class for classes that need modular database access.""" + +from oslo_config import cfg +from oslo_utils import importutils + + +db_driver_opt = cfg.StrOpt('db_driver', + default='venus.db', + help='Driver to use for database access') + +CONF = cfg.CONF +CONF.register_opt(db_driver_opt) + + +class Base(object): + """DB driver is injected in the init method.""" + + def __init__(self, db_driver=None): + # NOTE(mriedem): Without this call, multiple inheritance involving + # the db Base class does not work correctly. + super(Base, self).__init__() + if not db_driver: + db_driver = CONF.db_driver + self.db = importutils.import_module(db_driver) # pylint: disable=C0103 + self.db.dispose_engine() diff --git a/venus/db/migration.py b/venus/db/migration.py new file mode 100644 index 0000000..456ac49 --- /dev/null +++ b/venus/db/migration.py @@ -0,0 +1,58 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
"""Database setup and migration commands."""

import os
import threading

from oslo_config import cfg
from oslo_db import options
from stevedore import driver

from venus.db.sqlalchemy import api as db_api

# Written as a plain 0: the previous ``000`` spelling is legal Python but
# reads like an octal literal and trips linters.
INIT_VERSION = 0

_IMPL = None
_LOCK = threading.Lock()

options.set_defaults(cfg.CONF)

# Absolute path of the sqlalchemy-migrate repository shipped with Venus.
MIGRATE_REPO_PATH = os.path.join(
    os.path.abspath(os.path.dirname(__file__)),
    'sqlalchemy',
    'migrate_repo',
)


def get_backend():
    """Return the migration backend driver, loading it on first use.

    Double-checked locking ensures concurrent first callers load the
    stevedore driver exactly once.
    """
    global _IMPL
    if _IMPL is None:
        with _LOCK:
            if _IMPL is None:
                _IMPL = driver.DriverManager(
                    "venus.database.migration_backend",
                    cfg.CONF.database.backend).driver
    return _IMPL


def db_sync(version=None, init_version=INIT_VERSION, engine=None):
    """Migrate the database to `version` or the most recent version.

    :param version: target schema version, or None for the latest.
    :param init_version: version to initialize an empty database at.
    :param engine: SQLAlchemy engine; defaults to the global engine.
    """
    if engine is None:
        engine = db_api.get_engine()
    return get_backend().db_sync(engine=engine,
                                 abs_path=MIGRATE_REPO_PATH,
                                 version=version,
                                 init_version=init_version)
"""Implementation of SQLAlchemy backend."""

import functools
import sqlalchemy
import sys
import threading
import warnings

from oslo_config import cfg
from oslo_db import options
from oslo_db.sqlalchemy import session as db_session
from oslo_log import log as logging
import osprofiler.sqlalchemy

from venus import exception
from venus.i18n import _

CONF = cfg.CONF
CONF.import_group("profiler", "venus.service")
log = logging.getLogger(__name__)

options.set_defaults(CONF, connection='sqlite:///$state_path/venus.sqlite')

_LOCK = threading.Lock()
_FACADE = None


def _create_facade_lazily():
    """Create the global EngineFacade on first use, under a lock."""
    global _LOCK
    with _LOCK:
        global _FACADE
        if _FACADE is None:
            _FACADE = db_session.EngineFacade(
                CONF.database.connection,
                **dict(CONF.database)
            )

            # Attach SQL tracing only when the profiler asks for it.
            if CONF.profiler.profiler_enabled:
                if CONF.profiler.trace_sqlalchemy:
                    osprofiler.sqlalchemy.add_tracing(sqlalchemy,
                                                      _FACADE.get_engine(),
                                                      "db")

        return _FACADE


def get_engine():
    """Return the global SQLAlchemy engine."""
    facade = _create_facade_lazily()
    return facade.get_engine()


def get_session(**kwargs):
    """Return a new session bound to the global engine."""
    facade = _create_facade_lazily()
    return facade.get_session(**kwargs)


def dispose_engine():
    """Dispose of the global engine's connection pool."""
    get_engine().dispose()


_DEFAULT_QUOTA_NAME = 'default'


def get_backend():
    """The backend is this module itself."""

    return sys.modules[__name__]


def is_admin_context(context):
    """Indicates if the request context is an administrator."""
    if not context:
        warnings.warn(_('Use of empty request context is deprecated'),
                      DeprecationWarning)
        raise Exception('die')
    return context.is_admin


def is_user_context(context):
    """Indicates if the request context is a normal user."""
    if not context:
        return False
    if context.is_admin:
        return False
    if not context.user_id or not context.project_id:
        return False
    return True


def authorize_project_context(context, project_id):
    """Ensures a request has permission to access the given project."""
    if is_user_context(context):
        if not context.project_id:
            raise exception.NotAuthorized()
        elif context.project_id != project_id:
            raise exception.NotAuthorized()


def authorize_user_context(context, user_id):
    """Ensures a request has permission to access the given user."""
    if is_user_context(context):
        if not context.user_id:
            raise exception.NotAuthorized()
        elif context.user_id != user_id:
            raise exception.NotAuthorized()


def authorize_quota_class_context(context, class_name):
    """Ensures a request has permission to access the given quota class."""
    if is_user_context(context):
        if not context.quota_class:
            raise exception.NotAuthorized()
        elif context.quota_class != class_name:
            raise exception.NotAuthorized()


def require_admin_context(f):
    """Decorator to require admin request context.

    The first argument to the wrapped function must be the context.
    """

    # functools.wraps preserves the wrapped function's name and docstring,
    # which the bare wrapper previously discarded.
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if not is_admin_context(args[0]):
            raise exception.AdminRequired()
        return f(*args, **kwargs)

    return wrapper


def require_context(f):
    """Decorator to require *any* user or admin context.

    This does no authorization for user or project access matching, see
    :py:func:`authorize_project_context` and
    :py:func:`authorize_user_context`.

    The first argument to the wrapped function must be the context.
    """

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if not is_admin_context(args[0]) and not is_user_context(args[0]):
            raise exception.NotAuthorized()
        return f(*args, **kwargs)

    return wrapper
+ +import os + +from migrate.versioning.shell import main + +from venus.db.sqlalchemy import migrate_repo + + +if __name__ == '__main__': + main(debug='False', + repository=os.path.abspath(os.path.dirname(migrate_repo.__file__))) diff --git a/venus/db/sqlalchemy/migrate_repo/migrate.cfg b/venus/db/sqlalchemy/migrate_repo/migrate.cfg new file mode 100644 index 0000000..1e0c914 --- /dev/null +++ b/venus/db/sqlalchemy/migrate_repo/migrate.cfg @@ -0,0 +1,20 @@ +[db_settings] +# Used to identify which repository this database is versioned under. +# You can use the name of your project. +repository_id=venus + +# The name of the database table used to track the schema version. +# This name shouldn't already be used by your project. +# If this is changed once a database is under version control, you'll need to +# change the table name in each database too. +version_table=migrate_version + +# When committing a change script, Migrate will attempt to generate the +# sql for all supported databases; normally, if one of them fails - probably +# because you don't have that database installed - it is ignored and the +# commit continues, perhaps ending successfully. +# Databases in this list MUST compile successfully during a commit, or the +# entire commit will fail. List the databases your application will actually +# be using to ensure your updates to that database work properly. +# This must be a list; example: ['postgres','sqlite'] +required_dbs=[] diff --git a/venus/db/sqlalchemy/migrate_repo/versions/001_venus_init.py b/venus/db/sqlalchemy/migrate_repo/versions/001_venus_init.py new file mode 100644 index 0000000..930dfa3 --- /dev/null +++ b/venus/db/sqlalchemy/migrate_repo/versions/001_venus_init.py @@ -0,0 +1,85 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
"""Initial Venus schema: register-task and custom-config tables."""

import sqlalchemy as sql
from sqlalchemy.orm import sessionmaker


def upgrade(migrate_engine):
    """Create the initial tables and seed their bootstrap rows.

    NOTE(review): ``t_mo_regitster_task`` carries a historical typo
    ("regitster"). It must be kept as-is: deployed databases and the model
    layer reference that exact table name.

    :param migrate_engine: SQLAlchemy engine bound to the target database.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    t_mo_regitster_task = sql.Table(
        't_mo_regitster_task',
        meta,
        sql.Column('Id', sql.Integer, nullable=False, primary_key=True),
        sql.Column('task_name', sql.String(255), nullable=True),
        sql.Column('host_name', sql.String(255), nullable=True),
        sql.Column('update_time', sql.DateTime, nullable=True,
                   default='0000-00-00 00:00:00'),
        sql.Column('created_at', sql.DateTime, nullable=True),
        sql.Column('updated_at', sql.DateTime, nullable=True),
        sql.Column('deleted', sql.String(1), nullable=True),
        sql.Column('deleted_at', sql.DateTime, nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    t_mo_regitster_task.create(migrate_engine, checkfirst=True)

    t_mo_custom_config = sql.Table(
        't_mo_custom_config',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('value', sql.String(10240), nullable=False),
        sql.Column('update_time', sql.DateTime),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    t_mo_custom_config.create(migrate_engine, checkfirst=True)

    # Seed both tables with a single session. The original code opened one
    # session per insert and never closed the first (session leak), and it
    # pointlessly re-declared each Table with autoload=True even though the
    # definitions were already registered in ``meta``.
    session = sessionmaker(bind=migrate_engine)()
    try:
        session.execute(t_mo_regitster_task.insert().values(
            Id='1',
            task_name='delete_es_index',
            host_name='',
            update_time='1900-01-01 00:00:00'))
        session.commit()

        session.execute(t_mo_custom_config.insert().values(
            id='es_index_length',
            value='30',
            update_time='1900-01-01 00:00:00'))
        session.commit()
    finally:
        session.close()
+ +""" + +import sys + +from oslo_config import cfg +from oslo_log import log as logging +from oslo_versionedobjects import exception as obj_exc +import six +import webob.exc + +from venus.i18n import _, _LE + + +LOG = logging.getLogger(__name__) + +exc_log_opts = [ + cfg.BoolOpt('fatal_exception_format_errors', + default=False, + help='Make exception message format errors fatal.'), +] + +CONF = cfg.CONF +CONF.register_opts(exc_log_opts) + + +class ConvertedException(webob.exc.WSGIHTTPException): + def __init__(self, code=500, title="", explanation=""): + self.code = code + self.title = title + self.explanation = explanation + super(ConvertedException, self).__init__() + + +class Error(Exception): + pass + + +class VenusException(Exception): + """Base Venus Exception + + To correctly use this class, inherit from it and define + a 'message' property. That message will get printf'd + with the keyword arguments provided to the constructor. + + """ + message = _("An unknown exception occurred.") + code = 500 + headers = {} + safe = False + + def __init__(self, message=None, **kwargs): + self.kwargs = kwargs + self.kwargs['message'] = message + + if 'code' not in self.kwargs: + try: + self.kwargs['code'] = self.code + except AttributeError: + pass + + for k, v in self.kwargs.items(): + if isinstance(v, Exception): + self.kwargs[k] = six.text_type(v) + + if self._should_format(): + try: + message = self.message % kwargs + + except Exception: + exc_info = sys.exc_info() + # kwargs doesn't match a variable in the message + # log the issue and the kwargs + LOG.exception(_LE('Exception in string format operation')) + for name, value in kwargs.items(): + LOG.error(_LE("%(name)s: %(value)s"), + {'name': name, 'value': value}) + if CONF.fatal_exception_format_errors: + six.reraise(*exc_info) + # at least get the core message out if something happened + message = self.message + elif isinstance(message, Exception): + message = six.text_type(message) + + # NOTE(luisg): We put the 
actual message in 'msg' so that we can access + # it, because if we try to access the message via 'message' it will be + # overshadowed by the class' message attribute + self.msg = message + super(VenusException, self).__init__(message) + + def _should_format(self): + return self.kwargs['message'] is None or '%(message)' in self.message + + def __unicode__(self): + return six.text_type(self.msg) + + +class NotAuthorized(VenusException): + message = _("Not authorized.") + code = 403 + + +class AdminRequired(NotAuthorized): + message = _("User does not have admin privileges") + + +class PolicyNotAuthorized(NotAuthorized): + message = _("Policy doesn't allow %(action)s to be performed.") + + +class Invalid(VenusException): + message = _("Unacceptable parameters.") + code = 400 + + +class InvalidResults(Invalid): + message = _("The results are invalid.") + + +class AuthFail(Invalid): + message = _("Authentication failure, " + "please check ip, port, " + "user name or password.") + + +class InvalidInput(Invalid): + message = _("Invalid input received: %(reason)s") + + +class InvalidContentType(Invalid): + message = _("Invalid content type %(content_type)s.") + + +class InvalidHost(Invalid): + message = _("Invalid host: %(reason)s") + + +# Cannot be templated as the error syntax varies. +# msg needs to be constructed when raised. 
+class InvalidParameterValue(Invalid): + message = _("%(err)s") + + +class InvalidAuthKey(Invalid): + message = _("Invalid auth key: %(reason)s") + + +class InvalidConfigurationValue(Invalid): + message = _('Value "%(value)s" is not valid for ' + 'configuration option "%(option)s"') + + +class ServiceUnavailable(Invalid): + message = _("Service is unavailable at this time.") + + +class InvalidUUID(Invalid): + message = _("Expected a uuid but received %(uuid)s.") + + +class APIException(VenusException): + message = _("Error while requesting %(service)s API.") + + def __init__(self, message=None, **kwargs): + if 'service' not in kwargs: + kwargs['service'] = 'unknown' + super(APIException, self).__init__(message, **kwargs) + + +class APITimeout(APIException): + message = _("Timeout while requesting %(service)s API.") + + +class NotFound(VenusException): + message = _("Resource could not be found.") + code = 404 + safe = True + + +class HostNotFound(NotFound): + message = _("Host %(host)s could not be found.") + + +class HostBinaryNotFound(NotFound): + message = _("Could not find binary %(binary)s on host %(host)s.") + + +class NoticeNotFound(NotFound): + message = _("Notice could not be found.") + + +class InvalidReservationExpiration(Invalid): + message = _("Invalid reservation expiration %(expire)s.") + + +class MalformedRequestBody(VenusException): + message = _("Malformed message body: %(reason)s") + + +class ConfigNotFound(NotFound): + message = _("Could not find config at %(path)s") + + +class ParameterNotFound(NotFound): + message = _("Could not find parameter %(param)s") + + +class PasteAppNotFound(NotFound): + message = _("Could not load paste app '%(name)s' from %(path)s") + + +class NoValidHost(VenusException): + message = _("No valid host was found. 
%(reason)s") + + +class NoMoreTargets(VenusException): + """No more available targets.""" + pass + + +class KeyManagerError(VenusException): + message = _("key manager error: %(reason)s") + + +class EvaluatorParseException(Exception): + message = _("Error during evaluator parsing: %(reason)s") + + +UnsupportedObjectError = obj_exc.UnsupportedObjectError +OrphanedObjectError = obj_exc.OrphanedObjectError +IncompatibleObjectVersion = obj_exc.IncompatibleObjectVersion +ReadOnlyFieldError = obj_exc.ReadOnlyFieldError +ObjectActionError = obj_exc.ObjectActionError +ObjectFieldInvalid = obj_exc.ObjectFieldInvalid diff --git a/venus/hacking/__init__.py b/venus/hacking/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/hacking/checks.py b/venus/hacking/checks.py new file mode 100644 index 0000000..8bedab8 --- /dev/null +++ b/venus/hacking/checks.py @@ -0,0 +1,200 @@ +# +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import re + +from hacking import core + +""" +Guidelines for writing new hacking checks + + - Use only for Magnum specific tests. OpenStack general tests + should be submitted to the common 'hacking' module. + - Pick numbers in the range M3xx. Find the current test with + the highest allocated number and then pick the next value. + If nova has an N3xx code for that test, use the same number. + - Keep the test method code in the source file ordered based + on the M3xx value. 
UNDERSCORE_IMPORT_FILES = []

mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
assert_equal_in_end_with_true_or_false_re = re.compile(
    r"assertEqual\((\w|[][.'\"])+ in (\w|[][.'\", ])+, (True|False)\)")
assert_equal_in_start_with_true_or_false_re = re.compile(
    r"assertEqual\((True|False), (\w|[][.'\"])+ in (\w|[][.'\", ])+\)")
assert_equal_with_is_not_none_re = re.compile(
    r"assertEqual\(.*?\s+is+\s+not+\s+None\)$")
assert_true_isinstance_re = re.compile(
    r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
    r"(\w|\.|\'|\"|\[|\])+\)\)")
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
assert_xrange_re = re.compile(
    r"\s*xrange\s*\(")
log_translation = re.compile(
    r"(.)*LOG\.(audit|error|critical)\(\s*('|\")")
log_translation_info = re.compile(
    r"(.)*LOG\.(info)\(\s*(_\(|'|\")")
log_translation_exception = re.compile(
    r"(.)*LOG\.(exception)\(\s*(_\(|'|\")")
log_translation_LW = re.compile(
    r"(.)*LOG\.(warning|warn)\(\s*(_\(|'|\")")
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
underscore_import_check = re.compile(r"(.)*import _(.)*")
translated_log = re.compile(
    r"(.)*LOG\.(audit|error|info|critical|exception)"
    r"\(\s*_\(\s*('|\")")
string_translation = re.compile(r"[^_]*_\(\s*('|\")")


@core.flake8ext
def no_mutable_default_args(logical_line):
    """M322: Forbid mutable default arguments in function signatures."""
    if not mutable_default_args.match(logical_line):
        return
    yield (0, "M322: Method's default argument shouldn't be mutable!")


@core.flake8ext
def assert_equal_not_none(logical_line):
    """M302: Forbid assertEqual(A is not None) constructs."""
    if assert_equal_with_is_not_none_re.search(logical_line):
        yield (0, "M302: assertEqual(A is not None) sentences not allowed.")


@core.flake8ext
def assert_true_isinstance(logical_line):
    """M316: Forbid assertTrue(isinstance(a, b)) constructs."""
    if not assert_true_isinstance_re.match(logical_line):
        return
    yield (0, "M316: assertTrue(isinstance(a, b)) sentences not allowed")


@core.flake8ext
def assert_equal_in(logical_line):
    """M338: Forbid assertEqual(A in B, True/False) membership checks."""
    hit = assert_equal_in_start_with_true_or_false_re.search(logical_line)
    if hit is None:
        hit = assert_equal_in_end_with_true_or_false_re.search(logical_line)
    if hit:
        yield (0, "M338: Use assertIn/NotIn(A, B) rather than "
                  "assertEqual(A in B, True/False) when checking collection "
                  "contents.")


@core.flake8ext
def no_xrange(logical_line):
    """M339: Forbid the Python 2 xrange() builtin."""
    if assert_xrange_re.match(logical_line):
        yield (0, "M339: Do not use xrange().")


@core.flake8ext
def use_timeutils_utcnow(logical_line, filename):
    """M310: Require oslo timeutils.utcnow() over datetime.now/utcnow."""
    # Helper scripts under /tools/ may use the stdlib datetime directly.
    if "/tools/" in filename:
        return

    template = "M310: timeutils.utcnow() must be used instead of datetime.%s()"
    for func in ('now', 'utcnow'):
        position = logical_line.find('datetime.%s' % func)
        if position != -1:
            yield (position, template % func)


@core.flake8ext
def dict_constructor_with_list_copy(logical_line):
    """M336: Require dict comprehensions over dict(list-of-pairs)."""
    if dict_constructor_with_list_copy_re.match(logical_line):
        yield (0, "M336: Must use a dict comprehension instead of a dict "
                  "constructor with a sequence of key-value pairs.")


@core.flake8ext
def no_log_warn(logical_line):
    """M352: Forbid the deprecated LOG.warn() spelling.

    Deprecated LOG.warn(), instead use LOG.warning
    https://bugs.launchpad.net/magnum/+bug/1508442
    """
    if "LOG.warn(" not in logical_line:
        return
    yield (0, "M352: LOG.warn is deprecated, please use LOG.warning!")


@core.flake8ext
def check_explicit_underscore_import(logical_line, filename):
    """M340: Require an explicit import of _ in files that translate.

    Files already known to import _ are remembered in
    UNDERSCORE_IMPORT_FILES so each file is only scanned until the
    import is found.
    """
    if filename in UNDERSCORE_IMPORT_FILES:
        return
    if (underscore_import_check.match(logical_line) or
            custom_underscore_check.match(logical_line)):
        UNDERSCORE_IMPORT_FILES.append(filename)
        return
    if (translated_log.match(logical_line) or
            string_translation.match(logical_line)):
        yield (0, "M340: Found use of _() without explicit import of _ !")
DOMAIN = 'venus'

_translators = i18n.TranslatorFactory(domain=DOMAIN)

# The primary translation function using the well-known name "_".
_ = _translators.primary

# Log-level translators; the short names mirror the "_" convention:
# "L" for log plus the first letter of the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical


def enable_lazy(enable=True):
    """Toggle lazy message translation via oslo_i18n."""
    return i18n.enable_lazy(enable)


def translate(value, user_locale=None):
    """Translate *value* for the given locale via oslo_i18n."""
    return i18n.translate(value, user_locale)


def get_available_languages():
    """Return the languages available for the venus domain."""
    return i18n.get_available_languages(DOMAIN)
Until these parts are changed to use oslo.i18n, Venus +# needs to do something to allow them to work. One option is to continue to +# initialize gettextutils, but with the way that Venus has initialization +# spread out over mutltiple entry points, we'll monkey-patch +# gettextutils._(), _LI(), etc., to use our oslo.i18n versions. + +# FIXME(dims): Remove the monkey-patching and update openstack-common.conf and +# do a sync with oslo-incubator to remove gettextutils once oslo-incubator +# isn't using oslo-incubator gettextutils any more. + +gettextutils._ = _ +gettextutils._LI = _LI +gettextutils._LW = _LW +gettextutils._LE = _LE +gettextutils._LC = _LC diff --git a/venus/locale/venus-log-info.pot b/venus/locale/venus-log-info.pot new file mode 100644 index 0000000..43bed7a --- /dev/null +++ b/venus/locale/venus-log-info.pot @@ -0,0 +1,5 @@ +# Translations template for venus. +# Copyright (C) 2016 ORGANIZATION +# This file is distributed under the same license as the venus project. +# FIRST AUTHOR , 2016. +# diff --git a/venus/locale/venus-log-warning.pot b/venus/locale/venus-log-warning.pot new file mode 100644 index 0000000..43bed7a --- /dev/null +++ b/venus/locale/venus-log-warning.pot @@ -0,0 +1,5 @@ +# Translations template for venus. +# Copyright (C) 2016 ORGANIZATION +# This file is distributed under the same license as the venus project. +# FIRST AUTHOR , 2016. +# diff --git a/venus/locale/venus.pot b/venus/locale/venus.pot new file mode 100644 index 0000000..43bed7a --- /dev/null +++ b/venus/locale/venus.pot @@ -0,0 +1,5 @@ +# Translations template for venus. +# Copyright (C) 2016 ORGANIZATION +# This file is distributed under the same license as the venus project. +# FIRST AUTHOR , 2016. 
+# diff --git a/venus/modules/__init__.py b/venus/modules/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/modules/custom_config/__init__.py b/venus/modules/custom_config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/modules/custom_config/action.py b/venus/modules/custom_config/action.py new file mode 100644 index 0000000..797d1b8 --- /dev/null +++ b/venus/modules/custom_config/action.py @@ -0,0 +1,34 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
class CustomConfigCore(object):
    """Thin facade over the custom-config SQL backend."""

    def __init__(self):
        super(CustomConfigCore, self).__init__()
        # All reads and writes are delegated to the SQL layer.
        self.config_sql = CustomConfigSql()

    def get_config(self, id):
        """Look up config entry *id*; returns {"value": <stored-or-None>}."""
        return {"value": self.config_sql.get_config(id)}

    def set_config(self, id, value):
        """Create or update config entry *id* with *value*."""
        return self.config_sql.set_config(id, value)
CONF = cfg.CONF
BASE = declarative_base()


class VenusBase(models.TimestampMixin,
                models.ModelBase):
    """Base class for Venus Models."""

    __table_args__ = {'mysql_engine': 'InnoDB'}

    # TODO(rpodolyaka): reuse models.SoftDeleteMixin in the next stage
    # of implementing of BP db-cleanup
    created_at = Column(DateTime)
    updated_at = Column(DateTime)
    deleted_at = Column(DateTime)
    # deleted = Column(Boolean, default=False)
    # NOTE(review): declared String(1) with an integer default, while
    # delete() assigns a bool -- looks inconsistent; confirm the intended
    # stored representation before changing it.
    deleted = Column(String(1), default=0)
    metadata = None

    def delete(self, session):
        """Soft-delete this object: flag it and stamp deleted_at."""
        self.deleted = True
        self.deleted_at = timeutils.utcnow()
        self.save(session=session)


def register_models():
    """Register Models and create metadata.

    Called from venus.db.sqlalchemy.__init__ as part of loading the driver,
    it will never need to be called explicitly elsewhere unless the
    connection is lost and needs to be reestablished.
    """
    from sqlalchemy import create_engine

    # NOTE(review): this local tuple shadows the imported oslo_db `models`
    # module inside this function and is currently empty, so no tables are
    # created here; models are expected to be added to it as they land.
    models = ()
    engine = create_engine(CONF.database.connection, echo=False)
    for model in models:
        model.metadata.create_all(engine)


class CustomConfig(BASE):
    """Key/value row backing the custom-config API."""
    __tablename__ = 't_mo_custom_config'
    id = Column(String(64), primary_key=True)
    value = Column(String(10240))
    update_time = Column(DateTime())
_LOCK = threading.Lock()
_FACADE = None


def _create_facade_lazily():
    """Create the module-wide EngineFacade on first use (thread-safe)."""
    global _FACADE
    with _LOCK:
        if _FACADE is None:
            _FACADE = db_session.EngineFacade(
                CONF.database.connection,
                **dict(CONF.database)
            )

            # Optionally wire SQL tracing into osprofiler.
            if (CONF.profiler.profiler_enabled and
                    CONF.profiler.trace_sqlalchemy):
                osprofiler.sqlalchemy.add_tracing(sqlalchemy,
                                                  _FACADE.get_engine(),
                                                  "db")

        return _FACADE


def get_session(**kwargs):
    """Return a new session from the lazily created facade."""
    return _create_facade_lazily().get_session(**kwargs)


class CustomConfigSql(object):
    """SQL accessor for the t_mo_custom_config table."""

    def get_config(self, id):
        """Return the stored value for *id*, or None when absent."""
        session = get_session()
        with session.begin():
            row = session.query(models.CustomConfig).filter_by(
                id=id).first()
            return None if row is None else row.value

    def set_config(self, id, value):
        """Insert or update row *id* with *value*.

        The update_time column is stamped with the local wall-clock time,
        formatted as '%Y-%m-%d %H:%M:%S'.
        """
        session = get_session()
        with session.begin():
            row = session.query(models.CustomConfig).filter_by(
                id=id).first()
            now = time.strftime('%Y-%m-%d %H:%M:%S',
                                time.localtime(time.time()))
            if row is None:
                session.add(models.CustomConfig(id=id,
                                                value=value,
                                                update_time=now))
            else:
                row.value = value
                row.update_time = now
class CustomConfigController(wsgi.Controller):
    """WSGI controller exposing get/set of custom config entries."""

    def __init__(self, ext_mgr):
        self.ext_mgr = ext_mgr
        self.config_api = CustomConfigCore()
        super(CustomConfigController, self).__init__()

    @wsgi.wrap_check_policy
    def get_config(self, req):
        """Return the config entry named by the `id` query parameter."""
        return self.config_api.get_config(req.params.get("id", None))

    @wsgi.wrap_check_policy
    def set_config(self, req, body):
        """Store `value` under `id`, both taken from the request body."""
        return self.config_api.set_config(body.get("id", None),
                                          body.get("value", None))


def create_resource(ext_mgr):
    """Build the WSGI resource wrapping CustomConfigController."""
    return wsgi.Resource(CustomConfigController(ext_mgr))
+ +import datetime +import json +import time + +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import timeutils + +from venus.common import utils +from venus.modules.search import es_template + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) + +""" +config the elasticsearch info +from /etc/venus/venus.conf +if not exists ,default +""" +elasticsearch_group = cfg.OptGroup(name='elasticsearch', + title='elasticsearch') + +elasticsearch_opts = [ + cfg.StrOpt('url', + default='', + help='the es url'), + cfg.StrOpt('username', + default='', + help='the es username'), + cfg.StrOpt('password', + default='', + help='the es password') +] +CONF.register_group(elasticsearch_group) +CONF.register_opts(elasticsearch_opts, elasticsearch_group) + + +class SearchCore(object): + def __init__(self): + self.elasticsearch_url = CONF.elasticsearch.url + self.flog_index_prefix = "flog" + self.slog_index_prefix = "slog" + super(SearchCore, self).__init__() + + def get_all_index(self, index_prefix): + url = self.elasticsearch_url + '/_cat/indices/' + index_prefix + '-*' + index_names = [] + status, indexes = utils.request_es(url, 'GET') + if status != 200: + LOG.error("failed to get all es indexes") + return "" + indexes_array = indexes.split('\n') + for index in indexes_array: + index_name = index.split(' ')[2] + index_names.append(index_name) + + return index_names + + def get_index_names(self, index_prefix, start_time, end_time): + start_time, end_time = start_time.date(), end_time.date() + exist_index_names = self.get_all_index(index_prefix) + names = [] + t = start_time + while t <= end_time: + name = index_prefix + "-" + t.strftime('%Y.%m.%d') + if name in exist_index_names: + names.append(name) + t = t + datetime.timedelta(days=1) + if len(names) == 0: + return None + + index_names = ",".join(names) + return index_names + + def get_interval(self, start_time, end_time): + diff = end_time - start_time + per_diff = diff / 60 + if per_diff <= 1: + 
return "1s", "", "1second" + elif per_diff <= 10: + return "10s", "10seconds", "10seconds" + elif per_diff <= 30: + return "30s", "30seconds", "30seconds" + elif per_diff <= 60: + return "1m", "1minute", "1minute" + elif per_diff <= 600: + return "10m", "10minutes", "10minutes" + elif per_diff <= 1800: + return "30m", "30minutes", "30minutes" + elif per_diff <= 3600: + return "1h", "1hour", "1hour" + elif per_diff <= 14400: + return "3h", "3hours", "3hours" + elif per_diff <= 21600: + return "6h", "6hours", "6hours" + elif per_diff <= 43200: + return "12h", "12hours", "12hours" + else: + return "24h", "1day", "1day" + + def params(self, type, module_name, index_type): + field = "" + if type == "host_name": + field = "Hostname.keyword" + elif type == "level": + field = "log_level.keyword" + elif type == "program_name": + field = "programname.keyword" + elif type == "module_name": + field = "Logger.keyword" + else: + return {"code": -1, "msg": "invalid param"} + + gen_params = {} + if module_name: + gen_params["Logger.keyword"] = module_name + must_params = self.generate_must(gen_params) + + if index_type is None: + index_type = self.flog_index_prefix + if (index_type != self.flog_index_prefix and + index_type != self.slog_index_prefix): + return {"code": -1, "msg": "invalid param"} + + end_time = timeutils.utcnow() + start_time = end_time - datetime.timedelta(days=7) + index_prefix = index_type + index_names = self.get_index_names(index_prefix, + start_time, end_time) + if index_names is None: + return {"code": 0, "msg": "no data, no index"} + url = self.elasticsearch_url + '/' + index_names + '/_search' + data = es_template.search_params(field, must_params) + + values = [] + status, text = utils.request_es(url, "POST", data) + if status != 200: + return {"code": -1, "msg": "internal error, bad request"} + res = json.loads(text) + + aggr = res.get("aggregations", None) + if aggr is None: + return {"code": 0, "msg": "no data, no aggregations"} + search_values = 
aggr.get("search_values", None) + if search_values is None: + return {"code": 0, "msg": "no data, no values"} + buckets = search_values.get("buckets", None) + if buckets is None: + return {"code": 0, "msg": "no data, no buckets"} + for bucket in buckets: + if type == "level": + v = bucket["key"] + vu = v.upper() + if vu not in values: + values.append(vu) + else: + values.append(bucket["key"]) + + values.sort() + if type == "level": + values.append("NO EXIST") + + return {"code": 1, "msg": "OK", "values": values} + + def generate_must(self, params): + must_params = [] + for (k, v) in params.items(): + if k == "log_level.keyword": + terms = {} + field = {} + vs = [] + vs.append(v) + if v.islower(): + vs.append(v.upper()) + else: + vs.append(v.lower()) + field[k] = vs + terms["terms"] = field + must_params.append(terms) + else: + match = {} + field = {} + q = {} + q["query"] = v + field[k] = q + match["match_phrase"] = field + must_params.append(match) + return must_params + + def generate_must_not(self, params): + must_not = [] + for (k, v) in params.items(): + if k == "log_level.keyword": + terms = {} + field = {} + field["field"] = v + terms["exists"] = field + must_not.append(terms) + return must_not + + def logs(self, host_name, module_name, program_name, + level, user_id, project_id, query, index_type, + start_time, end_time, page_num, page_size): + if (start_time is None or end_time is None or + page_num is None or page_size is None): + return {"code": -1, "msg": "invalid param"} + + if index_type is None: + index_type = self.flog_index_prefix + if (index_type != self.flog_index_prefix and + index_type != self.slog_index_prefix): + return {"code": -1, "msg": "invalid param"} + + size = int(page_size) + from_i = (int(page_num) - 1) * size + gen_params = {} + gen_not_params = {} + if host_name: + gen_params["Hostname.keyword"] = host_name + + if module_name: + gen_params["Logger.keyword"] = module_name + + if program_name: + gen_params["programname.keyword"] = 
program_name + + if level: + if level == "NO EXIST": + gen_not_params["log_level.keyword"] = "log_level" + else: + gen_params["log_level.keyword"] = level + + if user_id: + gen_params["user_id.keyword"] = user_id + + if project_id: + gen_params["tenant_id.keyword"] = project_id + + must_params = self.generate_must(gen_params) + must_not_params = self.generate_must_not(gen_not_params) + + if query is not None and query != "": + match = {} + field = {} + field["all_fields"] = True + field["analyze_wildcard"] = True + query = query.replace('"', '\\"') + field["query"] = '"' + query + '"' + match["query_string"] = field + must_params.append(match) + + match = {} + field = {} + q = {} + q["format"] = "epoch_millis" + q["gte"] = int(start_time) * 1000 + q["lte"] = int(end_time) * 1000 + field["@timestamp"] = q + match["range"] = field + must_params.append(match) + + t_start_time = datetime.datetime.utcfromtimestamp(int(start_time)) + t_end_time = datetime.datetime.utcfromtimestamp(int(end_time)) + + index_prefix = index_type + index_names = self.get_index_names(index_prefix, + t_start_time, t_end_time) + if index_names is None: + return {"code": 0, "msg": "no data, no index"} + interval, interval_cn, interval_en = \ + self.get_interval(int(start_time), int(end_time)) + url = self.elasticsearch_url + '/' + index_names + '/_search' + data = es_template.search_logs(must_params, must_not_params, + start_time, end_time, interval, + from_i, size) + + data_count = [] + res_values = [] + status, text = utils.request_es(url, "POST", data) + if status != 200: + return {"code": -1, "msg": "internal error, bad request"} + res = json.loads(text) + + aggr = res.get("aggregations", None) + if aggr is None: + return {"code": 0, "msg": "no data, no aggregations"} + search_values = aggr.get("data_count", None) + if search_values is None: + return {"code": 0, "msg": "no data, no count data"} + buckets = search_values.get("buckets", None) + if buckets is None: + return {"code": 0, "msg": 
"no data, no buckets"} + for bucket in buckets: + data_count.append(bucket) + hits1 = res.get("hits", None) + if hits1 is None: + return {"code": 0, "msg": "no data, no hit"} + hits = hits1.get("hits", None) + total = hits1.get("total", 0) + if hits is None: + return {"code": 0, "msg": "no data, no hit"} + for hit in hits: + d = {} + _source = hit.get("_source", None) + if _source is not None: + d["host_name"] = _source.get("Hostname", "") + d["time"] = _source.get("@timestamp", "") + d["level"] = _source.get("log_level", "") + d["desc"] = _source.get("Payload", "") + if d["desc"] == "": + d["desc"] = _source.get("message", "") + d["program_name"] = _source.get("programname", "") + d["user_id"] = _source.get("user_id", "") + d["project_id"] = _source.get("tenant_id", "") + d["module_name"] = _source.get("Logger", "") + res_values.append(d) + + ds = {} + ds["count"] = data_count + ds["interval_cn"] = interval_cn + ds["interval_en"] = interval_en + d = {} + d["total"] = total + d["values"] = res_values + return {"code": 1, "msg": "OK", "data_stats": ds, "data": d} + + def analyse_logs(self, group_name, host_name, module_name, + program_name, level, start_time, end_time): + gen_params = {} + gen_not_params = {} + title_cn_params = [] + title_en_params = [] + + if group_name == "host_name": + g_name = "Hostname.keyword" + title_cn = "Host Log Analysis Histogram TOP5".decode('utf-8') + title_en = "Host Log Analysis Histogram TOP5" + elif group_name == "program_name": + g_name = "programname.keyword" + title_cn = "Program Log Analysis Histogram TOP5".decode('utf-8') + title_en = "Program Log Analysis Histogram TOP5" + else: + return {"code": -1, "msg": "invalid param"} + + if host_name: + gen_params["Hostname.keyword"] = host_name + title_cn_params.append("host=".decode('utf-8') + host_name) + title_en_params.append("host=" + host_name) + + if module_name: + gen_params["Logger.keyword"] = module_name + title_cn_params.append("module=".decode('utf-8') + module_name) + 
title_en_params.append("module=" + module_name) + + if program_name: + gen_params["programname.keyword"] = program_name + title_cn_params.append("program=".decode('utf-8') + program_name) + title_en_params.append("program=" + program_name) + + if level: + if level == "NO EXIST": + gen_not_params["log_level.keyword"] = "log_level" + else: + gen_params["log_level.keyword"] = level + title_cn_params.append("level=".decode('utf-8') + level) + title_en_params.append("level=" + level) + + if len(title_cn_params) > 0: + title_cn = title_cn + " (" + " ".join(title_cn_params) + ")" + if len(title_en_params) > 0: + title_en = title_cn + " (" + " ".join(title_en_params) + ")" + + must_params = self.generate_must(gen_params) + must_not_params = self.generate_must_not(gen_not_params) + + match = {} + field = {} + q = {} + q["format"] = "epoch_millis" + q["gte"] = int(start_time) * 1000 + q["lte"] = int(end_time) * 1000 + field["@timestamp"] = q + match["range"] = field + must_params.append(match) + + t_start_time = datetime.datetime.utcfromtimestamp(int(start_time)) + t_end_time = datetime.datetime.utcfromtimestamp(int(end_time)) + index_names = self.get_index_names(self.flog_index_prefix, + t_start_time, t_end_time) + if index_names is None: + return {"code": 0, "msg": "no data, no index"} + url = self.elasticsearch_url + '/' + index_names + '/_search' + data = es_template.search_analyse_logs(must_params, + must_not_params, + g_name) + + status, text = utils.request_es(url, "POST", data) + if status != 200: + return {"code": -1, "msg": "internal error, bad request"} + res = json.loads(text) + aggr = res.get("aggregations", None) + if aggr is None: + return {"code": 0, "msg": "no data, no aggregations"} + search_values = aggr.get("data_count", None) + if search_values is None: + return {"code": 0, "msg": "no data, no count data"} + buckets = search_values.get("buckets", None) + if buckets is None: + return {"code": 0, "msg": "no data, no buckets"} + data_count = buckets + + d = 
{} + d["count"] = data_count + d["title_cn"] = title_cn + d["title_en"] = title_en + + return {"code": 1, "msg": "OK", "data": d} + + def typical_logs(self, type, start_time, end_time): + gen_params = {} + if type == "error_stats": + gen_params["log_level.keyword"] = "ERROR" + group_name = "programname.keyword" + return self. typical_stats( + gen_params, group_name, start_time, end_time) + elif type == "rabbitmq_error_stats": + gen_params["log_level.keyword"] = "ERROR" + rabbit_driver = "oslo.messaging._drivers.impl_rabbit" + gen_params["python_module.keyword"] = rabbit_driver + group_name = "programname.keyword" + return self. typical_stats( + gen_params, group_name, start_time, end_time) + elif type == "mysql_error_stats": + gen_params["log_level.keyword"] = "ERROR" + gen_params["python_module.keyword"] = "oslo_db.sqlalchemy.engines" + group_name = "programname.keyword" + return self. typical_stats( + gen_params, group_name, start_time, end_time) + elif type == "novalidhost_error_stats": + gen_params["log_level.keyword"] = "ERROR" + gen_params["query"] = "No valid host was found" + group_name = "programname.keyword" + return self. 
typical_stats( + gen_params, group_name, start_time, end_time) + else: + return {"code": -1, "msg": "invalid param"} + + def typical_stats(self, gen_params, group_field, start_time, end_time): + must_params = self.generate_must(gen_params) + match = {} + field = {} + q = {} + q["format"] = "epoch_millis" + q["gte"] = int(start_time) * 1000 + q["lte"] = int(end_time) * 1000 + field["@timestamp"] = q + match["range"] = field + must_params.append(match) + + t_start_time = datetime.datetime.utcfromtimestamp(int(start_time)) + t_end_time = datetime.datetime.utcfromtimestamp(int(end_time)) + index_names = self.get_index_names(self.flog_index_prefix, + t_start_time, t_end_time) + if index_names is None: + return {"code": 0, "msg": "no data, no index"} + + interval, interval_cn, interval_en = \ + self.get_interval(int(start_time), int(end_time)) + url = self.elasticsearch_url + '/' + index_names + '/_search' + data = es_template.search_typical_logs(must_params, group_field, + start_time, end_time, interval) + + data_stats = [] + status, text = utils.request_es(url, "POST", data) + if status != 200: + return {"code": -1, "msg": "internal error, bad request"} + res = json.loads(text) + + aggr = res.get("aggregations", None) + if aggr is None: + return {"code": 0, "msg": "no data, no aggregations"} + data_group = aggr.get("data_group", None) + if data_group is None: + return {"code": 0, "msg": "no data, no data group"} + buckets = data_group.get("buckets", None) + if buckets is None: + return {"code": 0, "msg": "no data, no buckets"} + for bucket in buckets: + d = {} + d["key"] = bucket.get("key", "") + d["total"] = bucket.get("doc_count", 0) + data_count = bucket.get("data_count", None) + if data_count is None: + continue + sub_buckets = data_count.get("buckets", None) + if sub_buckets is None: + continue + d["count"] = sub_buckets + data_stats.append(d) + + ds = {} + ds["stats"] = data_stats + ds["interval_cn"] = interval_cn + ds["interval_en"] = interval_en + return 
{"code": 1, "msg": "OK", "data": ds} + + def stat_instance_created_compute(self, request_id, uuid, index_names, + start_time, end_time): + gen_params = {} + gen_not_params = {} + gen_params["request_id.keyword"] = request_id + gen_params["programname.keyword"] = "nova-compute" + must_params = self.generate_must(gen_params) + must_not_params = self.generate_must_not(gen_not_params) + + match = {} + field = {} + field["all_fields"] = True + field["analyze_wildcard"] = True + field["query"] = '"' + uuid + '"' + match["query_string"] = field + must_params.append(match) + url = self.elasticsearch_url + '/' + index_names + '/_search' + data = es_template.search_logs(must_params, must_not_params, + start_time, end_time, "24h", + 0, 10000) + status, text = utils.request_es(url, "POST", data) + if status != 200: + return None, "internal error, bad request" + res = json.loads(text) + hits1 = res.get("hits", None) + if hits1 is None: + return [], "no data, no hit" + hits = hits1.get("hits", None) + if hits is None: + return [], "no data, no hit" + hostinfos = {} + for hit in hits: + info = {} + _source = hit.get("_source", None) + if _source is not None: + hostname = _source.get("Hostname", "") + if hostinfos.get(hostname, None) is None: + hostinfos[hostname] = [] + info["payload"] = _source.get("Payload", "") + info["time"] = _source.get("@timestamp", "") + hostinfos[hostname].append(info) + + res = [] + for (k, v) in hostinfos.items(): + r = {} + r["hostname"] = k + start_time = "" + end_time = "" + is_success = 0 + for i in v: + payload = i.get("payload", None) + if "Took" in payload and "seconds to build" in payload: + end_time = i.get("time", "") + is_success = 1 + if ("Enter inspur build_and_run_instance" in payload and + start_time == ""): + start_time = i.get("time", "") + + if is_success == 0 and len(v) > 0: + end_time = v[0].get("time", "") + start_time = v[len(v) - 1].get("time", "") + + r["is_success"] = is_success + r["start_time"] = start_time + r["end_time"] = 
end_time + res.append(r) + + def sort_time(e): + return e.get('start_time') + res.sort(key=sort_time) + + return res, None + + def stat_instance_created_other(self, index_names, params): + data = es_template.search_all_logs(params) + url = self.elasticsearch_url + '/' + index_names + '/_search' + status, text = utils.request_es(url, "POST", data) + if status != 200: + return [], "internal error, bad request" + json_text = json.loads(text) + hits1 = json_text.get("hits", None) + if hits1 is None: + return [], "no data, no hit" + hits = hits1.get("hits", None) + + hostinfos = {} + for hit in hits: + info = {} + _source = hit.get("_source", None) + if _source is not None: + hostname = _source.get("Hostname", "") + if hostinfos.get(hostname, None) is None: + hostinfos[hostname] = [] + info["level"] = _source.get("log_level", "") + info["time"] = _source.get("@timestamp", "") + hostinfos[hostname].append(info) + + res = [] + for (k, v) in hostinfos.items(): + r = {} + r["hostname"] = k + error_num = 0 + start_time = "" + end_time = "" + for i in v: + level = i.get("level") + if level == "ERROR" or level == "error": + error_num += 1 + + if len(v) > 0: + start_time = v[0].get("time", "") + end_time = v[len(v) - 1].get("time", "") + + r["log_num"] = len(v) + r["error_log_num"] = error_num + r["start_time"] = start_time + r["end_time"] = end_time + res.append(r) + + def sort_time(e): + return e.get('start_time') + res.sort(key=sort_time) + + return res, None + + def instance_call_chain(self, request_id, uuid): + end_time = int(time.time()) + start_time = end_time - 86400 * 365 + + t_start_time = datetime.datetime.utcfromtimestamp(int(start_time)) + t_end_time = datetime.datetime.utcfromtimestamp(int(end_time)) + index_names = self.get_index_names(self.flog_index_prefix, + t_start_time, t_end_time) + if index_names is None: + return {"code": 0, "msg": "no data, no index"} + + programs = ["nova-api", "nova-conductor", "nova-scheduler"] + res = {} + msg = "OK" + code = 1 + for 
p in programs: + gen_params = {} + gen_params["request_id.keyword"] = request_id + gen_params["programname.keyword"] = p + params = self.generate_must(gen_params) + match = {} + field = {} + q = {} + q["format"] = "epoch_millis" + q["gte"] = start_time * 1000 + q["lte"] = end_time * 1000 + field["@timestamp"] = q + match["range"] = field + params.append(match) + + d, r = self.stat_instance_created_other(index_names, params) + res[p] = d + if r is not None: + msg = r + code = -1 + + # for nova-compute + d, r = self.stat_instance_created_compute( + request_id, uuid, index_names, start_time, end_time) + res["nova-compute"] = d + if r is not None: + msg = r + code = -1 + + return {"code": code, "msg": msg, "data": res} diff --git a/venus/modules/search/es_template.py b/venus/modules/search/es_template.py new file mode 100644 index 0000000..65b5311 --- /dev/null +++ b/venus/modules/search/es_template.py @@ -0,0 +1,178 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
"""Implementation of Template.

Builders for the Elasticsearch query bodies used by the search module.
Each function returns a plain dict, ready to be JSON-encoded and POSTed
to an ES ``_search`` endpoint.
"""


def search_params(field, must_params):
    """Build a terms-aggregation query listing the values of *field*."""
    values_agg = {"terms": {"field": field, "size": 10000}}
    return {
        "aggs": {"search_values": values_agg},
        "query": {"bool": {"must": must_params}},
        "size": 0,
        "version": True,
    }


def search_logs(must_params, must_not_params, start_time,
                end_time, interval, from_i, size):
    """Build a paged log query plus a date-histogram aggregation."""
    histogram = {
        "field": "@timestamp",
        "interval": interval,
        "min_doc_count": 0,
        "time_zone": "Asia/Shanghai",
        "extended_bounds": {
            "min": int(start_time) * 1000,
            "max": int(end_time) * 1000,
        },
    }
    body = {
        "aggs": {"data_count": {"date_histogram": histogram}},
        "query": {
            "bool": {
                "must": must_params,
                "must_not": must_not_params,
            }
        },
        "script_fields": {},
        "from": from_i,
        "size": size,
        "sort": [
            {"@timestamp": {"order": "desc", "unmapped_type": "boolean"}}
        ],
    }
    return body


def search_analyse_logs(must_params, must_not_params, g_name):
    """Build a query for the five most frequent values of *g_name*."""
    top_terms = {
        "field": g_name,
        "order": {"_count": "desc"},
        "size": 5,
    }
    return {
        "aggs": {"data_count": {"terms": top_terms}},
        "query": {
            "bool": {
                "must": must_params,
                "must_not": must_not_params,
            }
        },
        "size": 0,
    }


def search_typical_logs(must_params, group_field, start_time,
                        end_time, interval):
    """Build a grouped date-histogram query for typical-log stats."""
    histogram = {
        "field": "@timestamp",
        "interval": interval,
        "min_doc_count": 0,
        "time_zone": "Asia/Shanghai",
        "extended_bounds": {
            "min": int(start_time) * 1000,
            "max": int(end_time) * 1000,
        },
    }
    group = {
        "aggs": {"data_count": {"date_histogram": histogram}},
        "terms": {
            "field": group_field,
            "order": {"_count": "desc"},
            "size": 10000,
        },
    }
    return {
        "aggs": {"data_group": group},
        "query": {"bool": {"must": must_params}},
        "size": 0,
        "version": True,
    }


def search_request_ids():
    """Build a query matching instance build_and_run_instance markers."""
    marker = {"match": {"Payload": "build_and_run_instance"}}
    return {
        "query": {"bool": {"must": [marker]}},
        "from": 0,
        "size": 10000,
    }


def search_all_logs(must_params):
    """Build an unpaged query returning logs in ascending time order."""
    return {
        "query": {"bool": {"must": must_params}},
        "size": 10000,
        "version": True,
        "sort": [
            {"@timestamp": {"order": "asc", "unmapped_type": "boolean"}}
        ],
    }

# ---------------------------------------------------------------------
# (original patch continues with venus/modules/search/search_lib.py)
# Copyright 2020 Inspur
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import re
# BUG FIX: ``import urlparse`` is Python 2 only; use the Python 3
# location while keeping the ``urlparse.urlparse(...)`` call sites.
from urllib import parse as urlparse

from elasticsearch import Elasticsearch
from oslo_config import cfg
from oslo_log import log as logging

CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# Elasticsearch connection settings come from the [elasticsearch]
# section of /etc/venus/venus.conf; empty defaults are used otherwise.
elasticsearch_group = cfg.OptGroup(name='elasticsearch',
                                   title='elasticsearch')

elasticsearch_opts = [
    cfg.StrOpt('url',
               default='',
               help='the es url'),
    cfg.StrOpt('username',
               default='',
               help='the es username'),
    cfg.StrOpt('password',
               default='',
               help='the es password')
]
CONF.register_group(elasticsearch_group)
CONF.register_opts(elasticsearch_opts, elasticsearch_group)


class ESSearchObj(object):
    """Thin wrapper around the Elasticsearch client used to fetch and
    analyse all log records belonging to one global request id."""

    def __init__(self):
        url = urlparse.urlparse(CONF.elasticsearch.url)
        self.es = Elasticsearch([url.hostname],
                                http_auth=(CONF.elasticsearch.username,
                                           CONF.elasticsearch.password),
                                port=url.port)

    def get_all_index(self):
        """Return all indices as a list of dicts (cat-indices API)."""
        return self.es.cat.indices(format="json")

    def _create_index(self, index_name):
        """Create *index_name* if it does not exist.

        :returns: the creation result, or None when the index already
                  exists.  BUG FIX: the original used attribute access
                  (``index.index``) on the dicts returned by
                  cat.indices(format="json") and returned an unbound
                  ``result`` when the index existed.
        """
        for index in self.get_all_index():
            if index.get("index") == index_name:
                return None
        return self.es.indices.create(index_name)

    def _get_index_info(self, index):
        # Placeholder kept for interface compatibility.
        pass

    def get_global_log(self, global_id):
        """Fetch and analyse all log records for one request id.

        :param global_id: "req-<uuid>" style request identifier.
        :returns: analysis dict, or {"error": ...} for a bad id.
        """
        ID_FORMAT = (r'^req-[a-f0-9]{8}-[a-f0-9]{4}-'
                     r'[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$')

        if not re.match(ID_FORMAT, global_id):
            return {"error": "the request param is not correct"}

        doc = {
            "query": {
                "term": {
                    "global_id.keyword": global_id
                }
            },
            "size": 10000,
        }
        result = self.es.search(index="flog*", body=doc)
        log_list = self.parse_result(result)
        self.sort_result_by_time(log_list)

        data = {}
        data["log_size"] = len(log_list)
        data["global_id"] = global_id
        data["analysis"] = self.analysis_log(log_list)

        return data

    def analysis_log(self, log_list):
        """Group logs by logger and program name with per-group stats."""
        data = {}
        # First pass: make sure every logger has a bucket.
        for log in log_list:
            logger = log["Logger"]
            if logger not in data:
                data[logger] = {}

        # Second pass: accumulate per-(logger, program) statistics.
        for log in log_list:
            programname = log["programname"]
            logger = log["Logger"]
            hostname = log["Hostname"]
            loglevel = log["log_level"]
            if programname not in data[logger]:
                # First record for this program: initialise the bucket.
                # log_list is already time-sorted, so this record also
                # defines the start of the window.
                data[logger][programname] = {}
                data[logger][programname]["log_list"] = []
                data[logger][programname]["log_list"].append(log)
                data[logger][programname]["host"] = []

                if hostname not in data[logger][programname]["host"]:
                    data[logger][programname]["host"].append(hostname)

                data[logger][programname]["start_time"] = log["timeutc"]
                data[logger][programname]["end_time"] = log["timeutc"]

                data[logger][programname]["log_total"] = 1
                data[logger][programname]["log_error"] = 0

                if self.get_log_level(loglevel) > 0:
                    data[logger][programname]["log_error"] = 1
            else:
                data[logger][programname]["log_list"].append(log)

                if hostname not in data[logger][programname]["host"]:
                    data[logger][programname]["host"].append(hostname)

                # Later records only push the end of the window forward.
                data[logger][programname]["end_time"] = log["timeutc"]

                data[logger][programname]["log_total"] += 1

                if self.get_log_level(loglevel) > 0:
                    data[logger][programname]["log_error"] += 1

        return self.sort_deal_data(data)

    def get_log_level(self, log_level):
        """Map a level name to a severity score; > 0 means error-like."""
        log_levels = {"trace": -10,
                      "notset": -8,
                      "debug": -8,
                      "warning": -3,
                      "info": 0,
                      "error": 10,
                      "fatal": 12,
                      "critical": 15}
        level = log_level.lower()
        if level in log_levels:
            return log_levels[level]
        # BUG FIX: the original called the non-existent LOG.waring()
        # and used an invalid "%S" format placeholder.
        LOG.warning("can't find the log level %s", log_level)
        return -1

    def sort_result_by_time(self, log_list):
        """Attach a parsed ``timeutc`` field and sort in place by it."""
        for log in log_list:
            # BUG FIX: do not encode() before strptime(); on Python 3
            # strptime() requires str, not bytes.
            log["timeutc"] = datetime.strptime(log["Timestamp"],
                                               "%Y-%m-%d %H:%M:%S.%f")

        log_list.sort(key=lambda logcontent: logcontent['timeutc'])

    def parse_result(self, result):
        """Extract the _source documents from an ES search response."""
        return [hit["_source"] for hit in result["hits"]["hits"]]

    def sort_deal_data(self, data):
        """Flatten the nested stats into time-sorted part/model lists."""
        for part in data:
            model_list = []
            for model in data.get(part):
                data.get(part).get(model)["model_name"] = model
                model_list.append(data.get(part).get(model))
                data[part][model] = None
            model_list.sort(key=lambda model: model['start_time'])
            data[part]['model_list'] = model_list
            data[part]['start_time'] = model_list[0]['start_time']
        new_data = {}
        part_list = []
        for part in data:
            data.get(part)["part_name"] = part
            part_list.append(data.get(part))
        part_list.sort(key=lambda part: part['start_time'])
        new_data["part_list"] = part_list

        return new_data

# ---------------------------------------------------------------------
# (original patch continues with venus/objects/__init__.py)
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# NOTE(comstud): You may scratch your head as you see code that imports
# this module and then accesses attributes for objects such as Instance,
# etc, yet you do not see these attributes in here. Never fear, there is
# a little bit of magic. When objects are registered, an attribute is set
# on this module automatically, pointing to the newest/latest version of
# the object.
+ + +def register_all(): + # NOTE(danms): You must make sure your object gets imported in this + # function in order for it to be registered by services that may + # need to receive it via RPC. + pass diff --git a/venus/objects/base.py b/venus/objects/base.py new file mode 100644 index 0000000..a24e65d --- /dev/null +++ b/venus/objects/base.py @@ -0,0 +1,156 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Venus common internal object model""" + +import contextlib +import datetime + +from oslo_log import log as logging +from oslo_versionedobjects import base +from oslo_versionedobjects import fields + +from venus import exception +from venus import objects + + +LOG = logging.getLogger('object') +remotable = base.remotable +remotable_classmethod = base.remotable_classmethod +obj_make_list = base.obj_make_list + + +class VenusObjectRegistry(base.VersionedObjectRegistry): + def rvenustration_hook(self, cls, index): + setattr(objects, cls.obj_name(), cls) + + +@VenusObjectRegistry.register +class VenusObject(base.VersionedObject): + # NOTE(thangp): OBJ_PROJECT_NAMESPACE needs to be set so that nova, + # venus, and other objects can exist on the same bus and be distinguished + # from one another. + OBJ_PROJECT_NAMESPACE = 'venus' + + # NOTE(thangp): As more objects are added to venus, each object should + # have a custom map of version compatibility. This just anchors the base + # version compatibility. 
+ VERSION_COMPATIBILITY = {'7.0.0': '1.0'} + + def venus_obj_get_changes(self): + """Returns a dict of changed fields with tz unaware datetimes. + + Any timezone aware datetime field will be converted to UTC timezone + and returned as timezone unaware datetime. + + This will allow us to pass these fields directly to a db update + method as they can't have timezone information. + """ + # Get dirtied/changed fields + changes = self.obj_get_changes() + + # Look for datetime objects that contain timezone information + for k, v in changes.items(): + if isinstance(v, datetime.datetime) and v.tzinfo: + # Remove timezone information and adjust the time according to + # the timezone information's offset. + changes[k] = v.replace(tzinfo=None) - v.utcoffset() + + # Return modified dict + return changes + + +class VenusObjectDictCompat(base.VersionedObjectDictCompat): + """Mix-in to provide dictionary key access compat. + + If an object needs to support attribute access using + dictionary items instead of object attributes, inherit + from this class. This should only be used as a temporary + measure until all callers are converted to use modern + attribute access. + + NOTE(berrange) This class will eventually be deleted. + """ + + def get(self, key, value=base._NotSpecifiedSentinel): + """For backwards-compatibility with dict-based objects. + + NOTE(danms): May be removed in the future. + """ + if key not in self.obj_fields: + # NOTE(jdg): There are a number of places where we rely on the + # old dictionary version and do a get(xxx, None). + # The following preserves that compatibility but in + # the future we'll remove this shim altogether so don't + # rely on it. 
+ LOG.debug('Venus object %(object_name)s has no ' + 'attribute named: %(attribute_name)s', + {'object_name': self.__class__.__name__, + 'attribute_name': key}) + return None + if (value != base._NotSpecifiedSentinel and + not self.obj_attr_is_set(key)): + return value + else: + return getattr(self, key) + + +class VenusPersistentObject(object): + """Mixin class for Persistent objects. + + This adds the fields that we use in common for all persistent objects. + """ + fields = { + 'created_at': fields.DateTimeField(nullable=True), + 'updated_at': fields.DateTimeField(nullable=True), + 'deleted_at': fields.DateTimeField(nullable=True), + 'deleted': fields.BooleanField(default=False), + } + + @contextlib.contextmanager + def obj_as_admin(self): + """Context manager to make an object call as an admin. + + This temporarily modifies the context embedded in an object to + be elevated() and restores it after the call completes. Example + usage: + + with obj.obj_as_admin(): + obj.save() + """ + if self._context is None: + raise exception.OrphanedObjectError(method='obj_as_admin', + objtype=self.obj_name()) + + original_context = self._context + self._context = self._context.elevated() + try: + yield + finally: + self._context = original_context + + +class VenusComparableObject(base.ComparableVersionedObject): + def __eq__(self, obj): + if hasattr(obj, 'obj_to_primitive'): + return self.obj_to_primitive() == obj.obj_to_primitive() + return False + + +class ObjectListBase(base.ObjectListBase): + pass + + +class VenusObjectSerializer(base.VersionedObjectSerializer): + OBJ_BASE_CLASS = VenusObject diff --git a/venus/openstack/__init__.py b/venus/openstack/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/openstack/common/README b/venus/openstack/common/README new file mode 100644 index 0000000..a27e008 --- /dev/null +++ b/venus/openstack/common/README @@ -0,0 +1,16 @@ +oslo-incubator +-------------- + +A number of modules from oslo-incubator are 
imported into this project. +You can clone the oslo-incubator repository using the following url: + + git://git.openstack.org/openstack/oslo-incubator + +These modules are "incubating" in oslo-incubator and are kept in sync +with the help of oslo-incubator's update.py script. See: + + https://wiki.openstack.org/wiki/Oslo#Syncing_Code_from_Incubator + +The copy of the code should never be directly modified here. Please +always update oslo-incubator first and then run the script to copy +the changes across. diff --git a/venus/openstack/common/__init__.py b/venus/openstack/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/openstack/common/_i18n.py b/venus/openstack/common/_i18n.py new file mode 100644 index 0000000..71ae3aa --- /dev/null +++ b/venus/openstack/common/_i18n.py @@ -0,0 +1,47 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""oslo.i18n integration module. + +See http://docs.openstack.org/developer/oslo.i18n/usage.html + +""" + +try: + import oslo_i18n + + # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the + # application name when this module is synced into the separate + # repository. It is OK to have more than one translation function + # using the same domain, since there will still only be one message + # catalog. 
+ _translators = oslo_i18n.TranslatorFactory(domain='venus') + + # The primary translation function using the well-known name "_" + _ = _translators.primary + + # Translators for log levels. + # + # The abbreviated names are meant to reflect the usual use of a short + # name like '_'. The "L" is for "log" and the other letter comes from + # the level. + _LI = _translators.log_info + _LW = _translators.log_warning + _LE = _translators.log_error + _LC = _translators.log_critical +except ImportError: + # NOTE(dims): Support for cases where a project wants to use + # code from oslo-incubator, but is not ready to be internationalized + # (like tempest) + _ = _LI = _LW = _LE = _LC = lambda x: x diff --git a/venus/openstack/common/config/__init__.py b/venus/openstack/common/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/openstack/common/config/generator.py b/venus/openstack/common/config/generator.py new file mode 100644 index 0000000..5f09795 --- /dev/null +++ b/venus/openstack/common/config/generator.py @@ -0,0 +1,286 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Extracts OpenStack config option info from module(s).""" + +from __future__ import print_function + +import argparse +import imp +import os +import re +import socket +import sys +import textwrap + +from oslo_config import cfg +from oslo_utils import importutils +import six + +from venus.openstack.common import gettextutils + +gettextutils.install('venus') + +STROPT = "StrOpt" +BOOLOPT = "BoolOpt" +INTOPT = "IntOpt" +FLOATOPT = "FloatOpt" +LISTOPT = "ListOpt" +DICTOPT = "DictOpt" +MULTISTROPT = "MultiStrOpt" + +OPT_TYPES = { + STROPT: 'string value', + BOOLOPT: 'boolean value', + INTOPT: 'integer value', + FLOATOPT: 'floating point value', + LISTOPT: 'list value', + DICTOPT: 'dict value', + MULTISTROPT: 'multi valued', +} + +OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, + FLOATOPT, LISTOPT, DICTOPT, + MULTISTROPT])) + +PY_EXT = ".py" +BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), + "../../../../")) +WORDWRAP_WIDTH = 60 + + +# def raise_extension_exception(extmanager, ep, err): +# raise + + +def generate(argv): + parser = argparse.ArgumentParser( + description='generate sample configuration file', + ) + parser.add_argument('-m', dest='modules', action='append') + parser.add_argument('-l', dest='libraries', action='append') + parser.add_argument('srcfiles', nargs='*') + parsed_args = parser.parse_args(argv) + + mods_by_pkg = dict() + for filepath in parsed_args.srcfiles: + pkg_name = filepath.split(os.sep)[1] + mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), + os.path.basename(filepath).split('.')[0]]) + mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) + # NOTE(lzyeval): place top level modules before packages + pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) + ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) + pkg_names.extend(ext_names) + + # opts_by_group is a mapping of group name to an options list + # The options list is a list of (module, options) 
tuples + opts_by_group = {'DEFAULT': []} + + if parsed_args.modules: + for module_name in parsed_args.modules: + module = _import_module(module_name) + if module: + for group, opts in _list_opts(module): + opts_by_group.setdefault(group, []).append((module_name, + opts)) + + for pkg_name in pkg_names: + mods = mods_by_pkg.get(pkg_name) + mods.sort() + for mod_str in mods: + if mod_str.endswith('.__init__'): + mod_str = mod_str[:mod_str.rfind(".")] + + mod_obj = _import_module(mod_str) + if not mod_obj: + raise RuntimeError("Unable to import module %s" % mod_str) + + for group, opts in _list_opts(mod_obj): + opts_by_group.setdefault(group, []).append((mod_str, opts)) + + print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) + for group in sorted(opts_by_group.keys()): + print_group_opts(group, opts_by_group[group]) + + +def _import_module(mod_str): + try: + if mod_str.startswith('bin.'): + imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) + return sys.modules[mod_str[4:]] + else: + return importutils.import_module(mod_str) + except Exception as e: + sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) + return None + + +def _is_in_group(opt, group): + "Check if opt is in group." + for value in group._opts.values(): + # NOTE(llu): Temporary workaround for bug #1262148, wait until + # newly released oslo.config support '==' operator. + if not(value['opt'] != opt): + return True + return False + + +def _guess_groups(opt, mod_obj): + # is it in the DEFAULT group? + if _is_in_group(opt, cfg.CONF): + return 'DEFAULT' + + # what other groups is it in? + for value in cfg.CONF.values(): + if isinstance(value, cfg.CONF.GroupAttr): + if _is_in_group(opt, value._group): + return value._group.name + + raise RuntimeError( + "Unable to find group for option %s, " + "maybe it's defined twice in the same group?" 
+ % opt.name + ) + + +def _list_opts(obj): + def is_opt(o): + return (isinstance(o, cfg.Opt) and + not isinstance(o, cfg.SubCommandOpt)) + + opts = list() + for attr_str in dir(obj): + attr_obj = getattr(obj, attr_str) + if is_opt(attr_obj): + opts.append(attr_obj) + elif (isinstance(attr_obj, list) and + all(map(lambda x: is_opt(x), attr_obj))): + opts.extend(attr_obj) + + ret = {} + for opt in opts: + ret.setdefault(_guess_groups(opt, obj), []).append(opt) + return ret.items() + + +def print_group_opts(group, opts_by_module): + print("[%s]" % group) + print('') + for mod, opts in opts_by_module: + print('#') + print('# Options defined in %s' % mod) + print('#') + print('') + for opt in opts: + _print_opt(opt) + print('') + + +def _get_my_ip(): + try: + csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + csock.connect(('8.8.8.8', 80)) + (addr, port) = csock.getsockname() + csock.close() + return addr + except socket.error: + return None + + +def _sanitize_default(name, value): + """Set up a reasonably sensible default for pybasedir, my_ip and host.""" + if value.startswith(sys.prefix): + # NOTE(jd) Don't use os.path.join, because it is likely to think the + # second part is an absolute pathname and therefore drop the first + # part. 
+ value = os.path.normpath("/usr/" + value[len(sys.prefix):]) + elif value.startswith(BASEDIR): + return value.replace(BASEDIR, '/usr/lib/python/site-packages') + elif BASEDIR in value: + return value.replace(BASEDIR, '') + elif value == _get_my_ip(): + return '10.0.0.1' + elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: + return 'venus' + elif value.strip() != value: + return '"%s"' % value + return value + + +def _print_opt(opt): + opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help + if not opt_help: + sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) + opt_help = "" + opt_type = None + try: + opt_type = OPTION_REGEX.search(str(type(opt))).group(0) + except (ValueError, AttributeError) as err: + sys.stderr.write("%s\n" % str(err)) + sys.exit(1) + opt_help = u'%s (%s)' % (opt_help, + OPT_TYPES[opt_type]) + print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) + if opt.deprecated_opts: + for deprecated_opt in opt.deprecated_opts: + if deprecated_opt.name: + deprecated_group = (deprecated_opt.group if + deprecated_opt.group else "DEFAULT") + print('# Deprecated group/name - [%s]/%s' % + (deprecated_group, + deprecated_opt.name)) + try: + if opt_default is None: + print('#%s=' % opt_name) + elif opt_type == STROPT: + assert(isinstance(opt_default, six.string_types)) + print('#%s=%s' % (opt_name, _sanitize_default(opt_name, + opt_default))) + elif opt_type == BOOLOPT: + assert(isinstance(opt_default, bool)) + print('#%s=%s' % (opt_name, str(opt_default).lower())) + elif opt_type == INTOPT: + assert(isinstance(opt_default, int) and + not isinstance(opt_default, bool)) + print('#%s=%s' % (opt_name, opt_default)) + elif opt_type == FLOATOPT: + assert(isinstance(opt_default, float)) + print('#%s=%s' % (opt_name, opt_default)) + elif opt_type == LISTOPT: + assert(isinstance(opt_default, list)) + print('#%s=%s' % (opt_name, ','.join(opt_default))) + elif opt_type == DICTOPT: + 
assert(isinstance(opt_default, dict)) + opt_default_strlist = [str(key) + ':' + str(value) + for (key, value) in opt_default.items()] + print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) + elif opt_type == MULTISTROPT: + assert(isinstance(opt_default, list)) + if not opt_default: + opt_default = [''] + for default in opt_default: + print('#%s=%s' % (opt_name, default)) + print('') + except Exception: + sys.stderr.write('Error in option "%s"\n' % opt_name) + sys.exit(1) + + +def main(): + generate(sys.argv[1:]) + + +if __name__ == '__main__': + main() diff --git a/venus/openstack/common/gettextutils.py b/venus/openstack/common/gettextutils.py new file mode 100644 index 0000000..7c5046e --- /dev/null +++ b/venus/openstack/common/gettextutils.py @@ -0,0 +1,437 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +gettext for openstack-common modules. + +Usual usage in an openstack.common module: + + from venus.openstack.common.gettextutils import _ +""" + +import copy +import gettext +import locale +from logging import handlers +import os +import oslo_i18n + +import six + +_AVAILABLE_LANGUAGES = {} + +# FIXME(dhellmann): Remove this when moving to oslo.i18n. +USE_LAZY = False + + +class TranslatorFactory(object): + """Create translator functions + """ + + def __init__(self, domain, localedir=None): + """Establish a set of translation functions for the domain. 
+
+        :param domain: Name of translation domain,
+                       specifying a message catalog.
+        :type domain: str
+        :param localedir: Directory with translation catalogs.
+        :type localedir: str
+        """
+        self.domain = domain
+        if localedir is None:
+            localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
+        self.localedir = localedir
+
+    def _make_translation_func(self, domain=None):
+        """Return a new translation function ready for use.
+
+        Takes into account whether or not lazy translation is being
+        done.
+
+        The domain can be specified to override the default from the
+        factory, but the localedir from the factory is always used
+        because we assume the log-level translation catalogs are
+        installed in the same directory as the main application
+        catalog.
+
+        """
+        if domain is None:
+            domain = self.domain
+        t = gettext.translation(domain,
+                                localedir=self.localedir,
+                                fallback=True)
+        # Use the appropriate method of the translation object based
+        # on the python version.
+        m = t.gettext if six.PY3 else t.ugettext
+
+        def f(msg):
+            """oslo.i18n.gettextutils translation function."""
+            if USE_LAZY:
+                return Message(msg, domain=domain)
+            return m(msg)
+        return f
+
+    @property
+    def primary(self):
+        "The default translation function."
+        return self._make_translation_func()
+
+    def _make_log_translation_func(self, level):
+        return self._make_translation_func(self.domain + '-log-' + level)
+
+    @property
+    def log_info(self):
+        "Translate info-level log messages."
+        return self._make_log_translation_func('info')
+
+    @property
+    def log_warning(self):
+        "Translate warning-level log messages."
+        return self._make_log_translation_func('warning')
+
+    @property
+    def log_error(self):
+        "Translate error-level log messages."
+        return self._make_log_translation_func('error')
+
+    @property
+    def log_critical(self):
+        "Translate critical-level log messages."
+ return self._make_log_translation_func('critical') + + +# NOTE(dhellmann): When this module moves out of the incubator into +# oslo.i18n, these global variables can be moved to an integration +# module within each application. + +# Create the global translation functions. +_translators = TranslatorFactory('venus') + +# The primary translation function using the well-known name "_" +_ = _translators.primary + +# Translators for log levels. +# +# The abbreviated names are meant to reflect the usual use of a short +# name like '_'. The "L" is for "log" and the other letter comes from +# the level. +_LI = _translators.log_info +_LW = _translators.log_warning +_LE = _translators.log_error +_LC = _translators.log_critical + +# NOTE(dhellmann): End of globals that will move to the application's +# integration module. + + +def enable_lazy(): + """Convenience function for configuring _() to use lazy gettext + + Call this at the start of execution to enable the gettextutils._ + function to use lazy gettext functionality. This is useful if + your project is importing _ directly instead of using the + gettextutils.install() way of importing the _ function. + """ + global USE_LAZY + USE_LAZY = True + + +def install(domain): + """Install a _() function using the given translation domain. + + Given a translation domain, install a _() function using gettext's + install() function. + + The main difference from gettext.install() is that we allow + overriding the default localedir (e.g. /usr/share/locale) using + a translation-domain-specific environment variable (e.g. + NOVA_LOCALEDIR). + + Note that to enable lazy translation, enable_lazy must be + called. + + :param domain: the translation domain + """ + from six import moves + tf = TranslatorFactory(domain) + moves.builtins.__dict__['_'] = tf.primary + + +class Message(six.text_type): + """A Message object is a unicode object that can be translated. + + Translation of Message is done explicitly using the translate() method. 
+ For all non-translation intents and purposes, a Message is simply unicode, + and can be treated as such. + """ + + def __new__(cls, msgid, msgtext=None, params=None, + domain='venus', *args): + """Create a new Message object. + + In order for translation to work gettext requires a message ID, this + msgid will be used as the base unicode text. It is also possible + for the msgid and the base unicode text to be different by passing + the msgtext parameter. + """ + # If the base msgtext is not given, we use the default translation + # of the msgid (which is in English) just in case the system locale is + # not English, so that the base text will be in that locale by default. + if not msgtext: + msgtext = Message._translate_msgid(msgid, domain) + # We want to initialize the parent unicode with the actual object that + # would have been plain unicode if 'Message' was not enabled. + msg = super(Message, cls).__new__(cls, msgtext) + msg.msgid = msgid + msg.domain = domain + msg.params = params + return msg + + def translate(self, desired_locale=None): + """Translate this message to the desired locale. + + :param desired_locale: The desired locale to translate the message to, + if no locale is provided the message will be + translated to the system's default locale. + + :returns: the translated message in unicode + """ + + translated_message = Message._translate_msgid(self.msgid, + self.domain, + desired_locale) + if self.params is None: + # No need for more translation + return translated_message + + # This Message object may have been formatted with one or more + # Message objects as substitution arguments, given either as a single + # argument, part of a tuple, or as one or more values in a dictionary. 
+ # When translating this Message we need to translate those Messages too + translated_params = _translate_args(self.params, desired_locale) + + translated_message = translated_message % translated_params + + return translated_message + + @staticmethod + def _translate_msgid(msgid, domain, desired_locale=None): + if not desired_locale: + system_locale = locale.getdefaultlocale() + # If the system locale is not available to the runtime use English + if not system_locale[0]: + desired_locale = 'en_US' + else: + desired_locale = system_locale[0] + + locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') + lang = gettext.translation(domain, + localedir=locale_dir, + languages=[desired_locale], + fallback=True) + if six.PY3: + translator = lang.gettext + else: + translator = lang.ugettext + + translated_message = translator(msgid) + return translated_message + + def __mod__(self, other): + # When we mod a Message we want the actual operation to be performed + # by the parent class (i.e. unicode()), the only thing we do here is + # save the original msgid and the parameters in case of a translation + params = self._sanitize_mod_params(other) + unicode_mod = super(Message, self).__mod__(params) + modded = Message(self.msgid, + msgtext=unicode_mod, + params=params, + domain=self.domain) + return modded + + def _sanitize_mod_params(self, other): + """Sanitize the object being modded with this Message. + + - Add support for modding 'None' so translation supports it + - Trim the modded object, which can be a large dictionary, to only + those keys that would actually be used in a translation + - Snapshot the object being modded, in case the message is + translated, it will be used as it was when the Message was created + """ + if other is None: + params = (other,) + elif isinstance(other, dict): + # Merge the dictionaries + # Copy each item in case one does not support deep copy. 
+ params = {} + if isinstance(self.params, dict): + for key, val in self.params.items(): + params[key] = self._copy_param(val) + for key, val in other.items(): + params[key] = self._copy_param(val) + else: + params = self._copy_param(other) + return params + + def _copy_param(self, param): + try: + return copy.deepcopy(param) + except Exception: + # Fallback to casting to unicode this will handle the + # python code-like objects that can't be deep-copied + return six.text_type(param) + + def __add__(self, other): + msg = _('Message objects do not support addition.') + raise TypeError(msg) + + def __radd__(self, other): + return self.__add__(other) + + if six.PY2: + def __str__(self): + # NOTE(luisg): Logging in python 2.6 tries to str() log records, + # and it expects specifically a UnicodeError in order to proceed. + msg = _('Message objects do not support str() because they may ' + 'contain non-ascii characters. ' + 'Please use unicode() or translate() instead.') + raise UnicodeError(msg) + + +def get_available_languages(domain): + """Lists the available languages for the given translation domain. + + :param domain: the domain to get languages for + """ + return oslo_i18n.get_available_languages(domain) + + +def translate(obj, desired_locale=None): + """Gets the translated unicode representation of the given object. + + If the object is not translatable it is returned as-is. + If the locale is None the object is translated to the system locale. 
+ + :param obj: the object to translate + :param desired_locale: the locale to translate the message to, if None the + default system locale will be used + :returns: the translated object in unicode, or the original object if + it could not be translated + """ + message = obj + if not isinstance(message, Message): + # If the object to translate is not already translatable, + # let's first get its unicode representation + message = six.text_type(obj) + if isinstance(message, Message): + # Even after unicoding() we still need to check if we are + # running with translatable unicode before translating + return message.translate(desired_locale) + return obj + + +def _translate_args(args, desired_locale=None): + """Translates all the translatable elements of the given arguments object. + + This method is used for translating the translatable values in method + arguments which include values of tuples or dictionaries. + If the object is not a tuple or a dictionary the object itself is + translated if it is translatable. + + If the locale is None the object is translated to the system locale. + + :param args: the args to translate + :param desired_locale: the locale to translate the args to, if None the + default system locale will be used + :returns: a new args object with the translated contents of the original + """ + if isinstance(args, tuple): + return tuple(translate(v, desired_locale) for v in args) + if isinstance(args, dict): + translated_dict = {} + for (k, v) in six.iteritems(args): + translated_v = translate(v, desired_locale) + translated_dict[k] = translated_v + return translated_dict + return translate(args, desired_locale) + + +class TranslationHandler(handlers.MemoryHandler): + """Handler that translates records before logging them. + + The TranslationHandler takes a locale and a target logging.Handler object + to forward LogRecord objects to after translating them. This handler + depends on Message objects being logged, instead of regular strings. 
+ + The handler can be configured declaratively in the logging.conf as follows: + + [handlers] + keys = translatedlog, translator + + [handler_translatedlog] + class = handlers.WatchedFileHandler + args = ('/var/log/api-localized.log',) + formatter = context + + [handler_translator] + class = openstack.common.log.TranslationHandler + target = translatedlog + args = ('zh_CN',) + + If the specified locale is not available in the system, the handler will + log in the default locale. + """ + + def __init__(self, locale=None, target=None): + """Initialize a TranslationHandler + + :param locale: locale to use for translating messages + :param target: logging.Handler object to forward + LogRecord objects to after translation + """ + # NOTE(luisg): In order to allow this handler to be a wrapper for + # other handlers, such as a FileHandler, and still be able to + # configure it using logging.conf, this handler has to extend + # MemoryHandler because only the MemoryHandlers' logging.conf + # parsing is implemented such that it accepts a target handler. 
+ handlers.MemoryHandler.__init__(self, capacity=0, target=target) + self.locale = locale + + def setFormatter(self, fmt): + self.target.setFormatter(fmt) + + def emit(self, record): + # We save the message from the original record to restore it + # after translation, so other handlers are not affected by this + original_msg = record.msg + original_args = record.args + + try: + self._translate_and_log_record(record) + finally: + record.msg = original_msg + record.args = original_args + + def _translate_and_log_record(self, record): + record.msg = translate(record.msg, self.locale) + + # In addition to translating the message, we also need to translate + # arguments that were passed to the log method that were not part + # of the main message e.g., log.info(_('Some message %s'), this_one)) + record.args = _translate_args(record.args, self.locale) + + self.target.emit(record) diff --git a/venus/service.py b/venus/service.py new file mode 100644 index 0000000..7125952 --- /dev/null +++ b/venus/service.py @@ -0,0 +1,368 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Generic Node base class for all workers that run on hosts.""" + + +import inspect +import os +import random + +from oslo_concurrency import processutils +from oslo_config import cfg +from oslo_log import log as logging +import oslo_messaging as messaging +from oslo_service import loopingcall +from oslo_service import service +from oslo_utils import importutils +import osprofiler.notifier +from osprofiler import profiler +import osprofiler.web + +from venus import context +from venus import exception +from venus.i18n import _, _LI, _LW +from venus.objects import base as objects_base +from venus import version +from venus.wsgi import common as wsgi_common +from venus.wsgi import eventlet_server as wsgi + +LOG = logging.getLogger(__name__) + +service_opts = [ + cfg.IntOpt('periodic_interval', + default=60, + help='Interval, in seconds, between running periodic tasks'), + cfg.IntOpt('periodic_fuzzy_delay', + default=60, + help='Range, in seconds, to randomly delay when starting the' + ' periodic task scheduler to reduce stampeding.' + ' (Disable by setting to 0)'), + cfg.StrOpt('osapi_venus_listen', + default="0.0.0.0", + help='IP address on which OpenStack Venus API listens'), + cfg.IntOpt('osapi_venus_listen_port', + default=8560, + min=1, max=65535, + help='Port on which OpenStack Venus API listens'), + cfg.IntOpt('osapi_venus_workers', + help='Number of workers for OpenStack Venus API service. 
' + 'The default is equal to the number of CPUs available.'), ] + +profiler_opts = [ + cfg.BoolOpt("profiler_enabled", default=False, + help=_('If False fully disable profiling feature.')), + cfg.BoolOpt("trace_sqlalchemy", default=False, + help=_("If False doesn't trace SQL requests.")) +] + +CONF = cfg.CONF +CONF.register_opts(service_opts) +CONF.register_opts(profiler_opts, group="profiler") + + +def setup_profiler(binary, host): + if CONF.profiler.profiler_enabled: + LOG.warning( + _LW("OSProfiler is enabled.\nIt means that person who knows " + "any of hmac_keys that are specified in " + "/etc/venus/api-paste.ini can trace his requests. \n" + "In real life only operator can read this file so there " + "is no security issue. Note that even if person can " + "trigger profiler, only admin user can retrieve trace " + "information.\n" + "To disable OSprofiler set in venus.conf:\n" + "[profiler]\nenabled=false")) + else: + osprofiler.web.disable() + + +class Service(service.Service): + """Service object for binaries running on hosts. + + A service takes a manager and enables rpc by listening to queues based + on topic. It also periodically runs tasks on the manager and reports + it state to the database services table. 
+ """ + + def __init__(self, host, binary, topic, manager, periodic_interval=None, + periodic_fuzzy_delay=None, service_name=None, + *args, **kwargs): + super(Service, self).__init__() + + + + self.host = host + self.binary = binary + self.topic = topic + self.manager_class_name = manager + manager_class = importutils.import_class(self.manager_class_name) + manager_class = profiler.trace_cls("rpc")(manager_class) + + self.manager = manager_class(host=self.host, + service_name=service_name, + *args, **kwargs) + self.periodic_interval = periodic_interval + self.periodic_fuzzy_delay = periodic_fuzzy_delay + self.saved_args, self.saved_kwargs = args, kwargs + self.timers = [] + + setup_profiler(binary, host) + self.rpcserver = None + + def start(self): + version_string = version.version_string() + LOG.info(_LI('Starting %(topic)s node (version %(version_string)s)'), + {'topic': self.topic, 'version_string': version_string}) + self.model_disconnected = False + self.manager.init_host() + LOG.debug("Creating RPC server for service %s", self.topic) + + target = messaging.Target(topic=self.topic, server=self.host) + endpoints = [self.manager] + endpoints.extend(self.manager.additional_endpoints) + serializer = objects_base.VenusObjectSerializer() + + + self.manager.init_host_with_rpc() + + if self.periodic_interval: + if self.periodic_fuzzy_delay: + initial_delay = random.randint(0, self.periodic_fuzzy_delay) + else: + initial_delay = None + + periodic = loopingcall.FixedIntervalLoopingCall( + self.periodic_tasks) + periodic.start(interval=self.periodic_interval, + initial_delay=initial_delay) + self.timers.append(periodic) + + def __getattr__(self, key): + manager = self.__dict__.get('manager', None) + return getattr(manager, key) + + @classmethod + def create(cls, host=None, binary=None, topic=None, manager=None, + periodic_interval=None, periodic_fuzzy_delay=None, + service_name=None): + """Instantiates class and passes back application object. 
+ + :param host: defaults to CONF.host + :param binary: defaults to basename of executable + :param topic: defaults to bin_name - 'venus-' part + :param manager: defaults to CONF._manager + :param periodic_interval: defaults to CONF.periodic_interval + :param periodic_fuzzy_delay: defaults to CONF.periodic_fuzzy_delay + + """ + if not host: + host = CONF.host + if not binary: + binary = os.path.basename(inspect.stack()[-1][1]) + if not topic: + topic = binary + if not manager: + subtopic = topic.rpartition('venus-')[2] + manager = CONF.get('%s_manager' % subtopic, None) + if periodic_interval is None: + periodic_interval = CONF.periodic_interval + if periodic_fuzzy_delay is None: + periodic_fuzzy_delay = CONF.periodic_fuzzy_delay + service_obj = cls(host, binary, topic, manager, + periodic_interval=periodic_interval, + periodic_fuzzy_delay=periodic_fuzzy_delay, + service_name=service_name) + + return service_obj + + def kill(self): + """Destroy the service object in the datastore.""" + self.stop() + + def stop(self): + # Try to shut the connection down, but if we get any sort of + # errors, go ahead and ignore them.. as we're shutting down anyway + try: + self.rpcserver.stop() + except Exception: + pass + for x in self.timers: + try: + x.stop() + except Exception: + pass + self.timers = [] + super(Service, self).stop() + + def wait(self): + for x in self.timers: + try: + x.wait() + except Exception: + pass + if self.rpcserver: + self.rpcserver.wait() + + def periodic_tasks(self, raise_on_error=False): + """Tasks to be run at a periodic interval.""" + ctxt = context.get_admin_context() + self.manager.periodic_tasks(ctxt, raise_on_error=raise_on_error) + + +class WSGIService(service.ServiceBase): + """Provides ability to launch API from a 'paste' configuration.""" + + def __init__(self, name, loader=None): + """Initialize, but do not start the WSGI server. + + :param name: The name of the WSGI server given to the loader. 
+ :param loader: Loads the WSGI application using the given name. + :returns: None + + """ + self.name = name + self.manager = self._get_manager() + self.loader = loader or wsgi_common.Loader() + self.app = self.loader.load_app(name) + self.host = getattr(CONF, '%s_listen' % name, "0.0.0.0") + self.port = getattr(CONF, '%s_listen_port' % name, 0) + self.workers = (getattr(CONF, '%s_workers' % name, None) or + processutils.get_worker_count()) + if self.workers and self.workers < 1: + worker_name = '%s_workers' % name + msg = (_("%(worker_name)s value of %(workers)d is invalid, " + "must be greater than 0.") % + {'worker_name': worker_name, + 'workers': self.workers}) + raise exception.InvalidInput(msg) + setup_profiler(name, self.host) + + self.server = wsgi.Server(name, + self.app, + host=self.host, + port=self.port) + + def _get_manager(self): + """Initialize a Manager object appropriate for this service. + + Use the service name to look up a Manager subclass from the + configuration and initialize an instance. If no class name + is configured, just return None. + + :returns: a Manager instance, or None. + + """ + fl = '%s_manager' % self.name + if fl not in CONF: + return None + + manager_class_name = CONF.get(fl, None) + if not manager_class_name: + return None + + manager_class = importutils.import_class(manager_class_name) + return manager_class() + + def start(self): + """Start serving this service using loaded configuration. + + Also, retrieve updated port number in case '0' was passed in, which + indicates a random port should be used. + + :returns: None + + """ + if self.manager: + self.manager.init_host() + self.server.start() + self.port = self.server.port + + def stop(self): + """Stop serving this API. + + :returns: None + + """ + self.server.stop() + + def wait(self): + """Wait for the service to stop serving this API. + + :returns: None + + """ + self.server.wait() + + def reset(self): + """Reset server greenpool size to default. 
+ + :returns: None + + """ + self.server.reset() + + +def process_launcher(): + return service.ProcessLauncher(CONF) + + +# NOTE(vish): the global launcher is to maintain the existing +# functionality of calling service.serve + +# service.wait +_launcher = None + + +def serve(server, workers=None): + global _launcher + if _launcher: + raise RuntimeError(_('serve() can only be called once')) + + _launcher = service.launch(CONF, server, workers=workers) + + +def wait(): + LOG.debug('Full set of CONF:') + for flag in CONF: + flag_get = CONF.get(flag, None) + # hide flag contents from log if contains a password + # should use secret flag when switch over to openstack-common + if ("_password" in flag or "_key" in flag or + (flag == "sql_connection" and + ("mysql:" in flag_get or "postgresql:" in flag_get))): + LOG.debug('%s : FLAG SET ', flag) + else: + LOG.debug('%(flag)s : %(flag_get)s', + {'flag': flag, 'flag_get': flag_get}) + try: + _launcher.wait() + except KeyboardInterrupt: + _launcher.stop() + + +class Launcher(object): + def __init__(self): + self.launch_service = serve + self.wait = wait + + +def get_launcher(): + # Note(lpetrut): ProcessLauncher uses green pipes which fail on Windows + # due to missing support of non-blocking I/O pipes. For this reason, the + # service must be spawned differently on Windows, using the ServiceLauncher + # class instead. + if os.name == 'nt': + return Launcher() + else: + return process_launcher() diff --git a/venus/task/__init__.py b/venus/task/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/task/adapter.py b/venus/task/adapter.py new file mode 100644 index 0000000..aa56ae7 --- /dev/null +++ b/venus/task/adapter.py @@ -0,0 +1,26 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A Timer Task With APScheduler.""" +from oslo_config import cfg +from oslo_log import log as logging +from venus.task.core import delete_es_index_task + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) + + +def delete_es_index_job(): + job = delete_es_index_task.DeleteESIndexTask() + job.start_task() diff --git a/venus/task/backends/__init__.py b/venus/task/backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/task/backends/models.py b/venus/task/backends/models.py new file mode 100644 index 0000000..135b7d3 --- /dev/null +++ b/venus/task/backends/models.py @@ -0,0 +1,71 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +SQLAlchemy models for venus data. 
+"""
+
+from oslo_config import cfg
+from oslo_db.sqlalchemy import models
+from oslo_utils import timeutils
+from sqlalchemy import Column
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import DateTime, String
+CONF = cfg.CONF
+BASE = declarative_base()
+
+
+class VenusBase(models.TimestampMixin,
+                models.ModelBase):
+    """Base class for Venus Models."""
+
+    __table_args__ = {'mysql_engine': 'InnoDB'}
+
+    # TODO(rpodolyaka): reuse models.SoftDeleteMixin in the next stage
+    # of implementing of BP db-cleanup
+    created_at = Column(DateTime)
+    updated_at = Column(DateTime)
+    deleted_at = Column(DateTime)
+    # deleted = Column(Boolean, default=False)
+    deleted = Column(String(1), default=0)
+    metadata = None
+
+    def delete(self, session):
+        """Delete this object."""
+        self.deleted = True
+        self.deleted_at = timeutils.utcnow()
+        self.save(session=session)
+
+
+def register_models():
+    """Register Models and create metadata.
+
+    Called from venus.db.sqlalchemy.__init__ as part of loading the driver,
+    it will never need to be called explicitly elsewhere unless the
+    connection is lost and needs to be reestablished.
+    """
+    from sqlalchemy import create_engine
+
+    models = ()
+    engine = create_engine(CONF.database.connection, echo=False)
+    for model in models:
+        model.metadata.create_all(engine)
+
+
+class RegitsterTask(BASE, VenusBase):
+    __tablename__ = 't_mo_regitster_task'
+    Id = Column(String(11), primary_key=True)
+    task_name = Column(String(255))
+    host_name = Column(String(255))
+    update_time = Column(DateTime()) diff --git a/venus/task/backends/sql.py b/venus/task/backends/sql.py new file mode 100644 index 0000000..6b89e66 --- /dev/null +++ b/venus/task/backends/sql.py @@ -0,0 +1,91 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License.
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Implementation of SQLAlchemy backend.""" + +from oslo_config import cfg +from oslo_db import options +from oslo_db.sqlalchemy import session as db_session +from oslo_log import log as logging +import osprofiler.sqlalchemy +import socket +import sqlalchemy +import threading +import time + +from venus.i18n import _LE +from venus.task.backends import models + +CONF = cfg.CONF +CONF.import_group("profiler", "venus.service") +log = logging.getLogger(__name__) +options.set_defaults(CONF, connection='sqlite:///$state_path/venus.sqlite') + +_LOCK = threading.Lock() +_FACADE = None + + +def _create_facade_lazily(): + global _LOCK + with _LOCK: + global _FACADE + if _FACADE is None: + _FACADE = db_session.EngineFacade( + CONF.database.connection, + **dict(CONF.database) + ) + + if CONF.profiler.profiler_enabled: + if CONF.profiler.trace_sqlalchemy: + osprofiler.sqlalchemy.add_tracing(sqlalchemy, + _FACADE.get_engine(), + "db") + + return _FACADE + + +def get_session(**kwargs): + facade = _create_facade_lazily() + return facade.get_session(**kwargs) + + +class TaskSql(object): + def check_task(self, t_name): + session = get_session() + with session.begin(): + res = False + hostname = socket.gethostname() + now = time.time() + tasks = session.query(models.RegitsterTask).filter_by( + task_name=t_name).with_lockmode('update').all() + if len(tasks) != 1: + log.error(_LE("unsuported task type:%s, please check it"), + t_name) + return False + + if tasks[0].update_time is None or (now - time.mktime( + time.strptime(str(tasks[0].update_time), + '%Y-%m-%d %H:%M:%S'))) > 
600: + tasks[0].host_name = hostname + tasks[0].update_time = time.strftime('%Y-%m-%d %H:%M:%S', + time.localtime(now)) + res = True + else: + if tasks[0].host_name == hostname: + tasks[0].update_time = time.strftime('%Y-%m-%d %H:%M:%S', + time.localtime(now)) + res = True + else: + res = False + return res diff --git a/venus/task/core/__init__.py b/venus/task/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/task/core/delete_es_index_task.py b/venus/task/core/delete_es_index_task.py new file mode 100644 index 0000000..589354a --- /dev/null +++ b/venus/task/core/delete_es_index_task.py @@ -0,0 +1,111 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import datetime +import six +import time + +from oslo_config import cfg +from oslo_log import log as logging + +from venus.common import utils +from venus.modules.custom_config.backends.sql import CustomConfigSql +from venus.i18n import _LE, _LI +from venus.task.backends.sql import TaskSql + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) + +""" +config the elasticsearch info +from /etc/venus/venus.conf +if not exists ,default +""" +elasticsearch_group = cfg.OptGroup(name='elasticsearch', + title='elasticsearch') + +elasticsearch_opts = [ + cfg.StrOpt('url', + default='', + help='the es url'), + cfg.StrOpt('username', + default='', + help='the es username'), + cfg.StrOpt('password', + default='', + help='the es password') +] +CONF.register_group(elasticsearch_group) +CONF.register_opts(elasticsearch_opts, elasticsearch_group) + +TASK_NAME = "delete_es_index" + + +class DeleteESIndexTask(object): + """delete es index task.""" + + def __init__(self): + self.elasticsearch_url = CONF.elasticsearch.url + self.custom_sql = CustomConfigSql() + self.task_sql = TaskSql() + + def delete_index(self, name): + url = self.elasticsearch_url + '/' + name + status, text = utils.request_es(url, "DELETE") + if status != 200: + LOG.error(_LE("failed to delete es index")) + return + + def delete_es_history_index(self): + len_d = self.custom_sql.get_config("es_index_length") + if len_d is None: + LOG.error(_LE("es_index_length no exist")) + return + + today = time.strftime('%Y-%m-%d') + url = self.elasticsearch_url + '/_cat/indices/*log-*' + status, indexes = utils.request_es(url, "GET") + if status != 200: + LOG.error(_LE("failed to get es indexes")) + return + indexes_array = indexes.split('\n') + for index in indexes_array: + index_name = index.split(' ')[2] + index_day = index_name.split('-')[1] + diff_day = datetime.datetime.strptime(today, "%Y-%m-%d") - \ + datetime.datetime.strptime(index_day, '%Y.%m.%d') + if diff_day.days >= int(len_d): + self.delete_index(index_name) + 
+ def start_task(self): + try: + LOG.info(_LI("delete es index task started")) + ret = self.task_sql.check_task(TASK_NAME) + if ret is not True: + LOG.info(_LI("delete es index task not need execute")) + return + + if CONF.elasticsearch.url == "": + LOG.info(_LI("not deploy es and not need execute")) + return + + try: + self.delete_es_history_index() + except Exception as e: + LOG.error(_LE("delete es index, catch exception:%s"), + six.text_type(e)) + LOG.info(_LI("delete es index task done")) + except Exception as e: + LOG.error(_LE("delete es index task, catch exception:%s"), + six.text_type(e)) diff --git a/venus/task/timer.py b/venus/task/timer.py new file mode 100644 index 0000000..ac2b4d5 --- /dev/null +++ b/venus/task/timer.py @@ -0,0 +1,38 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""A Timer Task With APScheduler.""" +from apscheduler.schedulers.blocking import BlockingScheduler +from oslo_config import cfg +from oslo_log import log as logging +from venus.task import adapter + +LOG = logging.getLogger(__name__) +CONF = cfg.CONF + +sched = BlockingScheduler() + +TRIGGER_INTERVAL = 'interval' +TRIGGER_CRON = 'cron' +TRIGGER_DATE = 'date' + + +def init_advanced_timer(): + add_jobs() + sched.start() + + +def add_jobs(): + sched.add_job(adapter.delete_es_index_job, TRIGGER_INTERVAL, + hours=1, id='delete_es_index_job') diff --git a/venus/tests/__init__.py b/venus/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/tests/functional/__init__.py b/venus/tests/functional/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/tests/unit/__init__.py b/venus/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/tests/unit/api/__init__.py b/venus/tests/unit/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/tests/unit/api/test_search_action.py b/venus/tests/unit/api/test_search_action.py new file mode 100644 index 0000000..7e18ee2 --- /dev/null +++ b/venus/tests/unit/api/test_search_action.py @@ -0,0 +1,126 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import unittest + +from venus.modules.search.action import SearchCore + + +class TestSearchAction(unittest.TestCase): + def test_get_interval(self): + action = SearchCore() + + want1 = "1s" + want2 = "1秒" + want3 = "1second" + end_time = 100000000 + start_time = end_time - 50 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "10s" + want2 = "10秒" + want3 = "10seconds" + start_time = end_time - 500 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "30s" + want2 = "30秒" + want3 = "30seconds" + start_time = end_time - 1500 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "1m" + want2 = "1分钟" + want3 = "1minute" + start_time = end_time - 50 * 60 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "10m" + want2 = "10分钟" + want3 = "10minutes" + start_time = end_time - 500 * 60 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "30m" + want2 = "30分钟" + want3 = "30minutes" + start_time = end_time - 1500 * 60 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "1h" + want2 = "1小时" + want3 = "1hour" + start_time = end_time - 50 * 3600 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "3h" + want2 = "3小时" + want3 = "3hours" + start_time = 
end_time - 150 * 3600 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "6h" + want2 = "6小时" + want3 = "6hours" + start_time = end_time - 300 * 3600 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "12h" + want2 = "12小时" + want3 = "12hours" + start_time = end_time - 700 * 3600 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + want1 = "24h" + want2 = "1天" + want3 = "1day" + start_time = end_time - 50 * 86400 + res1, res2, res3 = action.get_interval(start_time, end_time) + self.assertEqual(want1, res1) + self.assertEqual(want2, res2) + self.assertEqual(want3, res3) + + +if __name__ == "__main__": + unittest.main() diff --git a/venus/utils.py b/venus/utils.py new file mode 100644 index 0000000..b8a57bf --- /dev/null +++ b/venus/utils.py @@ -0,0 +1,746 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Utilities and helper functions.""" + +import abc +import contextlib +import functools +import hashlib +import inspect +import logging as py_logging +import os +import pyclbr +import random +import re +import shutil +import socket +import stat +import sys +import tempfile +import time +import types +from xml.dom import minidom +from xml.parsers import expat +from xml import sax +from xml.sax import expatreader +from xml.sax import saxutils + +from oslo_concurrency import lockutils +from oslo_concurrency import processutils +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import encodeutils +from oslo_utils import importutils +from oslo_utils import strutils +from oslo_utils import timeutils +import retrying +import six + +from venus import exception +from venus.i18n import _, _LW + + +CONF = cfg.CONF +LOG = logging.getLogger(__name__) +ISO_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S" +PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" +VALID_TRACE_FLAGS = {'method', 'api'} +TRACE_METHOD = False +TRACE_API = False + +synchronized = lockutils.synchronized_with_prefix('venus-') + + +def find_config(config_path): + """Find a configuration file using the given hint. + + :param config_path: Full or relative path to the config. + :returns: Full path of the config, if it exists. 
+ :raises: `venus.exception.ConfigNotFound` + + """ + possible_locations = [ + config_path, + os.path.join(CONF.state_path, "etc", "venus", config_path), + os.path.join(CONF.state_path, "etc", config_path), + os.path.join(CONF.state_path, config_path), + "/etc/venus/%s" % config_path, + ] + + for path in possible_locations: + if os.path.exists(path): + return os.path.abspath(path) + + raise exception.ConfigNotFound(path=os.path.abspath(config_path)) + + +def as_int(obj, quiet=True): + # Try "2" -> 2 + try: + return int(obj) + except (ValueError, TypeError): + pass + # Try "2.5" -> 2 + try: + return int(float(obj)) + except (ValueError, TypeError): + pass + # Eck, not sure what this is then. + if not quiet: + raise TypeError(_("Can not translate %s to integer.") % (obj)) + return obj + + +def is_int_like(val): + """Check if a value looks like an int.""" + try: + return str(int(val)) == str(val) + except Exception: + return False + + +def check_exclusive_options(**kwargs): + """Checks that only one of the provided options is actually not-none. + + Iterates over all the kwargs passed in and checks that only one of said + arguments is not-none, if more than one is not-none then an exception will + be raised with the names of those arguments who were not-none. + """ + + if not kwargs: + return + + pretty_keys = kwargs.pop("pretty_keys", True) + exclusive_options = {} + for (k, v) in kwargs.items(): + if v is not None: + exclusive_options[k] = True + + if len(exclusive_options) > 1: + # Change the format of the names from pythonic to + # something that is more readable. 
+ # + # Ex: 'the_key' -> 'the key' + if pretty_keys: + names = [k.replace('_', ' ') for k in kwargs.keys()] + else: + names = kwargs.keys() + names = ", ".join(sorted(names)) + msg = (_("May specify only one of %s") % (names)) + raise exception.InvalidInput(reason=msg) + + +def execute(*cmd, **kwargs): + """Convenience wrapper around oslo's execute() method.""" + if 'run_as_root' in kwargs and 'root_helper' not in kwargs: + kwargs['root_helper'] = get_root_helper() + return processutils.execute(*cmd, **kwargs) + + +def check_ssh_injection(cmd_list): + ssh_injection_pattern = ['`', '$', '|', '||', ';', '&', '&&', '>', '>>', + '<'] + + # Check whether injection attacks exist + for arg in cmd_list: + arg = arg.strip() + + # Check for matching quotes on the ends + is_quoted = re.match('^(?P[\'"])(?P.*)(?P=quote)$', arg) + if is_quoted: + # Check for unescaped quotes within the quoted argument + quoted = is_quoted.group('quoted') + if quoted: + if (re.match('[\'"]', quoted) or + re.search('[^\\\\][\'"]', quoted)): + raise exception.SSHInjectionThreat(command=cmd_list) + else: + # We only allow spaces within quoted arguments, and that + # is the only special character allowed within quotes + if len(arg.split()) > 1: + raise exception.SSHInjectionThreat(command=cmd_list) + + # Second, check whether danger character in command. So the shell + # special operator must be a single argument. 
+ for c in ssh_injection_pattern: + if c not in arg: + continue + + result = arg.find(c) + if not result == -1: + if result == 0 or not arg[result - 1] == '\\': + raise exception.SSHInjectionThreat(command=cmd_list) + + +def create_channel(client, width, height): + """Invoke an interactive shell session on server.""" + channel = client.invoke_shell() + channel.resize_pty(width, height) + return channel + + +def venusdir(): + import venus + return os.path.abspath(venus.__file__).split('venus/__init__.py')[0] + + +def list_of_dicts_to_dict(seq, key): + """Convert list of dicts to a indexted dict. + + Takes a list of dicts, and converts it a nested dict + indexed by + + :param seq: list of dicts + :parm key: key in dicts to index by + + example: + lst = [{'id': 1, ...}, {'id': 2, ...}...] + key = 'id' + returns {1:{'id': 1, ...}, 2:{'id':2, ...} + + """ + return {d[key]: dict(d, index=d[key]) for (i, d) in enumerate(seq)} + + +class ProtectedExpatParser(expatreader.ExpatParser): + """An expat parser which disables DTD's and entities by default.""" + + def __init__(self, forbid_dtd=True, forbid_entities=True, + *args, **kwargs): + # Python 2.x old style class + expatreader.ExpatParser.__init__(self, *args, **kwargs) + self.forbid_dtd = forbid_dtd + self.forbid_entities = forbid_entities + + def start_doctype_decl(self, name, sysid, pubid, has_internal_subset): + raise ValueError("Inline DTD forbidden") + + def entity_decl(self, entityName, is_parameter_entity, value, base, + systemId, publicId, notationName): + raise ValueError(" forbidden") + + def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): + # expat 1.2 + raise ValueError(" forbidden") + + def reset(self): + expatreader.ExpatParser.reset(self) + if self.forbid_dtd: + self._parser.StartDoctypeDeclHandler = self.start_doctype_decl + if self.forbid_entities: + self._parser.EntityDeclHandler = self.entity_decl + self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl + + +def 
safe_minidom_parse_string(xml_string): + """Parse an XML string using minidom safely. + + """ + try: + return minidom.parseString(xml_string, parser=ProtectedExpatParser()) + except sax.SAXParseException: + raise expat.ExpatError() + + +def xhtml_escape(value): + """Escapes a string so it is valid within XML or XHTML.""" + return saxutils.escape(value, {'"': '"', "'": '''}) + + +def get_from_path(items, path): + """Returns a list of items matching the specified path. + + Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item + in items, looks up items[prop1][prop2][prop3]. Like XPath, if any of the + intermediate results are lists it will treat each list item individually. + A 'None' in items or any child expressions will be ignored, this function + will not throw because of None (anywhere) in items. The returned list + will contain no None values. + + """ + if path is None: + raise exception.Error('Invalid mini_xpath') + + (first_token, sep, remainder) = path.partition('/') + + if first_token == '': + raise exception.Error('Invalid mini_xpath') + + results = [] + + if items is None: + return results + + if not isinstance(items, list): + # Wrap single objects in a list + items = [items] + + for item in items: + if item is None: + continue + get_method = getattr(item, 'get', None) + if get_method is None: + continue + child = get_method(first_token) + if child is None: + continue + if isinstance(child, list): + # Flatten intermediate lists + for x in child: + results.append(x) + else: + results.append(child) + + if not sep: + # No more tokens + return results + else: + return get_from_path(results, remainder) + + +def is_valid_boolstr(val): + """Check if the provided string is a valid bool string or not.""" + val = str(val).lower() + return (val == 'true' or val == 'false' or + val == 'yes' or val == 'no' or + val == 'y' or val == 'n' or + val == '1' or val == '0') + + +def is_none_string(val): + """Check if a string represents a None value.""" + 
if not isinstance(val, six.string_types): + return False + + return val.lower() == 'none' + + +def monkey_patch(): + """Patches decorators for all functions in a specified module. + + If the CONF.monkey_patch set as True, + this function patches a decorator + for all functions in specified modules. + + You can set decorators for each modules + using CONF.monkey_patch_modules. + The format is "Module path:Decorator function". + Example: 'venus.api.ec2.cloud:' \ + venus.openstack.common.notifier.api.notify_decorator' + + Parameters of the decorator is as follows. + (See venus.openstack.common.notifier.api.notify_decorator) + + :param name: name of the function + :param function: object of the function + """ + # If CONF.monkey_patch is not True, this function do nothing. + if not CONF.monkey_patch: + return + # Get list of modules and decorators + for module_and_decorator in CONF.monkey_patch_modules: + module, decorator_name = module_and_decorator.split(':') + # import decorator function + decorator = importutils.import_class(decorator_name) + __import__(module) + # Retrieve module information using pyclbr + module_data = pyclbr.readmodule_ex(module) + for key in module_data.keys(): + # set the decorator for the class methods + if isinstance(module_data[key], pyclbr.Class): + clz = importutils.import_class("%s.%s" % (module, key)) + for method, func in inspect.getmembers(clz, inspect.ismethod): + setattr( + clz, method, + decorator("%s.%s.%s" % (module, key, method), func)) + # set the decorator for the function + if isinstance(module_data[key], pyclbr.Function): + func = importutils.import_class("%s.%s" % (module, key)) + setattr(sys.modules[module], key, + decorator("%s.%s" % (module, key), func)) + + +def make_dev_path(dev, partition=None, base='/dev'): + """Return a path to a particular device. 
+ + >>> make_dev_path('xvdc') + /dev/xvdc + + >>> make_dev_path('xvdc', 1) + /dev/xvdc1 + """ + path = os.path.join(base, dev) + if partition: + path += str(partition) + return path + + +def sanitize_hostname(hostname): + """Return a hostname which conforms to RFC-952 and RFC-1123 specs.""" + if six.PY3: + hostname = hostname.encode('latin-1', 'ignore') + hostname = hostname.decode('latin-1') + else: + if isinstance(hostname, six.text_type): + hostname = hostname.encode('latin-1', 'ignore') + + hostname = re.sub('[ _]', '-', hostname) + hostname = re.sub('[^\w.-]+', '', hostname) + hostname = hostname.lower() + hostname = hostname.strip('.-') + + return hostname + + +def hash_file(file_like_object): + """Generate a hash for the contents of a file.""" + checksum = hashlib.sha1() + any(map(checksum.update, iter(lambda: file_like_object.read(32768), b''))) + return checksum.hexdigest() + + +def service_is_up(service): + """Check whether a service is up based on last heartbeat.""" + last_heartbeat = service['updated_at'] or service['created_at'] + # Timestamps in DB are UTC. + elapsed = (timeutils.utcnow(with_timezone=True) - + last_heartbeat).total_seconds() + return abs(elapsed) <= CONF.service_down_time + + +def read_file_as_root(file_path): + """Secure helper to read file as root.""" + try: + out, _err = execute('cat', file_path, run_as_root=True) + return out + except processutils.ProcessExecutionError: + raise exception.FileNotFound(file_path=file_path) + + +@contextlib.contextmanager +def temporary_chown(path, owner_uid=None): + """Temporarily chown a path. 
+ + :params owner_uid: UID of temporary owner (defaults to current user) + """ + if owner_uid is None: + owner_uid = os.getuid() + + orig_uid = os.stat(path).st_uid + + if orig_uid != owner_uid: + execute('chown', owner_uid, path, run_as_root=True) + try: + yield + finally: + if orig_uid != owner_uid: + execute('chown', orig_uid, path, run_as_root=True) + + +@contextlib.contextmanager +def tempdir(**kwargs): + tmpdir = tempfile.mkdtemp(**kwargs) + try: + yield tmpdir + finally: + try: + shutil.rmtree(tmpdir) + except OSError as e: + LOG.debug('Could not remove tmpdir: %s', + six.text_type(e)) + + +def walk_class_hierarchy(clazz, encountered=None): + """Walk class hierarchy, yielding most derived classes first.""" + if not encountered: + encountered = [] + for subclass in clazz.__subclasses__(): + if subclass not in encountered: + encountered.append(subclass) + # drill down to leaves first + for subsubclass in walk_class_hierarchy(subclass, encountered): + yield subsubclass + yield subclass + + +def get_root_helper(): + return 'sudo venus-rootwrap %s' % CONF.rootwrap_config + + +def get_file_mode(path): + """This primarily exists to make unit testing easier.""" + return stat.S_IMODE(os.stat(path).st_mode) + + +def get_file_gid(path): + """This primarily exists to make unit testing easier.""" + return os.stat(path).st_gid + + +def get_file_size(path): + """Returns the file size.""" + return os.stat(path).st_size + + +def get_bool_param(param_string, params): + param = params.get(param_string, False) + if not is_valid_boolstr(param): + msg = _('Value %(param)s for %(param_string)s is not a ' + 'boolean.') % {'param': param, 'param_string': param_string} + raise exception.InvalidParameterValue(err=msg) + + return strutils.bool_from_string(param, strict=True) + + +def check_string_length(value, name, min_length=0, max_length=None): + """Check the length of specified string. 
+ + :param value: the value of the string + :param name: the name of the string + :param min_length: the min_length of the string + :param max_length: the max_length of the string + """ + if not isinstance(value, six.string_types): + msg = _("%s is not a string or unicode") % name + raise exception.InvalidInput(message=msg) + + if len(value) < min_length: + msg = _("%(name)s has a minimum character requirement of " + "%(min_length)s.") % {'name': name, 'min_length': min_length} + raise exception.InvalidInput(message=msg) + + if max_length and len(value) > max_length: + msg = _("%(name)s has more than %(max_length)s " + "characters.") % {'name': name, 'max_length': max_length} + raise exception.InvalidInput(message=msg) + + +def remove_invalid_filter_options(context, filters, + allowed_search_options): + """Remove search options that are not valid for non-admin API/context.""" + + if context.is_admin: + # Allow all options + return + # Otherwise, strip out all unknown options + unknown_options = [opt for opt in filters + if opt not in allowed_search_options] + bad_options = ", ".join(unknown_options) + LOG.debug("Removing options '%s' from query.", bad_options) + for opt in unknown_options: + del filters[opt] + + +def retry(exceptions, interval=1, retries=3, backoff_rate=2, + wait_random=False): + + def _retry_on_exception(e): + return isinstance(e, exceptions) + + def _backoff_sleep(previous_attempt_number, delay_since_first_attempt_ms): + exp = backoff_rate ** previous_attempt_number + wait_for = interval * exp + + if wait_random: + random.seed() + wait_val = random.randrange(interval * 1000.0, wait_for * 1000.0) + else: + wait_val = wait_for * 1000.0 + + LOG.debug("Sleeping for %s seconds", (wait_val / 1000.0)) + + return wait_val + + def _print_stop(previous_attempt_number, delay_since_first_attempt_ms): + delay_since_first_attempt = delay_since_first_attempt_ms / 1000.0 + LOG.debug("Failed attempt %s", previous_attempt_number) + LOG.debug("Have been at this for 
%s seconds", + delay_since_first_attempt) + return previous_attempt_number == retries + + if retries < 1: + raise ValueError('Retries must be greater than or ' + 'equal to 1 (received: %s). ' % retries) + + def _decorator(f): + + @six.wraps(f) + def _wrapper(*args, **kwargs): + r = retrying.Retrying(retry_on_exception=_retry_on_exception, + wait_func=_backoff_sleep, + stop_func=_print_stop) + return r.call(f, *args, **kwargs) + + return _wrapper + + return _decorator + + +def convert_str(text): + """Convert to native string. + + Convert bytes and Unicode strings to native strings: + + * convert to bytes on Python 2: + encode Unicode using encodeutils.safe_encode() + * convert to Unicode on Python 3: decode bytes from UTF-8 + """ + if six.PY2: + return encodeutils.safe_encode(text) + else: + if isinstance(text, bytes): + return text.decode('utf-8') + else: + return text + + +def trace_method(f): + """Decorates a function if TRACE_METHOD is true.""" + @functools.wraps(f) + def trace_method_logging_wrapper(*args, **kwargs): + if TRACE_METHOD: + return trace(f)(*args, **kwargs) + return f(*args, **kwargs) + return trace_method_logging_wrapper + + +def trace_api(f): + """Decorates a function if TRACE_API is true.""" + @functools.wraps(f) + def trace_api_logging_wrapper(*args, **kwargs): + if TRACE_API: + return trace(f)(*args, **kwargs) + return f(*args, **kwargs) + return trace_api_logging_wrapper + + +def trace(f): + """Trace calls to the decorated function. + + This decorator should always be defined as the outermost decorator so it + is defined last. This is important so it does not interfere + with other decorators. + + Using this decorator on a function will cause its execution to be logged at + `DEBUG` level with arguments, return values, and exceptions. 
+ + :returns a function decorator + """ + + func_name = f.__name__ + + @functools.wraps(f) + def trace_logging_wrapper(*args, **kwargs): + if len(args) > 0: + maybe_self = args[0] + else: + maybe_self = kwargs.get('self', None) + + if maybe_self and hasattr(maybe_self, '__module__'): + logger = logging.getLogger(maybe_self.__module__) + else: + logger = LOG + + # NOTE(ameade): Don't bother going any further if DEBUG log level + # is not enabled for the logger. + if not logger.isEnabledFor(py_logging.DEBUG): + return f(*args, **kwargs) + + all_args = inspect.getcallargs(f, *args, **kwargs) + logger.debug('==> %(func)s: call %(all_args)r', + {'func': func_name, 'all_args': all_args}) + + start_time = time.time() * 1000 + try: + result = f(*args, **kwargs) + except Exception as exc: + total_time = int(round(time.time() * 1000)) - start_time + logger.debug('<== %(func)s: exception (%(time)dms) %(exc)r', + {'func': func_name, + 'time': total_time, + 'exc': exc}) + raise + total_time = int(round(time.time() * 1000)) - start_time + + logger.debug('<== %(func)s: return (%(time)dms) %(result)r', + {'func': func_name, + 'time': total_time, + 'result': result}) + return result + return trace_logging_wrapper + + +class TraceWrapperMetaclass(type): + """Metaclass that wraps all methods of a class with trace_method. + + This metaclass will cause every function inside of the class to be + decorated with the trace_method decorator. 
+ + To use the metaclass you define a class like so: + @six.add_metaclass(utils.TraceWrapperMetaclass) + class MyClass(object): + """ + def __new__(meta, classname, bases, classDict): + newClassDict = {} + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + # replace it with a wrapped version + attribute = functools.update_wrapper(trace_method(attribute), + attribute) + newClassDict[attributeName] = attribute + + return type.__new__(meta, classname, bases, newClassDict) + + +class TraceWrapperWithABCMetaclass(abc.ABCMeta, TraceWrapperMetaclass): + """Metaclass that wraps all methods of a class with trace.""" + pass + + +def setup_tracing(trace_flags): + """Set global variables for each trace flag. + + Sets variables TRACE_METHOD and TRACE_API, which represent + whether to log method and api traces. + + :param trace_flags: a list of strings + """ + global TRACE_METHOD + global TRACE_API + try: + trace_flags = [flag.strip() for flag in trace_flags] + except TypeError: # Handle when trace_flags is None or a test mock + trace_flags = [] + for invalid_flag in (set(trace_flags) - VALID_TRACE_FLAGS): + LOG.warning(_LW('Invalid trace flag: %s'), invalid_flag) + TRACE_METHOD = 'method' in trace_flags + TRACE_API = 'api' in trace_flags + + +def resolve_hostname(hostname): + """Resolves host name to IP address. + + Resolves a host name (my.data.point.com) to an IP address (10.12.143.11). + This routine also works if the data passed in hostname is already an IP. + In this case, the same IP address will be returned. + + :param hostname: Host name to resolve. + :return: IP Address for Host name. 
+ """ + result = socket.getaddrinfo(hostname, None)[0] + (family, socktype, proto, canonname, sockaddr) = result + LOG.debug('Asked to resolve hostname %(host)s and got IP %(ip)s.', + {'host': hostname, 'ip': sockaddr[0]}) + return sockaddr[0] diff --git a/venus/version.py b/venus/version.py new file mode 100644 index 0000000..2b47ebd --- /dev/null +++ b/venus/version.py @@ -0,0 +1,23 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from pbr import version as pbr_version + +VENUS_VENDOR = "OpenStack Foundation" +VENUS_PRODUCT = "OpenStack Venus" +VENUS_PACKAGE = None # OS distro package version suffix + +loaded = False +version_info = pbr_version.VersionInfo('venus') +version_string = version_info.version_string diff --git a/venus/wsgi/__init__.py b/venus/wsgi/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venus/wsgi/common.py b/venus/wsgi/common.py new file mode 100644 index 0000000..2686c55 --- /dev/null +++ b/venus/wsgi/common.py @@ -0,0 +1,284 @@ +# Copyright 2020 Inspur +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the
# License for the specific language governing permissions and limitations
# under the License.

"""Utility methods for working with WSGI servers."""

from paste import deploy
import routes.middleware
import sys
import webob.dec
import webob.exc

from oslo_config import cfg
from oslo_log import log as logging

from venus import exception
from venus.i18n import _, _LE
from venus import utils

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


class Request(webob.Request):
    # Project-local alias of webob.Request: a single place to extend the
    # request type later without touching every middleware that uses it.
    pass


class Application(object):
    """Base WSGI application wrapper. Subclasses need to implement __call__."""

    @classmethod
    def factory(cls, global_config, **local_config):
        """Used for paste app factories in paste.deploy config files.

        Any local configuration (that is, values under the [app:APPNAME]
        section of the paste config) will be passed into the `__init__` method
        as kwargs.

        A hypothetical configuration would look like:

            [app:wadl]
            latest_version = 1.3
            paste.app_factory = venus.api.fancy_api:Wadl.factory

        which would result in a call to the `Wadl` class as

            import venus.api.fancy_api
            fancy_api.Wadl(latest_version='1.3')

        You could of course re-implement the `factory` method in subclasses,
        but using the kwarg passing it shouldn't be necessary.

        """
        # global_config is intentionally ignored; only the [app:...] local
        # options are forwarded to the constructor.
        return cls(**local_config)

    def __call__(self, environ, start_response):
        r"""Subclasses will probably want to implement __call__ like this:

        @webob.dec.wsgify(RequestClass=Request)
        def __call__(self, req):
          # Any of the following objects work as responses:

          # Option 1: simple string
          res = 'message\n'

          # Option 2: a nicely formatted HTTP exception page
          res = exc.HTTPForbidden(explanation='Nice try')

          # Option 3: a webob Response object (in case you need to play with
          # headers, or you want to be treated like an iterable)
          res = Response();
          res.app_iter = open('somefile')

          # Option 4: any wsgi app to be run next
          res = self.application

          # Option 5: you can get a Response object for a wsgi app, too, to
          # play with headers etc
          res = req.get_response(self.application)

          # You can then just return your response...
          return res
          # ... or set req.response and return None.
          req.response = res

        See the end of http://pythonpaste.org/webob/modules/dec.html
        for more info.

        """
        raise NotImplementedError(_('You must implement __call__'))


class Middleware(Application):
    """Base WSGI middleware.

    These classes require an application to be
    initialized that will be called next. By default the middleware will
    simply call its wrapped app, or you can override __call__ to customize its
    behavior.

    """

    @classmethod
    def factory(cls, global_config, **local_config):
        """Used for paste app factories in paste.deploy config files.

        Any local configuration (that is, values under the [filter:APPNAME]
        section of the paste config) will be passed into the `__init__` method
        as kwargs.

        A hypothetical configuration would look like:

            [filter:analytics]
            redis_host = 127.0.0.1
            paste.filter_factory = venus.api.analytics:Analytics.factory

        which would result in a call to the `Analytics` class as

            import venus.api.analytics
            analytics.Analytics(app_from_paste, redis_host='127.0.0.1')

        You could of course re-implement the `factory` method in subclasses,
        but using the kwarg passing it shouldn't be necessary.

        """
        def _factory(app):
            # Paste filter factories return a callable that wraps the next
            # app in the pipeline with this middleware.
            return cls(app, **local_config)
        return _factory

    def __init__(self, application):
        # The next WSGI app in the pipeline; called when process_request
        # does not short-circuit.
        self.application = application

    def process_request(self, req):
        """Called on each request.

        If this returns None, the next application down the stack will be
        executed. If it returns a response then that response will be returned
        and execution will stop here.

        """
        return None

    def process_response(self, response):
        """Do whatever you'd like to the response."""
        return response

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        # A truthy response from process_request ends the pipeline here;
        # otherwise forward to the wrapped app and post-process its response.
        response = self.process_request(req)
        if response:
            return response
        response = req.get_response(self.application)
        return self.process_response(response)


class Debug(Middleware):
    """Helper class for debugging a WSGI application.

    Can be inserted into any WSGI application chain to get information
    about the request and response.

    """

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        # Dump the raw WSGI environ before, and response headers after,
        # invoking the wrapped application.
        print(('*' * 40) + ' REQUEST ENVIRON')  # noqa
        for key, value in req.environ.items():
            print(key, '=', value)  # noqa
        print()  # noqa
        resp = req.get_response(self.application)

        print(('*' * 40) + ' RESPONSE HEADERS')  # noqa
        for (key, value) in resp.headers.items():
            print(key, '=', value)  # noqa
        print()  # noqa

        # Wrap the body iterator so the body is echoed to stdout as it is
        # streamed to the client.
        resp.app_iter = self.print_generator(resp.app_iter)

        return resp

    @staticmethod
    def print_generator(app_iter):
        """Iterator that prints the contents of a wrapper string."""
        print(('*' * 40) + ' BODY')  # noqa
        for part in app_iter:
            # NOTE(review): WSGI bodies are bytes on Python 3; writing them
            # to sys.stdout (a text stream) would raise TypeError -- confirm
            # before relying on this middleware under py3.
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print()  # noqa


class Router(object):
    """WSGI middleware that maps incoming requests to WSGI apps."""

    def __init__(self, mapper):
        """Create a router for the given routes.Mapper.

        Each route in `mapper` must specify a 'controller', which is a
        WSGI app to call. You'll probably want to specify an 'action' as
        well and have your controller be an object that can route
        the request to the action-specific method.

        Examples:
          mapper = routes.Mapper()
          sc = ServerController()

          # Explicit mapping of one route to a controller+action
          mapper.connect(None, '/svrlist', controller=sc, action='list')

          # Actions are all implicitly defined
          mapper.resource('server', 'servers', controller=sc)

          # Pointing to an arbitrary WSGI app. You can specify the
          # {path_info:.*} parameter so the target app can be handed just that
          # section of the URL.
          mapper.connect(None, '/v1.0/{path_info:.*}', controller=BlogApp())

        """
        self.map = mapper
        # RoutesMiddleware matches the request against self.map, stores the
        # result in the WSGI environ, then calls self._dispatch.
        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          self.map)

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        """Route the incoming request to a controller based on self.map.

        If no match, return a 404.

        """
        # Returning a WSGI app from a wsgify-decorated method causes webob
        # to invoke it, so the RoutesMiddleware performs the actual dispatch.
        return self._router

    @staticmethod
    @webob.dec.wsgify(RequestClass=Request)
    def _dispatch(req):
        """Dispatch the request to the appropriate controller.

        Called by self._router after matching the incoming request to a route
        and putting the information into req.environ. Either returns 404
        or the routed WSGI app's response.

        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            return webob.exc.HTTPNotFound()
        app = match['controller']
        return app


class Loader(object):
    """Used to load WSGI applications from paste configurations."""

    def __init__(self, config_path=None):
        """Initialize the loader, and attempt to find the config.

        :param config_path: Full or relative path to the paste config.
        :returns: None

        """
        # Fall back to the api_paste_config option, then resolve the path
        # via the project's config search helper.
        config_path = config_path or CONF.api_paste_config
        self.config_path = utils.find_config(config_path)

    def load_app(self, name):
        """Return the paste URLMap wrapped WSGI application.

        :param name: Name of the application to load.
        :returns: Paste URLMap object wrapping the requested application.
        :raises: `venus.exception.PasteAppNotFound`

        """
        try:
            return deploy.loadapp("config:%s" % self.config_path, name=name)
        except LookupError:
            # Translate paste's LookupError into the project exception so
            # callers need not depend on paste internals.
            LOG.exception(_LE("Error loading app %s"), name)
            raise exception.PasteAppNotFound(name=name, path=self.config_path)
diff --git a/venus/wsgi/eventlet_server.py b/venus/wsgi/eventlet_server.py
new file mode 100644
index 0000000..757afed
--- /dev/null
+++ b/venus/wsgi/eventlet_server.py
@@ -0,0 +1,287 @@
# Copyright 2020 Inspur
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Methods for working with eventlet WSGI servers."""

from __future__ import print_function

import errno
import os
import socket
import ssl
import time

import eventlet
import eventlet.wsgi
import greenlet
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import service
from oslo_utils import excutils
from oslo_utils import netutils


from venus import exception
from venus.i18n import _, _LE, _LI


# TCP keepalive and TLS options applied to each listening server socket.
socket_opts = [
    cfg.BoolOpt('tcp_keepalive',
                default=True,
                help="Sets the value of TCP_KEEPALIVE (True/False) for each "
                     "server socket."),
    cfg.IntOpt('tcp_keepidle',
               default=600,
               help="Sets the value of TCP_KEEPIDLE in seconds for each "
                    "server socket. Not supported on OS X."),
    cfg.IntOpt('tcp_keepalive_interval',
               help="Sets the value of TCP_KEEPINTVL in seconds for each "
                    "server socket. Not supported on OS X."),
    cfg.IntOpt('tcp_keepalive_count',
               help="Sets the value of TCP_KEEPCNT for each "
                    "server socket. Not supported on OS X."),
    cfg.StrOpt('ssl_ca_file',
               default=None,
               help="CA certificate file to use to verify "
                    "connecting clients"),
    cfg.StrOpt('ssl_cert_file',
               default=None,
               help="Certificate file to use when starting "
                    "the server securely"),
    cfg.StrOpt('ssl_key_file',
               default=None,
               help="Private key file to use when starting "
                    "the server securely"),
]

# Options controlling the eventlet WSGI server behavior itself.
eventlet_opts = [
    cfg.IntOpt('max_header_line',
               default=16384,
               help="Maximum line size of message headers to be accepted. "
                    "max_header_line may need to be increased when using "
                    "large tokens (typically those generated by the "
                    "Keystone v3 API with big service catalogs)."),
    cfg.IntOpt('client_socket_timeout', default=900,
               help="Timeout for client connections\' socket operations. "
                    "If an incoming connection is idle for this number of "
                    "seconds it will be closed. A value of \'0\' means "
                    "wait forever."),
    cfg.BoolOpt('wsgi_keep_alive',
                default=True,
                help='If False, closes the client socket connection '
                     'explicitly. Setting it to True to maintain backward '
                     'compatibility. Recommended setting is set it to False.'),
]

CONF = cfg.CONF
CONF.register_opts(socket_opts)
CONF.register_opts(eventlet_opts)

LOG = logging.getLogger(__name__)


class Server(service.ServiceBase):
    """Server class to manage a WSGI server, serving a WSGI application."""

    # Default maximum number of concurrently spawned request greenthreads.
    default_pool_size = 1000

    def __init__(self, name, app, host=None, port=None, pool_size=None,
                 protocol=eventlet.wsgi.HttpProtocol, backlog=128):
        """Initialize, but do not start, a WSGI server.

        :param name: Pretty name for logging.
        :param app: The WSGI application to serve.
        :param host: IP address to serve the application.
        :param port: Port number to server the application.
        :param pool_size: Maximum number of eventlets to spawn concurrently.
        :returns: None

        """
        # Allow operators to customize http requests max header line size.
        eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
        # A configured timeout of 0 means "wait forever" (None for eventlet).
        self.client_socket_timeout = CONF.client_socket_timeout or None
        self.name = name
        self.app = app
        self._host = host or "0.0.0.0"
        self._port = port or 0
        self._server = None
        self._socket = None
        self._protocol = protocol
        self.pool_size = pool_size or self.default_pool_size
        self._pool = eventlet.GreenPool(self.pool_size)
        self._logger = logging.getLogger("eventlet.wsgi.server")

        if backlog < 1:
            raise exception.InvalidInput(
                reason='The backlog must be more than 1')

        bind_addr = (host, port)
        # TODO(dims): eventlet's green dns/socket module does not actually
        # support IPv6 in getaddrinfo(). We need to get around this in the
        # future or monitor upstream for a fix
        try:
            info = socket.getaddrinfo(bind_addr[0],
                                      bind_addr[1],
                                      socket.AF_UNSPEC,
                                      socket.SOCK_STREAM)[0]
            family = info[0]
            bind_addr = info[-1]
        except Exception:
            # Resolution failed: fall back to plain IPv4 with the original
            # (host, port) pair.
            family = socket.AF_INET

        cert_file = CONF.ssl_cert_file
        key_file = CONF.ssl_key_file
        ca_file = CONF.ssl_ca_file
        # SSL is enabled as soon as either a cert or a key is configured;
        # the consistency check below then requires both.
        self._use_ssl = cert_file or key_file

        if cert_file and not os.path.exists(cert_file):
            raise RuntimeError(_("Unable to find cert_file : %s")
                               % cert_file)

        if ca_file and not os.path.exists(ca_file):
            raise RuntimeError(_("Unable to find ca_file : %s") % ca_file)

        if key_file and not os.path.exists(key_file):
            raise RuntimeError(_("Unable to find key_file : %s")
                               % key_file)

        if self._use_ssl and (not cert_file or not key_file):
            raise RuntimeError(_("When running server in SSL mode, you "
                                 "must specify both a cert_file and "
                                 "key_file option value in your "
                                 "configuration file."))

        # Retry binding for up to 30 seconds so a restart can proceed while
        # the previous listener's address is still in TIME_WAIT.
        retry_until = time.time() + 30
        while not self._socket and time.time() < retry_until:
            try:
                self._socket = eventlet.listen(bind_addr, backlog=backlog,
                                               family=family)
            except socket.error as err:
                # Only EADDRINUSE is retried; any other error is fatal.
                if err.args[0] != errno.EADDRINUSE:
                    raise
                eventlet.sleep(0.1)

        if not self._socket:
            raise RuntimeError(_("Could not bind to %(host)s:%(port)s "
                                 "after trying for 30 seconds") %
                               {'host': host, 'port': port})

        # Re-read host/port from the socket: port may have been chosen by
        # the OS when 0 was requested.
        (self._host, self._port) = self._socket.getsockname()[0:2]
        LOG.info(_LI("%(name)s listening on %(_host)s:%(_port)s"),
                 {'name': self.name, '_host': self._host, '_port': self._port})

    def start(self):
        """Start serving a WSGI application.

        :returns: None
        :raises: venus.exception.InvalidInput

        """
        # The server socket object will be closed after server exits,
        # but the underlying file descriptor will remain open, and will
        # give bad file descriptor error. So duplicating the socket object,
        # to keep file descriptor usable.

        dup_socket = self._socket.dup()
        dup_socket.setsockopt(socket.SOL_SOCKET,
                              socket.SO_REUSEADDR, 1)

        # NOTE(praneshp): Call set_tcp_keepalive in oslo to set
        # tcp keepalive parameters. Sockets can hang around forever
        # without keepalive
        netutils.set_tcp_keepalive(dup_socket,
                                   CONF.tcp_keepalive,
                                   CONF.tcp_keepidle,
                                   CONF.tcp_keepalive_count,
                                   CONF.tcp_keepalive_interval)

        if self._use_ssl:
            try:
                ssl_kwargs = {
                    'server_side': True,
                    'certfile': CONF.ssl_cert_file,
                    'keyfile': CONF.ssl_key_file,
                    'cert_reqs': ssl.CERT_NONE,
                }

                # A configured CA file switches on client certificate
                # verification.
                if CONF.ssl_ca_file:
                    ssl_kwargs['ca_certs'] = CONF.ssl_ca_file
                    ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED

                # NOTE(review): ssl.wrap_socket is deprecated in newer
                # Python releases in favor of SSLContext.wrap_socket --
                # consider migrating when the minimum Python is raised.
                dup_socket = ssl.wrap_socket(dup_socket,
                                             **ssl_kwargs)
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.error(_LE("Failed to start %(name)s on %(_host)s: "
                                  "%(_port)s with SSL "
                                  "support."), self.__dict__)

        wsgi_kwargs = {
            'func': eventlet.wsgi.server,
            'sock': dup_socket,
            'site': self.app,
            'protocol': self._protocol,
            'custom_pool': self._pool,
            'log': self._logger,
            'socket_timeout': self.client_socket_timeout,
            'keepalive': CONF.wsgi_keep_alive
        }

        # Run the eventlet WSGI server in its own greenthread.
        self._server = eventlet.spawn(**wsgi_kwargs)

    @property
    def host(self):
        # Actual bound address (may differ from the requested one).
        return self._host

    @property
    def port(self):
        # Actual bound port (OS-assigned when 0 was requested).
        return self._port

    def stop(self):
        """Stop this server.

        This is not a very nice action, as currently the method by which a
        server is stopped is by killing its eventlet.

        :returns: None

        """
        LOG.info(_LI("Stopping WSGI server."))
        if self._server is not None:
            # Resize pool to stop new requests from being processed
            self._pool.resize(0)
            self._server.kill()

    def wait(self):
        """Block, until the server has stopped.

        Waits on the server's eventlet to finish, then returns.

        :returns: None

        """
        try:
            if self._server is not None:
                # Drain in-flight request greenthreads before waiting on
                # the server greenthread itself.
                self._pool.waitall()
                self._server.wait()
        except greenlet.GreenletExit:
            LOG.info(_LI("WSGI server has stopped."))

    def reset(self):
        """Reset server greenpool size to default.

        :returns: None

        """
        self._pool.resize(self.pool_size)
diff --git a/venus/wsgi/wsgi.py b/venus/wsgi/wsgi.py
new file mode 100644
index 0000000..deeacd8
--- /dev/null
+++ b/venus/wsgi/wsgi.py
@@ -0,0 +1,44 @@
# Copyright 2020 Inspur
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+ +"""Venus OS API WSGI application.""" + +import sys +import warnings + +from oslo_config import cfg +from oslo_log import log as logging + +# Need to register global_opts +from venus.common import config # noqa +from venus import i18n +from venus import objects +from venus import version +from venus.wsgi import common as wsgi_common + +warnings.simplefilter('once', DeprecationWarning) +i18n.enable_lazy() +CONF = cfg.CONF + + +def _application(): + objects.register_all() + CONF(sys.argv[1:], project='venus', + version=version.version_string()) + logging.setup(CONF, "venus") + + return wsgi_common.Loader().load_app(name='osapi_venus') + + +application = _application()