Make refstack jobs work again
There were three main issues in the code of the refstack repository
that were preventing the refstack jobs from working:
1) Improper handling of sessions in the database API.
2) The PEP8 job was failing because of an outdated version of flake8.
3) The new version of the cryptography library no longer supports the
signer() and verifier() functions.
Issue #1 was solved by using the get_session() function as a context
manager instead of session.begin(). Using session.begin() as a context
manager does not ensure that the session will be closed at the end of
the context (see "Opening and Closing a Session" and "Framing out a
begin / commit / rollback block" in the SQLAlchemy documentation [1]).
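The new pattern used throughout the database API looks roughly like
this (a simplified sketch; "obj" stands for any model object):

    # before: the session may stay open after the block
    session = get_session()
    with session.begin():
        obj.save(session)

    # after: get_session() as a context manager closes the session
    with get_session() as session:
        obj.save(session)
        session.commit()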
Issue #2 was solved by updating the libraries in the
test-requirements.txt file. This change also makes flake8 ignore some
pep8 errors (similar to the ones ignored in the tempest project).
Issue #3 was solved by using the sign() and verify() functions instead
of the signer() and verifier() functions [2].
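A minimal sketch of the new one-shot API ("data" and "private_key" are
placeholders; refstack keeps using PKCS1v15 padding with SHA256):

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding

    signature = private_key.sign(data, padding.PKCS1v15(),
                                 hashes.SHA256())
    # verify() raises cryptography.exceptions.InvalidSignature on
    # mismatch instead of returning a value
    private_key.public_key().verify(signature, data, padding.PKCS1v15(),
                                    hashes.SHA256())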
Related Taiga issues:
- https://tree.taiga.io/project/openstack-interop-working-group/issue/77
- https://tree.taiga.io/project/openstack-interop-working-group/issue/79
[1] https://docs.sqlalchemy.org/en/14/orm/session_basics.html
[2] e71c0df301
Change-Id: If98670475b371d1ece7c877a0eea3158f6c1b3f5
@@ -25,9 +25,9 @@ from oslo_log import log
import pecan
import webob

from refstack.api import constants as const
from refstack.api import exceptions as api_exc
from refstack.api import utils as api_utils
from refstack.api import constants as const
from refstack import db

LOG = log.getLogger(__name__)
@@ -81,20 +81,21 @@ API_OPTS = [
help='Template for test result url.'
),
cfg.StrOpt('opendev_api_capabilities_url',
default='https://opendev.org/api/v1/repos/openinfra/interop/contents/'
'guidelines',
default='https://opendev.org/api/v1/repos/openinfra/interop/'
'contents/guidelines',
help='The GitHub API URL of the repository and location of the '
'Interop Working Group capability files. This URL is used '
'to get a listing of all capability files.'
),
cfg.StrOpt('additional_capability_urls',
default='https://opendev.org/api/v1/repos/openinfra/interop/contents/'
'add-ons/guidelines',
default='https://opendev.org/api/v1/repos/openinfra/interop/'
'contents/add-ons/guidelines',
help=('The GitHub API URL of the repository and location of '
'any additional guideline sources which will need to '
'be parsed by the refstack API.')),
cfg.StrOpt('opendev_raw_base_url',
default='https://opendev.org/api/v1/repos/openinfra/interop/raw/',
default='https://opendev.org/api/v1/repos/openinfra/interop/'
'raw/',
help='This is the base URL that is used for retrieving '
'specific capability files. Capability file names will '
'be appended to this URL to get the contents of that file.'
@@ -15,10 +15,11 @@

"""Authentication controller."""

from urllib import parse

from oslo_config import cfg
import pecan
from pecan import rest
from urllib import parse

from refstack.api import constants as const
from refstack.api import utils as api_utils
@@ -178,7 +178,7 @@ class ProductsController(validation.BaseRestControllerWithValidation):
products = list(result.values())
except Exception as ex:
LOG.exception('An error occurred during '
'operation with database: %s' % ex)
'operation with database: %s', ex)
pecan.abort(400)

products.sort(key=lambda x: x['name'])
@@ -248,8 +248,8 @@ class ProductsController(validation.BaseRestControllerWithValidation):
product = db.get_product(id)
vendor_id = product['organization_id']
vendor = db.get_organization(vendor_id)
is_admin = (api_utils.check_user_is_foundation_admin()
or api_utils.check_user_is_vendor_admin(vendor_id))
is_admin = (api_utils.check_user_is_foundation_admin() or
api_utils.check_user_is_vendor_admin(vendor_id))
if not is_admin:
pecan.abort(403, 'Forbidden.')

@@ -264,8 +264,8 @@ class ProductsController(validation.BaseRestControllerWithValidation):
# user can mark product as public only if
# his/her vendor is public(official)
public = api_utils.str_to_bool(kw['public'])
if (vendor['type'] not in (const.OFFICIAL_VENDOR, const.FOUNDATION)
and public):
if (vendor['type'] not in
(const.OFFICIAL_VENDOR, const.FOUNDATION) and public):
pecan.abort(403, 'Forbidden.')
product_info['public'] = public
if 'properties' in kw:
@@ -15,19 +15,18 @@

"""Test results controller."""
import functools
from urllib import parse

from oslo_config import cfg
from oslo_log import log
import pecan
from pecan import rest

from urllib import parse

from refstack import db
from refstack.api import constants as const
from refstack.api.controllers import validation
from refstack.api import utils as api_utils
from refstack.api import validators
from refstack.api.controllers import validation
from refstack import db

LOG = log.getLogger(__name__)

@@ -267,7 +266,7 @@ class ResultsController(validation.BaseRestControllerWithValidation):
}}
except Exception as ex:
LOG.debug('An error occurred during '
'operation with database: %s' % str(ex))
'operation with database: %s', str(ex))
pecan.abort(500)

return page
@@ -15,8 +15,8 @@

"""Root controller."""

from pecan import expose
from oslo_config import cfg
from pecan import expose

from refstack.api.controllers import v1

@@ -19,9 +19,9 @@ import pecan
from pecan import rest
from pecan.secure import secure

from refstack.api.controllers import validation
from refstack.api import utils as api_utils
from refstack.api import validators
from refstack.api.controllers import validation
from refstack import db

@@ -49,8 +49,8 @@ class UsersController(rest.RestController):
@pecan.expose('json')
def get(self, vendor_id):
"""Return list of users in the vendor's group."""
if not (api_utils.check_user_is_foundation_admin()
or api_utils.check_user_is_vendor_admin(vendor_id)):
if not (api_utils.check_user_is_foundation_admin() or
api_utils.check_user_is_vendor_admin(vendor_id)):
return None

org_users = db.get_organization_users(vendor_id)
@@ -62,8 +62,8 @@ class UsersController(rest.RestController):
"""Add user to vendor group."""
openid = base64.b64decode(openid)

if not (api_utils.check_user_is_foundation_admin()
or api_utils.check_user_is_vendor_admin(vendor_id)):
if not (api_utils.check_user_is_foundation_admin() or
api_utils.check_user_is_vendor_admin(vendor_id)):
pecan.abort(403, 'Forbidden.')

vendor = db.get_organization(vendor_id)
@@ -77,8 +77,8 @@ class UsersController(rest.RestController):
"""Remove user from vendor group."""
openid = base64.b64decode(openid)

if not (api_utils.check_user_is_foundation_admin()
or api_utils.check_user_is_vendor_admin(vendor_id)):
if not (api_utils.check_user_is_foundation_admin() or
api_utils.check_user_is_vendor_admin(vendor_id)):
pecan.abort(403, 'Forbidden.')

vendor = db.get_organization(vendor_id)
@@ -115,8 +115,8 @@ class VendorsController(validation.BaseRestControllerWithValidation):
def put(self, vendor_id, **kw):
"""Handler for update item. Should return full info with updates."""
is_foundation_admin = api_utils.check_user_is_foundation_admin()
is_admin = (is_foundation_admin
or api_utils.check_user_is_vendor_admin(vendor_id))
is_admin = (is_foundation_admin or
api_utils.check_user_is_vendor_admin(vendor_id))
if not is_admin:
pecan.abort(403, 'Forbidden.')
vendor_info = {'id': vendor_id}
@@ -168,7 +168,7 @@ class VendorsController(validation.BaseRestControllerWithValidation):
vendors = list(result.values())
except Exception as ex:
LOG.exception('An error occurred during '
'operation with database: %s' % ex)
'operation with database: %s', ex)
pecan.abort(400)
return {'vendors': vendors}

@@ -176,8 +176,8 @@ class VendorsController(validation.BaseRestControllerWithValidation):
def get_one(self, vendor_id):
"""Get information about vendor."""
allowed_keys = None
is_admin = (api_utils.check_user_is_foundation_admin()
or api_utils.check_user_is_vendor_admin(vendor_id))
is_admin = (api_utils.check_user_is_foundation_admin() or
api_utils.check_user_is_vendor_admin(vendor_id))
if not is_admin:
allowed_keys = ['id', 'type', 'name', 'description']

@@ -194,8 +194,8 @@ class VendorsController(validation.BaseRestControllerWithValidation):
@pecan.expose('json')
def delete(self, vendor_id):
"""Delete vendor."""
if not (api_utils.check_user_is_foundation_admin()
or api_utils.check_user_is_vendor_admin(vendor_id)):
if not (api_utils.check_user_is_foundation_admin() or
api_utils.check_user_is_vendor_admin(vendor_id)):
pecan.abort(403, 'Forbidden.')
_check_is_not_foundation(vendor_id)

@@ -19,6 +19,7 @@ import itertools
from operator import itemgetter
import os
import re

import requests
import requests_cache

@@ -80,11 +81,11 @@ class Guidelines:
try:
resp = requests.get(src_url)

LOG.debug("Response Status: %s / Used Requests Cache: %s" %
(resp.status_code,
getattr(resp, 'from_cache', False)))
LOG.debug("Response Status: %s / Used Requests Cache: %s",
resp.status_code,
getattr(resp, 'from_cache', False))
if resp.status_code == 200:
regex = re.compile('([0-9]{4}\.[0-9]{2}|next)\.json')
regex = re.compile(r'([0-9]{4}\.[0-9]{2}|next)\.json')
for rfile in resp.json():
if rfile["type"] == "file" and \
regex.search(rfile["name"]):
@@ -103,12 +104,12 @@ class Guidelines:
powered_files.append(file_dict)
else:
LOG.warning('Guidelines repo URL (%s) returned '
'non-success HTTP code: %s' %
(src_url, resp.status_code))
'non-success HTTP code: %s', src_url,
resp.status_code)

except requests.exceptions.RequestException as e:
LOG.warning('An error occurred trying to get repository '
'contents through %s: %s' % (src_url, e))
'contents through %s: %s', src_url, e)
for k, v in itertools.groupby(addon_files,
key=lambda x: x['name'].split('.')[0]):
values = [{'name': x['name'].split('.', 1)[1], 'file': x['name']}
@@ -122,7 +123,7 @@ class Guidelines:
"""Get contents for a given guideline path."""
if '.json' not in gl_file:
gl_file = '.'.join((gl_file, 'json'))
regex = re.compile("[a-z]*\.([0-9]{4}\.[0-9]{2}|next)\.json")
regex = re.compile(r"[a-z]*\.([0-9]{4}\.[0-9]{2}|next)\.json")
if regex.search(gl_file):
guideline_path = 'add-ons/guidelines/' + gl_file
else:
@@ -130,23 +131,23 @@ class Guidelines:

file_url = ''.join((self.raw_url.rstrip('/'),
'/', guideline_path))
LOG.debug("file_url: %s" % (file_url))
LOG.debug("file_url: %s", file_url)
try:
response = requests.get(file_url)
LOG.debug("Response Status: %s / Used Requests Cache: %s" %
(response.status_code,
getattr(response, 'from_cache', False)))
LOG.debug("Response body: %s" % str(response.text))
LOG.debug("Response Status: %s / Used Requests Cache: %s",
response.status_code,
getattr(response, 'from_cache', False))
LOG.debug("Response body: %s", str(response.text))
if response.status_code == 200:
return response.json()
else:
LOG.warning('Raw guideline URL (%s) returned non-success HTTP '
'code: %s' % (self.raw_url, response.status_code))
'code: %s', self.raw_url, response.status_code)

return None
except requests.exceptions.RequestException as e:
LOG.warning('An error occurred trying to get raw capability file '
'contents from %s: %s' % (self.raw_url, e))
'contents from %s: %s', self.raw_url, e)
return None

def get_target_capabilities(self, guideline_json, types=None,
@@ -18,24 +18,23 @@ import binascii
import copy
import functools
import random
import requests
import string
import types
from urllib import parse

from cryptography.hazmat import backends
from cryptography.hazmat.primitives import serialization
import jwt
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
import pecan
import pecan.rest
import jwt
import requests

from urllib import parse

from refstack import db
from refstack.api import constants as const
from refstack.api import exceptions as api_exc
from refstack import db

LOG = log.getLogger(__name__)
CONF = cfg.CONF
@@ -52,11 +51,8 @@ def _get_input_params_from_request(expected_params):
value = pecan.request.GET.get(param)
if value is not None:
filters[param] = value
LOG.debug('Parameter %(param)s has been received '
'with value %(value)s' % {
'param': param,
'value': value
})
LOG.debug('Parameter %s has been received '
'with value %s', param, value)
return filters

@@ -329,8 +325,8 @@ def verify_openid_request(request):
for token in verify_data_tokens
if len(token.split(':')) > 1)

if (verify_response.status_code / 100 != 2
or verify_dict['is_valid'] != 'true'):
if (verify_response.status_code / 100 != 2 or
verify_dict['is_valid'] != 'true'):
pecan.abort(401, 'Authentication is failed. Try again.')

# Is the data we've received within our required parameters?
@@ -16,15 +16,15 @@
"""Validators module."""

import binascii
import json
import uuid

import json
import jsonschema
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
import jsonschema

from refstack.api import exceptions as api_exc

@@ -138,10 +138,9 @@ class TestResultValidator(BaseValidator):
except (binascii.Error, ValueError) as e:
raise api_exc.ValidationError('Malformed public key', e)

verifier = key.verifier(sign, padding.PKCS1v15(), hashes.SHA256())
verifier.update(request.body)
try:
verifier.verify()
key.verify(sign, request.body, padding.PKCS1v15(),
hashes.SHA256())
except InvalidSignature:
raise api_exc.ValidationError('Signature verification failed')
if self._is_empty_result(request):
@@ -195,10 +194,9 @@ class PubkeyValidator(BaseValidator):
except (binascii.Error, ValueError) as e:
raise api_exc.ValidationError('Malformed public key', e)

verifier = key.verifier(sign, padding.PKCS1v15(), hashes.SHA256())
verifier.update('signature'.encode('utf-8'))
try:
verifier.verify()
key.verify(sign, 'signature'.encode('utf-8'), padding.PKCS1v15(),
hashes.SHA256())
except InvalidSignature:
raise api_exc.ValidationError('Signature verification failed')

@@ -16,8 +16,8 @@
import alembic
import alembic.migration as alembic_migration
from oslo_config import cfg
from refstack.db.sqlalchemy import api as db_api
from refstack.db.migrations.alembic import utils
from refstack.db.sqlalchemy import api as db_api

CONF = cfg.CONF

@@ -16,8 +16,8 @@
import os

from alembic import config as alembic_conf
from alembic.operations import Operations
import alembic.migration as alembic_migration
from alembic.operations import Operations
try:
# Python 3.10 and above
from collections.abc import Iterable
@@ -79,8 +79,8 @@ def _to_dict(sqlalchemy_object, allowed_keys=None):
if isinstance(sqlalchemy_object, list):
return [_to_dict(obj, allowed_keys=allowed_keys)
for obj in sqlalchemy_object]
if (hasattr(sqlalchemy_object, 'keys')
and hasattr(sqlalchemy_object, 'index')):
if (hasattr(sqlalchemy_object, 'keys') and
hasattr(sqlalchemy_object, 'index')):
return {key: getattr(sqlalchemy_object, key)
for key in sqlalchemy_object.keys()}
if hasattr(sqlalchemy_object, 'default_allowed_keys'):
@@ -98,8 +98,8 @@ def _to_dict(sqlalchemy_object, allowed_keys=None):
for item in value}
elif hasattr(value, 'default_allowed_keys'):
result[key] = _to_dict(value)
elif (isinstance(value, list) and value
and hasattr(value[0], 'default_allowed_keys')):
elif (isinstance(value, list) and value and
hasattr(value[0], 'default_allowed_keys')):
result[key] = [_to_dict(item) for item in value]
else:
result[key] = value
@@ -117,8 +117,7 @@ def store_test_results(results):
test.cpid = results.get('cpid')
test.duration_seconds = results.get('duration_seconds')
test.product_version_id = results.get('product_version_id')
session = get_session()
with session.begin():
with get_session() as session:
for result in results.get('results', []):
test_result = models.TestResults()
test_result.test_id = test_id
@@ -130,24 +129,27 @@ def store_test_results(results):
meta.meta_key, meta.value = k, v
test.meta.append(meta)
test.save(session)
session.commit()
return test_id


def get_test_result(test_id, allowed_keys=None):
"""Get test info."""
session = get_session()
with get_session() as session:
test_info = session.query(models.Test). \
filter_by(id=test_id). \
first()
if not test_info:
raise NotFound('Test result %s not found' % test_id)
return _to_dict(test_info, allowed_keys)

test_result_dict = _to_dict(test_info, allowed_keys)

return test_result_dict


def delete_test_result(test_id):
"""Delete test information from the database."""
session = get_session()
with session.begin():
with get_session() as session:
test = session.query(models.Test).filter_by(id=test_id).first()
if test:
session.query(models.TestMeta) \
@@ -155,16 +157,18 @@ def delete_test_result(test_id):
session.query(models.TestResults) \
.filter_by(test_id=test_id).delete()
session.delete(test)
session.commit()
else:
raise NotFound('Test result %s not found' % test_id)


def update_test_result(test_info):
"""Update test from the given test_info dictionary."""
session = get_session()
with get_session() as session:
_id = test_info.get('id')
test = session.query(models.Test).filter_by(id=_id).first()
if test is None:
session.close()
raise NotFound('Test result with id %s not found' % _id)

keys = ['product_version_id', 'verification_status']
@@ -172,14 +176,15 @@ def update_test_result(test_info):
if key in test_info:
setattr(test, key, test_info[key])

with session.begin():
test.save(session=session)
return _to_dict(test)
test_result_dict = _to_dict(test)
session.commit()
return test_result_dict


def get_test_result_meta_key(test_id, key, default=None):
"""Get metadata value related to specified test run."""
session = get_session()
with get_session() as session:
meta_item = session.query(models.TestMeta). \
filter_by(test_id=test_id). \
filter_by(meta_key=key). \
@@ -190,27 +195,27 @@ def get_test_result_meta_key(test_id, key, default=None):

def save_test_result_meta_item(test_id, key, value):
"""Store or update item value related to specified test run."""
session = get_session()
with get_session() as session:
meta_item = (session.query(models.TestMeta)
.filter_by(test_id=test_id)
.filter_by(meta_key=key).first() or models.TestMeta())
meta_item.test_id = test_id
meta_item.meta_key = key
meta_item.value = value
with session.begin():
meta_item.save(session)
session.commit()


def delete_test_result_meta_item(test_id, key):
"""Delete metadata item related to specified test run."""
session = get_session()
with get_session() as session:
meta_item = session.query(models.TestMeta). \
filter_by(test_id=test_id). \
filter_by(meta_key=key). \
first()
if meta_item:
with session.begin():
session.delete(meta_item)
session.commit()
else:
raise NotFound('Metadata key %s '
'not found for test run %s' % (key, test_id))
@@ -218,11 +223,12 @@ def delete_test_result_meta_item(test_id, key):

def get_test_results(test_id):
"""Get test results."""
session = get_session()
with get_session() as session:
results = session.query(models.TestResults). \
filter_by(test_id=test_id). \
all()
return [_to_dict(result) for result in results]
test_results_list = [_to_dict(result) for result in results]
return test_results_list


def _apply_filters_for_query(query, filters):
@@ -276,30 +282,31 @@ def _apply_filters_for_query(query, filters):

def get_test_result_records(page, per_page, filters):
"""Get page with list of test records."""
session = get_session()
with get_session() as session:
query = session.query(models.Test)
query = _apply_filters_for_query(query, filters)
results = query.order_by(models.Test.created_at.desc()). \
offset(per_page * (page - 1)). \
limit(per_page).all()
return _to_dict(results)
test_result_records_dict = _to_dict(results)
return test_result_records_dict


def get_test_result_records_count(filters):
"""Get total test records count."""
session = get_session()
with get_session() as session:
query = session.query(models.Test.id)
records_count = _apply_filters_for_query(query, filters).count()

return records_count


def user_get(user_openid):
"""Get user info by openid."""
session = get_session()
with get_session() as session:
user = session.query(models.User).filter_by(openid=user_openid).first()
if user is None:
raise NotFound('User with OpenID %s not found' % user_openid)

return user


@@ -310,10 +317,10 @@ def user_save(user_info):
except NotFound:
user = models.User()

session = get_session()
with session.begin():
with get_session() as session:
user.update(user_info)
user.save(session=session)
session.commit()
return user


@@ -322,9 +329,11 @@ def get_pubkey(key):

The md5 hash of the key is used for the query for quicker lookups.
"""
session = get_session()
with get_session() as session:
md5_hash = hashlib.md5(base64.b64decode(key)).hexdigest()
pubkeys = session.query(models.PubKey).filter_by(md5_hash=md5_hash).all()
pubkeys = (session.query(models.PubKey)
.filter_by(md5_hash=md5_hash).all())

if len(pubkeys) == 1:
return pubkeys[0]
elif len(pubkeys) > 1:
@@ -346,59 +355,61 @@ def store_pubkey(pubkey_info):
)
).hexdigest()
pubkey.comment = pubkey_info['comment']
session = get_session()
with session.begin():

with get_session() as session:
pubkeys_collision = (session.
query(models.PubKey).
filter_by(md5_hash=pubkey.md5_hash).
filter_by(pubkey=pubkey.pubkey).all())
if not pubkeys_collision:
pubkey.save(session)
session.commit()
else:
raise Duplication('Public key already exists.')

return pubkey.id


def delete_pubkey(id):
"""Delete public key from DB."""
session = get_session()
with session.begin():
with get_session() as session:
key = session.query(models.PubKey).filter_by(id=id).first()
session.delete(key)
session.commit()


def get_user_pubkeys(user_openid):
"""Get public pubkeys for specified user."""
session = get_session()
pubkeys = session.query(models.PubKey).filter_by(openid=user_openid).all()
with get_session() as session:
pubkeys = (session.query(models.PubKey)
.filter_by(openid=user_openid).all())
return _to_dict(pubkeys)


def add_user_to_group(user_openid, group_id, created_by_user):
"""Add specified user to specified group."""
item = models.UserToGroup()
session = get_session()
with session.begin():
with get_session() as session:
item.user_openid = user_openid
item.group_id = group_id
item.created_by_user = created_by_user
item.save(session=session)
session.commit()


def remove_user_from_group(user_openid, group_id):
"""Remove specified user from specified group."""
session = get_session()
with session.begin():
with get_session() as session:
(session.query(models.UserToGroup).
filter_by(user_openid=user_openid).
filter_by(group_id=group_id).
delete(synchronize_session=False))
session.commit()


def add_organization(organization_info, creator):
"""Add organization."""
session = get_session()
with session.begin():
with get_session() as session:
group = models.Group()
group.name = 'Group for %s' % organization_info['name']
group.save(session=session)
@@ -419,20 +430,20 @@ def add_organization(organization_info, creator):
organization.created_by_user = creator
organization.properties = organization_info.get('properties')
organization.save(session=session)

return _to_dict(organization)
session.commit()
organization_dict = _to_dict(organization)
return organization_dict


def update_organization(organization_info):
"""Update organization."""
session = get_session()
with get_session() as session:
_id = organization_info['id']
organization = (session.query(models.Organization).
filter_by(id=_id).first())
if organization is None:
raise NotFound('Organization with id %s not found' % _id)

with session.begin():
organization.type = organization_info.get(
'type', organization.type)
organization.name = organization_info.get(
@@ -442,23 +453,24 @@ def update_organization(organization_info):
organization.properties = organization_info.get(
'properties', organization.properties)
organization.save(session=session)
return _to_dict(organization)
organization_dict = _to_dict(organization)
session.commit()
return organization_dict


def get_organization(organization_id, allowed_keys=None):
"""Get organization by id."""
session = get_session()
with get_session() as session:
organization = (session.query(models.Organization).
filter_by(id=organization_id).first())
if organization is None:
raise NotFound('Organization with id %s not found' % organization_id)
raise NotFound(f'Organization with id {organization_id} not found')
return _to_dict(organization, allowed_keys=allowed_keys)


def delete_organization(organization_id):
"""delete organization by id."""
session = get_session()
with session.begin():
with get_session() as session:
product_ids = (session
.query(models.Product.id)
.filter_by(organization_id=organization_id))
@@ -487,63 +499,68 @@ def add_product(product_info, creator):
product.public = product_info.get('public', False)
product.properties = product_info.get('properties')

session = get_session()
with session.begin():
with get_session() as session:
product.save(session=session)
product_version = models.ProductVersion()
product_version.created_by_user = creator
product_version.version = product_info.get('version')
product_version.product_id = product.id
product_version.save(session=session)
product_dict = _to_dict(product)
session.commit()

return _to_dict(product)
return product_dict


def update_product(product_info):
"""Update product by id."""
session = get_session()
with get_session() as session:
_id = product_info.get('id')
product = session.query(models.Product).filter_by(id=_id).first()
if product is None:
raise NotFound('Product with id %s not found' % _id)

keys = ['name', 'description', 'product_ref_id', 'public', 'properties']
keys = ['name', 'description', 'product_ref_id', 'public',
'properties']
for key in keys:
if key in product_info:
setattr(product, key, product_info[key])

with session.begin():
product.save(session=session)
return _to_dict(product)
product_dict = _to_dict(product)
session.commit()
return product_dict


def get_product(id, allowed_keys=None):
"""Get product by id."""
session = get_session()
with get_session() as session:
product = session.query(models.Product).filter_by(id=id).first()
if product is None:
session.close()
raise NotFound('Product with id "%s" not found' % id)
return _to_dict(product, allowed_keys=allowed_keys)


def delete_product(id):
"""delete product by id."""
session = get_session()
with session.begin():
with get_session() as session:
(session.query(models.ProductVersion)
.filter_by(product_id=id)
.delete(synchronize_session=False))
(session.query(models.Product).filter_by(id=id).
delete(synchronize_session=False))
session.commit()


def get_foundation_users():
"""Get users' openid-s that belong to group of foundation."""
session = get_session()
with get_session() as session:
organization = (
session.query(models.Organization.group_id)
.filter_by(type=api_const.FOUNDATION).first())
if organization is None:
session.close()
LOG.warning('Foundation organization record not found in DB.')
return []
group_id = organization.group_id
@@ -554,7 +571,7 @@ def get_foundation_users():

def get_organization_users(organization_id):
"""Get users that belong to group of organization."""
session = get_session()
with get_session() as session:
organization = (session.query(models.Organization.group_id)
.filter_by(id=organization_id).first())
if organization is None:
@@ -566,13 +583,15 @@ def get_organization_users(organization_id):
models.User.openid == models.UserToGroup.user_openid)
.filter(models.UserToGroup.group_id == group_id))
keys = ['openid', 'fullname', 'email']
return {item[1].openid: _to_dict(item[1], allowed_keys=keys)
organization_users_dict = {item[1].openid:
_to_dict(item[1], allowed_keys=keys)
for item in users}
return organization_users_dict


def get_organizations(allowed_keys=None):
"""Get all organizations."""
session = get_session()
with get_session() as session:
items = (
session.query(models.Organization)
.order_by(models.Organization.created_at.desc()).all())
@@ -581,7 +600,7 @@ def get_organizations(allowed_keys=None):

def get_organizations_by_types(types, allowed_keys=None):
"""Get organization by list of types."""
session = get_session()
with get_session() as session:
items = (
session.query(models.Organization)
.filter(models.Organization.type.in_(types))
@@ -591,9 +610,10 @@ def get_organizations_by_types(types, allowed_keys=None):

def get_organizations_by_user(user_openid, allowed_keys=None):
"""Get organizations for specified user."""
session = get_session()
with get_session() as session:
items = (
session.query(models.Organization, models.Group, models.UserToGroup)
session
.query(models.Organization, models.Group, models.UserToGroup)
.join(models.Group,
models.Group.id == models.Organization.group_id)
.join(models.UserToGroup,
@@ -601,7 +621,8 @@ def get_organizations_by_user(user_openid, allowed_keys=None):
.filter(models.UserToGroup.user_openid == user_openid)
.order_by(models.Organization.created_at.desc()).all())
items = [item[0] for item in items]
return _to_dict(items, allowed_keys=allowed_keys)
organizations_dict = _to_dict(items, allowed_keys=allowed_keys)
return organizations_dict


def get_products(allowed_keys=None, filters=None):
@@ -615,19 +636,20 @@ def get_products(allowed_keys=None, filters=None):
raise Exception('Unknown filter key "%s"' % key)
filter_args[key] = value

session = get_session()
with get_session() as session:
query = session.query(models.Product)
if filter_args:
query = query.filter_by(**filter_args)
items = query.order_by(models.Product.created_at.desc()).all()
return _to_dict(items, allowed_keys=allowed_keys)
products_dict = _to_dict(items, allowed_keys=allowed_keys)
return products_dict


def get_products_by_user(user_openid, allowed_keys=None, filters=None):
"""Get products that a user can manage."""
if filters is None:
filters = {}
session = get_session()
with get_session() as session:
query = (
session.query(models.Product, models.Organization, models.Group,
models.UserToGroup)
@@ -647,47 +669,54 @@ def get_products_by_user(user_openid, allowed_keys=None, filters=None):
filters[key])
items = query.order_by(models.Organization.created_at.desc()).all()
items = [item[0] for item in items]
return _to_dict(items, allowed_keys=allowed_keys)
products_dict = _to_dict(items, allowed_keys=allowed_keys)
return products_dict


def get_product_by_version(product_version_id, allowed_keys=None):
"""Get product info from a product version ID."""
session = get_session()
with get_session() as session:
product = (session.query(models.Product).join(models.ProductVersion)
.filter(models.ProductVersion.id == product_version_id).first())
.filter(models.ProductVersion.id == product_version_id)
.first())
return _to_dict(product, allowed_keys=allowed_keys)


def get_product_version(product_version_id, allowed_keys=None):
"""Get details of a specific version given the id."""
session = get_session()
with get_session() as session:
version = (
session.query(models.ProductVersion)
.filter_by(id=product_version_id).first()
)
if version is None:
raise NotFound('Version with id "%s" not found' % product_version_id)
return _to_dict(version, allowed_keys=allowed_keys)
session.close()
raise NotFound(f'Version with id "{product_version_id}" not found')
product_version_dict = _to_dict(version, allowed_keys=allowed_keys)
return product_version_dict


def get_product_version_by_cpid(cpid, allowed_keys=None):
"""Get a product version given a cloud provider id."""
session = get_session()
with get_session() as session:
version = (
session.query(models.ProductVersion)
.filter_by(cpid=cpid).all()
)
return _to_dict(version, allowed_keys=allowed_keys)
product_version_dict = _to_dict(version, allowed_keys=allowed_keys)
return product_version_dict


def get_product_versions(product_id, allowed_keys=None):
"""Get all versions for a product."""
session = get_session()
with get_session() as session:
version_info = (
session.query(models.ProductVersion)
.filter_by(product_id=product_id).all()
)
return _to_dict(version_info, allowed_keys=allowed_keys)
product_version_dict = _to_dict(version_info,
allowed_keys=allowed_keys)
return product_version_dict


def add_product_version(product_id, version, creator, cpid, allowed_keys=None):
@@ -697,17 +726,21 @@ def add_product_version(product_id, version, creator, cpid, allowed_keys=None):
product_version.version = version
product_version.product_id = product_id
product_version.cpid = cpid
session = get_session()
with session.begin():
with get_session() as session:
product_version.save(session=session)
return _to_dict(product_version, allowed_keys=allowed_keys)
product_version_dict = _to_dict(product_version,
allowed_keys=allowed_keys)
session.commit()
return product_version_dict


def update_product_version(product_version_info):
"""Update product version from product_info_version dictionary."""
session = get_session()
with get_session() as session:
_id = product_version_info.get('id')
version = session.query(models.ProductVersion).filter_by(id=_id).first()
version = (session.query(models.ProductVersion)
.filter_by(id=_id)
.first())
if version is None:
raise NotFound('Product version with id %s not found' % _id)

@@ -717,14 +750,15 @@ def update_product_version(product_version_info):
if key in product_version_info:
setattr(version, key, product_version_info[key])

with session.begin():
version.save(session=session)
return _to_dict(version)
product_version_dict = _to_dict(version)
session.commit()
return product_version_dict


def delete_product_version(product_version_id):
"""Delete a product version."""
session = get_session()
with session.begin():
with get_session() as session:
(session.query(models.ProductVersion).filter_by(id=product_version_id).
delete(synchronize_session=False))
session.commit()

@@ -20,8 +20,8 @@ import uuid

from oslo_db.sqlalchemy import models
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import orm

BASE = declarative_base()


@@ -34,8 +34,8 @@ def list_opts():
import itertools

import refstack.api.app
import refstack.api.controllers.v1
import refstack.api.controllers.auth
import refstack.api.controllers.v1
import refstack.db.api


@@ -19,8 +19,8 @@ import os
from oslo_config import fixture as config_fixture
from oslotest import base
import pecan.testing
from sqlalchemy.engine import reflection
from sqlalchemy import create_engine
from sqlalchemy.engine import reflection
from sqlalchemy.schema import (
MetaData,
Table,
@@ -106,7 +106,7 @@ class FunctionalTest(base.BaseTestCase):
trans.commit()
trans.close()
conn.close()
except:
except Exception:
trans.rollback()
conn.close()
raise

@@ -13,8 +13,8 @@
# under the License.

import json
import uuid
from unittest import mock
import uuid

from oslo_config import fixture as config_fixture
import webtest.app

@@ -23,8 +23,8 @@ from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
import webtest.app

from refstack.tests import api
from refstack import db
from refstack.tests import api


class TestProfileEndpoint(api.FunctionalTest):
@@ -56,9 +56,9 @@ class TestProfileEndpoint(api.FunctionalTest):
key_size=1024,
backend=default_backend()
)
signer = key.signer(padding.PKCS1v15(), hashes.SHA256())
signer.update('signature'.encode('utf-8'))
sign = signer.finalize()
sign = key.sign('signature'.encode('utf-8'),
padding.PKCS1v15(),
hashes.SHA256())
pubkey = key.public_key().public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.OpenSSH

@@ -12,10 +12,16 @@
# License for the specific language governing permissions and limitations
# under the License.

import binascii
import json
import uuid
from unittest import mock
import uuid

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from oslo_config import fixture as config_fixture
import webtest.app

@@ -24,13 +30,6 @@ from refstack.api import validators
from refstack import db
from refstack.tests import api

import binascii

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization

FAKE_TESTS_RESULT = {
'cpid': 'foo',
@@ -428,9 +427,7 @@ class TestResultsEndpointNoAnonymous(api.FunctionalTest):
)

def _sign_body_(self, keypair, body):
signer = keypair.signer(padding.PKCS1v15(), hashes.SHA256())
signer.update(body)
return signer.finalize()
return keypair.sign(body, padding.PKCS1v15(), hashes.SHA256())

def _get_public_key_(self, keypair):
pubkey = keypair.public_key().public_bytes(

@@ -13,8 +13,8 @@
# under the License.

import json
import uuid
from unittest import mock
import uuid

from oslo_config import fixture as config_fixture
import webtest.app

@@ -17,19 +17,19 @@

import json
from unittest import mock
from urllib import parse

from oslo_config import fixture as config_fixture
from urllib import parse
import webob.exc

from refstack.api import constants as const
from refstack.api import exceptions as api_exc
from refstack.api.controllers import auth
from refstack.api.controllers import guidelines
from refstack.api.controllers import results
from refstack.api.controllers import user
from refstack.api.controllers import validation
from refstack.api.controllers import vendors
from refstack.api import exceptions as api_exc
from refstack.tests import unit as base


@@ -335,7 +335,8 @@ class GuidelinesControllerTestCase(BaseControllerTestCase):
@mock.patch('refstack.api.guidelines.Guidelines.get_guideline_list')
def test_get_guidelines_error(self, mock_list):
"""Test when there is a problem getting the guideline list and
nothing is returned."""
nothing is returned.
"""
mock_list.return_value = None
self.controller.get()
self.mock_abort.assert_called_with(500, mock.ANY)

@@ -16,13 +16,13 @@
"""Tests for API's utils"""
import time
from unittest import mock
from urllib import parse

import jwt
from oslo_config import fixture as config_fixture
from oslo_utils import timeutils
from oslotest import base
from pecan import rest
import jwt
from urllib import parse
from webob import exc

from refstack.api import constants as const

@@ -162,7 +162,9 @@ class CORSHookTestCase(base.BaseTestCase):

def test_no_origin_header(self):
"""Test when there is no 'Origin' header in the request, in which case,
the request is not cross-origin and doesn't need the CORS headers."""
the request is not cross-origin and doesn't need the CORS headers.
"""

hook = app.CORSHook()
request = pecan.core.Request({})
state = pecan.core.RoutingState(request, pecan.core.Response(), None)

@@ -23,8 +23,8 @@ from oslo_config import fixture as config_fixture
from oslotest import base
import sqlalchemy.orm

from refstack import db
from refstack.api import constants as api_const
from refstack import db
from refstack.db.sqlalchemy import api
from refstack.db.sqlalchemy import models

@@ -178,8 +178,8 @@ class DBBackendTestCase(base.BaseTestCase):
test = mock_test.return_value
test.save = mock.Mock()

session = mock_get_session.return_value
session.begin = mock.MagicMock()
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

test_result = mock_test_result.return_value
test_result.save = mock.Mock()
@@ -189,7 +189,6 @@ class DBBackendTestCase(base.BaseTestCase):
mock_test.assert_called_once_with()
mock_get_session.assert_called_once_with()
test.save.assert_called_once_with(session)
session.begin.assert_called_once_with()

self.assertEqual(test_id, str(_id))
self.assertEqual(test.cpid, fake_tests_result['cpid'])
@@ -202,7 +201,9 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch('refstack.db.sqlalchemy.models.Test')
@mock.patch.object(api, '_to_dict', side_effect=lambda x, *args: x)
def test_get_test_result(self, mock_to_dict, mock_test, mock_get_session):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

session.query = mock.Mock()
query = session.query.return_value
query.filter_by = mock.Mock()
@@ -218,7 +219,8 @@ class DBBackendTestCase(base.BaseTestCase):
filter_by.first.assert_called_once_with()
self.assertEqual(mock_result, actual_result)

session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
session.query = mock.Mock()
query = session.query.return_value
query.filter_by.return_value.first.return_value = None
@@ -227,17 +229,21 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch('refstack.db.sqlalchemy.api.models')
@mock.patch.object(api, 'get_session')
def test_delete_test_result(self, mock_get_session, mock_models):
session = mock_get_session.return_value
test_query = mock.Mock()
test_meta_query = mock.Mock()
test_results_query = mock.Mock()

session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

session.query = mock.Mock(side_effect={
mock_models.Test: test_query,
mock_models.TestMeta: test_meta_query,
mock_models.TestResults: test_results_query
}.get)

db.delete_test_result('fake_id')
session.begin.assert_called_once_with()

test_query.filter_by.return_value.first\
.assert_called_once_with()
test_meta_query.filter_by.return_value.delete\
@@ -247,17 +253,20 @@ class DBBackendTestCase(base.BaseTestCase):
session.delete.assert_called_once_with(
test_query.filter_by.return_value.first.return_value)

mock_get_session.return_value = mock.MagicMock()
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

session.query.return_value\
.filter_by.return_value\
.first.return_value = None

self.assertRaises(api.NotFound, db.delete_test_result, 'fake_id')

@mock.patch.object(api, 'get_session')
@mock.patch.object(api, '_to_dict', side_effect=lambda x: x)
def test_update_test_result(self, mock_to_dict, mock_get_session):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
mock_test = mock.Mock()
session.query.return_value.filter_by.return_value\
.first.return_value = mock_test
@@ -267,18 +276,20 @@ class DBBackendTestCase(base.BaseTestCase):

mock_get_session.assert_called_once_with()
mock_test.save.assert_called_once_with(session=session)
session.begin.assert_called_once_with()

@mock.patch('refstack.db.sqlalchemy.api.models')
@mock.patch.object(api, 'get_session')
def test_get_test_result_meta_key(self, mock_get_session, mock_models):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
session.query.return_value\
.filter_by.return_value\
.filter_by.return_value\
.first.return_value = mock.Mock(value=42)
self.assertEqual(
42, db.get_test_result_meta_key('fake_id', 'fake_key'))
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
session.query.return_value\
.filter_by.return_value\
.filter_by.return_value\
@@ -289,7 +300,8 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch('refstack.db.sqlalchemy.api.models')
@mock.patch.object(api, 'get_session')
def test_save_test_result_meta_item(self, mock_get_session, mock_models):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
mock_meta_item = mock.Mock()
session.query.return_value\
.filter_by.return_value\
@@ -299,9 +311,10 @@ class DBBackendTestCase(base.BaseTestCase):
self.assertEqual('fake_id', mock_meta_item.test_id)
self.assertEqual('fake_key', mock_meta_item.meta_key)
self.assertEqual(42, mock_meta_item.value)
session.begin.assert_called_once_with()
mock_meta_item.save.assert_called_once_with(session)

session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
session.query.return_value\
.filter_by.return_value\
.filter_by.return_value\
@@ -316,16 +329,18 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch('refstack.db.sqlalchemy.api.models')
@mock.patch.object(api, 'get_session')
def test_delete_test_result_meta_item(self, mock_get_session, mock_models):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
mock_meta_item = mock.Mock()
session.query.return_value\
.filter_by.return_value\
.filter_by.return_value\
.first.return_value = mock_meta_item
db.delete_test_result_meta_item('fake_id', 'fake_key')
session.begin.assert_called_once_with()
session.delete.assert_called_once_with(mock_meta_item)

session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
session.query.return_value\
.filter_by.return_value\
.filter_by.return_value\
@@ -339,7 +354,8 @@ class DBBackendTestCase(base.BaseTestCase):
def test_get_test_results(self, mock_test_result, mock_get_session):
mock_test_result.name = mock.Mock()

session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
session.query = mock.Mock()
query = session.query.return_value
query.filter_by = mock.Mock()
@@ -460,7 +476,9 @@ class DBBackendTestCase(base.BaseTestCase):
api_const.CPID: 'fake3'
}

session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

first_query = session.query.return_value
second_query = mock_apply.return_value
ordered_query = second_query.order_by.return_value
@@ -487,7 +505,8 @@ class DBBackendTestCase(base.BaseTestCase):
mock_apply):

filters = mock.Mock()
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
query = session.query.return_value
apply_result = mock_apply.return_value
apply_result.count.return_value = 999
@@ -499,12 +518,12 @@ class DBBackendTestCase(base.BaseTestCase):
mock_apply.assert_called_once_with(query, filters)
apply_result.count.assert_called_once_with()

@mock.patch.object(api, 'get_session',
return_value=mock.Mock(name='session'),)
@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.models.User')
def test_user_get(self, mock_model, mock_get_session):
user_openid = 'user@example.com'
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
query = session.query.return_value
filtered = query.filter_by.return_value
user = filtered.first.return_value
@@ -516,12 +535,12 @@ class DBBackendTestCase(base.BaseTestCase):
query.filter_by.assert_called_once_with(openid=user_openid)
filtered.first.assert_called_once_with()

@mock.patch.object(api, 'get_session',
return_value=mock.Mock(name='session'),)
@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.models.User')
def test_user_get_none(self, mock_model, mock_get_session):
user_openid = 'user@example.com'
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
query = session.query.return_value
filtered = query.filter_by.return_value
filtered.first.return_value = None
@@ -533,7 +552,10 @@ class DBBackendTestCase(base.BaseTestCase):
def test_user_update_or_create(self, mock_get_user, mock_model,
mock_get_session):
user_info = {'openid': 'user@example.com'}
session = mock_get_session.return_value

session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

user = mock_model.return_value
result = api.user_save(user_info)
self.assertEqual(result, user)
@@ -542,15 +564,14 @@ class DBBackendTestCase(base.BaseTestCase):
mock_get_session.assert_called_once_with()
user.save.assert_called_once_with(session=session)
user.update.assert_called_once_with(user_info)
session.begin.assert_called_once_with()

@mock.patch.object(api, 'get_session',
return_value=mock.Mock(name='session'),)
@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.models.PubKey')
def test_get_pubkey(self, mock_model, mock_get_session):
key = 'AAAAB3Nz'
khash = hashlib.md5(base64.b64decode(key.encode('ascii'))).hexdigest()
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
query = session.query.return_value
filtered = query.filter_by.return_value

@@ -577,7 +598,8 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.api.models')
def test_store_pubkey(self, mock_models, mock_get_session):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
pubkey_info = {
'openid': 'fake_id',
'format': 'ssh-rsa',
@@ -612,7 +634,9 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.api.models')
def test_delete_pubkey(self, mock_models, mock_get_session):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session

db.delete_pubkey('key_id')
key = session\
.query.return_value\
@@ -622,14 +646,14 @@ class DBBackendTestCase(base.BaseTestCase):
session.query.return_value.filter_by.assert_called_once_with(
id='key_id')
session.delete.assert_called_once_with(key)
session.begin.assert_called_once_with()

@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.api.models')
@mock.patch.object(api, '_to_dict', side_effect=lambda x: x)
def test_get_user_pubkeys(self, mock_to_dict, mock_models,
mock_get_session):
session = mock_get_session.return_value
session = mock.Mock()
mock_get_session.return_value.__enter__.return_value = session
actual_keys = db.get_user_pubkeys('user_id')
keys = session \
.query.return_value \
@@ -643,18 +667,19 @@ class DBBackendTestCase(base.BaseTestCase):
@mock.patch.object(api, 'get_session')
@mock.patch('refstack.db.sqlalchemy.models.UserToGroup')
|
||||
def test_add_user_to_group(self, mock_model, mock_get_session):
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
api.add_user_to_group('user-123', 'GUID', 'user-321')
|
||||
|
||||
mock_model.assert_called_once_with()
|
||||
mock_get_session.assert_called_once_with()
|
||||
mock_model.return_value.save.assert_called_once_with(session=session)
|
||||
session.begin.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.api.models')
|
||||
def test_remove_user_from_group(self, mock_models, mock_get_session):
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
db.remove_user_from_group('user-123', 'GUID')
|
||||
|
||||
session.query.assert_called_once_with(mock_models.UserToGroup)
|
||||
@ -662,7 +687,6 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
mock.call(user_openid='user-123'),
|
||||
mock.call().filter_by(group_id='GUID'),
|
||||
mock.call().filter_by().delete(synchronize_session=False)))
|
||||
session.begin.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Organization')
|
||||
@ -674,7 +698,8 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
mock_get_session):
|
||||
|
||||
organization_info = {'name': 'a', 'description': 'b', 'type': 1}
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
organization = mock_model_organization.return_value
|
||||
result = api.add_organization(organization_info, 'user-123')
|
||||
self.assertEqual(result, organization)
|
||||
@ -692,7 +717,6 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
group.save.assert_called_once_with(session=session)
|
||||
user_to_group = mock_model_user_to_group.return_value
|
||||
user_to_group.save.assert_called_once_with(session=session)
|
||||
session.begin.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Product')
|
||||
@ -700,7 +724,8 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
@mock.patch.object(api, '_to_dict', side_effect=lambda x: x)
|
||||
def test_product_add(self, mock_to_dict, mock_version,
|
||||
mock_product, mock_get_session):
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
version = mock_version.return_value
|
||||
product = mock_product.return_value
|
||||
product_info = {'product_ref_id': 'hash_or_guid', 'name': 'a',
|
||||
@ -716,7 +741,6 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
|
||||
mock_get_session.assert_called_once_with()
|
||||
product.save.assert_called_once_with(session=session)
|
||||
session.begin.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Product')
|
||||
@ -727,7 +751,8 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Product.save')
|
||||
def test_product_update(self, mock_product_save, mock_get_session):
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
query = session.query.return_value
|
||||
filtered = query.filter_by.return_value
|
||||
product = models.Product()
|
||||
@ -748,16 +773,15 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
|
||||
mock_get_session.assert_called_once_with()
|
||||
mock_product_save.assert_called_once_with(session=session)
|
||||
session.begin.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session',
|
||||
return_value=mock.Mock(name='session'),)
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Organization')
|
||||
@mock.patch.object(api, '_to_dict', side_effect=lambda x, allowed_keys: x)
|
||||
def test_organization_get(self, mock_to_dict, mock_model,
|
||||
mock_get_session):
|
||||
organization_id = 12345
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
query = session.query.return_value
|
||||
filtered = query.filter_by.return_value
|
||||
organization = filtered.first.return_value
|
||||
@ -769,13 +793,13 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
query.filter_by.assert_called_once_with(id=organization_id)
|
||||
filtered.first.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session',
|
||||
return_value=mock.Mock(name='session'),)
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Product')
|
||||
@mock.patch.object(api, '_to_dict', side_effect=lambda x, allowed_keys: x)
|
||||
def test_product_get(self, mock_to_dict, mock_model, mock_get_session):
|
||||
_id = 12345
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
query = session.query.return_value
|
||||
filtered = query.filter_by.return_value
|
||||
product = filtered.first.return_value
|
||||
@ -790,7 +814,8 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.api.models')
|
||||
def test_product_delete(self, mock_models, mock_get_session):
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
db.delete_product('product_id')
|
||||
|
||||
session.query.return_value.filter_by.assert_has_calls((
|
||||
@ -799,14 +824,13 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
session.query.return_value.filter_by.assert_has_calls((
|
||||
mock.call(id='product_id'),
|
||||
mock.call().delete(synchronize_session=False)))
|
||||
session.begin.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(api, 'get_session',
|
||||
return_value=mock.Mock(name='session'),)
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.api.models')
|
||||
def test_get_organization_users(self, mock_models, mock_get_session):
|
||||
organization_id = 12345
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
query = session.query.return_value
|
||||
filtered = query.filter_by.return_value
|
||||
filtered.first.return_value.group_id = 'foo'
|
||||
@ -830,13 +854,13 @@ class DBBackendTestCase(base.BaseTestCase):
|
||||
session.query.assert_any_call(mock_models.UserToGroup,
|
||||
mock_models.User)
|
||||
|
||||
@mock.patch.object(api, 'get_session',
|
||||
return_value=mock.Mock(name='session'),)
|
||||
@mock.patch.object(api, 'get_session')
|
||||
@mock.patch('refstack.db.sqlalchemy.models.Organization')
|
||||
@mock.patch.object(api, '_to_dict', side_effect=lambda x, allowed_keys: x)
|
||||
def test_organizations_get(self, mock_to_dict, mock_model,
|
||||
mock_get_session):
|
||||
session = mock_get_session.return_value
|
||||
session = mock.Mock()
|
||||
mock_get_session.return_value.__enter__.return_value = session
|
||||
query = session.query.return_value
|
||||
ordered = query.order_by.return_value
|
||||
organizations = ordered.all.return_value
|
||||
|
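
Note on the substitution repeated throughout DBBackendTestCase above: replacing "session = mock_get_session.return_value" with a mock wired through __enter__ is what lets the tests match code that now opens the session with a "with" block. A minimal sketch of the idea, assuming a hypothetical helper shaped like delete_item() below (all names here are illustrative, not refstack code):

    from unittest import mock

    def delete_item(get_session, item_id):
        # Assumed production shape: the session is used as a context manager.
        with get_session() as session:
            item = session.query('Item').filter_by(id=item_id).first()
            session.delete(item)

    # MagicMock supports the context-manager protocol, so __enter__ can be
    # pointed at the fake session the assertions will inspect.
    fake_session = mock.Mock()
    fake_get_session = mock.MagicMock()
    fake_get_session.return_value.__enter__.return_value = fake_session

    delete_item(fake_get_session, 'fake_id')
    fake_session.delete.assert_called_once()

Because "with get_session() as session:" resolves the session through __enter__, a test that only configures return_value would hand the code under test a different object than the one it asserts against.
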
@ -15,9 +15,9 @@

"""Tests for refstack's migrations."""

import alembic
from unittest import mock

import alembic
from oslotest import base

from refstack.db import migration
@ -112,9 +112,7 @@ class TestResultValidatorTestCase(base.BaseTestCase):
key_size=1024,
backend=default_backend()
)
signer = key.signer(padding.PKCS1v15(), hashes.SHA256())
signer.update(request.body)
sign = signer.finalize()
sign = key.sign(request.body, padding.PKCS1v15(), hashes.SHA256())
pubkey = key.public_key().public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.OpenSSH
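
The validator test above drops the removed signer()/update()/finalize() flow in favour of the one-shot RSA signing call. A minimal sketch of that API as it looks in recent cryptography releases (throwaway key and data, not the test's actual fixtures):

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa

    # Throwaway key; recent releases no longer require an explicit backend.
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

    data = b'fake request body'

    # One-shot signing replaces the streaming signer object.
    signature = key.sign(data, padding.PKCS1v15(), hashes.SHA256())

    # One-shot verification; raises InvalidSignature if the check fails.
    public_key = key.public_key()
    public_key.verify(signature, data, padding.PKCS1v15(), hashes.SHA256())
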
@ -1,13 +1,13 @@
coverage>=3.6
pep8==1.5.7
pyflakes==0.8.1
flake8==2.2.4
hacking>=3.0.1,<3.1.0;python_version>='3.5' # Apache-2.0
pycodestyle>=2.0.0,<2.6.0 # MIT
flake8-import-order==0.11 # LGPLv3

docutils>=0.11 # OSI-Approved Open Source, Public Domain
httmock>=1.2.4
oslotest>=1.2.0 # Apache-2.0
python-subunit>=0.0.18
stestr>=1.1.0 # Apache-2.0
testtools>=0.9.34
pep257>=0.5.0
PyMySQL>=0.6.2,!=0.6.4
WebTest>=3.0.0
@ -22,12 +22,12 @@ Test result update & verify script for the local refstack database.
import argparse
import datetime
import json
import jwt
import os
import requests
import sys

from collections import namedtuple
import jwt
import requests


def generate_token(keyfile, _id):
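
The import reshuffle above (and the similar one in the migrations test) puts standard-library modules ahead of third-party packages, which is the ordering that the import-order-style = pep8 setting added to the [flake8] section of tox.ini below checks. A small illustrative sketch of the expected grouping (module names chosen only for illustration):

    # Standard library first, alphabetized within the group.
    import argparse
    import json
    import sys

    # Third-party packages form their own group after a blank line.
    import jwt
    import requests
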
25 tox.ini

@ -4,14 +4,17 @@ minversion = 3.18
skipsdist = True

[testenv]
basepython = python3
usedevelop = True
install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt} -U {opts} {packages}
setenv = VIRTUAL_ENV={envdir}
setenv =
VIRTUAL_ENV={envdir}
LANG=en_US.UTF-8
LANGUAGE=en_US:en
LC_ALL=C
allowlist_externals = find
allowlist_externals =
find
{toxinidir}/setup-mysql-tests.sh
{toxinidir}/tools/cover.sh
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands =
@ -32,7 +35,6 @@ commands = {toxinidir}/setup-mysql-tests.sh stestr run --test-path ./refstack/te
commands =
flake8 {posargs}
flake8 --filename=refstack* bin
pep257 refstack
distribute = false

[testenv:genconfig]
@ -54,13 +56,18 @@ deps = -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/
commands = sphinx-build -b html doc/source doc/build/html

[flake8]
# E125 continuation line does not distinguish itself from next logical line
# E125 is a won't fix until https://github.com/jcrocholl/pep8/issues/126 is resolved. For further detail see https://review.opendev.org/#/c/36788/
# E123 skipped because it is ignored by default in the default pep8
# E129 skipped because it is too limiting when combined with other rules
# W504 skipped because it is overeager and unnecessary
# H404 multi line docstring should start with a summary
ignore = E125,H404
enable-extensions = H203
show-source = true
# H405 skipped because it arbitrarily forces doctring "title" lines
ignore = E125,E123,E129,W504,H404,H405
show-source = True
exclude = .git,.venv,.tox,dist,doc,*egg,build,*lib/python*,*alembic/versions*
enable-extensions = H106,H203,H904
import-order-style = pep8
builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build

[testenv:pip-check-reqs]
# Do not install test-requirements as that will pollute the virtualenv for