Corrections for stricter pep8 checking.

Updates the code to comply with the stricter checks in pep8 1.3+ and adds a
.pep8 config file to silence unnecessary continuation-indentation errors.
Also removes the "reverse bugfix" monkeypatch, since Django 1.4 is now
required.

Implements blueprint upgrade-pep8.

Change-Id: I7343321627d8ccd1598f39323821133168a645cc
parent 7ce7905518
commit 820fdfe868
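The bulk of the diff below applies two mechanical fixes: "== None" comparisons become "is None" (pep8's E711 check), and backslash line continuations inside brackets are dropped in favor of implicit continuation. A minimal, self-contained before/after sketch (illustrative only; the server_usages sample data is invented here, not taken from the tree):

    # Sample data shaped like the usage records the real code filters over;
    # the values are made up for illustration.
    server_usages = [{'ended_at': None}, {'ended_at': '2012-06-26T12:00:00'}]

    # Old style, as it appeared before this patch: E711 comparison to None
    # and a redundant backslash continuation inside the brackets.
    active = [s for s in server_usages \
              if s['ended_at'] == None]

    # Style used after this patch: identity test with 'is None' and implicit
    # continuation inside the brackets.
    active = [s for s in server_usages
              if s['ended_at'] is None]

    print(len(active))  # 1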
.pep8 (new file, +4)
@@ -0,0 +1,4 @@
+[pep8]
+ignore = E121,E126,E127,E128,W602
+exclude = vcsversion.py,panel_template,dash_template,local_settings.py
@@ -39,11 +39,6 @@ except ImportError:
     warnings.warn(msg, Warning)
 
 if Horizon:
-    # This can be removed once the upstream bug is fixed.
-    import django
-    if django.VERSION < (1, 4):
-        from horizon.utils import reverse_bugfix
-
     register = Horizon.register
     unregister = Horizon.unregister
     get_absolute_url = Horizon.get_absolute_url
@@ -127,12 +127,12 @@ class Usage(APIResourceWrapper):
 
     @property
     def total_active_instances(self):
-        return sum(1 for s in self.server_usages if s['ended_at'] == None)
+        return sum(1 for s in self.server_usages if s['ended_at'] is None)
 
     @property
     def vcpus(self):
         return sum(s['vcpus'] for s in self.server_usages
-                   if s['ended_at'] == None)
+                   if s['ended_at'] is None)
 
     @property
     def vcpu_hours(self):
@@ -141,12 +141,12 @@ class Usage(APIResourceWrapper):
     @property
     def local_gb(self):
         return sum(s['local_gb'] for s in self.server_usages
-                   if s['ended_at'] == None)
+                   if s['ended_at'] is None)
 
     @property
     def memory_mb(self):
         return sum(s['memory_mb'] for s in self.server_usages
-                   if s['ended_at'] == None)
+                   if s['ended_at'] is None)
 
     @property
     def disk_gb_hours(self):
@@ -164,8 +164,8 @@ class SecurityGroup(APIResourceWrapper):
         """Wraps transmitted rule info in the novaclient rule class."""
         if "_rules" not in self.__dict__:
             manager = nova_rules.SecurityGroupRuleManager
-            self._rules = [nova_rules.SecurityGroupRule(manager, rule) for \
-                           rule in self._apiresource.rules]
+            self._rules = [nova_rules.SecurityGroupRule(manager, rule)
+                           for rule in self._apiresource.rules]
         return self.__dict__['_rules']
 
     @rules.setter
@@ -310,8 +310,8 @@ def server_list(request, search_opts=None, all_tenants=False):
         search_opts['all_tenants'] = True
     else:
         search_opts['project_id'] = request.user.tenant_id
-    return [Server(s, request) for s in novaclient(request).\
-                               servers.list(True, search_opts)]
+    return [Server(s, request)
+            for s in novaclient(request).servers.list(True, search_opts)]
 
 
 def server_console_output(request, instance_id, tail_length=None):
@@ -439,18 +439,17 @@ def tenant_quota_usages(request):
 
 
 def security_group_list(request):
-    return [SecurityGroup(g) for g in novaclient(request).\
-                             security_groups.list()]
+    return [SecurityGroup(g) for g
+            in novaclient(request).security_groups.list()]
 
 
-def security_group_get(request, security_group_id):
-    return SecurityGroup(novaclient(request).\
-                         security_groups.get(security_group_id))
+def security_group_get(request, sg_id):
+    return SecurityGroup(novaclient(request).security_groups.get(sg_id))
 
 
-def security_group_create(request, name, description):
-    return SecurityGroup(novaclient(request).\
-                         security_groups.create(name, description))
+def security_group_create(request, name, desc):
+    return SecurityGroup(novaclient(request).security_groups.create(name,
+                                                                    desc))
 
 
 def security_group_delete(request, security_group_id):
@@ -460,13 +459,13 @@ def security_group_delete(request, security_group_id):
 def security_group_rule_create(request, parent_group_id, ip_protocol=None,
                                from_port=None, to_port=None, cidr=None,
                                group_id=None):
-    return SecurityGroupRule(novaclient(request).\
-                             security_group_rules.create(parent_group_id,
-                                                         ip_protocol,
-                                                         from_port,
-                                                         to_port,
-                                                         cidr,
-                                                         group_id))
+    sg = novaclient(request).security_group_rules.create(parent_group_id,
+                                                         ip_protocol,
+                                                         from_port,
+                                                         to_port,
+                                                         cidr,
+                                                         group_id)
+    return SecurityGroupRule(sg)
 
 
 def security_group_rule_delete(request, security_group_rule_id):
@@ -117,11 +117,13 @@ def swift_filter_objects(request, filter_string, container_name, prefix=None,
                                         path=path)
     filter_string_list = filter_string.lower().strip().split(' ')
 
-    return filter(lambda obj: any([
-        obj.content_type != "application/directory"
-        and wildcard_search(obj.name.lower(), q)
-        for q in filter_string_list if q != ''
-    ]), objects)
+    def matches_filter(obj):
+        if obj.content_type == "application/directory":
+            return False
+        for q in filter_string_list:
+            return wildcard_search(obj.name.lower(), q)
+
+    return filter(matches_filter, objects)
 
 
 def wildcard_search(string, q):
@@ -112,10 +112,10 @@ class AddRule(forms.SelfHandlingForm):
         source_group = cleaned_data.get("source_group", None)
 
         if ip_proto == 'icmp':
-            if from_port == None:
+            if from_port is None:
                 msg = _('The ICMP type is invalid.')
                 raise ValidationError(msg)
-            if to_port == None:
+            if to_port is None:
                 msg = _('The ICMP code is invalid.')
                 raise ValidationError(msg)
             if from_port not in xrange(-1, 256):
@@ -125,10 +125,10 @@ class AddRule(forms.SelfHandlingForm):
                 msg = _('The ICMP code not in range (-1, 255)')
                 raise ValidationError(msg)
         else:
-            if from_port == None:
+            if from_port is None:
                 msg = _('The "from" port number is invalid.')
                 raise ValidationError(msg)
-            if to_port == None:
+            if to_port is None:
                 msg = _('The "to" port number is invalid.')
                 raise ValidationError(msg)
             if to_port < from_port:
@@ -159,8 +159,8 @@ class AddRule(forms.SelfHandlingForm):
                                                   data['to_port'],
                                                   data['cidr'],
                                                   data['source_group'])
-            messages.success(request, _('Successfully added rule: %s') \
-                             % unicode(rule))
+            messages.success(request,
+                             _('Successfully added rule: %s') % unicode(rule))
         except:
             exceptions.handle(request,
                               _('Unable to add rule to security group.'))
@@ -105,8 +105,9 @@ class ImagesTable(tables.DataTable):
         ("killed", False),
         ("deleted", False),
     )
-    name = tables.Column("name", link="horizon:nova:images_and_snapshots:" \
-                                      "images:detail",
+    name = tables.Column("name",
+                         link=("horizon:nova:images_and_snapshots:"
+                               "images:detail"),
                          verbose_name=_("Image Name"))
     image_type = tables.Column(get_image_type,
                                verbose_name=_("Type"),
@@ -48,8 +48,7 @@ class ImageViewTests(test.TestCase):
                      'minimum_disk': 15,
                      'minimum_ram': 512,
                      'is_public': 1,
-                     'method': 'CreateImageForm'
-                     }
+                     'method': 'CreateImageForm'}
 
         api.glance.image_create(IsA(http.HttpRequest),
                                 container_format="bare",
@@ -255,8 +255,9 @@ class InstancesTable(tables.DataTable):
     TASK_DISPLAY_CHOICES = (
         ("image_snapshot", "Snapshotting"),
     )
-    name = tables.Column("name", link="horizon:nova:instances_and_volumes:" \
-                                      "instances:detail",
+    name = tables.Column("name",
+                         link=("horizon:nova:instances_and_volumes:"
+                               "instances:detail"),
                          verbose_name=_("Instance Name"))
     ip = tables.Column(get_ips, verbose_name=_("IP Address"))
     size = tables.Column(get_size, verbose_name=_("Size"))
@@ -109,7 +109,7 @@ class VolumeOptionsAction(workflows.Action):
     def populate_volume_id_choices(self, request, context):
         volume_options = [("", _("Select Volume"))]
         try:
-            volumes = [v for v in api.nova.volume_list(self.request) \
+            volumes = [v for v in api.nova.volume_list(self.request)
                        if v.status == api.VOLUME_STATE_AVAILABLE]
             volume_options.extend([self._get_volume_display_name(vol)
                                    for vol in volumes])
@@ -122,7 +122,7 @@ class VolumeOptionsAction(workflows.Action):
         volume_options = [("", _("Select Volume Snapshot"))]
         try:
             snapshots = api.nova.volume_snapshot_list(self.request)
-            snapshots = [s for s in snapshots \
+            snapshots = [s for s in snapshots
                          if s.status == api.VOLUME_STATE_AVAILABLE]
             volume_options.extend([self._get_volume_display_name(snap)
                                    for snap in snapshots])
@@ -54,8 +54,8 @@ class IndexView(tables.MultiTableView):
         if instances:
             try:
                 flavors = api.flavor_list(self.request)
-                full_flavors = SortedDict([(str(flavor.id), flavor) for \
-                                           flavor in flavors])
+                full_flavors = SortedDict([(str(flavor.id), flavor)
+                                           for flavor in flavors])
                 for instance in instances:
                     flavor_id = instance.flavor["id"]
                     instance.full_flavor = full_flavors[flavor_id]
@@ -64,9 +64,8 @@ class VolumeViewTests(test.TestCase):
                       args=[volume.id])
         res = self.client.get(url)
 
-        self.assertEqual(res.context['form'].\
-                         fields['instance']._choices[0][1],
-                         "Select an instance")
+        self.assertEqual(res.context['form'].fields['instance']._choices[0][1],
+                         "Select an instance")
         self.assertEqual(len(res.context['form'].fields['instance'].choices),
                          2)
         self.assertEqual(res.context['form'].fields['instance']._choices[1][0],
@@ -90,8 +89,10 @@ class VolumeViewTests(test.TestCase):
         res = self.client.get(url)
 
         self.assertContains(res, "<dd>Volume name</dd>", 1, 200)
-        self.assertContains(res, "<dd>41023e92-8008-4c8b-8059-" \
-                                 "7f2293ff3775</dd>", 1, 200)
+        self.assertContains(res,
+                            "<dd>41023e92-8008-4c8b-8059-7f2293ff3775</dd>",
+                            1,
+                            200)
         self.assertContains(res, "<dd>Available</dd>", 1, 200)
         self.assertContains(res, "<dd>40 GB</dd>", 1, 200)
         self.assertContains(res, "<a href=\"/nova/instances_and_volumes/"
@@ -68,8 +68,9 @@ class SyspanelInstancesTable(tables.DataTable):
     host = tables.Column("OS-EXT-SRV-ATTR:host",
                          verbose_name=_("Host"),
                          classes=('nowrap-col',))
-    name = tables.Column("name", link="horizon:nova:instances_and_volumes:" \
-                                      "instances:detail",
+    name = tables.Column("name",
+                         link=("horizon:nova:instances_and_volumes:"
+                               "instances:detail"),
                          verbose_name=_("Instance Name"))
     ip = tables.Column(get_ips, verbose_name=_("IP Address"))
     size = tables.Column(get_size,
@@ -119,19 +119,19 @@ class UpdateQuotas(forms.SelfHandlingForm):
     floating_ips = forms.IntegerField(label=_("Floating IPs"))
 
     def handle(self, request, data):
+        ifcb = data['injected_file_content_bytes']
         try:
             api.nova.tenant_quota_update(request,
                                          data['tenant_id'],
                                          metadata_items=data['metadata_items'],
-                                         injected_file_content_bytes=data['injected_file_content_bytes'],
+                                         injected_file_content_bytes=ifcb,
                                          volumes=data['volumes'],
                                          gigabytes=data['gigabytes'],
                                          ram=data['ram'],
                                          floating_ips=data['floating_ips'],
                                          instances=data['instances'],
                                          injected_files=data['injected_files'],
-                                         cores=data['cores'],
-                                         )
+                                         cores=data['cores'])
             messages.success(request,
                              _('Quotas for %s were successfully updated.')
                              % data['tenant_id'])
@@ -128,7 +128,7 @@ class UpdateUserForm(BaseUserForm):
     def __init__(self, request, *args, **kwargs):
         super(UpdateUserForm, self).__init__(request, *args, **kwargs)
 
-        if api.keystone_can_edit_user() == False:
+        if api.keystone_can_edit_user() is False:
             for field in ('name', 'email', 'password', 'confirm_password'):
                 self.fields.pop(field)
 
@@ -751,7 +751,7 @@ class DataTableMetaclass(type):
         # (list() call gives deterministic sort order, which sets don't have.)
         actions = list(set(opts.row_actions) | set(opts.table_actions))
         actions.sort(key=attrgetter('name'))
-        actions_dict = SortedDict([(action.name, action()) \
+        actions_dict = SortedDict([(action.name, action())
                                    for action in actions])
         attrs['base_actions'] = actions_dict
         if opts._filter_action:
@@ -198,5 +198,4 @@ class ComputeApiTests(test.APITestCase):
                            'floating_ips': {'available': -1,
                                             'used': 2,
                                             'flavor_fields': [],
-                                            'quota': 1}
-                           })
+                                            'quota': 1}})
@@ -128,17 +128,17 @@ LOGGING = {
(whitespace-only hunk: the lines below were re-indented; their text is unchanged)
        'null': {
            'level': 'DEBUG',
            'class': 'django.utils.log.NullHandler',
        },
        'test': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
        }
    },
    'loggers': {
        'django.db.backends': {
            'handlers': ['null'],
            'propagate': False,
        },
        'horizon': {
            'handlers': ['test'],
            'propagate': False,
@@ -1,183 +0,0 @@
-"""
-Bugfix for issue #15900: https://code.djangoproject.com/ticket/15900.
-
-This code is largely reproduced from
-https://code.djangoproject.com/browser/django/trunk/django/core/urlresolvers.py
-and is the work of Django's authors:
-https://code.djangoproject.com/browser/django/trunk/AUTHORS
-
-It is licensed under Django's BSD license, available here:
-https://code.djangoproject.com/browser/django/trunk/LICENSE
-
-To use, simply import this code in your project's root URLconf file before
-defining any URL patterns.
-"""
-
-from django.core import urlresolvers
-
-if not hasattr(urlresolvers.RegexURLResolver, "_reverse_with_prefix"):
-    import re
-
-    from django.conf import urls
-    from django.utils.datastructures import MultiValueDict
-    from django.utils.encoding import iri_to_uri, force_unicode
-    from django.utils.regex_helper import normalize
-
-    def _populate(self):
-        lookups = MultiValueDict()
-        namespaces = {}
-        apps = {}
-        for pattern in reversed(self.url_patterns):
-            p_pattern = pattern.regex.pattern
-            if p_pattern.startswith('^'):
-                p_pattern = p_pattern[1:]
-            if isinstance(pattern, urlresolvers.RegexURLResolver):
-                if pattern.namespace:
-                    namespaces[pattern.namespace] = (p_pattern, pattern)
-                    if pattern.app_name:
-                        apps.setdefault(pattern.app_name, []) \
-                            .append(pattern.namespace)
-                else:
-                    parent = normalize(pattern.regex.pattern)
-                    for name in pattern.reverse_dict:
-                        for matches, pat, defaults in \
-                                pattern.reverse_dict.getlist(name):
-                            new_matches = []
-                            for piece, p_args in parent:
-                                vals = [(piece + suffix, p_args + args) for \
-                                        (suffix, args) in matches]
-                                new_matches.extend(vals)
-                            lookup_list = (new_matches, p_pattern + pat,
-                                           dict(defaults,
-                                                **pattern.default_kwargs))
-                            lookups.appendlist(name, lookup_list)
-                    for namespace, (prefix, sub_pattern) in \
-                            pattern.namespace_dict.items():
-                        namespace_vals = (p_pattern + prefix, sub_pattern)
-                        namespaces[namespace] = namespace_vals
-                    for app_name, namespace_list in pattern.app_dict.items():
-                        apps.setdefault(app_name, []).extend(namespace_list)
-            else:
-                bits = normalize(p_pattern)
-                lookup_list = (bits, p_pattern, pattern.default_args)
-                lookups.appendlist(pattern.callback, lookup_list)
-                if pattern.name is not None:
-                    lookup_list = (bits, p_pattern, pattern.default_args)
-                    lookups.appendlist(pattern.name, lookup_list)
-        self._reverse_dict = lookups
-        self._namespace_dict = namespaces
-        self._app_dict = apps
-
-    def resolver_reverse(self, lookup_view, *args, **kwargs):
-        return self._reverse_with_prefix(lookup_view, '', *args, **kwargs)
-
-    def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
-        if args and kwargs:
-            raise ValueError("Don't mix *args and **kwargs in call to "
-                             "reverse()!")
-        try:
-            lookup_view = urlresolvers.get_callable(lookup_view, True)
-        except (ImportError, AttributeError), e:
-            raise urlresolvers.NoReverseMatch("Error importing '%s': %s."
-                                              % (lookup_view, e))
-        possibilities = self.reverse_dict.getlist(lookup_view)
-        prefix_norm, prefix_args = normalize(_prefix)[0]
-        for possibility, pattern, defaults in possibilities:
-            for result, params in possibility:
-                if args:
-                    if len(args) != len(params) + len(prefix_args):
-                        continue
-                    unicode_args = [force_unicode(val) for val in args]
-                    candidate = (prefix_norm + result) \
-                            % dict(zip(prefix_args + params, unicode_args))
-                else:
-                    if set(kwargs.keys() + defaults.keys()) != \
-                            set(params + defaults.keys() + prefix_args):
-                        continue
-                    matches = True
-                    for k, v in defaults.items():
-                        if kwargs.get(k, v) != v:
-                            matches = False
-                            break
-                    if not matches:
-                        continue
-                    unicode_kwargs = dict([(k, force_unicode(v)) for \
-                                           (k, v) in kwargs.items()])
-                    candidate = (prefix_norm + result) % unicode_kwargs
-                if re.search(u'^%s%s' % (_prefix, pattern),
-                             candidate, re.UNICODE):
-                    return candidate
-        # lookup_view can be URL label, or dotted path, or callable, Any of
-        # these can be passed in at the top, but callables are not friendly in
-        # error messages.
-        m = getattr(lookup_view, '__module__', None)
-        n = getattr(lookup_view, '__name__', None)
-        if m is not None and n is not None:
-            lookup_view_s = "%s.%s" % (m, n)
-        else:
-            lookup_view_s = lookup_view
-        raise urlresolvers.NoReverseMatch("Reverse for '%s' with "
-                                          "arguments '%s' and keyword "
-                                          "arguments '%s' not found."
-                                          % (lookup_view_s, args, kwargs))
-
-    def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None,
-                current_app=None):
-        if urlconf is None:
-            urlconf = urlresolvers.get_urlconf()
-        resolver = urlresolvers.get_resolver(urlconf)
-        args = args or []
-        kwargs = kwargs or {}
-
-        if prefix is None:
-            prefix = urlresolvers.get_script_prefix()
-
-        if not isinstance(viewname, basestring):
-            view = viewname
-        else:
-            parts = viewname.split(':')
-            parts.reverse()
-            view = parts[0]
-            path = parts[1:]
-
-            resolved_path = []
-            while path:
-                ns = path.pop()
-
-                # Lookup the name to see if it could be an app identifier
-                try:
-                    app_list = resolver.app_dict[ns]
-                    # Yes! Path part matches an app in the current Resolver
-                    if current_app and current_app in app_list:
-                        # If we are reversing for a particular app,
-                        # use that namespace
-                        ns = current_app
-                    elif ns not in app_list:
-                        # The name isn't shared by one of the instances
-                        # (i.e., the default) so just pick the first instance
-                        # as the default.
-                        ns = app_list[0]
-                except KeyError:
-                    pass
-
-                try:
-                    extra, resolver = resolver.namespace_dict[ns]
-                    resolved_path.append(ns)
-                    prefix = prefix + extra
-                except KeyError, key:
-                    if resolved_path:
-                        raise urlresolvers.NoReverseMatch("%s is not a "
-                                "registered namespace inside %s'"
-                                % (key, ':'.join(resolved_path)))
-                    else:
-                        raise urlresolvers.NoReverseMatch("%s is not a "
-                                                          "registered "
-                                                          "namespace" % key)
-
-        return iri_to_uri(resolver._reverse_with_prefix(view, prefix,
-                                                        *args, **kwargs))
-
-    urlresolvers.RegexURLResolver._populate = _populate
-    urlresolvers.RegexURLResolver.reverse = resolver_reverse
-    urlresolvers.RegexURLResolver._reverse_with_prefix = _reverse_with_prefix
-    urlresolvers.reverse = reverse
@@ -6,7 +6,7 @@ set -o errexit
 # Increment me any time the environment should be rebuilt.
 # This includes dependncy changes, directory renames, etc.
 # Simple integer secuence: 1, 2, 3...
-environment_version=20
+environment_version=21
 #--------------------------------------------------------#
 
 function usage {
@@ -124,10 +124,7 @@ function run_pylint {
 function run_pep8 {
   echo "Running pep8 ..."
   rm -f pep8.txt
-  PEP8_EXCLUDE=vcsversion.py,panel_template,dash_template
-  PEP8_IGNORE=W602
-  PEP8_OPTIONS="--exclude=$PEP8_EXCLUDE --ignore=$PEP8_IGNORE --repeat"
-  ${command_wrapper} pep8 $PEP8_OPTIONS $included_dirs | perl -ple 's/: ([WE]\d+)/: [$1]/' > pep8.txt || true
+  ${command_wrapper} pep8 $included_dirs | perl -ple 's/: ([WE]\d+)/: [$1]/' > pep8.txt || true
   PEP8_COUNT=`wc -l pep8.txt | awk '{ print $1 }'`
   if [ $PEP8_COUNT -ge 1 ]; then
     echo "PEP8 violations found ($PEP8_COUNT):"
@@ -9,7 +9,7 @@ nose
 nose-exclude
 nosexcover
 openstack.nose_plugin
-pep8==1.1
+pep8>=1.3
 pylint
 selenium
 