Split project and requirement types out.

These don't need to be hidden within update.py anymore, and we need them
for lint and edit.

Change-Id: I92f4ff51f98f6343cc94a74a69660306b742dbf3

This commit is contained in:
parent bddbdc3532
commit 50eee6fc57

openstack_requirements/project.py (new file, 180 lines)
@@ -0,0 +1,180 @@
# Copyright 2012 OpenStack Foundation
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""The project abstraction."""

import collections
import errno
import io
import os
from six.moves import configparser

from parsley import makeGrammar

from openstack_requirements import requirement

# PURE logic from here until the IO marker below.


_Comment = collections.namedtuple('Comment', ['line'])
_Extra = collections.namedtuple('Extra', ['name', 'content'])


_extras_grammar = """
ini = (line*:p extras?:e line*:l final:s) -> (''.join(p), e, ''.join(l+[s]))
line = ~extras <(~'\\n' anything)* '\\n'>
final = <(~'\\n' anything)* >
extras = '[' 'e' 'x' 't' 'r' 'a' 's' ']' '\\n'+ body*:b -> b
body = comment | extra
comment = <'#' (~'\\n' anything)* '\\n'>:c '\\n'* -> comment(c)
extra = name:n ' '* '=' line:l cont*:c '\\n'* -> extra(n, ''.join([l] + c))
name = <(anything:x ?(x not in '\\n \\t='))+>
cont = ' '+ <(~'\\n' anything)* '\\n'>
"""
_extras_compiled = makeGrammar(
    _extras_grammar, {"comment": _Comment, "extra": _Extra})
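
The compiled grammar splits a setup.cfg into the text before the [extras]
section, a list of _Comment/_Extra nodes for the section body (or None when
there is no such section), and the text after it. A minimal sketch of what a
caller sees; the expected values mirror the TestExtrasParsing cases added
later in this commit:

    content = (
        "[metadata]\n"
        "name = fred\n"
        "\n"
        "[extras]\n"
        "# comment1\n"
        "a =\n"
        " b\n"
        "\n"
        "[entry_points]\n"
        "console_scripts =\n"
        " foo = bar:quux\n"
    )
    prefix, extras_ast, suffix = _extras_compiled(content).ini()
    # prefix     -> "[metadata]\nname = fred\n\n"
    # extras_ast -> [_Comment('# comment1\n'), _Extra('a', '\nb\n')]
    # suffix     -> "[entry_points]\nconsole_scripts =\n foo = bar:quux\n"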


Error = collections.namedtuple('Error', ['message'])
File = collections.namedtuple('File', ['filename', 'content'])
StdOut = collections.namedtuple('StdOut', ['message'])
Verbose = collections.namedtuple('Verbose', ['message'])


def extras(project):
    """Return a dict of extra-name:content for the extras in setup.cfg."""
    c = configparser.SafeConfigParser()
    c.readfp(io.StringIO(project['setup.cfg']))
    if not c.has_section('extras'):
        return dict()
    return dict(c.items('extras'))
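
Note that extras() operates purely on the in-memory project dict produced by
read() below; it never touches the filesystem itself. A small sketch,
mirroring the TestProjectExtras case later in this commit:

    proj = {'setup.cfg': u"[metadata]\nname = fred\n\n[extras]\n1 =\n foo\n"}
    extras(proj)
    # -> {'1': '\nfoo'}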


def merge_setup_cfg(old_content, new_extras):
    # This is ugly. All the existing libraries handle setup.cfg's poorly.
    prefix, extras, suffix = _extras_compiled(old_content).ini()
    out_extras = []
    if extras is not None:
        for extra in extras:
            if type(extra) is _Comment:
                out_extras.append(extra)
            elif type(extra) is _Extra:
                if extra.name not in new_extras:
                    out_extras.append(extra)
                    continue
                e = _Extra(
                    extra.name,
                    requirement.to_content(
                        new_extras[extra.name], ':', ' ', False))
                out_extras.append(e)
            else:
                raise TypeError('unknown type %r' % extra)
    if out_extras:
        extras_str = ['[extras]\n']
        for extra in out_extras:
            if type(extra) is _Comment:
                extras_str.append(extra.line)
            else:
                extras_str.append(extra.name + ' =')
                extras_str.append(extra.content)
        if suffix:
            extras_str.append('\n')
        extras_str = ''.join(extras_str)
    else:
        extras_str = ''
    return prefix + extras_str + suffix
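
A short usage note: new_extras maps an extra name to a
requirement.Requirements value, and only the extras named there are
rewritten; comments and unlisted extras pass through untouched. A hedged
sketch based on the TestMergeSetupCfg case in this commit (old_setup_cfg
stands for the setup.cfg text from that test):

    new = {'a': requirement.Requirements([
        requirement.Requirement('', '', '', ''),
        requirement.Requirement('b', '>=1', "python_version=='2.7'", '')])}
    merged = merge_setup_cfg(old_setup_cfg, new)
    # The [extras] section of merged now reads:
    #   a =
    #    b>=1:python_version=='2.7'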


# IO from here to the end of the file.

def _safe_read(project, filename, output=None):
    if output is None:
        output = project
    try:
        path = project['root'] + '/' + filename
        with io.open(path, 'rt', encoding="utf-8") as f:
            output[filename] = f.read()
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise


def read(root):
    """Read into memory the packaging data for the project at root.

    :param root: A directory path.
    :return: A dict representing the project with the following keys:
     - root: The root dir.
     - setup.py: Contents of setup.py.
     - setup.cfg: Contents of setup.cfg.
     - requirements: Dict of requirement file name: contents.
    """
    result = {'root': root}
    _safe_read(result, 'setup.py')
    _safe_read(result, 'setup.cfg')
    requirements = {}
    result['requirements'] = requirements
    target_files = [
        'requirements.txt', 'tools/pip-requires',
        'test-requirements.txt', 'tools/test-requires',
    ]
    for py_version in (2, 3):
        target_files.append('requirements-py%s.txt' % py_version)
        target_files.append('test-requirements-py%s.txt' % py_version)
    for target_file in target_files:
        _safe_read(result, target_file, output=requirements)
    return result
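
Illustratively, for a checkout laid out like the pbr fixture used in the
tests, read() returns a dict shaped roughly like this (a sketch, not
captured output; the path is hypothetical):

    proj = read('/path/to/checkout')
    # {'root': '/path/to/checkout',
    #  'setup.py': '...file contents...',
    #  'setup.cfg': '...file contents...',
    #  'requirements': {'requirements.txt': '...',
    #                   'test-requirements.txt': '...'}}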


def write(project, actions, stdout, verbose, noop=False):
    """Write actions into project.

    :param project: A project metadata dict.
    :param actions: A list of action tuples - File or Verbose - that describe
        what actions are to be taken.
        Error objects write a message to stdout and trigger an exception at
        the end of write.
        File objects describe a file to have content placed in it.
        StdOut objects describe a message to write to stdout.
        Verbose objects will write a message to stdout when verbose is True.
    :param stdout: Where to write content for stdout.
    :param verbose: If True Verbose actions will be written to stdout.
    :param noop: If True nothing will be written to disk.
    :return None:
    :raises IOError: If the IO operations fail, IOError is raised. If this
        happens some actions may have been applied and others not.
    """
    error = False
    for action in actions:
        if type(action) is Error:
            error = True
            stdout.write(action.message + '\n')
        elif type(action) is File:
            if noop:
                continue
            fullname = project['root'] + '/' + action.filename
            tmpname = fullname + '.tmp'
            with open(tmpname, 'wt') as f:
                f.write(action.content)
            os.rename(tmpname, fullname)
        elif type(action) is StdOut:
            stdout.write(action.message)
        elif type(action) is Verbose:
            if verbose:
                stdout.write(u"%s\n" % (action.message,))
        else:
            raise Exception("Invalid action %r" % (action,))
    if error:
        raise Exception("Error occurred processing %s" % (project['root']))
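
A minimal driving sketch, following the TestWriteProject cases added in this
commit (the directory and file names are illustrative only):

    import io

    proj = {'root': '/tmp/someproject'}
    actions = [
        File('requirements.txt', 'six>=1.9\n'),
        Verbose(u'Synced requirements.txt'),
    ]
    out = io.StringIO()
    write(proj, actions, out, verbose=True)
    # /tmp/someproject/requirements.txt now contains 'six>=1.9\n' and
    # out.getvalue() == u'Synced requirements.txt\n'
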
openstack_requirements/requirement.py (new file, 117 lines)
@@ -0,0 +1,117 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Requirements handling."""

# This module has no IO at all, and none should be added.

import collections

import pkg_resources


# A header for the requirements file(s).
# TODO(lifeless): Remove this once constraints are in use.
_REQS_HEADER = [
    '# The order of packages is significant, because pip processes '
    'them in the order\n',
    '# of appearance. Changing the order has an impact on the overall '
    'integration\n',
    '# process, which may cause wedges in the gate later.\n',
]


Requirement = collections.namedtuple(
    'Requirement', ['package', 'specifiers', 'markers', 'comment'])
Requirements = collections.namedtuple('Requirements', ['reqs'])


def parse(content):
    return to_dict(to_reqs(content))


def parse_line(req_line):
    """Parse a single line of a requirements file.

    requirements files here are a subset of pip requirements files: we don't
    try to parse URL entries, or pip options like -f and -e. Those are not
    permitted in global-requirements.txt. If encountered in a synchronised
    file such as requirements.txt or test-requirements.txt, they are illegal
    but currently preserved as-is.

    They may of course be used by local test configurations, just not
    committed into the OpenStack reference branches.
    """
    end = len(req_line)
    hash_pos = req_line.find('#')
    if hash_pos < 0:
        hash_pos = end
    if '://' in req_line[:hash_pos]:
        # Trigger an early failure before we look for ':'
        pkg_resources.Requirement.parse(req_line)
    semi_pos = req_line.find(';', 0, hash_pos)
    colon_pos = req_line.find(':', 0, hash_pos)
    marker_pos = max(semi_pos, colon_pos)
    if marker_pos < 0:
        marker_pos = hash_pos
    markers = req_line[marker_pos + 1:hash_pos].strip()
    if hash_pos != end:
        comment = req_line[hash_pos:]
    else:
        comment = ''
    req_line = req_line[:marker_pos]

    if req_line:
        parsed = pkg_resources.Requirement.parse(req_line)
        name = parsed.project_name
        specifier = str(parsed.specifier)
    else:
        name = ''
        specifier = ''
    return Requirement(name, specifier, markers, comment)
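
The parser is deliberately line-preserving: everything after a '#' is kept
as the comment field, and an environment marker may be introduced by either
';' or ':'. A few concrete lines and the tuples they produce, taken from the
TestParseRequirement scenarios added below:

    parse_line('alembic>=0.4.1')
    # -> Requirement('alembic', '>=0.4.1', '', '')
    parse_line('Pint>=0.5 # BSD')
    # -> Requirement('Pint', '>=0.5', '', '# BSD')
    parse_line("pywin32;sys_platform=='win32'")
    # -> Requirement('pywin32', '', "sys_platform=='win32'", '')
    parse_line('# foo')
    # -> Requirement('', '', '', '# foo')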


def to_content(reqs, marker_sep=';', line_prefix='', prefix=True):
    lines = []
    if prefix:
        lines += _REQS_HEADER
    for req in reqs.reqs:
        comment_p = ' ' if req.package else ''
        comment = (comment_p + req.comment if req.comment else '')
        marker = marker_sep + req.markers if req.markers else ''
        package = line_prefix + req.package if req.package else ''
        lines.append('%s%s%s%s\n' % (package, req.specifiers, marker, comment))
    return u''.join(lines)
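
to_content is roughly the inverse of parse_line: it serialises a
Requirements tuple back to file text, optionally prepending _REQS_HEADER.
The TestToContent case added below boils down to:

    to_content(Requirements([
        Requirement('foo', '<=1', "python_version=='2.7'", '# BSD')]),
        marker_sep='!')
    # -> ''.join(_REQS_HEADER) + "foo<=1!python_version=='2.7' # BSD\n"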


def to_dict(req_sequence):
    reqs = dict()
    for req, req_line in req_sequence:
        if req is not None:
            reqs.setdefault(req.package.lower(), []).append((req, req_line))
    return reqs


def _pass_through(req_line):
    """Identify unparsable lines."""
    return (req_line.startswith('http://tarballs.openstack.org/') or
            req_line.startswith('-e') or
            req_line.startswith('-f'))


def to_reqs(content):
    for content_line in content.splitlines(True):
        req_line = content_line.strip()
        if _pass_through(req_line):
            yield None, content_line
        else:
            yield parse_line(req_line), content_line
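
Putting the pieces together, parse() turns a whole requirements file into a
dict keyed by the lower-cased package name, each value holding the parsed
tuple plus the original line so callers can preserve formatting. A hedged
sketch:

    reqs = parse("Babel>=1.3\npbr>=0.6,!=0.7\n")
    # reqs['babel'] == [(Requirement('Babel', '>=1.3', '', ''),
    #                    'Babel>=1.3\n')]
    # reqs['pbr'][0][0].specifiers == '!=0.7,>=0.6'
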
@ -15,6 +15,8 @@ import shutil
|
||||
|
||||
import fixtures
|
||||
|
||||
from openstack_requirements import project
|
||||
from openstack_requirements import requirement
|
||||
from openstack_requirements import update
|
||||
|
||||
|
||||
@ -82,10 +84,10 @@ class GlobalRequirements(fixtures.Fixture):
|
||||
# Static data for unit testing.
|
||||
def make_project(fixture):
|
||||
with fixture:
|
||||
return update._read_project(fixture.root)
|
||||
return project.read(fixture.root)
|
||||
|
||||
|
||||
global_reqs = update._parse_reqs(
|
||||
global_reqs = requirement.parse(
|
||||
open("openstack_requirements/tests/files/gr-base.txt", "rt").read())
|
||||
pbr_project = make_project(pbr_fixture)
|
||||
project_project = make_project(project_fixture)
|
||||
@ -94,13 +96,13 @@ oslo_project = make_project(oslo_fixture)
|
||||
|
||||
|
||||
def project_file(
|
||||
fail, project, action_filename, suffix=None, softupdate=None,
|
||||
fail, proj, action_filename, suffix=None, softupdate=None,
|
||||
non_std_reqs=False):
|
||||
actions = update._process_project(
|
||||
project, global_reqs, suffix, softupdate, None,
|
||||
proj, global_reqs, suffix, softupdate, None,
|
||||
non_std_reqs)
|
||||
for action in actions:
|
||||
if type(action) is update.File:
|
||||
if type(action) is project.File:
|
||||
if action.filename == action_filename:
|
||||
return action.content.splitlines()
|
||||
fail('File %r not found in %r' % (action_filename, actions))
|
||||
|
@ -13,7 +13,7 @@
|
||||
from packaging import specifiers
|
||||
import testtools
|
||||
|
||||
from openstack_requirements import update
|
||||
from openstack_requirements import requirement
|
||||
|
||||
|
||||
def check_compatible(global_reqs, constraints):
|
||||
@ -62,14 +62,14 @@ class TestRequirements(testtools.TestCase):
|
||||
def test_constraints_compatible(self):
|
||||
global_req_content = open('global-requirements.txt', 'rt').read()
|
||||
constraints_content = open('upper-constraints.txt', 'rt').read()
|
||||
global_reqs = update._parse_reqs(global_req_content)
|
||||
constraints = update._parse_reqs(constraints_content)
|
||||
global_reqs = requirement.parse(global_req_content)
|
||||
constraints = requirement.parse(constraints_content)
|
||||
self.assertEqual([], check_compatible(global_reqs, constraints))
|
||||
|
||||
def test_check_compatible(self):
|
||||
global_reqs = update._parse_reqs("foo>=1.2\n")
|
||||
good_constraints = update._parse_reqs("foo===1.2.5\n")
|
||||
bad_constraints = update._parse_reqs("foo===1.1.2\n")
|
||||
global_reqs = requirement.parse("foo>=1.2\n")
|
||||
good_constraints = requirement.parse("foo===1.2.5\n")
|
||||
bad_constraints = requirement.parse("foo===1.1.2\n")
|
||||
self.assertEqual([], check_compatible(global_reqs, good_constraints))
|
||||
self.assertNotEqual(
|
||||
[], check_compatible(global_reqs, bad_constraints))
|
||||
|
openstack_requirements/tests/test_project.py (new file, 277 lines)
@@ -0,0 +1,277 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import io
|
||||
import textwrap
|
||||
|
||||
import fixtures
|
||||
import parsley
|
||||
import testscenarios
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from openstack_requirements import project
|
||||
from openstack_requirements import requirement
|
||||
from openstack_requirements.tests import common
|
||||
|
||||
|
||||
load_tests = testscenarios.load_tests_apply_scenarios
|
||||
|
||||
|
||||
class TestReadProject(testtools.TestCase):
|
||||
|
||||
def test_pbr(self):
|
||||
root = self.useFixture(common.pbr_fixture).root
|
||||
proj = project.read(root)
|
||||
self.expectThat(proj['root'], matchers.Equals(root))
|
||||
setup_py = open(root + '/setup.py', 'rt').read()
|
||||
self.expectThat(proj['setup.py'], matchers.Equals(setup_py))
|
||||
setup_cfg = open(root + '/setup.cfg', 'rt').read()
|
||||
self.expectThat(proj['setup.cfg'], matchers.Equals(setup_cfg))
|
||||
self.expectThat(
|
||||
proj['requirements'],
|
||||
matchers.KeysEqual('requirements.txt', 'test-requirements.txt'))
|
||||
|
||||
def test_no_setup_py(self):
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
proj = project.read(root)
|
||||
self.expectThat(
|
||||
proj, matchers.Equals({'root': root, 'requirements': {}}))
|
||||
|
||||
|
||||
class TestProjectExtras(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
proj = {'setup.cfg': textwrap.dedent(u"""
|
||||
[extras]
|
||||
1 =
|
||||
foo
|
||||
2 =
|
||||
foo # fred
|
||||
bar
|
||||
""")}
|
||||
expected = {
|
||||
'1': '\nfoo',
|
||||
'2': '\nfoo # fred\nbar'
|
||||
}
|
||||
self.assertEqual(expected, project.extras(proj))
|
||||
|
||||
def test_none(self):
|
||||
proj = {'setup.cfg': u"[metadata]\n"}
|
||||
self.assertEqual({}, project.extras(proj))
|
||||
|
||||
|
||||
class TestExtrasParsing(testtools.TestCase):
|
||||
|
||||
def test_none(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
ini = project._extras_compiled(old_content).ini()
|
||||
self.assertEqual(ini, (old_content, None, ''))
|
||||
|
||||
def test_no_eol(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux""")
|
||||
expected1 = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
""")
|
||||
suffix = ' foo = bar:quux'
|
||||
ini = project._extras_compiled(old_content).ini()
|
||||
self.assertEqual(ini, (expected1, None, suffix))
|
||||
|
||||
def test_two_extras_raises(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
a = b
|
||||
[extras]
|
||||
b = c
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
with testtools.ExpectedException(parsley.ParseError):
|
||||
project._extras_compiled(old_content).ini()
|
||||
|
||||
def test_extras(self):
|
||||
# We get an AST for extras we can use to preserve comments.
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
# comment1
|
||||
a =
|
||||
b
|
||||
c
|
||||
# comment2
|
||||
# comment3
|
||||
d =
|
||||
e
|
||||
# comment4
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
prefix = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
""")
|
||||
suffix = textwrap.dedent(u"""\
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
extras = [
|
||||
project._Comment('# comment1\n'),
|
||||
project._Extra('a', '\nb\nc\n'),
|
||||
project._Comment('# comment2\n'),
|
||||
project._Comment('# comment3\n'),
|
||||
project._Extra('d', '\ne\n'),
|
||||
project._Comment('# comment4\n')]
|
||||
ini = project._extras_compiled(old_content).ini()
|
||||
self.assertEqual(ini, (prefix, extras, suffix))
|
||||
|
||||
|
||||
class TestMergeSetupCfg(testtools.TestCase):
|
||||
|
||||
def test_merge_none(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
merged = project.merge_setup_cfg(old_content, {})
|
||||
self.assertEqual(old_content, merged)
|
||||
|
||||
def test_merge_extras(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
# Comment
|
||||
a =
|
||||
b
|
||||
# comment
|
||||
c =
|
||||
d
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
blank = requirement.Requirement('', '', '', '')
|
||||
r1 = requirement.Requirement('b', '>=1', "python_version=='2.7'", '')
|
||||
r2 = requirement.Requirement('d', '', '', '# BSD')
|
||||
reqs = {
|
||||
'a': requirement.Requirements([blank, r1]),
|
||||
'c': requirement.Requirements([blank, r2])}
|
||||
merged = project.merge_setup_cfg(old_content, reqs)
|
||||
expected = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
# Comment
|
||||
a =
|
||||
b>=1:python_version=='2.7'
|
||||
# comment
|
||||
c =
|
||||
d # BSD
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
self.assertEqual(expected, merged)
|
||||
|
||||
|
||||
class TestWriteProject(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
proj = {'root': root}
|
||||
actions = [
|
||||
project.File('foo', '123\n'),
|
||||
project.File('bar', '456\n'),
|
||||
project.Verbose(u'fred')]
|
||||
project.write(proj, actions, stdout, True)
|
||||
foo = open(root + '/foo', 'rt').read()
|
||||
self.expectThat(foo, matchers.Equals('123\n'))
|
||||
bar = open(root + '/bar', 'rt').read()
|
||||
self.expectThat(bar, matchers.Equals('456\n'))
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals('fred\n'))
|
||||
|
||||
def test_non_verbose(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
proj = {'root': root}
|
||||
actions = [project.Verbose(u'fred')]
|
||||
project.write(proj, actions, stdout, False)
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals(''))
|
||||
|
||||
def test_bad_action(self):
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
stdout = io.StringIO()
|
||||
proj = {'root': root}
|
||||
actions = [('foo', 'bar')]
|
||||
with testtools.ExpectedException(Exception):
|
||||
project.write(proj, actions, stdout, True)
|
||||
|
||||
def test_stdout(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
proj = {'root': root}
|
||||
actions = [project.StdOut(u'fred\n')]
|
||||
project.write(proj, actions, stdout, True)
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals('fred\n'))
|
||||
|
||||
def test_errors(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
proj = {'root': root}
|
||||
actions = [project.Error(u'fred')]
|
||||
with testtools.ExpectedException(Exception):
|
||||
project.write(proj, actions, stdout, True)
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals('fred\n'))
|
openstack_requirements/tests/test_requirement.py (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pkg_resources
|
||||
import testscenarios
|
||||
import testtools
|
||||
|
||||
from openstack_requirements import requirement
|
||||
|
||||
|
||||
load_tests = testscenarios.load_tests_apply_scenarios
|
||||
|
||||
|
||||
class TestParseRequirement(testtools.TestCase):
|
||||
|
||||
scenarios = [
|
||||
('package', dict(
|
||||
line='swift',
|
||||
req=requirement.Requirement('swift', '', '', ''))),
|
||||
('specifier', dict(
|
||||
line='alembic>=0.4.1',
|
||||
req=requirement.Requirement('alembic', '>=0.4.1', '', ''))),
|
||||
('specifiers', dict(
|
||||
line='alembic>=0.4.1,!=1.1.8',
|
||||
req=requirement.Requirement('alembic', '!=1.1.8,>=0.4.1', '', ''))),
|
||||
('comment-only', dict(
|
||||
line='# foo',
|
||||
req=requirement.Requirement('', '', '', '# foo'))),
|
||||
('comment', dict(
|
||||
line='Pint>=0.5 # BSD',
|
||||
req=requirement.Requirement('Pint', '>=0.5', '', '# BSD'))),
|
||||
('comment-with-semicolon', dict(
|
||||
line='Pint>=0.5 # BSD;fred',
|
||||
req=requirement.Requirement('Pint', '>=0.5', '', '# BSD;fred'))),
|
||||
('case', dict(
|
||||
line='Babel>=1.3',
|
||||
req=requirement.Requirement('Babel', '>=1.3', '', ''))),
|
||||
('markers', dict(
|
||||
line="pywin32;sys_platform=='win32'",
|
||||
req=requirement.Requirement('pywin32', '', "sys_platform=='win32'",
|
||||
''))),
|
||||
('markers-with-comment', dict(
|
||||
line="Sphinx<=1.2; python_version=='2.7'# Sadface",
|
||||
req=requirement.Requirement('Sphinx', '<=1.2',
|
||||
"python_version=='2.7'", '# Sadface')))]
|
||||
|
||||
def test_parse(self):
|
||||
parsed = requirement.parse_line(self.line)
|
||||
self.assertEqual(self.req, parsed)
|
||||
|
||||
|
||||
class TestParseRequirementFailures(testtools.TestCase):
|
||||
|
||||
scenarios = [
|
||||
('url', dict(line='http://tarballs.openstack.org/oslo.config/'
|
||||
'oslo.config-1.2.0a3.tar.gz#egg=oslo.config')),
|
||||
('-e', dict(line='-e git+https://foo.com#egg=foo')),
|
||||
('-f', dict(line='-f http://tarballs.openstack.org/'))]
|
||||
|
||||
def test_does_not_parse(self):
|
||||
with testtools.ExpectedException(pkg_resources.RequirementParseError):
|
||||
requirement.parse_line(self.line)
|
||||
|
||||
|
||||
class TestToContent(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
reqs = requirement.to_content(requirement.Requirements(
|
||||
[requirement.Requirement(
|
||||
'foo', '<=1', "python_version=='2.7'", '# BSD')]),
|
||||
marker_sep='!')
|
||||
self.assertEqual(
|
||||
''.join(requirement._REQS_HEADER
|
||||
+ ["foo<=1!python_version=='2.7' # BSD\n"]),
|
||||
reqs)
|
@ -14,18 +14,17 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import io
|
||||
import StringIO
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
import fixtures
|
||||
import parsley
|
||||
import pkg_resources
|
||||
import testscenarios
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from openstack_requirements import project
|
||||
from openstack_requirements import requirement
|
||||
from openstack_requirements.tests import common
|
||||
from openstack_requirements import update
|
||||
|
||||
@ -123,16 +122,16 @@ class UpdateTest(testtools.TestCase):
|
||||
common.oslo_project, common.global_reqs, None, None, None,
|
||||
False)
|
||||
for action in actions:
|
||||
if type(action) is update.File:
|
||||
if type(action) is project.File:
|
||||
self.assertNotEqual(action.filename, 'setup.py')
|
||||
|
||||
# These are tests which don't need to run the project update in advance
|
||||
def test_requirement_not_in_global(self):
|
||||
actions = update._process_project(
|
||||
common.bad_project, common.global_reqs, None, None, None, False)
|
||||
errors = [a for a in actions if type(a) is update.Error]
|
||||
errors = [a for a in actions if type(a) is project.Error]
|
||||
msg = u"'thisisnotarealdepedency' is not in global-requirements.txt"
|
||||
self.assertEqual([update.Error(message=msg)], errors)
|
||||
self.assertEqual([project.Error(message=msg)], errors)
|
||||
|
||||
def test_requirement_not_in_global_non_fatal(self):
|
||||
reqs = common.project_file(
|
||||
@ -152,7 +151,7 @@ class UpdateTest(testtools.TestCase):
|
||||
common.project_project, common.global_reqs, None, None, None,
|
||||
False)
|
||||
capture = StringIO.StringIO()
|
||||
update._write_project(
|
||||
project.write(
|
||||
common.project_project, actions, capture, False, True)
|
||||
expected = ('Version change for: greenlet, SQLAlchemy, eventlet, PasteDeploy, routes, WebOb, wsgiref, boto, kombu, pycrypto, python-swiftclient, lxml, jsonschema, python-keystoneclient\n' # noqa
|
||||
"""Updated %(project)s/requirements.txt:
|
||||
@ -184,7 +183,7 @@ Updated %(project)s/test-requirements.txt:
|
||||
common.project_project, common.global_reqs, None, None, None,
|
||||
False)
|
||||
capture = StringIO.StringIO()
|
||||
update._write_project(
|
||||
project.write(
|
||||
common.project_project, actions, capture, True, True)
|
||||
expected = ("""Syncing %(project)s/requirements.txt
|
||||
Version change for: greenlet, SQLAlchemy, eventlet, PasteDeploy, routes, WebOb, wsgiref, boto, kombu, pycrypto, python-swiftclient, lxml, jsonschema, python-keystoneclient\n""" # noqa
|
||||
@ -215,78 +214,6 @@ Syncing setup.py
|
||||
self.assertEqual(expected, capture.getvalue())
|
||||
|
||||
|
||||
class TestReadProject(testtools.TestCase):
|
||||
|
||||
def test_pbr(self):
|
||||
root = self.useFixture(common.pbr_fixture).root
|
||||
project = update._read_project(root)
|
||||
self.expectThat(project['root'], matchers.Equals(root))
|
||||
setup_py = open(root + '/setup.py', 'rt').read()
|
||||
self.expectThat(project['setup.py'], matchers.Equals(setup_py))
|
||||
setup_cfg = open(root + '/setup.cfg', 'rt').read()
|
||||
self.expectThat(project['setup.cfg'], matchers.Equals(setup_cfg))
|
||||
self.expectThat(
|
||||
project['requirements'],
|
||||
matchers.KeysEqual('requirements.txt', 'test-requirements.txt'))
|
||||
|
||||
def test_no_setup_py(self):
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
project = update._read_project(root)
|
||||
self.expectThat(
|
||||
project, matchers.Equals({'root': root, 'requirements': {}}))
|
||||
|
||||
|
||||
class TestWriteProject(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
project = {'root': root}
|
||||
actions = [
|
||||
update.File('foo', '123\n'),
|
||||
update.File('bar', '456\n'),
|
||||
update.Verbose(u'fred')]
|
||||
update._write_project(project, actions, stdout, True)
|
||||
foo = open(root + '/foo', 'rt').read()
|
||||
self.expectThat(foo, matchers.Equals('123\n'))
|
||||
bar = open(root + '/bar', 'rt').read()
|
||||
self.expectThat(bar, matchers.Equals('456\n'))
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals('fred\n'))
|
||||
|
||||
def test_non_verbose(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
project = {'root': root}
|
||||
actions = [update.Verbose(u'fred')]
|
||||
update._write_project(project, actions, stdout, False)
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals(''))
|
||||
|
||||
def test_bad_action(self):
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
stdout = io.StringIO()
|
||||
project = {'root': root}
|
||||
actions = [('foo', 'bar')]
|
||||
with testtools.ExpectedException(Exception):
|
||||
update._write_project(project, actions, stdout, True)
|
||||
|
||||
def test_stdout(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
project = {'root': root}
|
||||
actions = [update.StdOut(u'fred\n')]
|
||||
update._write_project(project, actions, stdout, True)
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals('fred\n'))
|
||||
|
||||
def test_errors(self):
|
||||
stdout = io.StringIO()
|
||||
root = self.useFixture(fixtures.TempDir()).path
|
||||
project = {'root': root}
|
||||
actions = [update.Error(u'fred')]
|
||||
with testtools.ExpectedException(Exception):
|
||||
update._write_project(project, actions, stdout, True)
|
||||
self.expectThat(stdout.getvalue(), matchers.Equals('fred\n'))
|
||||
|
||||
|
||||
class TestMain(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
@ -315,56 +242,6 @@ class TestMain(testtools.TestCase):
|
||||
update.main(['-o', 'global', '/dev/zero'], _worker=check_params)
|
||||
|
||||
|
||||
class TestParseRequirement(testtools.TestCase):
|
||||
|
||||
scenarios = [
|
||||
('package', dict(
|
||||
line='swift',
|
||||
req=update.Requirement('swift', '', '', ''))),
|
||||
('specifier', dict(
|
||||
line='alembic>=0.4.1',
|
||||
req=update.Requirement('alembic', '>=0.4.1', '', ''))),
|
||||
('specifiers', dict(
|
||||
line='alembic>=0.4.1,!=1.1.8',
|
||||
req=update.Requirement('alembic', '!=1.1.8,>=0.4.1', '', ''))),
|
||||
('comment-only', dict(
|
||||
line='# foo',
|
||||
req=update.Requirement('', '', '', '# foo'))),
|
||||
('comment', dict(
|
||||
line='Pint>=0.5 # BSD',
|
||||
req=update.Requirement('Pint', '>=0.5', '', '# BSD'))),
|
||||
('comment-with-semicolon', dict(
|
||||
line='Pint>=0.5 # BSD;fred',
|
||||
req=update.Requirement('Pint', '>=0.5', '', '# BSD;fred'))),
|
||||
('case', dict(
|
||||
line='Babel>=1.3',
|
||||
req=update.Requirement('Babel', '>=1.3', '', ''))),
|
||||
('markers', dict(
|
||||
line="pywin32;sys_platform=='win32'",
|
||||
req=update.Requirement('pywin32', '', "sys_platform=='win32'", ''))),
|
||||
('markers-with-comment', dict(
|
||||
line="Sphinx<=1.2; python_version=='2.7'# Sadface",
|
||||
req=update.Requirement('Sphinx', '<=1.2', "python_version=='2.7'",
|
||||
'# Sadface')))]
|
||||
|
||||
def test_parse(self):
|
||||
parsed = update._parse_requirement(self.line)
|
||||
self.assertEqual(self.req, parsed)
|
||||
|
||||
|
||||
class TestParseRequirementFailures(testtools.TestCase):
|
||||
|
||||
scenarios = [
|
||||
('url', dict(line='http://tarballs.openstack.org/oslo.config/'
|
||||
'oslo.config-1.2.0a3.tar.gz#egg=oslo.config')),
|
||||
('-e', dict(line='-e git+https://foo.com#egg=foo')),
|
||||
('-f', dict(line='-f http://tarballs.openstack.org/'))]
|
||||
|
||||
def test_does_not_parse(self):
|
||||
with testtools.ExpectedException(pkg_resources.RequirementParseError):
|
||||
update._parse_requirement(self.line)
|
||||
|
||||
|
||||
class TestSyncRequirementsFile(testtools.TestCase):
|
||||
|
||||
def test_multiple_lines_in_global_one_in_project(self):
|
||||
@ -375,18 +252,19 @@ class TestSyncRequirementsFile(testtools.TestCase):
|
||||
project_content = textwrap.dedent("""\
|
||||
foo
|
||||
""")
|
||||
global_reqs = update._parse_reqs(global_content)
|
||||
project_reqs = list(update._content_to_reqs(project_content))
|
||||
global_reqs = requirement.parse(global_content)
|
||||
project_reqs = list(requirement.to_reqs(project_content))
|
||||
actions, reqs = update._sync_requirements_file(
|
||||
global_reqs, project_reqs, 'f', False, False, False)
|
||||
self.assertEqual(update.Requirements([
|
||||
update.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
update.Requirement('foo', '>1', "python_version!='2.7'", '')]),
|
||||
self.assertEqual(requirement.Requirements([
|
||||
requirement.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
requirement.Requirement(
|
||||
'foo', '>1', "python_version!='2.7'", '')]),
|
||||
reqs)
|
||||
self.assertEqual(update.StdOut(
|
||||
self.assertEqual(project.StdOut(
|
||||
" foo "
|
||||
"-> foo<2;python_version=='2.7'\n"), actions[2])
|
||||
self.assertEqual(update.StdOut(
|
||||
self.assertEqual(project.StdOut(
|
||||
" "
|
||||
"-> foo>1;python_version!='2.7'\n"), actions[3])
|
||||
self.assertThat(actions, matchers.HasLength(4))
|
||||
@ -401,14 +279,14 @@ class TestSyncRequirementsFile(testtools.TestCase):
|
||||
# mumbo gumbo
|
||||
foo>1;python_version!='2.7'
|
||||
""")
|
||||
global_reqs = update._parse_reqs(global_content)
|
||||
project_reqs = list(update._content_to_reqs(project_content))
|
||||
global_reqs = requirement.parse(global_content)
|
||||
project_reqs = list(requirement.to_reqs(project_content))
|
||||
actions, reqs = update._sync_requirements_file(
|
||||
global_reqs, project_reqs, 'f', False, False, False)
|
||||
self.assertEqual(update.Requirements([
|
||||
update.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
update.Requirement('foo', '>1', "python_version!='2.7'", ''),
|
||||
update.Requirement('', '', '', "# mumbo gumbo")]),
|
||||
self.assertEqual(requirement.Requirements([
|
||||
requirement.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
requirement.Requirement('foo', '>1', "python_version!='2.7'", ''),
|
||||
requirement.Requirement('', '', '', "# mumbo gumbo")]),
|
||||
reqs)
|
||||
self.assertThat(actions, matchers.HasLength(0))
|
||||
|
||||
@ -422,19 +300,19 @@ class TestSyncRequirementsFile(testtools.TestCase):
|
||||
# mumbo gumbo
|
||||
foo>0.9;python_version!='2.7'
|
||||
""")
|
||||
global_reqs = update._parse_reqs(global_content)
|
||||
project_reqs = list(update._content_to_reqs(project_content))
|
||||
global_reqs = requirement.parse(global_content)
|
||||
project_reqs = list(requirement.to_reqs(project_content))
|
||||
actions, reqs = update._sync_requirements_file(
|
||||
global_reqs, project_reqs, 'f', False, False, False)
|
||||
self.assertEqual(update.Requirements([
|
||||
update.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
update.Requirement('foo', '>1', "python_version!='2.7'", ''),
|
||||
update.Requirement('', '', '', "# mumbo gumbo")]),
|
||||
self.assertEqual(requirement.Requirements([
|
||||
requirement.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
requirement.Requirement('foo', '>1', "python_version!='2.7'", ''),
|
||||
requirement.Requirement('', '', '', "# mumbo gumbo")]),
|
||||
reqs)
|
||||
self.assertEqual(update.StdOut(
|
||||
self.assertEqual(project.StdOut(
|
||||
" foo<1.8;python_version=='2.7' -> "
|
||||
"foo<2;python_version=='2.7'\n"), actions[2])
|
||||
self.assertEqual(update.StdOut(
|
||||
self.assertEqual(project.StdOut(
|
||||
" foo>0.9;python_version!='2.7' -> "
|
||||
"foo>1;python_version!='2.7'\n"), actions[3])
|
||||
self.assertThat(actions, matchers.HasLength(4))
|
||||
@ -448,13 +326,14 @@ class TestSyncRequirementsFile(testtools.TestCase):
|
||||
foo<2;python_version=='2.7'
|
||||
foo>1;python_version!='2.7'
|
||||
""")
|
||||
global_reqs = update._parse_reqs(global_content)
|
||||
project_reqs = list(update._content_to_reqs(project_content))
|
||||
global_reqs = requirement.parse(global_content)
|
||||
project_reqs = list(requirement.to_reqs(project_content))
|
||||
actions, reqs = update._sync_requirements_file(
|
||||
global_reqs, project_reqs, 'f', False, False, False)
|
||||
self.assertEqual(update.Requirements([
|
||||
update.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
update.Requirement('foo', '>1', "python_version!='2.7'", '')]),
|
||||
self.assertEqual(requirement.Requirements([
|
||||
requirement.Requirement('foo', '<2', "python_version=='2.7'", ''),
|
||||
requirement.Requirement(
|
||||
'foo', '>1', "python_version!='2.7'", '')]),
|
||||
reqs)
|
||||
self.assertThat(actions, matchers.HasLength(0))
|
||||
|
||||
@ -466,211 +345,20 @@ class TestSyncRequirementsFile(testtools.TestCase):
|
||||
foo<2;python_version=='2.7'
|
||||
foo>1;python_version!='2.7'
|
||||
""")
|
||||
global_reqs = update._parse_reqs(global_content)
|
||||
project_reqs = list(update._content_to_reqs(project_content))
|
||||
global_reqs = requirement.parse(global_content)
|
||||
project_reqs = list(requirement.to_reqs(project_content))
|
||||
actions, reqs = update._sync_requirements_file(
|
||||
global_reqs, project_reqs, 'f', False, False, False)
|
||||
self.assertEqual(update.Requirements([
|
||||
update.Requirement('foo', '>1', "", '')]),
|
||||
self.assertEqual(requirement.Requirements([
|
||||
requirement.Requirement('foo', '>1', "", '')]),
|
||||
reqs)
|
||||
self.assertEqual(update.StdOut(
|
||||
self.assertEqual(project.StdOut(
|
||||
" foo<2;python_version=='2.7' -> foo>1\n"), actions[2])
|
||||
self.assertEqual(update.StdOut(
|
||||
self.assertEqual(project.StdOut(
|
||||
" foo>1;python_version!='2.7' -> \n"), actions[3])
|
||||
self.assertThat(actions, matchers.HasLength(4))
|
||||
|
||||
|
||||
class TestReqsToContent(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
reqs = update._reqs_to_content(update.Requirements(
|
||||
[update.Requirement(
|
||||
'foo', '<=1', "python_version=='2.7'", '# BSD')]),
|
||||
marker_sep='!')
|
||||
self.assertEqual(
|
||||
''.join(update._REQS_HEADER
|
||||
+ ["foo<=1!python_version=='2.7' # BSD\n"]),
|
||||
reqs)
|
||||
|
||||
|
||||
class TestProjectExtras(testtools.TestCase):
|
||||
|
||||
def test_smoke(self):
|
||||
project = {'setup.cfg': textwrap.dedent(u"""
|
||||
[extras]
|
||||
1 =
|
||||
foo
|
||||
2 =
|
||||
foo # fred
|
||||
bar
|
||||
""")}
|
||||
expected = {
|
||||
'1': '\nfoo',
|
||||
'2': '\nfoo # fred\nbar'
|
||||
}
|
||||
self.assertEqual(expected, update._project_extras(project))
|
||||
|
||||
def test_none(self):
|
||||
project = {'setup.cfg': u"[metadata]\n"}
|
||||
self.assertEqual({}, update._project_extras(project))
|
||||
|
||||
|
||||
class TestExtras(testtools.TestCase):
|
||||
|
||||
def test_none(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
ini = update.extras_compiled(old_content).ini()
|
||||
self.assertEqual(ini, (old_content, None, ''))
|
||||
|
||||
def test_no_eol(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux""")
|
||||
expected1 = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
""")
|
||||
suffix = ' foo = bar:quux'
|
||||
ini = update.extras_compiled(old_content).ini()
|
||||
self.assertEqual(ini, (expected1, None, suffix))
|
||||
|
||||
def test_two_extras_raises(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
a = b
|
||||
[extras]
|
||||
b = c
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
with testtools.ExpectedException(parsley.ParseError):
|
||||
update.extras_compiled(old_content).ini()
|
||||
|
||||
def test_extras(self):
|
||||
# We get an AST for extras we can use to preserve comments.
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
# comment1
|
||||
a =
|
||||
b
|
||||
c
|
||||
# comment2
|
||||
# comment3
|
||||
d =
|
||||
e
|
||||
# comment4
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
prefix = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
""")
|
||||
suffix = textwrap.dedent(u"""\
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
extras = [
|
||||
update.Comment('# comment1\n'),
|
||||
update.Extra('a', '\nb\nc\n'),
|
||||
update.Comment('# comment2\n'),
|
||||
update.Comment('# comment3\n'),
|
||||
update.Extra('d', '\ne\n'),
|
||||
update.Comment('# comment4\n')]
|
||||
ini = update.extras_compiled(old_content).ini()
|
||||
self.assertEqual(ini, (prefix, extras, suffix))
|
||||
|
||||
|
||||
class TestMergeSetupCfg(testtools.TestCase):
|
||||
|
||||
def test_merge_none(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
# something something
|
||||
name = fred
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
merged = update._merge_setup_cfg(old_content, {})
|
||||
self.assertEqual(old_content, merged)
|
||||
|
||||
def test_merge_extras(self):
|
||||
old_content = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
# Comment
|
||||
a =
|
||||
b
|
||||
# comment
|
||||
c =
|
||||
d
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
blank = update.Requirement('', '', '', '')
|
||||
r1 = update.Requirement('b', '>=1', "python_version=='2.7'", '')
|
||||
r2 = update.Requirement('d', '', '', '# BSD')
|
||||
reqs = {
|
||||
'a': update.Requirements([blank, r1]),
|
||||
'c': update.Requirements([blank, r2])}
|
||||
merged = update._merge_setup_cfg(old_content, reqs)
|
||||
expected = textwrap.dedent(u"""
|
||||
[metadata]
|
||||
name = fred
|
||||
|
||||
[extras]
|
||||
# Comment
|
||||
a =
|
||||
b>=1:python_version=='2.7'
|
||||
# comment
|
||||
c =
|
||||
d # BSD
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
foo = bar:quux
|
||||
""")
|
||||
self.assertEqual(expected, merged)
|
||||
|
||||
|
||||
class TestCopyRequires(testtools.TestCase):
|
||||
|
||||
def test_extras_no_change(self):
|
||||
@ -690,14 +378,14 @@ class TestCopyRequires(testtools.TestCase):
|
||||
opt =
|
||||
freddy
|
||||
""")
|
||||
project = {}
|
||||
project['root'] = '/dev/null'
|
||||
project['requirements'] = {}
|
||||
project['setup.cfg'] = setup_cfg
|
||||
global_reqs = update._parse_reqs(global_content)
|
||||
proj = {}
|
||||
proj['root'] = '/dev/null'
|
||||
proj['requirements'] = {}
|
||||
proj['setup.cfg'] = setup_cfg
|
||||
global_reqs = requirement.parse(global_content)
|
||||
actions = update._copy_requires(
|
||||
u'', False, False, project, global_reqs, False)
|
||||
u'', False, False, proj, global_reqs, False)
|
||||
self.assertEqual([
|
||||
update.Verbose('Syncing extra [opt]'),
|
||||
update.Verbose('Syncing extra [test]'),
|
||||
update.File('setup.cfg', setup_cfg)], actions)
|
||||
project.Verbose('Syncing extra [opt]'),
|
||||
project.Verbose('Syncing extra [test]'),
|
||||
project.File('setup.cfg', setup_cfg)], actions)
|
||||
|
@ -20,6 +20,7 @@ from __future__ import print_function
|
||||
|
||||
import testtools
|
||||
|
||||
from openstack_requirements import project
|
||||
from openstack_requirements.tests import common
|
||||
from openstack_requirements import update
|
||||
|
||||
@ -47,5 +48,5 @@ class UpdateTestPbr(testtools.TestCase):
|
||||
common.pbr_project, common.global_reqs, None, None, None,
|
||||
False)
|
||||
for action in actions:
|
||||
if type(action) is update.File:
|
||||
if type(action) is project.File:
|
||||
self.assertNotEqual(action.filename, 'setup.py')
|
||||
|
@ -16,6 +16,7 @@ from __future__ import print_function
|
||||
|
||||
import testtools
|
||||
|
||||
from openstack_requirements import project
|
||||
from openstack_requirements.tests import common
|
||||
from openstack_requirements import update
|
||||
|
||||
@ -59,5 +60,5 @@ class UpdateTestWithSuffix(testtools.TestCase):
|
||||
common.oslo_project, common.global_reqs, 'global', None, None,
|
||||
False)
|
||||
for action in actions:
|
||||
if type(action) is update.File:
|
||||
if type(action) is project.File:
|
||||
self.assertNotEqual(action.filename, 'setup.py')
|
||||
|
@ -26,18 +26,14 @@ updated to match the global requirements. Requirements not in the global
|
||||
files will be dropped.
|
||||
"""
|
||||
|
||||
import collections
|
||||
import errno
|
||||
import io
|
||||
import itertools
|
||||
import optparse
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from parsley import makeGrammar
|
||||
import pkg_resources
|
||||
from six.moves import configparser
|
||||
from openstack_requirements import project
|
||||
from openstack_requirements import requirement
|
||||
|
||||
_setup_py_text = """# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
@ -70,37 +66,9 @@ setuptools.setup(
|
||||
pbr=True)
|
||||
"""
|
||||
|
||||
# A header for the requirements file(s).
|
||||
# TODO(lifeless): Remove this once constraints are in use.
|
||||
_REQS_HEADER = [
|
||||
'# The order of packages is significant, because pip processes '
|
||||
'them in the order\n',
|
||||
'# of appearance. Changing the order has an impact on the overall '
|
||||
'integration\n',
|
||||
'# process, which may cause wedges in the gate later.\n',
|
||||
]
|
||||
|
||||
|
||||
Comment = collections.namedtuple('Comment', ['line'])
|
||||
Extra = collections.namedtuple('Extra', ['name', 'content'])
|
||||
|
||||
|
||||
extras_grammar = """
|
||||
ini = (line*:p extras?:e line*:l final:s) -> (''.join(p), e, ''.join(l+[s]))
|
||||
line = ~extras <(~'\\n' anything)* '\\n'>
|
||||
final = <(~'\\n' anything)* >
|
||||
extras = '[' 'e' 'x' 't' 'r' 'a' 's' ']' '\\n'+ body*:b -> b
|
||||
body = comment | extra
|
||||
comment = <'#' (~'\\n' anything)* '\\n'>:c '\\n'* -> comment(c)
|
||||
extra = name:n ' '* '=' line:l cont*:c '\\n'* -> extra(n, ''.join([l] + c))
|
||||
name = <(anything:x ?(x not in '\\n \\t='))+>
|
||||
cont = ' '+ <(~'\\n' anything)* '\\n'>
|
||||
"""
|
||||
extras_compiled = makeGrammar(
|
||||
extras_grammar, {"comment": Comment, "extra": Extra})
|
||||
|
||||
|
||||
# Pure --
|
||||
|
||||
class Change(object):
|
||||
def __init__(self, name, old, new):
|
||||
self.name = name
|
||||
@ -111,78 +79,19 @@ class Change(object):
|
||||
return "%-30.30s -> %s" % (self.old, self.new)
|
||||
|
||||
|
||||
Error = collections.namedtuple('Error', ['message'])
|
||||
File = collections.namedtuple('File', ['filename', 'content'])
|
||||
StdOut = collections.namedtuple('StdOut', ['message'])
|
||||
Verbose = collections.namedtuple('Verbose', ['message'])
|
||||
|
||||
|
||||
Requirement = collections.namedtuple(
|
||||
'Requirement', ['package', 'specifiers', 'markers', 'comment'])
|
||||
Requirements = collections.namedtuple('Requirements', ['reqs'])
|
||||
|
||||
|
||||
def _parse_requirement(req_line):
|
||||
"""Parse a single line of a requirements file.
|
||||
|
||||
requirements files here are a subset of pip requirements files: we don't
|
||||
try to parse URL entries, or pip options like -f and -e. Those are not
|
||||
permitted in global-requirements.txt. If encountered in a synchronised
|
||||
file such as requirements.txt or test-requirements.txt, they are illegal
|
||||
but currently preserved as-is.
|
||||
|
||||
They may of course be used by local test configurations, just not
|
||||
committed into the OpenStack reference branches.
|
||||
"""
|
||||
end = len(req_line)
|
||||
hash_pos = req_line.find('#')
|
||||
if hash_pos < 0:
|
||||
hash_pos = end
|
||||
if '://' in req_line[:hash_pos]:
|
||||
# Trigger an early failure before we look for ':'
|
||||
pkg_resources.Requirement.parse(req_line)
|
||||
semi_pos = req_line.find(';', 0, hash_pos)
|
||||
colon_pos = req_line.find(':', 0, hash_pos)
|
||||
marker_pos = max(semi_pos, colon_pos)
|
||||
if marker_pos < 0:
|
||||
marker_pos = hash_pos
|
||||
markers = req_line[marker_pos + 1:hash_pos].strip()
|
||||
if hash_pos != end:
|
||||
comment = req_line[hash_pos:]
|
||||
else:
|
||||
comment = ''
|
||||
req_line = req_line[:marker_pos]
|
||||
|
||||
if req_line:
|
||||
parsed = pkg_resources.Requirement.parse(req_line)
|
||||
name = parsed.project_name
|
||||
specifier = str(parsed.specifier)
|
||||
else:
|
||||
name = ''
|
||||
specifier = ''
|
||||
return Requirement(name, specifier, markers, comment)
|
||||
|
||||
|
||||
def _pass_through(req_line):
|
||||
"""Identify unparsable lines."""
|
||||
return (req_line.startswith('http://tarballs.openstack.org/') or
|
||||
req_line.startswith('-e') or
|
||||
req_line.startswith('-f'))
|
||||
|
||||
|
||||
def _check_setup_py(project):
|
||||
def _check_setup_py(proj):
|
||||
actions = []
|
||||
# If it doesn't have a setup.py, then we don't want to update it
|
||||
if 'setup.py' not in project:
|
||||
if 'setup.py' not in proj:
|
||||
return actions
|
||||
# If it doesn't use pbr, we don't want to update it.
|
||||
elif 'pbr' not in project['setup.py']:
|
||||
elif 'pbr' not in proj['setup.py']:
|
||||
return actions
|
||||
# We don't update pbr's setup.py because it can't use itself.
|
||||
if 'setup.cfg' in project and 'name = pbr' in project['setup.cfg']:
|
||||
if 'setup.cfg' in proj and 'name = pbr' in proj['setup.cfg']:
|
||||
return actions
|
||||
actions.append(Verbose("Syncing setup.py"))
|
||||
actions.append(File('setup.py', _setup_py_text))
|
||||
actions.append(project.Verbose("Syncing setup.py"))
|
||||
actions.append(project.File('setup.py', _setup_py_text))
|
||||
return actions
|
||||
|
||||
|
||||
@ -190,18 +99,19 @@ def _sync_requirements_file(
|
||||
source_reqs, dest_sequence, dest_label, softupdate, hacking,
|
||||
non_std_reqs):
|
||||
actions = []
|
||||
dest_reqs = _reqs_to_dict(dest_sequence)
|
||||
dest_reqs = requirement.to_dict(dest_sequence)
|
||||
changes = []
|
||||
output_requirements = []
|
||||
processed_packages = set()
|
||||
|
||||
for req, req_line in dest_sequence:
|
||||
# Skip the instructions header
|
||||
if req_line in _REQS_HEADER:
|
||||
if req_line in requirement._REQS_HEADER:
|
||||
continue
|
||||
elif req is None:
|
||||
# Unparsable lines.
|
||||
output_requirements.append(Requirement('', '', '', req_line))
|
||||
output_requirements.append(
|
||||
requirement.Requirement('', '', '', req_line))
|
||||
continue
|
||||
elif not req.package:
|
||||
# Comment-only lines
|
||||
@ -250,25 +160,25 @@ def _sync_requirements_file(
|
||||
# override. For those we support NON_STANDARD_REQS=1
|
||||
# environment variable to turn this into a warning only.
|
||||
# However this drops the unknown requirement.
|
||||
actions.append(Error(
|
||||
actions.append(project.Error(
|
||||
"'%s' is not in global-requirements.txt" % req.package))
|
||||
# always print out what we did if we did a thing
|
||||
if changes:
|
||||
actions.append(StdOut(
|
||||
actions.append(project.StdOut(
|
||||
"Version change for: %s\n"
|
||||
% ", ".join([x.name for x in changes])))
|
||||
actions.append(StdOut("Updated %s:\n" % dest_label))
|
||||
actions.append(project.StdOut("Updated %s:\n" % dest_label))
|
||||
for change in changes:
|
||||
actions.append(StdOut(" %s\n" % change))
|
||||
return actions, Requirements(output_requirements)
|
||||
actions.append(project.StdOut(" %s\n" % change))
|
||||
return actions, requirement.Requirements(output_requirements)
|
||||
|
||||
|
||||
def _copy_requires(
|
||||
suffix, softupdate, hacking, project, global_reqs, non_std_reqs):
|
||||
suffix, softupdate, hacking, proj, global_reqs, non_std_reqs):
|
||||
"""Copy requirements files."""
|
||||
actions = []
|
||||
for source, content in sorted(project['requirements'].items()):
|
||||
dest_path = os.path.join(project['root'], source)
|
||||
for source, content in sorted(proj['requirements'].items()):
|
||||
dest_path = os.path.join(proj['root'], source)
|
||||
# this is specifically for global-requirements gate jobs so we don't
|
||||
# modify the git tree
|
||||
if suffix:
|
||||
@ -276,20 +186,20 @@ def _copy_requires(
|
||||
dest_name = "%s.%s" % (source, suffix)
|
||||
else:
|
||||
dest_name = source
|
||||
dest_sequence = list(_content_to_reqs(content))
|
||||
actions.append(Verbose("Syncing %s" % dest_path))
|
||||
dest_sequence = list(requirement.to_reqs(content))
|
||||
actions.append(project.Verbose("Syncing %s" % dest_path))
|
||||
_actions, reqs = _sync_requirements_file(
|
||||
global_reqs, dest_sequence, dest_path, softupdate, hacking,
|
||||
non_std_reqs)
|
||||
actions.extend(_actions)
|
||||
actions.append(File(dest_name, _reqs_to_content(reqs)))
|
||||
extras = _project_extras(project)
|
||||
actions.append(project.File(dest_name, requirement.to_content(reqs)))
|
||||
extras = project.extras(proj)
|
||||
output_extras = {}
|
||||
for extra, content in sorted(extras.items()):
|
||||
dest_name = 'extra-%s' % extra
|
||||
dest_path = "%s[%s]" % (project['root'], extra)
|
||||
dest_sequence = list(_content_to_reqs(content))
|
||||
actions.append(Verbose("Syncing extra [%s]" % extra))
|
||||
dest_path = "%s[%s]" % (proj['root'], extra)
|
||||
dest_sequence = list(requirement.to_reqs(content))
|
||||
actions.append(project.Verbose("Syncing extra [%s]" % extra))
|
||||
_actions, reqs = _sync_requirements_file(
|
||||
global_reqs, dest_sequence, dest_path, softupdate, hacking,
|
||||
non_std_reqs)
|
||||
@ -298,68 +208,11 @@ def _copy_requires(
|
||||
dest_path = 'setup.cfg'
|
||||
if suffix:
|
||||
dest_path = "%s.%s" % (dest_path, suffix)
|
||||
actions.append(File(
|
||||
dest_path, _merge_setup_cfg(project['setup.cfg'], output_extras)))
|
||||
actions.append(project.File(
|
||||
dest_path, project.merge_setup_cfg(proj['setup.cfg'], output_extras)))
|
||||
return actions
|
||||
|
||||
|
||||
def _merge_setup_cfg(old_content, new_extras):
|
||||
# This is ugly. All the existing libraries handle setup.cfg's poorly.
|
||||
prefix, extras, suffix = extras_compiled(old_content).ini()
|
||||
out_extras = []
|
||||
if extras is not None:
|
||||
for extra in extras:
|
||||
if type(extra) is Comment:
|
||||
out_extras.append(extra)
|
||||
elif type(extra) is Extra:
|
||||
if extra.name not in new_extras:
|
||||
out_extras.append(extra)
|
||||
continue
|
||||
e = Extra(
|
||||
extra.name,
|
||||
_reqs_to_content(
|
||||
new_extras[extra.name], ':', ' ', False))
|
||||
out_extras.append(e)
|
||||
else:
|
||||
raise TypeError('unknown type %r' % extra)
|
||||
if out_extras:
|
||||
extras_str = ['[extras]\n']
|
||||
for extra in out_extras:
|
||||
if type(extra) is Comment:
|
||||
extras_str.append(extra.line)
|
||||
else:
|
||||
extras_str.append(extra.name + ' =')
|
||||
extras_str.append(extra.content)
|
||||
if suffix:
|
||||
extras_str.append('\n')
|
||||
extras_str = ''.join(extras_str)
|
||||
else:
|
||||
extras_str = ''
|
||||
return prefix + extras_str + suffix
|
||||
|
||||
|
||||
def _project_extras(project):
|
||||
"""Return a dict of extra-name:content for the extras in setup.cfg."""
|
||||
c = configparser.SafeConfigParser()
|
||||
c.readfp(io.StringIO(project['setup.cfg']))
|
||||
if not c.has_section('extras'):
|
||||
return dict()
|
||||
return dict(c.items('extras'))
|
||||
|
||||
|
||||
def _reqs_to_content(reqs, marker_sep=';', line_prefix='', prefix=True):
|
||||
lines = []
|
||||
if prefix:
|
||||
lines += _REQS_HEADER
|
||||
for req in reqs.reqs:
|
||||
comment_p = ' ' if req.package else ''
|
||||
comment = (comment_p + req.comment if req.comment else '')
|
||||
marker = marker_sep + req.markers if req.markers else ''
|
||||
package = line_prefix + req.package if req.package else ''
|
||||
lines.append('%s%s%s%s\n' % (package, req.specifiers, marker, comment))
|
||||
return u''.join(lines)
|
||||
|
||||
|
||||
def _process_project(
|
||||
project, global_reqs, suffix, softupdate, hacking, non_std_reqs):
|
||||
"""Project a project.
|
||||
@ -372,99 +225,7 @@ def _process_project(
|
||||
return actions
|
||||
|
||||
|
||||
def _content_to_reqs(content):
|
||||
for content_line in content.splitlines(True):
|
||||
req_line = content_line.strip()
|
||||
if _pass_through(req_line):
|
||||
yield None, content_line
|
||||
else:
|
||||
yield _parse_requirement(req_line), content_line
|
||||
|
||||
|
||||
def _parse_reqs(content):
|
||||
return _reqs_to_dict(_content_to_reqs(content))
|
||||
|
||||
|
||||
def _reqs_to_dict(req_sequence):
|
||||
reqs = dict()
|
||||
for req, req_line in req_sequence:
|
||||
if req is not None:
|
||||
reqs.setdefault(req.package.lower(), []).append((req, req_line))
|
||||
return reqs
|
||||
|
||||
|
||||
# IO --
|
||||
def _safe_read(project, filename, output=None):
|
||||
if output is None:
|
||||
output = project
|
||||
try:
|
||||
path = project['root'] + '/' + filename
|
||||
with io.open(path, 'rt', encoding="utf-8") as f:
|
||||
output[filename] = f.read()
|
||||
except IOError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
|
||||
def _read_project(root):
|
||||
result = {'root': root}
|
||||
_safe_read(result, 'setup.py')
|
||||
_safe_read(result, 'setup.cfg')
|
||||
requirements = {}
|
||||
result['requirements'] = requirements
|
||||
target_files = [
|
||||
'requirements.txt', 'tools/pip-requires',
|
||||
'test-requirements.txt', 'tools/test-requires',
|
||||
]
|
||||
for py_version in (2, 3):
|
||||
target_files.append('requirements-py%s.txt' % py_version)
|
||||
target_files.append('test-requirements-py%s.txt' % py_version)
|
||||
for target_file in target_files:
|
||||
_safe_read(result, target_file, output=requirements)
|
||||
return result
|
||||
|
||||
|
||||
def _write_project(project, actions, stdout, verbose, noop=False):
|
||||
"""Write actions into project.
|
||||
|
||||
:param project: A project metadata dict.
|
||||
:param actions: A list of action tuples - File or Verbose - that describe
|
||||
what actions are to be taken.
|
||||
Error objects write a message to stdout and trigger an exception at
|
||||
the end of _write_project.
|
||||
File objects describe a file to have content placed in it.
|
||||
StdOut objects describe a message to write to stdout.
|
||||
Verbose objects will write a message to stdout when verbose is True.
|
||||
:param stdout: Where to write content for stdout.
|
||||
:param verbose: If True Verbose actions will be written to stdout.
|
||||
:param noop: If True nothing will be written to disk.
|
||||
:return None:
|
||||
:raises IOError: If the IO operations fail, IOError is raised. If this
|
||||
happens some actions may have been applied and others not.
|
||||
"""
|
||||
error = False
|
||||
for action in actions:
|
||||
if type(action) is Error:
|
||||
error = True
|
||||
stdout.write(action.message + '\n')
|
||||
elif type(action) is File:
|
||||
if noop:
|
||||
continue
|
||||
fullname = project['root'] + '/' + action.filename
|
||||
tmpname = fullname + '.tmp'
|
||||
with open(tmpname, 'wt') as f:
|
||||
f.write(action.content)
|
||||
os.rename(tmpname, fullname)
|
||||
elif type(action) is StdOut:
|
||||
stdout.write(action.message)
|
||||
elif type(action) is Verbose:
|
||||
if verbose:
|
||||
stdout.write(u"%s\n" % (action.message,))
|
||||
else:
|
||||
raise Exception("Invalid action %r" % (action,))
|
||||
if error:
|
||||
raise Exception("Error occured processing %s" % (project['root']))
|
||||
|
||||
|
||||
def main(argv=None, stdout=None, _worker=None):
|
||||
parser = optparse.OptionParser()
|
||||
@ -499,13 +260,13 @@ def _do_main(
|
||||
root, source, suffix, softupdate, hacking, stdout, verbose,
|
||||
non_std_reqs):
|
||||
"""No options or environment variable access from here on in."""
|
||||
project = _read_project(root)
|
||||
proj = project.read(root)
|
||||
global_req_content = open(
|
||||
os.path.join(source, 'global-requirements.txt'), 'rt').read()
|
||||
global_reqs = _parse_reqs(global_req_content)
|
||||
global_reqs = requirement.parse(global_req_content)
|
||||
actions = _process_project(
|
||||
project, global_reqs, suffix, softupdate, hacking, non_std_reqs)
|
||||
_write_project(project, actions, stdout=stdout, verbose=verbose)
|
||||
proj, global_reqs, suffix, softupdate, hacking, non_std_reqs)
|
||||
project.write(proj, actions, stdout=stdout, verbose=verbose)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
Loading…
x
Reference in New Issue
Block a user