Fix invalid escape sequence warnings
Starting with Python 3.6, invalid escape sequences in string literals are deprecated[1]. This also affects automatic style checkers: starting with version 2.4.0, pycodestyle reports invalid escape sequences as W605[2]. Fix all of those warnings at once by using raw strings where possible and adding an extra backslash where not.

Footnotes:
1 - https://docs.python.org/3/whatsnew/3.6.html#deprecated-python-behavior
2 - https://github.com/PyCQA/pycodestyle/pull/676

Change-Id: I009a366fd8342edfd30890df6fe8e1fca88bf3cc
Signed-off-by: Krzysztof Opasiak <k.opasiak@samsung.com>
parent f51e726f64
commit 4860f523d4
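The pattern of the change is the same everywhere in the diff below. As a minimal, hypothetical sketch (not taken verbatim from this commit, but reusing the VSqlError pattern that appears in it), a regex written as a normal string literal triggers a DeprecationWarning on Python 3.6+ and a W605 report from pycodestyle 2.4.0+, while a raw string or a doubled backslash does not:

import re

# Before: '\d' is an invalid escape sequence in a normal string literal;
# Python 3.6+ emits a DeprecationWarning and pycodestyle >= 2.4.0 flags
# the line as W605.
parse = re.match("^(ERROR|WARNING) (\d+): (.+)$", "ERROR 3117: Division by zero")

# Fix 1: a raw string passes the backslash through to the regex engine untouched.
parse = re.match(r"^(ERROR|WARNING) (\d+): (.+)$", "ERROR 3117: Division by zero")

# Fix 2: where a raw string is inconvenient, double the backslash instead.
parse = re.match("^(ERROR|WARNING) (\\d+): (.+)$", "ERROR 3117: Division by zero")

All three calls match the same input; only the first one produces a warning.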
@@ -72,12 +72,12 @@ class SnippetWriter(object):
 def _indent_xml(self, my_string):
 my_string = my_string.encode("utf-8")
 # convert to plain string without indents and spaces
-my_re = re.compile('>\s+([^\s])', re.DOTALL)
-my_string = myre.sub('>\g<1>', my_string)
+my_re = re.compile(r'>\s+([^\s])', re.DOTALL)
+my_string = myre.sub(r'>\g<1>', my_string)
 my_string = xml.dom.minidom.parseString(my_string).toprettyxml()
 # remove line breaks
-my_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
-my_string = my_re.sub('>\g<1></', my_string)
+my_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
+my_string = my_re.sub(r'>\g<1></', my_string)
 return my_string

 def output_request(self, url, output_headers, body, content_type, method,
@@ -174,12 +174,12 @@ class ExampleClient(object):
 def _indent_xml(self, my_string):
 my_string = my_string.encode("utf-8")
 # convert to plain string without indents and spaces
-my_re = re.compile('>\s+([^\s])', re.DOTALL)
-my_string = myre.sub('>\g<1>', my_string)
+my_re = re.compile(r'>\s+([^\s])', re.DOTALL)
+my_string = myre.sub(r'>\g<1>', my_string)
 my_string = xml.dom.minidom.parseString(my_string).toprettyxml()
 # remove line breaks
-my_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
-my_string = my_re.sub('>\g<1></', my_string)
+my_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
+my_string = my_re.sub(r'>\g<1></', my_string)
 return my_string

 def output_request(self, url, output_headers, body, content_type, method,
@@ -54,7 +54,7 @@ if CONFIG.values.get('rabbit_runs_locally', False) == True:
 shell=False)
 for line in iter(proc.stdout.readline, ""):
 print("LIST QUEUES:" + line)
-m = re.search("""%s\s+([0-9]+)""" % queue_name, line)
+m = re.search(r"%s\s+([0-9]+)" % queue_name, line)
 if m:
 return int(m.group(1))
 return None
@@ -120,9 +120,9 @@ class Service(object):
 proc = start_proc(["/usr/bin/pmap", "-d", str(pid)],
 shell=False)
 for line in iter(proc.stdout.readline, ""):
-m = re.search("""mapped\:\s([0-9]+)K\s+"""
-"""writeable/private:\s([0-9]+)K\s+"""
-"""shared:\s+([0-9]+)K""", line)
+m = re.search(r"mapped\:\s([0-9]+)K\s+"
+r"writeable/private:\s([0-9]+)K\s+"
+r"shared:\s+([0-9]+)K", line)
 if m:
 return MemoryInfo(int(m.group(1)), int(m.group(2)),
 int(m.group(3)))
@@ -79,7 +79,7 @@ def check_dependencies():
 print('Installing virtualenv via easy_install...'),
 if not (run_command(['which', 'easy_install']) and
 run_command(['easy_install', 'virtualenv'])):
-die('ERROR: virtualenv not found.\n\Trove development'
+die('ERROR: virtualenv not found.\nTrove development'
 ' requires virtualenv, please install it using your'
 ' favorite package management tool')
 print('done.')
@@ -205,7 +205,8 @@ class LintRunner(object):
 def __init__(self):
 self.config = Config()
 self.idline = re.compile("^[*]* Module .*")
-self.detail = re.compile("(\S+):(\d+): \[(\S+)\((\S+)\), (\S+)?] (.*)")
+self.detail = re.compile(r"(\S+):(\d+): \[(\S+)\((\S+)\),"
+r" (\S+)?] (.*)")

 def dolint(self, filename):
 exceptions = set()
@@ -17,8 +17,8 @@
 url_ref = {
 "type": "string",
 "minLength": 8,
-"pattern": 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]'
-'|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
+"pattern": r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]'
+r'|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
 }

 boolean_string = {
@@ -80,7 +80,7 @@ volume_size = {
 host_string = {
 "type": "string",
 "minLength": 1,
-"pattern": "^[%]?[\w(-).]*[%]?$"
+"pattern": r"^[%]?[\w(-).]*[%]?$"
 }

 name_string = {
@@ -28,7 +28,7 @@ class CassandraSchema(models.DatastoreSchema):
 return 32

 def _is_valid_schema_name(self, value):
-return not any(c in value for c in '/\. "$')
+return not any(c in value for c in r'/\. "$')


 class CassandraUser(models.DatastoreUser):
@@ -27,7 +27,7 @@ class MongoDBSchema(models.DatastoreSchema):
 def _is_valid_schema_name(self, value):
 # check against the invalid character set from
 # http://docs.mongodb.org/manual/reference/limits
-return not any(c in value for c in '/\. "$')
+return not any(c in value for c in r'/\. "$')


 class MongoDBUser(models.DatastoreUser):
@@ -31,7 +31,7 @@ class MySQLSchema(models.DatastoreSchema):
 # Defaults
 __charset__ = "utf8"
 __collation__ = "utf8_general_ci"
-dbname = re.compile("^[A-Za-z0-9_-]+[\s\?\#\@]*[A-Za-z0-9_-]+$")
+dbname = re.compile(r"^[A-Za-z0-9_-]+[\s\?\#\@]*[A-Za-z0-9_-]+$")

 # Complete list of acceptable values
 collation = mysql_settings.collation
@@ -121,7 +121,7 @@ class MySQLSchema(models.DatastoreSchema):
 class MySQLUser(models.DatastoreUser):
 """Represents a MySQL User and its associated properties."""

-not_supported_chars = re.compile("^\s|\s$|'|\"|;|`|,|/|\\\\")
+not_supported_chars = re.compile(r"""^\s|\s$|'|"|;|`|,|/|\\""")

 def _is_valid_string(self, value):
 if (not value or
@@ -197,16 +197,16 @@ class Request(base_wsgi.Request):
 @utils.cached_property
 def accept_version(self):
 accept_header = self.headers.get('ACCEPT', "")
-accept_version_re = re.compile(".*?application/vnd.openstack.trove"
-"(\+.+?)?;"
-"version=(?P<version_no>\d+\.?\d*)")
+accept_version_re = re.compile(r".*?application/vnd.openstack.trove"
+r"(\+.+?)?;"
+r"version=(?P<version_no>\d+\.?\d*)")

 match = accept_version_re.search(accept_header)
 return match.group("version_no") if match else None

 @utils.cached_property
 def url_version(self):
-versioned_url_re = re.compile("/v(?P<version_no>\d+\.?\d*)")
+versioned_url_re = re.compile(r"/v(?P<version_no>\d+\.?\d*)")
 match = versioned_url_re.search(self.path)
 return match.group("version_no") if match else None

@@ -295,7 +295,7 @@ class ImportOverrideStrategy(ConfigurationOverrideStrategy):
 within their set got applied.
 """

-FILE_NAME_PATTERN = '%s-([0-9]+)-%s\.%s$'
+FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\.%s$'

 def __init__(self, revision_dir, revision_ext):
 """
@@ -108,7 +108,7 @@ def to_bytes(value):
 """Convert numbers with a byte suffix to bytes.
 """
 if isinstance(value, six.string_types):
-pattern = re.compile('^(\d+)([K,M,G]{1})$')
+pattern = re.compile(r'^(\d+)([K,M,G]{1})$')
 match = pattern.match(value)
 if match:
 value = match.group(1)
@@ -43,8 +43,8 @@ cmd_update_sysctl_conf = ('echo "vm.swappiness = 0" | sudo tee -a '
 '/etc/sysctl.conf')
 cmd_reset_pwd = 'sudo /opt/couchbase/bin/cbreset_password %(IP)s:8091'
 pwd_file = COUCHBASE_CONF_DIR + SECRET_KEY
-cmd_get_password_from_config = """sudo /opt/couchbase/bin/erl -noinput -eval \
-'case file:read_file("/opt/couchbase/var/lib/couchbase/config/config.dat") \
-of {ok, B} -> io:format("~p~n", [binary_to_term(B)]) end.' \
--run init stop | grep '\[{"root",\[{password,' | awk -F\\" '{print $4}'
-"""
+cmd_get_password_from_config = (
+r"""sudo /opt/couchbase/bin/erl -noinput -eval 'case file:read_file("""
+r""""/opt/couchbase/var/lib/couchbase/config/config.dat") of {ok, B} ->"""
+r"""io:format("~p~n", [binary_to_term(B)]) end.' -run init stop"""
+r""" | grep '\[{"root",\[{password,' | awk -F\" '{print $4}'""")
@@ -61,11 +61,11 @@ RESTORE_OFFLINE_DB = (
 "db2 restore database %(dbname)s from " + DB2_BACKUP_DIR)
 GET_DB_SIZE = (
 "db2 +o connect to %(dbname)s;"
-"db2 call get_dbsize_info\(?, ?, ?, -1\) | "
+r"db2 call get_dbsize_info\(?, ?, ?, -1\) | "
 "grep -A1 'DATABASESIZE' | grep 'Parameter Value' | sed 's/.*[:]//' |"
 " tr -d '\n'; db2 +o connect reset")
 GET_DB_NAMES = ("find /home/db2inst1/db2inst1/backup/ -type f -name '*.001' |"
-" grep -Po \"(?<=backup/)[^.']*(?=\.)\""
+" grep -Po \"(?<=backup/)[^.']*(?=\\.)\""
 GET_DBM_CONFIGURATION = "db2 get dbm configuration > %(dbm_config)s"
 UPDATE_DBM_CONFIGURATION = ("db2 update database manager configuration using "
 "%(parameter)s %(value)s")
@@ -445,7 +445,7 @@ class PgSqlApp(object):
 """
 r = operating_system.read_file(self.pgsql_recovery_config,
 as_root=True)
-regexp = re.compile("host=(\d+.\d+.\d+.\d+) ")
+regexp = re.compile(r"host=(\d+.\d+.\d+.\d+) ")
 m = regexp.search(r)
 return m.group(1)

@@ -100,7 +100,7 @@ class VSqlError(object):
 stderr looks like: "ERROR 3117: Division by zero"
 :param stderr: string from executing statement via vsql
 """
-parse = re.match("^(ERROR|WARNING) (\d+): (.+)$", stderr)
+parse = re.match(r"^(ERROR|WARNING) (\d+): (.+)$", stderr)
 if not parse:
 raise ValueError(_("VSql stderr %(msg)s not recognized.")
 % {'msg': stderr})
@@ -202,7 +202,7 @@ class RedhatPackagerMixin(RPMPackagerMixin):

 """
 cmd = "sudo yum --color=never -y install %s" % " ".join(packages)
-output_expects = ['\[sudo\] password for .*:',
+output_expects = [r'\[sudo\] password for .*:',
 'No package (.*) available.',
 ('file .* from install of .* conflicts with file'
 ' from package (.*?)\r\n'),
@@ -243,7 +243,7 @@ class RedhatPackagerMixin(RPMPackagerMixin):
 """
 cmd = "sudo yum --color=never -y remove %s" % package_name
 LOG.debug("Running package remove command: %s", cmd)
-output_expects = ['\[sudo\] password for .*:',
+output_expects = [r'\[sudo\] password for .*:',
 'No Packages marked for removal',
 'Removed:']
 i, match = self.pexpect_run(cmd, output_expects, time_out)
@@ -395,7 +395,7 @@ class DebianPackagerMixin(BasePackagerMixin):
 def pkg_version(self, package_name):
 std_out = getoutput("apt-cache", "policy", package_name)
 for line in std_out.split("\n"):
-m = re.match("\s+Installed: (.*)", line)
+m = re.match(r"\s+Installed: (.*)", line)
 if m:
 version = m.group(1)
 if version == "(none)":
@@ -102,8 +102,8 @@ class NodetoolSnapshot(base.BackupRunner):
 snapshot_name)
 snapshot_files = operating_system.list_files_in_directory(
 data_dir, recursive=True, include_dirs=False,
-pattern='.*/snapshots/%s/.*\.%s' % (snapshot_name,
+pattern=r'.*/snapshots/%s/.*\.%s' % (snapshot_name,
 self._SNAPSHOT_EXTENSION),
 as_root=True)
 num_snapshot_files = len(snapshot_files)
 LOG.debug('Found %(num)d snapshot (*.%(ext)s) files.',
@@ -125,8 +125,8 @@ class PgBaseBackup(base.BackupRunner, PgBaseBackupUtil):
 operating_system.chmod(
 metadata_file, FileMode(add=[stat.S_IROTH]), as_root=True)

-start_re = re.compile("START WAL LOCATION: (.*) \(file (.*)\)")
-stop_re = re.compile("STOP WAL LOCATION: (.*) \(file (.*)\)")
+start_re = re.compile(r"START WAL LOCATION: (.*) \(file (.*)\)")
+stop_re = re.compile(r"STOP WAL LOCATION: (.*) \(file (.*)\)")
 checkpt_re = re.compile("CHECKPOINT LOCATION: (.*)")
 label_re = re.compile("LABEL: (.*)")

@@ -85,7 +85,8 @@ class InnoBackupEx(base.BackupRunner):
 def metadata(self):
 LOG.debug('Getting metadata from backup.')
 meta = {}
-lsn = re.compile("The latest check point \(for incremental\): '(\d+)'")
+lsn = re.compile(r"The latest check point \(for incremental\): "
+r"'(\d+)'")
 with open('/tmp/innobackupex.log', 'r') as backup_log:
 output = backup_log.read()
 match = lsn.search(output)
@@ -37,7 +37,7 @@ class PgDump(base.RestoreRunner):
 base_restore_cmd = 'psql -U os_admin'

 IGNORED_ERROR_PATTERNS = [
-re.compile("ERROR:\s*role \"postgres\" already exists"),
+re.compile(r'ERROR:\s*role "postgres" already exists'),
 ]

 def restore(self):
@@ -96,7 +96,7 @@ class PgBaseBackup(base.RestoreRunner):
 base_restore_cmd = ''

 IGNORED_ERROR_PATTERNS = [
-re.compile("ERROR:\s*role \"postgres\" already exists"),
+re.compile(r'ERROR:\s*role "postgres" already exists'),
 ]

 def __init__(self, *args, **kwargs):
@@ -67,7 +67,7 @@ class HostsBeforeInstanceCreation(object):

 @test(depends_on=[test_empty_index_host_list])
 def test_empty_index_host_list_single(self):
-self.host.name = self.host.name.replace(".", "\.")
+self.host.name = self.host.name.replace(".", r"\.")
 result = self.client.hosts.get(self.host)
 assert_not_equal(result, None,
 "Get host should not be empty for: %s" % self.host)
@@ -126,7 +126,7 @@ class HostsMgmtCommands(object):

 @test(depends_on=[test_index_host_list])
 def test_index_host_list_single(self):
-self.host.name = self.host.name.replace(".", "\.")
+self.host.name = self.host.name.replace(".", r"\.")
 result = self.client.hosts.get(self.host)
 assert_not_equal(result, None,
 "list hosts should not be empty: %s" % str(result))
@@ -66,7 +66,7 @@ class FakeGuest(object):
 self.version += 1

 def _check_username(self, username):
-unsupported_chars = re.compile("^\s|\s$|'|\"|;|`|,|/|\\\\")
+unsupported_chars = re.compile(r"""^\s|\s$|'|"|;|`|,|/|\\""")
 if (not username or
 unsupported_chars.search(username) or
 ("%r" % username).find("\\") != -1):
@@ -1146,35 +1146,35 @@ class TestOperatingSystem(trove_testtools.TestCase):

 # Only '*.txt' in the top directory.
 self._assert_list_files(
-root_path, False, '.*\.txt$', False, all_paths, 3)
+root_path, False, r'.*\.txt$', False, all_paths, 3)

 # Only '*.txt' (including directories) in the top directory.
 self._assert_list_files(
-root_path, False, '.*\.txt$', True, all_paths, 3)
+root_path, False, r'.*\.txt$', True, all_paths, 3)

 # Only '*.txt' recursive.
 self._assert_list_files(
-root_path, True, '.*\.txt$', True, all_paths, 9)
+root_path, True, r'.*\.txt$', True, all_paths, 9)

 # Only '*.txt' (including directories) recursive.
 self._assert_list_files(
-root_path, True, '.*\.txt$', False, all_paths, 9)
+root_path, True, r'.*\.txt$', False, all_paths, 9)

 # Only extension-less files in the top directory.
 self._assert_list_files(
-root_path, False, '[^\.]*$', False, all_paths, 3)
+root_path, False, r'[^\.]*$', False, all_paths, 3)

 # Only extension-less files recursive.
 self._assert_list_files(
-root_path, True, '[^\.]*$', False, all_paths, 9)
+root_path, True, r'[^\.]*$', False, all_paths, 9)

 # Non-existing extension in the top directory.
 self._assert_list_files(
-root_path, False, '.*\.bak$', False, all_paths, 0)
+root_path, False, r'.*\.bak$', False, all_paths, 0)

 # Non-existing extension recursive.
 self._assert_list_files(
-root_path, True, '.*\.bak$', False, all_paths, 0)
+root_path, True, r'.*\.bak$', False, all_paths, 0)
 finally:
 try:
 os.remove(root_path)
@@ -73,10 +73,10 @@ class SqlAlchemyConnection(object):
 @staticmethod
 def _exception_is_permissions_issue(msg):
 """Assert message cited a permissions issue and not something else."""
-pos_error = re.compile(".*Host '[\w\.]*' is not allowed to connect to "
-"this MySQL server.*")
+pos_error = re.compile(r".*Host '[\w\.]*' is not allowed to connect "
+"to this MySQL server.*")
 pos_error1 = re.compile(".*Access denied for user "
-"'[\w\*\!\@\#\^\&]*'@'[\w\.]*'.*")
+r"'[\w\*\!\@\#\^\&]*'@'[\w\.]*'.*")
 if (pos_error.match(msg) or pos_error1.match(msg)):
 return True

@@ -130,7 +130,7 @@ class PexpectMySqlConnection(object):
 cmd = '%s %s' % (tests.SSH_CMD, ssh_args)
 self.proc = pexpect.spawn(cmd)
 print(cmd)
-self.proc.expect(":~\$", timeout=self.TIME_OUT)
+self.proc.expect(r":~\$", timeout=self.TIME_OUT)
 cmd2 = "mysql --host '%s' -u '%s' '-p%s'\n" % \
 (self.host, self.user, self.password)
 print(cmd2)
@@ -152,7 +152,7 @@ class PexpectMySqlConnection(object):
 self.proc.close()

 def execute(self, cmd):
-self.proc.send(cmd + "\G\n")
+self.proc.send(cmd + "\\G\n")
 outcome = self.proc.expect(['Empty set', 'mysql>'],
 timeout=self.TIME_OUT)
 if outcome == 0: