Remove py2-only code paths
Change-Id: Ic66b9ae89837afe31929ce07cc625dfc28314ea3
parent 94d3a5dee8
commit 128124cdd8
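The recurring pattern throughout this commit: code that branched on six.PY2/six.PY3 to pick an import now imports the Python 3 standard-library name directly, and six helpers are dropped. A minimal illustrative sketch of the shape of the change (generic names, not taken from any single hunk below):

    # Before: conditional imports via six
    # import six
    # if six.PY3:
    #     from urllib.request import urlopen
    # else:
    #     from urllib2 import urlopen

    # After: Python 3 only, import the stdlib modules directly
    from urllib.request import urlopen
    from configparser import ConfigParser  # was: from six.moves.configparser import ConfigParser
    import pickle                           # was: import six.moves.cPickle as pickle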
@@ -80,11 +80,7 @@ presented below::
from swift.common.request_helpers import get_sys_meta_prefix
from swift.proxy.controllers.base import get_container_info
from eventlet import Timeout
import six
if six.PY3:
    from eventlet.green.urllib import request as urllib2
else:
    from eventlet.green import urllib2
from eventlet.green.urllib import urllib_request

# x-container-sysmeta-webhook
SYSMETA_WEBHOOK = get_sys_meta_prefix('container') + 'webhook'
@@ -119,10 +115,10 @@ presented below::
webhook = container_info['sysmeta'].get('webhook')
if webhook:
    # create a POST request with obj name as body
    webhook_req = urllib2.Request(webhook, data=obj)
    webhook_req = urllib_request.Request(webhook, data=obj)
    with Timeout(20):
        try:
            urllib2.urlopen(webhook_req).read()
            urllib_request.urlopen(webhook_req).read()
        except (Exception, Timeout):
            self.logger.exception(
                'failed POST to webhook %s' % webhook)
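For readability, a minimal self-contained sketch of what the documentation example does after this change (py3-only; eventlet's green urllib.request module replaces the old conditional urllib2 import; obj is assumed to be bytes):

    from eventlet import Timeout
    from eventlet.green.urllib import request as urllib_request

    def notify_webhook(webhook, obj, logger):
        # POST the object name to the configured webhook URL
        webhook_req = urllib_request.Request(webhook, data=obj)
        with Timeout(20):
            try:
                urllib_request.urlopen(webhook_req).read()
            except (Exception, Timeout):
                logger.exception('failed POST to webhook %s' % webhook)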
@@ -65,7 +65,6 @@ PyYAML==3.12
requests==2.14.2
requests-mock==1.2.0
rfc3986==1.1.0
six==1.10.0
smmap2==2.0.3
snowballstemmer==1.2.1
stestr==2.0.0
@@ -192,7 +192,6 @@ s3transfer===0.10.4;python_version>='3.8'
s3transfer===0.8.2;python_version=='3.7'
s3transfer===0.5.2;python_version=='3.6'
setuptools===75.3.0;python_version>='3.12'
six===1.16.0
smmap===5.0.1;python_version>='3.7'
smmap===5.0.0;python_version=='3.6'
stestr===4.1.0
@@ -7,7 +7,6 @@ greenlet>=0.3.3
PasteDeploy>=2.0.0
lxml>=4.2.3
requests>=2.14.2 # Apache-2.0
six>=1.10.0
xattr>=0.7.2;sys_platform!='win32' # MIT
PyECLib>=1.3.1,!=1.6.2,!=1.6.3 # BSD
cryptography>=2.0.2 # BSD/Apache-2.0
@@ -19,10 +19,8 @@ Pluggable Back-end for Account Server

import sqlite3

import six

from swift.common.utils import Timestamp, RESERVED_BYTE
from swift.common.db import DatabaseBroker, utf8encode, zero_like
from swift.common.db import DatabaseBroker, zero_like

DATADIR = 'accounts'
@@ -372,9 +370,6 @@ class AccountBroker(DatabaseBroker):
    put_timestamp, 0)
"""
delim_force_gte = False
if six.PY2:
    (marker, end_marker, prefix, delimiter) = utf8encode(
        marker, end_marker, prefix, delimiter)
if reverse:
    # Reverse the markers if we are reversing the listing.
    marker, end_marker = end_marker, marker
@@ -22,7 +22,6 @@ from time import time
import itertools

from eventlet import GreenPool, sleep, Timeout
import six

import swift.common.db
from swift.account.backend import AccountBroker, DATADIR
@@ -267,12 +266,9 @@ class AccountReaper(Daemon):
while containers:
    try:
        for (container, _junk, _junk, _junk, _junk) in containers:
            if six.PY3:
                container_ = container.encode('utf-8')
            else:
                container_ = container
            this_shard = (
                int(md5(container_, usedforsecurity=False)
                int(md5(container.encode('utf-8'),
                        usedforsecurity=False)
                    .hexdigest(), 16) % len(nodes))
            if container_shard not in (this_shard, None):
                continue
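The reaper hunk above drops the PY3-only encode branch and feeds the UTF-8 encoded container name straight to md5. A small sketch of the shard computation, assuming Swift's md5 wrapper from swift.common.utils (it accepts usedforsecurity= for FIPS builds; hashlib requires bytes on Python 3, hence the encode):

    from swift.common.utils import md5

    def pick_shard(container, nodes):
        # container is a text string on py3; hash its UTF-8 bytes
        return int(md5(container.encode('utf-8'),
                       usedforsecurity=False).hexdigest(), 16) % len(nodes)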
@ -15,8 +15,6 @@
|
||||
|
||||
import json
|
||||
|
||||
import six
|
||||
|
||||
from swift.common import constraints
|
||||
from swift.common.middleware import listing_formats
|
||||
from swift.common.swob import HTTPOk, HTTPNoContent, str_to_wsgi
|
||||
@ -86,12 +84,11 @@ def account_listing_response(account, req, response_content_type, broker=None,
|
||||
data = []
|
||||
for (name, object_count, bytes_used, put_timestamp, is_subdir) \
|
||||
in account_list:
|
||||
name_ = name.decode('utf8') if six.PY2 else name
|
||||
if is_subdir:
|
||||
data.append({'subdir': name_})
|
||||
data.append({'subdir': name})
|
||||
else:
|
||||
data.append(
|
||||
{'name': name_, 'count': object_count, 'bytes': bytes_used,
|
||||
{'name': name, 'count': object_count, 'bytes': bytes_used,
|
||||
'last_modified': Timestamp(put_timestamp).isoformat})
|
||||
if response_content_type.endswith('/xml'):
|
||||
account_list = listing_formats.account_to_xml(data, account)
|
||||
|
@@ -23,7 +23,7 @@ from itertools import chain
import json
from eventlet.greenpool import GreenPool
from eventlet.event import Event
from six.moves.urllib.parse import quote
from urllib.parse import quote

from swift.common.ring import Ring
from swift.common.utils import split_path
@ -28,7 +28,6 @@ import argparse
|
||||
import io
|
||||
import itertools
|
||||
import json
|
||||
import six
|
||||
import time
|
||||
|
||||
from swift.common.internal_client import InternalClient
|
||||
@ -50,17 +49,11 @@ def make_delete_jobs(account, container, objects, timestamp):
|
||||
:returns: list of dicts appropriate for an UPDATE request to an
|
||||
expiring-object queue
|
||||
'''
|
||||
if six.PY2:
|
||||
if isinstance(account, str):
|
||||
account = account.decode('utf8')
|
||||
if isinstance(container, str):
|
||||
container = container.decode('utf8')
|
||||
return [
|
||||
{
|
||||
'name': build_task_obj(
|
||||
timestamp, account, container,
|
||||
obj.decode('utf8') if six.PY2 and isinstance(obj, str)
|
||||
else obj, high_precision=True),
|
||||
obj, high_precision=True),
|
||||
'deleted': 0,
|
||||
'created_at': timestamp.internal,
|
||||
'etag': MD5_OF_EMPTY_STRING,
|
||||
|
@ -23,9 +23,7 @@ from time import time
|
||||
|
||||
from eventlet import GreenPool, patcher, sleep
|
||||
from eventlet.pools import Pool
|
||||
import six
|
||||
from six.moves import range
|
||||
from six.moves.configparser import ConfigParser
|
||||
from configparser import ConfigParser
|
||||
|
||||
from swift.common.internal_client import SimpleClient
|
||||
from swift.common.ring import Ring
|
||||
@ -53,7 +51,7 @@ def put_object(connpool, container, obj, report):
|
||||
global retries_done
|
||||
try:
|
||||
with connpool.item() as conn:
|
||||
data = io.BytesIO(obj if six.PY2 else obj.encode('utf8'))
|
||||
data = io.BytesIO(obj.encode('utf8'))
|
||||
conn.put_object(container, obj, data,
|
||||
headers={'x-object-meta-dispersion': obj})
|
||||
retries_done += conn.attempts - 1
|
||||
|
@@ -17,7 +17,7 @@
from __future__ import print_function
import json
from collections import defaultdict
from six.moves.configparser import ConfigParser
from configparser import ConfigParser
from optparse import OptionParser
from sys import exit, stdout, stderr
from time import time
@ -24,8 +24,7 @@ import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
import six
|
||||
from six.moves.configparser import ConfigParser
|
||||
from configparser import ConfigParser
|
||||
|
||||
from swift.common.utils import backward, get_logger, dump_recon_cache, \
|
||||
config_true_value
|
||||
@ -128,8 +127,7 @@ def get_errors(error_re, log_file_pattern, minutes, logger,
|
||||
print("Unable to open " + path)
|
||||
sys.exit(1)
|
||||
for line in backward(f):
|
||||
if not six.PY2:
|
||||
line = line.decode(log_file_encoding, 'surrogateescape')
|
||||
line = line.decode(log_file_encoding, 'surrogateescape')
|
||||
if '[ 0.000000]' in line \
|
||||
or 'KERNEL supported cpus:' in line \
|
||||
or 'BIOS-provided physical RAM map:' in line:
|
||||
|
@ -17,7 +17,6 @@ Script for generating a form signature for use with FormPost middleware.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
import hmac
|
||||
import six
|
||||
from hashlib import sha1
|
||||
from os.path import basename
|
||||
from time import time
|
||||
@ -95,11 +94,9 @@ def main(argv):
|
||||
return 1
|
||||
data = '%s\n%s\n%s\n%s\n%s' % (path, redirect, max_file_size,
|
||||
max_file_count, expires)
|
||||
if six.PY3:
|
||||
data = data if isinstance(data, six.binary_type) else \
|
||||
data.encode('utf8')
|
||||
key = key if isinstance(key, six.binary_type) else \
|
||||
key.encode('utf8')
|
||||
data = data.encode('utf8')
|
||||
key = key if isinstance(key, bytes) else \
|
||||
key.encode('utf8')
|
||||
sig = hmac.new(key, data,
|
||||
sha1).hexdigest()
|
||||
print(' Expires:', expires)
|
||||
|
@ -21,8 +21,7 @@ import sqlite3
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from swift.common.exceptions import LockTimeout
|
||||
from swift.common.utils import hash_path, storage_directory, \
|
||||
@ -722,10 +721,9 @@ def print_item_locations(ring, ring_name=None, account=None, container=None,
|
||||
|
||||
|
||||
def obj_main():
|
||||
if not six.PY2:
|
||||
# Make stdout able to write escaped bytes
|
||||
sys.stdout = codecs.getwriter("utf-8")(
|
||||
sys.stdout.detach(), errors='surrogateescape')
|
||||
# Make stdout able to write escaped bytes
|
||||
sys.stdout = codecs.getwriter("utf-8")(
|
||||
sys.stdout.detach(), errors='surrogateescape')
|
||||
|
||||
parser = OptionParser('%prog [options] OBJECT_FILE')
|
||||
parser.add_option(
|
||||
|
@ -163,9 +163,6 @@ import sys
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
|
||||
from six.moves import input
|
||||
|
||||
|
||||
from swift.common.utils import Timestamp, get_logger, ShardRange, readconf, \
|
||||
ShardRangeList, non_negative_int, config_positive_int_value
|
||||
from swift.container.backend import ContainerBroker, UNSHARDED
|
||||
|
@ -18,8 +18,7 @@
|
||||
from __future__ import print_function
|
||||
|
||||
from eventlet.green import socket
|
||||
from six import string_types
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from swift.common.utils import (
|
||||
SWIFT_CONF_FILE, md5_hash_for_file, set_swift_dir)
|
||||
@ -30,13 +29,9 @@ import json
|
||||
import optparse
|
||||
import time
|
||||
import sys
|
||||
import six
|
||||
import os
|
||||
|
||||
if six.PY3:
|
||||
from eventlet.green.urllib import request as urllib2
|
||||
else:
|
||||
from eventlet.green import urllib2
|
||||
from eventlet.green.urllib import request as urllib_request
|
||||
|
||||
|
||||
def seconds2timeunit(seconds):
|
||||
@ -86,19 +81,19 @@ class Scout(object):
|
||||
"""
|
||||
url = base_url + recon_type
|
||||
try:
|
||||
body = urllib2.urlopen(url, timeout=self.timeout).read()
|
||||
if six.PY3 and isinstance(body, six.binary_type):
|
||||
body = urllib_request.urlopen(url, timeout=self.timeout).read()
|
||||
if isinstance(body, bytes):
|
||||
body = body.decode('utf8')
|
||||
content = json.loads(body)
|
||||
if self.verbose:
|
||||
print("-> %s: %s" % (url, content))
|
||||
status = 200
|
||||
except urllib2.HTTPError as err:
|
||||
except urllib_request.HTTPError as err:
|
||||
if not self.suppress_errors or self.verbose:
|
||||
print("-> %s: %s" % (url, err))
|
||||
content = err
|
||||
status = err.code
|
||||
except (urllib2.URLError, socket.timeout) as err:
|
||||
except (urllib_request.URLError, socket.timeout) as err:
|
||||
if not self.suppress_errors or self.verbose:
|
||||
print("-> %s: %s" % (url, err))
|
||||
content = err
|
||||
@ -128,19 +123,19 @@ class Scout(object):
|
||||
"""
|
||||
try:
|
||||
url = "http://%s:%s/" % (host[0], host[1])
|
||||
req = urllib2.Request(url)
|
||||
req = urllib_request.Request(url)
|
||||
req.get_method = lambda: 'OPTIONS'
|
||||
conn = urllib2.urlopen(req)
|
||||
conn = urllib_request.urlopen(req)
|
||||
header = conn.info().get('Server')
|
||||
server_header = header.split('/')
|
||||
content = server_header[0]
|
||||
status = 200
|
||||
except urllib2.HTTPError as err:
|
||||
except urllib_request.HTTPError as err:
|
||||
if not self.suppress_errors or self.verbose:
|
||||
print("-> %s: %s" % (url, err))
|
||||
content = err
|
||||
status = err.code
|
||||
except (urllib2.URLError, socket.timeout) as err:
|
||||
except (urllib_request.URLError, socket.timeout) as err:
|
||||
if not self.suppress_errors or self.verbose:
|
||||
print("-> %s: %s" % (url, err))
|
||||
content = err
|
||||
@ -1074,7 +1069,7 @@ class SwiftRecon(object):
|
||||
ring_names = [p.ring_name for p in POLICIES if (
|
||||
p.name == policy or not policy or (
|
||||
policy.isdigit() and int(policy) == int(p) or
|
||||
(isinstance(policy, string_types)
|
||||
(isinstance(policy, str)
|
||||
and policy in p.aliases)))]
|
||||
else:
|
||||
ring_names = [self.server_type]
|
||||
|
@ -31,9 +31,6 @@ from datetime import timedelta
|
||||
import optparse
|
||||
import math
|
||||
|
||||
from six.moves import zip as izip
|
||||
from six.moves import input
|
||||
|
||||
from swift.common import exceptions
|
||||
from swift.common.ring import RingBuilder, Ring, RingData
|
||||
from swift.common.ring.builder import MAX_BALANCE
|
||||
@ -156,8 +153,8 @@ def _parse_add_values(argvish):
|
||||
print(Commands.add.__doc__.strip())
|
||||
exit(EXIT_ERROR)
|
||||
|
||||
devs_and_weights = izip(islice(args, 0, len(args), 2),
|
||||
islice(args, 1, len(args), 2))
|
||||
devs_and_weights = zip(islice(args, 0, len(args), 2),
|
||||
islice(args, 1, len(args), 2))
|
||||
|
||||
for devstr, weightstr in devs_and_weights:
|
||||
dev_dict = parse_add_value(devstr)
|
||||
@ -257,8 +254,8 @@ def _parse_set_weight_values(argvish):
|
||||
print(Commands.set_weight.__doc__.strip())
|
||||
exit(EXIT_ERROR)
|
||||
|
||||
devs_and_weights = izip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
devs_and_weights = zip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
for devstr, weightstr in devs_and_weights:
|
||||
devs = (builder.search_devs(
|
||||
parse_search_value(devstr)) or [])
|
||||
@ -347,8 +344,8 @@ def _parse_set_region_values(argvish):
|
||||
print(Commands.set_region.__doc__.strip())
|
||||
exit(EXIT_ERROR)
|
||||
|
||||
devs_and_regions = izip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
devs_and_regions = zip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
for devstr, regionstr in devs_and_regions:
|
||||
devs.extend(builder.search_devs(
|
||||
parse_search_value(devstr)) or [])
|
||||
@ -382,8 +379,8 @@ def _parse_set_zone_values(argvish):
|
||||
print(Commands.set_zone.__doc__.strip())
|
||||
exit(EXIT_ERROR)
|
||||
|
||||
devs_and_zones = izip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
devs_and_zones = zip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
for devstr, zonestr in devs_and_zones:
|
||||
devs.extend(builder.search_devs(
|
||||
parse_search_value(devstr)) or [])
|
||||
@ -415,8 +412,8 @@ def _parse_set_info_values(argvish):
|
||||
print(Commands.search.__doc__.strip())
|
||||
exit(EXIT_ERROR)
|
||||
|
||||
searches_and_changes = izip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
searches_and_changes = zip(islice(argvish, 0, len(argvish), 2),
|
||||
islice(argvish, 1, len(argvish), 2))
|
||||
|
||||
for search_value, change_value in searches_and_changes:
|
||||
devs = builder.search_devs(parse_search_value(search_value))
|
||||
|
@ -27,28 +27,22 @@ BufferedHTTPResponse.
|
||||
"""
|
||||
|
||||
from swift.common import constraints
|
||||
import http.client
|
||||
import logging
|
||||
import time
|
||||
import socket
|
||||
|
||||
import eventlet
|
||||
from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, \
|
||||
from eventlet.green.http.client import CONTINUE, HTTPConnection, \
|
||||
HTTPResponse, HTTPSConnection, _UNKNOWN, ImproperConnectionState
|
||||
from six.moves.urllib.parse import quote, parse_qsl, urlencode
|
||||
import six
|
||||
from urllib.parse import quote, parse_qsl, urlencode
|
||||
|
||||
if six.PY2:
|
||||
httplib = eventlet.import_patched('httplib')
|
||||
from eventlet.green import httplib as green_httplib
|
||||
else:
|
||||
httplib = eventlet.import_patched('http.client')
|
||||
from eventlet.green.http import client as green_httplib
|
||||
from eventlet.green.http import client as green_http_client
|
||||
|
||||
# Apparently http.server uses this to decide when/whether to send a 431.
|
||||
# Give it some slack, so the app is more likely to get the chance to reject
|
||||
# with a 400 instead.
|
||||
httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
|
||||
green_httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
|
||||
http.client._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
|
||||
green_http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
|
||||
|
||||
|
||||
class BufferedHTTPResponse(HTTPResponse):
|
||||
@ -65,11 +59,6 @@ class BufferedHTTPResponse(HTTPResponse):
|
||||
# No socket means no file-like -- set it to None like in
|
||||
# HTTPResponse.close()
|
||||
self.fp = None
|
||||
elif six.PY2:
|
||||
# sock.fd is a socket._socketobject
|
||||
# sock.fd._sock is a _socket.socket object, which is what we want.
|
||||
self._real_socket = sock.fd._sock
|
||||
self.fp = sock.makefile('rb')
|
||||
else:
|
||||
# sock.fd is a socket.socket, which should have a _real_close
|
||||
self._real_socket = sock.fd
|
||||
@ -91,24 +80,23 @@ class BufferedHTTPResponse(HTTPResponse):
|
||||
self.will_close = _UNKNOWN # conn will close at end of response
|
||||
self._readline_buffer = b''
|
||||
|
||||
if not six.PY2:
|
||||
def begin(self):
|
||||
HTTPResponse.begin(self)
|
||||
header_payload = self.headers.get_payload()
|
||||
if isinstance(header_payload, list) and len(header_payload) == 1:
|
||||
header_payload = header_payload[0].get_payload()
|
||||
if header_payload:
|
||||
# This shouldn't be here. We must've bumped up against
|
||||
# https://bugs.python.org/issue37093
|
||||
for line in header_payload.rstrip('\r\n').split('\n'):
|
||||
if ':' not in line or line[:1] in ' \t':
|
||||
# Well, we're no more broken than we were before...
|
||||
# Should we support line folding?
|
||||
# How can/should we handle a bad header line?
|
||||
break
|
||||
header, value = line.split(':', 1)
|
||||
value = value.strip(' \t\n\r')
|
||||
self.headers.add_header(header, value)
|
||||
def begin(self):
|
||||
HTTPResponse.begin(self)
|
||||
header_payload = self.headers.get_payload()
|
||||
if isinstance(header_payload, list) and len(header_payload) == 1:
|
||||
header_payload = header_payload[0].get_payload()
|
||||
if header_payload:
|
||||
# This shouldn't be here. We must've bumped up against
|
||||
# https://bugs.python.org/issue37093
|
||||
for line in header_payload.rstrip('\r\n').split('\n'):
|
||||
if ':' not in line or line[:1] in ' \t':
|
||||
# Well, we're no more broken than we were before...
|
||||
# Should we support line folding?
|
||||
# How can/should we handle a bad header line?
|
||||
break
|
||||
header, value = line.split(':', 1)
|
||||
value = value.strip(' \t\n\r')
|
||||
self.headers.add_header(header, value)
|
||||
|
||||
def expect_response(self):
|
||||
if self.fp:
|
||||
@ -125,15 +113,7 @@ class BufferedHTTPResponse(HTTPResponse):
|
||||
self.status = status
|
||||
self.reason = reason.strip()
|
||||
self.version = 11
|
||||
if six.PY2:
|
||||
# Under py2, HTTPMessage.__init__ reads the headers
|
||||
# which advances fp
|
||||
self.msg = HTTPMessage(self.fp, 0)
|
||||
# immediately kill msg.fp to make sure it isn't read again
|
||||
self.msg.fp = None
|
||||
else:
|
||||
# py3 has a separate helper for it
|
||||
self.headers = self.msg = httplib.parse_headers(self.fp)
|
||||
self.headers = self.msg = http.client.parse_headers(self.fp)
|
||||
|
||||
def read(self, amt=None):
|
||||
if not self._readline_buffer:
|
||||
@ -157,26 +137,6 @@ class BufferedHTTPResponse(HTTPResponse):
|
||||
self._readline_buffer = b''
|
||||
return buf + HTTPResponse.read(self, smaller_amt)
|
||||
|
||||
def readline(self, size=1024):
|
||||
# You'd think Python's httplib would provide this, but it doesn't.
|
||||
# It does, however, provide a comment in the HTTPResponse class:
|
||||
#
|
||||
# # XXX It would be nice to have readline and __iter__ for this,
|
||||
# # too.
|
||||
#
|
||||
# Yes, it certainly would.
|
||||
while (b'\n' not in self._readline_buffer
|
||||
and len(self._readline_buffer) < size):
|
||||
read_size = size - len(self._readline_buffer)
|
||||
chunk = HTTPResponse.read(self, read_size)
|
||||
if not chunk:
|
||||
break
|
||||
self._readline_buffer += chunk
|
||||
|
||||
line, newline, rest = self._readline_buffer.partition(b'\n')
|
||||
self._readline_buffer = rest
|
||||
return line + newline
|
||||
|
||||
def nuke_from_orbit(self):
|
||||
"""
|
||||
Terminate the socket with extreme prejudice.
|
||||
@ -186,14 +146,9 @@ class BufferedHTTPResponse(HTTPResponse):
|
||||
you care about has a reference to this socket.
|
||||
"""
|
||||
if self._real_socket:
|
||||
if six.PY2:
|
||||
# this is idempotent; see sock_close in Modules/socketmodule.c
|
||||
# in the Python source for details.
|
||||
self._real_socket.close()
|
||||
else:
|
||||
# Hopefully this is equivalent?
|
||||
# TODO: verify that this does everything ^^^^ does for py2
|
||||
self._real_socket._real_close()
|
||||
# Hopefully this is equivalent to py2's _real_socket.close()?
|
||||
# TODO: verify that this does everything ^^^^ does for py2
|
||||
self._real_socket._real_close()
|
||||
self._real_socket = None
|
||||
self.close()
|
||||
|
||||
@ -268,13 +223,13 @@ def http_connect(ipaddr, port, device, partition, method, path,
|
||||
:param ssl: set True if SSL should be used (default: False)
|
||||
:returns: HTTPConnection object
|
||||
"""
|
||||
if isinstance(path, six.text_type):
|
||||
if isinstance(path, str):
|
||||
path = path.encode("utf-8")
|
||||
if isinstance(device, six.text_type):
|
||||
if isinstance(device, str):
|
||||
device = device.encode("utf-8")
|
||||
if isinstance(partition, six.text_type):
|
||||
if isinstance(partition, str):
|
||||
partition = partition.encode('utf-8')
|
||||
elif isinstance(partition, six.integer_types):
|
||||
elif isinstance(partition, int):
|
||||
partition = str(partition).encode('ascii')
|
||||
path = quote(b'/' + device + b'/' + partition + path)
|
||||
return http_connect_raw(
|
||||
@ -305,14 +260,10 @@ def http_connect_raw(ipaddr, port, method, path, headers=None,
|
||||
conn = BufferedHTTPConnection('%s:%s' % (ipaddr, port))
|
||||
if query_string:
|
||||
# Round trip to ensure proper quoting
|
||||
if six.PY2:
|
||||
query_string = urlencode(parse_qsl(
|
||||
query_string, keep_blank_values=True))
|
||||
else:
|
||||
query_string = urlencode(
|
||||
parse_qsl(query_string, keep_blank_values=True,
|
||||
encoding='latin1'),
|
||||
encoding='latin1')
|
||||
query_string = urlencode(
|
||||
parse_qsl(query_string, keep_blank_values=True,
|
||||
encoding='latin1'),
|
||||
encoding='latin1')
|
||||
path += '?' + query_string
|
||||
conn.path = path
|
||||
conn.putrequest(method, path, skip_host=(headers and 'Host' in headers))
|
||||
|
@ -17,9 +17,8 @@ import functools
|
||||
import os
|
||||
from os.path import isdir # tighter scoped import for mocking
|
||||
|
||||
import six
|
||||
from six.moves.configparser import ConfigParser, NoSectionError, NoOptionError
|
||||
from six.moves import urllib
|
||||
from configparser import ConfigParser, NoSectionError, NoOptionError
|
||||
import urllib
|
||||
|
||||
from swift.common import utils, exceptions
|
||||
from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \
|
||||
@ -130,7 +129,7 @@ def check_metadata(req, target_type):
|
||||
meta_count = 0
|
||||
meta_size = 0
|
||||
for key, value in req.headers.items():
|
||||
if (isinstance(value, six.string_types)
|
||||
if (isinstance(value, str)
|
||||
and len(value) > MAX_HEADER_SIZE):
|
||||
|
||||
return HTTPBadRequest(body=b'Header value too long: %s' %
|
||||
@ -364,7 +363,7 @@ def check_utf8(string, internal=False):
|
||||
if not string:
|
||||
return False
|
||||
try:
|
||||
if isinstance(string, six.text_type):
|
||||
if isinstance(string, str):
|
||||
encoded = string.encode('utf-8')
|
||||
decoded = string
|
||||
else:
|
||||
@ -412,9 +411,6 @@ def check_name_format(req, name, target_type):
|
||||
raise HTTPPreconditionFailed(
|
||||
request=req,
|
||||
body='%s name cannot be empty' % target_type)
|
||||
if six.PY2:
|
||||
if isinstance(name, six.text_type):
|
||||
name = name.encode('utf-8')
|
||||
if '/' in name:
|
||||
raise HTTPPreconditionFailed(
|
||||
request=req,
|
||||
|
@ -19,8 +19,7 @@ import hmac
|
||||
import os
|
||||
import time
|
||||
|
||||
import six
|
||||
from six.moves import configparser
|
||||
import configparser
|
||||
|
||||
from swift.common.utils import get_valid_utf8_str
|
||||
|
||||
@ -158,7 +157,7 @@ class ContainerSyncRealms(object):
|
||||
user_key = get_valid_utf8_str(user_key)
|
||||
# XXX We don't know what is the best here yet; wait for container
|
||||
# sync to be tested.
|
||||
if isinstance(path, six.text_type):
|
||||
if isinstance(path, str):
|
||||
path = path.encode('utf-8')
|
||||
return hmac.new(
|
||||
realm_key,
|
||||
|
@ -21,11 +21,9 @@ import json
|
||||
import logging
|
||||
import os
|
||||
from uuid import uuid4
|
||||
import sys
|
||||
import time
|
||||
import errno
|
||||
import six
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
from tempfile import mkstemp
|
||||
|
||||
from eventlet import sleep, Timeout
|
||||
@ -55,28 +53,14 @@ SQLITE_ARG_LIMIT = 999
|
||||
RECLAIM_PAGE_SIZE = 10000
|
||||
|
||||
|
||||
def utf8encode(*args):
|
||||
return [(s.encode('utf8') if isinstance(s, six.text_type) else s)
|
||||
for s in args]
|
||||
|
||||
|
||||
def native_str_keys_and_values(metadata):
|
||||
if six.PY2:
|
||||
uni_keys = [k for k in metadata if isinstance(k, six.text_type)]
|
||||
for k in uni_keys:
|
||||
sv = metadata[k]
|
||||
del metadata[k]
|
||||
metadata[k.encode('utf-8')] = [
|
||||
x.encode('utf-8') if isinstance(x, six.text_type) else x
|
||||
for x in sv]
|
||||
else:
|
||||
bin_keys = [k for k in metadata if isinstance(k, six.binary_type)]
|
||||
for k in bin_keys:
|
||||
sv = metadata[k]
|
||||
del metadata[k]
|
||||
metadata[k.decode('utf-8')] = [
|
||||
x.decode('utf-8') if isinstance(x, six.binary_type) else x
|
||||
for x in sv]
|
||||
bin_keys = [k for k in metadata if isinstance(k, bytes)]
|
||||
for k in bin_keys:
|
||||
sv = metadata[k]
|
||||
del metadata[k]
|
||||
metadata[k.decode('utf-8')] = [
|
||||
x.decode('utf-8') if isinstance(x, bytes) else x
|
||||
for x in sv]
|
||||
|
||||
|
||||
ZERO_LIKE_VALUES = {None, '', 0, '0'}
|
||||
@ -219,7 +203,7 @@ def get_db_connection(path, timeout=30, logger=None, okay_to_create=False):
|
||||
connect_time = time.time()
|
||||
conn = sqlite3.connect(path, check_same_thread=False,
|
||||
factory=GreenDBConnection, timeout=timeout)
|
||||
if QUERY_LOGGING and logger and not six.PY2:
|
||||
if QUERY_LOGGING and logger:
|
||||
conn.set_trace_callback(logger.debug)
|
||||
if not okay_to_create:
|
||||
# attempt to detect and fail when connect creates the db file
|
||||
@ -380,7 +364,7 @@ class DatabaseBroker(object):
|
||||
os.close(fd)
|
||||
conn = sqlite3.connect(tmp_db_file, check_same_thread=False,
|
||||
factory=GreenDBConnection, timeout=0)
|
||||
if QUERY_LOGGING and not six.PY2:
|
||||
if QUERY_LOGGING:
|
||||
conn.set_trace_callback(self.logger.debug)
|
||||
# creating dbs implicitly does a lot of transactions, so we
|
||||
# pick fast, unsafe options here and do a big fsync at the end.
|
||||
@@ -505,25 +489,25 @@
    self.logger.error(detail)
    raise sqlite3.DatabaseError(detail)

def possibly_quarantine(self, exc_type, exc_value, exc_traceback):
def possibly_quarantine(self, err):
    """
    Checks the exception info to see if it indicates a quarantine situation
    (malformed or corrupted database). If not, the original exception will
    be reraised. If so, the database will be quarantined and a new
    sqlite3.DatabaseError will be raised indicating the action taken.
    """
    if 'database disk image is malformed' in str(exc_value):
    if 'database disk image is malformed' in str(err):
        exc_hint = 'malformed database'
    elif 'malformed database schema' in str(exc_value):
    elif 'malformed database schema' in str(err):
        exc_hint = 'malformed database'
    elif ' is not a database' in str(exc_value):
    elif ' is not a database' in str(err):
        # older versions said 'file is not a database'
        # now 'file is encrypted or is not a database'
        exc_hint = 'corrupted database'
    elif 'disk I/O error' in str(exc_value):
    elif 'disk I/O error' in str(err):
        exc_hint = 'disk error while accessing database'
    else:
        six.reraise(exc_type, exc_value, exc_traceback)
        raise err

    self.quarantine(exc_hint)
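The DatabaseBroker hunk above narrows possibly_quarantine() from the (exc_type, exc_value, exc_traceback) triple to the caught exception itself: on Python 3 an exception object carries its __traceback__, so re-raising it preserves the original traceback without six.reraise(). A tiny sketch of that pattern (not from the diff):

    import sqlite3

    def query_one(conn, sql):
        try:
            return conn.execute(sql).fetchone()
        except sqlite3.DatabaseError as err:
            # decide here whether to quarantine; otherwise just
            raise err  # original traceback is preserved on py3

    conn = sqlite3.connect(':memory:')
    print(query_one(conn, 'SELECT 42'))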
@ -557,8 +541,8 @@ class DatabaseBroker(object):
|
||||
try:
|
||||
self.conn = get_db_connection(self.db_file, self.timeout,
|
||||
self.logger)
|
||||
except (sqlite3.DatabaseError, DatabaseConnectionError):
|
||||
self.possibly_quarantine(*sys.exc_info())
|
||||
except (sqlite3.DatabaseError, DatabaseConnectionError) as e:
|
||||
self.possibly_quarantine(e)
|
||||
else:
|
||||
raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
|
||||
conn = self.conn
|
||||
@ -567,12 +551,12 @@ class DatabaseBroker(object):
|
||||
yield conn
|
||||
conn.rollback()
|
||||
self.conn = conn
|
||||
except sqlite3.DatabaseError:
|
||||
except sqlite3.DatabaseError as e:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
self.possibly_quarantine(*sys.exc_info())
|
||||
self.possibly_quarantine(e)
|
||||
except (Exception, Timeout):
|
||||
conn.close()
|
||||
raise
|
||||
@ -848,11 +832,8 @@ class DatabaseBroker(object):
|
||||
for entry in fp.read().split(b':'):
|
||||
if entry:
|
||||
try:
|
||||
if six.PY2:
|
||||
data = pickle.loads(base64.b64decode(entry))
|
||||
else:
|
||||
data = pickle.loads(base64.b64decode(entry),
|
||||
encoding='utf8')
|
||||
data = pickle.loads(base64.b64decode(entry),
|
||||
encoding='utf8') # nosec: B301
|
||||
self._commit_puts_load(item_list, data)
|
||||
except Exception:
|
||||
self.logger.exception(
|
||||
|
@ -13,9 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import binascii
|
||||
import hashlib
|
||||
import hmac
|
||||
import six
|
||||
|
||||
from swift.common.utils import strict_b64decode
|
||||
|
||||
@ -55,17 +53,14 @@ def get_hmac(request_method, path, expires, key, digest="sha1",
|
||||
parts.insert(0, ip_range)
|
||||
formats.insert(0, b"ip=%s")
|
||||
|
||||
if not isinstance(key, six.binary_type):
|
||||
if isinstance(key, str):
|
||||
key = key.encode('utf8')
|
||||
|
||||
message = b'\n'.join(
|
||||
fmt % (part if isinstance(part, six.binary_type)
|
||||
fmt % (part if isinstance(part, bytes)
|
||||
else part.encode("utf-8"))
|
||||
for fmt, part in zip(formats, parts))
|
||||
|
||||
if six.PY2 and isinstance(digest, six.string_types):
|
||||
digest = getattr(hashlib, digest)
|
||||
|
||||
return hmac.new(key, message, digest).hexdigest()
|
||||
|
||||
|
||||
@ -132,15 +127,10 @@ def extract_digest_and_algorithm(value):
|
||||
if ('-' in value or '_' in value) and not (
|
||||
'+' in value or '/' in value):
|
||||
value = value.replace('-', '+').replace('_', '/')
|
||||
value = binascii.hexlify(strict_b64decode(value + '=='))
|
||||
if not six.PY2:
|
||||
value = value.decode('ascii')
|
||||
value = binascii.hexlify(
|
||||
strict_b64decode(value + '==')).decode('ascii')
|
||||
else:
|
||||
try:
|
||||
binascii.unhexlify(value) # make sure it decodes
|
||||
except TypeError:
|
||||
# This is just for py2
|
||||
raise ValueError('Non-hexadecimal digit found')
|
||||
binascii.unhexlify(value) # make sure it decodes
|
||||
algo = {
|
||||
40: 'sha1',
|
||||
64: 'sha256',
|
||||
|
@ -23,9 +23,8 @@ import os
|
||||
import socket
|
||||
|
||||
from eventlet import sleep, Timeout
|
||||
import six
|
||||
import six.moves.cPickle as pickle
|
||||
from six.moves.http_client import HTTPException
|
||||
import pickle # nosec: B403
|
||||
from http.client import HTTPException
|
||||
|
||||
from swift.common.bufferedhttp import http_connect, http_connect_raw
|
||||
from swift.common.exceptions import ClientException
|
||||
@ -44,7 +43,7 @@ class DirectClientException(ClientException):
|
||||
# host can be used to override the node ip and port reported in
|
||||
# the exception
|
||||
host = host if host is not None else node
|
||||
if not isinstance(path, six.text_type):
|
||||
if isinstance(path, bytes):
|
||||
path = path.decode("utf-8")
|
||||
full_path = quote('/%s/%s%s' % (node['device'], part, path))
|
||||
msg = '%s server %s:%s direct %s %r gave status %s' % (
|
||||
@ -59,7 +58,7 @@ class DirectClientException(ClientException):
|
||||
class DirectClientReconException(ClientException):
|
||||
|
||||
def __init__(self, method, node, path, resp):
|
||||
if not isinstance(path, six.text_type):
|
||||
if isinstance(path, bytes):
|
||||
path = path.decode("utf-8")
|
||||
msg = 'server %s:%s direct %s %r gave status %s' % (
|
||||
node['ip'], node['port'], method, path, resp.status)
|
||||
@ -72,7 +71,7 @@ class DirectClientReconException(ClientException):
|
||||
|
||||
def _make_path(*components):
|
||||
return u'/' + u'/'.join(
|
||||
x.decode('utf-8') if isinstance(x, six.binary_type) else x
|
||||
x.decode('utf-8') if isinstance(x, bytes) else x
|
||||
for x in components)
|
||||
|
||||
|
||||
@ -111,7 +110,7 @@ def _make_req(node, part, method, path, headers, stype,
|
||||
content_length = int(v)
|
||||
if not contents:
|
||||
headers['Content-Length'] = '0'
|
||||
if isinstance(contents, six.string_types):
|
||||
if isinstance(contents, str):
|
||||
contents = [contents]
|
||||
if content_length is None:
|
||||
headers['Transfer-Encoding'] = 'chunked'
|
||||
@ -657,7 +656,7 @@ def direct_get_suffix_hashes(node, part, suffixes, conn_timeout=5,
|
||||
host={'ip': node['replication_ip'],
|
||||
'port': node['replication_port']}
|
||||
)
|
||||
return pickle.loads(resp.read())
|
||||
return pickle.loads(resp.read()) # nosec: B301
|
||||
|
||||
|
||||
def retry(func, *args, **kwargs):
|
||||
|
@ -13,8 +13,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class HeaderKeyDict(dict):
|
||||
"""
|
||||
@ -31,10 +29,7 @@ class HeaderKeyDict(dict):
|
||||
|
||||
@staticmethod
|
||||
def _title(s):
|
||||
if six.PY2:
|
||||
return s.title()
|
||||
else:
|
||||
return s.encode('latin1').title().decode('latin1')
|
||||
return s.encode('latin1').title().decode('latin1')
|
||||
|
||||
def update(self, other):
|
||||
if hasattr(other, 'keys'):
|
||||
@ -51,9 +46,7 @@ class HeaderKeyDict(dict):
|
||||
key = self._title(key)
|
||||
if value is None:
|
||||
self.pop(key, None)
|
||||
elif six.PY2 and isinstance(value, six.text_type):
|
||||
return dict.__setitem__(self, key, value.encode('utf-8'))
|
||||
elif six.PY3 and isinstance(value, six.binary_type):
|
||||
elif isinstance(value, bytes):
|
||||
return dict.__setitem__(self, key, value.decode('latin-1'))
|
||||
else:
|
||||
return dict.__setitem__(self, key, str(value))
|
||||
|
@ -14,18 +14,13 @@
|
||||
# limitations under the License.
|
||||
|
||||
from eventlet import wsgi, websocket
|
||||
import six
|
||||
|
||||
from swift.common.utils import generate_trans_id
|
||||
from swift.common.http import HTTP_NO_CONTENT, HTTP_RESET_CONTENT, \
|
||||
HTTP_NOT_MODIFIED
|
||||
|
||||
if six.PY2:
|
||||
from eventlet.green import httplib as http_client
|
||||
from cgi import escape
|
||||
else:
|
||||
from eventlet.green.http import client as http_client
|
||||
from html import escape
|
||||
from eventlet.green.http import client as http_client
|
||||
from html import escape
|
||||
|
||||
|
||||
class SwiftHttpProtocol(wsgi.HttpProtocol):
|
||||
@ -59,13 +54,6 @@ class SwiftHttpProtocol(wsgi.HttpProtocol):
|
||||
|
||||
class MessageClass(wsgi.HttpProtocol.MessageClass):
|
||||
"""Subclass to see when the client didn't provide a Content-Type"""
|
||||
# for py2:
|
||||
def parsetype(self):
|
||||
if self.typeheader is None:
|
||||
self.typeheader = ''
|
||||
wsgi.HttpProtocol.MessageClass.parsetype(self)
|
||||
|
||||
# for py3:
|
||||
def get_default_type(self):
|
||||
"""If the client didn't provide a content type, leave it blank."""
|
||||
return ''
|
||||
@ -84,9 +72,7 @@ class SwiftHttpProtocol(wsgi.HttpProtocol):
|
||||
self.command = None # set in case of error on the first line
|
||||
self.request_version = version = self.default_request_version
|
||||
self.close_connection = True
|
||||
requestline = self.raw_requestline
|
||||
if not six.PY2:
|
||||
requestline = requestline.decode('iso-8859-1')
|
||||
requestline = self.raw_requestline.decode('iso-8859-1')
|
||||
requestline = requestline.rstrip('\r\n')
|
||||
self.requestline = requestline
|
||||
# Split off \x20 explicitly (see https://bugs.python.org/issue33973)
|
||||
@ -147,26 +133,23 @@ class SwiftHttpProtocol(wsgi.HttpProtocol):
|
||||
self.command, self.path = command, path
|
||||
|
||||
# Examine the headers and look for a Connection directive.
|
||||
if six.PY2:
|
||||
self.headers = self.MessageClass(self.rfile, 0)
|
||||
else:
|
||||
try:
|
||||
self.headers = http_client.parse_headers(
|
||||
self.rfile,
|
||||
_class=self.MessageClass)
|
||||
except http_client.LineTooLong as err:
|
||||
self.send_error(
|
||||
431,
|
||||
"Line too long",
|
||||
str(err))
|
||||
return False
|
||||
except http_client.HTTPException as err:
|
||||
self.send_error(
|
||||
431,
|
||||
"Too many headers",
|
||||
str(err)
|
||||
)
|
||||
return False
|
||||
try:
|
||||
self.headers = http_client.parse_headers(
|
||||
self.rfile,
|
||||
_class=self.MessageClass)
|
||||
except http_client.LineTooLong as err:
|
||||
self.send_error(
|
||||
431,
|
||||
"Line too long",
|
||||
str(err))
|
||||
return False
|
||||
except http_client.HTTPException as err:
|
||||
self.send_error(
|
||||
431,
|
||||
"Too many headers",
|
||||
str(err)
|
||||
)
|
||||
return False
|
||||
|
||||
conntype = self.headers.get('Connection', "")
|
||||
if conntype.lower() == 'close':
|
||||
@ -183,52 +166,51 @@ class SwiftHttpProtocol(wsgi.HttpProtocol):
|
||||
return False
|
||||
return True
|
||||
|
||||
if not six.PY2:
|
||||
def get_environ(self, *args, **kwargs):
|
||||
environ = wsgi.HttpProtocol.get_environ(self, *args, **kwargs)
|
||||
header_payload = self.headers.get_payload()
|
||||
if isinstance(header_payload, list) and len(header_payload) == 1:
|
||||
header_payload = header_payload[0].get_payload()
|
||||
if header_payload:
|
||||
# This shouldn't be here. We must've bumped up against
|
||||
# https://bugs.python.org/issue37093
|
||||
headers_raw = list(environ['headers_raw'])
|
||||
for line in header_payload.rstrip('\r\n').split('\n'):
|
||||
if ':' not in line or line[:1] in ' \t':
|
||||
# Well, we're no more broken than we were before...
|
||||
# Should we support line folding?
|
||||
# Should we 400 a bad header line?
|
||||
break
|
||||
header, value = line.split(':', 1)
|
||||
value = value.strip(' \t\n\r')
|
||||
# NB: Eventlet looks at the headers obj to figure out
|
||||
# whether the client said the connection should close;
|
||||
# see https://github.com/eventlet/eventlet/blob/v0.25.0/
|
||||
# eventlet/wsgi.py#L504
|
||||
self.headers.add_header(header, value)
|
||||
headers_raw.append((header, value))
|
||||
wsgi_key = 'HTTP_' + header.replace('-', '_').encode(
|
||||
'latin1').upper().decode('latin1')
|
||||
if wsgi_key in ('HTTP_CONTENT_LENGTH',
|
||||
'HTTP_CONTENT_TYPE'):
|
||||
wsgi_key = wsgi_key[5:]
|
||||
environ[wsgi_key] = value
|
||||
environ['headers_raw'] = tuple(headers_raw)
|
||||
# Since we parsed some more headers, check to see if they
|
||||
# change how our wsgi.input should behave
|
||||
te = environ.get('HTTP_TRANSFER_ENCODING', '').lower()
|
||||
if te.rsplit(',', 1)[-1].strip() == 'chunked':
|
||||
environ['wsgi.input'].chunked_input = True
|
||||
else:
|
||||
length = environ.get('CONTENT_LENGTH')
|
||||
if length:
|
||||
length = int(length)
|
||||
environ['wsgi.input'].content_length = length
|
||||
if environ.get('HTTP_EXPECT', '').lower() == '100-continue':
|
||||
environ['wsgi.input'].wfile = self.wfile
|
||||
environ['wsgi.input'].wfile_line = \
|
||||
b'HTTP/1.1 100 Continue\r\n'
|
||||
return environ
|
||||
def get_environ(self, *args, **kwargs):
|
||||
environ = wsgi.HttpProtocol.get_environ(self, *args, **kwargs)
|
||||
header_payload = self.headers.get_payload()
|
||||
if isinstance(header_payload, list) and len(header_payload) == 1:
|
||||
header_payload = header_payload[0].get_payload()
|
||||
if header_payload:
|
||||
# This shouldn't be here. We must've bumped up against
|
||||
# https://bugs.python.org/issue37093
|
||||
headers_raw = list(environ['headers_raw'])
|
||||
for line in header_payload.rstrip('\r\n').split('\n'):
|
||||
if ':' not in line or line[:1] in ' \t':
|
||||
# Well, we're no more broken than we were before...
|
||||
# Should we support line folding?
|
||||
# Should we 400 a bad header line?
|
||||
break
|
||||
header, value = line.split(':', 1)
|
||||
value = value.strip(' \t\n\r')
|
||||
# NB: Eventlet looks at the headers obj to figure out
|
||||
# whether the client said the connection should close;
|
||||
# see https://github.com/eventlet/eventlet/blob/v0.25.0/
|
||||
# eventlet/wsgi.py#L504
|
||||
self.headers.add_header(header, value)
|
||||
headers_raw.append((header, value))
|
||||
wsgi_key = 'HTTP_' + header.replace('-', '_').encode(
|
||||
'latin1').upper().decode('latin1')
|
||||
if wsgi_key in ('HTTP_CONTENT_LENGTH',
|
||||
'HTTP_CONTENT_TYPE'):
|
||||
wsgi_key = wsgi_key[5:]
|
||||
environ[wsgi_key] = value
|
||||
environ['headers_raw'] = tuple(headers_raw)
|
||||
# Since we parsed some more headers, check to see if they
|
||||
# change how our wsgi.input should behave
|
||||
te = environ.get('HTTP_TRANSFER_ENCODING', '').lower()
|
||||
if te.rsplit(',', 1)[-1].strip() == 'chunked':
|
||||
environ['wsgi.input'].chunked_input = True
|
||||
else:
|
||||
length = environ.get('CONTENT_LENGTH')
|
||||
if length:
|
||||
length = int(length)
|
||||
environ['wsgi.input'].content_length = length
|
||||
if environ.get('HTTP_EXPECT', '').lower() == '100-continue':
|
||||
environ['wsgi.input'].wfile = self.wfile
|
||||
environ['wsgi.input'].wfile_line = \
|
||||
b'HTTP/1.1 100 Continue\r\n'
|
||||
return environ
|
||||
|
||||
def _read_request_line(self):
|
||||
# Note this is not a new-style class, so super() won't work
|
||||
@ -332,8 +314,7 @@ class SwiftHttpProxiedProtocol(SwiftHttpProtocol):
|
||||
SwiftHttpProtocol.__init__(self, *a, **kw)
|
||||
|
||||
def handle_error(self, connection_line):
|
||||
if not six.PY2:
|
||||
connection_line = connection_line.decode('latin-1')
|
||||
connection_line = connection_line.decode('latin-1')
|
||||
|
||||
# No further processing will proceed on this connection under any
|
||||
# circumstances. We always send the request into the superclass to
|
||||
@ -373,16 +354,12 @@ class SwiftHttpProxiedProtocol(SwiftHttpProtocol):
|
||||
# line.
|
||||
pass
|
||||
elif proxy_parts[1] in (b'TCP4', b'TCP6') and len(proxy_parts) == 6:
|
||||
if six.PY2:
|
||||
self.client_address = (proxy_parts[2], proxy_parts[4])
|
||||
self.proxy_address = (proxy_parts[3], proxy_parts[5])
|
||||
else:
|
||||
self.client_address = (
|
||||
proxy_parts[2].decode('latin-1'),
|
||||
proxy_parts[4].decode('latin-1'))
|
||||
self.proxy_address = (
|
||||
proxy_parts[3].decode('latin-1'),
|
||||
proxy_parts[5].decode('latin-1'))
|
||||
self.client_address = (
|
||||
proxy_parts[2].decode('latin-1'),
|
||||
proxy_parts[4].decode('latin-1'))
|
||||
self.proxy_address = (
|
||||
proxy_parts[3].decode('latin-1'),
|
||||
proxy_parts[5].decode('latin-1'))
|
||||
else:
|
||||
self.handle_error(connection_line)
|
||||
|
||||
|
@ -14,13 +14,13 @@
|
||||
# limitations under the License.
|
||||
|
||||
from eventlet import sleep, Timeout, spawn
|
||||
from eventlet.green import httplib, socket
|
||||
from eventlet.green import socket
|
||||
from eventlet.green.http import client as http_client
|
||||
from eventlet.green.urllib import request as urllib_request
|
||||
import json
|
||||
import six
|
||||
from six.moves import range
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
import struct
|
||||
from sys import exc_info, exit
|
||||
from sys import exit
|
||||
import zlib
|
||||
from time import gmtime, strftime, time
|
||||
from zlib import compressobj
|
||||
@ -34,11 +34,6 @@ from swift.common.swob import Request, bytes_to_wsgi
|
||||
from swift.common.utils import quote, close_if_possible, drain_and_close
|
||||
from swift.common.wsgi import loadapp
|
||||
|
||||
if six.PY3:
|
||||
from eventlet.green.urllib import request as urllib2
|
||||
else:
|
||||
from eventlet.green import urllib2
|
||||
|
||||
|
||||
class UnexpectedResponse(Exception):
|
||||
"""
|
||||
@ -127,8 +122,6 @@ class CompressingFileReader(object):
|
||||
return chunk
|
||||
raise StopIteration
|
||||
|
||||
next = __next__
|
||||
|
||||
def seek(self, offset, whence=0):
|
||||
if not (offset == 0 and whence == 0):
|
||||
raise NotImplementedError('Seek implemented on offset 0 only')
|
||||
@ -217,7 +210,7 @@ class InternalClient(object):
|
||||
headers.setdefault(USE_REPLICATION_NETWORK_HEADER, 'true')
|
||||
|
||||
for attempt in range(self.request_tries):
|
||||
resp = exc_type = exc_value = exc_traceback = None
|
||||
resp = err = None
|
||||
req = Request.blank(
|
||||
path, environ={'REQUEST_METHOD': method}, headers=headers)
|
||||
if body_file is not None:
|
||||
@ -229,8 +222,8 @@ class InternalClient(object):
|
||||
try:
|
||||
# execute in a separate greenthread to not polute corolocals
|
||||
resp = spawn(req.get_response, self.app).wait()
|
||||
except (Exception, Timeout):
|
||||
exc_type, exc_value, exc_traceback = exc_info()
|
||||
except (Exception, Timeout) as e:
|
||||
err = e
|
||||
else:
|
||||
if resp.status_int in acceptable_statuses or \
|
||||
resp.status_int // 100 in acceptable_statuses:
|
||||
@ -256,9 +249,8 @@ class InternalClient(object):
|
||||
# non 2XX responses
|
||||
msg += ' (%s)' % resp.body
|
||||
raise UnexpectedResponse(msg, resp)
|
||||
if exc_type:
|
||||
# To make pep8 tool happy, in place of raise t, v, tb:
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
if err:
|
||||
raise err
|
||||
|
||||
def handle_request(self, *args, **kwargs):
|
||||
resp = self.make_request(*args, **kwargs)
|
||||
@ -848,10 +840,10 @@ class InternalClient(object):
|
||||
def get_auth(url, user, key, auth_version='1.0', **kwargs):
|
||||
if auth_version != '1.0':
|
||||
exit('ERROR: swiftclient missing, only auth v1.0 supported')
|
||||
req = urllib2.Request(url)
|
||||
req = urllib_request.Request(url)
|
||||
req.add_header('X-Auth-User', user)
|
||||
req.add_header('X-Auth-Key', key)
|
||||
conn = urllib2.urlopen(req)
|
||||
conn = urllib_request.urlopen(req)
|
||||
headers = conn.info()
|
||||
return (
|
||||
headers.getheader('X-Storage-Url'),
|
||||
@ -914,12 +906,12 @@ class SimpleClient(object):
|
||||
|
||||
url += '?' + '&'.join(params)
|
||||
|
||||
req = urllib2.Request(url, headers=headers, data=contents)
|
||||
req = urllib_request.Request(url, headers=headers, data=contents)
|
||||
if proxy:
|
||||
proxy = urllib.parse.urlparse(proxy)
|
||||
req.set_proxy(proxy.netloc, proxy.scheme)
|
||||
req.get_method = lambda: method
|
||||
conn = urllib2.urlopen(req, timeout=timeout)
|
||||
conn = urllib_request.urlopen(req, timeout=timeout)
|
||||
body = conn.read()
|
||||
info = conn.info()
|
||||
try:
|
||||
@ -961,14 +953,15 @@ class SimpleClient(object):
|
||||
self.attempts += 1
|
||||
try:
|
||||
return self.base_request(method, **kwargs)
|
||||
except urllib2.HTTPError as err:
|
||||
except urllib_request.HTTPError as err:
|
||||
if is_client_error(err.getcode() or 500):
|
||||
raise ClientException('Client error',
|
||||
http_status=err.getcode())
|
||||
elif self.attempts > retries:
|
||||
raise ClientException('Raise too many retries',
|
||||
http_status=err.getcode())
|
||||
except (socket.error, httplib.HTTPException, urllib2.URLError):
|
||||
except (socket.error, http_client.HTTPException,
|
||||
urllib_request.URLError):
|
||||
if self.attempts > retries:
|
||||
raise
|
||||
sleep(backoff)
|
||||
|
@ -17,8 +17,6 @@ import os
|
||||
import ctypes
|
||||
from ctypes.util import find_library
|
||||
|
||||
import six
|
||||
|
||||
__all__ = ['linkat']
|
||||
|
||||
|
||||
@ -72,9 +70,9 @@ class Linkat(object):
|
||||
if not isinstance(olddirfd, int) or not isinstance(newdirfd, int):
|
||||
raise TypeError("fd must be an integer.")
|
||||
|
||||
if isinstance(oldpath, six.text_type):
|
||||
if isinstance(oldpath, str):
|
||||
oldpath = oldpath.encode('utf8')
|
||||
if isinstance(newpath, six.text_type):
|
||||
if isinstance(newpath, str):
|
||||
newpath = newpath.encode('utf8')
|
||||
|
||||
return self._c_linkat(olddirfd, oldpath, newdirfd, newpath, flags)
|
||||
|
@ -24,14 +24,9 @@ import signal
|
||||
import time
|
||||
import subprocess
|
||||
import re
|
||||
import six
|
||||
import sys
|
||||
import tempfile
|
||||
try:
|
||||
from shutil import which
|
||||
except ImportError:
|
||||
# py2
|
||||
from distutils.spawn import find_executable as which
|
||||
from shutil import which
|
||||
|
||||
from swift.common.utils import search_tree, remove_file, write_file, readconf
|
||||
from swift.common.exceptions import InvalidPidFileException
|
||||
@ -845,10 +840,8 @@ class Server(object):
|
||||
if proc.stdout.closed:
|
||||
output = ''
|
||||
else:
|
||||
output = proc.stdout.read()
|
||||
output = proc.stdout.read().decode('utf8', 'backslashreplace')
|
||||
proc.stdout.close()
|
||||
if not six.PY2:
|
||||
output = output.decode('utf8', 'backslashreplace')
|
||||
|
||||
if kwargs.get('once', False):
|
||||
# if you don't want once to wait you can send it to the
|
||||
|
@ -45,7 +45,6 @@ http://github.com/memcached/memcached/blob/1.4.2/doc/protocol.txt
|
||||
"""
|
||||
|
||||
import os
|
||||
import six
|
||||
import json
|
||||
import logging
|
||||
# the name of 'time' module is changed to 'tm', to avoid changing the
|
||||
@ -56,8 +55,7 @@ from bisect import bisect
|
||||
from eventlet.green import socket, ssl
|
||||
from eventlet.pools import Pool
|
||||
from eventlet import Timeout
|
||||
from six.moves import range
|
||||
from six.moves.configparser import ConfigParser, NoSectionError, NoOptionError
|
||||
from configparser import ConfigParser, NoSectionError, NoOptionError
|
||||
from swift.common import utils
|
||||
from swift.common.exceptions import MemcacheConnectionError, \
|
||||
MemcacheIncrNotFoundError, MemcachePoolTimeout
|
||||
@ -91,10 +89,7 @@ EXPTIME_MAXDELTA = 30 * 24 * 60 * 60
|
||||
|
||||
def md5hash(key):
|
||||
if not isinstance(key, bytes):
|
||||
if six.PY2:
|
||||
key = key.encode('utf-8')
|
||||
else:
|
||||
key = key.encode('utf-8', errors='surrogateescape')
|
||||
key = key.encode('utf-8', errors='surrogateescape')
|
||||
return md5(key, usedforsecurity=False).hexdigest().encode('ascii')
|
||||
|
||||
|
||||
@ -421,8 +416,7 @@ class MemcacheRing(object):
|
||||
# Wait for the set to complete
|
||||
msg = fp.readline().strip()
|
||||
if msg != b'STORED':
|
||||
if not six.PY2:
|
||||
msg = msg.decode('ascii')
|
||||
msg = msg.decode('ascii')
|
||||
raise MemcacheConnectionError('failed set: %s' % msg)
|
||||
self._return_conn(server, fp, sock)
|
||||
return
|
||||
|
@ -14,8 +14,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import six
|
||||
from six.moves.urllib.parse import unquote, urlparse
|
||||
from urllib.parse import unquote, urlparse
|
||||
|
||||
|
||||
def clean_acl(name, value):
|
||||
@ -294,12 +293,8 @@ def acls_from_account_info(info):
|
||||
if not any((admin_members, readwrite_members, readonly_members)):
|
||||
return None
|
||||
|
||||
acls = {
|
||||
return {
|
||||
'admin': admin_members,
|
||||
'read-write': readwrite_members,
|
||||
'read-only': readonly_members,
|
||||
}
|
||||
if six.PY2:
|
||||
for k in ('admin', 'read-write', 'read-only'):
|
||||
acls[k] = [v.encode('utf8') for v in acls[k]]
|
||||
return acls
|
||||
|
@ -195,7 +195,6 @@ payload sent to the proxy (the list of objects/containers to be deleted).
|
||||
"""
|
||||
|
||||
import json
|
||||
import six
|
||||
import tarfile
|
||||
from xml.sax.saxutils import escape # nosec B406
|
||||
from time import time
|
||||
@ -257,8 +256,6 @@ def get_response_body(data_format, data_dict, error_list, root_tag):
|
||||
escape(status), '</status></object>\n',
|
||||
])
|
||||
output.extend(['</errors>\n</', root_tag, '>\n'])
|
||||
if six.PY2:
|
||||
return ''.join(output)
|
||||
return ''.join(output).encode('utf-8')
|
||||
|
||||
output = []
|
||||
@ -268,8 +265,6 @@ def get_response_body(data_format, data_dict, error_list, root_tag):
|
||||
output.extend(
|
||||
'%s, %s\n' % (name, status)
|
||||
for name, status in error_list)
|
||||
if six.PY2:
|
||||
return ''.join(output)
|
||||
return ''.join(output).encode('utf-8')
|
||||
|
||||
|
||||
@ -279,13 +274,9 @@ def pax_key_to_swift_header(pax_key):
|
||||
return "Content-Type"
|
||||
elif pax_key.startswith(u"SCHILY.xattr.user.meta."):
|
||||
useful_part = pax_key[len(u"SCHILY.xattr.user.meta."):]
|
||||
if six.PY2:
|
||||
return "X-Object-Meta-" + useful_part.encode("utf-8")
|
||||
return str_to_wsgi("X-Object-Meta-" + useful_part)
|
||||
elif pax_key.startswith(u"LIBARCHIVE.xattr.user.meta."):
|
||||
useful_part = pax_key[len(u"LIBARCHIVE.xattr.user.meta."):]
|
||||
if six.PY2:
|
||||
return "X-Object-Meta-" + useful_part.encode("utf-8")
|
||||
return str_to_wsgi("X-Object-Meta-" + useful_part)
|
||||
else:
|
||||
# You can get things like atime/mtime/ctime or filesystem ACLs in
|
||||
@ -357,15 +348,12 @@ class Bulk(object):
|
||||
while data_remaining:
|
||||
if b'\n' in line:
|
||||
obj_to_delete, line = line.split(b'\n', 1)
|
||||
if six.PY2:
|
||||
obj_to_delete = wsgi_unquote(obj_to_delete.strip())
|
||||
else:
|
||||
# yeah, all this chaining is pretty terrible...
|
||||
# but it gets even worse trying to use UTF-8 and
|
||||
# errors='surrogateescape' when dealing with terrible
|
||||
# input like b'\xe2%98\x83'
|
||||
obj_to_delete = wsgi_to_str(wsgi_unquote(
|
||||
bytes_to_wsgi(obj_to_delete.strip())))
|
||||
# yeah, all this chaining is pretty terrible...
|
||||
# but it gets even worse trying to use UTF-8 and
|
||||
# errors='surrogateescape' when dealing with terrible
|
||||
# input like b'\xe2%98\x83'
|
||||
obj_to_delete = wsgi_to_str(wsgi_unquote(
|
||||
bytes_to_wsgi(obj_to_delete.strip())))
|
||||
objs_to_delete.append({'name': obj_to_delete})
|
||||
else:
|
||||
data = req.body_file.read(self.max_path_length)
|
||||
@ -373,11 +361,8 @@ class Bulk(object):
|
||||
line += data
|
||||
else:
|
||||
data_remaining = False
|
||||
if six.PY2:
|
||||
obj_to_delete = wsgi_unquote(line.strip())
|
||||
else:
|
||||
obj_to_delete = wsgi_to_str(wsgi_unquote(
|
||||
bytes_to_wsgi(line.strip())))
|
||||
obj_to_delete = wsgi_to_str(wsgi_unquote(
|
||||
bytes_to_wsgi(line.strip())))
|
||||
if obj_to_delete:
|
||||
objs_to_delete.append({'name': obj_to_delete})
|
||||
if len(objs_to_delete) > self.max_deletes_per_request:
|
||||
@ -577,9 +562,7 @@ class Bulk(object):
|
||||
len(failed_files) >= self.max_failed_extractions:
|
||||
break
|
||||
if tar_info.isfile():
|
||||
obj_path = tar_info.name
|
||||
if not six.PY2:
|
||||
obj_path = obj_path.encode('utf-8', 'surrogateescape')
|
||||
obj_path = tar_info.name.encode('utf-8', 'surrogateescape')
|
||||
obj_path = bytes_to_wsgi(obj_path)
|
||||
if obj_path.startswith('./'):
|
||||
obj_path = obj_path[2:]
|
||||
|
@ -27,8 +27,6 @@ maximum lookup depth. If a match is found, the environment's Host header is
|
||||
rewritten and the request is passed further down the WSGI chain.
|
||||
"""
|
||||
|
||||
import six
|
||||
|
||||
try:
|
||||
import dns.resolver
|
||||
import dns.exception
|
||||
@ -149,8 +147,6 @@ class CNAMELookupMiddleware(object):
|
||||
if self.memcache:
|
||||
memcache_key = ''.join(['cname-', a_domain])
|
||||
found_domain = self.memcache.get(memcache_key)
|
||||
if six.PY2 and found_domain:
|
||||
found_domain = found_domain.encode('utf-8')
|
||||
if found_domain is None:
|
||||
ttl, found_domain = lookup_cname(a_domain, self.resolver)
|
||||
if self.memcache and ttl > 0:
|
||||
|
@ -19,8 +19,7 @@ import os
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
import six
|
||||
from six.moves.urllib import parse as urlparse
|
||||
import urllib.parse
|
||||
|
||||
from swift.common.exceptions import EncryptionException, UnknownSecretIdError
|
||||
from swift.common.swob import HTTPInternalServerError
|
||||
@ -225,7 +224,7 @@ def dump_crypto_meta(crypto_meta):
|
||||
for name, value in crypto_meta.items()}
|
||||
|
||||
# use sort_keys=True to make serialized form predictable for testing
|
||||
return urlparse.quote_plus(
|
||||
return urllib.parse.quote_plus(
|
||||
json.dumps(b64_encode_meta(crypto_meta), sort_keys=True))
|
||||
|
||||
|
||||
@ -251,13 +250,13 @@ def load_crypto_meta(value, b64decode=True):
|
||||
str(name): (
|
||||
base64.b64decode(val) if name in ('iv', 'key') and b64decode
|
||||
else b64_decode_meta(val) if isinstance(val, dict)
|
||||
else val.encode('utf8') if six.PY2 else val)
|
||||
else val)
|
||||
for name, val in crypto_meta.items()}
|
||||
|
||||
try:
|
||||
if not isinstance(value, six.string_types):
|
||||
if not isinstance(value, str):
|
||||
raise ValueError('crypto meta not a string')
|
||||
val = json.loads(urlparse.unquote_plus(value))
|
||||
val = json.loads(urllib.parse.unquote_plus(value))
|
||||
if not isinstance(val, dict):
|
||||
raise ValueError('crypto meta not a Mapping')
|
||||
return b64_decode_meta(val)
|
||||
|
@ -14,7 +14,6 @@
|
||||
# limitations under the License.
|
||||
import hashlib
|
||||
import hmac
|
||||
import six
|
||||
|
||||
from swift.common.exceptions import UnknownSecretIdError
|
||||
from swift.common.middleware.crypto.crypto_utils import CRYPTO_KEY_CALLBACK
|
||||
@ -101,14 +100,9 @@ class KeyMasterContext(WSGIContext):
|
||||
# Older py3 proxies may have written down crypto meta as WSGI
|
||||
# strings; we still need to be able to read that
|
||||
try:
|
||||
if six.PY2:
|
||||
alt_path = tuple(
|
||||
part.decode('utf-8').encode('latin1')
|
||||
for part in (key_acct, key_cont, key_obj))
|
||||
else:
|
||||
alt_path = tuple(
|
||||
part.encode('latin1').decode('utf-8')
|
||||
for part in (key_acct, key_cont, key_obj))
|
||||
alt_path = tuple(
|
||||
part.encode('latin1').decode('utf-8')
|
||||
for part in (key_acct, key_cont, key_obj))
|
||||
except UnicodeError:
|
||||
# Well, it was worth a shot
|
||||
pass
|
||||
@ -336,8 +330,7 @@ class BaseKeyMaster(object):
|
||||
self.logger.warning('Unrecognised secret id: %s' % secret_id)
|
||||
raise UnknownSecretIdError(secret_id)
|
||||
else:
|
||||
if not six.PY2:
|
||||
path = path.encode('utf-8')
|
||||
path = path.encode('utf-8')
|
||||
return hmac.new(key, path, digestmod=hashlib.sha256).digest()
|
||||
|
||||
|
||||
|
@ -120,8 +120,6 @@ Here's an example using ``curl`` with tiny 1-byte segments::
|
||||
|
||||
import json
|
||||
|
||||
import six
|
||||
|
||||
from swift.common import constraints
|
||||
from swift.common.exceptions import ListingIterError, SegmentError
|
||||
from swift.common.http import is_success
|
||||
@ -207,8 +205,6 @@ class GetContext(WSGIContext):
|
||||
break
|
||||
|
||||
seg_name = segment['name']
|
||||
if six.PY2:
|
||||
seg_name = seg_name.encode("utf-8")
|
||||
|
||||
# We deliberately omit the etag and size here;
|
||||
# SegmentedIterable will check size and etag if
|
||||
|
@ -123,11 +123,9 @@ the file are simply ignored).
|
||||
__all__ = ['FormPost', 'filter_factory', 'READ_CHUNK_SIZE', 'MAX_VALUE_LENGTH']
|
||||
|
||||
import hmac
|
||||
import hashlib
|
||||
from time import time
|
||||
|
||||
import six
|
||||
from six.moves.urllib.parse import quote
|
||||
from urllib.parse import quote
|
||||
|
||||
from swift.common.constraints import valid_api_version
|
||||
from swift.common.exceptions import MimeInvalid
|
||||
@ -249,9 +247,7 @@ class FormPost(object):
|
||||
('Content-Length', str(len(body)))))
|
||||
return [body]
|
||||
except (FormInvalid, EOFError) as err:
|
||||
body = 'FormPost: %s' % err
|
||||
if six.PY3:
|
||||
body = body.encode('utf-8')
|
||||
body = ('FormPost: %s' % err).encode('utf-8')
|
||||
start_response(
|
||||
'400 Bad Request',
|
||||
(('Content-Type', 'text/plain'),
|
||||
@ -273,8 +269,7 @@ class FormPost(object):
|
||||
:returns: status_line, headers_list, body
|
||||
"""
|
||||
keys = self._get_keys(env)
|
||||
if six.PY3:
|
||||
boundary = boundary.encode('utf-8')
|
||||
boundary = boundary.encode('utf-8')
|
||||
status = message = ''
|
||||
attributes = {}
|
||||
file_attributes = {}
|
||||
@ -320,8 +315,7 @@ class FormPost(object):
|
||||
data += chunk
|
||||
while fp.read(READ_CHUNK_SIZE):
|
||||
pass
|
||||
if six.PY3:
|
||||
data = data.decode('utf-8')
|
||||
data = data.decode('utf-8')
|
||||
if 'name' in attrs:
|
||||
attributes[attrs['name'].lower()] = data.rstrip('\r\n--')
|
||||
if not status:
|
||||
@ -337,8 +331,7 @@ class FormPost(object):
|
||||
body = status
|
||||
if message:
|
||||
body = status + '\r\nFormPost: ' + message.title()
|
||||
if six.PY3:
|
||||
body = body.encode('utf-8')
|
||||
body = body.encode('utf-8')
|
||||
if not is_success(status_code) and resp_body:
|
||||
body = resp_body
|
||||
headers.extend([('Content-Type', 'text/plain'),
|
||||
@ -352,8 +345,7 @@ class FormPost(object):
|
||||
quote(message))
|
||||
body = '<html><body><p><a href="%s">' \
|
||||
'Click to continue...</a></p></body></html>' % redirect
|
||||
if six.PY3:
|
||||
body = body.encode('utf-8')
|
||||
body = body.encode('utf-8')
|
||||
headers.extend(
|
||||
[('Location', redirect), ('Content-Length', str(len(body)))])
|
||||
return '303 See Other', headers, body
|
||||
@ -415,8 +407,7 @@ class FormPost(object):
|
||||
attributes.get('max_file_size') or '0',
|
||||
attributes.get('max_file_count') or '0',
|
||||
attributes.get('expires') or '0')
|
||||
if six.PY3:
|
||||
hmac_body = hmac_body.encode('utf-8')
|
||||
hmac_body = hmac_body.encode('utf-8')
|
||||
|
||||
has_valid_sig = False
|
||||
signature = attributes.get('signature', '')
|
||||
@ -426,13 +417,12 @@ class FormPost(object):
|
||||
raise FormUnauthorized('invalid signature')
|
||||
if hash_name not in self.allowed_digests:
|
||||
raise FormUnauthorized('invalid signature')
|
||||
hash_algorithm = getattr(hashlib, hash_name) if six.PY2 else hash_name
|
||||
|
||||
for key in keys:
|
||||
# Encode key like in swift.common.utls.get_hmac.
|
||||
if not isinstance(key, six.binary_type):
|
||||
if not isinstance(key, bytes):
|
||||
key = key.encode('utf8')
|
||||
sig = hmac.new(key, hmac_body, hash_algorithm).hexdigest()
|
||||
sig = hmac.new(key, hmac_body, hash_name).hexdigest()
|
||||
if streq_const_time(sig, signature):
|
||||
has_valid_sig = True
|
||||
if not has_valid_sig:
|
||||
|
@ -36,7 +36,7 @@ from swift.common.utils import get_logger, config_true_value
|
||||
from swift.common.request_helpers import (
|
||||
remove_items, get_sys_meta_prefix, OBJECT_TRANSIENT_SYSMETA_PREFIX
|
||||
)
|
||||
from six.moves.urllib.parse import urlsplit
|
||||
from urllib.parse import urlsplit
|
||||
import re
|
||||
|
||||
#: A list of python regular expressions that will be used to
|
||||
|
@ -80,7 +80,7 @@ environment (everyone can query the locality data using this middleware).
|
||||
|
||||
import json
|
||||
|
||||
from six.moves.urllib.parse import quote, unquote
|
||||
from urllib.parse import quote, unquote
|
||||
|
||||
from swift.common.ring import Ring
|
||||
from swift.common.utils import get_logger, split_path
|
||||
|
@ -14,7 +14,6 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import six
|
||||
from xml.etree.cElementTree import Element, SubElement, tostring
|
||||
|
||||
from swift.common.constraints import valid_api_version
|
||||
@ -84,8 +83,7 @@ def account_to_xml(listing, account_name):
|
||||
else:
|
||||
sub = SubElement(doc, 'container')
|
||||
for field in ('name', 'count', 'bytes', 'last_modified'):
|
||||
SubElement(sub, field).text = six.text_type(
|
||||
record.pop(field))
|
||||
SubElement(sub, field).text = str(record.pop(field))
|
||||
sub.tail = '\n'
|
||||
return to_xml(doc)
|
||||
|
||||
@ -101,8 +99,7 @@ def container_to_xml(listing, base_name):
|
||||
sub = SubElement(doc, 'object')
|
||||
for field in ('name', 'hash', 'bytes', 'content_type',
|
||||
'last_modified'):
|
||||
SubElement(sub, field).text = six.text_type(
|
||||
record.pop(field))
|
||||
SubElement(sub, field).text = str(record.pop(field))
|
||||
return to_xml(doc)
|
||||
|
||||
|
||||
@ -126,8 +123,6 @@ class ListingFilter(object):
|
||||
for entry in list(listing):
|
||||
for key in ('name', 'subdir'):
|
||||
value = entry.get(key, '')
|
||||
if six.PY2:
|
||||
value = value.encode('utf-8')
|
||||
if RESERVED in value:
|
||||
if container:
|
||||
self.logger.warning(
|
||||
|
@ -16,8 +16,7 @@
|
||||
from base64 import standard_b64encode as b64encode
|
||||
from base64 import standard_b64decode as b64decode
|
||||
|
||||
import six
|
||||
from six.moves.urllib.parse import quote
|
||||
from urllib.parse import quote
|
||||
|
||||
from swift.common import swob
|
||||
from swift.common.http import HTTP_OK
|
||||
@ -145,9 +144,8 @@ class BucketController(Controller):
|
||||
query['marker'] = swob.wsgi_to_str(req.params['start-after'])
|
||||
# continuation-token overrides start-after
|
||||
if 'continuation-token' in req.params:
|
||||
decoded = b64decode(req.params['continuation-token'])
|
||||
if not six.PY2:
|
||||
decoded = decoded.decode('utf8')
|
||||
decoded = b64decode(
|
||||
req.params['continuation-token']).decode('utf8')
|
||||
query['marker'] = decoded
|
||||
if 'fetch-owner' in req.params:
|
||||
fetch_owner = config_true_value(req.params['fetch-owner'])
|
||||
|
@ -65,17 +65,14 @@ import os
|
||||
import re
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
from swift.common import constraints
|
||||
from swift.common.swob import Range, bytes_to_wsgi, normalize_etag, \
|
||||
wsgi_to_str
|
||||
from swift.common.utils import json, public, reiterate, md5, Timestamp
|
||||
from swift.common.db import utf8encode
|
||||
from swift.common.request_helpers import get_container_update_override_key, \
|
||||
get_param
|
||||
|
||||
from six.moves.urllib.parse import quote, urlparse
|
||||
from urllib.parse import quote, urlparse
|
||||
|
||||
from swift.common.middleware.s3api.controllers.base import Controller, \
|
||||
bucket_operation, object_operation, check_container_existence
|
||||
@ -293,8 +290,6 @@ class UploadsController(Controller):
|
||||
|
||||
:return (non_delimited_uploads, common_prefixes)
|
||||
"""
|
||||
if six.PY2:
|
||||
(prefix, delimiter) = utf8encode(prefix, delimiter)
|
||||
non_delimited_uploads = []
|
||||
common_prefixes = set()
|
||||
for upload in uploads:
|
||||
@ -363,10 +358,7 @@ class UploadsController(Controller):
|
||||
new_uploads, prefix, delimiter)
|
||||
uploads.extend(new_uploads)
|
||||
prefixes.extend(new_prefixes)
|
||||
if six.PY2:
|
||||
query['marker'] = objects[-1]['name'].encode('utf-8')
|
||||
else:
|
||||
query['marker'] = objects[-1]['name']
|
||||
query['marker'] = objects[-1]['name']
|
||||
|
||||
truncated = len(uploads) >= maxuploads
|
||||
if len(uploads) > maxuploads:
|
||||
@ -542,10 +534,7 @@ class UploadController(Controller):
|
||||
if not new_objects:
|
||||
break
|
||||
objects.extend(new_objects)
|
||||
if six.PY2:
|
||||
query['marker'] = new_objects[-1]['name'].encode('utf-8')
|
||||
else:
|
||||
query['marker'] = new_objects[-1]['name']
|
||||
query['marker'] = new_objects[-1]['name']
|
||||
|
||||
last_part = 0
|
||||
|
||||
@ -642,10 +631,7 @@ class UploadController(Controller):
|
||||
container = req.container_name + MULTIUPLOAD_SUFFIX
|
||||
obj = bytes_to_wsgi(o['name'].encode('utf-8'))
|
||||
req.get_response(self.app, container=container, obj=obj)
|
||||
if six.PY2:
|
||||
query['marker'] = objects[-1]['name'].encode('utf-8')
|
||||
else:
|
||||
query['marker'] = objects[-1]['name']
|
||||
query['marker'] = objects[-1]['name']
|
||||
resp = req.get_response(self.app, 'GET', container, '',
|
||||
query=query)
|
||||
objects = json.loads(resp.body)
|
||||
|
@ -13,7 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from six.moves.urllib.parse import quote
|
||||
from urllib.parse import quote
|
||||
from swift.common.utils import public
|
||||
|
||||
from swift.common.middleware.s3api.controllers.base import Controller
|
||||
|
@ -26,12 +26,11 @@ except ImportError:
|
||||
else:
|
||||
import importlib.resources
|
||||
resource_stream = None
|
||||
import six
|
||||
|
||||
from swift.common.utils import get_logger
|
||||
from swift.common.middleware.s3api.exception import S3Exception
|
||||
from swift.common.middleware.s3api.utils import camel_to_snake, \
|
||||
utf8encode, utf8decode
|
||||
utf8decode
|
||||
|
||||
XMLNS_S3 = 'http://s3.amazonaws.com/doc/2006-03-01/'
|
||||
XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
|
||||
@ -51,7 +50,7 @@ def cleanup_namespaces(elem):
|
||||
tag = tag[len('{%s}' % ns):]
|
||||
return tag
|
||||
|
||||
if not isinstance(elem.tag, six.string_types):
|
||||
if not isinstance(elem.tag, str):
|
||||
# elem is a comment element.
|
||||
return
|
||||
|
||||
@ -136,8 +135,6 @@ class _Element(lxml.etree.ElementBase):
|
||||
"""
|
||||
utf-8 wrapper property of lxml.etree.Element.text
|
||||
"""
|
||||
if six.PY2:
|
||||
return utf8encode(lxml.etree.ElementBase.text.__get__(self))
|
||||
return lxml.etree.ElementBase.text.__get__(self)
|
||||
|
||||
@text.setter
|
||||
|
@ -144,7 +144,7 @@ https://github.com/swiftstack/s3compat in detail.
|
||||
from cgi import parse_header
|
||||
import json
|
||||
from paste.deploy import loadwsgi
|
||||
from six.moves.urllib.parse import parse_qs
|
||||
from urllib.parse import parse_qs
|
||||
|
||||
from swift.common.constraints import valid_api_version
|
||||
from swift.common.middleware.listing_formats import \
|
||||
|
@ -20,9 +20,8 @@ from email.header import Header
|
||||
from hashlib import sha1, sha256
|
||||
import hmac
|
||||
import re
|
||||
import six
|
||||
# pylint: disable-msg=import-error
|
||||
from six.moves.urllib.parse import quote, unquote, parse_qsl
|
||||
from urllib.parse import quote, unquote, parse_qsl
|
||||
import string
|
||||
|
||||
from swift.common.utils import split_path, json, close_if_possible, md5, \
|
||||
@ -421,7 +420,7 @@ class SigV4Mixin(object):
|
||||
else: # mostly-functional fallback
|
||||
headers_lower_dict = dict(
|
||||
(k.lower().strip(), ' '.join(_header_strip(v or '').split()))
|
||||
for (k, v) in six.iteritems(self.headers))
|
||||
for (k, v) in self.headers.items())
|
||||
|
||||
if 'host' in headers_lower_dict and re.match(
|
||||
'Boto/2.[0-9].[0-2]',
|
||||
@ -636,9 +635,8 @@ class S3Request(swob.Request):
|
||||
secret = utf8encode(secret)
|
||||
user_signature = self.signature
|
||||
valid_signature = base64.b64encode(hmac.new(
|
||||
secret, self.string_to_sign, sha1).digest()).strip()
|
||||
if not six.PY2:
|
||||
valid_signature = valid_signature.decode('ascii')
|
||||
secret, self.string_to_sign, sha1
|
||||
).digest()).strip().decode('ascii')
|
||||
return streq_const_time(user_signature, valid_signature)
|
||||
|
||||
@property
|
||||
@ -1488,8 +1486,6 @@ class S3Request(swob.Request):
|
||||
self.user_id = "%s:%s" % (
|
||||
sw_resp.environ['HTTP_X_TENANT_NAME'],
|
||||
sw_resp.environ['HTTP_X_USER_NAME'])
|
||||
if six.PY2 and not isinstance(self.user_id, bytes):
|
||||
self.user_id = self.user_id.encode('utf8')
|
||||
else:
|
||||
# tempauth
|
||||
self.user_id = self.access_key
|
||||
@ -1687,8 +1683,6 @@ class S3AclRequest(S3Request):
|
||||
# keystone
|
||||
self.user_id = "%s:%s" % (sw_resp.environ['HTTP_X_TENANT_NAME'],
|
||||
sw_resp.environ['HTTP_X_USER_NAME'])
|
||||
if six.PY2 and not isinstance(self.user_id, bytes):
|
||||
self.user_id = self.user_id.encode('utf8')
|
||||
else:
|
||||
# tempauth
|
||||
self.user_id = self.access_key
|
||||
|
@ -14,10 +14,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
try:
|
||||
from collections.abc import MutableMapping
|
||||
except ImportError:
|
||||
from collections import MutableMapping # py2
|
||||
from collections.abc import MutableMapping
|
||||
from functools import partial
|
||||
|
||||
from swift.common import header_key_dict
|
||||
|
@ -61,8 +61,7 @@ from keystoneclient.v3 import client as keystone_client
|
||||
from keystoneauth1 import session as keystone_session
|
||||
from keystoneauth1 import loading as keystone_loading
|
||||
import requests
|
||||
import six
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from swift.common.swob import Request, HTTPBadRequest, HTTPUnauthorized, \
|
||||
HTTPException, str_to_wsgi
|
||||
@ -217,9 +216,7 @@ class S3Token(object):
|
||||
error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
|
||||
'<Error>\r\n <Code>%s</Code>\r\n '
|
||||
'<Message>%s</Message>\r\n</Error>\r\n' %
|
||||
(code, message))
|
||||
if six.PY3:
|
||||
error_msg = error_msg.encode()
|
||||
(code, message)).encode()
|
||||
resp.body = error_msg
|
||||
return resp
|
||||
|
||||
@ -266,18 +263,18 @@ class S3Token(object):
|
||||
return self._app(environ, start_response)
|
||||
|
||||
access = s3_auth_details['access_key']
|
||||
if isinstance(access, six.binary_type):
|
||||
if isinstance(access, bytes):
|
||||
access = access.decode('utf-8')
|
||||
|
||||
signature = s3_auth_details['signature']
|
||||
if isinstance(signature, six.binary_type):
|
||||
if isinstance(signature, bytes):
|
||||
signature = signature.decode('utf-8')
|
||||
|
||||
string_to_sign = s3_auth_details['string_to_sign']
|
||||
if isinstance(string_to_sign, six.text_type):
|
||||
if isinstance(string_to_sign, str):
|
||||
string_to_sign = string_to_sign.encode('utf-8')
|
||||
token = base64.urlsafe_b64encode(string_to_sign)
|
||||
if isinstance(token, six.binary_type):
|
||||
if isinstance(token, bytes):
|
||||
token = token.decode('ascii')
|
||||
|
||||
# NOTE(chmou): This is to handle the special case with nova
|
||||
@ -399,8 +396,6 @@ class S3Token(object):
|
||||
|
||||
req.headers.update(headers)
|
||||
tenant_to_connect = force_tenant or tenant['id']
|
||||
if six.PY2 and isinstance(tenant_to_connect, six.text_type):
|
||||
tenant_to_connect = tenant_to_connect.encode('utf-8')
|
||||
self._logger.debug('Connecting with tenant: %s', tenant_to_connect)
|
||||
new_tenant_name = '%s%s' % (self._reseller_prefix, tenant_to_connect)
|
||||
environ['PATH_INFO'] = environ['PATH_INFO'].replace(
|
||||
|
@ -43,8 +43,6 @@ http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html
|
||||
"""
|
||||
from functools import partial
|
||||
|
||||
import six
|
||||
|
||||
from swift.common.utils import json
|
||||
|
||||
from swift.common.middleware.s3api.s3response import InvalidArgument, \
|
||||
@ -233,7 +231,7 @@ class Owner(object):
|
||||
"""
|
||||
def __init__(self, id, name):
|
||||
self.id = id
|
||||
if not (name is None or isinstance(name, six.string_types)):
|
||||
if not (name is None or isinstance(name, str)):
|
||||
raise TypeError('name must be a string or None')
|
||||
self.name = name
|
||||
|
||||
@ -429,8 +427,6 @@ class ACL(object):
|
||||
return tostring(self.elem())
|
||||
|
||||
def __repr__(self):
|
||||
if six.PY2:
|
||||
return self.__bytes__()
|
||||
return self.__bytes__().decode('utf8')
|
||||
|
||||
@classmethod
|
||||
|
@ -18,7 +18,6 @@ import calendar
|
||||
import datetime
|
||||
import email.utils
|
||||
import re
|
||||
import six
|
||||
import time
|
||||
import uuid
|
||||
|
||||
@ -54,8 +53,6 @@ def snake_to_camel(snake):
|
||||
|
||||
def unique_id():
|
||||
result = base64.urlsafe_b64encode(str(uuid.uuid4()).encode('ascii'))
|
||||
if six.PY2:
|
||||
return result
|
||||
return result.decode('ascii')
|
||||
|
||||
|
||||
|
@ -347,8 +347,6 @@ import mimetypes
|
||||
import re
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
from swift.cli.container_deleter import make_delete_jobs
|
||||
from swift.common.header_key_dict import HeaderKeyDict
|
||||
from swift.common.exceptions import ListingIterError, SegmentError
|
||||
@ -361,7 +359,7 @@ from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \
|
||||
HTTPServiceUnavailable, Response, Range, normalize_etag, \
|
||||
RESPONSE_REASONS, str_to_wsgi, bytes_to_wsgi, wsgi_to_str, wsgi_quote
|
||||
from swift.common.utils import get_logger, config_true_value, \
|
||||
get_valid_utf8_str, override_bytes_from_content_type, split_path, \
|
||||
override_bytes_from_content_type, split_path, \
|
||||
RateLimitedIterator, quote, closing_if_possible, \
|
||||
LRUCache, StreamingPile, strict_b64decode, Timestamp, friendly_close, \
|
||||
get_expirer_container, md5
|
||||
@ -462,12 +460,12 @@ def parse_and_validate_input(req_body, req_path):
|
||||
continue
|
||||
|
||||
if segment_type == 'path':
|
||||
if not isinstance(seg_dict['path'], six.string_types):
|
||||
if not isinstance(seg_dict['path'], str):
|
||||
errors.append(b"Index %d: \"path\" must be a string" %
|
||||
seg_index)
|
||||
continue
|
||||
if not (seg_dict.get('etag') is None or
|
||||
isinstance(seg_dict['etag'], six.string_types)):
|
||||
isinstance(seg_dict['etag'], str)):
|
||||
errors.append(b'Index %d: "etag" must be a string or null '
|
||||
b'(if provided)' % seg_index)
|
||||
continue
|
||||
@ -761,9 +759,7 @@ class SloGetContext(WSGIContext):
|
||||
|
||||
if not sub_resp.is_success:
|
||||
# Error message should be short
|
||||
body = sub_resp.body
|
||||
if not six.PY2:
|
||||
body = body.decode('utf-8')
|
||||
body = sub_resp.body.decode('utf-8')
|
||||
msg = ('while fetching %s, GET of submanifest %s '
|
||||
'failed with status %d (%s)')
|
||||
raise ListingIterError(msg % (
|
||||
@ -873,10 +869,7 @@ class SloGetContext(WSGIContext):
|
||||
"While processing manifest %r, "
|
||||
"max recursion depth was exceeded" % req.path)
|
||||
|
||||
if six.PY2:
|
||||
sub_path = get_valid_utf8_str(seg_dict['name'])
|
||||
else:
|
||||
sub_path = seg_dict['name']
|
||||
sub_path = seg_dict['name']
|
||||
sub_cont, sub_obj = split_path(sub_path, 2, 2, True)
|
||||
if last_sub_path != sub_path:
|
||||
sub_segments = cached_fetch_sub_slo_segments(
|
||||
@ -895,8 +888,6 @@ class SloGetContext(WSGIContext):
|
||||
recursion_depth=recursion_depth + 1):
|
||||
yield sub_seg_dict
|
||||
else:
|
||||
if six.PY2 and isinstance(seg_dict['name'], six.text_type):
|
||||
seg_dict['name'] = seg_dict['name'].encode("utf-8")
|
||||
yield dict(seg_dict,
|
||||
first_byte=max(0, first_byte) + range_start,
|
||||
last_byte=min(range_end, range_start + last_byte))
|
||||
@ -1211,9 +1202,7 @@ class SloGetContext(WSGIContext):
|
||||
seg_dict['etag'] = seg_dict.pop('hash', None)
|
||||
|
||||
json_data = json.dumps(segments, sort_keys=True) # convert to string
|
||||
if six.PY3:
|
||||
json_data = json_data.encode('utf-8')
|
||||
return json_data
|
||||
return json_data.encode('utf-8')
|
||||
|
||||
def _get_manifest_read(self, resp_iter):
|
||||
with closing_if_possible(resp_iter):
|
||||
@ -1402,12 +1391,8 @@ class StaticLargeObject(object):
|
||||
path2indices[seg_dict['path']].append(index)
|
||||
|
||||
def do_head(obj_name):
|
||||
if six.PY2:
|
||||
obj_path = '/'.join(['', vrs, account,
|
||||
get_valid_utf8_str(obj_name).lstrip('/')])
|
||||
else:
|
||||
obj_path = '/'.join(['', vrs, account,
|
||||
str_to_wsgi(obj_name.lstrip('/'))])
|
||||
obj_path = '/'.join(['', vrs, account,
|
||||
str_to_wsgi(obj_name.lstrip('/'))])
|
||||
obj_path = wsgi_quote(obj_path)
|
||||
|
||||
sub_req = make_subrequest(
|
||||
@ -1559,7 +1544,7 @@ class StaticLargeObject(object):
|
||||
r = '%s:%s;' % (seg_data['hash'], seg_data['range'])
|
||||
else:
|
||||
r = seg_data['hash']
|
||||
slo_etag.update(r.encode('ascii') if six.PY3 else r)
|
||||
slo_etag.update(r.encode('ascii'))
|
||||
|
||||
slo_etag = slo_etag.hexdigest()
|
||||
client_etag = normalize_etag(req.headers.get('Etag'))
|
||||
@ -1569,7 +1554,7 @@ class StaticLargeObject(object):
|
||||
resp_dict = {}
|
||||
resp_dict['Response Status'] = err.status
|
||||
err_body = err.body
|
||||
if six.PY3 and isinstance(err_body, bytes):
|
||||
if isinstance(err_body, bytes):
|
||||
err_body = err_body.decode('utf-8', errors='replace')
|
||||
resp_dict['Response Body'] = err_body or '\n'.join(
|
||||
RESPONSE_REASONS.get(err.status_int, ['']))
|
||||
@ -1581,9 +1566,7 @@ class StaticLargeObject(object):
|
||||
yield chunk
|
||||
return
|
||||
|
||||
json_data = json.dumps(data_for_storage)
|
||||
if six.PY3:
|
||||
json_data = json_data.encode('utf-8')
|
||||
json_data = json.dumps(data_for_storage).encode('utf-8')
|
||||
req.body = json_data
|
||||
req.headers.update({
|
||||
SYSMETA_SLO_ETAG: slo_etag,
|
||||
@ -1618,7 +1601,7 @@ class StaticLargeObject(object):
|
||||
|
||||
if heartbeat:
|
||||
resp_body = resp.body
|
||||
if six.PY3 and isinstance(resp_body, bytes):
|
||||
if isinstance(resp_body, bytes):
|
||||
resp_body = resp_body.decode('utf-8')
|
||||
resp_dict['Response Body'] = resp_body
|
||||
yield separator + get_response_body(
|
||||
@ -1644,10 +1627,7 @@ class StaticLargeObject(object):
|
||||
raise HTTPPreconditionFailed(
|
||||
request=req, body='Invalid UTF8 or contains NULL')
|
||||
vrs, account, container, obj = req.split_path(4, 4, True)
|
||||
if six.PY2:
|
||||
obj_path = ('/%s/%s' % (container, obj)).decode('utf-8')
|
||||
else:
|
||||
obj_path = '/%s/%s' % (wsgi_to_str(container), wsgi_to_str(obj))
|
||||
obj_path = '/%s/%s' % (wsgi_to_str(container), wsgi_to_str(obj))
|
||||
|
||||
segments = [{
|
||||
'sub_slo': True,
|
||||
@ -1675,7 +1655,7 @@ class StaticLargeObject(object):
|
||||
except HTTPException as err:
|
||||
# allow bulk delete response to report errors
|
||||
err_body = err.body
|
||||
if six.PY3 and isinstance(err_body, bytes):
|
||||
if isinstance(err_body, bytes):
|
||||
err_body = err_body.decode('utf-8', errors='replace')
|
||||
seg_data['error'] = {'code': err.status_int,
|
||||
'message': err_body}
|
||||
@ -1684,8 +1664,6 @@ class StaticLargeObject(object):
|
||||
seg_data['sub_slo'] = False
|
||||
segments.append(seg_data)
|
||||
else:
|
||||
if six.PY2:
|
||||
seg_data['name'] = seg_data['name'].encode('utf-8')
|
||||
yield seg_data
|
||||
|
||||
def get_slo_segments(self, obj_name, req):
|
||||
@ -1714,15 +1692,9 @@ class StaticLargeObject(object):
|
||||
new_env['HTTP_USER_AGENT'] = \
|
||||
'%s MultipartDELETE' % new_env.get('HTTP_USER_AGENT')
|
||||
new_env['swift.source'] = 'SLO'
|
||||
if six.PY2:
|
||||
new_env['PATH_INFO'] = (
|
||||
'/%s/%s/%s' % (vrs, account,
|
||||
obj_name.lstrip('/').encode('utf-8'))
|
||||
)
|
||||
else:
|
||||
new_env['PATH_INFO'] = (
|
||||
'/%s/%s/%s' % (vrs, account, str_to_wsgi(obj_name.lstrip('/')))
|
||||
)
|
||||
new_env['PATH_INFO'] = (
|
||||
'/%s/%s/%s' % (vrs, account, str_to_wsgi(obj_name.lstrip('/')))
|
||||
)
|
||||
# Just request the last byte of non-SLO objects so we don't waste
|
||||
# a resources in friendly_close() below
|
||||
manifest_req = Request.blank('', new_env, range='bytes=-1')
|
||||
@ -1757,10 +1729,7 @@ class StaticLargeObject(object):
|
||||
raise HTTPPreconditionFailed(
|
||||
request=req, body='Invalid UTF8 or contains NULL')
|
||||
vrs, account, container, obj = req.split_path(4, 4, True)
|
||||
if six.PY2:
|
||||
obj_path = ('/%s/%s' % (container, obj)).decode('utf-8')
|
||||
else:
|
||||
obj_path = '/%s/%s' % (wsgi_to_str(container), wsgi_to_str(obj))
|
||||
obj_path = '/%s/%s' % (wsgi_to_str(container), wsgi_to_str(obj))
|
||||
segments = [seg for seg in self.get_slo_segments(obj_path, req)
|
||||
if 'data' not in seg]
|
||||
if not segments:
|
||||
|
@ -135,13 +135,12 @@ Example usage of this middleware via ``swift``:
|
||||
"""
|
||||
|
||||
|
||||
import html
|
||||
import json
|
||||
import six
|
||||
import time
|
||||
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from swift.common.request_helpers import html_escape
|
||||
from swift.common.utils import human_readable, split_path, config_true_value, \
|
||||
quote, get_logger
|
||||
from swift.common.registry import register_swift_info
|
||||
@ -256,7 +255,7 @@ class _StaticWebContext(WSGIContext):
|
||||
body = '<!DOCTYPE html>\n' \
|
||||
'<html>\n' \
|
||||
'<head>\n' \
|
||||
'<title>Listing of %s</title>\n' % html_escape(label)
|
||||
'<title>Listing of %s</title>\n' % html.escape(label)
|
||||
if self._listings_css:
|
||||
body += ' <link rel="stylesheet" type="text/css" ' \
|
||||
'href="%s" />\n' % self._build_css_path(prefix or '')
|
||||
@ -322,7 +321,7 @@ class _StaticWebContext(WSGIContext):
|
||||
'<html>\n' \
|
||||
' <head>\n' \
|
||||
' <title>Listing of %s</title>\n' % \
|
||||
html_escape(label)
|
||||
html.escape(label)
|
||||
if self._listings_css:
|
||||
body += ' <link rel="stylesheet" type="text/css" ' \
|
||||
'href="%s" />\n' % (self._build_css_path(prefix))
|
||||
@ -341,7 +340,7 @@ class _StaticWebContext(WSGIContext):
|
||||
' <th class="colname">Name</th>\n' \
|
||||
' <th class="colsize">Size</th>\n' \
|
||||
' <th class="coldate">Date</th>\n' \
|
||||
' </tr>\n' % html_escape(label)
|
||||
' </tr>\n' % html.escape(label)
|
||||
if len(prefix) > len(tempurl_prefix):
|
||||
body += ' <tr id="parent" class="item">\n' \
|
||||
' <td class="colname"><a href="../%s">../</a></td>\n' \
|
||||
@ -350,8 +349,7 @@ class _StaticWebContext(WSGIContext):
|
||||
' </tr>\n' % tempurl_qs
|
||||
for item in listing:
|
||||
if 'subdir' in item:
|
||||
subdir = item['subdir'] if six.PY3 else \
|
||||
item['subdir'].encode('utf-8')
|
||||
subdir = item['subdir']
|
||||
if prefix:
|
||||
subdir = subdir[len(wsgi_to_str(prefix)):]
|
||||
body += ' <tr class="item subdir">\n' \
|
||||
@ -359,28 +357,25 @@ class _StaticWebContext(WSGIContext):
|
||||
' <td class="colsize"> </td>\n' \
|
||||
' <td class="coldate"> </td>\n' \
|
||||
' </tr>\n' % \
|
||||
(quote(subdir) + tempurl_qs, html_escape(subdir))
|
||||
(quote(subdir) + tempurl_qs, html.escape(subdir))
|
||||
for item in listing:
|
||||
if 'name' in item:
|
||||
name = item['name'] if six.PY3 else \
|
||||
item['name'].encode('utf-8')
|
||||
name = item['name']
|
||||
if prefix:
|
||||
name = name[len(wsgi_to_str(prefix)):]
|
||||
content_type = item['content_type'] if six.PY3 else \
|
||||
item['content_type'].encode('utf-8')
|
||||
content_type = item['content_type']
|
||||
bytes = human_readable(item['bytes'])
|
||||
last_modified = (
|
||||
html_escape(item['last_modified'] if six.PY3 else
|
||||
item['last_modified'].encode('utf-8')).
|
||||
html.escape(item['last_modified']).
|
||||
split('.')[0].replace('T', ' '))
|
||||
body += ' <tr class="item %s">\n' \
|
||||
' <td class="colname"><a href="%s">%s</a></td>\n' \
|
||||
' <td class="colsize">%s</td>\n' \
|
||||
' <td class="coldate">%s</td>\n' \
|
||||
' </tr>\n' % \
|
||||
(' '.join('type-' + html_escape(t.lower())
|
||||
(' '.join('type-' + html.escape(t.lower())
|
||||
for t in content_type.split('/')),
|
||||
quote(name) + tempurl_qs, html_escape(name),
|
||||
quote(name) + tempurl_qs, html.escape(name),
|
||||
bytes, last_modified)
|
||||
body += ' </table>\n' \
|
||||
' </body>\n' \
|
||||
|
@ -182,7 +182,6 @@ from uuid import uuid4
|
||||
import base64
|
||||
|
||||
from eventlet import Timeout
|
||||
import six
|
||||
from swift.common.memcached import MemcacheConnectionError
|
||||
from swift.common.swob import (
|
||||
Response, Request, wsgi_to_str, str_to_wsgi, wsgi_unquote,
|
||||
@ -246,12 +245,9 @@ class TempAuth(object):
|
||||
# Because trailing equal signs would screw up config file
|
||||
# parsing, we auto-pad with '=' chars.
|
||||
account += '=' * (len(account) % 4)
|
||||
account = base64.b64decode(account)
|
||||
account = base64.b64decode(account).decode('utf8')
|
||||
username += '=' * (len(username) % 4)
|
||||
username = base64.b64decode(username)
|
||||
if not six.PY2:
|
||||
account = account.decode('utf8')
|
||||
username = username.decode('utf8')
|
||||
username = base64.b64decode(username).decode('utf8')
|
||||
values = conf[conf_key].split()
|
||||
if not values:
|
||||
raise ValueError('%s has no key set' % conf_key)
|
||||
@ -451,8 +447,6 @@ class TempAuth(object):
|
||||
expires, groups = cached_auth_data
|
||||
if expires < time():
|
||||
groups = None
|
||||
elif six.PY2:
|
||||
groups = groups.encode('utf8')
|
||||
|
||||
s3_auth_details = env.get('s3api.auth_details') or\
|
||||
env.get('swift3.auth_details')
|
||||
@ -530,7 +524,7 @@ class TempAuth(object):
|
||||
if not isinstance(result[key], list):
|
||||
return "Value for key %s must be a list" % json.dumps(key)
|
||||
for grantee in result[key]:
|
||||
if not isinstance(grantee, six.string_types):
|
||||
if not isinstance(grantee, str):
|
||||
return "Elements of %s list must be strings" % json.dumps(
|
||||
key)
|
||||
|
||||
@ -838,8 +832,7 @@ class TempAuth(object):
|
||||
cached_auth_data = memcache_client.get(memcache_token_key)
|
||||
if cached_auth_data:
|
||||
expires, old_groups = cached_auth_data
|
||||
old_groups = [group.encode('utf8') if six.PY2 else group
|
||||
for group in old_groups.split(',')]
|
||||
old_groups = [group for group in old_groups.split(',')]
|
||||
new_groups = self._get_user_groups(account, account_user,
|
||||
account_id)
|
||||
|
||||
|
@ -299,13 +299,11 @@ __all__ = ['TempURL', 'filter_factory',
|
||||
'DEFAULT_OUTGOING_ALLOW_HEADERS']
|
||||
|
||||
from calendar import timegm
|
||||
import six
|
||||
from os.path import basename
|
||||
from time import time, strftime, strptime, gmtime
|
||||
from ipaddress import ip_address, ip_network
|
||||
|
||||
from six.moves.urllib.parse import parse_qs
|
||||
from six.moves.urllib.parse import urlencode
|
||||
from urllib.parse import parse_qs, urlencode
|
||||
|
||||
from swift.proxy.controllers.base import get_account_info, get_container_info
|
||||
from swift.common.header_key_dict import HeaderKeyDict
|
||||
@ -314,7 +312,7 @@ from swift.common.digest import get_allowed_digests, \
|
||||
extract_digest_and_algorithm, DEFAULT_ALLOWED_DIGESTS, get_hmac
|
||||
from swift.common.swob import header_to_environ_key, HTTPUnauthorized, \
|
||||
HTTPBadRequest, wsgi_to_str
|
||||
from swift.common.utils import split_path, get_valid_utf8_str, \
|
||||
from swift.common.utils import split_path, \
|
||||
streq_const_time, quote, get_logger, close_if_possible
|
||||
from swift.common.registry import register_swift_info, register_sensitive_param
|
||||
from swift.common.wsgi import WSGIContext
|
||||
@ -361,8 +359,7 @@ def get_tempurl_keys_from_metadata(meta):
|
||||
meta = get_account_info(...)['meta']
|
||||
keys = get_tempurl_keys_from_metadata(meta)
|
||||
"""
|
||||
return [(get_valid_utf8_str(value) if six.PY2 else value)
|
||||
for key, value in meta.items()
|
||||
return [value for key, value in meta.items()
|
||||
if key.lower() in ('temp-url-key', 'temp-url-key-2')]
|
||||
|
||||
|
||||
@ -575,8 +572,8 @@ class TempURL(object):
|
||||
if client_address is None:
|
||||
return self._invalid(env, start_response)
|
||||
try:
|
||||
allowed_ip_ranges = ip_network(six.u(temp_url_ip_range))
|
||||
if ip_address(six.u(client_address)) not in allowed_ip_ranges:
|
||||
allowed_ip_ranges = ip_network(str(temp_url_ip_range))
|
||||
if ip_address(str(client_address)) not in allowed_ip_ranges:
|
||||
return self._invalid(env, start_response)
|
||||
except ValueError:
|
||||
return self._invalid(env, start_response)
|
||||
|
@ -145,11 +145,10 @@ the option ``allow_object_versioning`` to ``True``.
|
||||
import calendar
|
||||
import itertools
|
||||
import json
|
||||
import six
|
||||
import time
|
||||
|
||||
from cgi import parse_header
|
||||
from six.moves.urllib.parse import unquote
|
||||
from urllib.parse import unquote
|
||||
|
||||
from swift.common.constraints import MAX_FILE_SIZE, valid_api_version, \
|
||||
ACCOUNT_LISTING_LIMIT, CONTAINER_LISTING_LIMIT
|
||||
@ -1190,7 +1189,6 @@ class ContainerContext(ObjectVersioningContext):
|
||||
name, ts = self._split_version_from_name(linked_name)
|
||||
if ts is None:
|
||||
continue
|
||||
name = name.decode('utf8') if six.PY2 else name
|
||||
is_latest = False
|
||||
if name not in is_latest_set:
|
||||
is_latest_set.add(name)
|
||||
@ -1231,8 +1229,7 @@ class ContainerContext(ObjectVersioningContext):
|
||||
path = '/v1/%s/%s/%s' % (
|
||||
wsgi_to_str(account),
|
||||
wsgi_to_str(location),
|
||||
item['name'].encode('utf8')
|
||||
if six.PY2 else item['name'])
|
||||
item['name'])
|
||||
|
||||
if path in current_versions:
|
||||
item['is_latest'] = True
|
||||
@ -1260,7 +1257,7 @@ class ContainerContext(ObjectVersioningContext):
|
||||
if ts is None:
|
||||
continue
|
||||
broken_listing.append({
|
||||
'name': name.decode('utf8') if six.PY2 else name,
|
||||
'name': name,
|
||||
'is_latest': True,
|
||||
'version_id': ts.internal,
|
||||
'content_type': item['content_type'],
|
||||
|
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import html
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
@ -26,7 +27,6 @@ from swift.common.middleware.x_profile.exceptions import MethodNotAllowed
|
||||
from swift.common.middleware.x_profile.exceptions import DataLoadFailure
|
||||
from swift.common.middleware.x_profile.exceptions import ProfileException
|
||||
from swift.common.middleware.x_profile.profile_model import Stats2
|
||||
from swift.common.request_helpers import html_escape
|
||||
|
||||
PLOTLIB_INSTALLED = True
|
||||
try:
|
||||
@ -453,7 +453,7 @@ class HTMLViewer(object):
|
||||
fmt = '<span id="L%d" rel="#L%d">%' + max_width\
|
||||
+ 'd|<code>%s</code></span>'
|
||||
for line in lines:
|
||||
el = html_escape(line)
|
||||
el = html.escape(line)
|
||||
i = i + 1
|
||||
if i == lineno:
|
||||
fmt2 = '<span id="L%d" style="background-color: \
|
||||
@ -517,7 +517,7 @@ class HTMLViewer(object):
|
||||
html.append('<td>-</td>')
|
||||
else:
|
||||
html.append('<td>%f</td>' % (float(ct) / cc))
|
||||
nfls = html_escape(stats.func_std_string(func))
|
||||
nfls = html.escape(stats.func_std_string(func))
|
||||
if nfls.split(':')[0] not in ['', 'profile'] and\
|
||||
os.path.isfile(nfls.split(':')[0]):
|
||||
html.append('<td><a href="%s/%s%s?format=python#L%d">\
|
||||
|
@ -85,8 +85,7 @@ import time
|
||||
|
||||
from eventlet import greenthread, GreenPool, patcher
|
||||
import eventlet.green.profile as eprofile
|
||||
import six
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from swift.common.utils import get_logger, config_true_value
|
||||
from swift.common.swob import Request
|
||||
@ -112,10 +111,7 @@ PROFILE_EXEC_LAZY = """
|
||||
app_iter_ = self.app(environ, start_response)
|
||||
"""
|
||||
|
||||
if six.PY3:
|
||||
thread = patcher.original('_thread') # non-monkeypatched module needed
|
||||
else:
|
||||
thread = patcher.original('thread') # non-monkeypatched module needed
|
||||
thread = patcher.original('_thread') # non-monkeypatched module needed
|
||||
|
||||
|
||||
# This monkey patch code fix the problem of eventlet profile tool
|
||||
@ -217,7 +213,7 @@ class ProfileMiddleware(object):
|
||||
query_dict,
|
||||
self.renew_profile)
|
||||
start_response('200 OK', headers)
|
||||
if isinstance(content, six.text_type):
|
||||
if isinstance(content, str):
|
||||
content = content.encode('utf-8')
|
||||
return [content]
|
||||
except MethodNotAllowed as mx:
|
||||
|
@ -13,8 +13,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import six
|
||||
|
||||
RECON_RELINKER_FILE = 'relinker.recon'
|
||||
RECON_OBJECT_FILE = 'object.recon'
|
||||
RECON_CONTAINER_FILE = 'container.recon'
|
||||
@ -24,7 +22,7 @@ DEFAULT_RECON_CACHE_PATH = '/var/cache/swift'
|
||||
|
||||
|
||||
def server_type_to_recon_file(server_type):
|
||||
if not isinstance(server_type, six.string_types) or \
|
||||
if not isinstance(server_type, str) or \
|
||||
server_type.lower() not in ('account', 'container', 'object'):
|
||||
raise ValueError('Invalid server_type')
|
||||
return "%s.recon" % server_type.lower()
|
||||
|
@ -16,7 +16,6 @@
|
||||
# Used by get_swift_info and register_swift_info to store information about
|
||||
# the swift cluster.
|
||||
from copy import deepcopy
|
||||
import six
|
||||
|
||||
_swift_info = {}
|
||||
_swift_admin_info = {}
|
||||
@ -115,10 +114,7 @@ def register_sensitive_header(header):
|
||||
"""
|
||||
if not isinstance(header, str):
|
||||
raise TypeError
|
||||
if six.PY2:
|
||||
header.decode('ascii')
|
||||
else:
|
||||
header.encode('ascii')
|
||||
header.encode('ascii')
|
||||
_sensitive_headers.add(header.lower())
|
||||
|
||||
|
||||
@ -147,8 +143,5 @@ def register_sensitive_param(query_param):
|
||||
"""
|
||||
if not isinstance(query_param, str):
|
||||
raise TypeError
|
||||
if six.PY2:
|
||||
query_param.decode('ascii')
|
||||
else:
|
||||
query_param.encode('ascii')
|
||||
query_param.encode('ascii')
|
||||
_sensitive_params.add(query_param)
|
||||
|
@ -21,10 +21,8 @@ from swob in here without creating circular imports.
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import sys
|
||||
import time
|
||||
|
||||
import six
|
||||
from swift.common.header_key_dict import HeaderKeyDict
|
||||
|
||||
from swift.common.constraints import AUTO_CREATE_ACCOUNT_PREFIX, \
|
||||
@ -51,15 +49,6 @@ USE_REPLICATION_NETWORK_HEADER = 'x-backend-use-replication-network'
|
||||
MISPLACED_OBJECTS_ACCOUNT = '.misplaced_objects'
|
||||
|
||||
|
||||
if six.PY2:
|
||||
import cgi
|
||||
|
||||
def html_escape(s, quote=True):
|
||||
return cgi.escape(s, quote=quote)
|
||||
else:
|
||||
from html import escape as html_escape # noqa: F401
|
||||
|
||||
|
||||
def get_param(req, name, default=None):
|
||||
"""
|
||||
Get a parameter from an HTTP request ensuring proper handling UTF-8
|
||||
@ -68,30 +57,20 @@ def get_param(req, name, default=None):
|
||||
:param req: request object
|
||||
:param name: parameter name
|
||||
:param default: result to return if the parameter is not found
|
||||
:returns: HTTP request parameter value, as a native string
|
||||
(in py2, as UTF-8 encoded str, not unicode object)
|
||||
:returns: HTTP request parameter value, as a native (not WSGI) string
|
||||
:raises HTTPBadRequest: if param not valid UTF-8 byte sequence
|
||||
"""
|
||||
value = req.params.get(name, default)
|
||||
if six.PY2:
|
||||
if value and not isinstance(value, six.text_type):
|
||||
try:
|
||||
value.decode('utf8') # Ensure UTF8ness
|
||||
except UnicodeDecodeError:
|
||||
raise HTTPBadRequest(
|
||||
request=req, content_type='text/plain',
|
||||
body='"%s" parameter not valid UTF-8' % name)
|
||||
else:
|
||||
if value:
|
||||
# req.params is a dict of WSGI strings, so encoding will succeed
|
||||
value = value.encode('latin1')
|
||||
try:
|
||||
# Ensure UTF8ness since we're at it
|
||||
value = value.decode('utf8')
|
||||
except UnicodeDecodeError:
|
||||
raise HTTPBadRequest(
|
||||
request=req, content_type='text/plain',
|
||||
body='"%s" parameter not valid UTF-8' % name)
|
||||
if value:
|
||||
# req.params is a dict of WSGI strings, so encoding will succeed
|
||||
value = value.encode('latin1')
|
||||
try:
|
||||
# Ensure UTF8ness since we're at it
|
||||
value = value.decode('utf8')
|
||||
except UnicodeDecodeError:
|
||||
raise HTTPBadRequest(
|
||||
request=req, content_type='text/plain',
|
||||
body='"%s" parameter not valid UTF-8' % name)
|
||||
return value
|
||||
|
||||
|
||||
@ -606,11 +585,10 @@ class SegmentedIterable(object):
|
||||
pending_etag = seg_etag
|
||||
pending_size = seg_size
|
||||
|
||||
except ListingIterError:
|
||||
e_type, e_value, e_traceback = sys.exc_info()
|
||||
except ListingIterError as e:
|
||||
if pending_req:
|
||||
yield pending_req, pending_etag, pending_size
|
||||
six.reraise(e_type, e_value, e_traceback)
|
||||
raise e
|
||||
|
||||
if pending_req:
|
||||
yield pending_req, pending_etag, pending_size
|
||||
@ -629,9 +607,7 @@ class SegmentedIterable(object):
|
||||
seg_resp = seg_req.get_response(self.app)
|
||||
if not is_success(seg_resp.status_int):
|
||||
# Error body should be short
|
||||
body = seg_resp.body
|
||||
if not six.PY2:
|
||||
body = body.decode('utf8')
|
||||
body = seg_resp.body.decode('utf8')
|
||||
msg = 'While processing manifest %s, got %d (%s) ' \
|
||||
'while retrieving %s' % (
|
||||
self.name, seg_resp.status_int,
|
||||
|
@ -22,14 +22,12 @@ import math
|
||||
import random
|
||||
import uuid
|
||||
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
from copy import deepcopy
|
||||
from contextlib import contextmanager
|
||||
|
||||
from array import array
|
||||
from collections import defaultdict
|
||||
import six
|
||||
from six.moves import range
|
||||
from time import time
|
||||
|
||||
from swift.common import exceptions
|
||||
@ -646,8 +644,7 @@ class RingBuilder(object):
|
||||
|
||||
dispersion_graph = {}
|
||||
# go over all the devices holding each replica part by part
|
||||
for part_id, dev_ids in enumerate(
|
||||
six.moves.zip(*self._replica2part2dev)):
|
||||
for part_id, dev_ids in enumerate(zip(*self._replica2part2dev)):
|
||||
# count the number of replicas of this part for each tier of each
|
||||
# device, some devices may have overlapping tiers!
|
||||
replicas_at_tier = defaultdict(int)
|
||||
@ -1741,7 +1738,7 @@ class RingBuilder(object):
|
||||
else:
|
||||
with fp:
|
||||
try:
|
||||
builder = pickle.load(fp)
|
||||
builder = pickle.load(fp) # nosec: B301
|
||||
except Exception:
|
||||
# raise error during unpickling as UnPicklingError
|
||||
raise exceptions.UnPicklingError(
|
||||
|
@ -16,7 +16,7 @@
|
||||
import array
|
||||
import contextlib
|
||||
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
import json
|
||||
from collections import defaultdict
|
||||
from gzip import GzipFile
|
||||
@ -29,9 +29,6 @@ from tempfile import NamedTemporaryFile
|
||||
import sys
|
||||
import zlib
|
||||
|
||||
import six
|
||||
from six.moves import range
|
||||
|
||||
from swift.common.exceptions import RingLoadError
|
||||
from swift.common.utils import hash_path, validate_configuration, md5
|
||||
from swift.common.ring.utils import tiers_for_dev
|
||||
@ -206,7 +203,7 @@ class RingData(object):
|
||||
else:
|
||||
# Assume old-style pickled ring
|
||||
gz_file.seek(0)
|
||||
ring_data = pickle.load(gz_file)
|
||||
ring_data = pickle.load(gz_file) # nosec: B301
|
||||
|
||||
if hasattr(ring_data, 'devs'):
|
||||
# pickled RingData; make sure we've got region/replication info
|
||||
@ -244,12 +241,7 @@ class RingData(object):
|
||||
file_obj.write(struct.pack('!I', json_len))
|
||||
file_obj.write(json_text)
|
||||
for part2dev_id in ring['replica2part2dev_id']:
|
||||
if six.PY2:
|
||||
# Can't just use tofile() because a GzipFile apparently
|
||||
# doesn't count as an 'open file'
|
||||
file_obj.write(part2dev_id.tostring())
|
||||
else:
|
||||
part2dev_id.tofile(file_obj)
|
||||
part2dev_id.tofile(file_obj)
|
||||
|
||||
def save(self, filename, mtime=1300507380.0):
|
||||
"""
|
||||
|
@ -18,8 +18,6 @@ Bindings to the `tee` and `splice` system calls
|
||||
'''
|
||||
|
||||
import os
|
||||
import operator
|
||||
import six
|
||||
import ctypes
|
||||
import ctypes.util
|
||||
|
||||
@ -82,8 +80,10 @@ class Tee(object):
|
||||
if not self.available:
|
||||
raise EnvironmentError('tee not available')
|
||||
|
||||
if not isinstance(flags, six.integer_types):
|
||||
c_flags = six.moves.reduce(operator.or_, flags, 0)
|
||||
if not isinstance(flags, int):
|
||||
c_flags = 0
|
||||
for flag in flags:
|
||||
c_flags |= flag
|
||||
else:
|
||||
c_flags = flags
|
||||
|
||||
@ -174,8 +174,10 @@ class Splice(object):
|
||||
if not self.available:
|
||||
raise EnvironmentError('splice not available')
|
||||
|
||||
if not isinstance(flags, six.integer_types):
|
||||
c_flags = six.moves.reduce(operator.or_, flags, 0)
|
||||
if not isinstance(flags, int):
|
||||
c_flags = 0
|
||||
for flag in flags:
|
||||
c_flags |= flag
|
||||
else:
|
||||
c_flags = flags
|
||||
|
||||
|
@ -21,7 +21,6 @@ from contextlib import closing
|
||||
from random import random
|
||||
|
||||
from eventlet.green import socket
|
||||
import six
|
||||
|
||||
|
||||
def get_statsd_client(conf=None, tail_prefix='', logger=None):
|
||||
@ -164,8 +163,7 @@ class StatsdClient(object):
|
||||
parts.append('@%s' % (sample_rate,))
|
||||
else:
|
||||
return
|
||||
if six.PY3:
|
||||
parts = [part.encode('utf-8') for part in parts]
|
||||
parts = [part.encode('utf-8') for part in parts]
|
||||
# Ideally, we'd cache a sending socket in self, but that
|
||||
# results in a socket getting shared by multiple green threads.
|
||||
with closing(self._open_socket()) as sock:
|
||||
|
@ -17,8 +17,7 @@ import os
|
||||
import string
|
||||
import sys
|
||||
import textwrap
|
||||
import six
|
||||
from six.moves.configparser import ConfigParser
|
||||
from configparser import ConfigParser
|
||||
from swift.common.utils import (
|
||||
config_true_value, quorum_size, whataremyips, list_from_csv,
|
||||
config_positive_int_value, get_zero_indexed_base_string, load_pkg_resource)
|
||||
@ -80,8 +79,10 @@ class BindPortsCache(object):
|
||||
# the first one we notice.
|
||||
|
||||
# Return the requested set of ports from our (now-freshened) cache
|
||||
return six.moves.reduce(set.union,
|
||||
self.portsets_by_ring_path.values(), set())
|
||||
res = set()
|
||||
for ports in self.portsets_by_ring_path.values():
|
||||
res.update(ports)
|
||||
return res
|
||||
|
||||
|
||||
class PolicyError(ValueError):
|
||||
@ -975,12 +976,9 @@ def reload_storage_policies():
|
||||
Reload POLICIES from ``swift.conf``.
|
||||
"""
|
||||
global _POLICIES
|
||||
if six.PY2:
|
||||
policy_conf = ConfigParser()
|
||||
else:
|
||||
# Python 3.2 disallows section or option duplicates by default
|
||||
# strict=False allows us to preserve the older behavior
|
||||
policy_conf = ConfigParser(strict=False)
|
||||
# Python disallows section or option duplicates by default
|
||||
# strict=False allows them, which Swift has always done
|
||||
policy_conf = ConfigParser(strict=False)
|
||||
policy_conf.read(utils.SWIFT_CONF_FILE)
|
||||
try:
|
||||
_POLICIES = parse_storage_policies(policy_conf)
|
||||
|
@ -36,10 +36,7 @@ needs to change.
|
||||
"""
|
||||
|
||||
from collections import defaultdict
|
||||
try:
|
||||
from collections.abc import MutableMapping
|
||||
except ImportError:
|
||||
from collections import MutableMapping # py2
|
||||
from collections.abc import MutableMapping
|
||||
import time
|
||||
from functools import partial
|
||||
from datetime import datetime
|
||||
@ -49,9 +46,8 @@ import random
|
||||
import functools
|
||||
from io import BytesIO
|
||||
|
||||
import six
|
||||
from six import StringIO
|
||||
from six.moves import urllib
|
||||
from io import StringIO
|
||||
import urllib
|
||||
|
||||
from swift.common.header_key_dict import HeaderKeyDict
|
||||
from swift.common.utils import UTC, reiterate, split_path, Timestamp, pairs, \
|
||||
@ -157,7 +153,7 @@ def _datetime_property(header):
|
||||
return None
|
||||
|
||||
def setter(self, value):
|
||||
if isinstance(value, (float,) + six.integer_types):
|
||||
if isinstance(value, (float, int)):
|
||||
self.headers[header] = time.strftime(
|
||||
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(value))
|
||||
elif isinstance(value, datetime):
|
||||
@ -251,9 +247,7 @@ class HeaderEnvironProxy(MutableMapping):
|
||||
def __setitem__(self, key, value):
|
||||
if value is None:
|
||||
self.environ.pop(header_to_environ_key(key), None)
|
||||
elif six.PY2 and isinstance(value, six.text_type):
|
||||
self.environ[header_to_environ_key(key)] = value.encode('utf-8')
|
||||
elif not six.PY2 and isinstance(value, six.binary_type):
|
||||
elif isinstance(value, bytes):
|
||||
self.environ[header_to_environ_key(key)] = value.decode('latin1')
|
||||
else:
|
||||
self.environ[header_to_environ_key(key)] = str(value)
|
||||
@ -279,70 +273,42 @@ class HeaderEnvironProxy(MutableMapping):
|
||||
def wsgi_to_bytes(wsgi_str):
|
||||
if wsgi_str is None:
|
||||
return None
|
||||
if six.PY2:
|
||||
return wsgi_str
|
||||
return wsgi_str.encode('latin1')
|
||||
|
||||
|
||||
def wsgi_to_str(wsgi_str):
|
||||
if wsgi_str is None:
|
||||
return None
|
||||
if six.PY2:
|
||||
return wsgi_str
|
||||
return wsgi_to_bytes(wsgi_str).decode('utf8', errors='surrogateescape')
|
||||
|
||||
|
||||
def bytes_to_wsgi(byte_str):
|
||||
if six.PY2:
|
||||
return byte_str
|
||||
return byte_str.decode('latin1')
|
||||
|
||||
|
||||
def str_to_wsgi(native_str):
|
||||
if six.PY2:
|
||||
return native_str
|
||||
return bytes_to_wsgi(native_str.encode('utf8', errors='surrogateescape'))
|
||||
|
||||
|
||||
def wsgi_quote(wsgi_str, safe='/'):
|
||||
if six.PY2:
|
||||
if not isinstance(wsgi_str, bytes):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.quote(wsgi_str, safe=safe)
|
||||
|
||||
if not isinstance(wsgi_str, str) or any(ord(x) > 255 for x in wsgi_str):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.quote(wsgi_str, safe=safe, encoding='latin-1')
|
||||
|
||||
|
||||
def wsgi_unquote(wsgi_str):
|
||||
if six.PY2:
|
||||
if not isinstance(wsgi_str, bytes):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.unquote(wsgi_str)
|
||||
|
||||
if not isinstance(wsgi_str, str) or any(ord(x) > 255 for x in wsgi_str):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.unquote(wsgi_str, encoding='latin-1')
|
||||
|
||||
|
||||
def wsgi_quote_plus(wsgi_str):
|
||||
if six.PY2:
|
||||
if not isinstance(wsgi_str, bytes):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.quote_plus(wsgi_str)
|
||||
|
||||
if not isinstance(wsgi_str, str) or any(ord(x) > 255 for x in wsgi_str):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.quote_plus(wsgi_str, encoding='latin-1')
|
||||
|
||||
|
||||
def wsgi_unquote_plus(wsgi_str):
|
||||
if six.PY2:
|
||||
if not isinstance(wsgi_str, bytes):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.unquote_plus(wsgi_str)
|
||||
|
||||
if not isinstance(wsgi_str, str) or any(ord(x) > 255 for x in wsgi_str):
|
||||
raise TypeError('Expected a WSGI string; got %r' % wsgi_str)
|
||||
return urllib.parse.unquote_plus(wsgi_str, encoding='latin-1')
|
||||
@ -360,7 +326,7 @@ def _resp_status_property():
|
||||
return '%s %s' % (self.status_int, self.title)
|
||||
|
||||
def setter(self, value):
|
||||
if isinstance(value, six.integer_types):
|
||||
if isinstance(value, int):
|
||||
self.status_int = value
|
||||
self.explanation = self.title = RESPONSE_REASONS[value][0]
|
||||
else:
|
||||
@ -388,9 +354,9 @@ def _resp_body_property():
|
||||
return self._body
|
||||
|
||||
def setter(self, value):
|
||||
if isinstance(value, six.text_type):
|
||||
if isinstance(value, str):
|
||||
raise TypeError('WSGI responses must be bytes')
|
||||
if isinstance(value, six.binary_type):
|
||||
if isinstance(value, bytes):
|
||||
self.content_length = len(value)
|
||||
close_if_possible(self._app_iter)
|
||||
self._app_iter = None
|
||||
@ -805,19 +771,13 @@ def _req_environ_property(environ_field, is_wsgi_string_field=True):
|
||||
return self.environ.get(environ_field, None)
|
||||
|
||||
def setter(self, value):
|
||||
if six.PY2:
|
||||
if isinstance(value, six.text_type):
|
||||
self.environ[environ_field] = value.encode('utf-8')
|
||||
else:
|
||||
self.environ[environ_field] = value
|
||||
else:
|
||||
if is_wsgi_string_field:
|
||||
# Check that input is valid before setting
|
||||
if isinstance(value, str):
|
||||
value.encode('latin1').decode('utf-8')
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode('latin1')
|
||||
self.environ[environ_field] = value
|
||||
if is_wsgi_string_field:
|
||||
# Check that input is valid before setting
|
||||
if isinstance(value, str):
|
||||
value.encode('latin1').decode('utf-8')
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode('latin1')
|
||||
self.environ[environ_field] = value
|
||||
|
||||
return property(getter, setter, doc=("Get and set the %s property "
|
||||
"in the WSGI environment") % environ_field)
|
||||
@ -835,7 +795,7 @@ def _req_body_property():
|
||||
return body
|
||||
|
||||
def setter(self, value):
|
||||
if not isinstance(value, six.binary_type):
|
||||
if not isinstance(value, bytes):
|
||||
value = value.encode('utf8')
|
||||
self.environ['wsgi.input'] = WsgiBytesIO(value)
|
||||
self.environ['CONTENT_LENGTH'] = str(len(value))
|
||||
@ -932,15 +892,11 @@ class Request(object):
|
||||
"""
|
||||
headers = headers or {}
|
||||
environ = environ or {}
|
||||
if six.PY2:
|
||||
if isinstance(path, six.text_type):
|
||||
path = path.encode('utf-8')
|
||||
if isinstance(path, bytes):
|
||||
path = path.decode('latin1')
|
||||
else:
|
||||
if isinstance(path, six.binary_type):
|
||||
path = path.decode('latin1')
|
||||
else:
|
||||
# Check that the input is valid
|
||||
path.encode('latin1')
|
||||
# Check that the input is valid
|
||||
path.encode('latin1')
|
||||
|
||||
parsed_path = urllib.parse.urlparse(path)
|
||||
server_name = 'localhost'
|
||||
@ -970,7 +926,7 @@ class Request(object):
|
||||
}
|
||||
env.update(environ)
|
||||
if body is not None:
|
||||
if not isinstance(body, six.binary_type):
|
||||
if not isinstance(body, bytes):
|
||||
body = body.encode('utf8')
|
||||
env['wsgi.input'] = WsgiBytesIO(body)
|
||||
env['CONTENT_LENGTH'] = str(len(body))
|
||||
@ -996,13 +952,9 @@ class Request(object):
|
||||
"Provides QUERY_STRING parameters as a dictionary"
|
||||
if self._params_cache is None:
|
||||
if 'QUERY_STRING' in self.environ:
|
||||
if six.PY2:
|
||||
self._params_cache = dict(urllib.parse.parse_qsl(
|
||||
self.environ['QUERY_STRING'], True))
|
||||
else:
|
||||
self._params_cache = dict(urllib.parse.parse_qsl(
|
||||
self.environ['QUERY_STRING'],
|
||||
keep_blank_values=True, encoding='latin-1'))
|
||||
self._params_cache = dict(urllib.parse.parse_qsl(
|
||||
self.environ['QUERY_STRING'],
|
||||
keep_blank_values=True, encoding='latin-1'))
|
||||
else:
|
||||
self._params_cache = {}
|
||||
return self._params_cache
|
||||
@ -1011,11 +963,8 @@ class Request(object):
|
||||
@params.setter
|
||||
def params(self, param_pairs):
|
||||
self._params_cache = None
|
||||
if six.PY2:
|
||||
self.query_string = urllib.parse.urlencode(param_pairs)
|
||||
else:
|
||||
self.query_string = urllib.parse.urlencode(param_pairs,
|
||||
encoding='latin-1')
|
||||
self.query_string = urllib.parse.urlencode(param_pairs,
|
||||
encoding='latin-1')
|
||||
|
||||
def ensure_x_timestamp(self):
|
||||
"""
|
||||
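
As an aside (illustrative sketch, values invented): with the py2 branch gone, query parameters always round-trip through latin-1, so UTF-8 bytes survive the WSGI layer unchanged::

    import urllib.parse

    native = 'sn\u00e4ck'                                   # hypothetical prefix value
    wsgi_value = native.encode('utf-8').decode('latin-1')   # WSGI-string form
    qs = urllib.parse.urlencode({'prefix': wsgi_value}, encoding='latin-1')
    params = dict(urllib.parse.parse_qsl(qs, keep_blank_values=True,
                                         encoding='latin-1'))
    assert params['prefix'] == wsgi_value
    assert params['prefix'].encode('latin-1').decode('utf-8') == native
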
@ -1265,7 +1214,7 @@ class Response(object):
|
||||
self.request = request
|
||||
self._app_iter = None
|
||||
# Allow error messages to come as natural strings on py3.
|
||||
if isinstance(body, six.text_type):
|
||||
if isinstance(body, str):
|
||||
body = body.encode('utf8')
|
||||
self.body = body
|
||||
self.app_iter = app_iter
|
||||
|
@ -64,15 +64,12 @@ from eventlet.event import Event
|
||||
from eventlet.green import socket
|
||||
import eventlet.hubs
|
||||
import eventlet.queue
|
||||
import six
|
||||
|
||||
from six.moves import cPickle as pickle
|
||||
from six.moves.configparser import (ConfigParser, NoSectionError,
|
||||
NoOptionError)
|
||||
from six.moves import range
|
||||
from six.moves.urllib.parse import unquote
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from six.moves import UserList
|
||||
import pickle # nosec: B403
|
||||
from configparser import (ConfigParser, NoSectionError,
|
||||
NoOptionError)
|
||||
from urllib.parse import unquote, urlparse
|
||||
from collections import UserList
|
||||
|
||||
import swift.common.exceptions
|
||||
from swift.common.http import is_server_error
|
||||
@ -373,26 +370,18 @@ def validate_hash_conf():
|
||||
if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
|
||||
hash_conf = ConfigParser()
|
||||
|
||||
if six.PY3:
|
||||
# Use Latin1 to accept arbitrary bytes in the hash prefix/suffix
|
||||
with open(SWIFT_CONF_FILE, encoding='latin1') as swift_conf_file:
|
||||
hash_conf.read_file(swift_conf_file)
|
||||
else:
|
||||
with open(SWIFT_CONF_FILE) as swift_conf_file:
|
||||
hash_conf.readfp(swift_conf_file)
|
||||
# Use Latin1 to accept arbitrary bytes in the hash prefix/suffix
|
||||
with open(SWIFT_CONF_FILE, encoding='latin1') as swift_conf_file:
|
||||
hash_conf.read_file(swift_conf_file)
|
||||
|
||||
try:
|
||||
HASH_PATH_SUFFIX = hash_conf.get('swift-hash',
|
||||
'swift_hash_path_suffix')
|
||||
if six.PY3:
|
||||
HASH_PATH_SUFFIX = HASH_PATH_SUFFIX.encode('latin1')
|
||||
HASH_PATH_SUFFIX = hash_conf.get(
|
||||
'swift-hash', 'swift_hash_path_suffix').encode('latin1')
|
||||
except (NoSectionError, NoOptionError):
|
||||
pass
|
||||
try:
|
||||
HASH_PATH_PREFIX = hash_conf.get('swift-hash',
|
||||
'swift_hash_path_prefix')
|
||||
if six.PY3:
|
||||
HASH_PATH_PREFIX = HASH_PATH_PREFIX.encode('latin1')
|
||||
HASH_PATH_PREFIX = hash_conf.get(
|
||||
'swift-hash', 'swift_hash_path_prefix').encode('latin1')
|
||||
except (NoSectionError, NoOptionError):
|
||||
pass
|
||||
|
||||
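
A condensed, py3-only sketch of the read above (illustrative; it assumes a readable swift.conf at the usual path and uses the swift-hash option names)::

    from configparser import ConfigParser, NoSectionError, NoOptionError

    hash_conf = ConfigParser()
    # latin-1 lets arbitrary byte values in the prefix/suffix survive the read
    with open('/etc/swift/swift.conf', encoding='latin1') as fp:
        hash_conf.read_file(fp)
    try:
        suffix = hash_conf.get('swift-hash',
                               'swift_hash_path_suffix').encode('latin1')
    except (NoSectionError, NoOptionError):
        suffix = b''
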
@ -498,7 +487,7 @@ class FileLikeIter(object):
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
"""
|
||||
next(x) -> the next value, or raise StopIteration
|
||||
"""
|
||||
@ -510,7 +499,6 @@ class FileLikeIter(object):
|
||||
return rv
|
||||
else:
|
||||
return next(self.iterator)
|
||||
__next__ = next
|
||||
|
||||
def read(self, size=-1):
|
||||
"""
|
||||
@ -949,7 +937,7 @@ class RateLimitedIterator(object):
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
next_value = next(self.iterator)
|
||||
|
||||
if self.ratelimit_if(next_value):
|
||||
@ -958,7 +946,6 @@ class RateLimitedIterator(object):
|
||||
else:
|
||||
self.rate_limiter.wait()
|
||||
return next_value
|
||||
__next__ = next
|
||||
|
||||
|
||||
class GreenthreadSafeIterator(object):
|
||||
@ -980,10 +967,9 @@ class GreenthreadSafeIterator(object):
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
with self.semaphore:
|
||||
return next(self.unsafe_iter)
|
||||
__next__ = next
|
||||
|
||||
|
||||
def timing_stats(**dec_kwargs):
|
||||
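
The same pattern reduced to a toy example (not from the Swift tree): py3 iterators only need __iter__ and __next__, so the `__next__ = next` aliases can simply go::

    class CountingIterator(object):
        """Wrap an iterable and count how many items have been yielded."""

        def __init__(self, iterable):
            self.iterator = iter(iterable)
            self.count = 0

        def __iter__(self):
            return self

        def __next__(self):
            value = next(self.iterator)   # StopIteration propagates naturally
            self.count += 1
            return value

    it = CountingIterator('abc')
    assert list(it) == ['a', 'b', 'c'] and it.count == 3
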
@ -1238,13 +1224,13 @@ def hash_path(account, container=None, object=None, raw_digest=False):
|
||||
"""
|
||||
if object and not container:
|
||||
raise ValueError('container is required if object is provided')
|
||||
paths = [account if isinstance(account, six.binary_type)
|
||||
paths = [account if isinstance(account, bytes)
|
||||
else account.encode('utf8')]
|
||||
if container:
|
||||
paths.append(container if isinstance(container, six.binary_type)
|
||||
paths.append(container if isinstance(container, bytes)
|
||||
else container.encode('utf8'))
|
||||
if object:
|
||||
paths.append(object if isinstance(object, six.binary_type)
|
||||
paths.append(object if isinstance(object, bytes)
|
||||
else object.encode('utf8'))
|
||||
if raw_digest:
|
||||
return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
|
||||
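
Sketch of the bytes handling above with made-up prefix/suffix values (the real ones come from swift.conf, and Swift's own md5 wrapper passes usedforsecurity=False)::

    from hashlib import md5

    HASH_PATH_PREFIX = b'changeme'    # illustrative only
    HASH_PATH_SUFFIX = b'changeme'

    def to_bytes(value):
        # str components are UTF-8 encoded; bytes pass through untouched
        return value if isinstance(value, bytes) else value.encode('utf8')

    paths = [to_bytes(p) for p in ('AUTH_test', 'c\u00f6ntainer', b'obj')]
    digest = md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
                 + HASH_PATH_SUFFIX).hexdigest()
    print(digest)
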
@ -2006,7 +1992,7 @@ class GreenAsyncPile(object):
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
while True:
|
||||
try:
|
||||
rv = self._responses.get_nowait()
|
||||
@ -2018,7 +2004,6 @@ class GreenAsyncPile(object):
|
||||
if rv is DEAD:
|
||||
continue
|
||||
return rv
|
||||
__next__ = next
|
||||
|
||||
|
||||
class StreamingPile(GreenAsyncPile):
|
||||
@ -2460,8 +2445,6 @@ class ClosingIterator(object):
|
||||
self.close()
|
||||
raise
|
||||
|
||||
next = __next__ # py2
|
||||
|
||||
def close(self):
|
||||
if not self.closed:
|
||||
for wrapped in self.closeables:
|
||||
@ -3085,18 +3068,14 @@ def parse_mime_headers(doc_file):
|
||||
while True:
|
||||
line = doc_file.readline()
|
||||
done = line in (b'\r\n', b'\n', b'')
|
||||
if six.PY3:
|
||||
try:
|
||||
line = line.decode('utf-8')
|
||||
except UnicodeDecodeError:
|
||||
line = line.decode('latin1')
|
||||
try:
|
||||
line = line.decode('utf-8')
|
||||
except UnicodeDecodeError:
|
||||
line = line.decode('latin1')
|
||||
headers.append(line)
|
||||
if done:
|
||||
break
|
||||
if six.PY3:
|
||||
header_string = ''.join(headers)
|
||||
else:
|
||||
header_string = b''.join(headers)
|
||||
header_string = ''.join(headers)
|
||||
headers = email.parser.Parser().parsestr(header_string)
|
||||
return HeaderKeyDict(headers)
|
||||
|
||||
@ -3111,7 +3090,7 @@ def mime_to_document_iters(input_file, boundary, read_chunk_size=4096):
|
||||
(e.g. "divider", not "--divider")
|
||||
:param read_chunk_size: size of strings read via input_file.read()
|
||||
"""
|
||||
if six.PY3 and isinstance(boundary, str):
|
||||
if isinstance(boundary, str):
|
||||
# Since the boundary is in client-supplied headers, it can contain
|
||||
# garbage that trips us and we don't like client-induced 500.
|
||||
boundary = boundary.encode('latin-1', errors='replace')
|
||||
@ -3351,8 +3330,6 @@ class NamespaceOuterBound(object):
|
||||
def __bool__(self):
|
||||
return False
|
||||
|
||||
__nonzero__ = __bool__
|
||||
|
||||
|
||||
@functools.total_ordering
|
||||
class Namespace(object):
|
||||
@ -3451,9 +3428,7 @@ class Namespace(object):
|
||||
|
||||
@classmethod
|
||||
def _encode(cls, value):
|
||||
if six.PY2 and isinstance(value, six.text_type):
|
||||
return value.encode('utf-8')
|
||||
if six.PY3 and isinstance(value, six.binary_type):
|
||||
if isinstance(value, bytes):
|
||||
# This should never fail -- the value should always be coming from
|
||||
# valid swift paths, which means UTF-8
|
||||
return value.decode('utf-8')
|
||||
@ -3462,8 +3437,8 @@ class Namespace(object):
|
||||
def _encode_bound(self, bound):
|
||||
if isinstance(bound, NamespaceOuterBound):
|
||||
return bound
|
||||
if not (isinstance(bound, six.text_type) or
|
||||
isinstance(bound, six.binary_type)):
|
||||
if not (isinstance(bound, str) or
|
||||
isinstance(bound, bytes)):
|
||||
raise TypeError('must be a string type')
|
||||
return self._encode(bound)
|
||||
|
||||
@ -4364,7 +4339,8 @@ class ShardRangeList(UserList):
|
||||
"""
|
||||
|
||||
def __getitem__(self, index):
|
||||
# workaround for py3 - not needed for py2.7,py3.8
|
||||
# workaround for py36,py37 - not needed for py3.8+
|
||||
# see https://github.com/python/cpython/commit/b1c3167c
|
||||
result = self.data[index]
|
||||
return ShardRangeList(result) if type(result) is list else result
|
||||
|
||||
@ -4593,7 +4569,7 @@ def strict_b64decode(value, allow_line_breaks=False):
|
||||
value = value.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError
|
||||
if not isinstance(value, six.text_type):
|
||||
if not isinstance(value, str):
|
||||
raise ValueError
|
||||
# b64decode will silently discard bad characters, but we want to
|
||||
# treat them as an error
|
||||
@ -4604,10 +4580,7 @@ def strict_b64decode(value, allow_line_breaks=False):
|
||||
strip_chars += '\r\n'
|
||||
if any(c not in valid_chars for c in value.strip(strip_chars)):
|
||||
raise ValueError
|
||||
try:
|
||||
return base64.b64decode(value)
|
||||
except (TypeError, binascii.Error): # (py2 error, py3 error)
|
||||
raise ValueError
|
||||
return base64.b64decode(value)
|
||||
|
||||
|
||||
def cap_length(value, max_length):
|
||||
|
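
On py3 only binascii.Error needs catching; the dropped TypeError branch was py2's failure mode. A standalone illustration (validate=True is just for the demo, the real helper checks the characters itself)::

    import base64
    import binascii

    def b64decode_or_value_error(value):
        try:
            return base64.b64decode(value, validate=True)
        except binascii.Error:
            raise ValueError('not valid base64')

    assert b64decode_or_value_error('Zm9v') == b'foo'
    try:
        b64decode_or_value_error('!!not base64!!')
    except ValueError:
        pass
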
@ -23,8 +23,7 @@ This module should not import from other utils modules.
|
||||
import codecs
|
||||
import hashlib
|
||||
|
||||
import six
|
||||
from six.moves.urllib.parse import quote as _quote
|
||||
from urllib.parse import quote as _quote
|
||||
|
||||
|
||||
try:
|
||||
@ -50,10 +49,9 @@ except TypeError:
|
||||
|
||||
utf8_decoder = codecs.getdecoder('utf-8')
|
||||
utf8_encoder = codecs.getencoder('utf-8')
|
||||
if not six.PY2:
|
||||
# Apparently under py3 we need to go to utf-16 to collapse surrogates?
|
||||
utf16_decoder = codecs.getdecoder('utf-16')
|
||||
utf16_encoder = codecs.getencoder('utf-16')
|
||||
# Apparently under py3 we need to go to utf-16 to collapse surrogates?
|
||||
utf16_decoder = codecs.getdecoder('utf-16')
|
||||
utf16_encoder = codecs.getencoder('utf-16')
|
||||
|
||||
|
||||
def get_valid_utf8_str(str_or_unicode):
|
||||
@ -62,20 +60,15 @@ def get_valid_utf8_str(str_or_unicode):
|
||||
|
||||
:param str_or_unicode: a string or an unicode which can be invalid utf-8
|
||||
"""
|
||||
if six.PY2:
|
||||
if isinstance(str_or_unicode, six.text_type):
|
||||
(str_or_unicode, _len) = utf8_encoder(str_or_unicode, 'replace')
|
||||
(valid_unicode_str, _len) = utf8_decoder(str_or_unicode, 'replace')
|
||||
else:
|
||||
if isinstance(str_or_unicode, six.binary_type):
|
||||
try:
|
||||
(str_or_unicode, _len) = utf8_decoder(str_or_unicode,
|
||||
'surrogatepass')
|
||||
except UnicodeDecodeError:
|
||||
(str_or_unicode, _len) = utf8_decoder(str_or_unicode,
|
||||
'replace')
|
||||
(str_or_unicode, _len) = utf16_encoder(str_or_unicode, 'surrogatepass')
|
||||
(valid_unicode_str, _len) = utf16_decoder(str_or_unicode, 'replace')
|
||||
if isinstance(str_or_unicode, bytes):
|
||||
try:
|
||||
(str_or_unicode, _len) = utf8_decoder(str_or_unicode,
|
||||
'surrogatepass')
|
||||
except UnicodeDecodeError:
|
||||
(str_or_unicode, _len) = utf8_decoder(str_or_unicode,
|
||||
'replace')
|
||||
(str_or_unicode, _len) = utf16_encoder(str_or_unicode, 'surrogatepass')
|
||||
(valid_unicode_str, _len) = utf16_decoder(str_or_unicode, 'replace')
|
||||
return valid_unicode_str.encode('utf-8')
|
||||
|
||||
|
||||
@ -84,7 +77,7 @@ def quote(value, safe='/'):
|
||||
Patched version of urllib.quote that encodes utf-8 strings before quoting
|
||||
"""
|
||||
quoted = _quote(get_valid_utf8_str(value), safe)
|
||||
if isinstance(value, six.binary_type):
|
||||
if isinstance(value, bytes):
|
||||
quoted = quoted.encode('utf-8')
|
||||
return quoted
|
||||
|
||||
|
@ -13,12 +13,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import six
|
||||
import os
|
||||
import operator
|
||||
import re
|
||||
from six.moves import configparser
|
||||
from six.moves.configparser import (ConfigParser, RawConfigParser)
|
||||
import configparser
|
||||
from configparser import ConfigParser, RawConfigParser
|
||||
|
||||
# Used when reading config values
|
||||
TRUE_VALUES = {'true', '1', 'yes', 'on', 't', 'y'}
|
||||
@ -30,7 +29,7 @@ def config_true_value(value):
|
||||
Returns False otherwise.
|
||||
"""
|
||||
return value is True or \
|
||||
(isinstance(value, six.string_types) and value.lower() in TRUE_VALUES)
|
||||
(isinstance(value, str) and value.lower() in TRUE_VALUES)
|
||||
|
||||
|
||||
def _non_negative_number(value, expected_type_f=float,
|
||||
@ -105,7 +104,7 @@ def config_auto_int_value(value, default):
|
||||
Returns value as an int or raises ValueError otherwise.
|
||||
"""
|
||||
if value is None or \
|
||||
(isinstance(value, six.string_types) and value.lower() == 'auto'):
|
||||
(isinstance(value, str) and value.lower() == 'auto'):
|
||||
return default
|
||||
try:
|
||||
value = int(value)
|
||||
@ -338,15 +337,12 @@ def read_conf_dir(parser, conf_dir):
|
||||
return parser.read(sorted(conf_files))
|
||||
|
||||
|
||||
if six.PY2:
|
||||
NicerInterpolation = None # just don't cause ImportErrors over in wsgi.py
|
||||
else:
|
||||
class NicerInterpolation(configparser.BasicInterpolation):
|
||||
def before_get(self, parser, section, option, value, defaults):
|
||||
if '%(' not in value:
|
||||
return value
|
||||
return super(NicerInterpolation, self).before_get(
|
||||
parser, section, option, value, defaults)
|
||||
class NicerInterpolation(configparser.BasicInterpolation):
|
||||
def before_get(self, parser, section, option, value, defaults):
|
||||
if '%(' not in value:
|
||||
return value
|
||||
return super(NicerInterpolation, self).before_get(
|
||||
parser, section, option, value, defaults)
|
||||
|
||||
|
||||
def readconf(conf_path, section_name=None, log_name=None, defaults=None,
|
||||
@ -370,27 +366,21 @@ def readconf(conf_path, section_name=None, log_name=None, defaults=None,
|
||||
if raw:
|
||||
c = RawConfigParser(defaults)
|
||||
else:
|
||||
if six.PY2:
|
||||
c = ConfigParser(defaults)
|
||||
else:
|
||||
# In general, we haven't really thought much about interpolation
|
||||
# in configs. Python's default ConfigParser has always supported
|
||||
# it, though, so *we* got it "for free". Unfortunately, since we
|
||||
# "supported" interpolation, we have to assume there are
|
||||
# deployments in the wild that use it, and try not to break them.
|
||||
# So, do what we can to mimic the py2 behavior of passing through
|
||||
# values like "1%" (which we want to support for
|
||||
# fallocate_reserve).
|
||||
c = ConfigParser(defaults, interpolation=NicerInterpolation())
|
||||
# In general, we haven't really thought much about interpolation
|
||||
# in configs. Python's default ConfigParser has always supported
|
||||
# it, though, so *we* got it "for free". Unfortunately, since we
|
||||
# "supported" interpolation, we have to assume there are
|
||||
# deployments in the wild that use it, and try not to break them.
|
||||
# So, do what we can to mimic the py2 behavior of passing through
|
||||
# values like "1%" (which we want to support for
|
||||
# fallocate_reserve).
|
||||
c = ConfigParser(defaults, interpolation=NicerInterpolation())
|
||||
c.optionxform = str # Don't lower-case keys
|
||||
|
||||
if hasattr(conf_path, 'readline'):
|
||||
if hasattr(conf_path, 'seek'):
|
||||
conf_path.seek(0)
|
||||
if six.PY2:
|
||||
c.readfp(conf_path)
|
||||
else:
|
||||
c.read_file(conf_path)
|
||||
c.read_file(conf_path)
|
||||
else:
|
||||
if os.path.isdir(conf_path):
|
||||
# read all configs in directory
|
||||
|
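
A self-contained illustration of why the interpolation class matters (the config contents are hypothetical): a bare '%' such as fallocate_reserve = 1% would trip BasicInterpolation but passes through NicerInterpolation untouched::

    import configparser
    from configparser import ConfigParser
    from io import StringIO

    class NicerInterpolation(configparser.BasicInterpolation):
        def before_get(self, parser, section, option, value, defaults):
            if '%(' not in value:
                return value
            return super(NicerInterpolation, self).before_get(
                parser, section, option, value, defaults)

    conf = ConfigParser(interpolation=NicerInterpolation())
    conf.read_file(StringIO('[app:proxy-server]\nfallocate_reserve = 1%\n'))
    assert conf.get('app:proxy-server', 'fallocate_reserve') == '1%'
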
@ -27,7 +27,6 @@ import sys
|
||||
import time
|
||||
import fcntl
|
||||
import eventlet
|
||||
import six
|
||||
import datetime
|
||||
|
||||
from swift.common.utils.base import md5, quote, split_path
|
||||
@ -41,11 +40,8 @@ from swift.common.utils.config import config_true_value
|
||||
# we do the same here
|
||||
import swift.common.exceptions
|
||||
|
||||
if six.PY2:
|
||||
from eventlet.green import httplib as green_http_client
|
||||
else:
|
||||
from eventlet.green.http import client as green_http_client
|
||||
from six.moves import http_client
|
||||
from eventlet.green.http import client as green_http_client
|
||||
import http.client
|
||||
from eventlet.green import threading
|
||||
|
||||
|
||||
@ -350,7 +346,7 @@ class SwiftLogAdapter(logging.LoggerAdapter, object):
|
||||
_junk, exc, _junk = sys.exc_info()
|
||||
call = self.error
|
||||
emsg = ''
|
||||
if isinstance(exc, (http_client.BadStatusLine,
|
||||
if isinstance(exc, (http.client.BadStatusLine,
|
||||
green_http_client.BadStatusLine)):
|
||||
# Use error(); not really exceptional
|
||||
emsg = repr(exc)
|
||||
@ -503,9 +499,8 @@ class LoggerFileObject(object):
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
raise IOError(errno.EBADF, 'Bad file descriptor')
|
||||
__next__ = next
|
||||
|
||||
def read(self, size=-1):
|
||||
raise IOError(errno.EBADF, 'Bad file descriptor')
|
||||
@ -744,8 +739,7 @@ class StrAnonymizer(str):
|
||||
|
||||
def __new__(cls, data, method, salt):
|
||||
method = method.lower()
|
||||
if method not in (hashlib.algorithms if six.PY2 else
|
||||
hashlib.algorithms_guaranteed):
|
||||
if method not in hashlib.algorithms_guaranteed:
|
||||
raise ValueError('Unsupported hashing method: %r' % method)
|
||||
s = str.__new__(cls, data or '')
|
||||
s.method = method
|
||||
@ -762,8 +756,8 @@ class StrAnonymizer(str):
|
||||
else:
|
||||
h = getattr(hashlib, self.method)()
|
||||
if self.salt:
|
||||
h.update(six.b(self.salt))
|
||||
h.update(six.b(self))
|
||||
h.update(self.salt.encode('latin1'))
|
||||
h.update(self.encode('latin1'))
|
||||
return '{%s%s}%s' % ('S' if self.salt else '', self.method.upper(),
|
||||
h.hexdigest())
|
||||
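
An equivalent standalone sketch (the hash method and salt are arbitrary): six.b() defaulted to latin-1 on py3, so the replacement keeps that encoding::

    import hashlib

    def anonymize(value, method='md5', salt=''):
        if method not in hashlib.algorithms_guaranteed:
            raise ValueError('Unsupported hashing method: %r' % method)
        h = hashlib.new(method)
        if salt:
            h.update(salt.encode('latin1'))   # six.b() was latin-1
        h.update(value.encode('latin1'))
        return '{%s%s}%s' % ('S' if salt else '', method.upper(), h.hexdigest())

    print(anonymize('10.1.2.3', method='sha1', salt='pepper'))
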
|
||||
@ -880,7 +874,7 @@ def get_policy_index(req_headers, res_headers):
|
||||
"""
|
||||
header = 'X-Backend-Storage-Policy-Index'
|
||||
policy_index = res_headers.get(header, req_headers.get(header))
|
||||
if isinstance(policy_index, six.binary_type) and not six.PY2:
|
||||
if isinstance(policy_index, bytes):
|
||||
policy_index = policy_index.decode('ascii')
|
||||
return str(policy_index) if policy_index is not None else None
|
||||
|
||||
|
@ -21,8 +21,6 @@ import math
|
||||
import sys
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
|
||||
NORMAL_FORMAT = "%016.05f"
|
||||
INTERNAL_FORMAT = NORMAL_FORMAT + '_%016x'
|
||||
@ -90,7 +88,7 @@ class Timestamp(object):
|
||||
"""
|
||||
if isinstance(timestamp, bytes):
|
||||
timestamp = timestamp.decode('ascii')
|
||||
if isinstance(timestamp, six.string_types):
|
||||
if isinstance(timestamp, str):
|
||||
base, base_offset = timestamp.partition('_')[::2]
|
||||
self.timestamp = float(base)
|
||||
if '_' in base_offset:
|
||||
@ -140,11 +138,8 @@ class Timestamp(object):
|
||||
def __int__(self):
|
||||
return int(self.timestamp)
|
||||
|
||||
def __nonzero__(self):
|
||||
return bool(self.timestamp or self.offset)
|
||||
|
||||
def __bool__(self):
|
||||
return self.__nonzero__()
|
||||
return bool(self.timestamp or self.offset)
|
||||
|
||||
@property
|
||||
def normal(self):
|
||||
@ -176,24 +171,21 @@ class Timestamp(object):
|
||||
:return: an isoformat string
|
||||
"""
|
||||
t = float(self.normal)
|
||||
if six.PY3:
|
||||
# On Python 3, round manually using ROUND_HALF_EVEN rounding
|
||||
# method, to use the same rounding method as Python 2. Python 3
|
||||
# used a different rounding method, but Python 3.4.4 and 3.5.1 use
|
||||
# again ROUND_HALF_EVEN as Python 2.
|
||||
# See https://bugs.python.org/issue23517
|
||||
frac, t = math.modf(t)
|
||||
us = round(frac * 1e6)
|
||||
if us >= 1000000:
|
||||
t += 1
|
||||
us -= 1000000
|
||||
elif us < 0:
|
||||
t -= 1
|
||||
us += 1000000
|
||||
dt = datetime.datetime.fromtimestamp(t, UTC)
|
||||
dt = dt.replace(microsecond=us)
|
||||
else:
|
||||
dt = datetime.datetime.fromtimestamp(t, UTC)
|
||||
# On Python 3, round manually using ROUND_HALF_EVEN rounding
|
||||
# method, to use the same rounding method as Python 2. Python 3
|
||||
# used a different rounding method, but Python 3.4.4 and 3.5.1 use
|
||||
# again ROUND_HALF_EVEN as Python 2.
|
||||
# See https://bugs.python.org/issue23517
|
||||
frac, t = math.modf(t)
|
||||
us = round(frac * 1e6)
|
||||
if us >= 1000000:
|
||||
t += 1
|
||||
us -= 1000000
|
||||
elif us < 0:
|
||||
t -= 1
|
||||
us += 1000000
|
||||
dt = datetime.datetime.fromtimestamp(t, UTC)
|
||||
dt = dt.replace(microsecond=us)
|
||||
|
||||
isoformat = dt.isoformat()
|
||||
# need to drop tzinfo
|
||||
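
The de-indented rounding logic, runnable on its own (the sample timestamp is arbitrary and the tz handling is simplified)::

    import datetime
    import math

    UTC = datetime.timezone.utc

    def isoformat(timestamp):
        t = float(timestamp)
        # round microseconds half-to-even so results match the old py2 output
        frac, t = math.modf(t)
        us = round(frac * 1e6)
        if us >= 1000000:
            t += 1
            us -= 1000000
        elif us < 0:
            t -= 1
            us += 1000000
        dt = datetime.datetime.fromtimestamp(t, UTC).replace(microsecond=us)
        return dt.isoformat().split('+')[0]   # drop the tzinfo suffix

    print(isoformat('1402437380.58186'))
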
@ -316,7 +308,7 @@ def decode_timestamps(encoded, explicit=False):
|
||||
# TODO: some tests, e.g. in test_replicator, put float timestamps values
|
||||
# into container db's, hence this defensive check, but in real world
|
||||
# this may never happen.
|
||||
if not isinstance(encoded, six.string_types):
|
||||
if not isinstance(encoded, str):
|
||||
ts = Timestamp(encoded)
|
||||
return ts, ts, ts
|
||||
|
||||
|
@ -18,7 +18,6 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import errno
|
||||
import fcntl
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
@ -31,10 +30,7 @@ import eventlet.debug
|
||||
from eventlet import greenio, GreenPool, sleep, wsgi, listen, Timeout
|
||||
from paste.deploy import loadwsgi
|
||||
from eventlet.green import socket, ssl, os as green_os
|
||||
from io import BytesIO
|
||||
|
||||
import six
|
||||
from six import StringIO
|
||||
from io import BytesIO, StringIO
|
||||
|
||||
from swift.common import utils, constraints
|
||||
from swift.common.http_protocol import SwiftHttpProtocol, \
|
||||
@ -67,8 +63,7 @@ class NamedConfigLoader(loadwsgi.ConfigLoader):
|
||||
"""
|
||||
|
||||
def get_context(self, object_type, name=None, global_conf=None):
|
||||
if not six.PY2:
|
||||
self.parser._interpolation = NicerInterpolation()
|
||||
self.parser._interpolation = NicerInterpolation()
|
||||
context = super(NamedConfigLoader, self).get_context(
|
||||
object_type, name=name, global_conf=global_conf)
|
||||
context.name = name
|
||||
@ -128,10 +123,7 @@ class ConfigString(NamedConfigLoader):
|
||||
self.parser.optionxform = str # Don't lower-case keys
|
||||
# Defaults don't need interpolation (crazy PasteDeploy...)
|
||||
self.parser.defaults = lambda: dict(self.parser._defaults, **defaults)
|
||||
if six.PY2:
|
||||
self.parser.readfp(self.contents)
|
||||
else:
|
||||
self.parser.read_file(self.contents)
|
||||
self.parser.read_file(self.contents)
|
||||
|
||||
def readline(self, *args, **kwargs):
|
||||
return self.contents.readline(*args, **kwargs)
|
||||
@ -198,10 +190,7 @@ def get_socket(conf):
|
||||
sock = listen(bind_addr, backlog=int(conf.get('backlog', 4096)),
|
||||
family=address_family)
|
||||
if 'cert_file' in conf:
|
||||
if six.PY2:
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
else:
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
context.verify_mode = ssl.CERT_NONE
|
||||
context.load_cert_chain(conf['cert_file'], conf['key_file'])
|
||||
warn_ssl = True
|
||||
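
For reference, the py3-only TLS setup in isolation (certificate paths are placeholders, so the wrapping calls are left commented)::

    import ssl

    # PROTOCOL_TLS_SERVER negotiates the best TLS version both sides support,
    # replacing the PROTOCOL_SSLv23 constant the py2 branch used
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.verify_mode = ssl.CERT_NONE
    # context.load_cert_chain('/etc/swift/proxy.crt', '/etc/swift/proxy.key')
    # tls_sock = context.wrap_socket(listening_sock, server_side=True)
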
@ -513,13 +502,10 @@ class StrategyBase(object):
|
||||
"""
|
||||
|
||||
for sock in self.iter_sockets():
|
||||
if six.PY2:
|
||||
fcntl.fcntl(sock.fileno(), fcntl.F_SETFD, fcntl.FD_CLOEXEC)
|
||||
else:
|
||||
# Python 3.4 and later default to sockets having close-on-exec
|
||||
# set (what PEP 0446 calls "non-inheritable"). This new method
|
||||
# on socket objects is provided to toggle it.
|
||||
sock.set_inheritable(False)
|
||||
# Python 3.4 and later default to sockets having close-on-exec
|
||||
# set (what PEP 0446 calls "non-inheritable"). This new method
|
||||
# on socket objects is provided to toggle it.
|
||||
sock.set_inheritable(False)
|
||||
|
||||
def signal_ready(self):
|
||||
"""
|
||||
|
@ -20,9 +20,7 @@ import errno
|
||||
import os
|
||||
from uuid import uuid4
|
||||
|
||||
import six
|
||||
from six.moves import range
|
||||
from six.moves.urllib.parse import unquote
|
||||
from urllib.parse import unquote
|
||||
import sqlite3
|
||||
from eventlet import tpool
|
||||
|
||||
@ -33,7 +31,7 @@ from swift.common.utils import Timestamp, encode_timestamps, \
|
||||
ShardRange, renamer, MD5_OF_EMPTY_STRING, mkdirs, get_db_files, \
|
||||
parse_db_filename, make_db_file_path, split_path, RESERVED_BYTE, \
|
||||
ShardRangeList, Namespace
|
||||
from swift.common.db import DatabaseBroker, utf8encode, BROKER_TIMEOUT, \
|
||||
from swift.common.db import DatabaseBroker, BROKER_TIMEOUT, \
|
||||
zero_like, DatabaseAlreadyExists, SQLITE_ARG_LIMIT
|
||||
|
||||
DATADIR = 'containers'
|
||||
@ -1138,9 +1136,6 @@ class ContainerBroker(DatabaseBroker):
|
||||
if transform_func is None:
|
||||
transform_func = self._transform_record
|
||||
delim_force_gte = False
|
||||
if six.PY2:
|
||||
(marker, end_marker, prefix, delimiter, path) = utf8encode(
|
||||
marker, end_marker, prefix, delimiter, path)
|
||||
self._commit_puts_stale_ok()
|
||||
if reverse:
|
||||
# Reverse the markers if we are reversing the listing.
|
||||
@ -1335,9 +1330,7 @@ class ContainerBroker(DatabaseBroker):
|
||||
:param source: if defined, update incoming_sync with the source
|
||||
"""
|
||||
for item in item_list:
|
||||
if six.PY2 and isinstance(item['name'], six.text_type):
|
||||
item['name'] = item['name'].encode('utf-8')
|
||||
elif not six.PY2 and isinstance(item['name'], six.binary_type):
|
||||
if isinstance(item['name'], bytes):
|
||||
item['name'] = item['name'].decode('utf-8')
|
||||
|
||||
def _really_really_merge_items(conn):
|
||||
@ -1433,9 +1426,7 @@ class ContainerBroker(DatabaseBroker):
|
||||
if isinstance(item, ShardRange):
|
||||
item = dict(item)
|
||||
for col in ('name', 'lower', 'upper'):
|
||||
if six.PY2 and isinstance(item[col], six.text_type):
|
||||
item[col] = item[col].encode('utf-8')
|
||||
elif not six.PY2 and isinstance(item[col], six.binary_type):
|
||||
if isinstance(item[col], bytes):
|
||||
item[col] = item[col].decode('utf-8')
|
||||
item_list.append(item)
|
||||
|
||||
|
@ -19,7 +19,6 @@ import itertools
|
||||
import logging
|
||||
|
||||
from eventlet import GreenPile, GreenPool, Timeout
|
||||
import six
|
||||
|
||||
from swift.common import constraints
|
||||
from swift.common.daemon import Daemon, run_daemon
|
||||
@ -272,11 +271,7 @@ def parse_raw_obj(obj_info):
|
||||
:returns: a queue entry dict with the keys: q_policy_index, account,
|
||||
container, obj, q_op, q_ts, q_record, and path
|
||||
"""
|
||||
if six.PY2:
|
||||
raw_obj_name = obj_info['name'].encode('utf-8')
|
||||
else:
|
||||
raw_obj_name = obj_info['name']
|
||||
|
||||
raw_obj_name = obj_info['name']
|
||||
policy_index, obj_name = raw_obj_name.split(':', 1)
|
||||
q_policy_index = int(policy_index)
|
||||
account, container, obj = split_path(obj_name, 3, 3, rest_with_last=True)
|
||||
@ -758,9 +753,6 @@ class ContainerReconciler(Daemon):
|
||||
# reversed order since we expect older containers to be empty
|
||||
for c in reversed(one_page):
|
||||
container = c['name']
|
||||
if six.PY2:
|
||||
# encoding here is defensive
|
||||
container = container.encode('utf8')
|
||||
if container == current_container:
|
||||
continue # we've already hit this one this pass
|
||||
yield container
|
||||
|
@ -21,8 +21,7 @@ import traceback
|
||||
|
||||
from eventlet import Timeout
|
||||
|
||||
import six
|
||||
from six.moves.urllib.parse import quote
|
||||
from urllib.parse import quote
|
||||
|
||||
import swift.common.db
|
||||
from swift.container.sync_store import ContainerSyncStore
|
||||
@ -237,10 +236,7 @@ class ContainerController(BaseStorageServer):
|
||||
return HTTPBadRequest(req=req)
|
||||
|
||||
if account_partition:
|
||||
# zip is lazy on py3, but we need a list, so force evaluation.
|
||||
# On py2 it's an extra list copy, but the list is so small
|
||||
# (one element per replica in account ring, usually 3) that it
|
||||
# doesn't matter.
|
||||
# zip is lazy, but we need a list, so force evaluation.
|
||||
updates = list(zip(account_hosts, account_devices))
|
||||
else:
|
||||
updates = []
|
||||
@ -644,11 +640,10 @@ class ContainerController(BaseStorageServer):
|
||||
"""
|
||||
# record is object info
|
||||
(name, created, size, content_type, etag) = record[:5]
|
||||
name_ = name.decode('utf8') if six.PY2 else name
|
||||
if content_type is None:
|
||||
return {'subdir': name_}
|
||||
return {'subdir': name}
|
||||
response = {
|
||||
'bytes': size, 'hash': etag, 'name': name_,
|
||||
'bytes': size, 'hash': etag, 'name': name,
|
||||
'content_type': content_type}
|
||||
override_bytes_from_content_type(response, logger=self.logger)
|
||||
response['last_modified'] = Timestamp(created).isoformat
|
||||
|
@ -24,8 +24,7 @@ from operator import itemgetter
|
||||
from random import random
|
||||
|
||||
import os
|
||||
import six
|
||||
from six.moves.urllib.parse import quote
|
||||
from urllib.parse import quote
|
||||
from eventlet import Timeout
|
||||
from contextlib import contextmanager
|
||||
|
||||
@ -657,18 +656,13 @@ class CleavingContext(object):
|
||||
return '%s(%s)' % (self.__class__.__name__, ', '.join(
|
||||
'%s=%r' % prop for prop in self))
|
||||
|
||||
def _encode(cls, value):
|
||||
if value is not None and six.PY2 and isinstance(value, six.text_type):
|
||||
return value.encode('utf-8')
|
||||
return value
|
||||
|
||||
@property
|
||||
def cursor(self):
|
||||
return self._cursor
|
||||
|
||||
@cursor.setter
|
||||
def cursor(self, value):
|
||||
self._cursor = self._encode(value)
|
||||
self._cursor = value
|
||||
|
||||
@property
|
||||
def marker(self):
|
||||
|
@ -22,7 +22,7 @@ from random import choice, random
|
||||
from struct import unpack_from
|
||||
|
||||
from eventlet import sleep, Timeout
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import swift.common.db
|
||||
from swift.common.db import DatabaseConnectionError
|
||||
|
@ -30,7 +30,7 @@ The remaining methods in this module are considered implementation specific and
|
||||
are also not considered part of the backend API.
|
||||
"""
|
||||
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
import binascii
|
||||
import copy
|
||||
import errno
|
||||
@ -52,7 +52,6 @@ from datetime import timedelta
|
||||
|
||||
from eventlet import Timeout, tpool
|
||||
from eventlet.hubs import trampoline
|
||||
import six
|
||||
from pyeclib.ec_iface import ECDriverError, ECInvalidFragmentMetadata, \
|
||||
ECBadFragmentChecksum, ECInvalidParameter
|
||||
|
||||
@ -154,16 +153,10 @@ def _encode_metadata(metadata):
|
||||
|
||||
:param metadata: a dict
|
||||
"""
|
||||
if six.PY2:
|
||||
def encode_str(item):
|
||||
if isinstance(item, six.text_type):
|
||||
return item.encode('utf8')
|
||||
return item
|
||||
else:
|
||||
def encode_str(item):
|
||||
if isinstance(item, six.text_type):
|
||||
return item.encode('utf8', 'surrogateescape')
|
||||
return item
|
||||
def encode_str(item):
|
||||
if isinstance(item, str):
|
||||
return item.encode('utf8', 'surrogateescape')
|
||||
return item
|
||||
|
||||
return dict(((encode_str(k), encode_str(v)) for k, v in metadata.items()))
|
||||
|
||||
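
A small, self-contained demonstration of the surrogateescape round trip (the metadata values are invented)::

    def encode_str(item):
        # surrogateescape maps lone surrogates back to the raw bytes they came from
        if isinstance(item, str):
            return item.encode('utf8', 'surrogateescape')
        return item

    metadata = {'name': '/a/c/sn\udce4ck', 'Content-Length': '3'}
    encoded = {encode_str(k): encode_str(v) for k, v in metadata.items()}
    assert encoded[b'name'] == b'/a/c/sn\xe4ck'
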
@ -175,27 +168,16 @@ def _decode_metadata(metadata, metadata_written_by_py3):
|
||||
:param metadata: a dict
|
||||
:param metadata_written_by_py3:
|
||||
"""
|
||||
if six.PY2:
|
||||
def to_str(item, is_name=False):
|
||||
# For years, py2 and py3 handled non-ascii metadata differently;
|
||||
# see https://bugs.launchpad.net/swift/+bug/2012531
|
||||
if metadata_written_by_py3 and not is_name:
|
||||
# do our best to read new-style data replicated from a py3 node
|
||||
item = item.decode('utf8').encode('latin1')
|
||||
if isinstance(item, six.text_type):
|
||||
return item.encode('utf8')
|
||||
return item
|
||||
else:
|
||||
def to_str(item, is_name=False):
|
||||
# For years, py2 and py3 handled non-ascii metadata differently;
|
||||
# see https://bugs.launchpad.net/swift/+bug/2012531
|
||||
if not metadata_written_by_py3 and isinstance(item, bytes) \
|
||||
and not is_name:
|
||||
# do our best to read old py2 data
|
||||
item = item.decode('latin1')
|
||||
if isinstance(item, six.binary_type):
|
||||
return item.decode('utf8', 'surrogateescape')
|
||||
return item
|
||||
def to_str(item, is_name=False):
|
||||
# For years, py2 and py3 handled non-ascii metadata differently;
|
||||
# see https://bugs.launchpad.net/swift/+bug/2012531
|
||||
if not metadata_written_by_py3 and isinstance(item, bytes) \
|
||||
and not is_name:
|
||||
# do our best to read old py2 data
|
||||
item = item.decode('latin1')
|
||||
if isinstance(item, bytes):
|
||||
return item.decode('utf8', 'surrogateescape')
|
||||
return item
|
||||
|
||||
return {to_str(k): to_str(v, k == b'name') for k, v in metadata.items()}
|
||||
|
||||
@ -255,10 +237,7 @@ def read_metadata(fd, add_missing_checksum=False):
|
||||
# strings are utf-8 encoded when written, but have not always been
|
||||
# (see https://bugs.launchpad.net/swift/+bug/1678018) so encode them again
|
||||
# when read
|
||||
if six.PY2:
|
||||
metadata = pickle.loads(metadata)
|
||||
else:
|
||||
metadata = pickle.loads(metadata, encoding='bytes')
|
||||
metadata = pickle.loads(metadata, encoding='bytes') # nosec: B301
|
||||
return _decode_metadata(metadata, metadata_written_by_py3)
|
||||
|
||||
|
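
Illustrative only: decoding a py2-era pickle with encoding='bytes' keeps keys and values as bytes, which the decode step above then normalises. The sample blob stands in for old on-disk xattr data::

    import pickle

    # simulate metadata pickled by an old py2 object server (protocol 2)
    blob = pickle.dumps({b'name': b'/AUTH_test/c/o', b'Content-Length': b'12'},
                        protocol=2)
    metadata = pickle.loads(blob, encoding='bytes')  # nosec: B301 - trusted data
    assert metadata[b'name'] == b'/AUTH_test/c/o'
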
||||
@ -360,7 +339,7 @@ def quarantine_renamer(device_path, corrupted_file_path):
|
||||
|
||||
|
||||
def valid_suffix(value):
|
||||
if not isinstance(value, six.string_types) or len(value) != 3:
|
||||
if not isinstance(value, str) or len(value) != 3:
|
||||
return False
|
||||
return all(c in '0123456789abcdef' for c in value)
|
||||
|
||||
@ -381,7 +360,7 @@ def read_hashes(partition_dir):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
hashes = pickle.loads(pickled_hashes)
|
||||
hashes = pickle.loads(pickled_hashes) # nosec: B301
|
||||
except Exception:
|
||||
# pickle.loads() can raise a wide variety of exceptions when
|
||||
# given invalid input depending on the way in which the
|
||||
@ -626,11 +605,7 @@ def get_auditor_status(datadir_path, logger, auditor_type):
|
||||
datadir_path, "auditor_status_%s.json" % auditor_type)
|
||||
status = {}
|
||||
try:
|
||||
if six.PY3:
|
||||
statusfile = open(auditor_status, encoding='utf8')
|
||||
else:
|
||||
statusfile = open(auditor_status, 'rb')
|
||||
with statusfile:
|
||||
with open(auditor_status, encoding='utf8') as statusfile:
|
||||
status = statusfile.read()
|
||||
except (OSError, IOError) as e:
|
||||
if e.errno != errno.ENOENT and logger:
|
||||
@ -648,9 +623,7 @@ def get_auditor_status(datadir_path, logger, auditor_type):
|
||||
|
||||
|
||||
def update_auditor_status(datadir_path, logger, partitions, auditor_type):
|
||||
status = json.dumps({'partitions': partitions})
|
||||
if six.PY3:
|
||||
status = status.encode('utf8')
|
||||
status = json.dumps({'partitions': partitions}).encode('utf8')
|
||||
auditor_status = os.path.join(
|
||||
datadir_path, "auditor_status_%s.json" % auditor_type)
|
||||
try:
|
||||
@ -1170,22 +1143,19 @@ class BaseDiskFileManager(object):
|
||||
:param path: full path to directory
|
||||
:param policy: storage policy used
|
||||
"""
|
||||
if six.PY2:
|
||||
hashes = defaultdict(lambda: md5(usedforsecurity=False))
|
||||
else:
|
||||
class shim(object):
|
||||
def __init__(self):
|
||||
self.md5 = md5(usedforsecurity=False)
|
||||
class shim(object):
|
||||
def __init__(self):
|
||||
self.md5 = md5(usedforsecurity=False)
|
||||
|
||||
def update(self, s):
|
||||
if isinstance(s, str):
|
||||
self.md5.update(s.encode('utf-8'))
|
||||
else:
|
||||
self.md5.update(s)
|
||||
def update(self, s):
|
||||
if isinstance(s, str):
|
||||
self.md5.update(s.encode('utf-8'))
|
||||
else:
|
||||
self.md5.update(s)
|
||||
|
||||
def hexdigest(self):
|
||||
return self.md5.hexdigest()
|
||||
hashes = defaultdict(shim)
|
||||
def hexdigest(self):
|
||||
return self.md5.hexdigest()
|
||||
hashes = defaultdict(shim)
|
||||
try:
|
||||
path_contents = sorted(os.listdir(path))
|
||||
except OSError as err:
|
||||
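
The shim in isolation (the file names are made up): it simply tolerates str updates the way the py2 md5 object did::

    from collections import defaultdict
    from hashlib import md5

    class shim(object):
        def __init__(self):
            self.md5 = md5()

        def update(self, s):
            # accept str or bytes, like hashlib did under py2
            self.md5.update(s.encode('utf-8') if isinstance(s, str) else s)

        def hexdigest(self):
            return self.md5.hexdigest()

    hashes = defaultdict(shim)
    hashes['abc'].update('1719859200.00000.data')   # str
    hashes['abc'].update(b'1719859201.00000.ts')    # bytes
    print(hashes['abc'].hexdigest())
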
@ -3206,7 +3176,7 @@ class ECDiskFileReader(BaseDiskFileReader):
|
||||
def _check_frag(self, frag):
|
||||
if not frag:
|
||||
return
|
||||
if not isinstance(frag, six.binary_type):
|
||||
if not isinstance(frag, bytes):
|
||||
# ECInvalidParameter can be returned if the frag violates the input
|
||||
# format so for safety, check the input chunk if it's binary to
|
||||
# avoid quarantining a valid fragment archive.
|
||||
|
@ -13,8 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from random import random
|
||||
from time import time
|
||||
@ -344,7 +343,7 @@ class ObjectExpirer(Daemon):
|
||||
task_container,
|
||||
acceptable_statuses=[2]):
|
||||
container_empty = False
|
||||
task_object = o['name'].encode('utf8') if six.PY2 else o['name']
|
||||
task_object = o['name']
|
||||
try:
|
||||
delete_timestamp, target_account, target_container, \
|
||||
target_object = parse_task_obj(task_object)
|
||||
|
@ -21,8 +21,7 @@ from os.path import join
|
||||
import random
|
||||
import time
|
||||
from collections import defaultdict
|
||||
import six
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
import shutil
|
||||
|
||||
from eventlet import (GreenPile, GreenPool, Timeout, sleep, tpool, spawn)
|
||||
@ -84,7 +83,7 @@ def _full_path(node, part, relative_path, policy):
|
||||
:class:`~swift.common.storage_policy.BaseStoragePolicy`
|
||||
:return: string representation of absolute path on node plus policy index
|
||||
"""
|
||||
if not isinstance(relative_path, six.text_type):
|
||||
if not isinstance(relative_path, str):
|
||||
relative_path = relative_path.decode('utf8')
|
||||
return '%(node)s/%(part)s%(path)s policy#%(policy)d' % {
|
||||
'node': node_to_string(node, replication=True),
|
||||
@ -935,7 +934,7 @@ class ObjectReconstructor(Daemon):
|
||||
"Invalid response %(resp)s from %(full_path)s",
|
||||
{'resp': resp.status, 'full_path': full_path})
|
||||
else:
|
||||
remote_suffixes = pickle.loads(resp.read())
|
||||
remote_suffixes = pickle.loads(resp.read()) # nosec: B301
|
||||
except (Exception, Timeout):
|
||||
# all exceptions are logged here so that our caller can
|
||||
# safely catch our exception and continue to the next node
|
||||
@ -1286,11 +1285,11 @@ class ObjectReconstructor(Daemon):
|
||||
policy2devices = {}
|
||||
for policy in self.policies:
|
||||
self.load_object_ring(policy)
|
||||
local_devices = list(six.moves.filter(
|
||||
lambda dev: dev and is_local_device(
|
||||
local_devices = [
|
||||
dev for dev in policy.object_ring.devs
|
||||
if dev and is_local_device(
|
||||
ips, self.port,
|
||||
dev['replication_ip'], dev['replication_port']),
|
||||
policy.object_ring.devs))
|
||||
dev['replication_ip'], dev['replication_port'])]
|
||||
policy2devices[policy] = local_devices
|
||||
return policy2devices
|
||||
|
||||
|
@ -22,8 +22,7 @@ import random
|
||||
import shutil
|
||||
import time
|
||||
import itertools
|
||||
from six import viewkeys
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
|
||||
import eventlet
|
||||
from eventlet import GreenPool, queue, tpool, Timeout, sleep
|
||||
@ -554,8 +553,8 @@ class ObjectReplicator(Daemon):
|
||||
failure_devs_info.add((node['replication_ip'],
|
||||
node['device']))
|
||||
if success and node['region'] != job['region']:
|
||||
synced_remote_regions[node['region']] = viewkeys(
|
||||
candidates)
|
||||
synced_remote_regions[node['region']] = \
|
||||
candidates.keys()
|
||||
responses.append(success)
|
||||
for cand_objs in synced_remote_regions.values():
|
||||
if delete_objs is None:
|
||||
@ -710,7 +709,8 @@ class ObjectReplicator(Daemon):
|
||||
failure_devs_info.add((node['replication_ip'],
|
||||
node['device']))
|
||||
continue
|
||||
remote_hash = pickle.loads(resp.read())
|
||||
remote_hash = pickle.loads(
|
||||
resp.read()) # nosec: B301
|
||||
finally:
|
||||
conn.close()
|
||||
del resp
|
||||
|
@ -15,9 +15,8 @@
|
||||
|
||||
""" Object Server for Swift """
|
||||
|
||||
import six
|
||||
import six.moves.cPickle as pickle
|
||||
from six.moves.urllib.parse import unquote
|
||||
import pickle # nosec: B403
|
||||
from urllib.parse import unquote
|
||||
import json
|
||||
import os
|
||||
import multiprocessing
|
||||
@ -200,8 +199,8 @@ class ObjectController(BaseStorageServer):
|
||||
# disk_chunk_size parameter. However, it affects all created sockets
|
||||
# using this class so we have chosen to tie it to the
|
||||
# network_chunk_size parameter value instead.
|
||||
if six.PY2:
|
||||
socket._fileobject.default_bufsize = self.network_chunk_size
|
||||
# if six.PY2:
|
||||
# socket._fileobject.default_bufsize = self.network_chunk_size
|
||||
# TODO: find a way to enable similar functionality in py3
|
||||
|
||||
# Provide further setup specific to an object server implementation.
|
||||
|
@ -17,7 +17,7 @@
|
||||
import eventlet.greenio
|
||||
import eventlet.wsgi
|
||||
from eventlet import sleep
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from swift.common import exceptions
|
||||
from swift.common import http
|
||||
|
@ -14,8 +14,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
from eventlet import sleep
|
||||
import six
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from swift.common import bufferedhttp
|
||||
from swift.common import exceptions
|
||||
@ -306,7 +305,7 @@ class Sender(object):
|
||||
frag_index=self.job.get('frag_index'),
|
||||
frag_prefs=frag_prefs)
|
||||
if self.remote_check_objs is not None:
|
||||
hash_gen = six.moves.filter(
|
||||
hash_gen = filter(
|
||||
lambda objhash_timestamps:
|
||||
objhash_timestamps[0] in
|
||||
self.remote_check_objs, hash_gen)
|
||||
@ -353,11 +352,10 @@ class Sender(object):
|
||||
if line == b':MISSING_CHECK: START':
|
||||
break
|
||||
elif line:
|
||||
if not six.PY2:
|
||||
try:
|
||||
line = line.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
try:
|
||||
line = line.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
raise exceptions.ReplicationException(
|
||||
'Unexpected response: %r' % utils.cap_length(line, 1024))
|
||||
while True:
|
||||
@ -442,11 +440,10 @@ class Sender(object):
|
||||
if line == b':UPDATES: START':
|
||||
break
|
||||
elif line:
|
||||
if not six.PY2:
|
||||
try:
|
||||
line = line.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
try:
|
||||
line = line.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
raise exceptions.ReplicationException(
|
||||
'Unexpected response: %r' % utils.cap_length(line, 1024))
|
||||
while True:
|
||||
@ -459,11 +456,10 @@ class Sender(object):
|
||||
if line == b':UPDATES: END':
|
||||
break
|
||||
elif line:
|
||||
if not six.PY2:
|
||||
try:
|
||||
line = line.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
try:
|
||||
line = line.decode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
raise exceptions.ReplicationException(
|
||||
'Unexpected response: %r' % utils.cap_length(line, 1024))
|
||||
|
||||
|
@ -12,9 +12,9 @@
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from six.moves import queue
|
||||
import queue
|
||||
|
||||
import six.moves.cPickle as pickle
|
||||
import pickle # nosec: B403
|
||||
import errno
|
||||
import os
|
||||
import signal
|
||||
@ -62,8 +62,6 @@ class RateLimiterBucket(EventletRateLimiter):
|
||||
def __bool__(self):
|
||||
return bool(self.deque)
|
||||
|
||||
__nonzero__ = __bool__ # py2
|
||||
|
||||
def __lt__(self, other):
|
||||
# used to sort RateLimiterBuckets by readiness
|
||||
if isinstance(other, RateLimiterBucket):
|
||||
@ -145,7 +143,7 @@ class BucketizedUpdateSkippingLimiter(object):
|
||||
def _get_time(self):
|
||||
return time.time()
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
# first iterate over the wrapped iterator...
|
||||
for update_ctx in self.iterator:
|
||||
bucket = self.buckets[self._bucket_key(update_ctx['update'])]
|
||||
@ -213,8 +211,6 @@ class BucketizedUpdateSkippingLimiter(object):
|
||||
|
||||
raise StopIteration()
|
||||
|
||||
__next__ = next
|
||||
|
||||
|
||||
class OldestAsyncPendingTracker:
|
||||
"""
|
||||
@ -648,7 +644,7 @@ class ObjectUpdater(Daemon):
|
||||
|
||||
def _load_update(self, device, update_path):
|
||||
try:
|
||||
return pickle.load(open(update_path, 'rb'))
|
||||
return pickle.load(open(update_path, 'rb')) # nosec: B301
|
||||
except Exception as e:
|
||||
if getattr(e, 'errno', None) == errno.ENOENT:
|
||||
return
|
||||
|
@ -13,7 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from six.moves.urllib.parse import unquote
|
||||
from urllib.parse import unquote
|
||||
|
||||
from swift.account.utils import account_listing_response
|
||||
from swift.common.middleware.acl import parse_acl, format_acl
|
||||
|
@ -24,7 +24,7 @@
|
||||
# These shenanigans are to ensure all related objects can be garbage
|
||||
# collected. We've seen objects hang around forever otherwise.
|
||||
|
||||
from six.moves.urllib.parse import quote
|
||||
from urllib.parse import quote
|
||||
|
||||
import time
|
||||
import json
|
||||
@ -34,10 +34,8 @@ import itertools
|
||||
import operator
|
||||
import random
|
||||
from copy import deepcopy
|
||||
from sys import exc_info
|
||||
|
||||
from eventlet.timeout import Timeout
|
||||
import six
|
||||
|
||||
from swift.common.memcached import MemcacheConnectionError
|
||||
from swift.common.wsgi import make_pre_authed_env, make_pre_authed_request
|
||||
@ -626,16 +624,10 @@ def get_cache_key(account, container=None, obj=None, shard=None):
|
||||
with obj)
|
||||
:returns: a (native) string cache_key
|
||||
"""
|
||||
if six.PY2:
|
||||
def to_native(s):
|
||||
if s is None or isinstance(s, str):
|
||||
return s
|
||||
return s.encode('utf8')
|
||||
else:
|
||||
def to_native(s):
|
||||
if s is None or isinstance(s, str):
|
||||
return s
|
||||
return s.decode('utf8', 'surrogateescape')
|
||||
def to_native(s):
|
||||
if s is None or isinstance(s, str):
|
||||
return s
|
||||
return s.decode('utf8', 'surrogateescape')
|
||||
|
||||
account = to_native(account)
|
||||
container = to_native(container)
|
||||
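
Usage sketch (the byte strings are invented): surrogateescape lets even undecodable bytes produce a usable native-string cache key::

    def to_native(s):
        if s is None or isinstance(s, str):
            return s
        return s.decode('utf8', 'surrogateescape')

    assert to_native(b'AUTH_test') == 'AUTH_test'
    assert to_native(b'c\xff') == 'c\udcff'   # invalid UTF-8 survives as surrogates
    assert to_native(None) is None
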
@ -844,27 +836,6 @@ def _get_info_from_memcache(app, env, account, container=None):
|
||||
else:
|
||||
info = memcache.get(cache_key)
|
||||
cache_state = 'hit' if info else 'miss'
|
||||
if info and six.PY2:
|
||||
# Get back to native strings
|
||||
new_info = {}
|
||||
for key in info:
|
||||
new_key = key.encode("utf-8") if isinstance(
|
||||
key, six.text_type) else key
|
||||
if isinstance(info[key], six.text_type):
|
||||
new_info[new_key] = info[key].encode("utf-8")
|
||||
elif isinstance(info[key], dict):
|
||||
new_info[new_key] = {}
|
||||
for subkey, value in info[key].items():
|
||||
new_subkey = subkey.encode("utf-8") if isinstance(
|
||||
subkey, six.text_type) else subkey
|
||||
if isinstance(value, six.text_type):
|
||||
new_info[new_key][new_subkey] = \
|
||||
value.encode("utf-8")
|
||||
else:
|
||||
new_info[new_key][new_subkey] = value
|
||||
else:
|
||||
new_info[new_key] = info[key]
|
||||
info = new_info
|
||||
if info:
|
||||
env.setdefault('swift.infocache', {})[cache_key] = info
|
||||
return info, cache_state
|
||||
@ -921,12 +892,6 @@ def get_namespaces_from_cache(req, cache_key, skip_chance):
|
||||
cache_state = 'error'
|
||||
|
||||
if bounds:
|
||||
if six.PY2:
|
||||
# json.loads() in memcache.get will convert json 'string' to
|
||||
# 'unicode' with python2, here we cast 'unicode' back to 'str'
|
||||
bounds = [
|
||||
[lower.encode('utf-8'), name.encode('utf-8')]
|
||||
for lower, name in bounds]
|
||||
ns_bound_list = NamespaceBoundList(bounds)
|
||||
infocache[cache_key] = ns_bound_list
|
||||
else:
|
||||
@ -1439,14 +1404,13 @@ class GetOrHeadHandler(GetterBase):
|
||||
chunk = part_file.read(self.app.object_chunk_size)
|
||||
if nbytes is not None:
|
||||
nbytes -= len(chunk)
|
||||
except (ChunkReadTimeout, ShortReadError):
|
||||
exc_type, exc_value, exc_traceback = exc_info()
|
||||
except (ChunkReadTimeout, ShortReadError) as e:
|
||||
if self.newest or self.server_type != 'Object':
|
||||
raise
|
||||
try:
|
||||
self.fast_forward(self.bytes_used_from_backend)
|
||||
except (HTTPException, ValueError):
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
raise e
|
||||
except RangeAlreadyComplete:
|
||||
break
|
||||
if self._replace_source(
|
||||
@ -1458,10 +1422,10 @@ class GetOrHeadHandler(GetterBase):
|
||||
# Tried to find a new node from which to
|
||||
# finish the GET, but failed. There's
|
||||
# nothing more we can do here.
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
raise e
|
||||
part_file = ByteCountEnforcer(part_file, nbytes)
|
||||
else:
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
raise e
|
||||
else:
|
||||
if not chunk:
|
||||
break
|
||||
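
A toy version of the control flow above (exception types simplified): on py3, re-raising the saved exception object preserves its traceback, so six.reraise() is unnecessary::

    def fast_forward():
        raise ValueError('cannot resume from this offset')

    try:
        try:
            raise TimeoutError('chunk read timed out')
        except TimeoutError as e:
            try:
                fast_forward()
            except ValueError:
                raise e           # re-raise the original error, traceback intact
    except TimeoutError as caught:
        assert isinstance(caught.__context__, ValueError)   # implicit chaining
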
@ -1880,7 +1844,7 @@ class NodeIter(object):
|
||||
return dict(node, use_replication=is_use_replication_network(
|
||||
self.request.headers))
|
||||
|
||||
def next(self):
|
||||
def __next__(self):
|
||||
node = None
|
||||
if self._node_provider:
|
||||
# give node provider the opportunity to inject a node
|
||||
@ -1889,9 +1853,6 @@ class NodeIter(object):
|
||||
node = next(self._node_iter)
|
||||
return self._annotate_node(node)
|
||||
|
||||
def __next__(self):
|
||||
return self.next()
|
||||
|
||||
|
||||
class Controller(object):
|
||||
"""Base WSGI controller class for the proxy"""
|
||||
|
@ -15,8 +15,7 @@
|
||||
|
||||
import json
|
||||
|
||||
import six
|
||||
from six.moves.urllib.parse import unquote
|
||||
from urllib.parse import unquote
|
||||
|
||||
from swift.common.utils import public, private, csv_append, Timestamp, \
|
||||
config_true_value, cache_from_env, filter_namespaces, NamespaceBoundList
|
||||
@ -508,8 +507,6 @@ class ContainerController(Controller):
|
||||
else:
|
||||
last_name = objects[-1]['name']
|
||||
|
||||
if six.PY2:
|
||||
last_name = last_name.encode('utf8')
|
||||
params['marker'] = str_to_wsgi(last_name)
|
||||
elif marker:
|
||||
params['marker'] = str_to_wsgi(marker)
|
||||
@ -594,8 +591,6 @@ class ContainerController(Controller):
|
||||
break
|
||||
last_name = objects[-1].get('name',
|
||||
objects[-1].get('subdir', u''))
|
||||
if six.PY2:
|
||||
last_name = last_name.encode('utf8')
|
||||
if end_marker and reverse and end_marker >= last_name:
|
||||
break
|
||||
if end_marker and not reverse and end_marker <= last_name:
|
||||
|
@ -24,9 +24,7 @@
|
||||
# These shenanigans are to ensure all related objects can be garbage
|
||||
# collected. We've seen objects hang around forever otherwise.
|
||||
|
||||
import six
|
||||
from six.moves.urllib.parse import quote, unquote
|
||||
from six.moves import zip
|
||||
from urllib.parse import quote, unquote
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
@ -35,7 +33,6 @@ import mimetypes
|
||||
import time
|
||||
import math
|
||||
import random
|
||||
import sys
|
||||
|
||||
from greenlet import GreenletExit
|
||||
from eventlet import GreenPile
|
||||
@ -1301,8 +1298,6 @@ class ECAppIter(object):
|
||||
def __next__(self):
|
||||
return next(self.stashed_iter)
|
||||
|
||||
next = __next__ # py2
|
||||
|
||||
def _real_iter(self, req, resp_headers):
|
||||
if not self.range_specs:
|
||||
client_asked_for_range = False
|
||||
@ -2490,13 +2485,12 @@ class ECFragGetter(GetterBase):
|
||||
buf += chunk
|
||||
if nbytes is not None:
|
||||
nbytes -= len(chunk)
|
||||
except (ChunkReadTimeout, ShortReadError):
|
||||
exc_type, exc_value, exc_traceback = sys.exc_info()
|
||||
except (ChunkReadTimeout, ShortReadError) as e:
|
||||
try:
|
||||
self.fast_forward(self.bytes_used_from_backend)
|
||||
except (HTTPException, ValueError):
|
||||
self.logger.exception('Unable to fast forward')
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
raise e
|
||||
except RangeAlreadyComplete:
|
||||
break
|
||||
buf = b''
|
||||
@ -2509,10 +2503,10 @@ class ECFragGetter(GetterBase):
|
||||
# it's not clear to me how to make
|
||||
# _get_next_response_part raise StopIteration for the
|
||||
# first doc part of a new request
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
raise e
|
||||
part_file = ByteCountEnforcer(part_file, nbytes)
|
||||
else:
|
||||
six.reraise(exc_type, exc_value, exc_traceback)
|
||||
raise e
|
||||
else:
|
||||
if buf and self.skip_bytes:
|
||||
if self.skip_bytes < len(buf):
|
||||
|
@ -18,7 +18,6 @@ import sys
|
||||
from contextlib import contextmanager
|
||||
|
||||
import os
|
||||
from six import reraise
|
||||
|
||||
from unittest.util import safe_repr
|
||||
|
||||
@ -105,15 +104,15 @@ def annotate_failure(msg):
|
||||
try:
|
||||
yield
|
||||
except AssertionError as err:
|
||||
err_typ, err_val, err_tb = sys.exc_info()
|
||||
if err_val.args:
|
||||
msg = '%s Failed with %s' % (msg, err_val.args[0])
|
||||
err_val.args = (msg, ) + err_val.args[1:]
|
||||
if err.args:
|
||||
msg = '%s Failed with %s' % (msg, err.args[0])
|
||||
err.args = (msg, ) + err.args[1:]
|
||||
raise err
|
||||
else:
|
||||
# workaround for some IDE's raising custom AssertionErrors
|
||||
err_val = '%s Failed with %s' % (msg, err)
|
||||
err_typ = AssertionError
|
||||
reraise(err_typ, err_val, err_tb)
|
||||
raise AssertionError(
|
||||
'%s Failed with %s' % (msg, err)
|
||||
).with_traceback(err.__traceback__) from err.__cause__
|
||||
|
||||
|
||||
class BaseTestCase(unittest.TestCase):
|
||||
|
@ -24,9 +24,9 @@ import threading
import time
import traceback

from six.moves import urllib
from six.moves import socketserver
from six.moves import SimpleHTTPServer
import urllib.parse
import socketserver
import http.server

try:
import selenium.webdriver
@ -51,12 +51,14 @@ TEST_TIMEOUT = 120.0  # seconds
STEPS = 500


# Hack up stdlib so SimpleHTTPRequestHandler works well on py2, too
this_dir = os.path.realpath(os.path.dirname(__file__))
os.getcwd = lambda: this_dir
class CORSSiteHandler(http.server.SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super().__init__(
*args,
directory=os.path.realpath(os.path.dirname(__file__)),
**kwargs,
)


class CORSSiteHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def log_message(self, fmt, *args):
pass  # quiet, you!

@ -67,7 +69,7 @@ class CORSSiteServer(socketserver.TCPServer):

class CORSSite(threading.Thread):
def __init__(self, bind_port=8000):
super(CORSSite, self).__init__()
super().__init__()
self.server = None
self.bind_port = bind_port
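The removed `os.getcwd()` monkey-patch existed only because the py2 `SimpleHTTPRequestHandler` served files from the current working directory; since Python 3.7 the handler accepts a `directory` keyword instead. A small usage sketch (class name and ephemeral port are illustrative):

    import http.server
    import os
    import socketserver

    class QuietHandler(http.server.SimpleHTTPRequestHandler):
        def __init__(self, *args, **kwargs):
            # directory= exists on py3.7+, so no cwd patching is needed
            super().__init__(*args, directory=os.path.dirname(__file__) or '.',
                             **kwargs)

        def log_message(self, fmt, *args):
            pass  # keep test output quiet

    with socketserver.TCPServer(('127.0.0.1', 0), QuietHandler) as server:
        print('serving on port', server.server_address[1])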
@ -20,8 +20,7 @@ import contextlib

import mock
import os
import six
from six.moves.urllib.parse import urlparse, urlsplit, urlunsplit
from urllib.parse import urlparse, urlsplit, urlunsplit
import sys
import pickle
import socket
@ -39,9 +38,9 @@ from shutil import rmtree
from tempfile import mkdtemp
from unittest import SkipTest

from six.moves.configparser import ConfigParser
from six.moves import http_client
from six.moves.http_client import HTTPException
from configparser import ConfigParser
import http.client
from http.client import HTTPException

from swift.common.middleware.memcache import MemcacheMiddleware
from swift.common.storage_policy import parse_storage_policies, PolicyError
@ -65,7 +64,7 @@ from swift.container import server as container_server
from swift.obj import server as object_server, mem_server as mem_object_server
import swift.proxy.controllers.obj

http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT
http.client._MAXHEADERS = constraints.MAX_HEADER_COUNT
DEBUG = True

# In order to get the proper blocking behavior of sockets without using
@ -357,9 +356,7 @@ def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs):
conf,
"proxy-logging proxy-server",
"keymaster encryption proxy-logging proxy-server")
root_secret = base64.b64encode(os.urandom(32))
if not six.PY2:
root_secret = root_secret.decode('ascii')
root_secret = base64.b64encode(os.urandom(32)).decode('ascii')
conf.set('filter:keymaster', 'encryption_root_secret', root_secret)
conf.set('filter:versioned_writes', 'allow_object_versioning', 'true')
conf.set('filter:etag-quoter', 'enable_by_default', 'true')
@ -1129,10 +1126,6 @@ def get_url_token(user_index, os_options):
swift_test_user[user_index],
swift_test_key[user_index],
**authargs)
if six.PY2 and not isinstance(url, bytes):
url = url.encode('utf-8')
if six.PY2 and not isinstance(token, bytes):
token = token.encode('utf-8')
return url, token
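The `_load_encryption` change works because `base64.b64encode()` always returns bytes on Python 3, so the py2 guard around the decode goes away and the secret is decoded unconditionally, for example:

    import base64
    import os

    # b64encode() returns bytes; decode once to get the str the config expects
    root_secret = base64.b64encode(os.urandom(32)).decode('ascii')
    assert isinstance(root_secret, str) and len(root_secret) == 44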
@ -15,7 +15,7 @@

import logging
import os
from six.moves.urllib.parse import urlparse
from urllib.parse import urlparse
import test.functional as tf
import boto3
from botocore.exceptions import ClientError
@ -27,7 +27,6 @@ try:
)
except ImportError:
S3Connection = OrdinaryCallingFormat = S3ResponseError = None
import six
import sys
import traceback

@ -96,21 +95,11 @@ class Connection(object):
break

for bucket in buckets:
if six.PY2 and not isinstance(bucket.name, bytes):
bucket.name = bucket.name.encode('utf-8')

try:
for upload in bucket.list_multipart_uploads():
upload.cancel_upload()

for obj in bucket.list_versions():
if six.PY2:
if not isinstance(obj.name, bytes):
obj.name = obj.name.encode('utf-8')
if obj.version_id is not None and \
not isinstance(obj.version_id, bytes):
obj.version_id = \
obj.version_id.encode('utf-8')
bucket.delete_key(
obj.name, version_id=obj.version_id)
@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import six
import unittest
import os
import test.functional as tf
@ -83,8 +82,6 @@ class TestS3ApiMultiDelete(S3ApiBase):
self.assertEqual(len(resp_objects), len(req_objects))
for o in resp_objects:
key = o.find('Key').text
if six.PY2:
key = key.decode('utf-8')
self.assertTrue(key in req_objects)

# Delete 2 objects via MultiDelete API
@ -101,8 +98,6 @@ class TestS3ApiMultiDelete(S3ApiBase):
self.assertEqual(len(resp_objects), len(req_objects))
for o in resp_objects:
key = o.find('Key').text
if six.PY2:
key = key.decode('utf-8')
self.assertTrue(key in req_objects)

if with_non_ascii:
@ -124,8 +119,6 @@ class TestS3ApiMultiDelete(S3ApiBase):
self.assertEqual(len(resp_objects), len(req_objects))
for o in resp_objects:
key = o.find('Key').text
if six.PY2:
key = key.decode('utf-8')
self.assertTrue(key in req_objects)

# Delete 2 objects via MultiDelete API but no objects exist
@ -142,8 +135,6 @@ class TestS3ApiMultiDelete(S3ApiBase):
self.assertEqual(len(resp_objects), len(req_objects))
for o in resp_objects:
key = o.find('Key').text
if six.PY2:
key = key.decode('utf-8')
self.assertTrue(key in req_objects)

def test_delete_multi_objects(self):
@ -17,8 +17,8 @@ import base64
import binascii
import unittest

import six
from six.moves import urllib, zip, zip_longest
import urllib.parse
from itertools import zip_longest

import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring, tostring, \
@ -135,8 +135,6 @@ class TestS3ApiMultiUpload(S3ApiBase):
elem = fromstring(body, 'InitiateMultipartUploadResult')
self.assertEqual(elem.find('Bucket').text, bucket)
key = elem.find('Key').text
if six.PY2:
expected_key = expected_key.encode('utf-8')
self.assertEqual(expected_key, key)
upload_id = elem.find('UploadId').text
self.assertIsNotNone(upload_id)
@ -475,10 +473,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
resp_objects = list(elem.findall('./Contents'))
self.assertEqual(len(resp_objects), 1)
o = resp_objects[0]
if six.PY2:
expected_key = keys[0].encode('utf-8')
else:
expected_key = keys[0]
expected_key = keys[0]
self.assertEqual(o.find('Key').text, expected_key)
self.assertIsNotNone(o.find('LastModified').text)
self.assertRegex(
@ -20,7 +20,6 @@ import calendar
import email.parser
from email.utils import formatdate, parsedate
from time import mktime
import six

import test.functional as tf
from swift.common import utils
@ -787,10 +786,7 @@ class TestS3ApiObject(S3ApiBase):

# TODO: Using swift.common.utils.multipart_byteranges_to_document_iters
# could be easy enough.
if six.PY2:
parser = email.parser.FeedParser()
else:
parser = email.parser.BytesFeedParser()
parser = email.parser.BytesFeedParser()
parser.feed(
b"Content-Type: multipart/byterange; boundary=%s\r\n\r\n" %
boundary_str.encode('ascii'))
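With py3-only code the response body is always bytes, so only the `BytesFeedParser` branch survives. A tiny illustration with made-up headers and payload:

    import email.parser

    parser = email.parser.BytesFeedParser()
    parser.feed(b"Content-Type: text/plain\r\n\r\n")
    parser.feed(b"hello byteranges\r\n")
    msg = parser.close()
    print(msg.get_content_type())   # text/plain
    print(msg.get_payload().strip())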
@ -15,7 +15,6 @@
# limitations under the License.

import requests
import six

import botocore

@ -62,11 +61,10 @@ class TestS3ApiXxeInjection(S3ApiBaseBoto3):
finally:
self.conn.meta.events.unregister(
'before-sign.s3.*', self._clear_data)
if not params.get('Key') and '/?' not in url and not six.PY2:
if not params.get('Key') and '/?' not in url:
# Some combination of dependencies seems to cause bucket requests
# to not get the trailing slash despite signing with it? But only
# new-enough versions sign with the trailing slash; py2 is stuck
# with old.
# new-enough versions sign with the trailing slash
url = url.replace('?', '/?')
return url
@ -25,9 +25,8 @@ import time
from unittest import SkipTest
from xml.dom import minidom

import six
from six.moves import http_client
from six.moves import urllib
import http.client
import urllib.parse
from swiftclient import get_auth

from swift.common import constraints
@ -37,7 +36,7 @@ from swift.common.utils import config_true_value, md5

from test import safe_repr

http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT
http.client._MAXHEADERS = constraints.MAX_HEADER_COUNT


class AuthenticationFailed(Exception):
@ -138,10 +137,10 @@ def putrequest(self, method, url, skip_host=False, skip_accept_encoding=False):
and self._HTTPConnection__response.isclosed():
self._HTTPConnection__response = None

if self._HTTPConnection__state == http_client._CS_IDLE:
self._HTTPConnection__state = http_client._CS_REQ_STARTED
if self._HTTPConnection__state == http.client._CS_IDLE:
self._HTTPConnection__state = http.client._CS_REQ_STARTED
else:
raise http_client.CannotSendRequest(self._HTTPConnection__state)
raise http.client.CannotSendRequest(self._HTTPConnection__state)

self._method = method
if not url:
@ -225,15 +224,12 @@ class Connection(object):

@storage_url.setter
def storage_url(self, value):
if six.PY2 and not isinstance(value, bytes):
value = value.encode('utf-8')

url = urllib.parse.urlparse(value)

if url.scheme == 'http':
self.conn_class = http_client.HTTPConnection
self.conn_class = http.client.HTTPConnection
elif url.scheme == 'https':
self.conn_class = http_client.HTTPSConnection
self.conn_class = http.client.HTTPSConnection
else:
raise ValueError('unexpected protocol %s' % (url.scheme))

@ -250,7 +246,7 @@ class Connection(object):
def storage_scheme(self):
if self.conn_class is None:
return None
if issubclass(self.conn_class, http_client.HTTPSConnection):
if issubclass(self.conn_class, http.client.HTTPSConnection):
return 'https'
return 'http'

@ -411,7 +407,7 @@ class Connection(object):
except socket.timeout as e:
fail_messages.append(safe_repr(e))
continue
except http_client.HTTPException as e:
except http.client.HTTPException as e:
fail_messages.append(safe_repr(e))
continue

@ -500,7 +496,7 @@ class Base(object):
'x-container-bytes-used',
)

# NB: on py2, headers are always lower; on py3, they match the bytes
# NB: on py2, headers were always lower; on py3, they match the bytes
# on the wire
headers = dict((wsgi_to_str(h).lower(), wsgi_to_str(v))
for h, v in self.conn.response.getheaders())
@ -568,9 +564,6 @@ class Account(Base):
if status == 200:
if format_type == 'json':
conts = json.loads(self.conn.response.read())
if six.PY2:
for cont in conts:
cont['name'] = cont['name'].encode('utf-8')
return conts
elif format_type == 'xml':
conts = []
@ -582,8 +575,6 @@ class Account(Base):
childNodes[0].nodeValue
conts.append(cont)
for cont in conts:
if six.PY2:
cont['name'] = cont['name'].encode('utf-8')
for key in ('count', 'bytes'):
cont[key] = int(cont[key])
return conts
@ -591,8 +582,6 @@ class Account(Base):
lines = self.conn.response.read().split(b'\n')
if lines and not lines[-1]:
lines = lines[:-1]
if six.PY2:
return lines
return [line.decode('utf-8') for line in lines]
elif status == 204:
return []
@ -716,15 +705,7 @@ class Container(Base):
parms=parms, cfg=cfg)
if status == 200:
if format_type == 'json' or 'versions' in parms:
files = json.loads(self.conn.response.read())

if six.PY2:
for file_item in files:
for key in ('name', 'subdir', 'content_type',
'version_id'):
if key in file_item:
file_item[key] = file_item[key].encode('utf-8')
return files
return json.loads(self.conn.response.read())
elif format_type == 'xml':
files = []
tree = minidom.parseString(self.conn.response.read())
@ -745,15 +726,7 @@ class Container(Base):
files.append(file_item)

for file_item in files:
if 'subdir' in file_item:
if six.PY2:
file_item['subdir'] = \
file_item['subdir'].encode('utf-8')
else:
if six.PY2:
file_item.update({
k: file_item[k].encode('utf-8')
for k in ('name', 'content_type')})
if 'bytes' in file_item:
file_item['bytes'] = int(file_item['bytes'])
return files
else:
@ -762,8 +735,6 @@ class Container(Base):
lines = content.split(b'\n')
if lines and not lines[-1]:
lines = lines[:-1]
if six.PY2:
return lines
return [line.decode('utf-8') for line in lines]
else:
return []
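`six.moves.http_client` and `six.moves.urllib` were thin aliases for the Python 3 stdlib modules, so the swap in the test client is mechanical; module-level attributes such as the private `_MAXHEADERS` knob behave the same. A small sketch of the scheme-to-connection-class mapping used above:

    import http.client
    import urllib.parse

    http.client._MAXHEADERS = 200  # same private knob the test client tunes
    url = urllib.parse.urlparse('https://swift.example.com/v1/AUTH_test')
    conn_class = (http.client.HTTPSConnection if url.scheme == 'https'
                  else http.client.HTTPConnection)
    print(conn_class.__name__)  # HTTPSConnection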
@ -15,7 +15,7 @@
# limitations under the License.

import unittest
from six.moves.urllib.parse import urlparse, urlunparse
from urllib.parse import urlparse, urlunparse
import uuid
from random import shuffle
@ -17,11 +17,10 @@

import unittest
import json
import urllib.parse
from uuid import uuid4
from string import ascii_letters

import six
from six.moves import range, urllib
from swift.common.middleware.acl import format_acl
from swift.common.utils import distribute_evenly

@ -756,40 +755,18 @@ class TestAccount(unittest.TestCase):
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
return check_response(conn)
uni_key = u'X-Account-Meta-uni\u0E12'
uni_value = u'uni\u0E12'
# Note that py3 has issues with non-ascii header names; see
# https://bugs.python.org/issue37093
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, '1')
resp.read()
self.assertIn(resp.status, (201, 204))
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
# https://bugs.python.org/issue37093 -- so we won't test with unicode
# header names
resp = retry(post, 'X-Account-Meta-uni', uni_value)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
if six.PY2:
self.assertEqual(resp.getheader('X-Account-Meta-uni'),
uni_value.encode('utf8'))
else:
self.assertEqual(resp.getheader('X-Account-Meta-uni'),
uni_value)
# See above note about py3 and non-ascii header names
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, uni_value)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
uni_value.encode('utf-8'))
self.assertEqual(resp.getheader('X-Account-Meta-uni'),
uni_value)

def test_multi_metadata(self):
if tf.skip:
@ -17,15 +17,13 @@

import json
import unittest
import urllib.parse
from uuid import uuid4

from test.functional import check_response, cluster_info, retry, \
requires_acls, load_constraint, requires_policies, SkipTest
import test.functional as tf

import six
from six.moves import range, urllib


def setUpModule():
tf.setup_package()
@ -72,11 +70,7 @@ class TestContainer(unittest.TestCase):
return check_response(conn)

def delete(url, token, parsed, conn, container, obj):
if six.PY2:
obj_name = obj['name'].encode('utf8')
else:
obj_name = obj['name']
path = '/'.join([parsed.path, container, obj_name])
path = '/'.join([parsed.path, container, obj['name']])
conn.request('DELETE', path, '', {'X-Auth-Token': token})
return check_response(conn)

@ -219,40 +213,18 @@ class TestContainer(unittest.TestCase):
{'X-Auth-Token': token})
return check_response(conn)

uni_key = u'X-Container-Meta-uni\u0E12'
uni_value = u'uni\u0E12'
# Note that py3 has issues with non-ascii header names; see
# https://bugs.python.org/issue37093
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, '1')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
# https://bugs.python.org/issue37093 -- so we won't test with unicode
# header names
resp = retry(post, 'X-Container-Meta-uni', uni_value)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
if six.PY2:
self.assertEqual(resp.getheader('X-Container-Meta-uni'),
uni_value.encode('utf-8'))
else:
self.assertEqual(resp.getheader('X-Container-Meta-uni'),
uni_value)
# See above note about py3 and non-ascii header names
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, uni_value)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
uni_value.encode('utf-8'))
self.assertEqual(resp.getheader('X-Container-Meta-uni'),
uni_value)

def test_PUT_metadata(self):
if tf.skip:
@ -844,9 +816,7 @@ class TestContainer(unittest.TestCase):

# read-only can list containers
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing)

@ -861,9 +831,7 @ class TestContainer(unittest.TestCase):
resp.read()
self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing)

@ -963,9 +931,7 @@ class TestContainer(unittest.TestCase):

# can list containers
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing)

@ -975,9 +941,7 @@ class TestContainer(unittest.TestCase):
resp.read()
self.assertIn(resp.status, (201, 202))
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing)

@ -986,9 +950,7 @@ class TestContainer(unittest.TestCase):
resp.read()
self.assertIn(resp.status, (204, 404))
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertNotIn(new_container_name, listing)

@ -1111,9 +1073,7 @@ class TestContainer(unittest.TestCase):

# can list containers
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing)

@ -1123,9 +1083,7 @@ class TestContainer(unittest.TestCase):
resp.read()
self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing)

@ -1134,9 +1092,7 @@ class TestContainer(unittest.TestCase):
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
listing = resp.read().decode('utf8')
self.assertEqual(resp.status, 200)
self.assertNotIn(new_container_name, listing)