Blackify openstack.object_store

Black used with the '-l 79 -S' flags.

A future change will ignore this commit in git-blame history by adding a
'git-blame-ignore-revs' file.

Change-Id: I9c6e6b898fc7e3a196725bd37a3b5bdc77060cd3
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
This commit is contained in:
Stephen Finucane 2023-05-03 12:14:19 +01:00
parent 34da09f312
commit 4589e293e8
13 changed files with 904 additions and 571 deletions

View File

@ -28,13 +28,13 @@ class BaseResource(resource.Resource):
_last_headers = dict() _last_headers = dict()
def __init__(self, metadata=None, **attrs): def __init__(self, metadata=None, **attrs):
"""Process and save metadata known at creation stage """Process and save metadata known at creation stage"""
"""
super().__init__(**attrs) super().__init__(**attrs)
if metadata is not None: if metadata is not None:
for k, v in metadata.items(): for k, v in metadata.items():
if not k.lower().startswith( if not k.lower().startswith(
self._custom_metadata_prefix.lower()): self._custom_metadata_prefix.lower()
):
self.metadata[self._custom_metadata_prefix + k] = v self.metadata[self._custom_metadata_prefix + k] = v
else: else:
self.metadata[k] = v self.metadata[k] = v
@ -62,8 +62,8 @@ class BaseResource(resource.Resource):
def set_metadata(self, session, metadata, refresh=True): def set_metadata(self, session, metadata, refresh=True):
request = self._prepare_request() request = self._prepare_request()
response = session.post( response = session.post(
request.url, request.url, headers=self._calculate_headers(metadata)
headers=self._calculate_headers(metadata)) )
self._translate_response(response, has_body=False) self._translate_response(response, has_body=False)
if refresh: if refresh:
response = session.head(request.url) response = session.head(request.url)
@ -74,10 +74,11 @@ class BaseResource(resource.Resource):
request = self._prepare_request() request = self._prepare_request()
headers = {key: '' for key in keys} headers = {key: '' for key in keys}
response = session.post( response = session.post(
request.url, request.url, headers=self._calculate_headers(headers)
headers=self._calculate_headers(headers)) )
exceptions.raise_from_response( exceptions.raise_from_response(
response, error_message="Error deleting metadata keys") response, error_message="Error deleting metadata keys"
)
return self return self
def _set_metadata(self, headers): def _set_metadata(self, headers):
@ -85,10 +86,8 @@ class BaseResource(resource.Resource):
for header in headers: for header in headers:
# RADOS and other stuff in front may actually lowcase headers # RADOS and other stuff in front may actually lowcase headers
if header.lower().startswith( if header.lower().startswith(self._custom_metadata_prefix.lower()):
self._custom_metadata_prefix.lower() key = header[len(self._custom_metadata_prefix) :].lower()
):
key = header[len(self._custom_metadata_prefix):].lower()
self.metadata[key] = headers[header] self.metadata[key] = headers[header]
def _translate_response(self, response, has_body=None, error_message=None): def _translate_response(self, response, has_body=None, error_message=None):
@ -98,5 +97,6 @@ class BaseResource(resource.Resource):
# pops known headers. # pops known headers.
self._last_headers = response.headers.copy() self._last_headers = response.headers.copy()
super(BaseResource, self)._translate_response( super(BaseResource, self)._translate_response(
response, has_body=has_body, error_message=error_message) response, has_body=has_body, error_message=error_message
)
self._set_metadata(response.headers) self._set_metadata(response.headers)

View File

@ -40,7 +40,7 @@ class Proxy(proxy.Proxy):
"account": _account.Account, "account": _account.Account,
"container": _container.Container, "container": _container.Container,
"info": _info.Info, "info": _info.Info,
"object": _obj.Object "object": _obj.Object,
} }
skip_discovery = True skip_discovery = True
@ -60,19 +60,25 @@ class Proxy(proxy.Proxy):
# Split url into parts and exclude potential project_id in some urls # Split url into parts and exclude potential project_id in some urls
url_parts = [ url_parts = [
x for x in url_path.split('/') if ( x
for x in url_path.split('/')
if (
x != project_id x != project_id
and ( and (
not project_id not project_id
or (project_id and x != 'AUTH_' + project_id) or (project_id and x != 'AUTH_' + project_id)
)) )
)
] ]
# Strip leading version piece so that # Strip leading version piece so that
# GET /v1/AUTH_xxx # GET /v1/AUTH_xxx
# returns ['AUTH_xxx'] # returns ['AUTH_xxx']
if (url_parts[0] if (
and url_parts[0][0] == 'v' url_parts[0]
and url_parts[0][1] and url_parts[0][1].isdigit()): and url_parts[0][0] == 'v'
and url_parts[0][1]
and url_parts[0][1].isdigit()
):
url_parts = url_parts[1:] url_parts = url_parts[1:]
# Strip out anything that's empty or None # Strip out anything that's empty or None
@ -152,8 +158,9 @@ class Proxy(proxy.Proxy):
:returns: ``None`` :returns: ``None``
""" """
self._delete(_container.Container, container, self._delete(
ignore_missing=ignore_missing) _container.Container, container, ignore_missing=ignore_missing
)
def get_container_metadata(self, container): def get_container_metadata(self, container):
"""Get metadata for a container """Get metadata for a container
@ -219,8 +226,12 @@ class Proxy(proxy.Proxy):
container = self._get_container_name(container=container) container = self._get_container_name(container=container)
for obj in self._list( for obj in self._list(
_obj.Object, container=container, _obj.Object,
paginated=True, format='json', **query): container=container,
paginated=True,
format='json',
**query,
):
obj.container = container obj.container = container
yield obj yield obj
@ -236,8 +247,12 @@ class Proxy(proxy.Proxy):
raise ValueError("container must be specified") raise ValueError("container must be specified")
def get_object( def get_object(
self, obj, container=None, resp_chunk_size=1024, self,
outfile=None, remember_content=False obj,
container=None,
resp_chunk_size=1024,
outfile=None,
remember_content=False,
): ):
"""Get the data associated with an object """Get the data associated with an object
@ -262,20 +277,17 @@ class Proxy(proxy.Proxy):
:raises: :class:`~openstack.exceptions.ResourceNotFound` :raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found. when no resource can be found.
""" """
container_name = self._get_container_name( container_name = self._get_container_name(obj=obj, container=container)
obj=obj, container=container)
_object = self._get_resource( _object = self._get_resource(
_obj.Object, obj, _obj.Object, obj, container=container_name
container=container_name) )
request = _object._prepare_request() request = _object._prepare_request()
get_stream = (outfile is not None) get_stream = outfile is not None
response = self.get( response = self.get(
request.url, request.url, headers=request.headers, stream=get_stream
headers=request.headers,
stream=get_stream
) )
exceptions.raise_from_response(response) exceptions.raise_from_response(response)
_object._translate_response(response, has_body=False) _object._translate_response(response, has_body=False)
@ -286,7 +298,8 @@ class Proxy(proxy.Proxy):
else: else:
outfile_handle = outfile outfile_handle = outfile
for chunk in response.iter_content( for chunk in response.iter_content(
resp_chunk_size, decode_unicode=False): resp_chunk_size, decode_unicode=False
):
outfile_handle.write(chunk) outfile_handle.write(chunk)
if isinstance(outfile, str): if isinstance(outfile, str):
outfile_handle.close() outfile_handle.close()
@ -308,10 +321,10 @@ class Proxy(proxy.Proxy):
:raises: :class:`~openstack.exceptions.ResourceNotFound` :raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found. when no resource can be found.
""" """
container_name = self._get_container_name( container_name = self._get_container_name(obj=obj, container=container)
obj=obj, container=container)
obj = self._get_resource( obj = self._get_resource(
_obj.Object, obj, container=container_name, **attrs) _obj.Object, obj, container=container_name, **attrs
)
return obj.download(self) return obj.download(self)
def stream_object(self, obj, container=None, chunk_size=1024, **attrs): def stream_object(self, obj, container=None, chunk_size=1024, **attrs):
@ -326,18 +339,26 @@ class Proxy(proxy.Proxy):
when no resource can be found. when no resource can be found.
:returns: An iterator that iterates over chunk_size bytes :returns: An iterator that iterates over chunk_size bytes
""" """
container_name = self._get_container_name( container_name = self._get_container_name(obj=obj, container=container)
obj=obj, container=container)
obj = self._get_resource( obj = self._get_resource(
_obj.Object, obj, container=container_name, **attrs) _obj.Object, obj, container=container_name, **attrs
)
return obj.stream(self, chunk_size=chunk_size) return obj.stream(self, chunk_size=chunk_size)
def create_object( def create_object(
self, container, name, filename=None, self,
md5=None, sha256=None, segment_size=None, container,
use_slo=True, metadata=None, name,
generate_checksums=None, data=None, filename=None,
**headers): md5=None,
sha256=None,
segment_size=None,
use_slo=True,
metadata=None,
generate_checksums=None,
data=None,
**headers,
):
"""Create a file object. """Create a file object.
Automatically uses large-object segments if needed. Automatically uses large-object segments if needed.
@ -373,13 +394,14 @@ class Proxy(proxy.Proxy):
""" """
if data is not None and filename: if data is not None and filename:
raise ValueError( raise ValueError(
"Both filename and data given. Please choose one.") "Both filename and data given. Please choose one."
)
if data is not None and not name: if data is not None and not name:
raise ValueError( raise ValueError("name is a required parameter when data is given")
"name is a required parameter when data is given")
if data is not None and generate_checksums: if data is not None and generate_checksums:
raise ValueError( raise ValueError(
"checksums cannot be generated with data parameter") "checksums cannot be generated with data parameter"
)
if generate_checksums is None: if generate_checksums is None:
if data is not None: if data is not None:
generate_checksums = False generate_checksums = False
@ -400,17 +422,22 @@ class Proxy(proxy.Proxy):
metadata[self._connection._OBJECT_SHA256_KEY] = sha256 metadata[self._connection._OBJECT_SHA256_KEY] = sha256
container_name = self._get_container_name(container=container) container_name = self._get_container_name(container=container)
endpoint = '{container}/{name}'.format(container=container_name, endpoint = '{container}/{name}'.format(
name=name) container=container_name, name=name
)
if data is not None: if data is not None:
self.log.debug( self.log.debug(
"swift uploading data to %(endpoint)s", "swift uploading data to %(endpoint)s", {'endpoint': endpoint}
{'endpoint': endpoint}) )
return self._create( return self._create(
_obj.Object, container=container_name, _obj.Object,
name=name, data=data, metadata=metadata, container=container_name,
**headers) name=name,
data=data,
metadata=metadata,
**headers,
)
# segment_size gets used as a step value in a range call, so needs # segment_size gets used as a step value in a range call, so needs
# to be an int # to be an int
@ -423,7 +450,8 @@ class Proxy(proxy.Proxy):
self._connection.log.debug( self._connection.log.debug(
"swift uploading %(filename)s to %(endpoint)s", "swift uploading %(filename)s to %(endpoint)s",
{'filename': filename, 'endpoint': endpoint}) {'filename': filename, 'endpoint': endpoint},
)
if metadata is not None: if metadata is not None:
# Rely on the class headers calculation for requested metadata # Rely on the class headers calculation for requested metadata
@ -435,8 +463,13 @@ class Proxy(proxy.Proxy):
else: else:
self._upload_large_object( self._upload_large_object(
endpoint, filename, headers, endpoint,
file_size, segment_size, use_slo) filename,
headers,
file_size,
segment_size,
use_slo,
)
# Backwards compat # Backwards compat
upload_object = create_object upload_object = create_object
@ -461,8 +494,12 @@ class Proxy(proxy.Proxy):
""" """
container_name = self._get_container_name(obj, container) container_name = self._get_container_name(obj, container)
self._delete(_obj.Object, obj, ignore_missing=ignore_missing, self._delete(
container=container_name) _obj.Object,
obj,
ignore_missing=ignore_missing,
container=container_name,
)
def get_object_metadata(self, obj, container=None): def get_object_metadata(self, obj, container=None):
"""Get metadata for an object. """Get metadata for an object.
@ -522,7 +559,8 @@ class Proxy(proxy.Proxy):
return res return res
def is_object_stale( def is_object_stale(
self, container, name, filename, file_md5=None, file_sha256=None): self, container, name, filename, file_md5=None, file_sha256=None
):
"""Check to see if an object matches the hashes of a file. """Check to see if an object matches the hashes of a file.
:param container: Name of the container. :param container: Name of the container.
@ -538,37 +576,45 @@ class Proxy(proxy.Proxy):
except exceptions.NotFoundException: except exceptions.NotFoundException:
self._connection.log.debug( self._connection.log.debug(
"swift stale check, no object: {container}/{name}".format( "swift stale check, no object: {container}/{name}".format(
container=container, name=name)) container=container, name=name
)
)
return True return True
if not (file_md5 or file_sha256): if not (file_md5 or file_sha256):
(file_md5, file_sha256) = \ (file_md5, file_sha256) = utils._get_file_hashes(filename)
utils._get_file_hashes(filename)
md5_key = metadata.get( md5_key = metadata.get(
self._connection._OBJECT_MD5_KEY, self._connection._OBJECT_MD5_KEY,
metadata.get(self._connection._SHADE_OBJECT_MD5_KEY, '')) metadata.get(self._connection._SHADE_OBJECT_MD5_KEY, ''),
)
sha256_key = metadata.get( sha256_key = metadata.get(
self._connection._OBJECT_SHA256_KEY, metadata.get( self._connection._OBJECT_SHA256_KEY,
self._connection._SHADE_OBJECT_SHA256_KEY, '')) metadata.get(self._connection._SHADE_OBJECT_SHA256_KEY, ''),
)
up_to_date = utils._hashes_up_to_date( up_to_date = utils._hashes_up_to_date(
md5=file_md5, sha256=file_sha256, md5=file_md5,
md5_key=md5_key, sha256_key=sha256_key) sha256=file_sha256,
md5_key=md5_key,
sha256_key=sha256_key,
)
if not up_to_date: if not up_to_date:
self._connection.log.debug( self._connection.log.debug(
"swift checksum mismatch: " "swift checksum mismatch: "
" %(filename)s!=%(container)s/%(name)s", " %(filename)s!=%(container)s/%(name)s",
{'filename': filename, 'container': container, 'name': name}) {'filename': filename, 'container': container, 'name': name},
)
return True return True
self._connection.log.debug( self._connection.log.debug(
"swift object up to date: %(container)s/%(name)s", "swift object up to date: %(container)s/%(name)s",
{'container': container, 'name': name}) {'container': container, 'name': name},
)
return False return False
def _upload_large_object( def _upload_large_object(
self, endpoint, filename, self, endpoint, filename, headers, file_size, segment_size, use_slo
headers, file_size, segment_size, use_slo): ):
# If the object is big, we need to break it up into segments that # If the object is big, we need to break it up into segments that
# are no larger than segment_size, upload each of them individually # are no larger than segment_size, upload each of them individually
# and then upload a manifest object. The segments can be uploaded in # and then upload a manifest object. The segments can be uploaded in
@ -584,28 +630,32 @@ class Proxy(proxy.Proxy):
# segment, the value a FileSegment file-like object that is a # segment, the value a FileSegment file-like object that is a
# slice of the data for the segment. # slice of the data for the segment.
segments = self._get_file_segments( segments = self._get_file_segments(
endpoint, filename, file_size, segment_size) endpoint, filename, file_size, segment_size
)
# Schedule the segments for upload # Schedule the segments for upload
for name, segment in segments.items(): for name, segment in segments.items():
# Async call to put - schedules execution and returns a future # Async call to put - schedules execution and returns a future
segment_future = self._connection._pool_executor.submit( segment_future = self._connection._pool_executor.submit(
self.put, self.put, name, headers=headers, data=segment, raise_exc=False
name, headers=headers, data=segment, )
raise_exc=False)
segment_futures.append(segment_future) segment_futures.append(segment_future)
# TODO(mordred) Collect etags from results to add to this manifest # TODO(mordred) Collect etags from results to add to this manifest
# dict. Then sort the list of dicts by path. # dict. Then sort the list of dicts by path.
manifest.append(dict( manifest.append(
# While Object Storage usually expects the name to be dict(
# urlencoded in most requests, the SLO manifest requires # While Object Storage usually expects the name to be
# plain object names instead. # urlencoded in most requests, the SLO manifest requires
path='/{name}'.format(name=parse.unquote(name)), # plain object names instead.
size_bytes=segment.length)) path='/{name}'.format(name=parse.unquote(name)),
size_bytes=segment.length,
)
)
# Try once and collect failed results to retry # Try once and collect failed results to retry
segment_results, retry_results = self._connection._wait_for_futures( segment_results, retry_results = self._connection._wait_for_futures(
segment_futures, raise_on_error=False) segment_futures, raise_on_error=False
)
self._add_etag_to_manifest(segment_results, manifest) self._add_etag_to_manifest(segment_results, manifest)
@ -616,37 +666,41 @@ class Proxy(proxy.Proxy):
segment.seek(0) segment.seek(0)
# Async call to put - schedules execution and returns a future # Async call to put - schedules execution and returns a future
segment_future = self._connection._pool_executor.submit( segment_future = self._connection._pool_executor.submit(
self.put, self.put, name, headers=headers, data=segment
name, headers=headers, data=segment) )
# TODO(mordred) Collect etags from results to add to this manifest # TODO(mordred) Collect etags from results to add to this manifest
# dict. Then sort the list of dicts by path. # dict. Then sort the list of dicts by path.
retry_futures.append(segment_future) retry_futures.append(segment_future)
# If any segments fail the second time, just throw the error # If any segments fail the second time, just throw the error
segment_results, retry_results = self._connection._wait_for_futures( segment_results, retry_results = self._connection._wait_for_futures(
retry_futures, raise_on_error=True) retry_futures, raise_on_error=True
)
self._add_etag_to_manifest(segment_results, manifest) self._add_etag_to_manifest(segment_results, manifest)
try: try:
if use_slo: if use_slo:
return self._finish_large_object_slo( return self._finish_large_object_slo(
endpoint, headers, manifest) endpoint, headers, manifest
)
else: else:
return self._finish_large_object_dlo( return self._finish_large_object_dlo(endpoint, headers)
endpoint, headers)
except Exception: except Exception:
try: try:
segment_prefix = endpoint.split('/')[-1] segment_prefix = endpoint.split('/')[-1]
self.log.debug( self.log.debug(
"Failed to upload large object manifest for %s. " "Failed to upload large object manifest for %s. "
"Removing segment uploads.", segment_prefix) "Removing segment uploads.",
segment_prefix,
)
self._delete_autocreated_image_objects( self._delete_autocreated_image_objects(
segment_prefix=segment_prefix) segment_prefix=segment_prefix
)
except Exception: except Exception:
self.log.exception( self.log.exception(
"Failed to cleanup image objects for %s:", "Failed to cleanup image objects for %s:", segment_prefix
segment_prefix) )
raise raise
def _finish_large_object_slo(self, endpoint, headers, manifest): def _finish_large_object_slo(self, endpoint, headers, manifest):
@ -656,10 +710,13 @@ class Proxy(proxy.Proxy):
retries = 3 retries = 3
while True: while True:
try: try:
return exceptions.raise_from_response(self.put( return exceptions.raise_from_response(
endpoint, self.put(
params={'multipart-manifest': 'put'}, endpoint,
headers=headers, data=json.dumps(manifest)) params={'multipart-manifest': 'put'},
headers=headers,
data=json.dumps(manifest),
)
) )
except Exception: except Exception:
retries -= 1 retries -= 1
@ -673,7 +730,8 @@ class Proxy(proxy.Proxy):
while True: while True:
try: try:
return exceptions.raise_from_response( return exceptions.raise_from_response(
self.put(endpoint, headers=headers)) self.put(endpoint, headers=headers)
)
except Exception: except Exception:
retries -= 1 retries -= 1
if retries == 0: if retries == 0:
@ -681,8 +739,7 @@ class Proxy(proxy.Proxy):
def _upload_object(self, endpoint, filename, headers): def _upload_object(self, endpoint, filename, headers):
with open(filename, 'rb') as dt: with open(filename, 'rb') as dt:
return self.put( return self.put(endpoint, headers=headers, data=dt)
endpoint, headers=headers, data=dt)
def _get_file_segments(self, endpoint, filename, file_size, segment_size): def _get_file_segments(self, endpoint, filename, file_size, segment_size):
# Use an ordered dict here so that testing can replicate things # Use an ordered dict here so that testing can replicate things
@ -690,10 +747,13 @@ class Proxy(proxy.Proxy):
for (index, offset) in enumerate(range(0, file_size, segment_size)): for (index, offset) in enumerate(range(0, file_size, segment_size)):
remaining = file_size - (index * segment_size) remaining = file_size - (index * segment_size)
segment = _utils.FileSegment( segment = _utils.FileSegment(
filename, offset, filename,
segment_size if segment_size < remaining else remaining) offset,
segment_size if segment_size < remaining else remaining,
)
name = '{endpoint}/{index:0>6}'.format( name = '{endpoint}/{index:0>6}'.format(
endpoint=endpoint, index=index) endpoint=endpoint, index=index
)
segments[name] = segment segments[name] = segment
return segments return segments
@ -710,7 +770,8 @@ class Proxy(proxy.Proxy):
server_max_file_size = DEFAULT_MAX_FILE_SIZE server_max_file_size = DEFAULT_MAX_FILE_SIZE
self._connection.log.info( self._connection.log.info(
"Swift capabilities not supported. " "Swift capabilities not supported. "
"Using default max file size.") "Using default max file size."
)
else: else:
raise raise
else: else:
@ -740,9 +801,7 @@ class Proxy(proxy.Proxy):
continue continue
name = self._object_name_from_url(result.url) name = self._object_name_from_url(result.url)
for entry in manifest: for entry in manifest:
if entry['path'] == '/{name}'.format( if entry['path'] == '/{name}'.format(name=parse.unquote(name)):
name=parse.unquote(name)
):
entry['etag'] = result.headers['Etag'] entry['etag'] = result.headers['Etag']
def get_info(self): def get_info(self):
@ -788,12 +847,16 @@ class Proxy(proxy.Proxy):
temp_url_key = None temp_url_key = None
if container: if container:
container_meta = self.get_container_metadata(container) container_meta = self.get_container_metadata(container)
temp_url_key = (container_meta.meta_temp_url_key_2 temp_url_key = (
or container_meta.meta_temp_url_key) container_meta.meta_temp_url_key_2
or container_meta.meta_temp_url_key
)
if not temp_url_key: if not temp_url_key:
account_meta = self.get_account_metadata() account_meta = self.get_account_metadata()
temp_url_key = (account_meta.meta_temp_url_key_2 temp_url_key = (
or account_meta.meta_temp_url_key) account_meta.meta_temp_url_key_2
or account_meta.meta_temp_url_key
)
if temp_url_key and not isinstance(temp_url_key, bytes): if temp_url_key and not isinstance(temp_url_key, bytes):
temp_url_key = temp_url_key.encode('utf8') temp_url_key = temp_url_key.encode('utf8')
return temp_url_key return temp_url_key
@ -807,12 +870,20 @@ class Proxy(proxy.Proxy):
if not temp_url_key: if not temp_url_key:
raise exceptions.SDKException( raise exceptions.SDKException(
'temp_url_key was not given, nor was a temporary url key' 'temp_url_key was not given, nor was a temporary url key'
' found for the account or the container.') ' found for the account or the container.'
)
return temp_url_key return temp_url_key
def generate_form_signature( def generate_form_signature(
self, container, object_prefix, redirect_url, max_file_size, self,
max_upload_count, timeout, temp_url_key=None): container,
object_prefix,
redirect_url,
max_file_size,
max_upload_count,
timeout,
temp_url_key=None,
):
"""Generate a signature for a FormPost upload. """Generate a signature for a FormPost upload.
:param container: The value can be the name of a container or a :param container: The value can be the name of a container or a
@ -832,33 +903,50 @@ class Proxy(proxy.Proxy):
max_file_size = int(max_file_size) max_file_size = int(max_file_size)
if max_file_size < 1: if max_file_size < 1:
raise exceptions.SDKException( raise exceptions.SDKException(
'Please use a positive max_file_size value.') 'Please use a positive max_file_size value.'
)
max_upload_count = int(max_upload_count) max_upload_count = int(max_upload_count)
if max_upload_count < 1: if max_upload_count < 1:
raise exceptions.SDKException( raise exceptions.SDKException(
'Please use a positive max_upload_count value.') 'Please use a positive max_upload_count value.'
)
if timeout < 1: if timeout < 1:
raise exceptions.SDKException( raise exceptions.SDKException(
'Please use a positive <timeout> value.') 'Please use a positive <timeout> value.'
)
expires = int(time.time() + int(timeout)) expires = int(time.time() + int(timeout))
temp_url_key = self._check_temp_url_key(container=container, temp_url_key = self._check_temp_url_key(
temp_url_key=temp_url_key) container=container, temp_url_key=temp_url_key
)
res = self._get_resource(_container.Container, container) res = self._get_resource(_container.Container, container)
endpoint = parse.urlparse(self.get_endpoint()) endpoint = parse.urlparse(self.get_endpoint())
path = '/'.join([endpoint.path, res.name, object_prefix]) path = '/'.join([endpoint.path, res.name, object_prefix])
data = '%s\n%s\n%s\n%s\n%s' % (path, redirect_url, max_file_size, data = '%s\n%s\n%s\n%s\n%s' % (
max_upload_count, expires) path,
redirect_url,
max_file_size,
max_upload_count,
expires,
)
data = data.encode('utf8') data = data.encode('utf8')
sig = hmac.new(temp_url_key, data, sha1).hexdigest() sig = hmac.new(temp_url_key, data, sha1).hexdigest()
return (expires, sig) return (expires, sig)
def generate_temp_url( def generate_temp_url(
self, path, seconds, method, absolute=False, prefix=False, self,
iso8601=False, ip_range=None, temp_url_key=None): path,
seconds,
method,
absolute=False,
prefix=False,
iso8601=False,
ip_range=None,
temp_url_key=None,
):
"""Generates a temporary URL that gives unauthenticated access to the """Generates a temporary URL that gives unauthenticated access to the
Swift object. Swift object.
@ -894,7 +982,8 @@ class Proxy(proxy.Proxy):
formats = ( formats = (
EXPIRES_ISO8601_FORMAT, EXPIRES_ISO8601_FORMAT,
EXPIRES_ISO8601_FORMAT[:-1], EXPIRES_ISO8601_FORMAT[:-1],
SHORT_EXPIRES_ISO8601_FORMAT) SHORT_EXPIRES_ISO8601_FORMAT,
)
for f in formats: for f in formats:
try: try:
t = time.strptime(seconds, f) t = time.strptime(seconds, f)
@ -919,8 +1008,10 @@ class Proxy(proxy.Proxy):
if timestamp < 0: if timestamp < 0:
raise ValueError() raise ValueError()
except ValueError: except ValueError:
raise ValueError('time must either be a whole number ' raise ValueError(
'or in specific ISO 8601 format.') 'time must either be a whole number '
'or in specific ISO 8601 format.'
)
if isinstance(path, bytes): if isinstance(path, bytes):
try: try:
@ -931,50 +1022,61 @@ class Proxy(proxy.Proxy):
path_for_body = path path_for_body = path
parts = path_for_body.split('/', 4) parts = path_for_body.split('/', 4)
if len(parts) != 5 or parts[0] or not all( if (
parts[1:(4 if prefix else 5)]): len(parts) != 5
or parts[0]
or not all(parts[1 : (4 if prefix else 5)])
):
if prefix: if prefix:
raise ValueError('path must at least contain /v1/a/c/') raise ValueError('path must at least contain /v1/a/c/')
else: else:
raise ValueError('path must be full path to an object' raise ValueError(
' e.g. /v1/a/c/o') 'path must be full path to an object' ' e.g. /v1/a/c/o'
)
standard_methods = ['GET', 'PUT', 'HEAD', 'POST', 'DELETE'] standard_methods = ['GET', 'PUT', 'HEAD', 'POST', 'DELETE']
if method.upper() not in standard_methods: if method.upper() not in standard_methods:
self.log.warning('Non default HTTP method %s for tempurl ' self.log.warning(
'specified, possibly an error', method.upper()) 'Non default HTTP method %s for tempurl '
'specified, possibly an error',
method.upper(),
)
if not absolute: if not absolute:
expiration = int(time.time() + timestamp) expiration = int(time.time() + timestamp)
else: else:
expiration = timestamp expiration = timestamp
hmac_parts = [method.upper(), str(expiration), hmac_parts = [
('prefix:' if prefix else '') + path_for_body] method.upper(),
str(expiration),
('prefix:' if prefix else '') + path_for_body,
]
if ip_range: if ip_range:
if isinstance(ip_range, bytes): if isinstance(ip_range, bytes):
try: try:
ip_range = ip_range.decode('utf-8') ip_range = ip_range.decode('utf-8')
except UnicodeDecodeError: except UnicodeDecodeError:
raise ValueError( raise ValueError('ip_range must be representable as UTF-8')
'ip_range must be representable as UTF-8'
)
hmac_parts.insert(0, "ip=%s" % ip_range) hmac_parts.insert(0, "ip=%s" % ip_range)
hmac_body = u'\n'.join(hmac_parts) hmac_body = u'\n'.join(hmac_parts)
temp_url_key = self._check_temp_url_key(temp_url_key=temp_url_key) temp_url_key = self._check_temp_url_key(temp_url_key=temp_url_key)
sig = hmac.new(temp_url_key, hmac_body.encode('utf-8'), sig = hmac.new(
sha1).hexdigest() temp_url_key, hmac_body.encode('utf-8'), sha1
).hexdigest()
if iso8601: if iso8601:
expiration = time.strftime( expiration = time.strftime(
EXPIRES_ISO8601_FORMAT, time.gmtime(expiration)) EXPIRES_ISO8601_FORMAT, time.gmtime(expiration)
)
temp_url = u'{path}?temp_url_sig={sig}&temp_url_expires={exp}'.format( temp_url = u'{path}?temp_url_sig={sig}&temp_url_expires={exp}'.format(
path=path_for_body, sig=sig, exp=expiration) path=path_for_body, sig=sig, exp=expiration
)
if ip_range: if ip_range:
temp_url += u'&temp_url_ip_range={}'.format(ip_range) temp_url += u'&temp_url_ip_range={}'.format(ip_range)
@ -1020,11 +1122,7 @@ class Proxy(proxy.Proxy):
# ========== Project Cleanup ========== # ========== Project Cleanup ==========
def _get_cleanup_dependencies(self): def _get_cleanup_dependencies(self):
return { return {'object_store': {'before': []}}
'object_store': {
'before': []
}
}
def _service_cleanup( def _service_cleanup(
self, self,
@ -1032,7 +1130,7 @@ class Proxy(proxy.Proxy):
client_status_queue=None, client_status_queue=None,
identified_resources=None, identified_resources=None,
filters=None, filters=None,
resource_evaluation_fn=None resource_evaluation_fn=None,
): ):
is_bulk_delete_supported = False is_bulk_delete_supported = False
bulk_delete_max_per_request = None bulk_delete_max_per_request = None
@ -1044,7 +1142,8 @@ class Proxy(proxy.Proxy):
bulk_delete = caps.swift.get("bulk_delete", {}) bulk_delete = caps.swift.get("bulk_delete", {})
is_bulk_delete_supported = bulk_delete is not None is_bulk_delete_supported = bulk_delete is not None
bulk_delete_max_per_request = bulk_delete.get( bulk_delete_max_per_request = bulk_delete.get(
"max_deletes_per_request", 100) "max_deletes_per_request", 100
)
elements = [] elements = []
for cont in self.containers(): for cont in self.containers():
@ -1058,7 +1157,8 @@ class Proxy(proxy.Proxy):
client_status_queue=client_status_queue, client_status_queue=client_status_queue,
identified_resources=identified_resources, identified_resources=identified_resources,
filters=filters, filters=filters,
resource_evaluation_fn=resource_evaluation_fn) resource_evaluation_fn=resource_evaluation_fn,
)
if need_delete: if need_delete:
if not is_bulk_delete_supported and not dry_run: if not is_bulk_delete_supported and not dry_run:
self.delete_object(obj, cont) self.delete_object(obj, cont)
@ -1083,7 +1183,8 @@ class Proxy(proxy.Proxy):
client_status_queue=client_status_queue, client_status_queue=client_status_queue,
identified_resources=identified_resources, identified_resources=identified_resources,
filters=filters, filters=filters,
resource_evaluation_fn=resource_evaluation_fn) resource_evaluation_fn=resource_evaluation_fn,
)
def _bulk_delete(self, elements, dry_run=False): def _bulk_delete(self, elements, dry_run=False):
data = "\n".join([parse.quote(x) for x in elements]) data = "\n".join([parse.quote(x) for x in elements])
@ -1093,6 +1194,6 @@ class Proxy(proxy.Proxy):
data=data, data=data,
headers={ headers={
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'Accept': 'application/json' 'Accept': 'application/json',
} },
) )

View File

@ -28,8 +28,9 @@ class Account(_base.BaseResource):
#: the account. #: the account.
account_bytes_used = resource.Header("x-account-bytes-used", type=int) account_bytes_used = resource.Header("x-account-bytes-used", type=int)
#: The number of containers. #: The number of containers.
account_container_count = resource.Header("x-account-container-count", account_container_count = resource.Header(
type=int) "x-account-container-count", type=int
)
#: The number of objects in the account. #: The number of objects in the account.
account_object_count = resource.Header("x-account-object-count", type=int) account_object_count = resource.Header("x-account-object-count", type=int)
#: The secret key value for temporary URLs. If not set, #: The secret key value for temporary URLs. If not set,

View File

@ -25,7 +25,7 @@ class Container(_base.BaseResource):
"read_ACL": "x-container-read", "read_ACL": "x-container-read",
"write_ACL": "x-container-write", "write_ACL": "x-container-write",
"sync_to": "x-container-sync-to", "sync_to": "x-container-sync-to",
"sync_key": "x-container-sync-key" "sync_key": "x-container-sync-key",
} }
base_path = "/" base_path = "/"
@ -38,9 +38,7 @@ class Container(_base.BaseResource):
allow_list = True allow_list = True
allow_head = True allow_head = True
_query_mapping = resource.QueryParameters( _query_mapping = resource.QueryParameters('prefix', 'format')
'prefix', 'format'
)
# Container body data (when id=None) # Container body data (when id=None)
#: The name of the container. #: The name of the container.
@ -54,10 +52,12 @@ class Container(_base.BaseResource):
# Container metadata (when id=name) # Container metadata (when id=name)
#: The number of objects. #: The number of objects.
object_count = resource.Header( object_count = resource.Header(
"x-container-object-count", type=int, alias='count') "x-container-object-count", type=int, alias='count'
)
#: The count of bytes used in total. #: The count of bytes used in total.
bytes_used = resource.Header( bytes_used = resource.Header(
"x-container-bytes-used", type=int, alias='bytes') "x-container-bytes-used", type=int, alias='bytes'
)
#: The timestamp of the transaction. #: The timestamp of the transaction.
timestamp = resource.Header("x-timestamp") timestamp = resource.Header("x-timestamp")
@ -94,8 +94,9 @@ class Container(_base.BaseResource):
#: If set to true, Object Storage guesses the content type based #: If set to true, Object Storage guesses the content type based
#: on the file extension and ignores the value sent in the #: on the file extension and ignores the value sent in the
#: Content-Type header, if present. *Type: bool* #: Content-Type header, if present. *Type: bool*
is_content_type_detected = resource.Header("x-detect-content-type", is_content_type_detected = resource.Header(
type=bool) "x-detect-content-type", type=bool
)
#: Storage policy used by the container. #: Storage policy used by the container.
#: It is not possible to change policy of an existing container #: It is not possible to change policy of an existing container
@ -136,9 +137,9 @@ class Container(_base.BaseResource):
:data:`Resource.allow_create` is not set to ``True``. :data:`Resource.allow_create` is not set to ``True``.
""" """
request = self._prepare_request( request = self._prepare_request(
requires_id=True, prepend_key=prepend_key, base_path=base_path) requires_id=True, prepend_key=prepend_key, base_path=base_path
response = session.put( )
request.url, headers=request.headers) response = session.put(request.url, headers=request.headers)
self._translate_response(response, has_body=False) self._translate_response(response, has_body=False)
return self return self

View File

@ -34,8 +34,12 @@ class Info(resource.Resource):
tempurl = resource.Body("tempurl", type=dict) tempurl = resource.Body("tempurl", type=dict)
def fetch( def fetch(
self, session, requires_id=False, self,
base_path=None, skip_cache=False, error_message=None session,
requires_id=False,
base_path=None,
skip_cache=False,
error_message=None,
): ):
"""Get a remote resource based on this instance. """Get a remote resource based on this instance.
@ -64,7 +68,8 @@ class Info(resource.Resource):
session = self._get_session(session) session = self._get_session(session)
endpoint = urllib.parse.urlparse(session.get_endpoint()) endpoint = urllib.parse.urlparse(session.get_endpoint())
url = "{scheme}://{netloc}/info".format( url = "{scheme}://{netloc}/info".format(
scheme=endpoint.scheme, netloc=endpoint.netloc) scheme=endpoint.scheme, netloc=endpoint.netloc
)
microversion = self._get_microversion(session, action='fetch') microversion = self._get_microversion(session, action='fetch')
response = session.get(url, microversion=microversion) response = session.get(url, microversion=microversion)

View File

@ -30,7 +30,7 @@ class Object(_base.BaseResource):
"is_content_type_detected": "x-detect-content-type", "is_content_type_detected": "x-detect-content-type",
"manifest": "x-object-manifest", "manifest": "x-object-manifest",
# Rax hack - the need CORS as different header # Rax hack - the need CORS as different header
"access_control_allow_origin": "access-control-allow-origin" "access_control_allow_origin": "access-control-allow-origin",
} }
base_path = "/%(container)s" base_path = "/%(container)s"
@ -44,10 +44,14 @@ class Object(_base.BaseResource):
allow_head = True allow_head = True
_query_mapping = resource.QueryParameters( _query_mapping = resource.QueryParameters(
'prefix', 'format', 'prefix',
'temp_url_sig', 'temp_url_expires', 'format',
'filename', 'multipart_manifest', 'symlink', 'temp_url_sig',
multipart_manifest='multipart-manifest' 'temp_url_expires',
'filename',
'multipart_manifest',
'symlink',
multipart_manifest='multipart-manifest',
) )
# Data to be passed during a POST call to create an object on the server. # Data to be passed during a POST call to create an object on the server.
@ -117,7 +121,8 @@ class Object(_base.BaseResource):
#: size of the response body. Instead it contains the size of #: size of the response body. Instead it contains the size of
#: the object, in bytes. #: the object, in bytes.
content_length = resource.Header( content_length = resource.Header(
"content-length", type=int, alias='_bytes') "content-length", type=int, alias='_bytes'
)
#: The MIME type of the object. #: The MIME type of the object.
content_type = resource.Header("content-type", alias="_content_type") content_type = resource.Header("content-type", alias="_content_type")
#: The type of ranges that the object accepts. #: The type of ranges that the object accepts.
@ -136,8 +141,9 @@ class Object(_base.BaseResource):
etag = resource.Header("etag", alias='_hash') etag = resource.Header("etag", alias='_hash')
#: Set to True if this object is a static large object manifest object. #: Set to True if this object is a static large object manifest object.
#: *Type: bool* #: *Type: bool*
is_static_large_object = resource.Header("x-static-large-object", is_static_large_object = resource.Header(
type=bool) "x-static-large-object", type=bool
)
#: If set, the value of the Content-Encoding metadata. #: If set, the value of the Content-Encoding metadata.
#: If not set, this header is not returned by this operation. #: If not set, this header is not returned by this operation.
content_encoding = resource.Header("content-encoding") content_encoding = resource.Header("content-encoding")
@ -164,9 +170,8 @@ class Object(_base.BaseResource):
#: The date and time that the object was created or the last #: The date and time that the object was created or the last
#: time that the metadata was changed. #: time that the metadata was changed.
last_modified_at = resource.Header( last_modified_at = resource.Header(
"last-modified", "last-modified", alias='_last_modified', aka='updated_at'
alias='_last_modified', )
aka='updated_at')
# Headers for PUT and POST requests # Headers for PUT and POST requests
#: Set to chunked to enable chunked transfer encoding. If used, #: Set to chunked to enable chunked transfer encoding. If used,
@ -175,8 +180,9 @@ class Object(_base.BaseResource):
#: If set to true, Object Storage guesses the content type based #: If set to true, Object Storage guesses the content type based
#: on the file extension and ignores the value sent in the #: on the file extension and ignores the value sent in the
#: Content-Type header, if present. *Type: bool* #: Content-Type header, if present. *Type: bool*
is_content_type_detected = resource.Header("x-detect-content-type", is_content_type_detected = resource.Header(
type=bool) "x-detect-content-type", type=bool
)
#: If set, this is the name of an object used to create the new #: If set, this is the name of an object used to create the new
#: object by copying the X-Copy-From object. The value is in form #: object by copying the X-Copy-From object. The value is in form
#: {container}/{object}. You must UTF-8-encode and then URL-encode #: {container}/{object}. You must UTF-8-encode and then URL-encode
@ -195,7 +201,8 @@ class Object(_base.BaseResource):
#: CORS for RAX (deviating from standard) #: CORS for RAX (deviating from standard)
access_control_allow_origin = resource.Header( access_control_allow_origin = resource.Header(
"access-control-allow-origin") "access-control-allow-origin"
)
has_body = False has_body = False
@ -209,8 +216,9 @@ class Object(_base.BaseResource):
def set_metadata(self, session, metadata): def set_metadata(self, session, metadata):
# Filter out items with empty values so the create metadata behaviour # Filter out items with empty values so the create metadata behaviour
# is the same as account and container # is the same as account and container
filtered_metadata = \ filtered_metadata = {
{key: value for key, value in metadata.items() if value} key: value for key, value in metadata.items() if value
}
# Update from remote if we only have locally created information # Update from remote if we only have locally created information
if not self.last_modified_at: if not self.last_modified_at:
@ -281,9 +289,11 @@ class Object(_base.BaseResource):
request = self._prepare_request() request = self._prepare_request()
response = session.post( response = session.post(
request.url, headers=self._calculate_headers(metadata)) request.url, headers=self._calculate_headers(metadata)
)
exceptions.raise_from_response( exceptions.raise_from_response(
response, error_message="Error deleting metadata keys") response, error_message="Error deleting metadata keys"
)
# Only delete from local object if the remote delete was successful # Only delete from local object if the remote delete was successful
for key in attr_keys_to_delete: for key in attr_keys_to_delete:
@ -296,7 +306,8 @@ class Object(_base.BaseResource):
request = self._prepare_request() request = self._prepare_request()
response = session.get( response = session.get(
request.url, headers=request.headers, stream=stream) request.url, headers=request.headers, stream=stream
)
exceptions.raise_from_response(response, error_message=error_message) exceptions.raise_from_response(response, error_message=error_message)
return response return response
@ -306,16 +317,15 @@ class Object(_base.BaseResource):
def stream(self, session, error_message=None, chunk_size=1024): def stream(self, session, error_message=None, chunk_size=1024):
response = self._download( response = self._download(
session, error_message=error_message, stream=True) session, error_message=error_message, stream=True
)
return response.iter_content(chunk_size, decode_unicode=False) return response.iter_content(chunk_size, decode_unicode=False)
def create(self, session, base_path=None, **params): def create(self, session, base_path=None, **params):
request = self._prepare_request(base_path=base_path) request = self._prepare_request(base_path=base_path)
response = session.put( response = session.put(
request.url, request.url, data=self.data, headers=request.headers
data=self.data,
headers=request.headers
) )
self._translate_response(response, has_body=False) self._translate_response(response, has_body=False)
return self return self
@ -339,6 +349,5 @@ class Object(_base.BaseResource):
headers['multipart-manifest'] = 'delete' headers['multipart-manifest'] = 'delete'
return session.delete( return session.delete(
request.url, request.url, headers=headers, microversion=microversion
headers=headers, )
microversion=microversion)

View File

@ -14,7 +14,6 @@ from openstack.tests.functional import base
class TestAccount(base.BaseFunctionalTest): class TestAccount(base.BaseFunctionalTest):
def setUp(self): def setUp(self):
super(TestAccount, self).setUp() super(TestAccount, self).setUp()
self.require_service('object-store') self.require_service('object-store')

View File

@ -15,7 +15,6 @@ from openstack.tests.functional import base
class TestContainer(base.BaseFunctionalTest): class TestContainer(base.BaseFunctionalTest):
def setUp(self): def setUp(self):
super(TestContainer, self).setUp() super(TestContainer, self).setUp()
self.require_service('object-store') self.require_service('object-store')
@ -24,7 +23,9 @@ class TestContainer(base.BaseFunctionalTest):
container = self.conn.object_store.create_container(name=self.NAME) container = self.conn.object_store.create_container(name=self.NAME)
self.addEmptyCleanup( self.addEmptyCleanup(
self.conn.object_store.delete_container, self.conn.object_store.delete_container,
self.NAME, ignore_missing=False) self.NAME,
ignore_missing=False,
)
assert isinstance(container, _container.Container) assert isinstance(container, _container.Container)
self.assertEqual(self.NAME, container.name) self.assertEqual(self.NAME, container.name)
@ -43,21 +44,24 @@ class TestContainer(base.BaseFunctionalTest):
self.assertIsNone(container.read_ACL) self.assertIsNone(container.read_ACL)
self.assertIsNone(container.write_ACL) self.assertIsNone(container.write_ACL)
self.conn.object_store.set_container_metadata( self.conn.object_store.set_container_metadata(
container, read_ACL='.r:*', write_ACL='demo:demo') container, read_ACL='.r:*', write_ACL='demo:demo'
)
container = self.conn.object_store.get_container_metadata(self.NAME) container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertEqual('.r:*', container.read_ACL) self.assertEqual('.r:*', container.read_ACL)
self.assertEqual('demo:demo', container.write_ACL) self.assertEqual('demo:demo', container.write_ACL)
# update system metadata # update system metadata
self.conn.object_store.set_container_metadata( self.conn.object_store.set_container_metadata(
container, read_ACL='.r:demo') container, read_ACL='.r:demo'
)
container = self.conn.object_store.get_container_metadata(self.NAME) container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertEqual('.r:demo', container.read_ACL) self.assertEqual('.r:demo', container.read_ACL)
self.assertEqual('demo:demo', container.write_ACL) self.assertEqual('demo:demo', container.write_ACL)
# set system metadata and custom metadata # set system metadata and custom metadata
self.conn.object_store.set_container_metadata( self.conn.object_store.set_container_metadata(
container, k0='v0', sync_key='1234') container, k0='v0', sync_key='1234'
)
container = self.conn.object_store.get_container_metadata(self.NAME) container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertTrue(container.metadata) self.assertTrue(container.metadata)
self.assertIn('k0', container.metadata) self.assertIn('k0', container.metadata)
@ -67,8 +71,9 @@ class TestContainer(base.BaseFunctionalTest):
self.assertEqual('1234', container.sync_key) self.assertEqual('1234', container.sync_key)
# unset system metadata # unset system metadata
self.conn.object_store.delete_container_metadata(container, self.conn.object_store.delete_container_metadata(
['sync_key']) container, ['sync_key']
)
container = self.conn.object_store.get_container_metadata(self.NAME) container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertTrue(container.metadata) self.assertTrue(container.metadata)
self.assertIn('k0', container.metadata) self.assertIn('k0', container.metadata)

View File

@ -26,19 +26,25 @@ class TestObject(base.BaseFunctionalTest):
self.conn.object_store.create_container(name=self.FOLDER) self.conn.object_store.create_container(name=self.FOLDER)
self.addCleanup(self.conn.object_store.delete_container, self.FOLDER) self.addCleanup(self.conn.object_store.delete_container, self.FOLDER)
self.sot = self.conn.object_store.upload_object( self.sot = self.conn.object_store.upload_object(
container=self.FOLDER, name=self.FILE, data=self.DATA) container=self.FOLDER, name=self.FILE, data=self.DATA
)
self.addEmptyCleanup( self.addEmptyCleanup(
self.conn.object_store.delete_object, self.sot, self.conn.object_store.delete_object,
ignore_missing=False) self.sot,
ignore_missing=False,
)
def test_list(self): def test_list(self):
names = [o.name for o names = [
in self.conn.object_store.objects(container=self.FOLDER)] o.name
for o in self.conn.object_store.objects(container=self.FOLDER)
]
self.assertIn(self.FILE, names) self.assertIn(self.FILE, names)
def test_download_object(self): def test_download_object(self):
result = self.conn.object_store.download_object( result = self.conn.object_store.download_object(
self.FILE, container=self.FOLDER) self.FILE, container=self.FOLDER
)
self.assertEqual(self.DATA, result) self.assertEqual(self.DATA, result)
result = self.conn.object_store.download_object(self.sot) result = self.conn.object_store.download_object(self.sot)
self.assertEqual(self.DATA, result) self.assertEqual(self.DATA, result)
@ -46,25 +52,29 @@ class TestObject(base.BaseFunctionalTest):
def test_system_metadata(self): def test_system_metadata(self):
# get system metadata # get system metadata
obj = self.conn.object_store.get_object_metadata( obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER) self.FILE, container=self.FOLDER
)
# TODO(shade) obj.bytes is coming up None on python3 but not python2 # TODO(shade) obj.bytes is coming up None on python3 but not python2
# self.assertGreaterEqual(0, obj.bytes) # self.assertGreaterEqual(0, obj.bytes)
self.assertIsNotNone(obj.etag) self.assertIsNotNone(obj.etag)
# set system metadata # set system metadata
obj = self.conn.object_store.get_object_metadata( obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER) self.FILE, container=self.FOLDER
)
self.assertIsNone(obj.content_disposition) self.assertIsNone(obj.content_disposition)
self.assertIsNone(obj.content_encoding) self.assertIsNone(obj.content_encoding)
self.conn.object_store.set_object_metadata( self.conn.object_store.set_object_metadata(
obj, content_disposition='attachment', content_encoding='gzip') obj, content_disposition='attachment', content_encoding='gzip'
)
obj = self.conn.object_store.get_object_metadata(obj) obj = self.conn.object_store.get_object_metadata(obj)
self.assertEqual('attachment', obj.content_disposition) self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('gzip', obj.content_encoding) self.assertEqual('gzip', obj.content_encoding)
# update system metadata # update system metadata
self.conn.object_store.set_object_metadata( self.conn.object_store.set_object_metadata(
obj, content_encoding='deflate') obj, content_encoding='deflate'
)
obj = self.conn.object_store.get_object_metadata(obj) obj = self.conn.object_store.get_object_metadata(obj)
self.assertEqual('attachment', obj.content_disposition) self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('deflate', obj.content_encoding) self.assertEqual('deflate', obj.content_encoding)
@ -79,7 +89,8 @@ class TestObject(base.BaseFunctionalTest):
# unset more system metadata # unset more system metadata
self.conn.object_store.delete_object_metadata( self.conn.object_store.delete_object_metadata(
obj, keys=['content_disposition']) obj, keys=['content_disposition']
)
obj = self.conn.object_store.get_object_metadata(obj) obj = self.conn.object_store.get_object_metadata(obj)
self.assertIn('k0', obj.metadata) self.assertIn('k0', obj.metadata)
self.assertEqual('v0', obj.metadata['k0']) self.assertEqual('v0', obj.metadata['k0'])
@ -90,7 +101,8 @@ class TestObject(base.BaseFunctionalTest):
def test_custom_metadata(self): def test_custom_metadata(self):
# get custom metadata # get custom metadata
obj = self.conn.object_store.get_object_metadata( obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER) self.FILE, container=self.FOLDER
)
self.assertFalse(obj.metadata) self.assertFalse(obj.metadata)
# set no custom metadata # set no custom metadata
@ -112,8 +124,9 @@ class TestObject(base.BaseFunctionalTest):
self.assertEqual('v1', obj.metadata['k1']) self.assertEqual('v1', obj.metadata['k1'])
# set more custom metadata by named object and container # set more custom metadata by named object and container
self.conn.object_store.set_object_metadata(self.FILE, self.FOLDER, self.conn.object_store.set_object_metadata(
k2='v2') self.FILE, self.FOLDER, k2='v2'
)
obj = self.conn.object_store.get_object_metadata(obj) obj = self.conn.object_store.get_object_metadata(obj)
self.assertTrue(obj.metadata) self.assertTrue(obj.metadata)
self.assertEqual(2, len(obj.metadata)) self.assertEqual(2, len(obj.metadata))

View File

@@ -24,12 +24,11 @@ ACCOUNT_EXAMPLE = {
    'x-account-container-count': '678',
    'content-type': 'text/plain; charset=utf-8',
    'x-account-object-count': '98765',
    'x-timestamp': '1453413555.88937',
}


class TestAccount(base.TestCase):
    def setUp(self):
        super(TestAccount, self).setUp()
        self.endpoint = self.cloud.object_store.get_endpoint() + '/'

@@ -49,28 +48,41 @@ class TestAccount(base.TestCase):
    def test_make_it(self):
        sot = account.Account(**ACCOUNT_EXAMPLE)
        self.assertIsNone(sot.id)
        self.assertEqual(
            int(ACCOUNT_EXAMPLE['x-account-bytes-used']),
            sot.account_bytes_used,
        )
        self.assertEqual(
            int(ACCOUNT_EXAMPLE['x-account-container-count']),
            sot.account_container_count,
        )
        self.assertEqual(
            int(ACCOUNT_EXAMPLE['x-account-object-count']),
            sot.account_object_count,
        )
        self.assertEqual(ACCOUNT_EXAMPLE['x-timestamp'], sot.timestamp)

    def test_set_temp_url_key(self):
        sot = account.Account()
        key = 'super-secure-key'

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-account-meta-temp-url-key': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.endpoint,
                    headers={'x-account-meta-temp-url-key': key},
                ),
            ]
        )
        sot.set_temp_url_key(self.cloud.object_store, key)
        self.assert_calls()

@@ -78,15 +90,22 @@ class TestAccount(base.TestCase):
        sot = account.Account()
        key = 'super-secure-key'

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-account-meta-temp-url-key-2': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.endpoint,
                    headers={'x-account-meta-temp-url-key-2': key},
                ),
            ]
        )
        sot.set_temp_url_key(self.cloud.object_store, key, secondary=True)
        self.assert_calls()
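The two account tests above mock the POST-then-HEAD round trip the SDK performs when storing a temp URL signing key. In normal use this is one call per key slot; a rough sketch with a placeholder cloud name and placeholder keys:

import openstack

conn = openstack.connect(cloud='example-cloud')

# The primary key is stored under x-account-meta-temp-url-key and the
# secondary slot under x-account-meta-temp-url-key-2, which allows key
# rotation without immediately invalidating previously signed URLs.
conn.object_store.set_account_temp_url_key('correcthorsebatterystaple')
conn.object_store.set_account_temp_url_key('rotated-key', secondary=True)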


@@ -17,13 +17,13 @@ from openstack.tests.unit import base


class TestContainer(base.TestCase):
    def setUp(self):
        super(TestContainer, self).setUp()
        self.container = self.getUniqueString()
        self.endpoint = self.cloud.object_store.get_endpoint() + '/'
        self.container_endpoint = '{endpoint}{container}'.format(
            endpoint=self.endpoint, container=self.container
        )

        self.body = {
            "count": 2,
@@ -42,7 +42,7 @@ class TestContainer(base.TestCase):
            'x-history-location': 'history-location',
            'content-type': 'application/json; charset=utf-8',
            'x-timestamp': '1453414055.48672',
            'x-storage-policy': 'Gold',
        }
        self.body_plus_headers = dict(self.body, **self.headers)

@@ -81,49 +81,44 @@ class TestContainer(base.TestCase):
        # Attributes from header
        self.assertEqual(
            int(self.body_plus_headers['x-container-object-count']),
            sot.object_count,
        )
        self.assertEqual(
            int(self.body_plus_headers['x-container-bytes-used']),
            sot.bytes_used,
        )
        self.assertEqual(
            self.body_plus_headers['x-container-read'], sot.read_ACL
        )
        self.assertEqual(
            self.body_plus_headers['x-container-write'], sot.write_ACL
        )
        self.assertEqual(
            self.body_plus_headers['x-container-sync-to'], sot.sync_to
        )
        self.assertEqual(
            self.body_plus_headers['x-container-sync-key'], sot.sync_key
        )
        self.assertEqual(
            self.body_plus_headers['x-versions-location'],
            sot.versions_location,
        )
        self.assertEqual(
            self.body_plus_headers['x-history-location'], sot.history_location
        )
        self.assertEqual(self.body_plus_headers['x-timestamp'], sot.timestamp)
        self.assertEqual(
            self.body_plus_headers['x-storage-policy'], sot.storage_policy
        )

    def test_list(self):
        containers = [
            {"count": 999, "bytes": 12345, "name": "container1"},
            {"count": 888, "bytes": 54321, "name": "container2"},
        ]
        self.register_uris(
            [dict(method='GET', uri=self.endpoint, json=containers)]
        )

        response = container.Container.list(self.cloud.object_store)

@@ -144,25 +139,32 @@ class TestContainer(base.TestCase):
            "x-container-read": "some ACL",
            "x-container-write": "another ACL",
            "x-detect-content-type": 'True',
            "X-Container-Meta-foo": "bar",
        }
        self.register_uris(
            [
                dict(
                    method=sess_method,
                    uri=self.container_endpoint,
                    json=self.body,
                    validate=dict(headers=headers),
                ),
            ]
        )
        sot_call(self.cloud.object_store)

        self.assert_calls()

    def test_create(self):
        sot = container.Container.new(
            name=self.container, metadata={'foo': 'bar'}
        )
        self._test_create_update(sot, sot.create, 'PUT')

    def test_commit(self):
        sot = container.Container.new(
            name=self.container, metadata={'foo': 'bar'}
        )
        self._test_create_update(sot, sot.commit, 'POST')

    def test_to_dict_recursion(self):
@@ -200,15 +202,22 @@ class TestContainer(base.TestCase):
                'versions_location': None,
                'history_location': None,
                'write_ACL': None,
                'storage_policy': None,
            },
            json.loads(json.dumps(sot)),
        )

    def _test_no_headers(self, sot, sot_call, sess_method):
        headers = {}
        self.register_uris(
            [
                dict(
                    method=sess_method,
                    uri=self.container_endpoint,
                    validate=dict(headers=headers),
                )
            ]
        )
        sot_call(self.cloud.object_store)

    def test_create_no_headers(self):
@@ -225,16 +234,23 @@ class TestContainer(base.TestCase):
        sot = container.Container.new(name=self.container)
        key = self.getUniqueString()

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.container_endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-container-meta-temp-url-key': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.container_endpoint,
                    headers={'x-container-meta-temp-url-key': key},
                ),
            ]
        )
        sot.set_temp_url_key(self.cloud.object_store, key)
        self.assert_calls()

@@ -242,15 +258,22 @@ class TestContainer(base.TestCase):
        sot = container.Container.new(name=self.container)
        key = self.getUniqueString()

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.container_endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-container-meta-temp-url-key-2': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.container_endpoint,
                    headers={'x-container-meta-temp-url-key-2': key},
                ),
            ]
        )
        sot.set_temp_url_key(self.cloud.object_store, key, secondary=True)
        self.assert_calls()


@@ -27,7 +27,6 @@ from openstack.tests.unit.cloud import test_object as base_test_object


class TestObject(base_test_object.BaseTestObject):
    def setUp(self):
        super(TestObject, self).setUp()
        self.the_data = b'test body'
@@ -39,7 +38,7 @@ class TestObject(base_test_object.BaseTestObject):
            "last_modified": "2014-07-13T18:41:03.319240",
            "bytes": self.the_data_length,
            "name": self.object,
            "content_type": "application/octet-stream",
        }
        self.headers = {
            'Content-Length': str(len(self.the_data)),
@@ -78,9 +77,9 @@ class TestObject(base_test_object.BaseTestObject):
                'prefix': 'prefix',
                'symlink': 'symlink',
                'temp_url_expires': 'temp_url_expires',
                'temp_url_sig': 'temp_url_sig',
            },
            sot._query_mapping._mapping,
        )

    def test_new(self):
@@ -95,8 +94,7 @@ class TestObject(base_test_object.BaseTestObject):
        # Attributes from header
        self.assertEqual(self.container, sot.container)
        self.assertEqual(int(self.body['bytes']), sot.content_length)
        self.assertEqual(self.body['last_modified'], sot.last_modified_at)
        self.assertEqual(self.body['hash'], sot.etag)
        self.assertEqual(self.body['content_type'], sot.content_type)
@@ -108,7 +106,8 @@ class TestObject(base_test_object.BaseTestObject):
        # Attributes from header
        self.assertEqual(self.container, sot.container)
        self.assertEqual(
            int(self.headers['Content-Length']), sot.content_length
        )
        self.assertEqual(self.headers['Accept-Ranges'], sot.accept_ranges)
        self.assertEqual(self.headers['Last-Modified'], sot.last_modified_at)
        self.assertEqual(self.headers['Etag'], sot.etag)
@@ -129,16 +128,19 @@ class TestObject(base_test_object.BaseTestObject):
        headers = {
            'X-Newest': 'True',
            'If-Match': self.headers['Etag'],
            'Accept': '*/*',
        }
        self.register_uris(
            [
                dict(
                    method='GET',
                    uri=self.object_endpoint,
                    headers=self.headers,
                    content=self.the_data,
                    validate=dict(headers=headers),
                )
            ]
        )
        sot = obj.Object.new(container=self.container, name=self.object)
        sot.is_newest = True
        # if_match is a list type, but we're passing a string. This tests
@@ -153,19 +155,23 @@ class TestObject(base_test_object.BaseTestObject):
    def _test_create(self, method, data):
        sot = obj.Object.new(
            container=self.container,
            name=self.object,
            data=data,
            metadata={'foo': 'bar'},
        )
        sot.is_newest = True
        sent_headers = {"x-newest": 'True', "X-Object-Meta-foo": "bar"}
        self.register_uris(
            [
                dict(
                    method=method,
                    uri=self.object_endpoint,
                    headers=self.headers,
                    validate=dict(headers=sent_headers),
                )
            ]
        )

        rv = sot.create(self.cloud.object_store)
        self.assertEqual(rv.etag, self.headers['Etag'])


@@ -47,24 +47,30 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
        self.container = self.getUniqueString()
        self.endpoint = self.cloud.object_store.get_endpoint() + '/'
        self.container_endpoint = '{endpoint}{container}'.format(
            endpoint=self.endpoint, container=self.container
        )

    def test_account_metadata_get(self):
        self.verify_head(
            self.proxy.get_account_metadata, account.Account, method_args=[]
        )

    def test_container_metadata_get(self):
        self.verify_head(
            self.proxy.get_container_metadata,
            container.Container,
            method_args=["container"],
        )

    def test_container_delete(self):
        self.verify_delete(
            self.proxy.delete_container, container.Container, False
        )

    def test_container_delete_ignore(self):
        self.verify_delete(
            self.proxy.delete_container, container.Container, True
        )

    def test_container_create_attrs(self):
        self.verify_create(
@@ -72,7 +78,8 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
            container.Container,
            method_args=['container_name'],
            expected_args=[],
            expected_kwargs={'name': 'container_name', "x": 1, "y": 2, "z": 3},
        )

    def test_object_metadata_get(self):
        self._verify(
@@ -81,7 +88,8 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
            method_args=['object'],
            method_kwargs={'container': 'container'},
            expected_args=[obj.Object, 'object'],
            expected_kwargs={'container': 'container'},
        )

    def _test_object_delete(self, ignore):
        expected_kwargs = {
@@ -95,7 +103,8 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
            method_args=["resource"],
            method_kwargs=expected_kwargs,
            expected_args=[obj.Object, "resource"],
            expected_kwargs=expected_kwargs,
        )

    def test_object_delete(self):
        self._test_object_delete(False)
@@ -108,7 +117,7 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
            "name": "test",
            "data": "data",
            "container": "name",
            "metadata": {},
        }

        self._verify(
@@ -116,52 +125,57 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
            self.proxy.upload_object,
            method_kwargs=kwargs,
            expected_args=[obj.Object],
            expected_kwargs=kwargs,
        )

    def test_object_create_no_container(self):
        self.assertRaises(TypeError, self.proxy.upload_object)

    def test_object_get(self):
        with requests_mock.Mocker() as m:
            m.get("%scontainer/object" % self.endpoint, text="data")
            res = self.proxy.get_object("object", container="container")
            self.assertIsNone(res.data)

    def test_object_get_write_file(self):
        with requests_mock.Mocker() as m:
            m.get("%scontainer/object" % self.endpoint, text="data")
            with tempfile.NamedTemporaryFile() as f:
                self.proxy.get_object(
                    "object", container="container", outfile=f.name
                )
                dt = open(f.name).read()
                self.assertEqual(dt, "data")

    def test_object_get_remember_content(self):
        with requests_mock.Mocker() as m:
            m.get("%scontainer/object" % self.endpoint, text="data")
            res = self.proxy.get_object(
                "object", container="container", remember_content=True
            )
            self.assertEqual(res.data, "data")

    def test_set_temp_url_key(self):
        key = 'super-secure-key'

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-account-meta-temp-url-key': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.endpoint,
                    headers={'x-account-meta-temp-url-key': key},
                ),
            ]
        )
        self.proxy.set_account_temp_url_key(key)
        self.assert_calls()

@@ -169,16 +183,23 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
        key = 'super-secure-key'

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-account-meta-temp-url-key-2': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.endpoint,
                    headers={'x-account-meta-temp-url-key-2': key},
                ),
            ]
        )
        self.proxy.set_account_temp_url_key(key, secondary=True)
        self.assert_calls()

@@ -186,16 +207,23 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
        key = 'super-secure-key'

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.container_endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-container-meta-temp-url-key': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.container_endpoint,
                    headers={'x-container-meta-temp-url-key': key},
                ),
            ]
        )
        self.proxy.set_container_temp_url_key(self.container, key)
        self.assert_calls()

@@ -203,18 +231,26 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
        key = 'super-secure-key'

        self.register_uris(
            [
                dict(
                    method='POST',
                    uri=self.container_endpoint,
                    status_code=204,
                    validate=dict(
                        headers={'x-container-meta-temp-url-key-2': key}
                    ),
                ),
                dict(
                    method='HEAD',
                    uri=self.container_endpoint,
                    headers={'x-container-meta-temp-url-key-2': key},
                ),
            ]
        )
        self.proxy.set_container_temp_url_key(
            self.container, key, secondary=True
        )
        self.assert_calls()

    def test_copy_object(self):
@@ -222,9 +258,10 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
    def test_file_segment(self):
        file_size = 4200
        content = ''.join(
            random.choice(string.ascii_uppercase + string.digits)
            for _ in range(file_size)
        ).encode('latin-1')
        self.imagefile = tempfile.NamedTemporaryFile(delete=False)
        self.imagefile.write(content)
        self.imagefile.close()
@@ -233,50 +270,60 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
            endpoint='test_container/test_image',
            filename=self.imagefile.name,
            file_size=file_size,
            segment_size=1000,
        )
        self.assertEqual(len(segments), 5)
        segment_content = b''
        for (index, (name, segment)) in enumerate(segments.items()):
            self.assertEqual(
                'test_container/test_image/{index:0>6}'.format(index=index),
                name,
            )
            segment_content += segment.read()
        self.assertEqual(content, segment_content)
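A rough illustration of the arithmetic this segment test relies on: with a 4200-byte file and a 1000-byte segment size, segmenting yields five pieces whose names carry a zero-padded index, as checked above.

import math

file_size = 4200
segment_size = 1000

# Four full 1000-byte segments plus a final 200-byte remainder.
segment_count = math.ceil(file_size / segment_size)
segment_names = [
    'test_container/test_image/{index:0>6}'.format(index=index)
    for index in range(segment_count)
]
print(segment_count)     # 5
print(segment_names[0])  # test_container/test_image/000000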
class TestDownloadObject(base_test_object.BaseTestObject):
    def setUp(self):
        super(TestDownloadObject, self).setUp()
        self.the_data = b'test body'
        self.register_uris(
            [
                dict(
                    method='GET',
                    uri=self.object_endpoint,
                    headers={
                        'Content-Length': str(len(self.the_data)),
                        'Content-Type': 'application/octet-stream',
                        'Accept-Ranges': 'bytes',
                        'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
                        'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
                        'X-Timestamp': '1481808853.65009',
                        'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
                        'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
                        'X-Static-Large-Object': 'True',
                        'X-Object-Meta-Mtime': '1481513709.168512',
                    },
                    content=self.the_data,
                )
            ]
        )

    def test_download(self):
        data = self.cloud.object_store.download_object(
            self.object, container=self.container
        )
        self.assertEqual(data, self.the_data)
        self.assert_calls()

    def test_stream(self):
        chunk_size = 2
        for index, chunk in enumerate(
            self.cloud.object_store.stream_object(
                self.object, container=self.container, chunk_size=chunk_size
            )
        ):
            chunk_len = len(chunk)
            start = index * chunk_size
            end = start + chunk_len
@@ -290,12 +337,17 @@ class TestExtractName(TestObjectStoreProxy):

    scenarios = [
        ('discovery', dict(url='/', parts=['account'])),
        ('endpoints', dict(url='/endpoints', parts=['endpoints'])),
        (
            'container',
            dict(url='/AUTH_123/container_name', parts=['container']),
        ),
        ('object', dict(url='/container_name/object_name', parts=['object'])),
        (
            'object_long',
            dict(
                url='/v1/AUTH_123/cnt/path/deep/object_name', parts=['object']
            ),
        ),
    ]

    def test_extract_name(self):
@@ -307,36 +359,44 @@ class TestTempURL(TestObjectStoreProxy):


class TestTempURL(TestObjectStoreProxy):
    expires_iso8601_format = '%Y-%m-%dT%H:%M:%SZ'
    short_expires_iso8601_format = '%Y-%m-%d'
    time_errmsg = (
        'time must either be a whole number or in specific ' 'ISO 8601 format.'
    )
    path_errmsg = 'path must be full path to an object e.g. /v1/a/c/o'
    url = '/v1/AUTH_account/c/o'
    seconds = 3600
    key = 'correcthorsebatterystaple'
    method = 'GET'
    expected_url = url + (
        '?temp_url_sig=temp_url_signature' '&temp_url_expires=1400003600'
    )
    expected_body = '\n'.join(
        [
            method,
            '1400003600',
            url,
        ]
    ).encode('utf-8')

    @mock.patch('hmac.HMAC')
    @mock.patch('time.time', return_value=1400000000)
    def test_generate_temp_url(self, time_mock, hmac_mock):
        hmac_mock().hexdigest.return_value = 'temp_url_signature'
        url = self.proxy.generate_temp_url(
            self.url, self.seconds, self.method, temp_url_key=self.key
        )
        key = self.key
        if not isinstance(key, bytes):
            key = key.encode('utf-8')
        self.assertEqual(url, self.expected_url)
        self.assertEqual(
            hmac_mock.mock_calls,
            [
                mock.call(),
                mock.call(key, self.expected_body, sha1),
                mock.call().hexdigest(),
            ],
        )
        self.assertIsInstance(url, type(self.url))
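The expected URL in this test follows Swift's temp URL scheme: the signature is an HMAC-SHA1 over the newline-joined method, expiry timestamp, and path, keyed with the stored temp URL key, and it is appended to the path along with the expiry as query parameters. A standalone sketch using only the standard library, mirroring the test's placeholder values:

import hmac
from hashlib import sha1

key = b'correcthorsebatterystaple'
method = 'GET'
expires = 1400003600
path = '/v1/AUTH_account/c/o'

# HMAC body: "GET\n1400003600\n/v1/AUTH_account/c/o"
body = '\n'.join([method, str(expires), path]).encode('utf-8')
signature = hmac.new(key, body, sha1).hexdigest()

temp_url = '{path}?temp_url_sig={sig}&temp_url_expires={exp}'.format(
    path=path, sig=signature, exp=expires
)
print(temp_url)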
    @mock.patch('hmac.HMAC')
@@ -344,62 +404,83 @@ class TestTempURL(TestObjectStoreProxy):
    def test_generate_temp_url_ip_range(self, time_mock, hmac_mock):
        hmac_mock().hexdigest.return_value = 'temp_url_signature'
        ip_ranges = [
            '1.2.3.4',
            '1.2.3.4/24',
            '2001:db8::',
            b'1.2.3.4',
            b'1.2.3.4/24',
            b'2001:db8::',
        ]
        path = '/v1/AUTH_account/c/o/'
        expected_url = path + (
            '?temp_url_sig=temp_url_signature'
            '&temp_url_expires=1400003600'
            '&temp_url_ip_range='
        )
        for ip_range in ip_ranges:
            hmac_mock.reset_mock()

            url = self.proxy.generate_temp_url(
                path,
                self.seconds,
                self.method,
                temp_url_key=self.key,
                ip_range=ip_range,
            )
            key = self.key
            if not isinstance(key, bytes):
                key = key.encode('utf-8')

            if isinstance(ip_range, bytes):
                ip_range_expected_url = expected_url + ip_range.decode('utf-8')
                expected_body = '\n'.join(
                    [
                        'ip=' + ip_range.decode('utf-8'),
                        self.method,
                        '1400003600',
                        path,
                    ]
                ).encode('utf-8')
            else:
                ip_range_expected_url = expected_url + ip_range
                expected_body = '\n'.join(
                    [
                        'ip=' + ip_range,
                        self.method,
                        '1400003600',
                        path,
                    ]
                ).encode('utf-8')

            self.assertEqual(url, ip_range_expected_url)
            self.assertEqual(
                hmac_mock.mock_calls,
                [
                    mock.call(key, expected_body, sha1),
                    mock.call().hexdigest(),
                ],
            )
            self.assertIsInstance(url, type(path))

    @mock.patch('hmac.HMAC')
    def test_generate_temp_url_iso8601_argument(self, hmac_mock):
        hmac_mock().hexdigest.return_value = 'temp_url_signature'
        url = self.proxy.generate_temp_url(
            self.url,
            '2014-05-13T17:53:20Z',
            self.method,
            temp_url_key=self.key,
        )
        self.assertEqual(url, self.expected_url)

        # Don't care about absolute arg.
        url = self.proxy.generate_temp_url(
            self.url,
            '2014-05-13T17:53:20Z',
            self.method,
            temp_url_key=self.key,
            absolute=True,
        )
        self.assertEqual(url, self.expected_url)

        lt = time.localtime()
@@ -407,14 +488,16 @@ class TestTempURL(TestObjectStoreProxy):
        if not isinstance(self.expected_url, str):
            expected_url = self.expected_url.replace(
                b'1400003600',
                bytes(str(int(time.mktime(lt))), encoding='ascii'),
            )
        else:
            expected_url = self.expected_url.replace(
                '1400003600', str(int(time.mktime(lt)))
            )
        url = self.proxy.generate_temp_url(
            self.url, expires, self.method, temp_url_key=self.key
        )
        self.assertEqual(url, expected_url)

        expires = time.strftime(self.short_expires_iso8601_format, lt)
@@ -422,39 +505,48 @@ class TestTempURL(TestObjectStoreProxy):
        if not isinstance(self.expected_url, str):
            expected_url = self.expected_url.replace(
                b'1400003600',
                bytes(str(int(time.mktime(lt))), encoding='ascii'),
            )
        else:
            expected_url = self.expected_url.replace(
                '1400003600', str(int(time.mktime(lt)))
            )
        url = self.proxy.generate_temp_url(
            self.url, expires, self.method, temp_url_key=self.key
        )
        self.assertEqual(url, expected_url)

    @mock.patch('hmac.HMAC')
    @mock.patch('time.time', return_value=1400000000)
    def test_generate_temp_url_iso8601_output(self, time_mock, hmac_mock):
        hmac_mock().hexdigest.return_value = 'temp_url_signature'
        url = self.proxy.generate_temp_url(
            self.url,
            self.seconds,
            self.method,
            temp_url_key=self.key,
            iso8601=True,
        )
        key = self.key
        if not isinstance(key, bytes):
            key = key.encode('utf-8')

        expires = time.strftime(
            self.expires_iso8601_format, time.gmtime(1400003600)
        )
        if not isinstance(self.url, str):
            self.assertTrue(url.endswith(bytes(expires, 'utf-8')))
        else:
            self.assertTrue(url.endswith(expires))
        self.assertEqual(
            hmac_mock.mock_calls,
            [
                mock.call(),
                mock.call(key, self.expected_body, sha1),
                mock.call().hexdigest(),
            ],
        )
        self.assertIsInstance(url, type(self.url))

    @mock.patch('hmac.HMAC')
@@ -465,25 +557,36 @@ class TestTempURL(TestObjectStoreProxy):
        for p in prefixes:
            hmac_mock.reset_mock()
            path = '/v1/AUTH_account/c/' + p
            expected_url = path + (
                '?temp_url_sig=temp_url_signature'
                '&temp_url_expires=1400003600'
                '&temp_url_prefix=' + p
            )
            expected_body = '\n'.join(
                [
                    self.method,
                    '1400003600',
                    'prefix:' + path,
                ]
            ).encode('utf-8')
            url = self.proxy.generate_temp_url(
                path,
                self.seconds,
                self.method,
                prefix=True,
                temp_url_key=self.key,
            )
            key = self.key
            if not isinstance(key, bytes):
                key = key.encode('utf-8')
            self.assertEqual(url, expected_url)
            self.assertEqual(
                hmac_mock.mock_calls,
                [
                    mock.call(key, expected_body, sha1),
                    mock.call().hexdigest(),
                ],
            )
            self.assertIsInstance(url, type(path))
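For the ip-range and prefix variants above, the signed string simply gains extra components and the query string gains matching parameters; a brief extension of the previous sketch, with the same placeholder key and timestamp and a placeholder prefix:

import hmac
from hashlib import sha1

key = b'correcthorsebatterystaple'
expires = 1400003600

# Prefix-scoped: the HMAC covers 'prefix:<path prefix>', and the URL
# carries temp_url_prefix so it matches any object under that prefix.
prefix_path = '/v1/AUTH_account/c/pre'  # 'pre' is a placeholder prefix
prefix_body = '\n'.join(['GET', str(expires), 'prefix:' + prefix_path])
prefix_sig = hmac.new(key, prefix_body.encode('utf-8'), sha1).hexdigest()

# IP-restricted: an 'ip=<range>' line is prepended to the body, and the
# URL carries temp_url_ip_range.
ip_body = '\n'.join(
    ['ip=1.2.3.4/24', 'GET', str(expires), '/v1/AUTH_account/c/o']
)
ip_sig = hmac.new(key, ip_body.encode('utf-8'), sha1).hexdigest()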
@@ -491,94 +594,142 @@ class TestTempURL(TestObjectStoreProxy):
        self.assertRaisesRegex(
            ValueError,
            'path must be representable as UTF-8',
            self.proxy.generate_temp_url,
            b'/v1/a/c/\xff',
            self.seconds,
            self.method,
            temp_url_key=self.key,
        )

    @mock.patch('hmac.HMAC.hexdigest', return_value="temp_url_signature")
    def test_generate_absolute_expiry_temp_url(self, hmac_mock):
        if isinstance(self.expected_url, bytes):
            expected_url = self.expected_url.replace(
                b'1400003600', b'2146636800'
            )
        else:
            expected_url = self.expected_url.replace(
                u'1400003600', u'2146636800'
            )
        url = self.proxy.generate_temp_url(
            self.url,
            2146636800,
            self.method,
            absolute=True,
            temp_url_key=self.key,
        )
        self.assertEqual(url, expected_url)

    def test_generate_temp_url_bad_time(self):
        for bad_time in [
            'not_an_int',
            -1,
            1.1,
            '-1',
            '1.1',
            '2015-05',
            '2015-05-01T01:00',
        ]:
            self.assertRaisesRegex(
                ValueError,
                self.time_errmsg,
                self.proxy.generate_temp_url,
                self.url,
                bad_time,
                self.method,
                temp_url_key=self.key,
            )

    def test_generate_temp_url_bad_path(self):
        for bad_path in [
            '/v1/a/c',
            'v1/a/c/o',
            'blah/v1/a/c/o',
            '/v1//c/o',
            '/v1/a/c/',
            '/v1/a/c',
        ]:
            self.assertRaisesRegex(
                ValueError,
                self.path_errmsg,
                self.proxy.generate_temp_url,
                bad_path,
                60,
                self.method,
                temp_url_key=self.key,
            )


class TestTempURLUnicodePathAndKey(TestTempURL):
    url = u'/v1/\u00e4/c/\u00f3'
    key = u'k\u00e9y'
    expected_url = (
        u'%s?temp_url_sig=temp_url_signature' u'&temp_url_expires=1400003600'
    ) % url
    expected_body = u'\n'.join(
        [
            u'GET',
            u'1400003600',
            url,
        ]
    ).encode('utf-8')


class TestTempURLUnicodePathBytesKey(TestTempURL):
    url = u'/v1/\u00e4/c/\u00f3'
    key = u'k\u00e9y'.encode('utf-8')
    expected_url = (
        u'%s?temp_url_sig=temp_url_signature' u'&temp_url_expires=1400003600'
    ) % url
    expected_body = '\n'.join(
        [
            u'GET',
            u'1400003600',
            url,
        ]
    ).encode('utf-8')


class TestTempURLBytesPathUnicodeKey(TestTempURL):
    url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
    key = u'k\u00e9y'
    expected_url = url + (
        b'?temp_url_sig=temp_url_signature' b'&temp_url_expires=1400003600'
    )
    expected_body = b'\n'.join(
        [
            b'GET',
            b'1400003600',
            url,
        ]
    )


class TestTempURLBytesPathAndKey(TestTempURL):
    url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
    key = u'k\u00e9y'.encode('utf-8')
    expected_url = url + (
        b'?temp_url_sig=temp_url_signature' b'&temp_url_expires=1400003600'
    )
    expected_body = b'\n'.join(
        [
            b'GET',
            b'1400003600',
            url,
        ]
    )


class TestTempURLBytesPathAndNonUtf8Key(TestTempURL):
    url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
    key = b'k\xffy'
    expected_url = url + (
        b'?temp_url_sig=temp_url_signature' b'&temp_url_expires=1400003600'
    )
    expected_body = b'\n'.join(
        [
            b'GET',
            b'1400003600',
            url,
        ]
    )