diff --git a/openstack/object_store/v1/_base.py b/openstack/object_store/v1/_base.py index d429b13a1..19f4c4cdc 100644 --- a/openstack/object_store/v1/_base.py +++ b/openstack/object_store/v1/_base.py @@ -28,13 +28,13 @@ class BaseResource(resource.Resource): _last_headers = dict() def __init__(self, metadata=None, **attrs): - """Process and save metadata known at creation stage - """ + """Process and save metadata known at creation stage""" super().__init__(**attrs) if metadata is not None: for k, v in metadata.items(): if not k.lower().startswith( - self._custom_metadata_prefix.lower()): + self._custom_metadata_prefix.lower() + ): self.metadata[self._custom_metadata_prefix + k] = v else: self.metadata[k] = v @@ -62,8 +62,8 @@ class BaseResource(resource.Resource): def set_metadata(self, session, metadata, refresh=True): request = self._prepare_request() response = session.post( - request.url, - headers=self._calculate_headers(metadata)) + request.url, headers=self._calculate_headers(metadata) + ) self._translate_response(response, has_body=False) if refresh: response = session.head(request.url) @@ -74,10 +74,11 @@ class BaseResource(resource.Resource): request = self._prepare_request() headers = {key: '' for key in keys} response = session.post( - request.url, - headers=self._calculate_headers(headers)) + request.url, headers=self._calculate_headers(headers) + ) exceptions.raise_from_response( - response, error_message="Error deleting metadata keys") + response, error_message="Error deleting metadata keys" + ) return self def _set_metadata(self, headers): @@ -85,10 +86,8 @@ class BaseResource(resource.Resource): for header in headers: # RADOS and other stuff in front may actually lowcase headers - if header.lower().startswith( - self._custom_metadata_prefix.lower() - ): - key = header[len(self._custom_metadata_prefix):].lower() + if header.lower().startswith(self._custom_metadata_prefix.lower()): + key = header[len(self._custom_metadata_prefix) :].lower() self.metadata[key] = headers[header] def _translate_response(self, response, has_body=None, error_message=None): @@ -98,5 +97,6 @@ class BaseResource(resource.Resource): # pops known headers. 
self._last_headers = response.headers.copy() super(BaseResource, self)._translate_response( - response, has_body=has_body, error_message=error_message) + response, has_body=has_body, error_message=error_message + ) self._set_metadata(response.headers) diff --git a/openstack/object_store/v1/_proxy.py b/openstack/object_store/v1/_proxy.py index f6dc9328b..3bd4edf2a 100644 --- a/openstack/object_store/v1/_proxy.py +++ b/openstack/object_store/v1/_proxy.py @@ -40,7 +40,7 @@ class Proxy(proxy.Proxy): "account": _account.Account, "container": _container.Container, "info": _info.Info, - "object": _obj.Object + "object": _obj.Object, } skip_discovery = True @@ -60,19 +60,25 @@ class Proxy(proxy.Proxy): # Split url into parts and exclude potential project_id in some urls url_parts = [ - x for x in url_path.split('/') if ( + x + for x in url_path.split('/') + if ( x != project_id and ( not project_id or (project_id and x != 'AUTH_' + project_id) - )) + ) + ) ] # Strip leading version piece so that # GET /v1/AUTH_xxx # returns ['AUTH_xxx'] - if (url_parts[0] - and url_parts[0][0] == 'v' - and url_parts[0][1] and url_parts[0][1].isdigit()): + if ( + url_parts[0] + and url_parts[0][0] == 'v' + and url_parts[0][1] + and url_parts[0][1].isdigit() + ): url_parts = url_parts[1:] # Strip out anything that's empty or None @@ -152,8 +158,9 @@ class Proxy(proxy.Proxy): :returns: ``None`` """ - self._delete(_container.Container, container, - ignore_missing=ignore_missing) + self._delete( + _container.Container, container, ignore_missing=ignore_missing + ) def get_container_metadata(self, container): """Get metadata for a container @@ -219,8 +226,12 @@ class Proxy(proxy.Proxy): container = self._get_container_name(container=container) for obj in self._list( - _obj.Object, container=container, - paginated=True, format='json', **query): + _obj.Object, + container=container, + paginated=True, + format='json', + **query, + ): obj.container = container yield obj @@ -236,8 +247,12 @@ class Proxy(proxy.Proxy): raise ValueError("container must be specified") def get_object( - self, obj, container=None, resp_chunk_size=1024, - outfile=None, remember_content=False + self, + obj, + container=None, + resp_chunk_size=1024, + outfile=None, + remember_content=False, ): """Get the data associated with an object @@ -262,20 +277,17 @@ class Proxy(proxy.Proxy): :raises: :class:`~openstack.exceptions.ResourceNotFound` when no resource can be found. """ - container_name = self._get_container_name( - obj=obj, container=container) + container_name = self._get_container_name(obj=obj, container=container) _object = self._get_resource( - _obj.Object, obj, - container=container_name) + _obj.Object, obj, container=container_name + ) request = _object._prepare_request() - get_stream = (outfile is not None) + get_stream = outfile is not None response = self.get( - request.url, - headers=request.headers, - stream=get_stream + request.url, headers=request.headers, stream=get_stream ) exceptions.raise_from_response(response) _object._translate_response(response, has_body=False) @@ -286,7 +298,8 @@ class Proxy(proxy.Proxy): else: outfile_handle = outfile for chunk in response.iter_content( - resp_chunk_size, decode_unicode=False): + resp_chunk_size, decode_unicode=False + ): outfile_handle.write(chunk) if isinstance(outfile, str): outfile_handle.close() @@ -308,10 +321,10 @@ class Proxy(proxy.Proxy): :raises: :class:`~openstack.exceptions.ResourceNotFound` when no resource can be found. 
""" - container_name = self._get_container_name( - obj=obj, container=container) + container_name = self._get_container_name(obj=obj, container=container) obj = self._get_resource( - _obj.Object, obj, container=container_name, **attrs) + _obj.Object, obj, container=container_name, **attrs + ) return obj.download(self) def stream_object(self, obj, container=None, chunk_size=1024, **attrs): @@ -326,18 +339,26 @@ class Proxy(proxy.Proxy): when no resource can be found. :returns: An iterator that iterates over chunk_size bytes """ - container_name = self._get_container_name( - obj=obj, container=container) + container_name = self._get_container_name(obj=obj, container=container) obj = self._get_resource( - _obj.Object, obj, container=container_name, **attrs) + _obj.Object, obj, container=container_name, **attrs + ) return obj.stream(self, chunk_size=chunk_size) def create_object( - self, container, name, filename=None, - md5=None, sha256=None, segment_size=None, - use_slo=True, metadata=None, - generate_checksums=None, data=None, - **headers): + self, + container, + name, + filename=None, + md5=None, + sha256=None, + segment_size=None, + use_slo=True, + metadata=None, + generate_checksums=None, + data=None, + **headers, + ): """Create a file object. Automatically uses large-object segments if needed. @@ -373,13 +394,14 @@ class Proxy(proxy.Proxy): """ if data is not None and filename: raise ValueError( - "Both filename and data given. Please choose one.") + "Both filename and data given. Please choose one." + ) if data is not None and not name: - raise ValueError( - "name is a required parameter when data is given") + raise ValueError("name is a required parameter when data is given") if data is not None and generate_checksums: raise ValueError( - "checksums cannot be generated with data parameter") + "checksums cannot be generated with data parameter" + ) if generate_checksums is None: if data is not None: generate_checksums = False @@ -400,17 +422,22 @@ class Proxy(proxy.Proxy): metadata[self._connection._OBJECT_SHA256_KEY] = sha256 container_name = self._get_container_name(container=container) - endpoint = '{container}/{name}'.format(container=container_name, - name=name) + endpoint = '{container}/{name}'.format( + container=container_name, name=name + ) if data is not None: self.log.debug( - "swift uploading data to %(endpoint)s", - {'endpoint': endpoint}) + "swift uploading data to %(endpoint)s", {'endpoint': endpoint} + ) return self._create( - _obj.Object, container=container_name, - name=name, data=data, metadata=metadata, - **headers) + _obj.Object, + container=container_name, + name=name, + data=data, + metadata=metadata, + **headers, + ) # segment_size gets used as a step value in a range call, so needs # to be an int @@ -423,7 +450,8 @@ class Proxy(proxy.Proxy): self._connection.log.debug( "swift uploading %(filename)s to %(endpoint)s", - {'filename': filename, 'endpoint': endpoint}) + {'filename': filename, 'endpoint': endpoint}, + ) if metadata is not None: # Rely on the class headers calculation for requested metadata @@ -435,8 +463,13 @@ class Proxy(proxy.Proxy): else: self._upload_large_object( - endpoint, filename, headers, - file_size, segment_size, use_slo) + endpoint, + filename, + headers, + file_size, + segment_size, + use_slo, + ) # Backwards compat upload_object = create_object @@ -461,8 +494,12 @@ class Proxy(proxy.Proxy): """ container_name = self._get_container_name(obj, container) - self._delete(_obj.Object, obj, ignore_missing=ignore_missing, - 
container=container_name) + self._delete( + _obj.Object, + obj, + ignore_missing=ignore_missing, + container=container_name, + ) def get_object_metadata(self, obj, container=None): """Get metadata for an object. @@ -522,7 +559,8 @@ class Proxy(proxy.Proxy): return res def is_object_stale( - self, container, name, filename, file_md5=None, file_sha256=None): + self, container, name, filename, file_md5=None, file_sha256=None + ): """Check to see if an object matches the hashes of a file. :param container: Name of the container. @@ -538,37 +576,45 @@ class Proxy(proxy.Proxy): except exceptions.NotFoundException: self._connection.log.debug( "swift stale check, no object: {container}/{name}".format( - container=container, name=name)) + container=container, name=name + ) + ) return True if not (file_md5 or file_sha256): - (file_md5, file_sha256) = \ - utils._get_file_hashes(filename) + (file_md5, file_sha256) = utils._get_file_hashes(filename) md5_key = metadata.get( self._connection._OBJECT_MD5_KEY, - metadata.get(self._connection._SHADE_OBJECT_MD5_KEY, '')) + metadata.get(self._connection._SHADE_OBJECT_MD5_KEY, ''), + ) sha256_key = metadata.get( - self._connection._OBJECT_SHA256_KEY, metadata.get( - self._connection._SHADE_OBJECT_SHA256_KEY, '')) + self._connection._OBJECT_SHA256_KEY, + metadata.get(self._connection._SHADE_OBJECT_SHA256_KEY, ''), + ) up_to_date = utils._hashes_up_to_date( - md5=file_md5, sha256=file_sha256, - md5_key=md5_key, sha256_key=sha256_key) + md5=file_md5, + sha256=file_sha256, + md5_key=md5_key, + sha256_key=sha256_key, + ) if not up_to_date: self._connection.log.debug( "swift checksum mismatch: " " %(filename)s!=%(container)s/%(name)s", - {'filename': filename, 'container': container, 'name': name}) + {'filename': filename, 'container': container, 'name': name}, + ) return True self._connection.log.debug( "swift object up to date: %(container)s/%(name)s", - {'container': container, 'name': name}) + {'container': container, 'name': name}, + ) return False def _upload_large_object( - self, endpoint, filename, - headers, file_size, segment_size, use_slo): + self, endpoint, filename, headers, file_size, segment_size, use_slo + ): # If the object is big, we need to break it up into segments that # are no larger than segment_size, upload each of them individually # and then upload a manifest object. The segments can be uploaded in @@ -584,28 +630,32 @@ class Proxy(proxy.Proxy): # segment, the value a FileSegment file-like object that is a # slice of the data for the segment. segments = self._get_file_segments( - endpoint, filename, file_size, segment_size) + endpoint, filename, file_size, segment_size + ) # Schedule the segments for upload for name, segment in segments.items(): # Async call to put - schedules execution and returns a future segment_future = self._connection._pool_executor.submit( - self.put, - name, headers=headers, data=segment, - raise_exc=False) + self.put, name, headers=headers, data=segment, raise_exc=False + ) segment_futures.append(segment_future) # TODO(mordred) Collect etags from results to add to this manifest # dict. Then sort the list of dicts by path. - manifest.append(dict( - # While Object Storage usually expects the name to be - # urlencoded in most requests, the SLO manifest requires - # plain object names instead. 
- path='/{name}'.format(name=parse.unquote(name)), - size_bytes=segment.length)) + manifest.append( + dict( + # While Object Storage usually expects the name to be + # urlencoded in most requests, the SLO manifest requires + # plain object names instead. + path='/{name}'.format(name=parse.unquote(name)), + size_bytes=segment.length, + ) + ) # Try once and collect failed results to retry segment_results, retry_results = self._connection._wait_for_futures( - segment_futures, raise_on_error=False) + segment_futures, raise_on_error=False + ) self._add_etag_to_manifest(segment_results, manifest) @@ -616,37 +666,41 @@ class Proxy(proxy.Proxy): segment.seek(0) # Async call to put - schedules execution and returns a future segment_future = self._connection._pool_executor.submit( - self.put, - name, headers=headers, data=segment) + self.put, name, headers=headers, data=segment + ) # TODO(mordred) Collect etags from results to add to this manifest # dict. Then sort the list of dicts by path. retry_futures.append(segment_future) # If any segments fail the second time, just throw the error segment_results, retry_results = self._connection._wait_for_futures( - retry_futures, raise_on_error=True) + retry_futures, raise_on_error=True + ) self._add_etag_to_manifest(segment_results, manifest) try: if use_slo: return self._finish_large_object_slo( - endpoint, headers, manifest) + endpoint, headers, manifest + ) else: - return self._finish_large_object_dlo( - endpoint, headers) + return self._finish_large_object_dlo(endpoint, headers) except Exception: try: segment_prefix = endpoint.split('/')[-1] self.log.debug( "Failed to upload large object manifest for %s. " - "Removing segment uploads.", segment_prefix) + "Removing segment uploads.", + segment_prefix, + ) self._delete_autocreated_image_objects( - segment_prefix=segment_prefix) + segment_prefix=segment_prefix + ) except Exception: self.log.exception( - "Failed to cleanup image objects for %s:", - segment_prefix) + "Failed to cleanup image objects for %s:", segment_prefix + ) raise def _finish_large_object_slo(self, endpoint, headers, manifest): @@ -656,10 +710,13 @@ class Proxy(proxy.Proxy): retries = 3 while True: try: - return exceptions.raise_from_response(self.put( - endpoint, - params={'multipart-manifest': 'put'}, - headers=headers, data=json.dumps(manifest)) + return exceptions.raise_from_response( + self.put( + endpoint, + params={'multipart-manifest': 'put'}, + headers=headers, + data=json.dumps(manifest), + ) ) except Exception: retries -= 1 @@ -673,7 +730,8 @@ class Proxy(proxy.Proxy): while True: try: return exceptions.raise_from_response( - self.put(endpoint, headers=headers)) + self.put(endpoint, headers=headers) + ) except Exception: retries -= 1 if retries == 0: @@ -681,8 +739,7 @@ class Proxy(proxy.Proxy): def _upload_object(self, endpoint, filename, headers): with open(filename, 'rb') as dt: - return self.put( - endpoint, headers=headers, data=dt) + return self.put(endpoint, headers=headers, data=dt) def _get_file_segments(self, endpoint, filename, file_size, segment_size): # Use an ordered dict here so that testing can replicate things @@ -690,10 +747,13 @@ class Proxy(proxy.Proxy): for (index, offset) in enumerate(range(0, file_size, segment_size)): remaining = file_size - (index * segment_size) segment = _utils.FileSegment( - filename, offset, - segment_size if segment_size < remaining else remaining) + filename, + offset, + segment_size if segment_size < remaining else remaining, + ) name = '{endpoint}/{index:0>6}'.format( - 
endpoint=endpoint, index=index) + endpoint=endpoint, index=index + ) segments[name] = segment return segments @@ -710,7 +770,8 @@ class Proxy(proxy.Proxy): server_max_file_size = DEFAULT_MAX_FILE_SIZE self._connection.log.info( "Swift capabilities not supported. " - "Using default max file size.") + "Using default max file size." + ) else: raise else: @@ -740,9 +801,7 @@ class Proxy(proxy.Proxy): continue name = self._object_name_from_url(result.url) for entry in manifest: - if entry['path'] == '/{name}'.format( - name=parse.unquote(name) - ): + if entry['path'] == '/{name}'.format(name=parse.unquote(name)): entry['etag'] = result.headers['Etag'] def get_info(self): @@ -788,12 +847,16 @@ class Proxy(proxy.Proxy): temp_url_key = None if container: container_meta = self.get_container_metadata(container) - temp_url_key = (container_meta.meta_temp_url_key_2 - or container_meta.meta_temp_url_key) + temp_url_key = ( + container_meta.meta_temp_url_key_2 + or container_meta.meta_temp_url_key + ) if not temp_url_key: account_meta = self.get_account_metadata() - temp_url_key = (account_meta.meta_temp_url_key_2 - or account_meta.meta_temp_url_key) + temp_url_key = ( + account_meta.meta_temp_url_key_2 + or account_meta.meta_temp_url_key + ) if temp_url_key and not isinstance(temp_url_key, bytes): temp_url_key = temp_url_key.encode('utf8') return temp_url_key @@ -807,12 +870,20 @@ class Proxy(proxy.Proxy): if not temp_url_key: raise exceptions.SDKException( 'temp_url_key was not given, nor was a temporary url key' - ' found for the account or the container.') + ' found for the account or the container.' + ) return temp_url_key def generate_form_signature( - self, container, object_prefix, redirect_url, max_file_size, - max_upload_count, timeout, temp_url_key=None): + self, + container, + object_prefix, + redirect_url, + max_file_size, + max_upload_count, + timeout, + temp_url_key=None, + ): """Generate a signature for a FormPost upload. :param container: The value can be the name of a container or a @@ -832,33 +903,50 @@ class Proxy(proxy.Proxy): max_file_size = int(max_file_size) if max_file_size < 1: raise exceptions.SDKException( - 'Please use a positive max_file_size value.') + 'Please use a positive max_file_size value.' + ) max_upload_count = int(max_upload_count) if max_upload_count < 1: raise exceptions.SDKException( - 'Please use a positive max_upload_count value.') + 'Please use a positive max_upload_count value.' + ) if timeout < 1: raise exceptions.SDKException( - 'Please use a positive value.') + 'Please use a positive value.' 
+ ) expires = int(time.time() + int(timeout)) - temp_url_key = self._check_temp_url_key(container=container, - temp_url_key=temp_url_key) + temp_url_key = self._check_temp_url_key( + container=container, temp_url_key=temp_url_key + ) res = self._get_resource(_container.Container, container) endpoint = parse.urlparse(self.get_endpoint()) path = '/'.join([endpoint.path, res.name, object_prefix]) - data = '%s\n%s\n%s\n%s\n%s' % (path, redirect_url, max_file_size, - max_upload_count, expires) + data = '%s\n%s\n%s\n%s\n%s' % ( + path, + redirect_url, + max_file_size, + max_upload_count, + expires, + ) data = data.encode('utf8') sig = hmac.new(temp_url_key, data, sha1).hexdigest() return (expires, sig) def generate_temp_url( - self, path, seconds, method, absolute=False, prefix=False, - iso8601=False, ip_range=None, temp_url_key=None): + self, + path, + seconds, + method, + absolute=False, + prefix=False, + iso8601=False, + ip_range=None, + temp_url_key=None, + ): """Generates a temporary URL that gives unauthenticated access to the Swift object. @@ -894,7 +982,8 @@ class Proxy(proxy.Proxy): formats = ( EXPIRES_ISO8601_FORMAT, EXPIRES_ISO8601_FORMAT[:-1], - SHORT_EXPIRES_ISO8601_FORMAT) + SHORT_EXPIRES_ISO8601_FORMAT, + ) for f in formats: try: t = time.strptime(seconds, f) @@ -919,8 +1008,10 @@ class Proxy(proxy.Proxy): if timestamp < 0: raise ValueError() except ValueError: - raise ValueError('time must either be a whole number ' - 'or in specific ISO 8601 format.') + raise ValueError( + 'time must either be a whole number ' + 'or in specific ISO 8601 format.' + ) if isinstance(path, bytes): try: @@ -931,50 +1022,61 @@ class Proxy(proxy.Proxy): path_for_body = path parts = path_for_body.split('/', 4) - if len(parts) != 5 or parts[0] or not all( - parts[1:(4 if prefix else 5)]): + if ( + len(parts) != 5 + or parts[0] + or not all(parts[1 : (4 if prefix else 5)]) + ): if prefix: raise ValueError('path must at least contain /v1/a/c/') else: - raise ValueError('path must be full path to an object' - ' e.g. /v1/a/c/o') + raise ValueError( + 'path must be full path to an object' ' e.g. 
/v1/a/c/o' + ) standard_methods = ['GET', 'PUT', 'HEAD', 'POST', 'DELETE'] if method.upper() not in standard_methods: - self.log.warning('Non default HTTP method %s for tempurl ' - 'specified, possibly an error', method.upper()) + self.log.warning( + 'Non default HTTP method %s for tempurl ' + 'specified, possibly an error', + method.upper(), + ) if not absolute: expiration = int(time.time() + timestamp) else: expiration = timestamp - hmac_parts = [method.upper(), str(expiration), - ('prefix:' if prefix else '') + path_for_body] + hmac_parts = [ + method.upper(), + str(expiration), + ('prefix:' if prefix else '') + path_for_body, + ] if ip_range: if isinstance(ip_range, bytes): try: ip_range = ip_range.decode('utf-8') except UnicodeDecodeError: - raise ValueError( - 'ip_range must be representable as UTF-8' - ) + raise ValueError('ip_range must be representable as UTF-8') hmac_parts.insert(0, "ip=%s" % ip_range) hmac_body = u'\n'.join(hmac_parts) temp_url_key = self._check_temp_url_key(temp_url_key=temp_url_key) - sig = hmac.new(temp_url_key, hmac_body.encode('utf-8'), - sha1).hexdigest() + sig = hmac.new( + temp_url_key, hmac_body.encode('utf-8'), sha1 + ).hexdigest() if iso8601: expiration = time.strftime( - EXPIRES_ISO8601_FORMAT, time.gmtime(expiration)) + EXPIRES_ISO8601_FORMAT, time.gmtime(expiration) + ) temp_url = u'{path}?temp_url_sig={sig}&temp_url_expires={exp}'.format( - path=path_for_body, sig=sig, exp=expiration) + path=path_for_body, sig=sig, exp=expiration + ) if ip_range: temp_url += u'&temp_url_ip_range={}'.format(ip_range) @@ -1020,11 +1122,7 @@ class Proxy(proxy.Proxy): # ========== Project Cleanup ========== def _get_cleanup_dependencies(self): - return { - 'object_store': { - 'before': [] - } - } + return {'object_store': {'before': []}} def _service_cleanup( self, @@ -1032,7 +1130,7 @@ class Proxy(proxy.Proxy): client_status_queue=None, identified_resources=None, filters=None, - resource_evaluation_fn=None + resource_evaluation_fn=None, ): is_bulk_delete_supported = False bulk_delete_max_per_request = None @@ -1044,7 +1142,8 @@ class Proxy(proxy.Proxy): bulk_delete = caps.swift.get("bulk_delete", {}) is_bulk_delete_supported = bulk_delete is not None bulk_delete_max_per_request = bulk_delete.get( - "max_deletes_per_request", 100) + "max_deletes_per_request", 100 + ) elements = [] for cont in self.containers(): @@ -1058,7 +1157,8 @@ class Proxy(proxy.Proxy): client_status_queue=client_status_queue, identified_resources=identified_resources, filters=filters, - resource_evaluation_fn=resource_evaluation_fn) + resource_evaluation_fn=resource_evaluation_fn, + ) if need_delete: if not is_bulk_delete_supported and not dry_run: self.delete_object(obj, cont) @@ -1083,7 +1183,8 @@ class Proxy(proxy.Proxy): client_status_queue=client_status_queue, identified_resources=identified_resources, filters=filters, - resource_evaluation_fn=resource_evaluation_fn) + resource_evaluation_fn=resource_evaluation_fn, + ) def _bulk_delete(self, elements, dry_run=False): data = "\n".join([parse.quote(x) for x in elements]) @@ -1093,6 +1194,6 @@ class Proxy(proxy.Proxy): data=data, headers={ 'Content-Type': 'text/plain', - 'Accept': 'application/json' - } + 'Accept': 'application/json', + }, ) diff --git a/openstack/object_store/v1/account.py b/openstack/object_store/v1/account.py index eb3496bfa..e4d17db9e 100644 --- a/openstack/object_store/v1/account.py +++ b/openstack/object_store/v1/account.py @@ -28,8 +28,9 @@ class Account(_base.BaseResource): #: the account. 
account_bytes_used = resource.Header("x-account-bytes-used", type=int) #: The number of containers. - account_container_count = resource.Header("x-account-container-count", - type=int) + account_container_count = resource.Header( + "x-account-container-count", type=int + ) #: The number of objects in the account. account_object_count = resource.Header("x-account-object-count", type=int) #: The secret key value for temporary URLs. If not set, diff --git a/openstack/object_store/v1/container.py b/openstack/object_store/v1/container.py index b9c84fd22..5c225e2e0 100644 --- a/openstack/object_store/v1/container.py +++ b/openstack/object_store/v1/container.py @@ -25,7 +25,7 @@ class Container(_base.BaseResource): "read_ACL": "x-container-read", "write_ACL": "x-container-write", "sync_to": "x-container-sync-to", - "sync_key": "x-container-sync-key" + "sync_key": "x-container-sync-key", } base_path = "/" @@ -38,9 +38,7 @@ class Container(_base.BaseResource): allow_list = True allow_head = True - _query_mapping = resource.QueryParameters( - 'prefix', 'format' - ) + _query_mapping = resource.QueryParameters('prefix', 'format') # Container body data (when id=None) #: The name of the container. @@ -54,10 +52,12 @@ class Container(_base.BaseResource): # Container metadata (when id=name) #: The number of objects. object_count = resource.Header( - "x-container-object-count", type=int, alias='count') + "x-container-object-count", type=int, alias='count' + ) #: The count of bytes used in total. bytes_used = resource.Header( - "x-container-bytes-used", type=int, alias='bytes') + "x-container-bytes-used", type=int, alias='bytes' + ) #: The timestamp of the transaction. timestamp = resource.Header("x-timestamp") @@ -94,8 +94,9 @@ class Container(_base.BaseResource): #: If set to true, Object Storage guesses the content type based #: on the file extension and ignores the value sent in the #: Content-Type header, if present. *Type: bool* - is_content_type_detected = resource.Header("x-detect-content-type", - type=bool) + is_content_type_detected = resource.Header( + "x-detect-content-type", type=bool + ) #: Storage policy used by the container. #: It is not possible to change policy of an existing container @@ -136,9 +137,9 @@ class Container(_base.BaseResource): :data:`Resource.allow_create` is not set to ``True``. """ request = self._prepare_request( - requires_id=True, prepend_key=prepend_key, base_path=base_path) - response = session.put( - request.url, headers=request.headers) + requires_id=True, prepend_key=prepend_key, base_path=base_path + ) + response = session.put(request.url, headers=request.headers) self._translate_response(response, has_body=False) return self diff --git a/openstack/object_store/v1/info.py b/openstack/object_store/v1/info.py index 76e740540..a4f048eab 100644 --- a/openstack/object_store/v1/info.py +++ b/openstack/object_store/v1/info.py @@ -34,8 +34,12 @@ class Info(resource.Resource): tempurl = resource.Body("tempurl", type=dict) def fetch( - self, session, requires_id=False, - base_path=None, skip_cache=False, error_message=None + self, + session, + requires_id=False, + base_path=None, + skip_cache=False, + error_message=None, ): """Get a remote resource based on this instance. 
@@ -64,7 +68,8 @@ class Info(resource.Resource): session = self._get_session(session) endpoint = urllib.parse.urlparse(session.get_endpoint()) url = "{scheme}://{netloc}/info".format( - scheme=endpoint.scheme, netloc=endpoint.netloc) + scheme=endpoint.scheme, netloc=endpoint.netloc + ) microversion = self._get_microversion(session, action='fetch') response = session.get(url, microversion=microversion) diff --git a/openstack/object_store/v1/obj.py b/openstack/object_store/v1/obj.py index 095a2a56e..af95f2ca3 100644 --- a/openstack/object_store/v1/obj.py +++ b/openstack/object_store/v1/obj.py @@ -30,7 +30,7 @@ class Object(_base.BaseResource): "is_content_type_detected": "x-detect-content-type", "manifest": "x-object-manifest", # Rax hack - the need CORS as different header - "access_control_allow_origin": "access-control-allow-origin" + "access_control_allow_origin": "access-control-allow-origin", } base_path = "/%(container)s" @@ -44,10 +44,14 @@ class Object(_base.BaseResource): allow_head = True _query_mapping = resource.QueryParameters( - 'prefix', 'format', - 'temp_url_sig', 'temp_url_expires', - 'filename', 'multipart_manifest', 'symlink', - multipart_manifest='multipart-manifest' + 'prefix', + 'format', + 'temp_url_sig', + 'temp_url_expires', + 'filename', + 'multipart_manifest', + 'symlink', + multipart_manifest='multipart-manifest', ) # Data to be passed during a POST call to create an object on the server. @@ -117,7 +121,8 @@ class Object(_base.BaseResource): #: size of the response body. Instead it contains the size of #: the object, in bytes. content_length = resource.Header( - "content-length", type=int, alias='_bytes') + "content-length", type=int, alias='_bytes' + ) #: The MIME type of the object. content_type = resource.Header("content-type", alias="_content_type") #: The type of ranges that the object accepts. @@ -136,8 +141,9 @@ class Object(_base.BaseResource): etag = resource.Header("etag", alias='_hash') #: Set to True if this object is a static large object manifest object. #: *Type: bool* - is_static_large_object = resource.Header("x-static-large-object", - type=bool) + is_static_large_object = resource.Header( + "x-static-large-object", type=bool + ) #: If set, the value of the Content-Encoding metadata. #: If not set, this header is not returned by this operation. content_encoding = resource.Header("content-encoding") @@ -164,9 +170,8 @@ class Object(_base.BaseResource): #: The date and time that the object was created or the last #: time that the metadata was changed. last_modified_at = resource.Header( - "last-modified", - alias='_last_modified', - aka='updated_at') + "last-modified", alias='_last_modified', aka='updated_at' + ) # Headers for PUT and POST requests #: Set to chunked to enable chunked transfer encoding. If used, @@ -175,8 +180,9 @@ class Object(_base.BaseResource): #: If set to true, Object Storage guesses the content type based #: on the file extension and ignores the value sent in the #: Content-Type header, if present. *Type: bool* - is_content_type_detected = resource.Header("x-detect-content-type", - type=bool) + is_content_type_detected = resource.Header( + "x-detect-content-type", type=bool + ) #: If set, this is the name of an object used to create the new #: object by copying the X-Copy-From object. The value is in form #: {container}/{object}. 
You must UTF-8-encode and then URL-encode @@ -195,7 +201,8 @@ class Object(_base.BaseResource): #: CORS for RAX (deviating from standard) access_control_allow_origin = resource.Header( - "access-control-allow-origin") + "access-control-allow-origin" + ) has_body = False @@ -209,8 +216,9 @@ class Object(_base.BaseResource): def set_metadata(self, session, metadata): # Filter out items with empty values so the create metadata behaviour # is the same as account and container - filtered_metadata = \ - {key: value for key, value in metadata.items() if value} + filtered_metadata = { + key: value for key, value in metadata.items() if value + } # Update from remote if we only have locally created information if not self.last_modified_at: @@ -281,9 +289,11 @@ class Object(_base.BaseResource): request = self._prepare_request() response = session.post( - request.url, headers=self._calculate_headers(metadata)) + request.url, headers=self._calculate_headers(metadata) + ) exceptions.raise_from_response( - response, error_message="Error deleting metadata keys") + response, error_message="Error deleting metadata keys" + ) # Only delete from local object if the remote delete was successful for key in attr_keys_to_delete: @@ -296,7 +306,8 @@ class Object(_base.BaseResource): request = self._prepare_request() response = session.get( - request.url, headers=request.headers, stream=stream) + request.url, headers=request.headers, stream=stream + ) exceptions.raise_from_response(response, error_message=error_message) return response @@ -306,16 +317,15 @@ class Object(_base.BaseResource): def stream(self, session, error_message=None, chunk_size=1024): response = self._download( - session, error_message=error_message, stream=True) + session, error_message=error_message, stream=True + ) return response.iter_content(chunk_size, decode_unicode=False) def create(self, session, base_path=None, **params): request = self._prepare_request(base_path=base_path) response = session.put( - request.url, - data=self.data, - headers=request.headers + request.url, data=self.data, headers=request.headers ) self._translate_response(response, has_body=False) return self @@ -339,6 +349,5 @@ class Object(_base.BaseResource): headers['multipart-manifest'] = 'delete' return session.delete( - request.url, - headers=headers, - microversion=microversion) + request.url, headers=headers, microversion=microversion + ) diff --git a/openstack/tests/functional/object_store/v1/test_account.py b/openstack/tests/functional/object_store/v1/test_account.py index a71e63a4e..abdf821e7 100644 --- a/openstack/tests/functional/object_store/v1/test_account.py +++ b/openstack/tests/functional/object_store/v1/test_account.py @@ -14,7 +14,6 @@ from openstack.tests.functional import base class TestAccount(base.BaseFunctionalTest): - def setUp(self): super(TestAccount, self).setUp() self.require_service('object-store') diff --git a/openstack/tests/functional/object_store/v1/test_container.py b/openstack/tests/functional/object_store/v1/test_container.py index 25e0f7ad5..d0211497b 100644 --- a/openstack/tests/functional/object_store/v1/test_container.py +++ b/openstack/tests/functional/object_store/v1/test_container.py @@ -15,7 +15,6 @@ from openstack.tests.functional import base class TestContainer(base.BaseFunctionalTest): - def setUp(self): super(TestContainer, self).setUp() self.require_service('object-store') @@ -24,7 +23,9 @@ class TestContainer(base.BaseFunctionalTest): container = self.conn.object_store.create_container(name=self.NAME) 
self.addEmptyCleanup( self.conn.object_store.delete_container, - self.NAME, ignore_missing=False) + self.NAME, + ignore_missing=False, + ) assert isinstance(container, _container.Container) self.assertEqual(self.NAME, container.name) @@ -43,21 +44,24 @@ class TestContainer(base.BaseFunctionalTest): self.assertIsNone(container.read_ACL) self.assertIsNone(container.write_ACL) self.conn.object_store.set_container_metadata( - container, read_ACL='.r:*', write_ACL='demo:demo') + container, read_ACL='.r:*', write_ACL='demo:demo' + ) container = self.conn.object_store.get_container_metadata(self.NAME) self.assertEqual('.r:*', container.read_ACL) self.assertEqual('demo:demo', container.write_ACL) # update system metadata self.conn.object_store.set_container_metadata( - container, read_ACL='.r:demo') + container, read_ACL='.r:demo' + ) container = self.conn.object_store.get_container_metadata(self.NAME) self.assertEqual('.r:demo', container.read_ACL) self.assertEqual('demo:demo', container.write_ACL) # set system metadata and custom metadata self.conn.object_store.set_container_metadata( - container, k0='v0', sync_key='1234') + container, k0='v0', sync_key='1234' + ) container = self.conn.object_store.get_container_metadata(self.NAME) self.assertTrue(container.metadata) self.assertIn('k0', container.metadata) @@ -67,8 +71,9 @@ class TestContainer(base.BaseFunctionalTest): self.assertEqual('1234', container.sync_key) # unset system metadata - self.conn.object_store.delete_container_metadata(container, - ['sync_key']) + self.conn.object_store.delete_container_metadata( + container, ['sync_key'] + ) container = self.conn.object_store.get_container_metadata(self.NAME) self.assertTrue(container.metadata) self.assertIn('k0', container.metadata) diff --git a/openstack/tests/functional/object_store/v1/test_obj.py b/openstack/tests/functional/object_store/v1/test_obj.py index fdbefcf6f..7012325b5 100644 --- a/openstack/tests/functional/object_store/v1/test_obj.py +++ b/openstack/tests/functional/object_store/v1/test_obj.py @@ -26,19 +26,25 @@ class TestObject(base.BaseFunctionalTest): self.conn.object_store.create_container(name=self.FOLDER) self.addCleanup(self.conn.object_store.delete_container, self.FOLDER) self.sot = self.conn.object_store.upload_object( - container=self.FOLDER, name=self.FILE, data=self.DATA) + container=self.FOLDER, name=self.FILE, data=self.DATA + ) self.addEmptyCleanup( - self.conn.object_store.delete_object, self.sot, - ignore_missing=False) + self.conn.object_store.delete_object, + self.sot, + ignore_missing=False, + ) def test_list(self): - names = [o.name for o - in self.conn.object_store.objects(container=self.FOLDER)] + names = [ + o.name + for o in self.conn.object_store.objects(container=self.FOLDER) + ] self.assertIn(self.FILE, names) def test_download_object(self): result = self.conn.object_store.download_object( - self.FILE, container=self.FOLDER) + self.FILE, container=self.FOLDER + ) self.assertEqual(self.DATA, result) result = self.conn.object_store.download_object(self.sot) self.assertEqual(self.DATA, result) @@ -46,25 +52,29 @@ class TestObject(base.BaseFunctionalTest): def test_system_metadata(self): # get system metadata obj = self.conn.object_store.get_object_metadata( - self.FILE, container=self.FOLDER) + self.FILE, container=self.FOLDER + ) # TODO(shade) obj.bytes is coming up None on python3 but not python2 # self.assertGreaterEqual(0, obj.bytes) self.assertIsNotNone(obj.etag) # set system metadata obj = self.conn.object_store.get_object_metadata( - self.FILE, 
container=self.FOLDER) + self.FILE, container=self.FOLDER + ) self.assertIsNone(obj.content_disposition) self.assertIsNone(obj.content_encoding) self.conn.object_store.set_object_metadata( - obj, content_disposition='attachment', content_encoding='gzip') + obj, content_disposition='attachment', content_encoding='gzip' + ) obj = self.conn.object_store.get_object_metadata(obj) self.assertEqual('attachment', obj.content_disposition) self.assertEqual('gzip', obj.content_encoding) # update system metadata self.conn.object_store.set_object_metadata( - obj, content_encoding='deflate') + obj, content_encoding='deflate' + ) obj = self.conn.object_store.get_object_metadata(obj) self.assertEqual('attachment', obj.content_disposition) self.assertEqual('deflate', obj.content_encoding) @@ -79,7 +89,8 @@ class TestObject(base.BaseFunctionalTest): # unset more system metadata self.conn.object_store.delete_object_metadata( - obj, keys=['content_disposition']) + obj, keys=['content_disposition'] + ) obj = self.conn.object_store.get_object_metadata(obj) self.assertIn('k0', obj.metadata) self.assertEqual('v0', obj.metadata['k0']) @@ -90,7 +101,8 @@ class TestObject(base.BaseFunctionalTest): def test_custom_metadata(self): # get custom metadata obj = self.conn.object_store.get_object_metadata( - self.FILE, container=self.FOLDER) + self.FILE, container=self.FOLDER + ) self.assertFalse(obj.metadata) # set no custom metadata @@ -112,8 +124,9 @@ class TestObject(base.BaseFunctionalTest): self.assertEqual('v1', obj.metadata['k1']) # set more custom metadata by named object and container - self.conn.object_store.set_object_metadata(self.FILE, self.FOLDER, - k2='v2') + self.conn.object_store.set_object_metadata( + self.FILE, self.FOLDER, k2='v2' + ) obj = self.conn.object_store.get_object_metadata(obj) self.assertTrue(obj.metadata) self.assertEqual(2, len(obj.metadata)) diff --git a/openstack/tests/unit/object_store/v1/test_account.py b/openstack/tests/unit/object_store/v1/test_account.py index 61bd80f88..d0ca018d5 100644 --- a/openstack/tests/unit/object_store/v1/test_account.py +++ b/openstack/tests/unit/object_store/v1/test_account.py @@ -24,12 +24,11 @@ ACCOUNT_EXAMPLE = { 'x-account-container-count': '678', 'content-type': 'text/plain; charset=utf-8', 'x-account-object-count': '98765', - 'x-timestamp': '1453413555.88937' + 'x-timestamp': '1453413555.88937', } class TestAccount(base.TestCase): - def setUp(self): super(TestAccount, self).setUp() self.endpoint = self.cloud.object_store.get_endpoint() + '/' @@ -49,28 +48,41 @@ class TestAccount(base.TestCase): def test_make_it(self): sot = account.Account(**ACCOUNT_EXAMPLE) self.assertIsNone(sot.id) - self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-bytes-used']), - sot.account_bytes_used) - self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-container-count']), - sot.account_container_count) - self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-object-count']), - sot.account_object_count) + self.assertEqual( + int(ACCOUNT_EXAMPLE['x-account-bytes-used']), + sot.account_bytes_used, + ) + self.assertEqual( + int(ACCOUNT_EXAMPLE['x-account-container-count']), + sot.account_container_count, + ) + self.assertEqual( + int(ACCOUNT_EXAMPLE['x-account-object-count']), + sot.account_object_count, + ) self.assertEqual(ACCOUNT_EXAMPLE['x-timestamp'], sot.timestamp) def test_set_temp_url_key(self): sot = account.Account() key = 'super-secure-key' - self.register_uris([ - dict(method='POST', uri=self.endpoint, - status_code=204, - validate=dict( - headers={ - 
'x-account-meta-temp-url-key': key})), - dict(method='HEAD', uri=self.endpoint, - headers={ - 'x-account-meta-temp-url-key': key}), - ]) + self.register_uris( + [ + dict( + method='POST', + uri=self.endpoint, + status_code=204, + validate=dict( + headers={'x-account-meta-temp-url-key': key} + ), + ), + dict( + method='HEAD', + uri=self.endpoint, + headers={'x-account-meta-temp-url-key': key}, + ), + ] + ) sot.set_temp_url_key(self.cloud.object_store, key) self.assert_calls() @@ -78,15 +90,22 @@ class TestAccount(base.TestCase): sot = account.Account() key = 'super-secure-key' - self.register_uris([ - dict(method='POST', uri=self.endpoint, - status_code=204, - validate=dict( - headers={ - 'x-account-meta-temp-url-key-2': key})), - dict(method='HEAD', uri=self.endpoint, - headers={ - 'x-account-meta-temp-url-key-2': key}), - ]) + self.register_uris( + [ + dict( + method='POST', + uri=self.endpoint, + status_code=204, + validate=dict( + headers={'x-account-meta-temp-url-key-2': key} + ), + ), + dict( + method='HEAD', + uri=self.endpoint, + headers={'x-account-meta-temp-url-key-2': key}, + ), + ] + ) sot.set_temp_url_key(self.cloud.object_store, key, secondary=True) self.assert_calls() diff --git a/openstack/tests/unit/object_store/v1/test_container.py b/openstack/tests/unit/object_store/v1/test_container.py index 3c58bf17d..dc1f2e539 100644 --- a/openstack/tests/unit/object_store/v1/test_container.py +++ b/openstack/tests/unit/object_store/v1/test_container.py @@ -17,13 +17,13 @@ from openstack.tests.unit import base class TestContainer(base.TestCase): - def setUp(self): super(TestContainer, self).setUp() self.container = self.getUniqueString() self.endpoint = self.cloud.object_store.get_endpoint() + '/' self.container_endpoint = '{endpoint}{container}'.format( - endpoint=self.endpoint, container=self.container) + endpoint=self.endpoint, container=self.container + ) self.body = { "count": 2, @@ -42,7 +42,7 @@ class TestContainer(base.TestCase): 'x-history-location': 'history-location', 'content-type': 'application/json; charset=utf-8', 'x-timestamp': '1453414055.48672', - 'x-storage-policy': 'Gold' + 'x-storage-policy': 'Gold', } self.body_plus_headers = dict(self.body, **self.headers) @@ -81,49 +81,44 @@ class TestContainer(base.TestCase): # Attributes from header self.assertEqual( int(self.body_plus_headers['x-container-object-count']), - sot.object_count) + sot.object_count, + ) self.assertEqual( int(self.body_plus_headers['x-container-bytes-used']), - sot.bytes_used) + sot.bytes_used, + ) self.assertEqual( - self.body_plus_headers['x-container-read'], - sot.read_ACL) + self.body_plus_headers['x-container-read'], sot.read_ACL + ) self.assertEqual( - self.body_plus_headers['x-container-write'], - sot.write_ACL) + self.body_plus_headers['x-container-write'], sot.write_ACL + ) self.assertEqual( - self.body_plus_headers['x-container-sync-to'], - sot.sync_to) + self.body_plus_headers['x-container-sync-to'], sot.sync_to + ) self.assertEqual( - self.body_plus_headers['x-container-sync-key'], - sot.sync_key) + self.body_plus_headers['x-container-sync-key'], sot.sync_key + ) self.assertEqual( self.body_plus_headers['x-versions-location'], - sot.versions_location) + sot.versions_location, + ) self.assertEqual( - self.body_plus_headers['x-history-location'], - sot.history_location) + self.body_plus_headers['x-history-location'], sot.history_location + ) self.assertEqual(self.body_plus_headers['x-timestamp'], sot.timestamp) - self.assertEqual(self.body_plus_headers['x-storage-policy'], - 
sot.storage_policy) + self.assertEqual( + self.body_plus_headers['x-storage-policy'], sot.storage_policy + ) def test_list(self): containers = [ - { - "count": 999, - "bytes": 12345, - "name": "container1" - }, - { - "count": 888, - "bytes": 54321, - "name": "container2" - } + {"count": 999, "bytes": 12345, "name": "container1"}, + {"count": 888, "bytes": 54321, "name": "container2"}, ] - self.register_uris([ - dict(method='GET', uri=self.endpoint, - json=containers) - ]) + self.register_uris( + [dict(method='GET', uri=self.endpoint, json=containers)] + ) response = container.Container.list(self.cloud.object_store) @@ -144,25 +139,32 @@ class TestContainer(base.TestCase): "x-container-read": "some ACL", "x-container-write": "another ACL", "x-detect-content-type": 'True', - "X-Container-Meta-foo": "bar" + "X-Container-Meta-foo": "bar", } - self.register_uris([ - dict(method=sess_method, uri=self.container_endpoint, - json=self.body, - validate=dict(headers=headers)), - ]) + self.register_uris( + [ + dict( + method=sess_method, + uri=self.container_endpoint, + json=self.body, + validate=dict(headers=headers), + ), + ] + ) sot_call(self.cloud.object_store) self.assert_calls() def test_create(self): sot = container.Container.new( - name=self.container, metadata={'foo': 'bar'}) + name=self.container, metadata={'foo': 'bar'} + ) self._test_create_update(sot, sot.create, 'PUT') def test_commit(self): sot = container.Container.new( - name=self.container, metadata={'foo': 'bar'}) + name=self.container, metadata={'foo': 'bar'} + ) self._test_create_update(sot, sot.commit, 'POST') def test_to_dict_recursion(self): @@ -200,15 +202,22 @@ class TestContainer(base.TestCase): 'versions_location': None, 'history_location': None, 'write_ACL': None, - 'storage_policy': None - }, json.loads(json.dumps(sot))) + 'storage_policy': None, + }, + json.loads(json.dumps(sot)), + ) def _test_no_headers(self, sot, sot_call, sess_method): headers = {} - self.register_uris([ - dict(method=sess_method, uri=self.container_endpoint, - validate=dict(headers=headers)) - ]) + self.register_uris( + [ + dict( + method=sess_method, + uri=self.container_endpoint, + validate=dict(headers=headers), + ) + ] + ) sot_call(self.cloud.object_store) def test_create_no_headers(self): @@ -225,16 +234,23 @@ class TestContainer(base.TestCase): sot = container.Container.new(name=self.container) key = self.getUniqueString() - self.register_uris([ - dict(method='POST', uri=self.container_endpoint, - status_code=204, - validate=dict( - headers={ - 'x-container-meta-temp-url-key': key})), - dict(method='HEAD', uri=self.container_endpoint, - headers={ - 'x-container-meta-temp-url-key': key}), - ]) + self.register_uris( + [ + dict( + method='POST', + uri=self.container_endpoint, + status_code=204, + validate=dict( + headers={'x-container-meta-temp-url-key': key} + ), + ), + dict( + method='HEAD', + uri=self.container_endpoint, + headers={'x-container-meta-temp-url-key': key}, + ), + ] + ) sot.set_temp_url_key(self.cloud.object_store, key) self.assert_calls() @@ -242,15 +258,22 @@ class TestContainer(base.TestCase): sot = container.Container.new(name=self.container) key = self.getUniqueString() - self.register_uris([ - dict(method='POST', uri=self.container_endpoint, - status_code=204, - validate=dict( - headers={ - 'x-container-meta-temp-url-key-2': key})), - dict(method='HEAD', uri=self.container_endpoint, - headers={ - 'x-container-meta-temp-url-key-2': key}), - ]) + self.register_uris( + [ + dict( + method='POST', + uri=self.container_endpoint, + 
status_code=204, + validate=dict( + headers={'x-container-meta-temp-url-key-2': key} + ), + ), + dict( + method='HEAD', + uri=self.container_endpoint, + headers={'x-container-meta-temp-url-key-2': key}, + ), + ] + ) sot.set_temp_url_key(self.cloud.object_store, key, secondary=True) self.assert_calls() diff --git a/openstack/tests/unit/object_store/v1/test_obj.py b/openstack/tests/unit/object_store/v1/test_obj.py index 30f58a3f4..f3f6cb7c1 100644 --- a/openstack/tests/unit/object_store/v1/test_obj.py +++ b/openstack/tests/unit/object_store/v1/test_obj.py @@ -27,7 +27,6 @@ from openstack.tests.unit.cloud import test_object as base_test_object class TestObject(base_test_object.BaseTestObject): - def setUp(self): super(TestObject, self).setUp() self.the_data = b'test body' @@ -39,7 +38,7 @@ class TestObject(base_test_object.BaseTestObject): "last_modified": "2014-07-13T18:41:03.319240", "bytes": self.the_data_length, "name": self.object, - "content_type": "application/octet-stream" + "content_type": "application/octet-stream", } self.headers = { 'Content-Length': str(len(self.the_data)), @@ -78,9 +77,9 @@ class TestObject(base_test_object.BaseTestObject): 'prefix': 'prefix', 'symlink': 'symlink', 'temp_url_expires': 'temp_url_expires', - 'temp_url_sig': 'temp_url_sig' + 'temp_url_sig': 'temp_url_sig', }, - sot._query_mapping._mapping + sot._query_mapping._mapping, ) def test_new(self): @@ -95,8 +94,7 @@ class TestObject(base_test_object.BaseTestObject): # Attributes from header self.assertEqual(self.container, sot.container) - self.assertEqual( - int(self.body['bytes']), sot.content_length) + self.assertEqual(int(self.body['bytes']), sot.content_length) self.assertEqual(self.body['last_modified'], sot.last_modified_at) self.assertEqual(self.body['hash'], sot.etag) self.assertEqual(self.body['content_type'], sot.content_type) @@ -108,7 +106,8 @@ class TestObject(base_test_object.BaseTestObject): # Attributes from header self.assertEqual(self.container, sot.container) self.assertEqual( - int(self.headers['Content-Length']), sot.content_length) + int(self.headers['Content-Length']), sot.content_length + ) self.assertEqual(self.headers['Accept-Ranges'], sot.accept_ranges) self.assertEqual(self.headers['Last-Modified'], sot.last_modified_at) self.assertEqual(self.headers['Etag'], sot.etag) @@ -129,16 +128,19 @@ class TestObject(base_test_object.BaseTestObject): headers = { 'X-Newest': 'True', 'If-Match': self.headers['Etag'], - 'Accept': '*/*' + 'Accept': '*/*', } - self.register_uris([ - dict(method='GET', uri=self.object_endpoint, - headers=self.headers, - content=self.the_data, - validate=dict( - headers=headers - )) - ]) + self.register_uris( + [ + dict( + method='GET', + uri=self.object_endpoint, + headers=self.headers, + content=self.the_data, + validate=dict(headers=headers), + ) + ] + ) sot = obj.Object.new(container=self.container, name=self.object) sot.is_newest = True # if_match is a list type, but we're passing a string. 
This tests
@@ -153,19 +155,23 @@ class TestObject(base_test_object.BaseTestObject):
     def _test_create(self, method, data):
         sot = obj.Object.new(
-            container=self.container, name=self.object,
-            data=data, metadata={'foo': 'bar'})
+            container=self.container,
+            name=self.object,
+            data=data,
+            metadata={'foo': 'bar'},
+        )
         sot.is_newest = True
-        sent_headers = {
-            "x-newest": 'True',
-            "X-Object-Meta-foo": "bar"
-        }
-        self.register_uris([
-            dict(method=method, uri=self.object_endpoint,
-                 headers=self.headers,
-                 validate=dict(
-                     headers=sent_headers))
-        ])
+        sent_headers = {"x-newest": 'True', "X-Object-Meta-foo": "bar"}
+        self.register_uris(
+            [
+                dict(
+                    method=method,
+                    uri=self.object_endpoint,
+                    headers=self.headers,
+                    validate=dict(headers=sent_headers),
+                )
+            ]
+        )
         rv = sot.create(self.cloud.object_store)
         self.assertEqual(rv.etag, self.headers['Etag'])
diff --git a/openstack/tests/unit/object_store/v1/test_proxy.py b/openstack/tests/unit/object_store/v1/test_proxy.py
index 64b74095f..72ba171a2 100644
--- a/openstack/tests/unit/object_store/v1/test_proxy.py
+++ b/openstack/tests/unit/object_store/v1/test_proxy.py
@@ -47,24 +47,30 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
         self.container = self.getUniqueString()
         self.endpoint = self.cloud.object_store.get_endpoint() + '/'
         self.container_endpoint = '{endpoint}{container}'.format(
-            endpoint=self.endpoint, container=self.container)
+            endpoint=self.endpoint, container=self.container
+        )

     def test_account_metadata_get(self):
         self.verify_head(
-            self.proxy.get_account_metadata, account.Account,
-            method_args=[])
+            self.proxy.get_account_metadata, account.Account, method_args=[]
+        )

     def test_container_metadata_get(self):
-        self.verify_head(self.proxy.get_container_metadata,
-                         container.Container, method_args=["container"])
+        self.verify_head(
+            self.proxy.get_container_metadata,
+            container.Container,
+            method_args=["container"],
+        )

     def test_container_delete(self):
-        self.verify_delete(self.proxy.delete_container,
-                           container.Container, False)
+        self.verify_delete(
+            self.proxy.delete_container, container.Container, False
+        )

     def test_container_delete_ignore(self):
-        self.verify_delete(self.proxy.delete_container,
-                           container.Container, True)
+        self.verify_delete(
+            self.proxy.delete_container, container.Container, True
+        )

     def test_container_create_attrs(self):
         self.verify_create(
@@ -72,7 +78,8 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
             container.Container,
             method_args=['container_name'],
             expected_args=[],
-            expected_kwargs={'name': 'container_name', "x": 1, "y": 2, "z": 3})
+            expected_kwargs={'name': 'container_name', "x": 1, "y": 2, "z": 3},
+        )

     def test_object_metadata_get(self):
         self._verify(
@@ -81,7 +88,8 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
             method_args=['object'],
             method_kwargs={'container': 'container'},
             expected_args=[obj.Object, 'object'],
-            expected_kwargs={'container': 'container'})
+            expected_kwargs={'container': 'container'},
+        )

     def _test_object_delete(self, ignore):
         expected_kwargs = {
@@ -95,7 +103,8 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
             method_args=["resource"],
             method_kwargs=expected_kwargs,
             expected_args=[obj.Object, "resource"],
-            expected_kwargs=expected_kwargs)
+            expected_kwargs=expected_kwargs,
+        )

     def test_object_delete(self):
         self._test_object_delete(False)
@@ -108,7 +117,7 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
             "name": "test",
             "data": "data",
             "container": "name",
-            "metadata": {}
+            "metadata": {},
         }
         self._verify(
@@ -116,52 +125,57 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
             self.proxy.upload_object,
             method_kwargs=kwargs,
             expected_args=[obj.Object],
-            expected_kwargs=kwargs)
+            expected_kwargs=kwargs,
+        )

     def test_object_create_no_container(self):
         self.assertRaises(TypeError, self.proxy.upload_object)

     def test_object_get(self):
         with requests_mock.Mocker() as m:
-            m.get("%scontainer/object" % self.endpoint,
-                  text="data")
+            m.get("%scontainer/object" % self.endpoint, text="data")
             res = self.proxy.get_object("object", container="container")
             self.assertIsNone(res.data)

     def test_object_get_write_file(self):
         with requests_mock.Mocker() as m:
-            m.get("%scontainer/object" % self.endpoint,
-                  text="data")
+            m.get("%scontainer/object" % self.endpoint, text="data")
             with tempfile.NamedTemporaryFile() as f:
                 self.proxy.get_object(
-                    "object", container="container",
-                    outfile=f.name)
+                    "object", container="container", outfile=f.name
+                )
                 dt = open(f.name).read()
                 self.assertEqual(dt, "data")

     def test_object_get_remember_content(self):
         with requests_mock.Mocker() as m:
-            m.get("%scontainer/object" % self.endpoint,
-                  text="data")
+            m.get("%scontainer/object" % self.endpoint, text="data")
             res = self.proxy.get_object(
-                "object", container="container",
-                remember_content=True)
+                "object", container="container", remember_content=True
+            )
             self.assertEqual(res.data, "data")

     def test_set_temp_url_key(self):
         key = 'super-secure-key'
-        self.register_uris([
-            dict(method='POST', uri=self.endpoint,
-                 status_code=204,
-                 validate=dict(
-                     headers={
-                         'x-account-meta-temp-url-key': key})),
-            dict(method='HEAD', uri=self.endpoint,
-                 headers={
-                     'x-account-meta-temp-url-key': key}),
-        ])
+        self.register_uris(
+            [
+                dict(
+                    method='POST',
+                    uri=self.endpoint,
+                    status_code=204,
+                    validate=dict(
+                        headers={'x-account-meta-temp-url-key': key}
+                    ),
+                ),
+                dict(
+                    method='HEAD',
+                    uri=self.endpoint,
+                    headers={'x-account-meta-temp-url-key': key},
+                ),
+            ]
+        )
         self.proxy.set_account_temp_url_key(key)
         self.assert_calls()
@@ -169,16 +183,23 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
         key = 'super-secure-key'
-        self.register_uris([
-            dict(method='POST', uri=self.endpoint,
-                 status_code=204,
-                 validate=dict(
-                     headers={
-                         'x-account-meta-temp-url-key-2': key})),
-            dict(method='HEAD', uri=self.endpoint,
-                 headers={
-                     'x-account-meta-temp-url-key-2': key}),
-        ])
+        self.register_uris(
+            [
+                dict(
+                    method='POST',
+                    uri=self.endpoint,
+                    status_code=204,
+                    validate=dict(
+                        headers={'x-account-meta-temp-url-key-2': key}
+                    ),
+                ),
+                dict(
+                    method='HEAD',
+                    uri=self.endpoint,
+                    headers={'x-account-meta-temp-url-key-2': key},
+                ),
+            ]
+        )
         self.proxy.set_account_temp_url_key(key, secondary=True)
         self.assert_calls()
@@ -186,16 +207,23 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
         key = 'super-secure-key'
-        self.register_uris([
-            dict(method='POST', uri=self.container_endpoint,
-                 status_code=204,
-                 validate=dict(
-                     headers={
-                         'x-container-meta-temp-url-key': key})),
-            dict(method='HEAD', uri=self.container_endpoint,
-                 headers={
-                     'x-container-meta-temp-url-key': key}),
-        ])
+        self.register_uris(
+            [
+                dict(
+                    method='POST',
+                    uri=self.container_endpoint,
+                    status_code=204,
+                    validate=dict(
+                        headers={'x-container-meta-temp-url-key': key}
+                    ),
+                ),
+                dict(
+                    method='HEAD',
+                    uri=self.container_endpoint,
+                    headers={'x-container-meta-temp-url-key': key},
+                ),
+            ]
+        )
         self.proxy.set_container_temp_url_key(self.container, key)
         self.assert_calls()
@@ -203,18 +231,26 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
         key = 'super-secure-key'
-        self.register_uris([
-            dict(method='POST', uri=self.container_endpoint,
-                 status_code=204,
-                 validate=dict(
-                     headers={
-                         'x-container-meta-temp-url-key-2': key})),
-            dict(method='HEAD', uri=self.container_endpoint,
-                 headers={
-                     'x-container-meta-temp-url-key-2': key}),
-        ])
+        self.register_uris(
+            [
+                dict(
+                    method='POST',
+                    uri=self.container_endpoint,
+                    status_code=204,
+                    validate=dict(
+                        headers={'x-container-meta-temp-url-key-2': key}
+                    ),
+                ),
+                dict(
+                    method='HEAD',
+                    uri=self.container_endpoint,
+                    headers={'x-container-meta-temp-url-key-2': key},
+                ),
+            ]
+        )
         self.proxy.set_container_temp_url_key(
-            self.container, key, secondary=True)
+            self.container, key, secondary=True
+        )
         self.assert_calls()

     def test_copy_object(self):
@@ -222,9 +258,10 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
     def test_file_segment(self):
         file_size = 4200
-        content = ''.join(random.choice(
-            string.ascii_uppercase + string.digits)
-            for _ in range(file_size)).encode('latin-1')
+        content = ''.join(
+            random.choice(string.ascii_uppercase + string.digits)
+            for _ in range(file_size)
+        ).encode('latin-1')
         self.imagefile = tempfile.NamedTemporaryFile(delete=False)
         self.imagefile.write(content)
         self.imagefile.close()
@@ -233,50 +270,60 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
             endpoint='test_container/test_image',
             filename=self.imagefile.name,
             file_size=file_size,
-            segment_size=1000)
+            segment_size=1000,
+        )
         self.assertEqual(len(segments), 5)
         segment_content = b''
         for (index, (name, segment)) in enumerate(segments.items()):
             self.assertEqual(
                 'test_container/test_image/{index:0>6}'.format(index=index),
-                name)
+                name,
+            )
             segment_content += segment.read()
         self.assertEqual(content, segment_content)


 class TestDownloadObject(base_test_object.BaseTestObject):
-
     def setUp(self):
         super(TestDownloadObject, self).setUp()
         self.the_data = b'test body'
-        self.register_uris([
-            dict(method='GET', uri=self.object_endpoint,
-                 headers={
-                     'Content-Length': str(len(self.the_data)),
-                     'Content-Type': 'application/octet-stream',
-                     'Accept-Ranges': 'bytes',
-                     'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
-                     'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
-                     'X-Timestamp': '1481808853.65009',
-                     'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
-                     'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
-                     'X-Static-Large-Object': 'True',
-                     'X-Object-Meta-Mtime': '1481513709.168512',
-                 },
-                 content=self.the_data)])
+        self.register_uris(
+            [
+                dict(
+                    method='GET',
+                    uri=self.object_endpoint,
+                    headers={
+                        'Content-Length': str(len(self.the_data)),
+                        'Content-Type': 'application/octet-stream',
+                        'Accept-Ranges': 'bytes',
+                        'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
+                        'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
+                        'X-Timestamp': '1481808853.65009',
+                        'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
+                        'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
+                        'X-Static-Large-Object': 'True',
+                        'X-Object-Meta-Mtime': '1481513709.168512',
+                    },
+                    content=self.the_data,
+                )
+            ]
+        )

     def test_download(self):
         data = self.cloud.object_store.download_object(
-            self.object, container=self.container)
+            self.object, container=self.container
+        )
         self.assertEqual(data, self.the_data)
         self.assert_calls()

     def test_stream(self):
         chunk_size = 2
-        for index, chunk in enumerate(self.cloud.object_store.stream_object(
-                self.object, container=self.container,
-                chunk_size=chunk_size)):
+        for index, chunk in enumerate(
+            self.cloud.object_store.stream_object(
+                self.object, container=self.container, chunk_size=chunk_size
+            )
+        ):
             chunk_len = len(chunk)
             start = index * chunk_size
             end = start + chunk_len
@@ -290,12 +337,17 @@ class TestExtractName(TestObjectStoreProxy):
     scenarios = [
         ('discovery', dict(url='/', parts=['account'])),
         ('endpoints', dict(url='/endpoints', parts=['endpoints'])),
-        ('container', dict(url='/AUTH_123/container_name',
-                           parts=['container'])),
-        ('object', dict(url='/container_name/object_name',
-                        parts=['object'])),
-        ('object_long', dict(url='/v1/AUTH_123/cnt/path/deep/object_name',
-                             parts=['object']))
+        (
+            'container',
+            dict(url='/AUTH_123/container_name', parts=['container']),
+        ),
+        ('object', dict(url='/container_name/object_name', parts=['object'])),
+        (
+            'object_long',
+            dict(
+                url='/v1/AUTH_123/cnt/path/deep/object_name', parts=['object']
+            ),
+        ),
     ]

     def test_extract_name(self):
@@ -307,36 +359,44 @@ class TestExtractName(TestObjectStoreProxy):

 class TestTempURL(TestObjectStoreProxy):
     expires_iso8601_format = '%Y-%m-%dT%H:%M:%SZ'
     short_expires_iso8601_format = '%Y-%m-%d'
-    time_errmsg = ('time must either be a whole number or in specific '
-                   'ISO 8601 format.')
+    time_errmsg = (
+        'time must either be a whole number or in specific ' 'ISO 8601 format.'
+    )
     path_errmsg = 'path must be full path to an object e.g. /v1/a/c/o'
     url = '/v1/AUTH_account/c/o'
     seconds = 3600
     key = 'correcthorsebatterystaple'
     method = 'GET'
-    expected_url = url + ('?temp_url_sig=temp_url_signature'
-                          '&temp_url_expires=1400003600')
-    expected_body = '\n'.join([
-        method,
-        '1400003600',
-        url,
-    ]).encode('utf-8')
+    expected_url = url + (
+        '?temp_url_sig=temp_url_signature' '&temp_url_expires=1400003600'
+    )
+    expected_body = '\n'.join(
+        [
+            method,
+            '1400003600',
+            url,
+        ]
+    ).encode('utf-8')

     @mock.patch('hmac.HMAC')
     @mock.patch('time.time', return_value=1400000000)
     def test_generate_temp_url(self, time_mock, hmac_mock):
         hmac_mock().hexdigest.return_value = 'temp_url_signature'
         url = self.proxy.generate_temp_url(
-            self.url, self.seconds, self.method, temp_url_key=self.key)
+            self.url, self.seconds, self.method, temp_url_key=self.key
+        )
         key = self.key
         if not isinstance(key, bytes):
             key = key.encode('utf-8')
         self.assertEqual(url, self.expected_url)
-        self.assertEqual(hmac_mock.mock_calls, [
-            mock.call(),
-            mock.call(key, self.expected_body, sha1),
-            mock.call().hexdigest(),
-        ])
+        self.assertEqual(
+            hmac_mock.mock_calls,
+            [
+                mock.call(),
+                mock.call(key, self.expected_body, sha1),
+                mock.call().hexdigest(),
+            ],
+        )
         self.assertIsInstance(url, type(self.url))

     @mock.patch('hmac.HMAC')
@@ -344,62 +404,83 @@ class TestTempURL(TestObjectStoreProxy):
     def test_generate_temp_url_ip_range(self, time_mock, hmac_mock):
         hmac_mock().hexdigest.return_value = 'temp_url_signature'
         ip_ranges = [
-            '1.2.3.4', '1.2.3.4/24', '2001:db8::',
-            b'1.2.3.4', b'1.2.3.4/24', b'2001:db8::',
+            '1.2.3.4',
+            '1.2.3.4/24',
+            '2001:db8::',
+            b'1.2.3.4',
+            b'1.2.3.4/24',
+            b'2001:db8::',
         ]
         path = '/v1/AUTH_account/c/o/'
-        expected_url = path + ('?temp_url_sig=temp_url_signature'
-                               '&temp_url_expires=1400003600'
-                               '&temp_url_ip_range=')
+        expected_url = path + (
+            '?temp_url_sig=temp_url_signature'
+            '&temp_url_expires=1400003600'
+            '&temp_url_ip_range='
+        )
         for ip_range in ip_ranges:
             hmac_mock.reset_mock()
             url = self.proxy.generate_temp_url(
-                path, self.seconds, self.method,
-                temp_url_key=self.key, ip_range=ip_range)
+                path,
+                self.seconds,
+                self.method,
+                temp_url_key=self.key,
+                ip_range=ip_range,
+            )
             key = self.key
             if not isinstance(key, bytes):
                 key = key.encode('utf-8')
             if isinstance(ip_range, bytes):
-                ip_range_expected_url = (
-                    expected_url + ip_range.decode('utf-8')
-                )
-                expected_body = '\n'.join([
-                    'ip=' + ip_range.decode('utf-8'),
-                    self.method,
-                    '1400003600',
-                    path,
-                ]).encode('utf-8')
+                ip_range_expected_url = expected_url + ip_range.decode('utf-8')
+                expected_body = '\n'.join(
+                    [
+                        'ip=' + ip_range.decode('utf-8'),
+                        self.method,
+                        '1400003600',
+                        path,
+                    ]
+                ).encode('utf-8')
             else:
                 ip_range_expected_url = expected_url + ip_range
-                expected_body = '\n'.join([
-                    'ip=' + ip_range,
-                    self.method,
-                    '1400003600',
-                    path,
-                ]).encode('utf-8')
+                expected_body = '\n'.join(
+                    [
+                        'ip=' + ip_range,
+                        self.method,
+                        '1400003600',
+                        path,
+                    ]
+                ).encode('utf-8')
             self.assertEqual(url, ip_range_expected_url)
-            self.assertEqual(hmac_mock.mock_calls, [
-                mock.call(key, expected_body, sha1),
-                mock.call().hexdigest(),
-            ])
+            self.assertEqual(
+                hmac_mock.mock_calls,
+                [
+                    mock.call(key, expected_body, sha1),
+                    mock.call().hexdigest(),
+                ],
+            )
             self.assertIsInstance(url, type(path))

     @mock.patch('hmac.HMAC')
     def test_generate_temp_url_iso8601_argument(self, hmac_mock):
         hmac_mock().hexdigest.return_value = 'temp_url_signature'
         url = self.proxy.generate_temp_url(
-            self.url, '2014-05-13T17:53:20Z', self.method,
-            temp_url_key=self.key)
+            self.url,
+            '2014-05-13T17:53:20Z',
+            self.method,
+            temp_url_key=self.key,
+        )
         self.assertEqual(url, self.expected_url)
         # Don't care about absolute arg.
-        url = self.proxy.generate_temp_url(self.url, '2014-05-13T17:53:20Z',
-                                           self.method,
-                                           temp_url_key=self.key,
-                                           absolute=True)
+        url = self.proxy.generate_temp_url(
+            self.url,
+            '2014-05-13T17:53:20Z',
+            self.method,
+            temp_url_key=self.key,
+            absolute=True,
+        )
         self.assertEqual(url, self.expected_url)
         lt = time.localtime()
@@ -407,14 +488,16 @@ class TestTempURL(TestObjectStoreProxy):
         if not isinstance(self.expected_url, str):
             expected_url = self.expected_url.replace(
-                b'1400003600', bytes(str(int(time.mktime(lt))),
-                                     encoding='ascii'))
+                b'1400003600',
+                bytes(str(int(time.mktime(lt))), encoding='ascii'),
+            )
         else:
             expected_url = self.expected_url.replace(
-                '1400003600', str(int(time.mktime(lt))))
-        url = self.proxy.generate_temp_url(self.url, expires,
-                                           self.method,
-                                           temp_url_key=self.key)
+                '1400003600', str(int(time.mktime(lt)))
+            )
+        url = self.proxy.generate_temp_url(
+            self.url, expires, self.method, temp_url_key=self.key
+        )
         self.assertEqual(url, expected_url)
         expires = time.strftime(self.short_expires_iso8601_format, lt)
@@ -422,39 +505,48 @@ class TestTempURL(TestObjectStoreProxy):
         if not isinstance(self.expected_url, str):
             expected_url = self.expected_url.replace(
-                b'1400003600', bytes(str(int(time.mktime(lt))),
-                                     encoding='ascii'))
+                b'1400003600',
+                bytes(str(int(time.mktime(lt))), encoding='ascii'),
+            )
         else:
             expected_url = self.expected_url.replace(
-                '1400003600', str(int(time.mktime(lt))))
-        url = self.proxy.generate_temp_url(self.url, expires,
-                                           self.method,
-                                           temp_url_key=self.key)
+                '1400003600', str(int(time.mktime(lt)))
+            )
+        url = self.proxy.generate_temp_url(
+            self.url, expires, self.method, temp_url_key=self.key
+        )
         self.assertEqual(url, expected_url)

     @mock.patch('hmac.HMAC')
     @mock.patch('time.time', return_value=1400000000)
     def test_generate_temp_url_iso8601_output(self, time_mock, hmac_mock):
         hmac_mock().hexdigest.return_value = 'temp_url_signature'
-        url = self.proxy.generate_temp_url(self.url, self.seconds,
-                                           self.method,
-                                           temp_url_key=self.key,
-                                           iso8601=True)
+        url = self.proxy.generate_temp_url(
+            self.url,
+            self.seconds,
+            self.method,
+            temp_url_key=self.key,
+            iso8601=True,
+        )
         key = self.key
         if not isinstance(key, bytes):
             key = key.encode('utf-8')
-        expires = time.strftime(self.expires_iso8601_format,
-                                time.gmtime(1400003600))
+        expires = time.strftime(
+            self.expires_iso8601_format, time.gmtime(1400003600)
+        )
         if not isinstance(self.url, str):
             self.assertTrue(url.endswith(bytes(expires, 'utf-8')))
         else:
             self.assertTrue(url.endswith(expires))
-        self.assertEqual(hmac_mock.mock_calls, [
-            mock.call(),
-            mock.call(key, self.expected_body, sha1),
-            mock.call().hexdigest(),
-        ])
+        self.assertEqual(
+            hmac_mock.mock_calls,
+            [
+                mock.call(),
+                mock.call(key, self.expected_body, sha1),
+                mock.call().hexdigest(),
+            ],
+        )
         self.assertIsInstance(url, type(self.url))

     @mock.patch('hmac.HMAC')
@@ -465,25 +557,36 @@ class TestTempURL(TestObjectStoreProxy):
         for p in prefixes:
             hmac_mock.reset_mock()
             path = '/v1/AUTH_account/c/' + p
-            expected_url = path + ('?temp_url_sig=temp_url_signature'
-                                   '&temp_url_expires=1400003600'
-                                   '&temp_url_prefix=' + p)
-            expected_body = '\n'.join([
-                self.method,
-                '1400003600',
-                'prefix:' + path,
-            ]).encode('utf-8')
+            expected_url = path + (
+                '?temp_url_sig=temp_url_signature'
+                '&temp_url_expires=1400003600'
+                '&temp_url_prefix=' + p
+            )
+            expected_body = '\n'.join(
+                [
+                    self.method,
+                    '1400003600',
+                    'prefix:' + path,
+                ]
+            ).encode('utf-8')
             url = self.proxy.generate_temp_url(
-                path, self.seconds, self.method, prefix=True,
-                temp_url_key=self.key)
+                path,
+                self.seconds,
+                self.method,
+                prefix=True,
+                temp_url_key=self.key,
+            )
             key = self.key
             if not isinstance(key, bytes):
                 key = key.encode('utf-8')
             self.assertEqual(url, expected_url)
-            self.assertEqual(hmac_mock.mock_calls, [
-                mock.call(key, expected_body, sha1),
-                mock.call().hexdigest(),
-            ])
+            self.assertEqual(
+                hmac_mock.mock_calls,
+                [
+                    mock.call(key, expected_body, sha1),
+                    mock.call().hexdigest(),
+                ],
+            )
             self.assertIsInstance(url, type(path))
@@ -491,94 +594,142 @@ class TestTempURL(TestObjectStoreProxy):
         self.assertRaisesRegex(
             ValueError,
             'path must be representable as UTF-8',
-            self.proxy.generate_temp_url, b'/v1/a/c/\xff', self.seconds,
-            self.method, temp_url_key=self.key)
+            self.proxy.generate_temp_url,
+            b'/v1/a/c/\xff',
+            self.seconds,
+            self.method,
+            temp_url_key=self.key,
+        )

     @mock.patch('hmac.HMAC.hexdigest', return_value="temp_url_signature")
     def test_generate_absolute_expiry_temp_url(self, hmac_mock):
         if isinstance(self.expected_url, bytes):
             expected_url = self.expected_url.replace(
-                b'1400003600', b'2146636800')
+                b'1400003600', b'2146636800'
+            )
         else:
             expected_url = self.expected_url.replace(
-                u'1400003600', u'2146636800')
+                u'1400003600', u'2146636800'
+            )
         url = self.proxy.generate_temp_url(
-            self.url, 2146636800, self.method, absolute=True,
-            temp_url_key=self.key)
+            self.url,
+            2146636800,
+            self.method,
+            absolute=True,
+            temp_url_key=self.key,
+        )
         self.assertEqual(url, expected_url)

     def test_generate_temp_url_bad_time(self):
-        for bad_time in ['not_an_int', -1, 1.1, '-1', '1.1', '2015-05',
-                         '2015-05-01T01:00']:
+        for bad_time in [
+            'not_an_int',
+            -1,
+            1.1,
+            '-1',
+            '1.1',
+            '2015-05',
+            '2015-05-01T01:00',
+        ]:
             self.assertRaisesRegex(
-                ValueError, self.time_errmsg,
-                self.proxy.generate_temp_url, self.url, bad_time,
-                self.method, temp_url_key=self.key)
+                ValueError,
+                self.time_errmsg,
+                self.proxy.generate_temp_url,
+                self.url,
+                bad_time,
+                self.method,
+                temp_url_key=self.key,
+            )

     def test_generate_temp_url_bad_path(self):
-        for bad_path in ['/v1/a/c', 'v1/a/c/o', 'blah/v1/a/c/o', '/v1//c/o',
-                         '/v1/a/c/', '/v1/a/c']:
+        for bad_path in [
+            '/v1/a/c',
+            'v1/a/c/o',
+            'blah/v1/a/c/o',
+            '/v1//c/o',
+            '/v1/a/c/',
+            '/v1/a/c',
+        ]:
             self.assertRaisesRegex(
-                ValueError, self.path_errmsg,
-                self.proxy.generate_temp_url, bad_path, 60, self.method,
-                temp_url_key=self.key)
+                ValueError,
+                self.path_errmsg,
+                self.proxy.generate_temp_url,
+                bad_path,
+                60,
+                self.method,
+                temp_url_key=self.key,
+            )


 class TestTempURLUnicodePathAndKey(TestTempURL):
     url = u'/v1/\u00e4/c/\u00f3'
     key = u'k\u00e9y'
-    expected_url = (u'%s?temp_url_sig=temp_url_signature'
-                    u'&temp_url_expires=1400003600') % url
-    expected_body = u'\n'.join([
-        u'GET',
-        u'1400003600',
-        url,
-    ]).encode('utf-8')
+    expected_url = (
+        u'%s?temp_url_sig=temp_url_signature' u'&temp_url_expires=1400003600'
+    ) % url
+    expected_body = u'\n'.join(
+        [
+            u'GET',
+            u'1400003600',
+            url,
+        ]
+    ).encode('utf-8')


 class TestTempURLUnicodePathBytesKey(TestTempURL):
     url = u'/v1/\u00e4/c/\u00f3'
     key = u'k\u00e9y'.encode('utf-8')
-    expected_url = (u'%s?temp_url_sig=temp_url_signature'
-                    u'&temp_url_expires=1400003600') % url
-    expected_body = '\n'.join([
-        u'GET',
-        u'1400003600',
-        url,
-    ]).encode('utf-8')
+    expected_url = (
+        u'%s?temp_url_sig=temp_url_signature' u'&temp_url_expires=1400003600'
+    ) % url
+    expected_body = '\n'.join(
+        [
+            u'GET',
+            u'1400003600',
+            url,
+        ]
+    ).encode('utf-8')


 class TestTempURLBytesPathUnicodeKey(TestTempURL):
     url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
     key = u'k\u00e9y'
-    expected_url = url + (b'?temp_url_sig=temp_url_signature'
-                          b'&temp_url_expires=1400003600')
-    expected_body = b'\n'.join([
-        b'GET',
-        b'1400003600',
-        url,
-    ])
+    expected_url = url + (
+        b'?temp_url_sig=temp_url_signature' b'&temp_url_expires=1400003600'
+    )
+    expected_body = b'\n'.join(
+        [
+            b'GET',
+            b'1400003600',
+            url,
+        ]
+    )


 class TestTempURLBytesPathAndKey(TestTempURL):
     url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
     key = u'k\u00e9y'.encode('utf-8')
-    expected_url = url + (b'?temp_url_sig=temp_url_signature'
-                          b'&temp_url_expires=1400003600')
-    expected_body = b'\n'.join([
-        b'GET',
-        b'1400003600',
-        url,
-    ])
+    expected_url = url + (
+        b'?temp_url_sig=temp_url_signature' b'&temp_url_expires=1400003600'
+    )
+    expected_body = b'\n'.join(
+        [
+            b'GET',
+            b'1400003600',
+            url,
+        ]
+    )


 class TestTempURLBytesPathAndNonUtf8Key(TestTempURL):
     url = u'/v1/\u00e4/c/\u00f3'.encode('utf-8')
     key = b'k\xffy'
-    expected_url = url + (b'?temp_url_sig=temp_url_signature'
-                          b'&temp_url_expires=1400003600')
-    expected_body = b'\n'.join([
-        b'GET',
-        b'1400003600',
-        url,
-    ])
+    expected_url = url + (
+        b'?temp_url_sig=temp_url_signature' b'&temp_url_expires=1400003600'
+    )
+    expected_body = b'\n'.join(
+        [
+            b'GET',
+            b'1400003600',
+            url,
+        ]
+    )
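Note: the proxy methods exercised by the tests above can be used roughly as follows. This is a minimal sketch for orientation only; the cloud name, container, object name, key and local path are placeholder values and are not part of this change.

    import openstack

    # Connect using a clouds.yaml entry (the cloud name is a placeholder).
    conn = openstack.connect(cloud='example-cloud')

    # Store a temp-URL signing key on the account,
    # as test_set_temp_url_key does above.
    conn.object_store.set_account_temp_url_key('super-secure-key')

    # Generate a signed GET URL for an object, mirroring the TestTempURL cases.
    url = conn.object_store.generate_temp_url(
        '/v1/AUTH_account/c/o',
        3600,
        'GET',
        temp_url_key='super-secure-key',
    )

    # Download an object to a local file, as test_object_get_write_file does.
    conn.object_store.get_object('o', container='c', outfile='/tmp/o')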