diff --git a/b2blaze/__init__.py b/b2blaze/__init__.py
index cc61261..4ffc783 100644
--- a/b2blaze/__init__.py
+++ b/b2blaze/__init__.py
@@ -1,2 +1,2 @@
 from b2blaze.b2lib import B2
-from .api import API_VERSION, BASE_URL, API
\ No newline at end of file
+from .api import API_VERSION, BASE_URL, API
diff --git a/b2blaze/api.py b/b2blaze/api.py
index 80ed85a..219f860 100644
--- a/b2blaze/api.py
+++ b/b2blaze/api.py
@@ -1,22 +1,22 @@
 # api.py
 # BackBlaze API endpoints
-API_VERSION = '/b2api/v2'
-BASE_URL = 'https://api.backblazeb2.com' + API_VERSION
+API_VERSION = "/b2api/v2"
+BASE_URL = "https://api.backblazeb2.com" + API_VERSION

-class API():
-    authorize = '/b2_authorize_account'
-    delete_file = '/b2_hide_file'
-    delete_file_version = '/b2_delete_file_version'
-    file_info = '/b2_get_file_info'
-    download_file_by_id = '/b2_download_file_by_id'
-    list_all_files = '/b2_list_file_names'
-    list_file_versions = '/b2_list_file_versions'
-    upload_url = '/b2_get_upload_url'
-    upload_large = '/b2_start_large_file'
-    upload_large_part = '/b2_get_upload_part_url'
-    upload_large_finish = '/b2_finish_large_file'
-    create_bucket = '/b2_create_bucket'
-    delete_bucket = '/b2_delete_bucket'
-    list_all_buckets = '/b2_list_buckets'
\ No newline at end of file
+class API:
+    authorize = "/b2_authorize_account"
+    delete_file = "/b2_hide_file"
+    delete_file_version = "/b2_delete_file_version"
+    file_info = "/b2_get_file_info"
+    download_file_by_id = "/b2_download_file_by_id"
+    list_all_files = "/b2_list_file_names"
+    list_file_versions = "/b2_list_file_versions"
+    upload_url = "/b2_get_upload_url"
+    upload_large = "/b2_start_large_file"
+    upload_large_part = "/b2_get_upload_part_url"
+    upload_large_finish = "/b2_finish_large_file"
+    create_bucket = "/b2_create_bucket"
+    delete_bucket = "/b2_delete_bucket"
+    list_all_buckets = "/b2_list_buckets"
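For orientation, a minimal sketch (not part of the patch) of how these constants are combined elsewhere in the library: BASE_URL already embeds API_VERSION, so an endpoint path from the API class is simply appended, as the connector does for authorization.

    # Sketch only: composing a full endpoint URL from the constants above.
    from b2blaze.api import BASE_URL, API

    auth_url = BASE_URL + API.authorize
    # -> https://api.backblazeb2.com/b2api/v2/b2_authorize_account
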
""" API_EXCEPTION_CODES = { - 400 : B2RequestError, - 401 : B2UnauthorizedError, - 403 : B2ForbiddenError, - 404 : B2FileNotFoundError, - 408 : B2RequestTimeoutError, - 429 : B2TooManyRequestsError, - 500 : B2InternalError, - 503 : B2ServiceUnavailableError, + 400: B2RequestError, + 401: B2UnauthorizedError, + 403: B2ForbiddenError, + 404: B2FileNotFoundError, + 408: B2RequestTimeoutError, + 429: B2TooManyRequestsError, + 500: B2InternalError, + 503: B2ServiceUnavailableError, } - + try: response_json = response.json() - message = response_json['message'] - code = response_json['code'] - status = int(response_json['status']) + message = response_json["message"] + code = response_json["code"] + status = int(response_json["status"]) # Return B2Exception if unrecognized status code if not status in API_EXCEPTION_CODES: - return B2Exception('{} - {}: {}'.format(status, code, message)) - + return B2Exception("{} - {}: {}".format(status, code, message)) + ErrorClass = API_EXCEPTION_CODES[status] - return ErrorClass('{} - {}: {}'.format(status, code, message)) + return ErrorClass("{} - {}: {}".format(status, code, message)) except: - return Exception('error parsing response. status code - {} Response JSON: {}'.format(response.status_code, response_json) ) + return Exception( + "error parsing response. status code - {} Response JSON: {}".format( + response.status_code, response_json + ) + ) + class B2FileNotFoundError(Exception): """ 404 Not Found """ + pass class B2RequestError(Exception): """ There is a problem with a passed in request parameters. See returned message for details """ + pass class B2UnauthorizedError(Exception): - """ When calling b2_authorize_account, this means that there was something wrong with the accountId/applicationKeyId or with the applicationKey that was provided. The code unauthorized means that the application key is bad. The code unsupported means that the application key is only valid in a later version of the API. + """ When calling b2_authorize_account, this means that there was something wrong with the accountId/applicationKeyId or with the applicationKey that was provided. The code unauthorized means that the application key is bad. The code unsupported means that the application key is only valid in a later version of the API. The code unauthorized means that the auth token is valid, but does not allow you to make this call with these parameters. When the code is either bad_auth_token or expired_auth_token you should call b2_authorize_account again to get a new auth token. """ + pass class B2ForbiddenError(Exception): - """ You have a reached a storage cap limit, or account access may be impacted in some other way; see the human-readable message. + """ You have a reached a storage cap limit, or account access may be impacted in some other way; see the human-readable message. """ + pass class B2RequestTimeoutError(Exception): """ The service timed out trying to read your request. """ + pass + class B2OutOfRangeError(Exception): """ The Range header in the request is outside the size of the file.. """ + pass class B2TooManyRequestsError(Exception): """ B2 may limit API requests on a per-account basis. """ + pass class B2InternalError(Exception): """ An unexpected error has occurred. """ + pass class B2ServiceUnavailableError(Exception): - """ The service is temporarily unavailable. The human-readable message identifies the nature of the issue, in general we recommend retrying with an exponential backoff between retries in response to this error. 
+ """ The service is temporarily unavailable. The human-readable message identifies the nature of the issue, in general we recommend retrying with an exponential backoff between retries in response to this error. """ + pass class B2InvalidBucketName(Exception): """ Bucket name must be alphanumeric or '-' """ + pass class B2InvalidBucketConfiguration(Exception): """ Value error in bucket configuration """ + pass + class B2AuthorizationError(Exception): """ An error with the authorization request """ + pass + class B2InvalidRequestType(Exception): """ Request type must be get or post """ + pass diff --git a/b2blaze/b2lib.py b/b2blaze/b2lib.py index a5eebdd..8ad1d39 100644 --- a/b2blaze/b2lib.py +++ b/b2blaze/b2lib.py @@ -2,14 +2,17 @@ Copyright George Sibble 2018 """ import os + from b2blaze.b2_exceptions import B2ApplicationKeyNotSet, B2KeyIDNotSet from b2blaze.connector import B2Connector from b2blaze.models.bucket_list import B2Buckets + class B2(object): """ """ + def __init__(self, key_id=None, application_key=None): """ @@ -17,16 +20,17 @@ def __init__(self, key_id=None, application_key=None): :param application_key: """ if key_id is None or application_key is None: - key_id = os.environ.get('B2_KEY_ID', None) - application_key = os.environ.get('B2_APPLICATION_KEY', None) + key_id = os.environ.get("B2_KEY_ID", None) + application_key = os.environ.get("B2_APPLICATION_KEY", None) if key_id is None: raise B2KeyIDNotSet if application_key is None: raise B2ApplicationKeyNotSet self.key_id = key_id self.application_key = application_key - self.connector = B2Connector(key_id=self.key_id, application_key=self.application_key) - + self.connector = B2Connector( + key_id=self.key_id, application_key=self.application_key + ) @property def buckets(self): @@ -35,4 +39,3 @@ def buckets(self): :return: """ return B2Buckets(connector=self.connector) - diff --git a/b2blaze/connector.py b/b2blaze/connector.py index be4a3e0..fcdd0db 100644 --- a/b2blaze/connector.py +++ b/b2blaze/connector.py @@ -1,19 +1,57 @@ """ Copyright George Sibble 2018 """ -import requests + import datetime -from requests.auth import HTTPBasicAuth -from b2blaze.b2_exceptions import B2Exception, B2AuthorizationError, B2InvalidRequestType -import sys from hashlib import sha1 -from b2blaze.utilities import b2_url_encode, decode_error, get_content_length, StreamWithHashProgress + +import requests +from requests.auth import HTTPBasicAuth +from requests.adapters import HTTPAdapter +from requests.packages.urllib3.util.retry import Retry + +import requests_cache + +from b2blaze.b2_exceptions import ( + B2Exception, + B2AuthorizationError, + B2InvalidRequestType, +) + +from b2blaze.utilities import b2_url_encode, get_content_length, StreamWithHashProgress + from .api import BASE_URL, API_VERSION, API + +# TODO: Fix this quick hack +requests_cache.install_cache("backblaze_cache", backend="redis", expire_after=180) + + +def requests_retry_session( + retries=3, + backoff_factor=0.3, + status_forcelist=(408, 500, 501, 502, 503, 504), + session=None, +): + session = session or requests.Session() + retry = Retry( + total=retries, + read=retries, + connect=retries, + backoff_factor=backoff_factor, + status_forcelist=status_forcelist, + ) + adapter = HTTPAdapter(max_retries=retry) + session.mount("http://", adapter) + session.mount("https://", adapter) + return session + + class B2Connector(object): """ """ + def __init__(self, key_id, application_key): """ @@ -28,11 +66,9 @@ def __init__(self, key_id, application_key): self.api_url = None 
diff --git a/b2blaze/connector.py b/b2blaze/connector.py
index be4a3e0..fcdd0db 100644
--- a/b2blaze/connector.py
+++ b/b2blaze/connector.py
@@ -1,19 +1,57 @@
 """
 Copyright George Sibble 2018
 """
-import requests
+
 import datetime
-from requests.auth import HTTPBasicAuth
-from b2blaze.b2_exceptions import B2Exception, B2AuthorizationError, B2InvalidRequestType
-import sys
 from hashlib import sha1
-from b2blaze.utilities import b2_url_encode, decode_error, get_content_length, StreamWithHashProgress
+
+import requests
+from requests.auth import HTTPBasicAuth
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.util.retry import Retry
+
+import requests_cache
+
+from b2blaze.b2_exceptions import (
+    B2Exception,
+    B2AuthorizationError,
+    B2InvalidRequestType,
+)
+
+from b2blaze.utilities import b2_url_encode, get_content_length, StreamWithHashProgress
+
 from .api import BASE_URL, API_VERSION, API
+
+
+# TODO: Fix this quick hack
+requests_cache.install_cache("backblaze_cache", backend="redis", expire_after=180)
+
+
+def requests_retry_session(
+    retries=3,
+    backoff_factor=0.3,
+    status_forcelist=(408, 500, 501, 502, 503, 504),
+    session=None,
+):
+    session = session or requests.Session()
+    retry = Retry(
+        total=retries,
+        read=retries,
+        connect=retries,
+        backoff_factor=backoff_factor,
+        status_forcelist=status_forcelist,
+    )
+    adapter = HTTPAdapter(max_retries=retry)
+    session.mount("http://", adapter)
+    session.mount("https://", adapter)
+    return session
+
+
 class B2Connector(object):
     """

     """
+
     def __init__(self, key_id, application_key):
         """

@@ -28,11 +66,9 @@ def __init__(self, key_id, application_key):
         self.api_url = None
         self.download_url = None
         self.recommended_part_size = None
-        self.api_session = None
-        #TODO: Part Size
+        # TODO: Part Size
         self._authorize()

-
     @property
     def authorized(self):
         """
@@ -42,11 +78,12 @@
         if self.auth_token is None:
             return False
         else:
-            if (datetime.datetime.utcnow() - self.authorized_at) > datetime.timedelta(hours=23):
+            if (datetime.datetime.utcnow() - self.authorized_at) > datetime.timedelta(
+                hours=23
+            ):
                 self._authorize()
             return True

-
    def _authorize(self):
        """

@@ -54,24 +91,25 @@
         """
         path = BASE_URL + API.authorize
-        result = requests.get(path, auth=HTTPBasicAuth(self.key_id, self.application_key))
+        result = requests_retry_session().get(
+            path, auth=HTTPBasicAuth(self.key_id, self.application_key)
+        )
         if result.status_code == 200:
             result_json = result.json()
             self.authorized_at = datetime.datetime.utcnow()
-            self.account_id = result_json['accountId']
-            self.auth_token = result_json['authorizationToken']
-            self.api_url = result_json['apiUrl'] + API_VERSION
-            self.download_url = result_json['downloadUrl'] + API_VERSION + API.download_file_by_id
-            self.recommended_part_size = result_json['recommendedPartSize']
-            self.api_session = requests.Session()
-            self.api_session.headers.update({
-                'Authorization': self.auth_token
-            })
+            self.account_id = result_json["accountId"]
+            self.auth_token = result_json["authorizationToken"]
+            self.api_url = result_json["apiUrl"] + API_VERSION
+            self.download_url = (
+                result_json["downloadUrl"] + API_VERSION + API.download_file_by_id
+            )
+            self.recommended_part_size = result_json["recommendedPartSize"]
         else:
             raise B2Exception.parse(result)

-
-    def make_request(self, path, method='get', headers={}, params={}, account_id_required=False):
+    def make_request(
+        self, path, method="get", headers={}, params={}, account_id_required=False
+    ):
         """

         :param path:
@@ -81,26 +119,33 @@ def make_request(self, path, method='get', headers={}, params={}, account_id_req
         :param account_id_required:
         :return:
         """
+        headers.update({"Authorization": self.auth_token})
+
         if self.authorized:
             url = self.api_url + path
-            if method == 'get':
-                return self.api_session.get(url, headers=headers)
-            elif method == 'post':
+            if method == "get":
+                return requests_retry_session().get(url, headers=headers)
+            elif method == "post":
                 if account_id_required:
-                    params.update({
-                        'accountId': self.account_id
-                    })
-                headers.update({
-                    'Content-Type': 'application/json'
-                })
-                return self.api_session.post(url, json=params, headers=headers)
+                    params.update({"accountId": self.account_id})
+                headers.update({"Content-Type": "application/json"})
+                return requests_retry_session().post(url, json=params, headers=headers)
             else:
-                raise B2InvalidRequestType('Request type must be get or post')
+                raise B2InvalidRequestType("Request type must be get or post")
         else:
-            raise B2AuthorizationError('Unknown Error')
-
-    def upload_file(self, file_contents, file_name, upload_url, auth_token,
-                    direct=False, mime_content_type=None, content_length=None, progress_listener=None):
+            raise B2AuthorizationError("Unknown Error")
+
+    def upload_file(
+        self,
+        file_contents,
+        file_name,
+        upload_url,
+        auth_token,
+        direct=False,
+        mime_content_type=None,
+        content_length=None,
+        progress_listener=None,
+    ):
         """

         :param file_contents:
@@ -112,11 +157,13 @@ def upload_file(self, file_contents, file_name, upload_url, auth_token,
         :param progress_listener
         :return:
         """
-        if hasattr(file_contents, 'read'):
+        if hasattr(file_contents, "read"):
             if content_length is None:
                 content_length = get_content_length(file_contents)
-            file_sha = 'hex_digits_at_end'
-            data = StreamWithHashProgress(stream=file_contents, progress_listener=progress_listener)
+            file_sha = "hex_digits_at_end"
+            data = StreamWithHashProgress(
+                stream=file_contents, progress_listener=progress_listener
+            )
             content_length += data.hash_size()
         else:
             if content_length is None:
@@ -125,16 +172,24 @@ def upload_file(self, file_contents, file_name, upload_url, auth_token,
             data = file_contents

         headers = {
-            'Content-Type': mime_content_type or 'b2/x-auto',
-            'Content-Length': str(content_length),
-            'X-Bz-Content-Sha1': file_sha,
-            'X-Bz-File-Name': b2_url_encode(file_name),
-            'Authorization': auth_token
+            "Content-Type": mime_content_type or "b2/x-auto",
+            "Content-Length": str(content_length),
+            "X-Bz-Content-Sha1": file_sha,
+            "X-Bz-File-Name": b2_url_encode(file_name),
+            "Authorization": auth_token,
         }
-        return requests.post(upload_url, headers=headers, data=data)
-
-    def upload_part(self, file_contents, content_length, part_number, upload_url, auth_token, progress_listener=None):
+        return requests_retry_session().post(upload_url, headers=headers, data=data)
+
+    def upload_part(
+        self,
+        file_contents,
+        content_length,
+        part_number,
+        upload_url,
+        auth_token,
+        progress_listener=None,
+    ):
         """

         :param file_contents:
@@ -145,18 +200,20 @@
         :param progress_listener:
         :return:
         """
-        file_sha = 'hex_digits_at_end'
-        data = StreamWithHashProgress(stream=file_contents, progress_listener=progress_listener)
+        file_sha = "hex_digits_at_end"
+        data = StreamWithHashProgress(
+            stream=file_contents, progress_listener=progress_listener
+        )
         content_length += data.hash_size()

         headers = {
-            'Content-Length': str(content_length),
-            'X-Bz-Content-Sha1': file_sha,
-            'X-Bz-Part-Number': str(part_number),
-            'Authorization': auth_token
+            "Content-Length": str(content_length),
+            "X-Bz-Content-Sha1": file_sha,
+            "X-Bz-Part-Number": str(part_number),
+            "Authorization": auth_token,
         }
-        return requests.post(upload_url, headers=headers, data=data)
+        return requests_retry_session().post(upload_url, headers=headers, data=data)

     def download_file(self, file_id):
         """
@@ -165,12 +222,7 @@
         :return:
         """
         url = self.download_url
-        params = {
-            'fileId': file_id
-        }
-        headers = {
-            'Authorization': self.auth_token
-        }
-
-        return requests.get(url, headers=headers, params=params)
+        params = {"fileId": file_id}
+        headers = {"Authorization": self.auth_token}
+        return requests_retry_session().get(url, headers=headers, params=params)
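Two behavioural notes on the connector changes. requests_retry_session() wraps a plain requests.Session with urllib3's Retry so that connection failures and the listed status codes (408 and 5xx) are retried with exponential backoff; it can be exercised on its own, for example (sketch only, not part of the patch, endpoint chosen arbitrarily):

    # Sketch only: standalone use of the retry helper introduced by this patch.
    from b2blaze.connector import requests_retry_session

    session = requests_retry_session(retries=5, backoff_factor=0.5)
    r = session.get("https://api.backblazeb2.com/b2api/v2/b2_list_buckets")
    print(r.status_code)  # an auth/request error without credentials; such codes are not retried

Note also that the module-level requests_cache.install_cache(...) call patches requests globally at import time and assumes a reachable Redis backend; the "# TODO: Fix this quick hack" comment flags it as a stopgap rather than a considered default.
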
diff --git a/b2blaze/models/b2_file.py b/b2blaze/models/b2_file.py
index 320ca34..e9b7439 100644
--- a/b2blaze/models/b2_file.py
+++ b/b2blaze/models/b2_file.py
@@ -2,16 +2,32 @@
 Copyright George Sibble 2018
 """
 from io import BytesIO
-from ..utilities import b2_url_encode, b2_url_decode, decode_error
+
+from ..utilities import b2_url_encode
 from ..b2_exceptions import B2Exception
 from ..api import API

+
 class B2File(object):
     """

     """
-    def __init__(self, connector, parent_list, fileId, fileName, contentSha1, contentLength, contentType,
-                 fileInfo, action, uploadTimestamp, *args, **kwargs):
+
+    def __init__(
+        self,
+        connector,
+        parent_list,
+        fileId,
+        fileName,
+        contentSha1,
+        contentLength,
+        contentType,
+        fileInfo,
+        action,
+        uploadTimestamp,
+        *args,
+        **kwargs
+    ):
         """

         :param connector:
@@ -29,7 +45,7 @@ def __init__(self, connector, parent_list, fileId, fileName, contentSha1, conten
         """
self.file_id = fileId # self.file_name_decoded = b2_url_decode(fileName) - #TODO: Find out if this is necessary + # TODO: Find out if this is necessary self.file_name = fileName self.content_sha1 = contentSha1 self.content_length = contentLength @@ -41,7 +57,7 @@ def __init__(self, connector, parent_list, fileId, fileName, contentSha1, conten self.parent_list = parent_list self.deleted = False - def get_versions(self, limit=None): + def get_versions(self, limit=None): """ Fetch list of all versions of the current file. Params: limit: (int) Limit number of results returned (optional, default 10000) @@ -54,31 +70,32 @@ def get_versions(self, limit=None): path = API.list_file_versions file_versions = [] params = { - 'bucketId': bucket_id, - 'maxFileCount': limit or 10000, - 'startFileId': self.file_id, - 'startFileName': self.file_name, + "bucketId": bucket_id, + "maxFileCount": limit or 10000, + "startFileId": self.file_id, + "startFileName": self.file_name, } - response = self.connector.make_request(path=path, method='post', params=params) + response = self.connector.make_request(path=path, method="post", params=params) if response.status_code == 200: files_json = response.json() - for file_json in files_json['files']: - new_file = B2File(connector=self.connector, parent_list=self, **file_json) + for file_json in files_json["files"]: + new_file = B2File( + connector=self.connector, parent_list=self, **file_json + ) file_versions.append(new_file) else: raise B2Exception.parse(response) return file_versions - def hide(self): """ Soft-delete a file (hide it from files list, but previous versions are saved.) """ path = API.delete_file params = { - 'bucketId': self.parent_list.bucket.bucket_id, - 'fileName': b2_url_encode(self.file_name) + "bucketId": self.parent_list.bucket.bucket_id, + "fileName": b2_url_encode(self.file_name), } - response = self.connector.make_request(path=path, method='post', params=params) + response = self.connector.make_request(path=path, method="post", params=params) if response.status_code == 200: self.deleted = True # Delete from parent list if exists @@ -87,52 +104,48 @@ def hide(self): else: raise B2Exception.parse(response) - def delete_all_versions(self, confirm=False): - """ Delete completely all versions of a file. + """ Delete completely all versions of a file. ** NOTE THAT THIS CAN BE VERY EXPENSIVE IN TERMS OF YOUR API LIMITS ** - Each call to delete_all_versions will result in multiple API calls: + Each call to delete_all_versions will result in multiple API calls: One API call per file version to be deleted, per file. 1. Call '/b2_list_file_versions' to get file versions 2. Call '/b2_delete_file_version' once for each version of the file - This means: if you have 10 files with 50 versions each and call delete_all_versions, + This means: if you have 10 files with 50 versions each and call delete_all_versions, you will spend (10 + 1) x 50 == 550 API calls against your BackBlaze b2 API limit. ** You have been warned! BE CAREFUL!!! ** """ - print(self.delete_all_versions.__name__, self.delete_all_versions.__doc__) # Print warnings at call time. + print( + self.delete_all_versions.__name__, self.delete_all_versions.__doc__ + ) # Print warnings at call time. 
# Confirm deletion if not confirm: - print('To call this function, use delete_all_versions(confirm=True)') + print("To call this function, use delete_all_versions(confirm=True)") return False versions = self.get_versions() version_count = len(versions) if not version_count > 0: - print('No file versions') + print("No file versions") else: - print(version_count, 'file versions') + print(version_count, "file versions") for count, v in enumerate(versions): - print('deleting [{}/{}]'.format(count + 1 , version_count)) + print("deleting [{}/{}]".format(count + 1, version_count)) v.delete() - def delete(self): """ Delete a file version (Does not delete entire file history: only most recent version) """ path = API.delete_file_version - params = { - 'fileId': self.file_id, - 'fileName': b2_url_encode(self.file_name) - } - response = self.connector.make_request(path=path, method='post', params=params) + params = {"fileId": self.file_id, "fileName": b2_url_encode(self.file_name)} + response = self.connector.make_request(path=path, method="post", params=params) if not response.status_code == 200: raise B2Exception.parse(response) self.deleted = True - def download(self): """ Download latest file version """ response = self.connector.download_file(file_id=self.file_id) @@ -144,4 +157,4 @@ def download(self): @property def url(self): """ Return file download URL """ - return self.connector.download_url + '?fileId=' + self.file_id + return self.connector.download_url + "?fileId=" + self.file_id diff --git a/b2blaze/models/bucket.py b/b2blaze/models/bucket.py index 693ff32..04b9fd6 100644 --- a/b2blaze/models/bucket.py +++ b/b2blaze/models/bucket.py @@ -5,12 +5,26 @@ from ..b2_exceptions import B2Exception from ..api import API + class B2Bucket(object): """ """ - def __init__(self, connector, parent_list, bucketId, bucketName, bucketType, bucketInfo, lifecycleRules, revision, - corsRules, *args, **kwargs): + + def __init__( + self, + connector, + parent_list, + bucketId, + bucketName, + bucketType, + bucketInfo, + lifecycleRules, + revision, + corsRules, + *args, + **kwargs + ): """ :param connector: @@ -47,16 +61,17 @@ def delete(self, delete_files=False, confirm_non_empty=False): files = self.files.all(include_hidden=True) if delete_files: if not confirm_non_empty: - raise B2Exception('Bucket is not empty! Must confirm deletion of all files with confirm_non_empty=True') + raise B2Exception( + "Bucket is not empty! Must confirm deletion of all files with confirm_non_empty=True" + ) else: print("Deleting all files from bucket. Beware API limits!") self.files.delete_all(confirm=True) - - params = { - 'bucketId': self.bucket_id - } - response = self.connector.make_request(path=path, method='post', params=params, account_id_required=True) + params = {"bucketId": self.bucket_id} + response = self.connector.make_request( + path=path, method="post", params=params, account_id_required=True + ) if response.status_code == 200: self.deleted = True del self.parent_list._buckets_by_name[self.bucket_name] @@ -65,10 +80,10 @@ def delete(self, delete_files=False, confirm_non_empty=False): raise B2Exception.parse(response) def edit(self): - #TODO: Edit details + # TODO: Edit details pass @property def files(self): """ List of files in the bucket. B2FileList instance. 
""" - return B2FileList(connector=self.connector, bucket=self) \ No newline at end of file + return B2FileList(connector=self.connector, bucket=self) diff --git a/b2blaze/models/bucket_list.py b/b2blaze/models/bucket_list.py index 5ea558f..c8b580a 100644 --- a/b2blaze/models/bucket_list.py +++ b/b2blaze/models/bucket_list.py @@ -2,7 +2,11 @@ Copyright George Sibble 2018 """ -from ..b2_exceptions import B2Exception, B2InvalidBucketName, B2InvalidBucketConfiguration +from ..b2_exceptions import ( + B2Exception, + B2InvalidBucketName, + B2InvalidBucketConfiguration, +) from .bucket import B2Bucket from ..api import API @@ -11,8 +15,9 @@ class B2Buckets(object): """ """ - public = 'allPublic' - private = 'allPrivate' + + public = "allPublic" + private = "allPrivate" def __init__(self, connector): """ @@ -37,17 +42,21 @@ def _update_bucket_list(self, retrieve=False): :return: """ path = API.list_all_buckets - response = self.connector.make_request(path=path, method='post', account_id_required=True) + response = self.connector.make_request( + path=path, method="post", account_id_required=True + ) if response.status_code == 200: response_json = response.json() buckets = [] self._buckets_by_name = {} self._buckets_by_id = {} - for bucket_json in response_json['buckets']: - new_bucket = B2Bucket(connector=self.connector, parent_list=self, **bucket_json) + for bucket_json in response_json["buckets"]: + new_bucket = B2Bucket( + connector=self.connector, parent_list=self, **bucket_json + ) buckets.append(new_bucket) - self._buckets_by_name[bucket_json['bucketName']] = new_bucket - self._buckets_by_id[bucket_json['bucketId']] = new_bucket + self._buckets_by_name[bucket_json["bucketName"]] = new_bucket + self._buckets_by_id[bucket_json["bucketId"]] = new_bucket if retrieve: return buckets else: @@ -79,18 +88,22 @@ def create(self, bucket_name, security, configuration=None): if type(configuration) != dict and configuration is not None: raise B2InvalidBucketConfiguration params = { - 'bucketName': bucket_name, - 'bucketType': security, - #TODO: bucketInfo - #TODO: corsRules - #TODO: lifeCycleRules + "bucketName": bucket_name, + "bucketType": security, + # TODO: bucketInfo + # TODO: corsRules + # TODO: lifeCycleRules } - response = self.connector.make_request(path=path, method='post', params=params, account_id_required=True) + response = self.connector.make_request( + path=path, method="post", params=params, account_id_required=True + ) if response.status_code == 200: bucket_json = response.json() - new_bucket = B2Bucket(connector=self.connector, parent_list=self, **bucket_json) - self._buckets_by_name[bucket_json['bucketName']] = new_bucket - self._buckets_by_id[bucket_json['bucketId']] = new_bucket + new_bucket = B2Bucket( + connector=self.connector, parent_list=self, **bucket_json + ) + self._buckets_by_name[bucket_json["bucketName"]] = new_bucket + self._buckets_by_id[bucket_json["bucketId"]] = new_bucket return new_bucket else: - raise B2Exception.parse(response) \ No newline at end of file + raise B2Exception.parse(response) diff --git a/b2blaze/models/file_list.py b/b2blaze/models/file_list.py index ecf8c1e..991dd7c 100644 --- a/b2blaze/models/file_list.py +++ b/b2blaze/models/file_list.py @@ -3,15 +3,23 @@ """ from .b2_file import B2File -from ..utilities import b2_url_encode, get_content_length, get_part_ranges, decode_error, RangeStream, StreamWithHashProgress +from ..utilities import ( + b2_url_encode, + get_content_length, + get_part_ranges, + RangeStream, +) + from ..b2_exceptions import 
B2Exception, B2FileNotFoundError from multiprocessing.dummy import Pool as ThreadPool from ..api import API + class B2FileList(object): """ """ + def __init__(self, connector, bucket): """ @@ -36,22 +44,22 @@ def all(self, include_hidden=False, limit=None): return self._update_files_list(retrieve=True, limit=limit) else: results = self.all_file_versions(limit=limit) - versions = results['file_versions'] - file_ids = results['file_ids'] + versions = results["file_versions"] + file_ids = results["file_ids"] if versions: # Return only the first file from a given file with multiple versions files = [versions[f][0] for f in file_ids] return files - return [] # Return empty set on no results + return [] # Return empty set on no results def delete_all(self, confirm=False): - """ Delete all files in the bucket. + """ Delete all files in the bucket. Parameters: confirm: (bool) Safety check. Confirm deletion - """ + """ if not confirm: - raise Exception('This will delete all files! Pass confirm=True') - + raise Exception("This will delete all files! Pass confirm=True") + all_files = self.all(include_hidden=True) try: for f in all_files: @@ -60,9 +68,8 @@ def delete_all(self, confirm=False): raise B2Exception.parse(E) return [] - def _update_files_list(self, retrieve=False, limit=None): - """ Retrieve list of all files in bucket + """ Retrieve list of all files in bucket Parameters: limit: (int) Max number of file results, default 10000 retrieve: (bool) Refresh local store. (default: false) @@ -70,39 +77,39 @@ def _update_files_list(self, retrieve=False, limit=None): path = API.list_all_files files = [] new_files_to_retrieve = True - params = { - 'bucketId': self.bucket.bucket_id, - 'maxFileCount': limit or 10000 - } + params = {"bucketId": self.bucket.bucket_id, "maxFileCount": limit or 10000} while new_files_to_retrieve: - response = self.connector.make_request(path=path, method='post', params=params) + response = self.connector.make_request( + path=path, method="post", params=params + ) if response.status_code == 200: files_json = response.json() self._files_by_name = {} self._files_by_id = {} - for file_json in files_json['files']: - new_file = B2File(connector=self.connector, parent_list=self, **file_json) + for file_json in files_json["files"]: + new_file = B2File( + connector=self.connector, parent_list=self, **file_json + ) files.append(new_file) - self._files_by_name[file_json['fileName']] = new_file - self._files_by_id[file_json['fileId']] = new_file - if files_json['nextFileName'] is None: + self._files_by_name[file_json["fileName"]] = new_file + self._files_by_id[file_json["fileId"]] = new_file + if files_json["nextFileName"] is None: new_files_to_retrieve = False else: - params['startFileName'] = files_json['nextFileName'] + params["startFileName"] = files_json["nextFileName"] else: raise B2Exception.parse(response) if retrieve: return files - def get(self, file_name=None, file_id=None): """ Get a file by file name or id. 
Required: file_name or file_id Parameters: - file_name: (str) File name - file_id: (str) File ID + file_name: (str) File name + file_id: (str) File ID """ if file_name: file = self._get_by_name(file_name) @@ -110,13 +117,12 @@ def get(self, file_name=None, file_id=None): elif file_id: file = self._get_by_id(file_id) else: - raise ValueError('file_name or file_id must be passed') - - return file + raise ValueError("file_name or file_id must be passed") + return file def get_versions(self, file_name=None, file_id=None, limit=None): - """ Return list of all the versions of one file in current bucket. + """ Return list of all the versions of one file in current bucket. Required: file_id or file_name (either) @@ -127,16 +133,15 @@ def get_versions(self, file_name=None, file_id=None, limit=None): Returns: file_versions (list) B2FileObject of all file versions - """ + """ if file_name: file = self.get(file_name) elif file_id: file = self.get(file_id=file_id) else: - raise ValueError('Either file_id or file_name required for get_versions') + raise ValueError("Either file_id or file_name required for get_versions") return file.get_versions() - def all_file_versions(self, limit=None): """ Return all the versions of all files in a given bucket. @@ -144,88 +149,95 @@ def all_file_versions(self, limit=None): Params: limit: (int) Limit number of results returned (optional). Defaults to 10000 - Returns dict: + Returns dict: 'file_names': (list) String filenames 'file_ids': (list) File IDs 'file_versions': (dict) b2blaze File objects, keyed by file name - """ + """ path = API.list_file_versions file_versions = dict() file_names = [] file_ids = [] new_files_to_retrieve = True - params = { - 'bucketId': self.bucket.bucket_id, - 'maxFileCount': 10000 - } + params = {"bucketId": self.bucket.bucket_id, "maxFileCount": 10000} # Limit files if limit: - params['maxFileCount'] = limit + params["maxFileCount"] = limit while new_files_to_retrieve: - response = self.connector.make_request(path=path, method='post', params=params) + response = self.connector.make_request( + path=path, method="post", params=params + ) if response.status_code == 200: files_json = response.json() - for file_json in files_json['files']: - new_file = B2File(connector=self.connector, parent_list=self, **file_json) + for file_json in files_json["files"]: + new_file = B2File( + connector=self.connector, parent_list=self, **file_json + ) # Append file_id, file_name to lists - file_name, file_id = file_json['fileName'], file_json['fileId'] + file_name, file_id = file_json["fileName"], file_json["fileId"] file_names.append(file_name) file_ids.append(file_id) - + # Add file to list keyed by file_id if file_id in file_versions: file_versions[file_id].append(new_file) else: file_versions[file_id] = [new_file] - if files_json['nextFileName'] is None: + if files_json["nextFileName"] is None: new_files_to_retrieve = False else: - params['startFileName'] = files_json['nextFileName'] + params["startFileName"] = files_json["nextFileName"] else: raise B2Exception.parse(response) - return {'file_names': file_names, 'file_versions': file_versions, 'file_ids': file_ids} - + return { + "file_names": file_names, + "file_versions": file_versions, + "file_ids": file_ids, + } def _get_by_name(self, file_name): - """ Internal method, return single file by file name """ + """ Internal method, return single file by file name """ path = API.list_all_files - params = { - 'prefix': b2_url_encode(file_name), - 'bucketId': self.bucket.bucket_id - } + params = {"prefix": 
b2_url_encode(file_name), "bucketId": self.bucket.bucket_id} - response = self.connector.make_request(path, method='post', params=params) + response = self.connector.make_request(path, method="post", params=params) file_json = response.json() # Handle errors and empty files if not response.status_code == 200: raise B2Exception.parse(response) - if not len(file_json['files']) > 0: - raise B2FileNotFoundError('Filename {} not found'.format(file_name)) + if not len(file_json["files"]) > 0: + raise B2FileNotFoundError("Filename {} not found".format(file_name)) else: - return B2File(connector=self.connector, parent_list=self, **file_json['files'][0]) + return B2File( + connector=self.connector, parent_list=self, **file_json["files"][0] + ) def _get_by_id(self, file_id): - """ Internal method, return single file by file id """ + """ Internal method, return single file by file id """ path = API.file_info - params = { - 'fileId': file_id - } - response = self.connector.make_request(path, method='post', params=params) + params = {"fileId": file_id} + response = self.connector.make_request(path, method="post", params=params) if response.status_code == 200: file_json = response.json() return B2File(connector=self.connector, parent_list=self, **file_json) else: raise B2Exception.parse(response) - - def upload(self, contents, file_name, mime_content_type=None, content_length=None, progress_listener=None): + def upload( + self, + contents, + file_name, + mime_content_type=None, + content_length=None, + progress_listener=None, + ): """ :param contents: @@ -235,21 +247,28 @@ def upload(self, contents, file_name, mime_content_type=None, content_length=Non :param progress_listener: :return: """ - if file_name[0] == '/': + if file_name[0] == "/": file_name = file_name[1:] get_upload_url_path = API.upload_url - params = { - 'bucketId': self.bucket.bucket_id - } - upload_url_response = self.connector.make_request(path=get_upload_url_path, method='post', params=params) + params = {"bucketId": self.bucket.bucket_id} + upload_url_response = self.connector.make_request( + path=get_upload_url_path, method="post", params=params + ) if upload_url_response.status_code == 200: - upload_url = upload_url_response.json().get('uploadUrl', None) - auth_token = upload_url_response.json().get('authorizationToken', None) - upload_response = self.connector.upload_file(file_contents=contents, file_name=file_name, - upload_url=upload_url, auth_token=auth_token, - content_length=content_length, progress_listener=progress_listener) + upload_url = upload_url_response.json().get("uploadUrl", None) + auth_token = upload_url_response.json().get("authorizationToken", None) + upload_response = self.connector.upload_file( + file_contents=contents, + file_name=file_name, + upload_url=upload_url, + auth_token=auth_token, + content_length=content_length, + progress_listener=progress_listener, + ) if upload_response.status_code == 200: - new_file = B2File(connector=self.connector, parent_list=self, **upload_response.json()) + new_file = B2File( + connector=self.connector, parent_list=self, **upload_response.json() + ) # Update file list after upload self._update_files_list() return new_file @@ -258,8 +277,16 @@ def upload(self, contents, file_name, mime_content_type=None, content_length=Non else: raise B2Exception.parse(upload_url_response) - def upload_large_file(self, contents, file_name, part_size=None, num_threads=4, - mime_content_type=None, content_length=None, progress_listener=None): + def upload_large_file( + self, + contents, + 
file_name, + part_size=None, + num_threads=4, + mime_content_type=None, + content_length=None, + progress_listener=None, + ): """ :param contents: @@ -271,7 +298,7 @@ def upload_large_file(self, contents, file_name, part_size=None, num_threads=4, :param progress_listener: :return: """ - if file_name[0] == '/': + if file_name[0] == "/": file_name = file_name[1:] if part_size == None: part_size = self.connector.recommended_part_size @@ -279,48 +306,65 @@ def upload_large_file(self, contents, file_name, part_size=None, num_threads=4, content_length = get_content_length(contents) start_large_file_path = API.upload_large params = { - 'bucketId': self.bucket.bucket_id, - 'fileName': b2_url_encode(file_name), - 'contentType': mime_content_type or 'b2/x-auto' + "bucketId": self.bucket.bucket_id, + "fileName": b2_url_encode(file_name), + "contentType": mime_content_type or "b2/x-auto", } - large_file_response = self.connector.make_request(path=start_large_file_path, method='post', params=params) + large_file_response = self.connector.make_request( + path=start_large_file_path, method="post", params=params + ) if large_file_response.status_code == 200: - file_id = large_file_response.json().get('fileId', None) + file_id = large_file_response.json().get("fileId", None) get_upload_part_url_path = API.upload_large_part - params = { - 'fileId': file_id - } + params = {"fileId": file_id} pool = ThreadPool(num_threads) + def upload_part_worker(args): part_number, part_range = args offset, content_length = part_range - with open(contents.name, 'rb') as file: + with open(contents.name, "rb") as file: file.seek(offset) stream = RangeStream(file, offset, content_length) - upload_part_url_response = self.connector.make_request(path=get_upload_part_url_path, method='post', params=params) + upload_part_url_response = self.connector.make_request( + path=get_upload_part_url_path, method="post", params=params + ) if upload_part_url_response.status_code == 200: - upload_url = upload_part_url_response.json().get('uploadUrl') - auth_token = upload_part_url_response.json().get('authorizationToken') - upload_part_response = self.connector.upload_part(file_contents=stream, content_length=content_length, - part_number=part_number, upload_url=upload_url, - auth_token=auth_token, progress_listener=progress_listener) + upload_url = upload_part_url_response.json().get("uploadUrl") + auth_token = upload_part_url_response.json().get( + "authorizationToken" + ) + upload_part_response = self.connector.upload_part( + file_contents=stream, + content_length=content_length, + part_number=part_number, + upload_url=upload_url, + auth_token=auth_token, + progress_listener=progress_listener, + ) if upload_part_response.status_code == 200: - return upload_part_response.json().get('contentSha1', None) + return upload_part_response.json().get("contentSha1", None) else: raise B2Exception.parse(upload_part_response) else: raise B2Exception.parse(upload_part_url_response) - sha_list = pool.map(upload_part_worker, enumerate(get_part_ranges(content_length, part_size), 1)) + + sha_list = pool.map( + upload_part_worker, + enumerate(get_part_ranges(content_length, part_size), 1), + ) pool.close() pool.join() finish_large_file_path = API.upload_large_finish - params = { - 'fileId': file_id, - 'partSha1Array': sha_list - } - finish_large_file_response = self.connector.make_request(path=finish_large_file_path, method='post', params=params) + params = {"fileId": file_id, "partSha1Array": sha_list} + finish_large_file_response = 
self.connector.make_request( + path=finish_large_file_path, method="post", params=params + ) if finish_large_file_response.status_code == 200: - new_file = B2File(connector=self.connector, parent_list=self, **finish_large_file_response.json()) + new_file = B2File( + connector=self.connector, + parent_list=self, + **finish_large_file_response.json() + ) return new_file else: raise B2Exception.parse(finish_large_file_response) diff --git a/b2blaze/utilities.py b/b2blaze/utilities.py index cf5ce02..87c66b5 100644 --- a/b2blaze/utilities.py +++ b/b2blaze/utilities.py @@ -19,24 +19,27 @@ def b2_url_encode(s): - return quote(s.encode('utf-8')) + return quote(s.encode("utf-8")) def b2_url_decode(s): - return unquote_plus(str(s)).decode('utf-8') + return unquote_plus(str(s)).decode("utf-8") + def get_content_length(file): - if hasattr(file, 'name') and os.path.isfile(file.name): + if hasattr(file, "name") and os.path.isfile(file.name): return os.path.getsize(file.name) else: - raise Exception('Content-Length could not be automatically determined.') + raise Exception("Content-Length could not be automatically determined.") + def decode_error(response): try: response_json = response.json() - return str(response.status_code) + ' - ' + str(response_json) + return str(response.status_code) + " - " + str(response_json) except ValueError: - raise ValueError(str(response.status_code) + ' - Invalid JSON Response') + raise ValueError(str(response.status_code) + " - Invalid JSON Response") + def get_part_ranges(content_length, part_size): parts = [] @@ -49,6 +52,7 @@ def get_part_ranges(content_length, part_size): content_length -= part_size return parts + class RangeStream: """ Wraps a file-like object (read only) and reads the selected @@ -86,6 +90,7 @@ def read(self, size=None): self.remaining -= len(data) return data + class StreamWithHashProgress: """ Wraps a file-like object (read-only), hashes on-the-fly, and @@ -113,7 +118,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): return self.stream.__exit__(exc_type, exc_val, exc_tb) def read(self, size=None): - data = b'' + data = b"" if self.hash is None: # Read some bytes from stream if size is None: @@ -136,7 +141,7 @@ def read(self, size=None): else: # The end of stream was reached, return hash now size = size or len(self.hash) - data += str.encode(self.hash[self.hash_read:self.hash_read + size]) + data += str.encode(self.hash[self.hash_read : self.hash_read + size]) self.hash_read += size return data diff --git a/setup.py b/setup.py index 5d33deb..5923d95 100644 --- a/setup.py +++ b/setup.py @@ -2,32 +2,32 @@ from setuptools import setup, find_packages -VERSION = '0.2.1' +VERSION = "0.2.1" from os import path + this_directory = path.abspath(path.dirname(__file__)) -with open(path.join(this_directory, 'README.md')) as f: +with open(path.join(this_directory, "README.md")) as f: long_description = f.read() -setup(name='b2blaze', - version=VERSION, - description='Forked from George Sibble\'s B2Blaze (0.1.10). All credits to author. 
Original package: https://github.com/sibblegp/b2blaze', - long_description=long_description, - long_description_content_type='text/markdown', - packages=find_packages(), - author='George Sibble', - author_email='gsibble@gmail.com', - python_requires='>=2.7', - url='https://github.com/fieldse/b2blaze-fork', - install_requires=[ - 'requests==2.19.1' - ], - keywords='backblaze b2 cloud storage', - classifiers=[ - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.6', - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Libraries', - ], - license='MIT' +setup( + name="b2blaze", + version=VERSION, + description="Forked from George Sibble's B2Blaze (0.1.10). All credits to author. Original package: https://github.com/sibblegp/b2blaze", + long_description=long_description, + long_description_content_type="text/markdown", + packages=find_packages(), + author="George Sibble", + author_email="gsibble@gmail.com", + python_requires=">=2.7", + url="https://github.com/fieldse/b2blaze-fork", + install_requires=["requests==2.19.1"], + keywords="backblaze b2 cloud storage", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + "Intended Audience :: Developers", + "Topic :: Software Development :: Libraries", + ], + license="MIT", ) diff --git a/tests.py b/tests.py index ce026a9..e77041a 100644 --- a/tests.py +++ b/tests.py @@ -6,6 +6,7 @@ import pytest from b2blaze.b2_exceptions import B2Exception, B2RequestError, B2FileNotFoundError + class TestB2(object): """ Tests for the b2blaze library """ @@ -16,16 +17,15 @@ def setup_class(cls): :return: None """ cls.b2 = b2blaze.b2lib.B2() - timestamp = datetime.now().strftime('%Y-%m-%d-%H%M%S') - cls.bucket_name = 'testbucket-' + timestamp - print('test bucket: ', cls.bucket_name) + timestamp = datetime.now().strftime("%Y-%m-%d-%H%M%S") + cls.bucket_name = "testbucket-" + timestamp + print("test bucket: ", cls.bucket_name) # Helper methods def test_create_b2_instance(self): """Create a B2 instance """ b2 = b2blaze.b2lib.B2() - @classmethod def create_bucket(cls): return cls.b2.buckets.create(cls.bucket_name, security=cls.b2.buckets.public) @@ -35,17 +35,16 @@ def getbucket(cls): return cls.b2.buckets.get(bucket_name=cls.bucket_name) or cls.create_bucket() @classmethod - def upload_textfile(cls, contents="hello there", file_name='test/hello.txt'): + def upload_textfile(cls, contents="hello there", file_name="test/hello.txt"): """ Upload text file with name 'test/hello.txt' """ - contents=contents.encode('utf-8') # These fail unless encoded to UTF8 + contents = contents.encode("utf-8") # These fail unless encoded to UTF8 bucket = cls.getbucket() return bucket.files.upload(contents=contents, file_name=file_name) - @classmethod def is_b2_file(cls, obj): """ hacky method for checking object class/type is B2File""" - if 'B2File' in str(type(obj)): + if "B2File" in str(type(obj)): return True return False @@ -55,12 +54,14 @@ def is_b2_file(cls, obj): @pytest.mark.versions def test_create_bucket(self): """ Create a bucket by name. 
""" - self.bucket = self.b2.buckets.create(self.bucket_name, security=self.b2.buckets.public) + self.bucket = self.b2.buckets.create( + self.bucket_name, security=self.b2.buckets.public + ) assert self.bucket @pytest.mark.bucket def test_get_bucket(self): - """ Get a bucket by name """ + """ Get a bucket by name """ bucket = self.getbucket() assert bucket @@ -72,43 +73,41 @@ def test_get_all_buckets(self): @pytest.mark.bucket def test_get_nonexistent_bucket(self): - """ Get a bucket which doesn't exist should return None """ - bucket = self.b2.buckets.get(bucket_name='this doesnt exist') + """ Get a bucket which doesn't exist should return None """ + bucket = self.b2.buckets.get(bucket_name="this doesnt exist") assert not bucket @pytest.mark.files def test_create_file_and_retrieve_by_id(self): """ Create a file and retrieve by ID """ bucket = self.getbucket() - contents='Hello World!'.encode('utf-8') # These fail unless encoded to UTF8 - file = bucket.files.upload(contents=contents, file_name='test/hello.txt') + contents = "Hello World!".encode("utf-8") # These fail unless encoded to UTF8 + file = bucket.files.upload(contents=contents, file_name="test/hello.txt") file2 = bucket.files.get(file_id=file.file_id) # It should be a B2File - assert self.is_b2_file(file2), 'Should be a B2File object' - + assert self.is_b2_file(file2), "Should be a B2File object" @pytest.mark.files def test_direct_upload_file(self): """ Upload binary file """ bucket = self.getbucket() - binary_file = open('b2blaze/test_pic.jpg', 'rb') - uploaded_file = bucket.files.upload(contents=binary_file, file_name='test_pic2.jpg') + binary_file = open("b2blaze/test_pic.jpg", "rb") + uploaded_file = bucket.files.upload( + contents=binary_file, file_name="test_pic2.jpg" + ) binary_file.close() assert self.is_b2_file(uploaded_file) - @pytest.mark.files def test_get_all_files(self): """ Get all files from a bucket. 
Returned objects are B2Files """ bucket = self.getbucket() files = bucket.files.all() - print('test_get_files: all files: ', len(files)) + print("test_get_files: all files: ", len(files)) # check type - assert self.is_b2_file(files[0]), 'Should be a B2File object' - - + assert self.is_b2_file(files[0]), "Should be a B2File object" @pytest.mark.versions @pytest.mark.files @@ -117,9 +116,8 @@ def test_get_all_file_versions(self): bucket = self.getbucket() file = self.upload_textfile() files = bucket.files.all_file_versions() - print('test_get_all_file_versions: all versions: ', len(files['file_versions'])) - assert len(files['file_versions']) > 0, 'File versions should exist' - + print("test_get_all_file_versions: all versions: ", len(files["file_versions"])) + assert len(files["file_versions"]) > 0, "File versions should exist" @pytest.mark.files def test_get_file_by_name(self): @@ -128,8 +126,7 @@ def test_get_file_by_name(self): file = self.upload_textfile() # check type - assert self.is_b2_file(file), 'Should be a B2File object' - + assert self.is_b2_file(file), "Should be a B2File object" @pytest.mark.files def test_get_file_by_id(self): @@ -138,8 +135,7 @@ def test_get_file_by_id(self): file = self.upload_textfile() # check type - assert self.is_b2_file(file), 'Should be a B2File object' - + assert self.is_b2_file(file), "Should be a B2File object" @pytest.mark.versions @pytest.mark.files @@ -148,13 +144,12 @@ def test_get_file_versions(self): Returned data should be a list, and items should be of type B2File """ bucket = self.getbucket() - file = bucket.files.get(file_name='test/hello.txt') + file = bucket.files.get(file_name="test/hello.txt") versions = file.get_versions() - assert len(versions) > 0, 'File should have multiple versions' - - # check type - assert self.is_b2_file(versions[0]), 'Should be a B2File object' + assert len(versions) > 0, "File should have multiple versions" + # check type + assert self.is_b2_file(versions[0]), "Should be a B2File object" @pytest.mark.versions @pytest.mark.files @@ -163,10 +158,9 @@ def test_bucket_get_file_versions_by_name(self): Returned data should be a list, and items should be of type B2File """ bucket = self.getbucket() - versions = bucket.files.get_versions(file_name='test/hello.txt') - assert len(versions) > 0, 'File should have multiple versions' - assert self.is_b2_file(versions[0]), 'Should be a B2File object' - + versions = bucket.files.get_versions(file_name="test/hello.txt") + assert len(versions) > 0, "File should have multiple versions" + assert self.is_b2_file(versions[0]), "Should be a B2File object" @pytest.mark.versions @pytest.mark.files @@ -175,11 +169,10 @@ def test_bucket_get_file_versions_by_id(self): Returned data should be a list, and items should be of type B2File """ bucket = self.getbucket() - file = bucket.files.get(file_name='test/hello.txt') + file = bucket.files.get(file_name="test/hello.txt") versions = bucket.files.get_versions(file_id=file.file_id) - assert len(versions) > 0, 'File should have multiple versions' - assert self.is_b2_file(versions[0]), 'Should be a B2File object' - + assert len(versions) > 0, "File should have multiple versions" + assert self.is_b2_file(versions[0]), "Should be a B2File object" @pytest.mark.files @pytest.mark.b2errors @@ -187,10 +180,9 @@ def test_get_file_doesnt_exist(self): """ Get file which doesn't exist should raise B2FileNotFoundError, get by ID should raise B2RequestError """ bucket = self.getbucket() with pytest.raises(B2FileNotFoundError): - file = 
bucket.files.get(file_name='nope.txt') + file = bucket.files.get(file_name="nope.txt") with pytest.raises(B2RequestError): - file2 = bucket.files.get(file_id='abcd') - + file2 = bucket.files.get(file_id="abcd") @pytest.mark.files def test_download_file(self): @@ -200,11 +192,11 @@ def test_download_file(self): data = file.download() assert len(data.read()) > 0 - @pytest.mark.files def test_download_url(self): """ Download file url should be publicly GET accessible """ import requests + bucket = self.getbucket() file = self.upload_textfile() url = file.url @@ -213,31 +205,31 @@ def test_download_url(self): print(downloaded_file.json()) raise ValueError - @pytest.mark.files @pytest.mark.b2errors - def test_hide_file(self): + def test_hide_file(self): """ Should create + upload, then hide / soft-delete a file by name. File should no longer be returned when searched by name in bucket. """ bucket = self.getbucket() - upload = self.upload_textfile(contents='Delete this', file_name='test/deleteme.txt') - + upload = self.upload_textfile( + contents="Delete this", file_name="test/deleteme.txt" + ) + # Delete - print('test_delete_file: upload.file_id', upload.file_id) - print('test_delete_file: upload.file_name', upload.file_name) + print("test_delete_file: upload.file_id", upload.file_id) + print("test_delete_file: upload.file_name", upload.file_name) upload.hide() # Refresh bucket; getting the the file should fail with pytest.raises(B2FileNotFoundError): bucket = self.getbucket() file = bucket.files.get(file_name=upload.file_name) - assert not file, 'Deleted file should not be in files list' - + assert not file, "Deleted file should not be in files list" @pytest.mark.files @pytest.mark.versions - def test_delete_file_version(self): + def test_delete_file_version(self): """ Delete a file version by name. It should still exist when searched.""" bucket = self.getbucket() @@ -248,46 +240,44 @@ def test_delete_file_version(self): # Update versions versions = file.get_versions() - assert len(versions) > 1, 'File should should have multiple version' + assert len(versions) > 1, "File should should have multiple version" # Delete version - print('test_delete_file_version: file_name', file.file_name) - print('test_delete_file_version: file_id', file.file_id) + print("test_delete_file_version: file_name", file.file_name) + print("test_delete_file_version: file_id", file.file_id) file.delete() # Refresh bucket; getting the the file should fail file2 = bucket.files.get(file_name=file.file_name) - assert file2, 'Deleted file version only, file should still exist' - assert self.is_b2_file(file2), 'Should be a B2File object' - + assert file2, "Deleted file version only, file should still exist" + assert self.is_b2_file(file2), "Should be a B2File object" # @pytest.mark.versions @pytest.mark.files - def test_delete_all_file_versions(self): + def test_delete_all_file_versions(self): """ Delete all versions of a file. 
It should be gone completely from bucket.""" bucket = self.getbucket() # Create file, make sure we have multiple versions - contents='Hello World!'.encode('utf-8') # These fail unless encoded to UTF8 - upload = bucket.files.upload(contents=contents, file_name='test/hello.txt') - + contents = "Hello World!".encode("utf-8") # These fail unless encoded to UTF8 + upload = bucket.files.upload(contents=contents, file_name="test/hello.txt") + # Get # versions = bucket.files.get_versions(file_name='test/hello.txt') - file = bucket.files.get(file_name='test/hello.txt') + file = bucket.files.get(file_name="test/hello.txt") versions = file.get_versions() - assert len(versions) > 0, 'File should should have multiple version' + assert len(versions) > 0, "File should should have multiple version" # Delete - print('test_delete_all_file_versions: file_name', file.file_name) - print('test_delete_all_file_versions: file_id', file.file_id) + print("test_delete_all_file_versions: file_name", file.file_name) + print("test_delete_all_file_versions: file_id", file.file_id) file.delete_all_versions(confirm=True) # Refresh bucket; getting the the file should fail with pytest.raises(B2FileNotFoundError): bucket = self.getbucket() file2 = bucket.files.get(file_name=file.file_name) - assert not file2, 'Deleted all file versions, file should not exist' - + assert not file2, "Deleted all file versions, file should not exist" @pytest.mark.bucket def test_delete_non_empty_bucket(self): @@ -296,22 +286,23 @@ def test_delete_non_empty_bucket(self): # Upload file self.upload_textfile() - assert len(bucket.files.all()) > 0, "Bucket should still contain files" - + assert len(bucket.files.all()) > 0, "Bucket should still contain files" + # Should raise exception on non-empty without confirm - with pytest.raises(B2RequestError): - bucket.delete() # Try to delete without confirmation + with pytest.raises(B2RequestError): + bucket.delete() # Try to delete without confirmation # Bucket should still exist - assert self.b2.buckets.get(bucket_name=bucket.bucket_name), 'bucket should still exist' - + assert self.b2.buckets.get( + bucket_name=bucket.bucket_name + ), "bucket should still exist" + # # Delete with confirmation # bucket.delete(delete_files=True, confirm_non_empty=True) - + # # Bucket should be gone # assert self.b2.buckets.get(bucket_name=bucket.bucket_name), 'bucket should not exist' - @pytest.mark.bucket @pytest.mark.files @pytest.mark.versions @@ -321,12 +312,11 @@ def test_bucket_delete_all_files(self): self.upload_textfile() files = bucket.files.all() - assert len(files) > 0, 'Bucket should still contain files' - + assert len(files) > 0, "Bucket should still contain files" + # Delete all files bucket.files.delete_all(confirm=True) - assert len(bucket.files.all()) == 0, 'Bucket should be empty' - + assert len(bucket.files.all()) == 0, "Bucket should be empty" @pytest.mark.bucket def test_delete_bucket(self): @@ -335,18 +325,25 @@ def test_delete_bucket(self): # Ascertain it's empty files_new = bucket.files.all(include_hidden=True) - assert len(files_new) == 0, "Bucket should contain no files but contains {}".format(len(files_new)) - + assert ( + len(files_new) == 0 + ), "Bucket should contain no files but contains {}".format(len(files_new)) + # Delete bucket.delete() # Confirm bucket is gone. bucket.get() nonexistent should return None. 
-        assert not self.b2.buckets.get(bucket_name=bucket.bucket_name), 'Deleted bucket still exists'
+        assert not self.b2.buckets.get(
+            bucket_name=bucket.bucket_name
+        ), "Deleted bucket still exists"
+

 def main():
     import pytest
-    pytest_args = [ __file__, '--verbose']
+
+    pytest_args = [__file__, "--verbose"]
     pytest.main(pytest_args)

-if __name__ == '__main__':
-    main()
\ No newline at end of file
+
+if __name__ == "__main__":
+    main()
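Taken together, the library's happy path after this patch mirrors what tests.py exercises. A minimal end-to-end sketch (not part of the patch; the bucket name is a placeholder and credentials come from the environment):

    # Sketch only: create a bucket, upload a small file, read it back, clean up.
    from b2blaze import B2

    b2 = B2()  # expects B2_KEY_ID / B2_APPLICATION_KEY in the environment
    bucket = b2.buckets.create("my-unique-bucket-name", security=b2.buckets.private)

    f = bucket.files.upload(contents="hello".encode("utf-8"), file_name="test/hello.txt")
    print(f.url)                # download URL (requires auth for private buckets)
    print(f.download().read())  # b'hello'

    bucket.files.delete_all(confirm=True)  # beware: one API call per file version
    bucket.delete()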