From f197d5227f4abc25a9001b8ecf856c8cb3a4a4f0 Mon Sep 17 00:00:00 2001
From: eric
Date: Wed, 19 Jun 2019 15:03:16 -0400
Subject: [PATCH] always close cloudstorage sessions

1. propagate proper pdf filesize handling to GDrive and MSDrive classes
2. use the session as a context manager to ensure it closes even if an
   exception is unhandled.
---
 Dropbox.py | 14 ++++++++------
 GDrive.py  | 27 ++++++++++++++-------------
 MSDrive.py | 35 +++++++++++++++++------------------
 3 files changed, 39 insertions(+), 37 deletions(-)

diff --git a/Dropbox.py b/Dropbox.py
index 4a5452b..916b4f5 100644
--- a/Dropbox.py
+++ b/Dropbox.py
@@ -57,9 +57,11 @@ class Dropbox (CloudStorage.CloudStorage):
             'Content-Type' : 'application/octet-stream',
             'Dropbox-API-Arg' : json.dumps (parameters)
         }
-        with closing (session.post (self.upload_endpoint,
-                                    data = response.content,
-                                    headers = headers)) as r:
-            if 'error_summary' in r.text:
-                CloudStorage.error_log (r.text)
-            r.raise_for_status ()
+        data = response.content
+        with session as s:
+            with closing (s.post (self.upload_endpoint,
+                                  data = data,
+                                  headers = headers)) as r:
+                if 'error_summary' in r.text:
+                    CloudStorage.error_log (r.text)
+                r.raise_for_status ()
diff --git a/GDrive.py b/GDrive.py
index b759f3b..070c419 100644
--- a/GDrive.py
+++ b/GDrive.py
@@ -54,19 +54,20 @@ class GDrive (CloudStorage.CloudStorage):
         }
         headers = {
             'X-Upload-Content-Type': request.headers['Content-Type'],
-            'X-Upload-Content-Length': request.headers['Content-Length'],
+            'X-Upload-Content-Length': str(len(request.content)),
             'Content-Type': 'application/json; charset=UTF-8',
         }
-        with closing (session.post (self.upload_endpoint,
-                                    data = json.dumps (file_metadata),
-                                    headers = headers)) as r2:
-            r2.raise_for_status ()
-            session_uri = r2.headers['Location']
+        with session as s:
+            with closing (s.post (self.upload_endpoint,
+                                  data = json.dumps (file_metadata),
+                                  headers = headers)) as r2:
+                r2.raise_for_status ()
+                session_uri = r2.headers['Location']
 
-        headers = {
-            'Content-Type': request.headers['Content-Type'],
-        }
-        with closing (session.put (session_uri,
-                                   data = request.iter_content (1024 * 1024),
-                                   headers = headers)) as r3:
-            r3.raise_for_status ()
+            headers = {
+                'Content-Type': request.headers['Content-Type'],
+            }
+            with closing (s.put (session_uri,
+                                 data = request.iter_content (1024 * 1024),
+                                 headers = headers)) as r3:
+                r3.raise_for_status ()
diff --git a/MSDrive.py b/MSDrive.py
index 2c6ad3f..965ba86 100644
--- a/MSDrive.py
+++ b/MSDrive.py
@@ -50,7 +50,7 @@ class MSDrive(CloudStorage.CloudStorage):
             'description': 'A Project Gutenberg Ebook',
             "@microsoft.graph.conflictBehavior": "rename",
         }
-        filesize = int(response.headers['Content-Length'])
+        filesize = len(response.content)
         url = self.upload_endpoint.format(filename=filename)
         chunk_size = 327680  # weird onedrive thing related to FAT tables
         upload_data = session.post(url, json={'item': item_data}).json()
@@ -60,21 +60,20 @@ class MSDrive(CloudStorage.CloudStorage):
                 'Content-Length': str(end - start + 1),
                 'Content-Range': 'bytes {}-{}/{}'.format(start, end, filesize)
             }
+        with session as s:
+            if 'uploadUrl' in upload_data:
+                session_uri = upload_data['uploadUrl']
+                start = 0
+                end = min(chunk_size - 1, filesize - 1)
 
-        if 'uploadUrl' in upload_data:
-            session_uri = upload_data['uploadUrl']
-            start = 0
-            end = min(chunk_size - 1, filesize - 1)
-
-        for chunk in response.iter_content(chunk_size):
-            r = session.put(
-                session_uri,
-                data=chunk,
-                headers=headers(start, end, filesize),
-            )
-            start = start + chunk_size
-            end = min(end + chunk_size, filesize - 1)
-            r.raise_for_status()
-        else:
-            CloudStorage.log('no uploadUrl in %s' % upload_data)
-        session.close()
+                for chunk in response.iter_content(chunk_size):
+                    r = s.put(
+                        session_uri,
+                        data=chunk,
+                        headers=headers(start, end, filesize),
+                    )
+                    start = start + chunk_size
+                    end = min(end + chunk_size, filesize - 1)
+                    r.raise_for_status()
+            else:
+                CloudStorage.log('no uploadUrl in %s' % upload_data)