always close cloudstorage sessions
1. Propagate proper PDF filesize handling to the GDrive and MSDrive classes.
2. Use the session as a context manager to ensure it closes even if an exception goes unhandled.

fix_pdf
parent ee826a5148
commit f197d5227f
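Point 2 works because requests.Session implements the context-manager protocol: entering `with session as s:` returns the session itself, and close() is guaranteed to run on exit, even when the block raises. A minimal sketch of the pattern (the endpoint and payload below are placeholders, not values from this repo):

import requests
from contextlib import closing

upload_endpoint = 'https://example.com/upload'  # hypothetical endpoint
payload = b'%PDF-1.4 ...'                       # hypothetical body

with requests.Session() as s:                   # s.close() runs even on error
    with closing(s.post(upload_endpoint, data=payload)) as r:
        r.raise_for_status()                    # an exception here still closes both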
Dropbox.py (14 lines changed)
@@ -57,9 +57,11 @@ class Dropbox (CloudStorage.CloudStorage):
             'Content-Type' : 'application/octet-stream',
             'Dropbox-API-Arg' : json.dumps (parameters)
         }
-        with closing (session.post (self.upload_endpoint,
-                                    data = response.content,
-                                    headers = headers)) as r:
-            if 'error_summary' in r.text:
-                CloudStorage.error_log (r.text)
-            r.raise_for_status ()
+        data = response.content
+        with session as s:
+            with closing (s.post (self.upload_endpoint,
+                                  data = data,
+                                  headers = headers)) as r:
+                if 'error_summary' in r.text:
+                    CloudStorage.error_log (r.text)
+                r.raise_for_status ()
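Note the shape of the new Dropbox code: the body is read into `data` first, the session is entered, and contextlib.closing still wraps the POST so the response's connection is released even if the `error_summary` check raises. A sketch of that nesting, with a placeholder URL and body (the real call also needs an Authorization header and the Dropbox-API-Arg parameters shown in the diff):

import requests
from contextlib import closing

url = 'https://example.com/2/files/upload'      # placeholder for the upload endpoint
data = b'file bytes'                            # placeholder for response.content

with requests.Session() as s:
    with closing(s.post(url, data=data,
                        headers={'Content-Type': 'application/octet-stream'})) as r:
        if 'error_summary' in r.text:           # Dropbox reports errors in the body
            print(r.text)                       # stand-in for CloudStorage.error_log
        r.raise_for_status()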
GDrive.py (27 lines changed)
@@ -54,19 +54,20 @@ class GDrive (CloudStorage.CloudStorage):
         }
         headers = {
             'X-Upload-Content-Type': request.headers['Content-Type'],
-            'X-Upload-Content-Length': request.headers['Content-Length'],
+            'X-Upload-Content-Length': str(len(request.content)),
             'Content-Type': 'application/json; charset=UTF-8',
         }
-        with closing (session.post (self.upload_endpoint,
-                                    data = json.dumps (file_metadata),
-                                    headers = headers)) as r2:
-            r2.raise_for_status ()
-            session_uri = r2.headers['Location']
+        with session as s:
+            with closing (s.post (self.upload_endpoint,
+                                  data = json.dumps (file_metadata),
+                                  headers = headers)) as r2:
+                r2.raise_for_status ()
+                session_uri = r2.headers['Location']
 
-        headers = {
-            'Content-Type': request.headers['Content-Type'],
-        }
-        with closing (session.put (session_uri,
-                                   data = request.iter_content (1024 * 1024),
-                                   headers = headers)) as r3:
-            r3.raise_for_status ()
+            headers = {
+                'Content-Type': request.headers['Content-Type'],
+            }
+            with closing (s.put (session_uri,
+                                 data = request.iter_content (1024 * 1024),
+                                 headers = headers)) as r3:
+                r3.raise_for_status ()
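The one-line filesize fix here is the commit's first point: `request.headers['Content-Length']` is the transfer size on the wire, which can be absent or differ from the decoded body when the origin serves the file chunked or compressed (apparently the PDF case), while `len(request.content)` counts the bytes actually in hand. A sketch of the discrepancy (the URL is a placeholder):

import requests

r = requests.get('https://example.com/ebook.pdf')   # hypothetical download
wire_size = r.headers.get('Content-Length')         # may be missing, or the compressed size
real_size = len(r.content)                          # bytes after decoding; what the upload needs
print(wire_size, real_size)                         # these can disagree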
MSDrive.py (35 lines changed)
@@ -50,7 +50,7 @@ class MSDrive(CloudStorage.CloudStorage):
             'description': 'A Project Gutenberg Ebook',
             "@microsoft.graph.conflictBehavior": "rename",
         }
-        filesize = int(response.headers['Content-Length'])
+        filesize = len(response.content)
         url = self.upload_endpoint.format(filename=filename)
         chunk_size = 327680 # weird onedrive thing related to FAT tables
         upload_data = session.post(url, json={'item': item_data}).json()
@@ -60,21 +60,20 @@ class MSDrive(CloudStorage.CloudStorage):
                 'Content-Length': str(end - start + 1),
                 'Content-Range': 'bytes {}-{}/{}'.format(start, end, filesize)
             }
-        if 'uploadUrl' in upload_data:
-            session_uri = upload_data['uploadUrl']
-            start = 0
-            end = min(chunk_size - 1, filesize - 1)
+        with session as s:
+            if 'uploadUrl' in upload_data:
+                session_uri = upload_data['uploadUrl']
+                start = 0
+                end = min(chunk_size - 1, filesize - 1)
 
-            for chunk in response.iter_content(chunk_size):
-                r = session.put(
-                    session_uri,
-                    data=chunk,
-                    headers=headers(start, end, filesize),
-                )
-                start = start + chunk_size
-                end = min(end + chunk_size, filesize - 1)
-                r.raise_for_status()
-        else:
-            CloudStorage.log('no uploadUrl in %s' % upload_data)
-            session.close()
+                for chunk in response.iter_content(chunk_size):
+                    r = s.put(
+                        session_uri,
+                        data=chunk,
+                        headers=headers(start, end, filesize),
+                    )
+                    start = start + chunk_size
+                    end = min(end + chunk_size, filesize - 1)
+                    r.raise_for_status()
+            else:
+                CloudStorage.log('no uploadUrl in %s' % upload_data)
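The chunk arithmetic above is easy to misread, so here is the same loop in isolation. `chunk_size = 327680` is 320 KiB, the fragment granularity OneDrive's upload sessions expect, and Content-Range is inclusive, hence the `- 1`s. The filesize below is a made-up example:

chunk_size = 327680                  # 320 KiB fragments, as required by the upload session
filesize = 1000000                   # hypothetical PDF size in bytes

start = 0
end = min(chunk_size - 1, filesize - 1)
while start < filesize:
    # e.g. first iteration prints "Content-Range: bytes 0-327679/1000000"
    print('Content-Range: bytes {}-{}/{}'.format(start, end, filesize))
    start += chunk_size
    end = min(end + chunk_size, filesize - 1)

Walking it through: three full 320 KiB fragments cover bytes 0-983039, and the final pass sends the short tail 983040-999999, so every byte is uploaded exactly once.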