Merge pull request #777 from Gluejar/doab-misc

Doab misc
eshellman 2018-05-10 22:01:43 -04:00 committed by GitHub
commit 95b8276829
5 changed files with 35 additions and 16 deletions

View File

@@ -369,7 +369,7 @@ DROPBOX_DL = re.compile(r'"(https://dl.dropboxusercontent.com/content_link/[^"]+
 def dl_online(ebook):
     if ebook.format != 'online':
-        return
+        return None, False
     if ebook.url.find(u'dropbox.com/s/') >= 0:
         response = requests.get(ebook.url, headers={"User-Agent": settings.USER_AGENT})
@@ -387,7 +387,7 @@ def dl_online(ebook):
 def make_dl_ebook(url, ebook):
     if EbookFile.objects.filter(source=ebook.url):
-        return EbookFile.objects.filter(source=ebook.url)[0]
+        return EbookFile.objects.filter(source=ebook.url)[0], False
     response = requests.get(url, headers={"User-Agent": settings.USER_AGENT})
     if response.status_code == 200:
         filesize = int(response.headers.get("Content-Length", 0))
@@ -413,7 +413,7 @@ def make_dl_ebook(url, ebook):
         )
         new_ebf.ebook = new_ebook
         new_ebf.save()
-    return new_ebf
+    return new_ebf, True
 
 def type_for_url(url, content_type=None):
     if not url:

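Note: dl_online() and make_dl_ebook() now return a (EbookFile, created) pair instead of a bare EbookFile or None, in the spirit of Django's get_or_create(). A minimal sketch of a caller adapted to the new signature; the messages and variable names are illustrative, not part of this commit:

    ebf, created = dl_online(ebook)   # was: ebf = dl_online(ebook)
    if ebf and created:
        print('downloaded a new file for %s' % ebook.url)   # fresh EbookFile saved
    elif ebf:
        print('already had a file for %s' % ebook.url)      # existing EbookFile reused
    else:
        print('could not harvest %s' % ebook.url)           # nothing could be harvested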
View File

@@ -12,8 +12,8 @@ class Command(BaseCommand):
         onlines = Ebook.objects.filter(format='online')
         done = 0
         for online in onlines:
-            new_ebf = dl_online(online)
-            if new_ebf:
+            new_ebf, new = dl_online(online)
+            if new_ebf and new:
                 done += 1
                 if done > limit:
                     break

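With the tuple return, the harvesting command counts only freshly downloaded files toward its per-run limit, so ebooks whose files were already harvested no longer use up the quota. The loop's intent, restated with comments (names as in the diff above; illustrative only):

    done = 0
    for online in Ebook.objects.filter(format='online'):
        new_ebf, new = dl_online(online)
        if new_ebf and new:      # an EbookFile was actually created this run
            done += 1
            if done > limit:     # stop after `limit` new downloads
                break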
View File

@@ -42,7 +42,7 @@ OTHER_ID_CHOICES = (
     ('edid', 'pragmatic edition ID'),
 )
 
-WORK_IDENTIFIERS = ('doi','olwk','glue','ltwk', 'http')
+WORK_IDENTIFIERS = ('doi','olwk','glue','ltwk', 'http', 'doab')
 
 ID_CHOICES_MAP = dict(ID_CHOICES)

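Adding 'doab' to WORK_IDENTIFIERS marks DOAB identifiers as work-level rather than edition-level, which is what the edit_edition change later in this commit keys on. A condensed sketch of that branch (guards from the view omitted; illustrative only):

    if id_type in WORK_IDENTIFIERS:
        # 'doab' now takes this branch: the identifier hangs off the Work
        edition.work.identifiers.filter(type=id_type).delete()
    else:
        # edition-level identifiers stay on the Edition
        edition.identifiers.filter(type=id_type).delete()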
View File

@@ -149,14 +149,27 @@ class EditionForm(forms.ModelForm):
         id_type = self.cleaned_data['id_type']
         id_value = self.cleaned_data.get('id_value','').strip()
         if id_value:
-            identifier = Identifier.objects.filter(type=id_type, value=id_value)
-            if identifier:
-                err_msg = "{} is a duplicate for work #{}.".format(identifier[0], identifier[0].work_id)
-                self.add_error('id_value', forms.ValidationError(err_msg))
             try:
-                self.cleaned_data['id_value'] = identifier_cleaner(id_type)(id_value)
+                id_value = identifier_cleaner(id_type)(id_value)
+                identifier = Identifier.objects.filter(type=id_type, value=id_value)
+                ident = identifier[0] if identifier else None
+                if not ident or not self.instance:
+                    self.cleaned_data['id_value'] = id_value
+                elif ident.edition_id == self.instance.id:
+                    self.cleaned_data['id_value'] = id_value
+                elif not ident.edition_id and ident.work_id == self.instance.work_id:
+                    self.cleaned_data['id_value'] = id_value
+                else:
+                    if ident.edition_id:
+                        err_msg = "{} is a duplicate for edition #{}.".format(id_value, ident.edition_id)
+                    else:
+                        err_msg = "{} is a duplicate for work #{}.".format(id_value, ident.work_id)
+                    self.add_error('id_value', forms.ValidationError(err_msg))
             except forms.ValidationError, ve:
-                self.add_error('id_value', forms.ValidationError('{}: {}'.format(ve.message, id_value)))
+                self.add_error(
+                    'id_value',
+                    forms.ValidationError('{}: {}'.format(ve.message, id_value))
+                )
         return self.cleaned_data
 
     class Meta:

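The reworked validation now runs identifier_cleaner() first and only then looks for an existing Identifier; a match is treated as a duplicate only when it belongs to a different edition or work than the one being edited. A paraphrase of that rule as a hypothetical helper (not part of the PR):

    def is_duplicate(ident, instance):
        """True only when an existing Identifier points at some other edition/work."""
        if ident is None or instance is None:
            return False                 # no existing identifier, or unbound form
        if ident.edition_id == instance.id:
            return False                 # same edition: re-entering its own id
        if not ident.edition_id and ident.work_id == instance.work_id:
            return False                 # work-level id already on this work
        return True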
View File

@@ -302,11 +302,17 @@ def edit_edition(request, work_id, edition_id, by=None):
             id_type = form.cleaned_data['id_type']
             id_val = form.cleaned_data['id_value']
-            if id_val == 'delete':
-                if edition.identifiers.exclude(type=id_type):
-                    edition.identifiers.filter(type=id_type).delete()
+            if id_val == 'delete':
+                if id_type in WORK_IDENTIFIERS:
+                    if edition.work.identifiers.exclude(type=id_type):
+                        edition.work.identifiers.filter(type=id_type).delete()
+                    else:
+                        alert = ('Can\'t delete identifier - must have at least one left.')
                 else:
-                    alert = ('Can\'t delete identifier - must have at least one left.')
+                    if edition.identifiers.exclude(type=id_type):
+                        edition.identifiers.filter(type=id_type).delete()
+                    else:
+                        alert = ('Can\'t delete identifier - must have at least one left.')
             elif id_val:
                 models.Identifier.set(
                     type=id_type,