regluit/core/loaders/doab.py

501 lines
18 KiB
Python
Raw Normal View History

2016-11-29 20:37:02 +00:00
#!/usr/bin/env python
# encoding: utf-8
import datetime
import logging
2016-11-29 20:37:02 +00:00
import re
import requests
2018-04-07 22:38:33 +00:00
from django.db.models import Q
from django.core.files.base import ContentFile
2018-04-07 22:38:33 +00:00
from django.core.files.storage import default_storage
from oaipmh.client import Client
2021-03-03 23:00:40 +00:00
from oaipmh.error import IdDoesNotExistError, NoRecordsMatchError
from oaipmh.metadata import MetadataRegistry
from regluit.core import bookloader, cc
2018-04-07 22:38:33 +00:00
from regluit.core import models, tasks
from regluit.core.bookloader import merge_works
2020-08-16 00:21:56 +00:00
from regluit.core.models.loader import type_for_url
from regluit.core.validation import identifier_cleaner, valid_subject, explode_bics
2018-04-12 19:08:29 +00:00
from . import scrape_language
2021-03-03 23:00:40 +00:00
from .doab_utils import doab_lang_to_iso_639_1, doab_cover, doab_reader, online_to_download
logger = logging.getLogger(__name__)
def unlist(alist):
    """Return the first element of *alist*, or None when it is empty or None."""
    if alist:
        return alist[0]
    return None
2018-04-07 22:38:33 +00:00
# Springer serves some covers from an ftp URL that embeds the 13-digit ISBN;
# capture just the digits so they can be dropped into SPRINGER_IMAGE below.
SPRINGER_COVER = re.compile(r'ftp.+springer\.de.+(\d{13})\.jpg$', flags=re.U)
SPRINGER_IMAGE = u'https://images.springer.com/sgw/books/medium/{}.jpg'

def store_doab_cover(doab_id, redo=False):
    """
    Fetch the cover image for a DOAB record and save it in default_storage.

    returns tuple: 1) cover URL, 2) whether newly created (boolean)
    """
    if not doab_id:
        return (None, False)

    cover_file_name = '/doab/%s' % doab_id

    # if we don't want to redo and the cover exists, return the URL of the cover
    if not redo and default_storage.exists(cover_file_name):
        return (default_storage.url(cover_file_name), False)

    # download cover image to cover_file
    url = doab_cover(doab_id)
    if not url:
        return (None, False)
    try:
        r = requests.get(url, allow_redirects=False) # requests doesn't handle ftp redirects.
        if r.status_code == 302:
            redirurl = r.headers['Location']
            if redirurl.startswith(u'ftp'):
                # rewrite Springer's ftp cover URL to the equivalent https one
                springerftp = SPRINGER_COVER.match(redirurl)
                if springerftp:
                    # BUGFIX: use group(1) (the ISBN string); groups(1) returns
                    # a tuple, which formatted into a garbage URL.
                    redirurl = SPRINGER_IMAGE.format(springerftp.group(1))
                    r = requests.get(redirurl)
            else:
                r = requests.get(url)
        else:
            r = requests.get(url)
        cover_file = ContentFile(r.content)
        content_type = r.headers.get('content-type', '')
        if 'image/' not in content_type:
            # got an HTML error page or similar instead of an image
            logger.warning('Non-image returned for doab_id=%s', doab_id)
            return (None, False)
        cover_file.content_type = content_type

        default_storage.save(cover_file_name, cover_file)
        return (default_storage.url(cover_file_name), True)
    except Exception as e:
        # best-effort: if there is a problem, return None for cover URL
        logger.warning('Failed to make cover image for doab_id=%s: %s', doab_id, e)
        return (None, False)
2018-04-18 15:29:57 +00:00
def update_cover_doab(doab_id, edition, store_cover=True, redo=True):
    """
    update the cover url for work with doab_id
    if store_cover is True, use the cover from our own storage
    """
    if store_cover:
        cover_url, _created = store_doab_cover(doab_id, redo=redo)
    else:
        cover_url = doab_cover(doab_id)
    if cover_url is None:
        return None
    edition.cover_image = cover_url
    edition.save()
    # verify thumbnails can actually be generated from the new cover
    if not (edition.cover_image_small() and edition.cover_image_thumbnail()):
        # oh well
        logger.warning("Couldn't make thumbnails for %s using %s", doab_id, cover_url)
        edition.cover_image = None
        edition.save()
    return cover_url
2016-10-12 20:07:54 +00:00
def attach_more_doab_metadata(edition, description, subjects,
                              publication_date, publisher_name=None, language=None,
                              dois=None, authors=None, editors=None):
    """
    for given edition, attach description, subjects, publication date to
    corresponding Edition and Work
    """
    # backfill publication date and publisher when missing
    if not edition.publication_date:
        edition.publication_date = publication_date
    if not edition.publisher_name:
        edition.set_publisher(publisher_name)
    edition.save()

    work = edition.work
    # attach description to work if it's not empty
    if description and not work.description:
        work.description = description.replace('\r\n', '\n')
    # update subjects
    for subject_name in explode_bics(subjects):
        if valid_subject(subject_name):
            models.Subject.set_by_name(subject_name, work=work)
    # set reading level of work if it's empty; doab is for adults.
    if not work.age_level:
        work.age_level = '18-'
    # 'xx' is the unknown-language sentinel; don't overwrite with it
    if language and language != 'xx':
        work.language = language
    work.save()

    if authors or editors:
        authlist = creator_list(authors, editors)
        # replace the author list only if we have at least as many entries
        if edition.authors.all().count() < len(authlist):
            edition.authors.clear()
            if authlist is not None:
                for [rel, auth] in authlist:
                    edition.add_author(auth, rel)

    # record the first doi, but only if the work has none yet
    for doi in dois if dois else []:
        if not edition.work.doi:
            models.Identifier.set('doi', doi, work=edition.work)
        break
    return edition
2016-10-28 18:40:16 +00:00
def add_all_isbns(isbns, work, language=None, title=None):
    """
    Load an edition for every isbn; return the (possibly merged) work and the
    first edition found or created, either of which may be None.
    """
    first_edition = None
    for isbn in isbns:
        new_edition = bookloader.add_by_isbn(isbn, work, language=language, title=title)
        if not new_edition:
            continue
        if first_edition is None:
            first_edition = new_edition
        if work and (new_edition.work_id != work.id):
            if work.doab and new_edition.work.doab and work.doab != new_edition.work.doab:
                # two distinct doab works collided: merge into the older one
                if work.created < new_edition.work.created:
                    work = merge_works(work, new_edition.work)
                else:
                    work = merge_works(new_edition.work, work)
            else:
                work = new_edition.work
    return work, first_edition
2016-10-28 18:40:16 +00:00
def load_doab_edition(title, doab_id, url, format, rights,
                      language, isbns, provider, dois=None, **kwargs):
    """
    load a record from doabooks.org represented by input parameters and return an ebook
    """
    logger.info('load doab %s %s %s %s %s', doab_id, format, rights, language, provider)
    url = url.strip()
    if language and isinstance(language, list):
        language = language[0]
    if language == 'xx' and format == 'online':
        # 'xx' means unknown language; try scraping the landing page for it
        language = scrape_language(url)
    # check to see whether the Edition hasn't already been loaded first
    # search by url
    ebooks = models.Ebook.objects.filter(url=url)

    # 1 match
    # > 1 matches
    # 0 match
    # simplest case -- if match (1 or more), we could check whether any
    # ebook.edition.work has a doab id matching given doab_id
    # put a migration to force Ebook.url to be unique id
    # if yes, then return one of the Edition(s) whose work is doab_id
    # if no, then
    ebook = None
    if len(ebooks) > 1:
        raise Exception("There is more than one Ebook matching url {0}".format(url))
    if len(ebooks) == 1:
        ebook = ebooks[0]
        # only attach the doab id if the work has none or it already matches
        if not ebook.edition.work.doab or ebook.edition.work.doab == doab_id:
            models.Identifier.get_or_add(type='doab', value=doab_id, work=ebook.edition.work)
            if not ebook.rights:
                ebook.rights = rights
                ebook.save()
            # update the cover id
            update_cover_doab(doab_id, ebook.edition, redo=False)
            # attach more metadata
            attach_more_doab_metadata(
                ebook.edition,
                description=unlist(kwargs.get('description')),
                subjects=kwargs.get('subject'),
                publication_date=unlist(kwargs.get('date')),
                publisher_name=unlist(kwargs.get('publisher')),
                language=language,
                authors=kwargs.get('creator'),
                dois=dois,
            )
            # make sure all isbns are added
            add_all_isbns(isbns, ebook.edition.work, language=language, title=title)
            return ebook.edition
        # don't add a second doab to an existing Work
        return None

    # remaining case --> no ebook, load record, create ebook if there is one.
    assert not ebooks

    # we need to find the right Edition/Work to tie Ebook to...
    # look for the Edition with which to associate ebook.
    # loop through the isbns to see whether we get one that is not None
    work, edition = add_all_isbns(isbns, None, language=language, title=title)
    if doab_id and not work:
        # make sure there's not already a doab_id
        idents = models.Identifier.objects.filter(type='doab', value=doab_id)
        for ident in idents:
            edition = ident.work.preferred_edition
            work = edition.work
            break
    if edition is not None:
        # if this is a new edition, then add related editions SYNCHRONOUSLY
        if getattr(edition, 'new', False):
            tasks.populate_edition(edition.isbn_13)
            edition.refresh_from_db()
        doab_identifer = models.Identifier.get_or_add(type='doab', value=doab_id,
                                                      work=edition.work)
    # we need to create Edition(s) de novo
    else:
        # if there is a Work with doab_id already, attach any new Edition(s)
        try:
            work = models.Identifier.objects.get(type='doab', value=doab_id).work
        except models.Identifier.DoesNotExist:
            if language:
                work = models.Work(language=language, title=title, age_level='18-')
            else:
                work = models.Work(language='xx', title=title, age_level='18-')
            work.save()
        doab_identifer = models.Identifier.get_or_add(type='doab', value=doab_id,
                                                      work=work)
    # if work has any ebooks already, attach the ebook to the corresponding edition
    # otherwise pick the first one
    # pick the first edition as the one to tie ebook to
    editions_with_ebooks = models.Edition.objects.filter(Q(work__id=work.id) & \
        Q(ebooks__isnull=False)).distinct()
    if editions_with_ebooks:
        edition = editions_with_ebooks[0]
    elif work.editions.all():
        edition = work.editions.all()[0]
    else:
        edition = models.Edition(work=work, title=title)
        edition.save()

    # make the edition the selected_edition of the work
    work.selected_edition = edition
    work.save()

    if format in ('pdf', 'epub', 'mobi', 'html', 'online') and rights:
        ebook = models.Ebook()
        ebook.format = format
        ebook.provider = provider
        ebook.url = url
        ebook.rights = rights
        # tie the edition to ebook
        ebook.edition = edition
        if format == "online":
            # 'online' ebooks aren't directly downloadable, so not active
            ebook.active = False
        ebook.save()

    # update the cover id (could be done separately)
    cover_url = update_cover_doab(doab_id, edition, redo=False)

    # attach more metadata
    attach_more_doab_metadata(
        edition,
        description=unlist(kwargs.get('description')),
        subjects=kwargs.get('subject'),
        publication_date=unlist(kwargs.get('date')),
        publisher_name=unlist(kwargs.get('publisher')),
        authors=kwargs.get('creator'),
        editors=kwargs.get('editor'),
        dois=dois,
    )
    if rights:
        # backfill rights on any of the work's ebooks that lack them
        for ebook in edition.ebooks.all():
            if not ebook.rights:
                ebook.rights = rights
                ebook.save()
    return edition
2016-11-29 20:37:02 +00:00
2018-04-07 22:38:33 +00:00
#
# tools to parse the author lists in doab.csv
#
# '(Author)'/'(Authors)' marker appended to names
au = re.compile(r'\(Authors?\)', flags=re.U)
# editor markers, in several languages (dir., ed./eds., org., coord., a cura di, ...)
ed = re.compile(r'\([^\)]*(dir.|[Eeé]ds?.|org.|coord.|Editor|a cura di|archivist)[^\)]*\)',
                flags=re.U)
# translator markers
tr = re.compile(r'\([^\)]*([Tt]rans.|tr.|translated by)[^\)]*\)', flags=re.U)
# author-of-introduction / foreword markers
ai = re.compile(r'\([^\)]*(Introduction|Foreword)[^\)]*\)', flags=re.U)
# designer marker
ds = re.compile(r'\([^\)]*(designer)[^\)]*\)', flags=re.U)
# compiler marker
cm = re.compile(r'\([^\)]*(comp.)[^\)]*\)', flags=re.U)
# "First Last, First Last and First Last" style name lists
namelist = re.compile(r'([^,]+ [^, ]+)(, | and )([^,]+ [^, ]+)', flags=re.U)
namesep = re.compile(r', | and ', flags=re.U)
namesep2 = re.compile(r';|/| and ', flags=re.U)
# separators and binding labels that appear inside ISBN fields
isbnsep = re.compile(r'[ ,/;\t\.]+|Paper: *|Cloth: *|eISBN: *|Hardcover: *', flags=re.U)
# phrases indicating an edited volume
edlist = re.compile(r'([eE]dited by| a cura di|editors)', flags=re.U)
def fnf(auth):
    """Reorder a 'Last, First' style name into 'First Last' order."""
    if len(auth) > 60:
        return auth  # probably corp name
    parts = re.sub(r' +', u' ', auth).split(u',')
    stripped = [part.strip() for part in parts]
    if len(stripped) == 1:
        return stripped[0]
    if len(stripped) == 2:
        return u'{} {}'.format(stripped[1], stripped[0])
    # three or more comma parts: handle surname particles like 'van'/'von'
    if stripped[1] in ('der', 'van', 'von', 'de', 'ter'):
        return u'{} {} {}'.format(stripped[2], stripped[1], stripped[0])
    return u'{} {}, {}'.format(stripped[2], stripped[0], stripped[1])
2018-04-07 22:38:33 +00:00
2016-11-29 20:37:02 +00:00
def creator(auth, editor=False):
    """Map a raw DOAB author string to a [MARC relator code, name] pair,
    or None when the string carries no name at all."""
    auth = auth.strip()
    if auth in (u'', u'and'):
        return None
    # check role markers in priority order, stripping the marker from the name
    if editor or re.search(ed, auth):
        return [u'edt', fnf(ed.sub(u'', auth))]
    if re.search(tr, auth):
        return [u'trl', fnf(tr.sub(u'', auth))]
    if re.search(ai, auth):
        return [u'aui', fnf(ai.sub(u'', auth))]
    if re.search(ds, auth):
        return [u'dsr', fnf(ds.sub(u'', auth))]
    if re.search(cm, auth):
        return [u'com', fnf(cm.sub(u'', auth))]
    # default: a plain author; drop any '(Author)' marker
    return ['aut', fnf(au.sub('', auth))]
2021-03-03 23:00:40 +00:00
def creator_list(creators, editors):
    """
    Build a list of [relator_code, name] pairs from raw creator and editor
    strings; either argument may be None.

    BUGFIX: creator() returns None for blank or 'and' entries; those were
    previously appended as-is and would break list unpacking in
    attach_more_doab_metadata. They are now filtered out.
    """
    auths = []
    for auth in creators if creators else []:
        entry = creator(auth)
        if entry:
            auths.append(entry)
    for auth in editors if editors else []:
        entry = creator(auth, editor=True)
        if entry:
            auths.append(entry)
    return auths
2021-03-03 23:00:40 +00:00
# OAI-PMH endpoint of the DOAB directory
DOAB_OAIURL = 'https://directory.doabooks.org/oai/request'
# extracts the DOAB id from an OAI record identifier
DOAB_PATT = re.compile(r'oai:directory\.doabooks\.org:(.*)')
mdregistry = MetadataRegistry()
mdregistry.registerReader('oai_dc', doab_reader)
doab_client = Client(DOAB_OAIURL, mdregistry)
# quiet cleaners: return a falsy value instead of raising on bad identifiers
isbn_cleaner = identifier_cleaner('isbn', quiet=True)
doi_cleaner = identifier_cleaner('doi', quiet=True)
# splits multi-ISBN strings such as '978.../978...'
ISBNSEP = re.compile(r'[/;]+')
def add_by_doab(doab_id, record=None):
    """Load the DOAB record with the given id (fetching it over OAI-PMH if
    *record* is not supplied) and return the resulting edition, or None."""
    try:
        record = record if record else doab_client.getRecord(
            metadataPrefix='oai_dc',
            identifier='oai:directory.doabooks.org:{}'.format(doab_id)
        )
        if not record[1]:
            logger.error('No content in record %s', record)
            return None
        metadata = record[1].getMap()
        isbns = []
        dois = []
        urls = []
        for ident in metadata.pop('isbn', []):
            # ident[6:] skips a leading label — presumably 'ISBN: '; TODO confirm
            isbn_strings = ISBNSEP.split(ident[6:].strip())
            for isbn_string in isbn_strings:
                isbn = isbn_cleaner(isbn_string)
                if isbn:
                    isbns.append(isbn)
        for ident in metadata.pop('doi', []):
            ident = doi_cleaner(ident)
            if ident:
                dois.append(ident)
        for ident in metadata.pop('identifier', []):
            if ident.find('doabooks.org') >= 0:
                # should already know the doab_id
                continue
            if ident.startswith('http'):
                urls.append(ident)
        language = doab_lang_to_iso_639_1(unlist(metadata.pop('language', None)))
        # expand 'online' landing pages into concrete download urls
        xurls = []
        for url in urls:
            xurls += online_to_download(url)
        urls = xurls
        edition = None
        title = unlist(metadata.pop('title', None))
        license = cc.license_from_cc_url(unlist(metadata.pop('rights', None)))
        for dl_url in urls:
            format = type_for_url(dl_url)
            # 'format' would collide with the positional argument below
            if 'format' in metadata:
                del metadata['format']
            added_edition = load_doab_edition(
                title,
                doab_id,
                dl_url,
                format,
                license,
                language,
                isbns,
                models.Ebook.infer_provider(dl_url) if dl_url else None,
                dois=dois,
                **metadata
            )
            # keep the last non-None edition as the return value
            edition = added_edition if added_edition else edition
        return edition
    except IdDoesNotExistError as e:
        logger.error(e)
        return None
2018-04-07 22:38:33 +00:00
def getdoab(url):
    """Extract the DOAB id from an OAI identifier; return False if absent."""
    id_match = DOAB_PATT.search(url)
    return id_match.group(1) if id_match else False
2021-05-04 02:17:46 +00:00
def get_doab_record(doab_id):
    """Fetch the raw OAI record for *doab_id*; None when it does not exist."""
    record_id = 'oai:directory.doabooks.org:%s' % doab_id
    try:
        return doab_client.getRecord(metadataPrefix='oai_dc', identifier=record_id)
    except IdDoesNotExistError:
        return None
2021-03-03 23:00:40 +00:00
def load_doab_oai(from_date, until_date, limit=100):
    '''
    use oai feed to get oai updates

    Returns (number of doab records processed, number of new editions,
    datestamp of the latest record seen).
    '''
    start = datetime.datetime.now()
    if from_date:
        from_ = from_date
    else:
        # last 15 days
        from_ = datetime.datetime.now() - datetime.timedelta(days=15)
    num_doabs = 0
    new_doabs = 0
    # sentinel earlier than any real record datestamp
    lasttime = datetime.datetime(2000, 1, 1)
    try:
        for record in doab_client.listRecords(metadataPrefix='oai_dc', from_=from_,
                                              until=until_date):
            if not record[1]:
                continue
            # only harvest book records
            item_type = unlist(record[1].getMap().get('type', None))
            if item_type != 'book':
                continue
            ident = record[0].identifier()
            datestamp = record[0].datestamp()
            # track the newest datestamp seen so the caller can resume later
            lasttime = datestamp if datestamp > lasttime else lasttime
            doab = getdoab(ident)
            if doab:
                num_doabs += 1
                e = add_by_doab(doab, record=record)
                if not e:
                    logger.error('null edition for doab #%s', doab)
                    continue
                # editions created after `start` are counted as new
                if e.created > start:
                    new_doabs += 1
                title = e.title if e else None
                logger.info(u'updated:\t%s\t%s', doab, title)
                if num_doabs >= limit:
                    break
    except NoRecordsMatchError:
        # no updates in the requested window; return zero counts
        pass
    return num_doabs, new_doabs, lasttime