Merge pull request #786 from Gluejar/handle-series

Handle sets
pull/91/head
eshellman 2018-06-19 10:55:47 -04:00 committed by GitHub
commit 80b46f36c7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 98 additions and 15 deletions

View File

@ -242,6 +242,17 @@ def update_edition(edition):
return edition return edition
def get_isbn_item(items, isbn):
    """Return the first Google Books item whose identifiers include `isbn`.

    Google sometimes sends back several items for a single-ISBN query; pick
    the one that actually carries the requested ISBN.

    Args:
        items: list of Google Books volume dicts (each may have a
            'volumeInfo' dict with an 'industryIdentifiers' list).
        isbn: the ISBN string we asked Google about.

    Returns:
        The matching item dict, or None when nothing matches (including
        when `items` is empty).
    """
    for item in items:
        volume_info = item.get('volumeInfo', {})
        for ident in volume_info.get('industryIdentifiers', []):
            # ident looks like {'type': 'ISBN_13', 'identifier': '978...'}
            if ident['identifier'] == isbn:
                return item
    # The original used a for/else with an unreachable trailing `return item`:
    # the else clause runs on ANY no-break completion, so a non-empty list
    # with no match also produced None. Make that behavior explicit.
    return None
def add_by_isbn_from_google(isbn, work=None): def add_by_isbn_from_google(isbn, work=None):
"""add a book to the UnglueIt database from google based on ISBN. The work parameter """add a book to the UnglueIt database from google based on ISBN. The work parameter
@ -263,11 +274,12 @@ def add_by_isbn_from_google(isbn, work=None):
logger.info(u"adding new book by isbn %s", isbn) logger.info(u"adding new book by isbn %s", isbn)
results = get_google_isbn_results(isbn) results = get_google_isbn_results(isbn)
if results: if results:
item = get_isbn_item(results['items'], isbn)
try: try:
return add_by_googlebooks_id( return add_by_googlebooks_id(
results['items'][0]['id'], item['id'],
work=work, work=work,
results=results['items'][0], results=item,
isbn=isbn isbn=isbn
) )
except LookupFailure, e: except LookupFailure, e:
@ -521,6 +533,20 @@ def merge_works(w1, w2, user=None):
#(for example, when w2 has already been deleted) #(for example, when w2 has already been deleted)
if w1 is None or w2 is None or w1.id == w2.id or w1.id is None or w2.id is None: if w1 is None or w2 is None or w1.id == w2.id or w1.id is None or w2.id is None:
return w1 return w1
#don't merge if the works are related.
if w2 in w1.works_related_to.all() or w1 in w2.works_related_to.all():
return w1
# check if one of the works is a series with parts (that have their own isbn)
if w1.works_related_from.filter(relation='part'):
models.WorkRelation.objects.get_or_create(to_work=w2, from_work=w1, relation='part')
return w1
if w2.works_related_from.filter(relation='part'):
models.WorkRelation.objects.get_or_create(to_work=w1, from_work=w2, relation='part')
return w1
if w2.selected_edition is not None and w1.selected_edition is None: if w2.selected_edition is not None and w1.selected_edition is None:
#the merge should be reversed #the merge should be reversed
temp = w1 temp = w1
@ -583,7 +609,7 @@ def merge_works(w1, w2, user=None):
for work_relation in w2.works_related_from.all(): for work_relation in w2.works_related_from.all():
work_relation.from_work = w1 work_relation.from_work = w1
work_relation.save() work_relation.save()
w2.delete() w2.delete(cascade=False)
return w1 return w1
def detach_edition(e): def detach_edition(e):

View File

@ -142,6 +142,7 @@ def add_all_isbns(isbns, work, language=None, title=None):
if edition: if edition:
first_edition = first_edition if first_edition else edition first_edition = first_edition if first_edition else edition
if work and (edition.work_id != work.id): if work and (edition.work_id != work.id):
if work.doab and edition.work.doab and work.doab != edition.work.doab:
if work.created < edition.work.created: if work.created < edition.work.created:
work = merge_works(work, edition.work) work = merge_works(work, edition.work)
else: else:

View File

@ -19,10 +19,10 @@ class LoaderTests(TestCase):
dropbox_url = 'https://www.dropbox.com/s/h5jzpb4vknk8n7w/Jakobsson_The_Troll_Inside_You_EBook.pdf?dl=0' dropbox_url = 'https://www.dropbox.com/s/h5jzpb4vknk8n7w/Jakobsson_The_Troll_Inside_You_EBook.pdf?dl=0'
dropbox_ebook = Ebook.objects.create(format='online', url=dropbox_url, edition=edition) dropbox_ebook = Ebook.objects.create(format='online', url=dropbox_url, edition=edition)
dropbox_ebf = dl_online(dropbox_ebook) dropbox_ebf, new_ebf = dl_online(dropbox_ebook)
self.assertTrue(dropbox_ebf.ebook.filesize) self.assertTrue(dropbox_ebf.ebook.filesize)
jbe_url = 'http://www.jbe-platform.com/content/books/9789027295958' jbe_url = 'http://www.jbe-platform.com/content/books/9789027295958'
jbe_ebook = Ebook.objects.create(format='online', url=jbe_url, edition=edition) jbe_ebook = Ebook.objects.create(format='online', url=jbe_url, edition=edition)
jbe_ebf = dl_online(jbe_ebook) jbe_ebf, new_ebf = dl_online(jbe_ebook)
self.assertTrue(jbe_ebf.ebook.filesize) self.assertTrue(jbe_ebf.ebook.filesize)

View File

@ -4,7 +4,10 @@ from regluit.core.loaders import doab
class Command(BaseCommand):
    """Management command: load DOAB books, by numeric doab id, via OAI."""
    help = "load doab books by doab_id via oai"

    def add_arguments(self, parser):
        # One or more required positional ids. NOTE: argparse ignores
        # `default` for nargs='+' positionals, so the original default=1
        # was dead weight — dropped here.
        parser.add_argument('doab_ids', nargs='+', type=int, help="doab ids to add")

    def handle(self, doab_ids, **options):
        # Django passes parsed arguments as keywords, so `doab_ids`
        # arrives as the list built by add_arguments above.
        for doab_id in doab_ids:
            doab.add_by_doab(doab_id)

View File

@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0013_ebookfile_mobied'),
]
operations = [
migrations.AlterField(
model_name='workrelation',
name='relation',
field=models.CharField(max_length=15, choices=[(b'translation', b'translation'), (b'revision', b'revision'), (b'sequel', b'sequel'), (b'part', b'part')]),
),
]

View File

@ -22,6 +22,8 @@ from django.db.models import F
from django.db.models.signals import post_save, pre_delete from django.db.models.signals import post_save, pre_delete
from django.utils.timezone import now from django.utils.timezone import now
from django_comments.models import Comment
import regluit import regluit
from regluit.marc.models import MARCRecord as NewMARC from regluit.marc.models import MARCRecord as NewMARC
from questionnaire.models import Landing from questionnaire.models import Landing
@ -131,6 +133,7 @@ class Work(models.Model):
class Meta: class Meta:
ordering = ['title'] ordering = ['title']
def __unicode__(self): def __unicode__(self):
return self.title return self.title
@ -138,6 +141,31 @@ class Work(models.Model):
self._last_campaign = None self._last_campaign = None
super(Work, self).__init__(*args, **kwargs) super(Work, self).__init__(*args, **kwargs)
    def delete(self, cascade=True, *args, **kwargs):
        """Delete this Work, by default tearing down its dependent objects first.

        With cascade=True (the default), the delete is refused — method
        returns None without deleting anything — when the work still has
        offers, claims, campaigns, acqs, holds, or landings attached; when
        clear, it detaches the work from wishlists and contributor profiles
        and deletes its identifiers, comments, editions (with their ebooks
        and ebook files), and work relations before deleting the row itself.

        With cascade=False the dependent-object teardown is skipped entirely
        and only the Work row is deleted; callers such as merge_works use
        this after re-pointing the related objects at the surviving work.
        """
        if cascade:
            # Bail out if anything commercially meaningful still references
            # this work — deleting it would orphan those records.
            if self.offers.all() or self.claim.all() or self.campaigns.all() or self.acqs.all() \
                    or self.holds.all() or self.landings.all():
                return
            # Detach from user-owned collections rather than deleting them.
            for wishlist in self.wishlists.all():
                wishlist.remove_work(self)
            for userprofile in self.contributors.all():
                userprofile.works.remove(self)
            for identifier in self.identifiers.all():
                identifier.delete()
            for comment in Comment.objects.for_model(self):
                comment.delete()
            # Editions own ebooks and ebook files; remove the leaves first.
            for edition in self.editions.all():
                for ebook in edition.ebooks.all():
                    ebook.delete()
                for ebookfile in edition.ebook_files.all():
                    ebookfile.delete()
                edition.delete()
            # Drop relations in both directions (this work as target and source).
            for work_relation in self.works_related_to.all():
                work_relation.delete()
            for work_relation in self.works_related_from.all():
                work_relation.delete()
        super(Work, self).delete(*args, **kwargs) # Call the "real" delete() method.
def id_for(self, type): def id_for(self, type):
return id_for(self, type) return id_for(self, type)

View File

@ -20,7 +20,7 @@ TEXT_RELATION_CHOICES = (
('translation', 'translation'), ('translation', 'translation'),
('revision', 'revision'), ('revision', 'revision'),
('sequel', 'sequel'), ('sequel', 'sequel'),
('compilation', 'compilation') ('part', 'part')
) )
ID_CHOICES = ( ID_CHOICES = (

View File

@ -299,6 +299,12 @@ class BookLoaderTests(TestCase):
bookloader.merge_works(w1, w1) bookloader.merge_works(w1, w1)
self.assertEqual(models.Work.objects.count(), before + 2) self.assertEqual(models.Work.objects.count(), before + 2)
# first try to merge related works -- should not do anything
rel, created = models.WorkRelation.objects.get_or_create(to_work=w1, from_work=w2, relation='part')
bookloader.merge_works(w1, w2)
self.assertEqual(models.Work.objects.count(), before + 2)
rel.delete()
# merge the second work into the first # merge the second work into the first
bookloader.merge_works(e1.work, e2.work) bookloader.merge_works(e1.work, e2.work)
self.assertEqual(models.Work.objects.count(), before + 1) self.assertEqual(models.Work.objects.count(), before + 1)