factor add_by_metadata out of add_by_webpage
parent 8e394620d6
commit 395899e06c
@@ -45,14 +45,10 @@ def scrape_sitemap(url, maxnum=None):
     except SSLError as e:
         logger.error(e)
 
-def add_by_webpage(url, work=None, user=None):
-    if not url:
-        return None
-    edition = None
-    scraper = get_scraper(url)
-    loader = BasePandataLoader(url)
+def add_by_metadata(metadata, url='', work=None, user=None):
     pandata = Pandata()
-    pandata.metadata = scraper.metadata
+    loader = BasePandataLoader(url)
+    pandata.metadata = metadata
     for metadata in pandata.get_edition_list():
         edition = loader.load_from_pandata(metadata, work)
         if hasattr(edition, 'work'):
@@ -62,7 +58,14 @@ def add_by_webpage(url, work=None, user=None):
     loader.load_ebooks(pandata, edition, user=user)
     return edition if edition else None
 
+def add_by_webpage(url, work=None, user=None):
+    if not url:
+        return None
+    edition = None
+    scraper = get_scraper(url)
+    return add_by_metadata(metadata, url=url, work=None, user=None)
+
 
 def add_by_sitemap(url, maxnum=None):
     return add_from_bookdatas(scrape_sitemap(url, maxnum=maxnum))
 
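For context, a minimal usage sketch of the two entry points after this refactor. This is not part of the commit: it assumes the functions live in regluit's core.bookloader module, and the URL and metadata fields below are made-up placeholders.

# usage_sketch.py (hypothetical, for illustration only)
from regluit.core.bookloader import add_by_metadata, add_by_webpage

# Scrape-then-load path: add_by_webpage runs get_scraper(url) and hands
# the scraped metadata to the factored-out loader.
edition = add_by_webpage('https://example.com/books/123')

# Direct path the refactor opens up: load metadata that came from
# somewhere other than a webpage scraper. The keys here are illustrative
# stand-ins, not necessarily Pandata's actual schema.
metadata = {'title': 'An Example Title', 'language': 'en'}
edition = add_by_metadata(metadata, url='https://example.com/books/123')

Splitting the loader out this way lets callers that already hold metadata (a feed, an API response, a file) reuse the Pandata/BasePandataLoader path without going through a scraper.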