Crawler aux mixin updated to catch the mysterious and anonymous timeout exception and re-raise it as a Timeout::Error
parent: be85cf54ab
commit: 3d4d6e9860
@@ -216,7 +216,13 @@ module Auxiliary::HttpCrawler

```ruby
        # Bubble this up to the top-level handler
        raise $!
      rescue ::Exception => e
        # Ridiculous f'ing anonymous timeout exception which I've no idea
        # how it comes into existence.
        if e.to_s =~ /execution expired/
          raise ::Timeout::Error
        else
          print_error("Crawler Exception: #{url} #{e} #{e.backtrace}")
        end
      ensure
        @crawler.shutdown rescue nil
        @crawler = nil
```
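Outside the diff context, the normalization pattern looks roughly like the sketch below. It is a minimal, self-contained illustration, not the module's actual code: the `crawl_target` and `run_crawl` names, the simulated `RuntimeError`, and the use of `warn` in place of the framework's `print_error` are all assumptions made for the example. Only the message match on `execution expired` and the re-raise as `::Timeout::Error` come from the commit itself.

```ruby
require 'timeout'

# Hypothetical stand-in for the crawler call that can fail with an
# anonymous exception whose message is "execution expired".
def crawl_target(url)
  # Simulate the mystery exception purely for demonstration.
  raise RuntimeError, "execution expired"
end

def run_crawl(url)
  crawl_target(url)
rescue ::Exception => e
  # Normalize the anonymous timeout into a well-known class so callers
  # only ever have to rescue ::Timeout::Error.
  if e.to_s =~ /execution expired/
    raise ::Timeout::Error
  else
    warn "Crawler Exception: #{url} #{e} #{e.backtrace}"
  end
end

begin
  run_crawl("http://example.com/")
rescue ::Timeout::Error
  warn "Crawl timed out, moving on"
end
```

Matching on the exception message is brittle, but when the exception class itself is anonymous it is the only handle available; re-raising a concrete `::Timeout::Error` lets the top-level handler treat every timeout uniformly.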