Crawler aux mixin updated to catch the mysterious, anonymous timeout exception and re-raise it as a Timeout::Error

unstable
Tasos Laskos 2013-01-04 21:32:18 +02:00
parent be85cf54ab
commit 3d4d6e9860
1 changed file with 7 additions and 1 deletion

View File

@ -216,7 +216,13 @@ module Auxiliary::HttpCrawler
# Bubble this up to the top-level handler
raise $!
rescue ::Exception => e
# Ridiculous f'ing anonymous timeout exception which I've no idea
# how it comes into existence.
if e.to_s =~ /execution expired/
raise ::Timeout::Error
else
print_error("Crawler Exception: #{url} #{e} #{e.backtrace}")
end
ensure
@crawler.shutdown rescue nil
@crawler = nil