onUnhandledException - fix duplicate logging issue
EgbertW committed Jul 21, 2015
1 parent 28ca8eb commit f9c82fe
Showing 1 changed file with 3 additions and 5 deletions.
src/main/java/edu/uci/ics/crawler4j/crawler/WebCrawler.java
@@ -219,7 +219,8 @@ protected void onContentFetchError(WebURL webUrl) {
  * @param webUrl URL where an unhandled exception occurred
  */
 protected void onUnhandledException(WebURL webUrl, Throwable e) {
-  logger.warn("Unhandled exception while fetching {}: {}", webUrl.getURL(), e.getMessage());
+  String urlStr = (webUrl == null ? "NULL" : webUrl.getURL());
+  logger.warn("Unhandled exception while fetching {}: {}", urlStr, e.getMessage());
   logger.info("Stacktrace: ", e);
   // Do nothing by default (except basic logging)
   // Subclasses can override this to add their custom functionality
@@ -430,9 +431,6 @@ private void processPage(WebURL curURL) {
 } catch (NotAllowedContentException nace) {
   logger.debug("Skipping: {} as it contains binary content which you configured not to crawl", curURL.getURL());
 } catch (Exception e) {
-  String urlStr = (curURL == null ? "NULL" : curURL.getURL());
-  logger.error("{}, while processing: {}", e.getMessage(), urlStr);
-  logger.debug("Stacktrace", e);
   onUnhandledException(curURL, e);
 } finally {
   if (fetchResult != null) {
@@ -452,4 +450,4 @@ public void setThread(Thread myThread) {
 public boolean isNotWaitingForNewURLs() {
   return !isWaitingForNewURLs;
 }
-}
+}
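
Taken together, the two hunks resolve the duplicate logging named in the commit message: processPage previously logged each exception itself and then called onUnhandledException, which logged it again. After this commit, all logging happens once, inside onUnhandledException, which also gains a null guard on webUrl. Since the hook's comment invites subclasses to override it, here is a minimal sketch of such an override. It assumes the crawler4j API shown in this diff; the WebURL import path follows crawler4j's conventional layout, and MyCrawler and the unhandledErrors counter are hypothetical.

    import edu.uci.ics.crawler4j.crawler.WebCrawler;
    import edu.uci.ics.crawler4j.url.WebURL;

    import java.util.concurrent.atomic.AtomicLong;

    public class MyCrawler extends WebCrawler {

        // Hypothetical counter standing in for any custom failure handling.
        private static final AtomicLong unhandledErrors = new AtomicLong();

        @Override
        protected void onUnhandledException(WebURL webUrl, Throwable e) {
            // Keep the default behaviour: after this commit, the warning and
            // stack trace are logged exactly once per failed fetch.
            super.onUnhandledException(webUrl, e);
            // Then layer on custom handling, here a simple failure count.
            unhandledErrors.incrementAndGet();
        }
    }

Calling super first preserves the single log entry per failure; a subclass that logs the exception again would simply reintroduce the duplication this commit removes.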
