Skip to content

Commit f9c82fe

Browse files
committed
onUnhandledException - fix duplicate logging issue
1 parent 28ca8eb commit f9c82fe

File tree

1 file changed

+3
-5
lines changed

1 file changed

+3
-5
lines changed

src/main/java/edu/uci/ics/crawler4j/crawler/WebCrawler.java

+3-5
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,8 @@ protected void onContentFetchError(WebURL webUrl) {
219219
* @param webUrl URL where an unhandled exception occurred
220220
*/
221221
protected void onUnhandledException(WebURL webUrl, Throwable e) {
222-
logger.warn("Unhandled exception while fetching {}: {}", webUrl.getURL(), e.getMessage());
222+
String urlStr = (webUrl == null ? "NULL" : webUrl.getURL());
223+
logger.warn("Unhandled exception while fetching {}: {}", urlStr, e.getMessage());
223224
logger.info("Stacktrace: ", e);
224225
// Do nothing by default (except basic logging)
225226
// Sub-classes can override this to add their custom functionality
@@ -430,9 +431,6 @@ private void processPage(WebURL curURL) {
430431
} catch (NotAllowedContentException nace) {
431432
logger.debug("Skipping: {} as it contains binary content which you configured not to crawl", curURL.getURL());
432433
} catch (Exception e) {
433-
String urlStr = (curURL == null ? "NULL" : curURL.getURL());
434-
logger.error("{}, while processing: {}", e.getMessage(), urlStr);
435-
logger.debug("Stacktrace", e);
436434
onUnhandledException(curURL, e);
437435
} finally {
438436
if (fetchResult != null) {
@@ -452,4 +450,4 @@ public void setThread(Thread myThread) {
452450
public boolean isNotWaitingForNewURLs() {
453451
return !isWaitingForNewURLs;
454452
}
455-
}
453+
}

0 commit comments

Comments
 (0)