@@ -219,7 +219,8 @@ protected void onContentFetchError(WebURL webUrl) {
219
219
* @param webUrl URL where an unhandled exception occurred
220
220
*/
221
221
protected void onUnhandledException (WebURL webUrl , Throwable e ) {
222
- logger .warn ("Unhandled exception while fetching {}: {}" , webUrl .getURL (), e .getMessage ());
222
+ String urlStr = (webUrl == null ? "NULL" : webUrl .getURL ());
223
+ logger .warn ("Unhandled exception while fetching {}: {}" , urlStr , e .getMessage ());
223
224
logger .info ("Stacktrace: " , e );
224
225
// Do nothing by default (except basic logging)
225
226
// Subclasses can override this to add their custom functionality
@@ -430,9 +431,6 @@ private void processPage(WebURL curURL) {
430
431
} catch (NotAllowedContentException nace ) {
431
432
logger .debug ("Skipping: {} as it contains binary content which you configured not to crawl" , curURL .getURL ());
432
433
} catch (Exception e ) {
433
- String urlStr = (curURL == null ? "NULL" : curURL .getURL ());
434
- logger .error ("{}, while processing: {}" , e .getMessage (), urlStr );
435
- logger .debug ("Stacktrace" , e );
436
434
onUnhandledException (curURL , e );
437
435
} finally {
438
436
if (fetchResult != null ) {
@@ -452,4 +450,4 @@ public void setThread(Thread myThread) {
452
450
public boolean isNotWaitingForNewURLs () {
453
451
return !isWaitingForNewURLs ;
454
452
}
455
- }
453
+ }
0 commit comments