Created
January 3, 2013 19:24
-
-
Save dangra/4446243 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
diff --git a/scrapy/core/scraper.py b/scrapy/core/scraper.py | |
index 187c1e9..acbf6ea 100644 | |
--- a/scrapy/core/scraper.py | |
+++ b/scrapy/core/scraper.py | |
@@ -103,7 +103,7 @@ class Scraper(object): | |
self._scrape_next(spider, slot) | |
return _ | |
dfd.addBoth(finish_scraping) | |
- dfd.addErrback(log.err, 'Scraper bug processing %s' % request, \ | |
-            spider=spider) | |
+ dfd.addErrback(log.err, 'Scraper bug processing %(request)s', | |
+ request=request, spider=spider) | |
self._scrape_next(spider, slot) | |
return dfd | |
@@ -146,7 +146,8 @@ class Scraper(object): | |
if isinstance(exc, CloseSpider): | |
self.crawler.engine.close_spider(spider, exc.reason or 'cancelled') | |
return | |
- log.err(_failure, "Spider error processing %s" % request, spider=spider) | |
+ log.err(_failure, "Spider error processing %(request)s", | |
+ request=request, spider=spider) | |
self.signals.send_catch_log(signal=signals.spider_error, failure=_failure, response=response, \ | |
spider=spider) | |
self.crawler.stats.inc_value("spider_exceptions/%s" % _failure.value.__class__.__name__, \ | |
@@ -205,7 +206,7 @@ class Scraper(object): | |
return self.signals.send_catch_log_deferred(signal=signals.item_dropped, \ | |
item=item, spider=spider, exception=output.value) | |
else: | |
- log.err(output, 'Error processing %s' % item, spider=spider) | |
+ log.err(output, 'Error processing %(item)s', item=item, spider=spider) | |
else: | |
logkws = self.logformatter.scraped(output, response, spider) | |
log.msg(spider=spider, **logkws) | |
diff --git a/scrapy/log.py b/scrapy/log.py | |
index be2c1fd..bc05a50 100644 | |
--- a/scrapy/log.py | |
+++ b/scrapy/log.py | |
@@ -1,4 +1,4 @@ | |
-""" | |
+""" | |
Scrapy logging facility | |
See documentation in docs/topics/logging.rst | |
@@ -7,11 +7,11 @@ import sys | |
import logging | |
import warnings | |
-from twisted.python import log | |
+from twisted.python import log, _safeFormat | |
import scrapy | |
from scrapy.utils.python import unicode_to_str | |
- | |
+ | |
# Logging levels | |
DEBUG = logging.DEBUG | |
INFO = logging.INFO | |
@@ -131,6 +131,8 @@ def msg(message=None, _level=INFO, **kw): | |
def err(_stuff=None, _why=None, **kw): | |
kw['logLevel'] = kw.pop('level', ERROR) | |
kw.setdefault('system', 'scrapy') | |
+ if _why is not None: | |
+ _why = _safeFormat(_why, kw) | |
log.err(_stuff, _why, **kw) | |
def start_from_crawler(crawler): |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment