diff --git a/noisy.py b/noisy.py
index c1830ee9..a2d3a62f 100644
--- a/noisy.py
+++ b/noisy.py
@@ -159,7 +159,7 @@ def _browse_from_links(self, depth=0):
                 self._remove_and_blacklist(random_link)
             except (requests.exceptions.RequestException, UnicodeDecodeError):
                 logging.debug(
-                    "Exception on URL: %s, removing from list and trying again!" % random_link
+                    "Exception on URL: {}, removing from list and trying again!".format(random_link)
                 )
                 self._remove_and_blacklist(random_link)
                 self._browse_from_links(depth + 1)
@@ -225,11 +225,11 @@ def crawl(self):
                 logging.debug("found {} links", len(self._links))
                 self._browse_from_links()
             except (requests.exceptions.RequestException, UnicodeDecodeError):
-                logging.warning("Error connecting to root url: {}", url)
+                logging.warning("Error connecting to root url: {}".format(url))
             except MemoryError:
-                logging.warning("Error: content at url: {} is exhausting the memory", url)
+                logging.warning("Error: content at url: {} is exhausting the memory".format(url))
             except LocationParseError:
-                logging.warning("Error encountered during parsing of: {}", url)
+                logging.warning("Error encountered during parsing of: {}".format(url))
             except self.CrawlerTimedOut:
                 logging.info("Timeout has exceeded, exiting")
                 return