From aee36651a5a01bd1e19e64ce1eef2c3c7b3bacca Mon Sep 17 00:00:00 2001
From: Henrik Levkowetz
Date: Wed, 16 Mar 2016 13:21:02 +0000
Subject: [PATCH] Tweaked the test-crawler to give the same log line format for
 exception failures as for regular log lines.
 - Legacy-Id: 10936
---
 bin/test-crawl | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/bin/test-crawl b/bin/test-crawl
index 2c4fce706..cc38bacec 100755
--- a/bin/test-crawl
+++ b/bin/test-crawl
@@ -291,15 +291,24 @@ if __name__ == "__main__":
         if skip_url(url):
             continue
 
+        timestamp = datetime.datetime.now()
+        acc_time = (timestamp - start_time).total_seconds()
+        acc_secs = (timestamp - start_time).total_seconds()
+        hrs = acc_secs // (60*60)
+        min = (acc_secs % (60*60)) // 60
+        sec = acc_secs % 60
+
         try:
-            timestamp = datetime.datetime.now()
+            request_start = datetime.datetime.now()
             r = client.get(url, secure=True, follow=True)
-            elapsed = datetime.datetime.now() - timestamp
+            elapsed = datetime.datetime.now() - request_start
         except KeyboardInterrupt:
             log(" ... was fetching %s" % url)
             sys.exit(1)
         except:
-            log("500 %.3fs %s FAIL (from: [ %s ])" % ((datetime.datetime.now() - timestamp).total_seconds(), url, (",\n\t".join(get_referrers(url)))))
+            elapsed = datetime.datetime.now() - request_start
+            tags = [ u"FAIL (from [ %s ])" % (",\n\t".join(get_referrers(url))) ]
+            log("%2d:%02d:%02d %7d %6d %s %6.3fs %s %s" % (hrs,min,sec, len(visited), len(urls), 500, elapsed.total_seconds(), url, " ".join(tags)))
             log("=============")
             log(traceback.format_exc())
             log("=============")
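
Note (not part of the patch): a minimal, self-contained sketch of the unified log-line layout this commit gives to exception failures. The helper name format_log_line() and the sample values below are hypothetical; in the script itself the line is built inline and passed to log().

    import datetime

    def format_log_line(start_time, visited, queue, status, elapsed, url, tags):
        # Accumulated crawl run time since start_time, rendered as h:mm:ss,
        # followed by the same columns a regular (non-failing) fetch logs:
        # pages visited, queue size, status code, request time, URL, tags.
        acc_secs = (datetime.datetime.now() - start_time).total_seconds()
        hrs = acc_secs // (60 * 60)
        mins = (acc_secs % (60 * 60)) // 60
        secs = acc_secs % 60
        return "%2d:%02d:%02d %7d %6d %s %6.3fs %s %s" % (
            hrs, mins, secs, len(visited), len(queue),
            status, elapsed.total_seconds(), url, " ".join(tags))

    # Example: a failed fetch now produces the same columns as a regular
    # log line, with a FAIL tag listing the referring pages (sample data).
    start = datetime.datetime.now() - datetime.timedelta(seconds=3723)
    print(format_log_line(start,
                          visited={"/doc/"}, queue=["/ipr/"], status=500,
                          elapsed=datetime.timedelta(seconds=1.5),
                          url="/some/url/", tags=["FAIL (from [ /doc/ ])"]))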