
Commit 9b944aa

Switch from root logger to separate loggers.
Rather than using one logger per module, use three semantically-defined loggers: tornado.access, tornado.application, and tornado.general.
bdarnell committed Sep 9, 2012
1 parent 1881958 commit 9b944aa
Showing 19 changed files with 142 additions and 110 deletions.
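
The new tornado.log module itself is not part of this excerpt. A minimal sketch of what it presumably defines, based on the logger names in the commit message (gen_log matches the imports in the diffs below; the access_log and app_log names are assumptions):

    # Hypothetical sketch of tornado/log.py; not shown in this diff.
    import logging

    access_log = logging.getLogger("tornado.access")      # one logger per concern,
    app_log = logging.getLogger("tornado.application")    # not one per module
    gen_log = logging.getLogger("tornado.general")
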
26 changes: 13 additions & 13 deletions tornado/auth.py
@@ -50,7 +50,6 @@ def _on_auth(self, user):
import binascii
import hashlib
import hmac
-import logging
import time
import urllib
import urlparse
@@ -59,6 +58,7 @@ def _on_auth(self, user):
from tornado import httpclient
from tornado import escape
from tornado.httputil import url_concat
+from tornado.log import gen_log
from tornado.util import bytes_type, b


@@ -150,7 +150,7 @@ def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):

def _on_authentication_verified(self, callback, response):
if response.error or b("is_valid:true") not in response.body:
-logging.warning("Invalid OpenID response: %s", response.error or
+gen_log.warning("Invalid OpenID response: %s", response.error or
response.body)
callback(None)
return
@@ -263,14 +263,14 @@ def get_authenticated_user(self, callback, http_client=None):
oauth_verifier = self.get_argument("oauth_verifier", None)
request_cookie = self.get_cookie("_oauth_request_token")
if not request_cookie:
-logging.warning("Missing OAuth request token cookie")
+gen_log.warning("Missing OAuth request token cookie")
callback(None)
return
self.clear_cookie("_oauth_request_token")
cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
if cookie_key != request_key:
-logging.info((cookie_key, request_key, request_cookie))
-logging.warning("Request token does not match cookie")
+gen_log.info((cookie_key, request_key, request_cookie))
+gen_log.warning("Request token does not match cookie")
callback(None)
return
token = dict(key=cookie_key, secret=cookie_secret)
@@ -348,7 +348,7 @@ def _oauth_access_token_url(self, request_token):

def _on_access_token(self, callback, response):
if response.error:
-logging.warning("Could not fetch access token")
+gen_log.warning("Could not fetch access token")
callback(None)
return

@@ -547,7 +547,7 @@ def _on_post(self, new_entry):

def _on_twitter_request(self, callback, response):
if response.error:
-logging.warning("Error response %s fetching %s", response.error,
+gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
@@ -669,7 +669,7 @@ def _on_post(self, new_entry):

def _on_friendfeed_request(self, callback, response):
if response.error:
-logging.warning("Error response %s fetching %s", response.error,
+gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
@@ -930,17 +930,17 @@ def _on_get_user_info(self, callback, session, users):

def _parse_response(self, callback, response):
if response.error:
-logging.warning("HTTP error from Facebook: %s", response.error)
+gen_log.warning("HTTP error from Facebook: %s", response.error)
callback(None)
return
try:
json = escape.json_decode(response.body)
except Exception:
-logging.warning("Invalid JSON from Facebook: %r", response.body)
+gen_log.warning("Invalid JSON from Facebook: %r", response.body)
callback(None)
return
if isinstance(json, dict) and json.get("error_code"):
-logging.warning("Facebook error: %d: %r", json["error_code"],
+gen_log.warning("Facebook error: %d: %r", json["error_code"],
json.get("error_msg"))
callback(None)
return
@@ -1007,7 +1007,7 @@ def _on_login(self, user):
def _on_access_token(self, redirect_uri, client_id, client_secret,
callback, fields, response):
if response.error:
-logging.warning('Facebook auth error: %s' % str(response))
+gen_log.warning('Facebook auth error: %s' % str(response))
callback(None)
return

@@ -1090,7 +1090,7 @@ def _on_post(self, new_entry):

def _on_facebook_request(self, callback, response):
if response.error:
-logging.warning("Error response %s fetching %s", response.error,
+gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
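
A practical consequence of the auth.py changes above: applications can now tune or silence Tornado's internal messages separately from their own root logger. An illustrative sketch (logger names are taken from the commit message; the levels chosen here are arbitrary):

    # Illustrative only: configure application logging as usual, then adjust
    # Tornado's loggers independently.
    import logging

    logging.basicConfig(level=logging.DEBUG)                         # app's own config
    logging.getLogger("tornado.general").setLevel(logging.WARNING)   # quiet internal chatter
    logging.getLogger("tornado.access").propagate = False            # e.g. drop access logs
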
10 changes: 5 additions & 5 deletions tornado/autoreload.py
@@ -67,14 +67,14 @@
del sys.path[0]

import functools
-import logging
import os
import pkgutil
import sys
import types
import subprocess

from tornado import ioloop
+from tornado.log import gen_log
from tornado import process

try:
@@ -177,7 +177,7 @@ def _check_file(modify_times, path):
modify_times[path] = modified
return
if modify_times[path] != modified:
-logging.info("%s modified; restarting server", path)
+gen_log.info("%s modified; restarting server", path)
_reload()


@@ -272,13 +272,13 @@ def main():
# module) will see the right things.
exec f.read() in globals(), globals()
except SystemExit, e:
-logging.info("Script exited with status %s", e.code)
+gen_log.info("Script exited with status %s", e.code)
except Exception, e:
-logging.warning("Script exited with uncaught exception", exc_info=True)
+gen_log.warning("Script exited with uncaught exception", exc_info=True)
if isinstance(e, SyntaxError):
watch(e.filename)
else:
-logging.info("Script exited normally")
+gen_log.info("Script exited normally")
# restore sys.argv so subsequent executions will include autoreload
sys.argv = original_argv

15 changes: 8 additions & 7 deletions tornado/curl_httpclient.py
@@ -27,6 +27,7 @@

from tornado import httputil
from tornado import ioloop
+from tornado.log import gen_log
from tornado import stack_context

from tornado.escape import utf8
@@ -51,7 +52,7 @@ def initialize(self, io_loop=None, max_clients=10):
# socket_action is found in pycurl since 7.18.2 (it's been
# in libcurl longer than that but wasn't accessible to
# python).
-logging.warning("socket_action method missing from pycurl; "
+gen_log.warning("socket_action method missing from pycurl; "
"falling back to socket_all. Upgrading "
"libcurl and pycurl will improve performance")
self._socket_action = \
@@ -263,7 +264,7 @@ def __init__(self, errno, message):

def _curl_create():
curl = pycurl.Curl()
-if logging.getLogger().isEnabledFor(logging.DEBUG):
+if gen_log.isEnabledFor(logging.DEBUG):
curl.setopt(pycurl.VERBOSE, 1)
curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
return curl
@@ -386,11 +387,11 @@ def ioctl(cmd):
userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
curl.setopt(pycurl.USERPWD, utf8(userpwd))
-logging.debug("%s %s (username: %r)", request.method, request.url,
+gen_log.debug("%s %s (username: %r)", request.method, request.url,
request.auth_username)
else:
curl.unsetopt(pycurl.USERPWD)
-logging.debug("%s %s", request.method, request.url)
+gen_log.debug("%s %s", request.method, request.url)

if request.client_cert is not None:
curl.setopt(pycurl.SSLCERT, request.client_cert)
@@ -426,12 +427,12 @@ def _curl_header_callback(headers, header_line):
def _curl_debug(debug_type, debug_msg):
debug_types = ('I', '<', '>', '<', '>')
if debug_type == 0:
-logging.debug('%s', debug_msg.strip())
+gen_log.debug('%s', debug_msg.strip())
elif debug_type in (1, 2):
for line in debug_msg.splitlines():
-logging.debug('%s %s', debug_types[debug_type], line)
+gen_log.debug('%s %s', debug_types[debug_type], line)
elif debug_type == 4:
-logging.debug('%s %r', debug_types[debug_type], debug_msg)
+gen_log.debug('%s %r', debug_types[debug_type], debug_msg)

if __name__ == "__main__":
AsyncHTTPClient.configure(CurlAsyncHTTPClient)
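
Note the _curl_create hunk above: pycurl's VERBOSE output is now gated on the tornado.general logger instead of the root logger. Roughly, enabling that debug output after this change would look like the following sketch:

    # Sketch: turn on libcurl debug output by raising tornado.general to DEBUG.
    import logging

    logging.basicConfig()  # ensure at least one handler exists
    logging.getLogger("tornado.general").setLevel(logging.DEBUG)
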
7 changes: 4 additions & 3 deletions tornado/database.py
@@ -20,9 +20,10 @@

import copy
import itertools
-import logging
import time

+from tornado.log import gen_log

try:
import MySQLdb.constants
import MySQLdb.converters
@@ -83,7 +84,7 @@ def __init__(self, host, database, user=None, password=None,
try:
self.reconnect()
except Exception:
-logging.error("Cannot connect to MySQL on %s", self.host,
+gen_log.error("Cannot connect to MySQL on %s", self.host,
exc_info=True)

def __del__(self):
@@ -207,7 +208,7 @@ def _execute(self, cursor, query, parameters):
try:
return cursor.execute(query, parameters)
except OperationalError:
-logging.error("Error connecting to MySQL on %s", self.host)
+gen_log.error("Error connecting to MySQL on %s", self.host)
self.close()
raise

4 changes: 2 additions & 2 deletions tornado/httpserver.py
@@ -27,13 +27,13 @@ class except to start a server at the beginning of the process
from __future__ import absolute_import, division, with_statement

import Cookie
-import logging
import socket
import time

from tornado.escape import native_str, parse_qs_bytes
from tornado import httputil
from tornado import iostream
+from tornado.log import gen_log
from tornado.netutil import TCPServer
from tornado import stack_context
from tornado.util import b, bytes_type
@@ -267,7 +267,7 @@ def _on_headers(self, data):

self.request_callback(self._request)
except _BadRequestException, e:
-logging.info("Malformed HTTP request from %s: %s",
+gen_log.info("Malformed HTTP request from %s: %s",
self.address[0], e)
self.close()
return
12 changes: 6 additions & 6 deletions tornado/httputil.py
@@ -18,11 +18,11 @@

from __future__ import absolute_import, division, with_statement

-import logging
import urllib
import re

from tornado.escape import native_str, parse_qs_bytes, utf8
+from tornado.log import gen_log
from tornado.util import b, ObjectDict


@@ -228,7 +228,7 @@ def parse_body_arguments(content_type, body, arguments, files):
parse_multipart_form_data(utf8(v), body, arguments, files)
break
else:
-logging.warning("Invalid multipart/form-data")
+gen_log.warning("Invalid multipart/form-data")


def parse_multipart_form_data(boundary, data, arguments, files):
@@ -247,25 +247,25 @@ def parse_multipart_form_data(boundary, data, arguments, files):
boundary = boundary[1:-1]
final_boundary_index = data.rfind(b("--") + boundary + b("--"))
if final_boundary_index == -1:
-logging.warning("Invalid multipart/form-data: no final boundary")
+gen_log.warning("Invalid multipart/form-data: no final boundary")
return
parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
for part in parts:
if not part:
continue
eoh = part.find(b("\r\n\r\n"))
if eoh == -1:
-logging.warning("multipart/form-data missing headers")
+gen_log.warning("multipart/form-data missing headers")
continue
headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
disp_header = headers.get("Content-Disposition", "")
disposition, disp_params = _parse_header(disp_header)
if disposition != "form-data" or not part.endswith(b("\r\n")):
-logging.warning("Invalid multipart/form-data")
+gen_log.warning("Invalid multipart/form-data")
continue
value = part[eoh + 4:-2]
if not disp_params.get("name"):
-logging.warning("multipart/form-data value missing name")
+gen_log.warning("multipart/form-data value missing name")
continue
name = disp_params["name"]
if disp_params.get("filename"):
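
Since the multipart parser warnings above now go to a named logger, tests and diagnostics can capture them by logger name instead of patching the root logger. A rough sketch (the handler class here is made up for illustration):

    # Sketch: record warnings emitted on tornado.general during parsing.
    import logging

    class RecordingHandler(logging.Handler):
        def __init__(self):
            logging.Handler.__init__(self)
            self.messages = []

        def emit(self, record):
            self.messages.append(record.getMessage())

    handler = RecordingHandler()
    logging.getLogger("tornado.general").addHandler(handler)
    # ... feed malformed form data to the parser, then inspect handler.messages ...
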
(The remaining 13 changed files in this commit are not shown here.)
