Skip to content

Commit

Permalink
make loggly logs less verbose
Browse files Browse the repository at this point in the history
  • Loading branch information
lyoshenka committed Mar 8, 2017
1 parent 531fa63 commit 68f5588
Show file tree
Hide file tree
Showing 6 changed files with 24 additions and 106 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ at any time.
## [Unreleased]
### Added
* Add file filters: `claim_id`, `outpoint`, and `rowid`
*
* Make loggly logs less verbose
*

### Changed
Expand Down
2 changes: 0 additions & 2 deletions docs/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,6 @@ Returns:
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'search_timeout': float,
'download_timeout': int
'max_search_results': int,
Expand All @@ -458,7 +457,6 @@ Args:
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'download_timeout': int
Returns:
settings dict
Expand Down
1 change: 0 additions & 1 deletion lbrynet/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,6 @@ def _convert_value(value):
'search_timeout': (float, 5.0),
'startup_scripts': (list, []),
'ui_branch': (str, 'master'),
'upload_log': (bool, True),
'use_auth_http': (bool, False),
'use_upnp': (bool, True),
'wallet': (str, LBRYUM_WALLET),
Expand Down
73 changes: 17 additions & 56 deletions lbrynet/core/log_support.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,15 @@
import datetime
import inspect
import json
import logging
import logging.handlers
import os
import platform
import sys
import traceback

import requests
from requests_futures.sessions import FuturesSession
import twisted.python.log

import lbrynet
from lbrynet import analytics
from lbrynet import build_type
from lbrynet import conf
from lbrynet import __version__ as lbrynet_version, analytics, build_type, conf
from lbrynet.core import utils

####
Expand All @@ -25,7 +19,7 @@
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame.
#
if hasattr(sys, 'frozen'): #support for py2exe
if hasattr(sys, 'frozen'): # support for py2exe
_srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif __file__[-4:].lower() in ['.pyc', '.pyo']:
_srcfile = __file__[:-4] + '.py'
Expand Down Expand Up @@ -86,6 +80,7 @@ def _log_decorator(fn):
handler has its log-level set and is attached to the specified
logger or the root logger.
"""

def helper(*args, **kwargs):
log = kwargs.pop('log', logging.getLogger())
level = kwargs.pop('level', logging.INFO)
Expand All @@ -96,6 +91,7 @@ def helper(*args, **kwargs):
handler = fn(*args, **kwargs)
configure_handler(handler, log, level)
return handler

return helper


Expand Down Expand Up @@ -146,14 +142,15 @@ def configure_analytics_handler(analytics_manager):

def get_loggly_url(token=None, version=None):
token = token or utils.deobfuscate(conf.settings['LOGGLY_TOKEN'])
version = version or lbrynet.__version__
version = version or lbrynet_version
return LOGGLY_URL.format(token=token, tag='lbrynet-' + version)


def configure_loggly_handler(*args, **kwargs):
if build_type.BUILD == 'dev':
return
_configure_loggly_handler(*args, **kwargs)
level = kwargs.pop('level', logging.WARNING)
_configure_loggly_handler(*args, level=level, **kwargs)


@_log_decorator
Expand All @@ -168,6 +165,7 @@ def _configure_loggly_handler(url=None, **kwargs):

class JsonFormatter(logging.Formatter):
"""Format log records using json serialization"""

def __init__(self, **kwargs):
self.attributes = kwargs

Expand All @@ -187,15 +185,16 @@ def format(self, record):
data['exc_info'] = self.formatException(record.exc_info)
return json.dumps(data)


####
# This code is copied from logging/__init__.py in the python source code
####
def findCaller(srcfile=None):
"""Returns the filename, line number and function name of the caller"""
srcfile = srcfile or _srcfile
f = inspect.currentframe()
#On some versions of IronPython, currentframe() returns None if
#IronPython isn't run with -X:Frames.
# On some versions of IronPython, currentframe() returns None if
# IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)"
Expand All @@ -209,6 +208,8 @@ def findCaller(srcfile=None):
rv = (filename, f.f_lineno, co.co_name)
break
return rv


###


Expand Down Expand Up @@ -296,6 +297,7 @@ class LoggerNameFilter(object):
Debug records pass if the log record name (or a parent) match
the input list of logger names.
"""

def __init__(self, logger_names):
self.logger_names = logger_names

Expand All @@ -318,52 +320,9 @@ def get_parent(logger_name):
return '.'.join(names)


class LogUploader(object):
    """Uploads the contents of a local log file to the configured log-post endpoint.

    Tracks the size the log file had when the uploader was created so that a
    later upload can exclude everything written before that point.
    """

    def __init__(self, log_name, log_file, log_size):
        # log_name: label used to build the upload "type" field
        # log_file: path of the log file on disk
        # log_size: byte offset marking the start of "new" log content
        self.log_name = log_name
        self.log_file = log_file
        self.log_size = log_size

    def upload(self, exclude_previous, id_hash, log_type):
        """POST the log (optionally only the new portion) with metadata params."""
        if not os.path.isfile(self.log_file):
            # Nothing to upload if the file never existed or was removed.
            return
        params = {
            'date': datetime.datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
            'hash': id_hash,
            'sys': platform.system(),
            'type': self.get_type(log_type),
            'log': self.log_contents(exclude_previous),
        }
        requests.post(conf.settings['LOG_POST_URL'], params)

    def log_contents(self, exclude_previous):
        """Return the log text; skip past the recorded offset when excluding old content."""
        with open(self.log_file) as log_fh:
            if exclude_previous:
                log_fh.seek(self.log_size)
            return log_fh.read()

    def get_type(self, log_type):
        """Return 'name-type' when a type is given, otherwise just the log name."""
        return "%s-%s" % (self.log_name, log_type) if log_type else self.log_name

    @classmethod
    def load(cls, log_name, log_file):
        """Create an uploader whose offset is the log file's current size (0 if absent)."""
        size = 0
        if os.path.isfile(log_file):
            with open(log_file, 'r') as log_fh:
                size = len(log_fh.read())
        return cls(log_name, log_file, size)


class Logger(logging.Logger):
"""A logger that has an extra `fail` method useful for handling twisted failures."""

def fail(self, callback=None, *args, **kwargs):
"""Returns a function to log a failure from an errback.
Expand Down Expand Up @@ -391,6 +350,7 @@ def fail(self, callback=None, *args, **kwargs):
keywoards are treated as normal log kwargs.
"""
fn, lno, func = findCaller()

def _fail(err, msg, *msg_args, **msg_kwargs):
level = msg_kwargs.pop('level', logging.ERROR)
msg += ": %s"
Expand All @@ -410,6 +370,7 @@ def _fail(err, msg, *msg_args, **msg_kwargs):
# never do and then we end up with an unhandled
# error that will get swallowed by twisted
self.exception('Failed to run callback')

return _fail

def trace(self, msg, *args, **kwargs):
Expand Down
50 changes: 5 additions & 45 deletions lbrynet/lbrynet_daemon/Daemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,11 +220,10 @@ class Daemon(AuthJSONRPCServer):
LBRYnet daemon, a jsonrpc interface to lbry functions
"""

def __init__(self, root, analytics_manager, upload_logs_on_shutdown=True):
def __init__(self, root, analytics_manager):
AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)

self.upload_logs_on_shutdown = upload_logs_on_shutdown
self.allowed_during_startup = [
'stop', 'status', 'version',
# delete these once they are fully removed:
Expand All @@ -250,7 +249,6 @@ def __init__(self, root, analytics_manager, upload_logs_on_shutdown=True):
self.max_key_fee = conf.settings['max_key_fee']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.upload_log = conf.settings['upload_log']
self.search_timeout = conf.settings['search_timeout']
self.download_timeout = conf.settings['download_timeout']
self.max_search_results = conf.settings['max_search_results']
Expand All @@ -277,7 +275,6 @@ def __init__(self, root, analytics_manager, upload_logs_on_shutdown=True):
# TODO: this should probably be passed into the daemon, or
# possibly have the entire log upload functionality taken out
# of the daemon, but I don't want to deal with that now
self.log_uploader = log_support.LogUploader.load('lbrynet', self.log_file)

self.analytics_manager = analytics_manager
self.lbryid = utils.generate_id()
Expand Down Expand Up @@ -306,29 +303,16 @@ def __init__(self, root, analytics_manager, upload_logs_on_shutdown=True):
def setup(self, launch_ui):
self._modify_loggly_formatter()

@defer.inlineCallbacks
def _announce_startup():
def _wait_for_credits():
if float(self.session.wallet.get_balance()) == 0.0:
self.startup_status = STARTUP_STAGES[6]
return reactor.callLater(1, _wait_for_credits)
else:
return _announce()

def _announce():
self.announced_startup = True
self.startup_status = STARTUP_STAGES[5]
log.info("Started lbrynet-daemon")
log.info("%i blobs in manager", len(self.session.blob_manager.blobs))

if self.first_run:
d = self._upload_log(log_type="first_run")
elif self.upload_log:
d = self._upload_log(exclude_previous=True, log_type="start")
else:
d = defer.succeed(None)
d.addCallback(lambda _: self.session.blob_manager.get_all_verified_blobs())
d.addCallback(lambda _: _announce())
return d
yield self.session.blob_manager.get_all_verified_blobs()
yield _announce()

log.info("Starting lbrynet-daemon")

Expand Down Expand Up @@ -528,16 +512,6 @@ def _add_query_handlers(self, query_handlers):
self.query_handlers[query_id] = handler
return defer.succeed(None)

def _upload_log(self, log_type=None, exclude_previous=False, force=False):
    """Best-effort upload of the daemon log file.

    Uploads when the user has enabled log uploads (self.upload_log) or when
    `force` is set; a failed HTTP request is only logged, never raised.
    Always returns an already-fired deferred so callers can chain on it.
    """
    should_upload = force or self.upload_log
    if should_upload:
        short_id = conf.settings.installation_id[:SHORT_ID_LEN]
        try:
            self.log_uploader.upload(exclude_previous, short_id, log_type)
        except requests.RequestException:
            # Best-effort: the daemon must keep running even if upload fails.
            log.warning('Failed to upload log file')
    return defer.succeed(None)

def _clean_up_temp_files(self):
for path in self.uploaded_temp_files:
try:
Expand All @@ -555,17 +529,7 @@ def _shutdown(self):

self._clean_up_temp_files()

if self.upload_logs_on_shutdown:
try:
d = self._upload_log(
log_type="close", exclude_previous=False if self.first_run else True)
except Exception:
log.warn('Failed to upload log', exc_info=True)
d = defer.succeed(None)
else:
d = defer.succeed(None)

d.addCallback(lambda _: self._stop_server())
d = self._stop_server()
d.addErrback(log.fail(), 'Failure while shutting down')
d.addCallback(lambda _: self._stop_reflector())
d.addErrback(log.fail(), 'Failure while shutting down')
Expand All @@ -584,7 +548,6 @@ def _update_settings(self, settings):
'download_directory': str,
'max_upload': float,
'max_download': float,
'upload_log': bool,
'download_timeout': int,
'search_timeout': float,
'cache_time': int
Expand Down Expand Up @@ -620,7 +583,6 @@ def can_update_key(settings, key, setting_type):
self.download_directory = conf.settings['download_directory']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.upload_log = conf.settings['upload_log']
self.download_timeout = conf.settings['download_timeout']
self.search_timeout = conf.settings['search_timeout']
self.cache_time = conf.settings['cache_time']
Expand Down Expand Up @@ -1314,7 +1276,6 @@ def jsonrpc_settings_get(self):
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'search_timeout': float,
'download_timeout': int
'max_search_results': int,
Expand Down Expand Up @@ -1347,7 +1308,6 @@ def jsonrpc_settings_set(self, **kwargs):
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'download_timeout': int
Returns:
settings dict
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/lbrynet_daemon/test_Daemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
}
daemon = LBRYDaemon(None, None, upload_logs_on_shutdown=False)
daemon = LBRYDaemon(None, None)
daemon.session = mock.Mock(spec=Session.Session)
daemon.exchange_rate_manager = ExchangeRateManager.DummyExchangeRateManager(rates)
base_prm = PaymentRateManager.BasePaymentRateManager(rate=data_rate)
Expand Down

0 comments on commit 68f5588

Please sign in to comment.