def on_configure(self):
    """Attach Sentry (raven) reporting to Celery once the app is configured.

    Does nothing unless Django settings define RAVEN_CONFIG.
    """
    if not hasattr(settings, 'RAVEN_CONFIG'):
        return
    from raven import Client
    from raven.contrib.celery import register_logger_signal, register_signal

    sentry_client = Client(settings.RAVEN_CONFIG.get('dsn'))
    # Forward log records and Celery task failures to Sentry.
    register_logger_signal(sentry_client)
    register_signal(sentry_client)
# -- Collected example snippets using raven's Client() class --
def on_configure(self):
    """Wire the raven client into Celery's logging and failure signals."""
    sentry = Client(settings.RAVEN_CONFIG["dsn"])
    # register a custom filter to filter out duplicate logs
    register_logger_signal(sentry)
    # hook into the Celery error handler
    register_signal(sentry)
def configure_logging(config, log_name='transcribersofreddit.log'):
    """Configure root logging plus optional Bugsnag and Sentry reporting.

    Both integrations attach ERROR-level handlers to the root logger when
    the corresponding credential is present on `config`.

    NOTE(review): `log_name` is accepted but never used in this function —
    confirm whether file logging was intended.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='%(levelname)s | %(funcName)s | %(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
    )

    # will intercept anything error level or above
    if config.bugsnag_api_key:
        handler = BugsnagHandler()
        handler.setLevel(logging.ERROR)
        logging.getLogger('').addHandler(handler)
        logging.info('Bugsnag enabled!')
    else:
        logging.info('Not running with Bugsnag!')

    if config.sentry_api_url:
        handler = SentryHandler(Client(config.sentry_api_url))
        handler.setLevel(logging.ERROR)
        # I don't know what this line does but it seems required by raven
        setup_logging(handler)
        logging.getLogger('').addHandler(handler)
        logging.info('Sentry enabled!')
    else:
        logging.info('Not running with Sentry!')

    log_header('Starting!')
def ready(self):
    """Django AppConfig hook: configure Celery and optional error reporting.

    Loads Celery settings from Django, autodiscovers tasks for every
    installed app, then — when configured — wires Raven (Sentry) and
    Opbeat into Celery's failure signals.
    """
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings')
    installed_apps = [app_config.name for app_config in apps.get_app_configs()]
    # force=True discovers tasks immediately instead of lazily.
    app.autodiscover_tasks(lambda: installed_apps, force=True)
    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
    if hasattr(settings, 'OPBEAT'):
        from opbeat.contrib.django.models import client as opbeat_client
        from opbeat.contrib.django.models import logger as opbeat_logger
        from opbeat.contrib.django.models import register_handlers as opbeat_register_handlers
        from opbeat.contrib.celery import register_signal as opbeat_register_signal
        try:
            opbeat_register_signal(opbeat_client)
        except Exception as e:
            # Best-effort: log, but never block app startup on Opbeat wiring.
            opbeat_logger.exception('Failed installing celery hook: %s', e)
        # Attach Opbeat's Django logging handlers only when its app is installed.
        if 'opbeat.contrib.django' in settings.INSTALLED_APPS:
            opbeat_register_handlers()
def _get_mock_raven_client(dsn, django_client):
    """Build a raven Client whose transport captures responses for tests."""
    captured = []

    def _make_transport():
        # Each transport instance shares the same capture list.
        return MockTransport(django_client=django_client, response_list=captured)

    mock_client = Client(dsn=dsn, transport=_make_transport)
    # Expose the shared list so tests can inspect what was "sent".
    mock_client.response_list = captured
    # Surface send failures instead of swallowing them.
    mock_client.raise_send_errors = True
    return mock_client
def lambda_handler(event, context=None):
    """Dispatch an AWS Lambda invocation based on the event's shape."""
    # Periodic
    if event.get('detail-type') == 'Scheduled Event':
        debug(event, context)
        return app.on_timer(event)

    # SNS / Dynamodb / Kinesis
    records = event.get('Records')
    if records:
        if records[0]['EventSource'] == 'aws:sns':
            return app.on_config_message(records)
        return debug(event, context)

    if not event.get('path'):
        return debug(event, context)

    # API Gateway
    dsn = app.config.get('sentry-dsn')
    if dsn:
        from raven import Client
        from raven.contrib.bottle import Sentry
        client = Client(dsn)
        # Let Sentry middleware see exceptions instead of bottle's catchall.
        app.app.catchall = False
        wrapped_app = Sentry(app.app, client)
    else:
        wrapped_app = app.app
    return wsgigw.invoke(wrapped_app, event)
def init_logging(gn_env: GNEnvironment) -> None:
    """Configure error reporting for the environment.

    Reads the LOGGING section of the config. When the type is 'sentry' and a
    DSN is present, creates a raven client on `gn_env.sentry` and installs a
    `gn_env.capture_exception` callback. No-ops while testing or when a
    default/mock logging type is configured.

    Raises:
        RuntimeError: if an unknown logging type is configured.
    """
    if len(gn_env.config) == 0 or gn_env.config.get(ConfigKeys.TESTING, False):
        # assume we're testing
        return

    logging_type = gn_env.config.get(ConfigKeys.TYPE, domain=ConfigKeys.LOGGING, default='logger')
    if logging_type is None or len(logging_type.strip()) == 0 or logging_type in ['logger', 'default', 'mock']:
        return
    if logging_type != 'sentry':
        raise RuntimeError('unknown logging type %s' % logging_type)

    dsn = gn_env.config.get(ConfigKeys.DSN, domain=ConfigKeys.LOGGING, default='')
    if dsn is None or len(dsn.strip()) == 0:
        # fixed typo in the warning message: "senty" -> "sentry"
        logger.warning('sentry logging selected but no DSN supplied, not configuring sentry')
        return

    import raven
    import socket
    from git.cmd import Git

    # Tag the Sentry release with `git describe`; DINO_HOME (or the cwd)
    # is presumably the repository root — TODO confirm.
    home_dir = os.environ.get('DINO_HOME', default=None)
    if home_dir is None:
        home_dir = '.'
    tag_name = Git(home_dir).describe()

    gn_env.sentry = raven.Client(
        dsn=dsn,
        environment=os.getenv(ENV_KEY_ENVIRONMENT),
        name=socket.gethostname(),
        release=tag_name
    )

    def capture_exception(e_info) -> None:
        """Report an exception to Sentry; never let reporting itself crash."""
        try:
            gn_env.sentry.captureException(e_info)
        except Exception as e2:
            logger.exception(e_info)
            # lazy %-args instead of eager string interpolation
            logger.error('could not capture exception with sentry: %s', str(e2))

    gn_env.capture_exception = capture_exception
def init_app(self, app):
    """Enable Sentry reporting for the app when SENTRY_DSN is configured."""
    dsn = app.config.get('SENTRY_DSN')
    if not dsn:
        return
    client = raven.Client(dsn)
    handler = SentryHandler(client)
    handler.setLevel(logging.ERROR)
    setup_logging(handler)
    # Celery integration is optional: skip silently when raven's celery
    # contrib module is unavailable.
    try:
        from raven.contrib.celery import (
            register_signal, register_logger_signal)
        register_logger_signal(client)
        register_signal(client)
    except ImportError:
        pass
def on_init(self, function):
    """Install a Sentry log handler, then delegate to BaseLogger.on_init."""
    dsn = os.environ['SENTRY_IO']
    # Name the client after the Lambda function when that env var is set.
    fn_name = os.environ.get('AWS_LAMBDA_FUNCTION_NAME')
    self.client = Client(dsn=dsn, transport=RequestsHTTPTransport, name=fn_name)
    self.handler = SentryHandler(client=self.client, level=logging.ERROR)
    setup_logging(self.handler)
    BaseLogger.on_init(self, function)
def main():
    """This is the main entry point. Here we launch the threads that stream
    submissions and comments to the database
    """
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s @ %(threadName)s -- %(module)s.'
                               '%(funcName)s -- [%(levelname)s] %(message)s')
    # parse arguments
    parser = _get_parser()
    args = vars(parser.parse_args())
    # get configuration variables
    config = _get_config(args['config'])
    sentry = config.get('sentry', None)
    mongo_uri = _get_env_or_config(config, 'mongo')
    subs = config.get('subreddits')
    credentials = config.get('credentials')
    # initialize sentry client
    if sentry is not None:
        # NOTE(review): the Client instance is discarded; unless raven
        # installs global hooks on construction, nothing is retained to
        # report with — confirm intent.
        raven.Client(sentry)
        logging.info('Sentry client initialized.')
    # initialize database
    client = pymongo.MongoClient(mongo_uri)
    database = client.get_default_database()
    hoottit.util.initialize_indexes(database)
    # fire streaming processes
    # multiprocessing.dummy provides a thread-based Pool: one worker per
    # enabled stream (comments, submissions).
    pool = multiprocessing.dummy.Pool(2)
    pool.map(hoottit.util.execute, (
        hoottit.util.pipe(
            getattr(hoottit.producers, p)(subs, credentials.get(p)),
            hoottit.consumers.mongo_upsert(database[p],
                                           'reddit_id',
                                           args['log_threshold'])
        ) for p in ['comments', 'submissions'] if args[p]))
def get_sentry():
    """Return a raven Client with user/platform context, or None when
    raven is not installed or SENTRY_DSN is not set in the environment."""
    try:
        from raven import Client
        if 'SENTRY_DSN' not in os.environ:
            # No DSN configured: behave exactly as if raven were missing.
            raise ImportError
        sentry = Client(release=__version__)
        sentry.user_context({'username': getpass.getuser()})
        sentry.tags_context({
            'os_version': platform.platform(),
            'nuke_version': nuke.NUKE_VERSION_STRING,
        })
        return sentry
    except ImportError:
        return None
def includeme(config):
    """Pyramid setup: register a shared raven client and a request.raven accessor."""
    env = os.environ.get('ENV', 'dev')
    client = raven.Client(environment=env, release=__version__)
    config.registry["raven.client"] = client
    # reify=True caches the value on the request after first access.
    config.add_request_method(get_raven_client, name="raven", reify=True)