The following 50 code examples, extracted from open-source Python projects, illustrate how to use logging.Logger().
def callHandlers(self, record):
    # Backport of Python 3.5's logging.Logger.callHandlers: walk this logger
    # and its ancestors, passing `record` to every handler whose level allows it.
    c = self
    found = 0
    while c:
        for hdlr in c.handlers:
            found = found + 1
            if record.levelno >= hdlr.level:
                hdlr.handle(record)
        if not c.propagate:
            c = None  # break out
        else:
            c = c.parent
    if (found == 0):
        # No handler anywhere on the chain: use logging.lastResort if set,
        # otherwise warn once on stderr (when raiseExceptions is enabled).
        if logging.lastResort:
            if record.levelno >= logging.lastResort.level:
                logging.lastResort.handle(record)
        elif logging.raiseExceptions and not self.manager.emittedNoHandlerWarning:
            sys.stderr.write("No handlers could be found for logger"
                             " \"%s\"\n" % self.name)
            self.manager.emittedNoHandlerWarning = True
def __init__(self, debug=False, logfile=None):
    """VirtualBMC logger: writes to `logfile` if given, else to stderr.

    :param debug: when True, log at DEBUG level instead of INFO.
    :param logfile: optional path of the log file.
    """
    logging.Logger.__init__(self, 'VirtualBMC')
    try:
        if logfile is not None:
            self.handler = logging.FileHandler(logfile)
        else:
            self.handler = logging.StreamHandler()
        formatter = logging.Formatter(DEFAULT_LOG_FORMAT)
        self.handler.setFormatter(formatter)
        self.addHandler(self.handler)
        if debug:
            self.setLevel(logging.DEBUG)
        else:
            self.setLevel(logging.INFO)
    # BUG FIX: was Python 2 syntax `except IOError, e:` — a SyntaxError on
    # Python 3 (the twin of this snippet at another site already uses `as e`).
    except IOError as e:
        if e.errno == errno.EACCES:
            # deliberately best-effort: ignore permission errors on the logfile.
            # NOTE(review): other IOErrors are also swallowed by this handler —
            # confirm that is intended.
            pass
def setUp(self): self.formatter = LogFormatter(color=False) # Fake color support. We can't guarantee anything about the $TERM # variable when the tests are run, so just patch in some values # for testing. (testing with color off fails to expose some potential # encoding issues from the control characters) self.formatter._colors = { logging.ERROR: u("\u0001"), } self.formatter._normal = u("\u0002") # construct a Logger directly to bypass getLogger's caching self.logger = logging.Logger('LogFormatterTest') self.logger.propagate = False self.tempdir = tempfile.mkdtemp() self.filename = os.path.join(self.tempdir, 'log.out') self.handler = self.make_handler(self.filename) self.handler.setFormatter(self.formatter) self.logger.addHandler(self.handler)
def __call__(self, spec=None, spec_loader=None, plugins=None, parser=None,
             serializer=None, logger=None, **kwargs):
    """Resolve string/loader references into live objects, then delegate to
    the parent factory.

    Each argument may be given either as an instance or as a loadable
    reference; references are resolved before the super() call.
    """
    if spec_loader:
        # a spec_loader, when provided, supplies/overrides the spec
        spec = load_spec_by_spec_loader(spec_loader, self.loader)
    try:
        plugins = self.iter_loaded_item_list(plugins, BasePlugin)
    except TypeError:  # pragma: no cover
        pass
    if isinstance(parser, str):
        parser = self.loader.load_class(parser)
    if isinstance(serializer, str):
        serializer = self.loader.load_class(serializer)
    try:
        # logger may be a reference or an existing Logger instance
        logger = self.load_item(logger, Logger)
    except TypeError:  # pragma: no cover
        pass
    return super(ServiceClientFactory, self).__call__(
        spec=spec, plugins=plugins, parser=parser,
        serializer=serializer, logger=logger, **kwargs)
def setUp(self):
    # Minimal stand-in for a `requests`-style response: a status code and a
    # JSON-serialised text body.
    class MockResponse:
        def __init__(self, status_code, data):
            self.status_code = status_code
            self.text = json.dumps(data)
    self.mock_response = MockResponse
    # Stub the connector's __init__ so no real connection is attempted.
    # NOTE(review): the original __init__ is saved here but no tearDown
    # restoring it is visible — confirm isolation between tests.
    self.orig_directory_init = OktaDirectoryConnector.__init__
    OktaDirectoryConnector.__init__ = mock.Mock(return_value=None)
    directory = OktaDirectoryConnector({})
    directory.options = {'all_users_filter': None, 'group_filter_format': '{group}'}
    directory.logger = mock.create_autospec(logging.Logger)
    directory.groups_client = okta.UserGroupsClient('example.com', 'xyz')
    self.directory = directory
def setUp(self):
    # Minimal stand-in for a `requests`-style response: a status code and a
    # JSON-serialised text body.
    class MockResponse:
        def __init__(self, status_code, data):
            self.status_code = status_code
            self.text = json.dumps(data)
    self.mock_response = MockResponse
    # Stub the connector's __init__ so no real connection is attempted.
    # NOTE(review): the original __init__ is saved here but no tearDown
    # restoring it is visible — confirm isolation between tests.
    self.orig_directory_init = OktaDirectoryConnector.__init__
    OktaDirectoryConnector.__init__ = mock.Mock(return_value=None)
    directory = OktaDirectoryConnector({})
    directory.logger = mock.create_autospec(logging.Logger)
    directory.groups_client = okta.UserGroupsClient('example.com', 'xyz')
    self.directory = directory
def init_logger(self, logger):
    """Attach a DEBUG file handler and a console handler to `logger`,
    creating a fresh "sketal" logger when none is supplied.

    Side effects: sets `self.logger_file` and `self.logger`.
    """
    level = logging.DEBUG if self.settings.DEBUG else logging.INFO
    if not logger:
        logger = logging.Logger("sketal", level=level)
    layout = logging.Formatter(
        fmt=u'%(filename)-10s [%(asctime)s] %(levelname)-8s: %(message)s',
        datefmt='%y.%m.%d %H:%M:%S')
    # file handler always records everything, regardless of console level
    to_file = logging.FileHandler('logs.txt')
    to_file.setLevel(logging.DEBUG)
    to_file.setFormatter(layout)
    self.logger_file = to_file
    to_console = logging.StreamHandler()
    to_console.setLevel(level=level)
    to_console.setFormatter(layout)
    logger.addHandler(to_file)
    logger.addHandler(to_console)
    self.logger = logger
def __init__(self, vk_client, logger=None):
    """Request queue for a VK client; uses `logger` or builds a bare,
    handler-less Logger named "vk_reqque"."""
    if logger:
        self.logger = logger
    else:
        self.logger = logging.Logger("vk_reqque")
    self.vk_client = vk_client
    # dispatch control flags (all start inactive)
    self.hold = False
    self.release = False
    self.processing = False
    # request counters — presumably for API rate limiting; TODO confirm
    self._requests_done = 0
    self.requests_done_clear_time = 0
    self.queue = asyncio.Queue()
def get_logger(name, filename, level=logging.DEBUG, fmt=None):
    """Build a logger writing to both the console and `filename`.

    :param name: logger name (constructed directly, bypassing getLogger caching)
    :param filename: path of the log file
    :param level: logger threshold, DEBUG by default
    :param fmt: optional format string; a default is used when falsy
    :return: the configured logging.Logger
    """
    log = logging.Logger(name)
    fmt = fmt or '%(asctime)s-%(name)s-%(levelname)-10s%(message)s'
    layout = logging.Formatter(fmt=fmt, datefmt='%Y-%m-%d %H:%M:%S')
    for sink in (logging.StreamHandler(), logging.FileHandler(filename)):
        sink.setFormatter(layout)
        log.addHandler(sink)
    log.setLevel(level)
    return log
def ensure_file(name, url=None, force=False, logger=logging.getLogger(), postprocess=None):
    """
    Ensures that the file requested exists in the cache, downloading it
    if it does not exist.

    Args:
        name (str): name of the file.
        url (str): url to download the file from, if it doesn't exist.
        force (bool): whether to force the download, regardless of the existence of the file.
        logger (logging.Logger): logger to log results.
            NOTE(review): this default is bound once at import time (the root
            logger at definition time) — confirm that is intended.
        postprocess (function): a function that, if given, will be applied after the
            file is downloaded. The function has the signature ``f(fname)``

    Returns:
        str: file name of the downloaded file.
    """
    fname = Embedding.path(name)
    if not path.isfile(fname) or force:
        if url:
            # NOTE(review): a routine download message is logged at CRITICAL —
            # looks too severe; confirm intent.
            logger.critical('Downloading from {} to {}'.format(url, fname))
            Embedding.download_file(url, fname)
            if postprocess:
                postprocess(fname)
        else:
            raise Exception('{} does not exist!'.format(fname))
    return fname
def __init__(self, connection: Connection, consumer: BrightsideConsumerConfiguration,
             consumer_factory: Callable[[Connection, BrightsideConsumerConfiguration, logging.Logger], BrightsideConsumer],
             command_processor_factory: Callable[[str], CommandProcessor],
             mapper_func: Callable[[BrightsideMessage], Request]) -> None:
    """
    The configuration parameters for one consumer - can create one or more
    performers from this, each of which is a message pump reading from a queue

    :param connection: The connection to the broker
    :param consumer: The consumer we want to create (routing key, queue etc)
    :param consumer_factory: A factory to create a consumer to read from a broker,
        for a given implementation i.e. arame
    :param command_processor_factory: Creates a command processor configured for
        a pipeline
    :param mapper_func: Maps between messages on the queue and requests
        (commands/events)
    """
    self._connection = connection
    self._consumer = consumer
    self._consumer_factory = consumer_factory
    self._command_processor_factory = command_processor_factory
    self._mapper_func = mapper_func
def get_fallback_logger(stream=None):
    # Lazily build — and cache in the module-global `_fallback_logger` — a
    # minimal PowerlineLogger that writes WARNING+ records to `stream`
    # (sys.stderr when None, per StreamHandler's default).
    global _fallback_logger
    if _fallback_logger:
        return _fallback_logger
    log_format = '%(asctime)s:%(levelname)s:%(message)s'
    formatter = logging.Formatter(log_format)
    level = logging.WARNING
    handler = logging.StreamHandler(stream)
    handler.setLevel(level)
    handler.setFormatter(formatter)
    # constructed directly so it is not registered in the logging registry
    logger = logging.Logger('powerline')
    logger.setLevel(level)
    logger.addHandler(handler)
    _fallback_logger = PowerlineLogger(None, logger, '_fallback_')
    return _fallback_logger
def create_logger(self):
    '''Create logger

    This function is used to create logger unless it was already specified
    at initialization.

    :return: Three objects:

    #. :py:class:`logging.Logger` instance.
    #. :py:class:`PowerlineLogger` instance.
    #. Function, output of :py:func:`gen_module_attr_getter`.
    '''
    # Delegates to the module-level create_logger() using this instance's
    # configuration.
    return create_logger(
        common_config=self.common_config,
        use_daemon_threads=self.use_daemon_threads,
        ext=self.ext,
        imported_modules=self.imported_modules,
        stream=self.default_log_stream,
    )
def __enter__(self):
    # Swap the target logger's handlers for a single capturing handler,
    # saving prior state so __exit__ can restore it.
    if isinstance(self.logger_name, logging.Logger):
        logger = self.logger = self.logger_name
    else:
        logger = self.logger = logging.getLogger(self.logger_name)
    formatter = logging.Formatter(self.LOGGING_FORMAT)
    handler = _CapturingHandler()
    handler.setFormatter(formatter)
    self.watcher = handler.watcher
    # snapshot current configuration for restoration on exit
    self.old_handlers = logger.handlers[:]
    self.old_level = logger.level
    self.old_propagate = logger.propagate
    # replace all handlers and disable propagation so only the capturing
    # handler sees records emitted inside the `with` block
    logger.handlers = [handler]
    logger.setLevel(self.level)
    logger.propagate = False
    return handler.watcher
def run(self, argv = None, data = None, logger = None):
    """
    Runs the application.

    :param argv: optional argument vector to parse
    :param data: optional Configuration to install on the instance
    :param logger: optional logging.Logger to install on the instance
    :return: the app's return value on success, 1 on failure
    """
    if logger is not None:
        assert isinstance(logger, logging.Logger), "logger is not a valid logging.Logger"
        self.logger = logger
    if data is not None:
        assert isinstance(data, Configuration), "data is not a valid QXSConsolas.Configuration.Configuration"
        self.data = data
    self.options, self.arguments = self._argparser.parseArguments(argv)
    if self._argparser.loglevel == 1:
        self._configureConsoleLoggers(logging.NOTSET, True)
    elif self._argparser.loglevel == -1:
        self._configureConsoleLoggers(logging.CRITICAL, False)
    try:
        self._argparser.validateRequiredArguments()
        return self._app(ApplicationData(self))
    except Exception as e:
        # BUG FIX: was `logger.exception(e)` — when no `logger` argument was
        # passed, the local was None and this line raised AttributeError,
        # masking the real failure. Use the instance logger instead.
        self.logger.exception(e)
        return 1
def setRoleInfo(self, logger, envname, envconfig, rolename, roleconfig):
    """
    Sets the role info during deployment and backup/restores.

    logger      logging.Logger
                a logger
    envname     string
                name of the environment, where the app should be deployed
    envconfig   QXSConsolas.Configuration.Configuration
                configuration of the environment, where the app should be deployed
    rolename    string
                name of the role, where the app should be deployed
    roleconfig  QXSConsolas.Configuration.Configuration
                configuration of the role, where the app should be deployed
    """
    if isinstance(logger, logging.Logger):
        self._logger = logger
    else:
        # fall back to the root logger when no valid logger was supplied
        self._logger = logging.getLogger()
    self._envname = envname
    self._envconfig = envconfig
    self._rolename = rolename
    self._roleconfig = roleconfig
def __init__(self, debug=False, logfile=None):
    """VirtualBMC logger: writes to `logfile` if given, else to stderr.

    :param debug: when True, log at DEBUG level instead of INFO.
    :param logfile: optional path of the log file.
    """
    logging.Logger.__init__(self, 'VirtualBMC')
    try:
        if logfile is not None:
            self.handler = logging.FileHandler(logfile)
        else:
            self.handler = logging.StreamHandler()
        formatter = logging.Formatter(DEFAULT_LOG_FORMAT)
        self.handler.setFormatter(formatter)
        self.addHandler(self.handler)
        if debug:
            self.setLevel(logging.DEBUG)
        else:
            self.setLevel(logging.INFO)
    except IOError as e:
        if e.errno == errno.EACCES:
            # deliberately best-effort: ignore permission errors on the logfile.
            # NOTE(review): IOErrors other than EACCES are also swallowed here
            # (no re-raise path) — confirm that is intended.
            pass
def test_logPassthrough(self):
    """If MDKHandler is used, logging via stdlib is passed to MDK."""
    # Logger built directly (bypassing getLogger caching) so handlers do
    # not leak between tests.
    logger = logging.Logger("mylog")
    logger.setLevel(logging.DEBUG)
    mdk, tracer = create_mdk_with_faketracer()
    session = mdk.session()
    session.trace("DEBUG")
    logger.addHandler(MDKHandler(mdk, lambda: session))
    # one record at each stdlib level
    logger.debug("debugz")
    logger.info("infoz")
    logger.warning("warnz")
    logger.error("errorz")
    logger.critical("criticalz")
    # the expected list uses "warn" (not "warning"), matching MDK's level name
    self.assertEqual(
        tracer.messages,
        [{"level": level.upper(), "category": "mylog", "text": level + "z",
          "context": session._context.traceId}
         for level in ["debug", "info", "warn", "error", "critical"]])
def test_sessions(self):
    """
    The given session's context is used; if no session is available a
    default session is used.
    """
    mdk, tracer = create_mdk_with_faketracer()
    session1, session3 = mdk.session(), mdk.session()
    # The mutable default argument is intentional: successive calls pop the
    # next scripted session; the None in the middle forces the handler to
    # fall back to its default session.
    def get_session(results=[session1, None, session3]):
        return results.pop(0)
    logger = logging.Logger("mylog")
    handler = MDKHandler(mdk, get_session)
    logger.addHandler(handler)
    for i in range(3):
        logger.info("hello")
    self.assertEqual(
        [d["context"] for d in tracer.messages],
        [s._context.traceId
         for s in [session1, handler._default_session, session3]])
def test_withinARequest(self):
    """
    When logging inside a Flask route, the MDK Session for the request is
    used if MDKLoggingHandler was set up.

    NOTE(review): renamed from `tes_withinARequest` — the missing "t" meant
    unittest/pytest never discovered this test, so it silently never ran.
    """
    logger = logging.Logger("logz")
    mdk, tracer = create_mdk_with_faketracer()
    app = make_flask_app(logger)
    mdk_setup(app, mdk=mdk)
    handler = MDKLoggingHandler(mdk)
    logger.addHandler(handler)
    client = app.test_client()
    client.get("/")
    message = tracer.messages[-1]
    self.assertEqual("hello: " + message["context"], message["text"])
def __init__(self, config, frame_shape, log=Logger(__name__)):
    """Creates and sets up SPEAD streams.

    The configuration of streams is passed in via the ``config`` argument.
    The dimensions of the visibility data must be specified in order to
    initialise the payload; ``frame_shape`` is a tuple of dimensions
    defined in the ICD.

    Args:
        config (dict): Dictionary of settings (see above).
        frame_shape (tuple): Dimensions of the payload visibility data.
        log (logging.Logger): Python logging object.
            NOTE(review): this default is constructed once at import time
            and shared by all instances relying on it — confirm intent.
    """
    self._config = config
    self._frame_shape = frame_shape
    self._log = log
    self._heap_descriptor = self._init_heap_descriptor()
    self._streams = list()
    self._heap_counter = 0
    self._send_timer = 0
    self._heap_size = self._get_heap_size()
    self._create_streams()
    self._payload = self._init_payload()
def get_logger(self) -> logging.Logger:
    """Configure the ROOT logger with a rotating file handler and a console
    handler, then return it.

    NOTE(review): every call appends a fresh pair of handlers to the root
    logger — repeated calls would duplicate output; confirm this is invoked
    only once per process.
    """
    formatter = logging.Formatter(
        fmt='%(levelname)s (%(threadName)-10s) :%(name)s: %(message)s '
            '(%(asctime)s; %(filename)s:%(lineno)d)',
        datefmt="%Y-%m-%d %H:%M:%S")
    handlers = [
        logging.handlers.RotatingFileHandler(self.__filename, encoding='utf8',
                                             maxBytes=self.__max_bytes,
                                             backupCount=3),
        logging.StreamHandler()
    ]
    self.__root_logger = logging.getLogger()
    if (self.__debug):
        level = logging.DEBUG
    else:
        level = logging.WARNING
    self.__root_logger.setLevel(level)
    # both logger and handlers share the same threshold
    for h in handlers:
        h.setFormatter(formatter)
        h.setLevel(level)
        self.__root_logger.addHandler(h)
    return self.__root_logger
def enable_pretty_logging(logger='calmjs', level=logging.DEBUG, stream=None):
    """
    Shorthand to enable pretty logging.

    Accepts either a logger name or a Logger instance; attaches a stream
    handler with a timestamped format and returns a zero-argument cleanup
    function that undoes both the handler and the level change.
    """
    if isinstance(logger, logging.Logger):
        target = logger
    else:
        target = logging.getLogger(logger)
    previous_level = target.level
    stream_handler = logging.StreamHandler(stream)
    stream_handler.setFormatter(logging.Formatter(
        u'%(asctime)s %(levelname)s %(name)s %(message)s'))
    target.addHandler(stream_handler)
    target.setLevel(level)

    def cleanup():
        # undo exactly what was done above
        target.removeHandler(stream_handler)
        target.level = previous_level

    return cleanup
def serve(services: List[Service], credentials: BrokerCredentials,
          logger: logging.Logger = logging.root, port=5000, debug=False):
    """
    Starts flask with the given broker

    :param services: Services that this broker provides
    :param credentials: Username and password that will be required to
        communicate with service broker
    :param logger: Used for api logs. This will not influence Flasks logging behavior
    :param port: Port
    :param debug: Enables debugging in flask app
    """
    from flask import Flask
    app = Flask(__name__)
    blueprint = get_blueprint(services, credentials, logger)
    logger.debug("Register openbrokerapi blueprint")
    app.register_blueprint(blueprint)
    logger.info("Start Flask on 0.0.0.0:%s" % port)
    # blocking call: runs Flask's dev server until interrupted
    app.run('0.0.0.0', port, debug)
def __init__(self, log=None):
    """A base class for config loaders.

    Parameters
    ----------
    log : instance of :class:`logging.Logger` to use.
        By default logger of :meth:`traitlets.config.application.Application.instance()`
        will be used

    Examples
    --------
    >>> cl = ConfigLoader()
    >>> config = cl.load_config()
    >>> config
    {}
    """
    self.clear()
    if log is None:
        self.log = self._log_default()
        self.log.debug('Using default logger')
    else:
        self.log = log
def test_persistent_loggers(self):
    # Logger objects are persistent and retain their configuration, even
    # if visible references are destroyed.
    self.root_logger.setLevel(logging.INFO)
    foo = logging.getLogger("foo")
    self._watch_for_survival(foo)
    foo.setLevel(logging.DEBUG)
    # root is at INFO, so its debug record is dropped; foo's passes
    self.root_logger.debug(self.next_message())
    foo.debug(self.next_message())
    self.assert_log_lines([
        ('foo', 'DEBUG', '2'),
    ])
    del foo
    # foo has survived.
    self._assertTruesurvival()
    # foo has retained its settings.
    bar = logging.getLogger("foo")
    bar.debug(self.next_message())
    self.assert_log_lines([
        ('foo', 'DEBUG', '2'),
        ('foo', 'DEBUG', '3'),
    ])
def set_direct_console_logger(cls, loglevel=logging.INFO):
    """
    Configure and add the handler for the direct console logger.

    Parameters:
        loglevel (int): numeric value of the logging level (e.g. DEBUG == 10)

    Returns:
        logger (Logger): the root logger's child named 'console'
    """
    console_logger = cls.get_root_logger().getChild("console")
    # logger itself passes everything; the handler filters by loglevel
    console_logger.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(loglevel)
    console_logger.addHandler(console_handler)
    console_logger.propagate = True
    return console_logger
def get_module_logger(cls, mod, logdir):
    """
    Returns a logging.Logger specific to the given module. If the logger
    has not yet been configured, it will be created with default options
    by LogUtil.create_module_logger()

    Parameters:
        mod (Module): module to return a logger for
        logdir (str): the log directory path

    Returns:
        (logger): logging.Logger specific to the given ec2rlcore.module
    """
    # lazily configure on first request for this "placement:name" pair
    if "{}:{}".format(mod.placement, mod.name) not in cls._module_loggers:
        cls.create_module_logger(mod, logdir)
    return logging.getLogger("ec2rl").getChild("module").getChild(mod.placement).getChild(mod.name)
def log(self, lvl, msg, *args, **kwargs):
    """Emit any statsd metric carried in kwargs['extra'], then delegate to
    logging.Logger.log for the regular record.

    Metric emission is best-effort: failures are logged as a warning and
    never prevent the normal log call.
    """
    try:
        extra = kwargs.get("extra", None)
        if extra is not None:
            metric = extra.get(METRIC_VAR, None)
            value = extra.get(VALUE_VAR, None)
            typ = extra.get(MTYPE_VAR, None)
            # BUG FIX: was `if metric and value and typ`, which silently
            # dropped any metric whose value was 0 (a perfectly valid gauge
            # or counter value). Compare against None explicitly.
            if metric is not None and value is not None and typ is not None:
                if typ == GAUGE_TYPE:
                    self.gauge(metric, value)
                elif typ == COUNTER_TYPE:
                    self.increment(metric, value)
                elif typ == HISTOGRAM_TYPE:
                    self.histogram(metric, value)
                else:
                    pass  # unknown metric type: ignore
    except Exception:
        logging.Logger.warning(self, "Failed to log to statsd", exc_info=True)
    if msg:
        logging.Logger.log(self, lvl, msg, *args, **kwargs)
def __init__(self, logger=None, mlogger=None):
    """
    @param logger: holds logger for where to log info/warnings/errors
        If None, a default logger will be created.
    @type logger: L{logging.Logger}
    @param mlogger: holds mlogger for where to log info/warnings/errors
        If None, a default mlogger will be created.
    @type mlogger: L{utils.MLogger}
    """
    super(Settings, self).__init__(logger, mlogger)
    # command-line argument state, all off/empty by default
    self.TArg = False
    self.EArg = False
    self.OArg = False
    self.DArg = False
    self.MArg = False
    self.NArg = False
    self.XArg = False  # -2, --twice
    self.PArg = None
    self.RArg = None   # -r read PIN
    self.AArg = None   # -R read passphrase
    self.SArg = False  # Safety check
    self.WArg = False  # Wipe plaintxt after encryption
    self.QArg = False  # noconfirm
    self.inputFiles = []  # list of input filenames
def __init__(self, terminalMode=None, logger=None, qtextbrowser=None):
    """
    Get as many necessary parameters upfront as possible, so the user
    does not have to provide them later on each call.

    @param terminalMode: log only to terminal?
    @type terminalMode: C{bool}
    @param logger: holds logger for where to log info/warnings/errors
    @type logger: L{logging.Logger}
    @param qtextbrowser: holds GUI widget for where to log info/warnings/errors
    @type qtextbrowser: L{PyQt5.QtWidgets.QTextBrowser}
    """
    self.terminalMode = terminalMode
    self.logger = logger
    self.qtextbrowser = qtextbrowser
    # qtextbrowser text will be created by assembling:
    # qtextheader + qtextContent + qtextTrailer
    self.qtextheader = u''
    self.qtextcontent = u''
    self.qtexttrailer = u''
def logger(self, logger):
    """Install a custom logger on this object.

    :param logger: The logger to use
    :type logger: `~logging.Logger` instance
    """
    self._logger = logger
def getLogger():
    # Lazily build and cache a singleton logger on the Logger class; later
    # calls return the cached instance so handlers are attached only once.
    # (Original comments here were mojibake and have been replaced.)
    if Logger.logger is not None:
        return Logger.logger
    Logger.logger = logging.Logger(Logger.log_name)
    # optional console handler, enabled via configuration flag
    if Logger.log_print == "True":
        print_handler = logging.StreamHandler()
        print_fmt = logging.Formatter(
            Logger.log_formatter, datefmt=Logger.log_formatter_datefmt
        )
        print_handler.setFormatter(print_fmt)
        Logger.logger.addHandler(print_handler)
    # size-rotated file handler is always attached
    file_handler = logging.handlers.RotatingFileHandler(
        filename = Logger.log_file,
        maxBytes = Logger.log_max_byte,
        backupCount = Logger.log_backup_count
    )
    file_fmt = logging.Formatter(
        Logger.log_formatter, datefmt=Logger.log_formatter_datefmt
    )
    file_handler.setFormatter(file_fmt)
    Logger.logger.addHandler(file_handler)
    Logger.logger.setLevel(Logger.levels.get(Logger.log_level))
    return Logger.logger
def __init__(self, embeddings, nbow, vocabulary_min=50, vocabulary_max=500,
             vocabulary_optimizer=TailVocabularyOptimizer(),
             verbosity=logging.INFO, main_loop_log_interval=60):
    """
    Initializes a new instance of WMD class.

    :param embeddings: The embeddings model, see WMD.embeddings.
    :param nbow: The nBOW model, see WMD.nbow.
    :param vocabulary_min: The minimum bag size, see
        :py:attr:`~wmd.WMD.vocabulary_min`.
    :param vocabulary_max: The maximum bag size, see
        :py:attr:`~wmd.WMD.vocabulary_max`.
    :param vocabulary_optimizer: The bag size reducer, see
        :py:attr:`~wmd.WMD.vocabulary_optimizer`.
        NOTE(review): this default is a single instance shared by all
        objects that rely on it — confirm the optimizer is stateless.
    :param verbosity: The log verbosity level.
    :param main_loop_log_interval: Time frequency of logging updates, see
        :py:attr:`~wmd.WMD.main_loop_log_interval`.
    :type embeddings: object with :meth:`~object.__getitem__`
    :type nbow: object with :meth:`~object.__iter__` and
        :meth:`~object.__getitem__`
    :type vocabulary_min: int
    :type vocabulary_max: int
    :type vocabulary_optimizer: callable
    :type verbosity: int
    :type main_loop_log_interval: int
    :raises TypeError: if some of the arguments are invalid.
    :raises ValueError: if some of the arguments are invalid.
    """
    self._relax_cache = None
    self._exact_cache = None
    self._centroid_cache = None
    self.embeddings = embeddings
    self.nbow = nbow
    self.vocabulary_min = vocabulary_min
    self.vocabulary_max = vocabulary_max
    self.vocabulary_optimizer = vocabulary_optimizer
    self._log = logging.getLogger("WMD")
    # FIX: was `self._log.level = logging.Logger("", verbosity).level`,
    # which constructed a throwaway Logger just to validate the level;
    # setLevel() performs the same validation directly.
    self._log.setLevel(verbosity)
    self.main_loop_log_interval = main_loop_log_interval
def logger(self) -> logging.Logger:
    """
    Gets the logger belonging to this plugin, creating it lazily from the
    plugin's manifest name on first access.

    :return: The logger
    """
    if self._logger is None:
        plugin_name = self.manifest.get("name")
        self._logger = logging.getLogger(plugin_name)
    return self._logger
def logger(self):
    """Create and return a logger that logs to a log file (console output
    is currently disabled). Use :meth:`open_log` to open the log file in
    Console.

    :returns: an initialised :class:`~logging.Logger`
    """
    if self._logger:
        return self._logger

    # Initialise new logger and optionally handlers
    log = logging.getLogger('workflow')
    if not len(log.handlers):  # Only add one set of handlers
        layout = logging.Formatter(
            '%(asctime)s %(filename)s:%(lineno)s'
            ' %(levelname)-8s %(message)s',
            datefmt='%H:%M:%S')
        rotating = logging.handlers.RotatingFileHandler(
            self.logfile, maxBytes=1024*1024, backupCount=1)
        rotating.setFormatter(layout)
        log.addHandler(rotating)
    log.setLevel(logging.DEBUG)
    self._logger = log
    return log
def __init__(self, name = ''):
    # Build a logger that writes to a rotating file (plain format) and to
    # the console (colorised via colorlog); file/console levels come from
    # the [LOGGER] section of the application config.
    conf = com_config.Config()
    self.config = conf.getconfig()
    self.logger = logging.Logger(name, logging.DEBUG)
    self.logger.name = name
    # Formatter
    formatterfile = logging.Formatter(
        '%(asctime)s %(levelname)s : %(name)s - %(message)s',
        datefmt='%d/%m/%Y %H:%M:%S')
    formatterconsole = colorlog.ColoredFormatter(
        '%(asctime)s %(log_color)s%(levelname)s : %(name)s - %(message)s',
        datefmt='%d/%m/%Y %H:%M:%S',
        log_colors={'DEBUG': 'white', 'INFO': 'green',
                    'WARNING': 'bold_yellow', 'ERROR': 'bold_red',
                    'CRITICAL': 'bold_red'})
    # First logger (file)
    self.logger.setLevel(logging.DEBUG)
    file_handler = RotatingFileHandler(self.config['LOGGER']['logfile'], 'a',
                                       int(self.config['LOGGER']['logfilesize']), 1)
    file_handler.setLevel(int(self.config['LOGGER']['levelfile']))
    file_handler.setFormatter(formatterfile)
    self.logger.addHandler(file_handler)
    # second logger (console)
    steam_handler = logging.StreamHandler()
    steam_handler.setLevel(int(self.config['LOGGER']['levelconsole']))
    steam_handler.setFormatter(formatterconsole)
    self.logger.addHandler(steam_handler)
def setUp(self):
    super(EnablePrettyLoggingTest, self).setUp()
    self.options = OptionParser()
    define_logging_options(self.options)
    # Logger constructed directly (not via getLogger) so it bypasses the
    # shared registry; propagate=False keeps records out of root handlers.
    self.logger = logging.Logger('tornado.test.log_test.EnablePrettyLoggingTest')
    self.logger.propagate = False
def __call__(self, logger, **kwargs):
    # Resolve `logger` (which may be a loadable reference) into a Logger
    # instance before delegating to the parent factory; load_item raises
    # TypeError for values it cannot load, which is deliberately ignored.
    try:
        logger = self.load_item(logger, Logger)
    except TypeError:  # pragma: no cover
        pass
    return super(LoggerPluginFactory, self).__call__(logger=logger, **kwargs)
def get_logger(self):
    """
    Override this method to designate the logger for the application

    :return: a :py:class:`logging.Logger` instance
    """
    # enables the pretty console format before handing back the shared
    # "tornado.application" logger
    enable_pretty_logging()
    return logging.getLogger("tornado.application")
# endregion
# region Can be called by user
def __init__(self, verbosity=5):
    # Wire up the UDP transport and IAX protocol helpers.
    # NOTE(review): `Logger(verbosity)` appears to be a project logging
    # helper exposing a `.log` callable — it is not logging.Logger (which
    # takes a name, not a verbosity); confirm against the project's Logger.
    self.udp_ts = UDPTransceiver()
    self.iax_proto = IAXProto()
    self.log = Logger(verbosity).log
    self.liveUsers = []  # accumulates discovered users — confirm semantics
def __init__(self, verbosity=5):
    # Wire up the UDP transport and IAX protocol helpers.
    # NOTE(review): `Logger(verbosity)` appears to be a project logging
    # helper exposing a `.log` callable — it is not logging.Logger (which
    # takes a name, not a verbosity); confirm against the project's Logger.
    self.udp_ts = UDPTransceiver()
    self.iax_proto = IAXProto()
    self.log = Logger(verbosity).log
    self.liveHosts = []  # accumulates discovered hosts — confirm semantics