The following 50 code examples, extracted from open-source Python projects, illustrate uses of the standard logging module whose docstrings and comments reference its documentation (logging.html).
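Before the project examples, here is a minimal, hedged sketch (not taken from any of the projects below) of the kind of setup most of them perform; the format string fields are the standard LogRecord attributes documented at https://docs.python.org/3/library/logging.html#logrecord-attributes:

import logging

# Minimal configuration sketch: levelname/asctime/message are standard LogRecord attributes.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname).1s %(name)s: %(message)s',
    datefmt='%m%d %H:%M:%S',
)
logging.getLogger(__name__).info("hello")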
def setup_logging(path):
    """Initialize logging to screen and path."""
    # See https://docs.python.org/2/library/logging.html#logrecord-attributes
    # [IWEF]mmdd HH:MM:SS.mmm] msg
    fmt = '%(levelname).1s%(asctime)s.%(msecs)03d] %(message)s'  # pylint: disable=line-too-long
    datefmt = '%m%d %H:%M:%S'
    logging.basicConfig(
        level=logging.INFO,
        format=fmt,
        datefmt=datefmt,
    )
    build_log = logging.FileHandler(filename=path, mode='w')
    build_log.setLevel(logging.INFO)
    formatter = logging.Formatter(fmt, datefmt=datefmt)
    build_log.setFormatter(formatter)
    logging.getLogger('').addHandler(build_log)
    return build_log
def setup_default_logger(logfile=None, level=logging.DEBUG, formatter=None, maxBytes=0, backupCount=0):
    """
    Deprecated. Use `logzero.loglevel(..)`, `logzero.logfile(..)`, etc.

    Globally reconfigures the default `logzero.logger` instance.

    Usage:

    .. code-block:: python

        from logzero import logger, setup_default_logger
        setup_default_logger(level=logging.WARN)
        logger.info("hello")  # this will not be displayed anymore because minimum loglevel was set to WARN

    :arg string logfile: If set, also write logs to the specified filename.
    :arg int level: Minimum `logging-level <https://docs.python.org/2/library/logging.html#logging-levels>`_ to display (default: `logging.DEBUG`).
    :arg Formatter formatter: `Python logging Formatter object <https://docs.python.org/2/library/logging.html#formatter-objects>`_ (by default uses the internal LogFormatter).
    :arg int maxBytes: Size of the logfile when rollover should occur. Defaults to 0, rollover never occurs.
    :arg int backupCount: Number of backups to keep. Defaults to 0, rollover never occurs.
    """
    global logger
    logger = setup_logger(name=LOGZERO_DEFAULT_LOGGER, logfile=logfile, level=level, formatter=formatter)
    return logger
def loglevel(level=logging.DEBUG, update_custom_handlers=False):
    """
    Set the minimum loglevel for the default logger (`logzero.logger`).

    This reconfigures only the internal handlers of the default logger (eg. stream and logfile).
    You can also update the loglevel for custom handlers by using `update_custom_handlers=True`.

    :arg int level: Minimum `logging-level <https://docs.python.org/2/library/logging.html#logging-levels>`_ to display (default: `logging.DEBUG`).
    :arg bool update_custom_handlers: If you added custom handlers to this logger and want this to update them too, you need to set `update_custom_handlers` to `True`
    """
    logger.setLevel(level)

    # Reconfigure existing internal handlers
    for handler in list(logger.handlers):
        if hasattr(handler, LOGZERO_INTERNAL_LOGGER_ATTR) or update_custom_handlers:
            # Don't update the loglevel if this handler uses a custom one
            if hasattr(handler, LOGZERO_INTERNAL_HANDLER_IS_CUSTOM_LOGLEVEL):
                continue

            # Update the loglevel for all default handlers
            handler.setLevel(level)

    global _loglevel
    _loglevel = level
def formatter(formatter, update_custom_handlers=False):
    """
    Set the formatter for all handlers of the default logger (``logzero.logger``).

    This reconfigures only the logzero internal handlers by default, but you can also
    reconfigure custom handlers by using ``update_custom_handlers=True``.

    Beware that setting a formatter which uses colors also may write the color codes to logfiles.

    :arg Formatter formatter: `Python logging Formatter object <https://docs.python.org/2/library/logging.html#formatter-objects>`_ (by default uses the internal LogFormatter).
    :arg bool update_custom_handlers: If you added custom handlers to this logger and want this to update them too, you need to set ``update_custom_handlers`` to `True`
    """
    for handler in list(logger.handlers):
        if hasattr(handler, LOGZERO_INTERNAL_LOGGER_ATTR) or update_custom_handlers:
            handler.setFormatter(formatter)

    global _formatter
    _formatter = formatter
def html(self):
    a = ['<table>']
    for r in self:
        a.append('<tr>')
        a.extend('<td>%s</td>' % v for v in r)
        a.append('</tr>')
    a.append('</table>')
    return u'\n'.join(a)

# t = table()
# t.append([1, 2, 3])
# t.append([4, 5, 6])
# t.append([7, 8, 9])
#
# print(t)         # [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
# print(t[0])      # [1, 2, 3]
# print(t[0,0])    # 1
# print(t[:,0])    # [1, 4, 7]
# print(t[:2,:2])  # [[1, 2], [4, 5]]
def sniff(url, *args, **kwargs):
    """ Returns the media type for the given URL.
    """
    return request(url, *args, **kwargs).headers.get('Content-Type', '').split(";")[0]

# print(sniff('http://www.textgain.com'))  # 'text/html'

# Clear cache from 7+ days ago:
# t = time.time() - 7 * 24 * 60 * 60
# for f in glob.glob(cd(CACHE, '*')):
#     if os.stat(f).st_ctime < t:
#         os.remove(f)

#---- SEARCH --------------------------------------------------------------------------------------
# The Bing Search API grants 5,000 free requests per month.
# The Google Search API grants 100 free requests per day.
def get_safe_value(self, key):
    """
    Get the meta value or an empty string.

    http://python3-exiv2.readthedocs.io/en/latest/api.html
    http://python3-exiv2.readthedocs.io/en/latest/tutorial.html
    """
    try:
        val = self.meta[key].value
        if self.meta[key].repeatable:
            return val
        return val[0]
    except KeyError:
        return ''
def handledbexc(cols_to_print_on_err, update=False):
    """Returns a **function** to be passed to pdsql functions when inserting/updating
    the db. Basically, it prints to log"""
    if not cols_to_print_on_err:
        return None

    def hde(dataframe, exception):
        if not empty(dataframe):
            try:
                # if sql-alchemy exception, try to guess the orig attribute which represents
                # the wrapped exception
                # http://docs.sqlalchemy.org/en/latest/core/exceptions.html
                errmsg = str(exception.orig)
            except AttributeError:
                # just use the string representation of exception
                errmsg = str(exception)
            len_df = len(dataframe)
            msg = MSG("%d database rows not %s" % (len_df, "updated" if update else "inserted"), errmsg)
            logwarn_dataframe(dataframe, msg, cols_to_print_on_err)
    return hde
def configlog4download(logger, db_session, download_id, isterminal):
    """configs for download and returns the handler used to store the log to the db
    and to a tmp file. The file is accessible via logger..baseFilename
    """
    # https://docs.python.org/2/howto/logging.html#optimization:
    logging._srcfile = None
    logging.logThreads = 0
    logging.logProcesses = 0
    # FIXME above: move elsewhere (maybe restoring defaults?)
    logger.setLevel(logging.INFO)  # necessary to forward to handlers
    # custom StreamHandler: count errors and warnings:
    dbstream_handler = DbStreamHandler(db_session, download_id)
    logger.addHandler(dbstream_handler)
    if isterminal:
        # configure print to stdout (by default only info and critical messages)
        logger.addHandler(SysOutStreamHandler(sys.stdout))
    return dbstream_handler

# def configlog4stdout(logger):
#     logger.setLevel(logging.INFO)  # necessary to forward to handlers
#     # configure print to stdout (by default only info and critical messages):
#     logger.addHandler(SysOutStreamHandler(sys.stdout))
def print_attributes(obj):
    for attr in obj.__dict__:
        print(attr, getattr(obj, attr))


## pdb:
# https://docs.python.org/2/library/pdb.html
# http://web.archive.org/web/20120819135307/http://aymanh.com/python-debugging-techniques

## commands:
# help
# p pp                 # print, pretty print
# list args
# continue step next
# run restart
# where down up        # print stack trace and move frame
# quit
# ;;                   # (separator)
# [!]statement         # Commands that the debugger doesn't recognize are assumed to be Python
#                      # statements and are executed in the context of the program being debugged.
#                      # Python statements can also be prefixed with an exclamation point (!).

## 1- in IPython, use %debug or %pdb
## 2- at the console prompt:
def init_logger(args):
    log = logging.getLogger(__name__)
    handler = None
    if args.log_file_path is not None:
        handler = logging.FileHandler(args.log_file_path, 'w', encoding=None, delay=True)
    else:
        handler = logging.StreamHandler()

    # ref: https://docs.python.org/2/library/logging.html#logrecord-attributes
    log_format = ' \033[1;37m>>\033[0m \033[93m[%(funcName)s][%(levelname)s] \033[0;37m::\033[0m %(message)s'  # colored output
    handler.setFormatter(logging.Formatter(log_format))
    log.addHandler(handler)
    log.setLevel(getattr(logging, args.logging_level))
    return log
def round2(number, ndigits=None):
    """
    Implementation of Python 2 built-in round() function.

    Rounds a number to a given precision in decimal digits (default
    0 digits). The result is a floating point number. Values are rounded
    to the closest multiple of 10 to the power minus ndigits; if two
    multiples are equally close, rounding is done away from 0.

    ndigits may be negative.

    See Python 2 documentation:
    https://docs.python.org/2/library/functions.html?highlight=round#round
    """
    if ndigits is None:
        ndigits = 0

    if ndigits < 0:
        exponent = 10 ** (-ndigits)
        quotient, remainder = divmod(number, exponent)
        if remainder >= exponent // 2 and number >= 0:
            quotient += 1
        return float(quotient * exponent)
    else:
        exponent = _decimal.Decimal('10') ** (-ndigits)
        d = _decimal.Decimal.from_float(number).quantize(
            exponent, rounding=_decimal.ROUND_HALF_UP)
        return float(d)
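For illustration only (not part of the original project), the Python 2 semantics implemented above round ties away from zero:

# Expected results under the implementation above:
round2(0.5)       # -> 1.0
round2(2.5)       # -> 3.0
round2(-0.5)      # -> -1.0
round2(1234, -2)  # -> 1200.0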
def set_log_level(level, name="cleverhans"):
    """
    Sets the threshold for the cleverhans logger to level
    :param level: the logger threshold. You can find values here:
                  https://docs.python.org/2/library/logging.html#levels
    :param name: the name used for the cleverhans logger
    """
    logging.getLogger(name).setLevel(level)
def _setup_logging():
    """Setup logging to log to nowhere by default.

    For details, see:
    http://docs.python.org/3/howto/logging.html#library-config

    Internal function.
    """
    import logging
    logger = logging.getLogger('spotify-connect')
    handler = logging.NullHandler()
    logger.addHandler(handler)
def resource(*args, **kwargs):
    """
    Create a resource service client by name using the default session.

    See :py:meth:`boto3.session.Session.resource`.
    """
    return _get_default_session().resource(*args, **kwargs)


# Set up logging to ``/dev/null`` like a library is supposed to.
# http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
def __init__(self, name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None):
    """Constructs a SIP logging record.

    A more detailed description of arguments can be found here:
    https://docs.python.org/3.5/library/logging.html#logrecord-objects

    Args:
        name: The name of the logger.
        level: Numeric level of the logging event.
        pathname: The full pathname of the source file where the logging call was made.
        lineno: Line number of the logging call.
        msg: The event description message, possibly a format string.
        args: Variable data to merge into the msg argument to obtain the event description.
        exc_info: An exception tuple with the current exception info, or None.
        func: The name of the function from which the logging call was invoked.
        sinfo: A text string representing the stack info in the current thread up to the logging call.
        kwargs:
    """
    logging.LogRecord.__init__(self, name, level, pathname, lineno, msg, args, exc_info, func, sinfo)

    # Note: Can also access the following variables via the formatter as:
    #   %(hostname)s
    #   %(username)s
    #   %(origin)s
    #   %(time)s
    self.hostname = socket.gethostname()
    self.username = getpass.getuser()
    self.origin = '{}.{}:{}'.format(self.module, self.funcName, self.lineno)
    self.time = datetime.datetime.utcnow().isoformat()
    self._raw = self.__dict__.copy()
def format(self, record):
    """
    Map from Python LogRecord attributes to JSON log format fields

    * from - https://docs.python.org/3/library/logging.html#logrecord-attributes
    * to - https://mana.mozilla.org/wiki/pages/viewpage.action?pageId=42895640
    """
    out = dict(
        Timestamp=int(record.created * 1e9),
        Type=record.name,
        Logger=self.logger_name,
        Hostname=self.hostname,
        EnvVersion=self.LOGGING_FORMAT_VERSION,
        Severity=self.SYSLOG_LEVEL_MAP.get(record.levelno, self.DEFAULT_SYSLOG_LEVEL),
        Pid=record.process,
    )

    # Include any custom attributes set on the record.
    # These would usually be collected metrics data.
    fields = dict()
    for key, value in record.__dict__.items():
        if key not in self.EXCLUDED_LOGRECORD_ATTRS:
            fields[key] = value

    # Only include the 'msg' key if it has useful content
    # and is not already a JSON blob.
    message = record.getMessage()
    if message and not message.startswith('{') and not message.endswith('}'):
        fields['msg'] = message

    # If there is an error, format it for nice output.
    if record.exc_info is not None:
        fields['error'] = repr(record.exc_info[1])
        fields['traceback'] = safer_format_traceback(*record.exc_info)

    out['Fields'] = fields

    return json.dumps(out, cls=SafeJSONEncoder)
def set_loglevel(self, level):
    """
    Set the minimum loglevel for the default logger

    Args:
        level (int): eg. logging.DEBUG or logging.ERROR. See also
                     https://docs.python.org/2/library/logging.html#logging-levels
    """
    logzero.loglevel(level)


# Settings instance used by external modules
def add_log(self, level, msg, device_name=None, process=None):
    """
    Add a log to the datastore.

    :param level: Per spec https://docs.python.org/2/library/logging.html#logging-levels
        with the additional log level of JOURNAL
    :param process:
    :param msg:
    :param device_name: As explained in DataStore.list_devices()
    :return: None
    :raise DataStoreException: if the level is not a valid log level as specified
        in DataStore.get_log_levels()
    """
    if level not in self.get_log_levels():
        raise DataStoreException("Invalid log level specified. Please use the appropriate level as determined"
                                 " in Datastore.get_log_levels()")
def expand_device_list(self, device_list):
    """
    Expand strings like "device[1-3]" into lists like ["device1", "device2", "device3"].
    Also handles groups like "@compute_nodes". See the range of inputs at:
    http://clustershell.readthedocs.io/en/latest/tools/nodeset.html
    :param device_list: A list of devices.
    :raise DevicelListParseError: When the expression is not parsable.
    :return:
    """
    return list(NodeSet(device_list, resolver=self.datastore_group_resolver))
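A quick illustration of the ClusterShell NodeSet expansion this method relies on (assuming ClusterShell is installed; group expressions like "@compute_nodes" additionally depend on the configured group resolver):

from ClusterShell.NodeSet import NodeSet

# Bracketed ranges expand into individual node names.
print(list(NodeSet("device[1-3]")))  # ['device1', 'device2', 'device3']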
def configure(self,
              level=None,
              file_name=None,
              file_mode='a',
              time_format=None,
              print_level=False,
              stream=None,
              reset_handlers=False):
    """
    Args:
        level: None to retain the original level of the logger
        file_name: None to print to console only
        file_mode: 'w' to overwrite a file or 'a' to append
        time_format:
            - `dhms`: %m/%d %H:%M:%S
            - `dhm`: %m/%d %H:%M
            - `hms`: %H:%M:%S
            - `hm`: %H:%M
            - if it contains '%', it will be interpreted as a format string:
              https://docs.python.org/3/library/logging.html#logrecord-attributes
            - None
        print_level: if True, display `INFO> ` before the message
        stream:
            - stream object: defaults to sys.stderr
            - str: "out", "stdout", "err", or "stderr"
            - None: do not print to any stream
        reset_handlers: True to remove all old handlers

    Warning:
        always removes all previous handlers
    """
    if reset_handlers:
        self.remove_all_handlers()
    if level:
        self.logger.setLevel(level)
    self.add_stream_handler(stream, time_format, print_level)
    self.add_file_handler(file_name, file_mode, time_format, print_level)
    return self
def set_formatter(self, formatter):
    """
    Sets a custom formatter for *all* handlers.
    https://docs.python.org/3/library/logging.html#formatter-objects

    Args:
        formatter: can be either of the following:
            - instance of logging.Formatter
            - tuple of fmt strings (fmt, datefmt), note that the style is `{}`

    References:
        - for fmt string:
          https://docs.python.org/3/library/logging.html#logrecord-attributes
        - for datefmt string:
          https://docs.python.org/3/library/time.html#time.strftime
    """
    if isinstance(formatter, (list, tuple)):
        assert len(formatter) == 2, 'formatter=(fmt, datefmt) strings'
        fmt, datefmt = formatter
        datefmt = self.get_datefmt(datefmt)
        formatter = logging.Formatter(fmt, datefmt, style='{')
    elif not isinstance(formatter, logging.Formatter):
        raise TypeError('formatter must be either an instance of logging.Formatter'
                        ' or a tuple of (fmt, datefmt) strings')
    for handler in self.logger.handlers:
        handler.setFormatter(formatter)
def run(self):
    """Run the logging thread.

    .. note::
        Works like a `threading.Thread
        <http://docs.python.jp/3/library/threading.html#thread-objects>`_ object:
        launch it with ``start()``, and request shutdown by setting
        ``self.stop.set()`` (an ``'END'`` item on the queue also stops the loop).
    """
    # queue check
    assert self.queue is not None, \
        "Log Queue is None, use Logger.setQueue(queue) before calling me."
    self.stop = threading.Event()
    while not self.stop.is_set():
        res = self.queue.get()
        if getattr(res, '__hash__', False) and res in self.mode:
            log_func = self.mode[res]
            if res == 'END':
                self.stop.set()
                continue
            self.__call__(log_func(res))
            self.post_log()
def createLock(self):
    self.lock = None


# Configure a NullHandler for library logging.
# See https://docs.python.org/2/howto/logging.html#library-config.
def __repr__(self):
    return repr(dict(self))

# models = LazyDict()
# models['en'] = lambda: Perceptron('huge.json')

##### ETC #########################################################################################

#---- LOG -----------------------------------------------------------------------------------------
# Functions that access the internet must report the visited URL using the standard logging module.
# See also: https://docs.python.org/2/library/logging.html#logging.Formatter
def __repr__(self):
    return 'Synset(%s)' % tuple.__repr__(self)

# wn = Wordnet(path='WordNet-3.0')
# for s in wn.synsets('grasp', pos='n'):
#     print(s)
#     print(s.gloss)
#     print(s.hyponyms)
#     print()

##### WWW #########################################################################################

#---- OAUTH ---------------------------------------------------------------------------------------
# The Open standard for Authorization (OAuth) is used to sign requests, for example by Twitter.
# The protocol is documented on https://tools.ietf.org/html/rfc5849. Do not change the code below.
def __str__(self):
    a = ' '.join('%s=%s' % (k, quote(v)) for k, v in self.attributes.items() if v is not None)
    a = ' ' + a if a else ''
    if self.tag in SELF_CLOSING:
        return u'<%s%s />' % (
            self.tag, a)
    else:
        return u'<%s%s>%s</%s>' % (
            self.tag, a, self.html, self.tag)
def html(self):
    return ''.join(u(n) for n in self)
def __init__(self, html):
    """ Document Object Model, a tree of Element and Text nodes from the given HTML string.
    """
    HTMLParser.__init__(self)
    Element.__init__(self, tag=None)
    self.head = None
    self.body = None
    self.type = None
    self._stack = [self]
    self.feed(u(html))
def emit(self, record):
    """Log an error to the datastore, if applicable.

    Args:
        The logging.LogRecord object.
        See http://docs.python.org/library/logging.html#logging.LogRecord
    """
    try:
        if not record.exc_info:
            return

        signature = self.__GetSignature(record.exc_info)

        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace('')
            if not memcache.add(signature, None, self.log_interval):
                return

            db.run_in_transaction_custom_retries(1, self.__EmitTx, signature, record.exc_info)
        finally:
            namespace_manager.set_namespace(old_namespace)
    except Exception:
        self.handleError(record)
def create_timed_rotating_log(path):
    logFormatter = NsaFormatter()

    # See https://docs.python.org/2/library/logging.html#levels
    logger.setLevel(logging.INFO)

    # https://docs.python.org/2/library/logging.handlers.html#timedrotatingfilehandler
    handler = TimedRotatingFileHandler(LOG_FILE,
                                       when="midnight",
                                       interval=1,
                                       backupCount=30)
    handler.setFormatter(logFormatter)
    handler.suffix = "%Y%m%d"
    logger.addHandler(handler)
def get_logger(name):
    logger = logging.getLogger(name)
    handler = logging.StreamHandler()
    # https://docs.python.org/2/library/logging.html#logrecord-attributes
    formatter = logging.Formatter(
        "[%(asctime)s - %(filename)s:%(lineno)s - %(levelname)-8s ] %(funcName)10s(): %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.CRITICAL)
    # logger.setLevel(logging.INFO)
    logger.setLevel(logging.DEBUG)
    return logger
def src(self):
    """Get the html 'src' attributes."""
    return {
        'thumb': quote('/thumbs/' + encode(CIPHER_KEY, 'v1:300x300:' + self.path)),
        'original': quote('/images' + self.path),
    }
def __init__(self, prev, next, msg):
    self.prev = prev
    self.next = next
    self.msg = msg


## Magic Methods --------------------------------------------------------------
# http://www.rafekettler.com/magicmethods.html
#
# Custom collections ----------------------------------------------------------
#
# https://docs.python.org/2/reference/datamodel.html?highlight=__setitem__#emulating-container-types
#
# Basic skeleton for custom dictionary:
def getUserInput():
    parser = ap.ArgumentParser(
        description='Build cost spreadsheet for a KiCAD project.')

    # See https://docs.python.org/3/library/argparse.html#name-or-flags why -i or --input
    #
    # The full path and name to the bom2csv file must be given.
    #
    parser.add_argument('-xml', '--bom2csv',
                        # See https://docs.python.org/3/library/argparse.html#nargs
                        nargs='?',
                        type=str,
                        default=None,
                        # See https://docs.python.org/3/library/argparse.html#metavar
                        metavar='file.xml',
                        help='BOM XML file created from csv2bom Kicad plug-in.')

    parser.add_argument('-j', '--jellybean',
                        nargs='?',
                        type=str,
                        default=None,
                        metavar='file.csv',
                        help='csv file containing the jellybean parts.')

    parser.add_argument('-d', '--outdir',
                        nargs='?',
                        type=str,
                        default=None,
                        metavar='/<dir path>/...',
                        help='Directory path where the MadeDigikeyBOM.csv will be written.')

    parser.add_argument('-np', '--num_processes',
                        nargs='?',
                        type=int,
                        default=NUM_PROCESSES,
                        const=NUM_PROCESSES,
                        metavar='NUM_PROCESSES',
                        help='Set the number of parallel processes used for web scraping part data.')

    return parser.parse_args()


###############################################################################
# Main entrypoint.
###############################################################################
def log_debug(self, msg):
    log_debug = logging.getLogger('logger_LogDebug')
    # https://docs.python.org/2/howto/logging.html
    log_debug.debug(msg)
def round3(number, ndigits=None):
    """
    Implementation of Python 3 built-in round() function.

    Rounds a number to a given precision in decimal digits (default
    0 digits). This returns an int when ndigits is omitted or is None,
    otherwise the same type as the number.

    Values are rounded to the closest multiple of 10 to the power minus
    ndigits; if two multiples are equally close, rounding is done toward
    the even choice (aka "Banker's Rounding"). For example, both round(0.5)
    and round(-0.5) are 0, and round(1.5) is 2.

    ndigits may be negative.

    See Python 3 documentation:
    https://docs.python.org/3/library/functions.html?highlight=round#round

    Derived from python-future:
    https://github.com/PythonCharmers/python-future/blob/master/src/future/builtins/newround.py
    """
    if ndigits is None:
        ndigits = 0
        # return an int when called with one argument
        totype = int
        # shortcut if already an integer, or a float with no decimal digits
        inumber = totype(number)
        if inumber == number:
            return inumber
    else:
        # return the same type as the number, when called with two arguments
        totype = type(number)

    m = number * (10 ** ndigits)
    # if number is half-way between two multiples, and the multiple that is
    # closer to zero is even, we use the (slow) pure-Python implementation
    if isclose(m % 1, .5) and int(m) % 2 == 0:
        if ndigits < 0:
            exponent = 10 ** (-ndigits)
            quotient, remainder = divmod(number, exponent)
            half = exponent // 2
            if remainder > half or (remainder == half and quotient % 2 != 0):
                quotient += 1
            d = quotient * exponent
        else:
            exponent = _decimal.Decimal('10') ** (-ndigits) if ndigits != 0 else 1
            d = _decimal.Decimal.from_float(number).quantize(
                exponent, rounding=_decimal.ROUND_HALF_EVEN)
    else:
        # else we use the built-in round() as it produces the same results
        d = round2(number, ndigits)

    return totype(d)
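Again for illustration only, the banker's-rounding behaviour of this implementation, with ties going to the even neighbour:

# Expected results under the implementation above:
round3(0.5)       # -> 0
round3(1.5)       # -> 2
round3(2.5)       # -> 2
round3(2.675, 2)  # -> 2.67 (the float literal 2.675 is actually slightly below 2.675)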
def setup_logger(name=None, logfile=None, level=logging.DEBUG, formatter=None, maxBytes=0, backupCount=0, fileLoglevel=None):
    """
    Configures and returns a fully configured logger instance, no hassles.
    If a logger with the specified name already exists, it returns the existing instance,
    else creates a new one.

    If you set the ``logfile`` parameter with a filename, the logger will save the messages
    to the logfile, but does not rotate by default. If you want to enable log rotation,
    set both ``maxBytes`` and ``backupCount``.

    Usage:

    .. code-block:: python

        from logzero import setup_logger
        logger = setup_logger()
        logger.info("hello")

    :arg string name: Name of the `Logger object <https://docs.python.org/2/library/logging.html#logger-objects>`_. Multiple calls to ``setup_logger()`` with the same name will always return a reference to the same Logger object. (default: ``__name__``)
    :arg string logfile: If set, also write logs to the specified filename.
    :arg int level: Minimum `logging-level <https://docs.python.org/2/library/logging.html#logging-levels>`_ to display (default: ``logging.DEBUG``).
    :arg Formatter formatter: `Python logging Formatter object <https://docs.python.org/2/library/logging.html#formatter-objects>`_ (by default uses the internal LogFormatter).
    :arg int maxBytes: Size of the logfile when rollover should occur. Defaults to 0, rollover never occurs.
    :arg int backupCount: Number of backups to keep. Defaults to 0, rollover never occurs.
    :arg int fileLoglevel: Minimum `logging-level <https://docs.python.org/2/library/logging.html#logging-levels>`_ for the file logger (if not set, it will use the loglevel from the ``level`` argument).
    :return: A fully configured Python logging `Logger object <https://docs.python.org/2/library/logging.html#logger-objects>`_ you can use with ``.debug("msg")``, etc.
    """
    _logger = logging.getLogger(name or __name__)
    _logger.propagate = False
    _logger.setLevel(level)

    # Reconfigure existing handlers
    has_stream_handler = False
    for handler in list(_logger.handlers):
        if isinstance(handler, logging.StreamHandler):
            has_stream_handler = True

        if isinstance(handler, logging.FileHandler) and hasattr(handler, LOGZERO_INTERNAL_LOGGER_ATTR):
            # Internal FileHandler needs to be removed and re-setup to be able
            # to set a new logfile.
            _logger.removeHandler(handler)
            continue

        # reconfigure handler
        handler.setLevel(level)
        handler.setFormatter(formatter or LogFormatter())

    if not has_stream_handler:
        stream_handler = logging.StreamHandler()
        setattr(stream_handler, LOGZERO_INTERNAL_LOGGER_ATTR, True)
        stream_handler.setLevel(level)
        stream_handler.setFormatter(formatter or LogFormatter())
        _logger.addHandler(stream_handler)

    if logfile:
        rotating_filehandler = RotatingFileHandler(filename=logfile, maxBytes=maxBytes, backupCount=backupCount)
        setattr(rotating_filehandler, LOGZERO_INTERNAL_LOGGER_ATTR, True)
        rotating_filehandler.setLevel(fileLoglevel or level)
        rotating_filehandler.setFormatter(formatter or LogFormatter(color=False))
        _logger.addHandler(rotating_filehandler)

    return _logger
def logfile(filename, formatter=None, mode='a', maxBytes=0, backupCount=0, encoding=None, loglevel=None):
    """
    Setup logging to file (using a `RotatingFileHandler <https://docs.python.org/2/library/logging.handlers.html#rotatingfilehandler>`_ internally).

    By default, the file grows indefinitely (no rotation). You can use the ``maxBytes`` and
    ``backupCount`` values to allow the file to rollover at a predetermined size. When the
    size is about to be exceeded, the file is closed and a new file is silently opened
    for output. Rollover occurs whenever the current log file is nearly ``maxBytes`` in length;
    if either of ``maxBytes`` or ``backupCount`` is zero, rollover never occurs.

    If ``backupCount`` is non-zero, the system will save old log files by appending the
    extensions '.1', '.2' etc., to the filename. For example, with a ``backupCount`` of 5 and
    a base file name of app.log, you would get app.log, app.log.1, app.log.2, up to app.log.5.
    The file being written to is always app.log. When this file is filled, it is closed and
    renamed to app.log.1, and if files app.log.1, app.log.2, etc. exist, then they are renamed
    to app.log.2, app.log.3 etc. respectively.

    :arg string filename: Filename of the logfile. Set to `None` to disable logging to the logfile.
    :arg Formatter formatter: `Python logging Formatter object <https://docs.python.org/2/library/logging.html#formatter-objects>`_ (by default uses the internal LogFormatter).
    :arg string mode: mode to open the file with. Defaults to ``a``.
    :arg int maxBytes: Size of the logfile when rollover should occur. Defaults to 0, rollover never occurs.
    :arg int backupCount: Number of backups to keep. Defaults to 0, rollover never occurs.
    :arg string encoding: Used to open the file with that encoding.
    :arg int loglevel: Set a custom loglevel for the file logger, else uses the current global loglevel.
    """
    # Step 1: If an internal RotatingFileHandler already exists, remove it
    for handler in list(logger.handlers):
        if isinstance(handler, RotatingFileHandler) and hasattr(handler, LOGZERO_INTERNAL_LOGGER_ATTR):
            logger.removeHandler(handler)

    # Step 2: If wanted, add the RotatingFileHandler now
    if filename:
        rotating_filehandler = RotatingFileHandler(filename, mode=mode, maxBytes=maxBytes, backupCount=backupCount, encoding=encoding)

        # Set internal attributes on this handler
        setattr(rotating_filehandler, LOGZERO_INTERNAL_LOGGER_ATTR, True)
        if loglevel:
            setattr(rotating_filehandler, LOGZERO_INTERNAL_HANDLER_IS_CUSTOM_LOGLEVEL, True)

        # Configure the handler and add it to the logger
        rotating_filehandler.setLevel(loglevel or _loglevel)
        rotating_filehandler.setFormatter(formatter or _formatter or LogFormatter(color=False))
        logger.addHandler(rotating_filehandler)
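A short usage sketch for the rotation parameters described above (assuming the logzero package is installed and the hypothetical filename "app.log"):

import logging
import logzero
from logzero import logger

# Roll over at roughly 1 MB and keep app.log.1 .. app.log.3 as backups.
logzero.logfile("app.log", maxBytes=1000000, backupCount=3, loglevel=logging.INFO)
logger.info("this goes to app.log as well as the console")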
def __init__(self, name, description, commandline_args=[]):
    """Command line arguments can be a list of shortcuts from
    `predefined_args`, or a list of dictionaries. Arguments can
    also be put in a file named SCRIPTNAME_args.py, e.g. `harvest_args.py`.
    """
    self.parser = argparse.ArgumentParser(description)

    # Add one ubiquitous command line argument
    commandline_args += ["loglevel"]

    # Check for FILENAME_args.py file
    import __main__
    import os
    try:
        filename = os.path.basename(__main__.__file__)
        filename = os.path.splitext(filename)[0]
        args_from_file = __import__(filename + "_args")
        commandline_args += args_from_file.args
    except ImportError:
        pass

    # Add all the command line arguments
    for c in commandline_args:
        # Check for shortcuts used
        if isinstance(c, str):
            c = self.predefined_args[c]
        self.parser.add_argument(
            c.pop("short", None),
            c.pop("long", None),
            **c)

    argcomplete.autocomplete(self.parser)
    self.args = self.parser.parse_args()

    self.logger = logging.getLogger(name)
    # https://docs.python.org/2/library/logging.html#levels
    self.logger.setLevel(self.args.loglevel * 10)

    self.executionMode = self.NORMAL_MODE

    # Convenience shortcuts to logger methods
def oauth(url, data={}, method='GET', key='', token='', secret=('', '')):
    """ Returns (url, data), where data is updated with OAuth 1.0 authorization.
    """
    def nonce():
        return hashlib.md5(b('%s%s' % (time.time(), random.random()))).hexdigest()

    def timestamp():
        return int(time.time())

    def encode(s):
        return urlquote(b(s), safe='~')

    def hash(s, key):
        return hmac.new(b(s), b(key), hashlib.sha1).digest()

    def base(url, data={}, method='GET'):
        # https://tools.ietf.org/html/rfc5849#section-3.4.1
        s  = encode(method.upper()) + '&'
        s += encode(url.rstrip('?')) + '&'
        s += encode('&'.join('%s=%s' % (
            encode(k),
            encode(v)) for k, v in sorted(data.items())))
        return s

    def sign(url, data={}, method='GET', secret=('', '')):
        # https://tools.ietf.org/html/rfc5849#section-3.4.2
        s  = encode(secret[0]) + '&'
        s += encode(secret[1])
        s  = hash(s, base(url, data, method))
        s  = base64.b64encode(s)
        return s

    data = dict(data, **{
        'oauth_nonce'            : nonce(),
        'oauth_timestamp'        : timestamp(),
        'oauth_consumer_key'     : key,
        'oauth_token'            : token,
        'oauth_signature_method' : 'HMAC-SHA1',
        'oauth_version'          : '1.0',
    })
    data['oauth_signature'] = sign(url.split('?')[0], data, method.upper(), secret)

    return url, data
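A hypothetical call (the endpoint and credential values below are placeholders, not real values) showing how the signed (url, data) pair would typically be obtained:

# The returned url and data can then be passed on to a request function.
url, data = oauth('https://api.twitter.com/1.1/search/tweets.json',
                  {'q': 'python'},
                  key='CONSUMER_KEY', token='ACCESS_TOKEN',
                  secret=('CONSUMER_SECRET', 'ACCESS_TOKEN_SECRET'))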
def visualize(g, **kwargs):
    """ Returns a string with a HTML5 <canvas> element,
        that renders the given graph using a force-directed layout.
    """
    a = {}
    for e in g.edges:
        a.setdefault(e.node1, {})[e.node2] = e.weight

    f = lambda k, v: json.dumps(kwargs.get(k, v))

    s = '\n'.join((
        '<canvas id=%(id)s width=%(width)s height=%(height)s></canvas>',
        '<script src=%(src)s></script>',
        '<script>',
        '\tvar adjacency = %s;' % json.dumps(a),
        '',
        '\tvar canvas;',
        '\tcanvas = document.getElementById(%(id)s);',
        '\tcanvas.graph = new Graph(adjacency);',
        '\tcanvas.graph.animate(canvas, %(n)s, {',
        '\t\tdirected    : %s,' % f('directed', False),
        '\t\tfont        : %s,' % f('font', '10px sans-serif'),
        '\t\tfill        : %s,' % f('fill', '#fff'),
        '\t\tstroke      : %s,' % f('stroke', '#000'),
        '\t\tstrokewidth : %s,' % f('strokewidth', 0.5),
        '\t\tradius      : %s,' % f('radius', 4.0),
        '\t\tf1          : %s,' % f('f1', 10.0),
        '\t\tf2          : %s,' % f('f2', 0.5),
        '\t\tm           : %s' % f('m', 0.25),
        '\t});',
        '</script>'
    ))

    k = {}
    k.update({'src': 'graph.js', 'id': 'g', 'width': 640, 'height': 480, 'n': 1000})
    k.update(kwargs)
    k = {k: json.dumps(v) for k, v in k.items()}

    return s % k

# g = Graph()
# n = range(200)
# for i in range(200):
#     g.add(
#         n1=random.choice(n),
#         n2=random.choice(n))
#
# f = open('test.html', 'w')
# f.write(visualize(g, n=1000, directed=True))
# f.close()