The following 33 code examples, extracted from open-source Python projects, illustrate how to use logger.get().
def __init__(self, filename=None, default_verbose=0):
    """Construct a logger with optional log file output.

    Args:
        filename: optional log file path. If None, nothing will be
            written to file.
        default_verbose: default verbosity level for messages.
    """
    # Verbosity threshold is taken from the VERBOSE environment variable.
    self.verbose_thresh = int(os.environ.get('VERBOSE', 0))
    self.default_verbose = default_verbose
    if filename is not None:
        self.filename = filename
        dirname = os.path.dirname(self.filename)
        # dirname is '' for a bare filename; os.makedirs('') would raise,
        # so only create the directory when there is one.
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        # Create/truncate the log file up front.
        open(self.filename, 'w').close()
        self.info('Log written to {}'.format(
            os.path.abspath(self.filename)))
    else:
        self.filename = None
def __init__(self, filename=None, default_verbose=0):
    """Construct a logger with optional log file output.

    Args:
        filename: optional log file path. If None, nothing will be
            written to file.
        default_verbose: default verbosity level for messages.
    """
    # Verbosity threshold is taken from the VERBOSE environment variable.
    self.verbose_thresh = int(os.environ.get("VERBOSE", 0))
    self.default_verbose = default_verbose
    if filename is not None:
        self.filename = filename
        dirname = os.path.dirname(self.filename)
        # dirname is "" for a bare filename; os.makedirs("") would raise,
        # so only create the directory when there is one.
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        # Create/truncate the log file up front.
        open(self.filename, "w").close()
        self.info("Log written to {}".format(os.path.abspath(self.filename)))
    else:
        self.filename = None
def __init__(self, filename=None, default_verbose=0):
    """Construct a logger with optional log file output.

    Args:
        filename: optional log file path. If None, nothing will be
            written to file.
        default_verbose: default verbosity level for messages.
    """
    # Verbosity threshold is taken from the VERBOSE environment variable.
    self.verbose_thresh = int(os.environ.get('VERBOSE', 0))
    self.default_verbose = default_verbose
    if filename is not None:
        self.filename = filename
        dirname = os.path.dirname(self.filename)
        # dirname is '' for a bare filename; os.makedirs('') would raise,
        # so only create the directory when there is one.
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        # Create/truncate the log file up front.
        open(self.filename, 'w').close()
        self.info('Log written to {}'.format(os.path.abspath(self.filename)))
    else:
        self.filename = None
def __init__(self, batch_iter, max_queue_size=10, num_threads=5,
             log_queue=20, name=None):
    """ Data provider wrapper that supports concurrent data fetching. """
    # NOTE(review): statement order is load-bearing here --
    # init_fetchers() presumably starts worker threads (self.fetchers),
    # so attributes set after that call (counter, relaunch, _stopped,
    # log_queue, name) must not be read by fetchers at startup; confirm
    # before reordering.
    super(ConcurrentBatchIterator, self).__init__()
    self.max_queue_size = max_queue_size  # capacity of the prefetch queue
    self.num_threads = num_threads        # number of fetcher threads
    self.q = queue.Queue(maxsize=max_queue_size)
    self.log = logger.get()
    self.batch_iter = batch_iter          # underlying (serial) batch iterator
    self.fetchers = []
    self.init_fetchers()
    self.counter = 0                      # batches delivered since last log
    self.relaunch = True
    self._stopped = False
    self.log_queue = log_queue            # log queue state every N batches
    self.name = name                      # optional name used for logging
    pass
def __init__(self, filename=None, default_verbose=0):
    """Construct a logger with optional log file output.

    Args:
        filename: optional log file path. If None, nothing will be
            written to file.
        default_verbose: default verbosity level for messages.
    """
    # Verbosity threshold is taken from the VERBOSE environment variable.
    self.verbose_thresh = int(os.environ.get("VERBOSE", 0))
    self.default_verbose = default_verbose
    if filename is not None:
        self.filename = filename
        dirname = os.path.dirname(self.filename)
        # dirname is "" for a bare filename; os.makedirs("") would raise,
        # so only create the directory when there is one.
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        # Create/truncate the log file up front.
        open(self.filename, "w").close()
        self.info("Log written to {}".format(
            os.path.abspath(self.filename)))
    else:
        self.filename = None
def __init__(self, height, width, output_fname, semantic_only=True):
    """Set up an h5 dataset writer over a shuffled list of image IDs.

    Args:
        height: target image height.
        width: target image width.
        output_fname: path of the output h5 dataset.
        semantic_only: whether to keep only semantic annotations
            (exact use not visible here -- stored for later).
    """
    self.height = height
    self.width = width
    self.semantic_only = semantic_only
    self.log = logger.get()
    self.output_fname = output_fname
    self.log.info("Output h5 dataset: {}".format(self.output_fname))
    self.log.info("Reading image IDs")
    self.img_ids = self.read_ids()
    # Deterministically shuffle the image order (fixed seed so runs are
    # reproducible). `rng` avoids shadowing the stdlib `random` name.
    rng = np.random.RandomState(2)
    shuffle = np.arange(len(self.img_ids))
    rng.shuffle(shuffle)
    self.img_ids = [self.img_ids[idx] for idx in shuffle]
def __init__(self, folder, fname='model', var_dict=None):
    """Create a checkpoint saver rooted at ``folder``.

    Args:
        folder: directory where checkpoints live; created if absent.
        fname: base filename for the checkpoint files.
        var_dict: variables to save; defaults to all TF variables.
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    self.folder = folder
    self.fname = fname
    self.log = logger.get()
    self.tf_saver = None
    # Fall back to every variable in the graph when none are given.
    self.var_dict = tf.all_variables() if var_dict is None else var_dict
def __init__(self, num, batch_size=1, progress_bar=False, log_epoch=10,
             get_fn=None, cycle=False, shuffle=True, stagnant=False):
    """Construct a batch iterator over ``num`` examples.

    Args:
        num: total number of examples.
        batch_size: number of examples per batch.
        progress_bar: whether to attach a progress bar.
        log_epoch: log every this many epochs.
        get_fn: callable used to materialize batches -- presumably
            called with example indices; not visible here.
        cycle: whether to wrap around after a full pass.
        shuffle: whether example order is shuffled.
        stagnant: see iterator implementation (stored as a flag).
    """
    self._num = num
    self._batch_size = batch_size
    self._step = 0
    self._num_steps = int(np.ceil(self._num / float(batch_size)))
    self._variables = None
    self._get_fn = get_fn
    self.get_fn = get_fn
    self._cycle = cycle
    self._shuffle_idx = np.arange(self._num)
    self._shuffle = shuffle
    # Fixed seed keeps the shuffled order reproducible.
    self._random = np.random.RandomState(2)
    self._shuffle_flag = shuffle
    self._stagnant = stagnant
    self._log_epoch = log_epoch
    self._log = logger.get()
    self._epoch = 0
    self._pb = pb.get(self._num_steps) if progress_bar else None
    self._mutex = threading.Lock()
def get(fname=None):
    """Return the module-level logger singleton.

    Args:
        fname: optional log file path. When given, a fresh Logger is
            created (replacing any cached instance).

    Returns:
        The shared Logger instance.
    """
    global log
    # Reuse the cached instance unless a specific file is requested.
    if log is not None and fname is None:
        return log
    log = Logger(fname)
    return log
def __init__(self):
    """Initialize an empty registry and grab the shared logger."""
    self.log = logger.get()
    self._reg = {}
def __init__(self, folder, name):
    """Prepare a YAML option file named ``<name>.yaml`` under ``folder``.

    Args:
        folder: output directory; created if it does not exist.
        name: basename of the YAML file (without extension).
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    self.log = logger.get()
    self.folder = folder
    self.fname = os.path.join(folder, name + '.yaml')
def __init__(self, num_cls=10, filename=None, name=None, cmap='gray'):
    """Plotter for a confusion matrix.

    Args:
        num_cls: number of classes (matrix is num_cls x num_cls).
        filename: optional output file, forwarded to the base plotter.
        name: optional plot name, forwarded to the base plotter.
        cmap: matplotlib colormap name used when rendering.
    """
    super(ConfusionMatrixPlotter, self).__init__(
        filename=filename, name=name)
    self.log = logger.get()
    self.num_cls = num_cls
    self.cmap = cmap
def get(name):
    """Look up ``name`` in the module-level registry.

    Raises:
        KeyError: if ``name`` was never registered.
    """
    return registry[name]
def __init__(self, filename, labels, name, buffer_size=1, restore_step=0):
    """Logger for time-series values, one column per label.

    Args:
        filename: output file path.
        labels: label string or list of label strings.
        name: display name; defaults to the first label when None.
        buffer_size: number of records buffered before flushing.
        restore_step: not referenced in this constructor; kept for
            interface compatibility.
    """
    self.filename = filename
    self.folder = os.path.dirname(filename)
    self.written_catalog = False
    self.log = logger.get()
    # Accept a single label as a convenience; isinstance is the
    # idiomatic type check (was: type(labels) != list).
    if not isinstance(labels, list):
        labels = [labels]
    self.name = labels[0] if name is None else name
    self.labels = labels
    self.buffer_size = buffer_size
    self.buffer = []
    # Map each label to its column index.
    self.label_table = {label: ll for ll, label in enumerate(labels)}
    self.log.info(
        'Time series data "{}" log to "{}"'.format(labels, filename))
    self._has_init = False
def get(fname=None):
    """Return the module-level logger singleton.

    Args:
        fname: optional log file path. When given, a fresh Logger is
            created (replacing any cached instance).

    Returns:
        The shared Logger instance.
    """
    global log
    # Reuse the cached instance unless a specific file is requested.
    if log is not None and fname is None:
        return log
    log = Logger(fname)
    return log
def __init__(self, folder, model_opt=None, data_opt=None):
    """Save experiment options under ``folder``.

    Args:
        folder: output directory; created if it does not exist.
        model_opt: optional model options, written when provided.
        data_opt: optional dataset options, written when provided.
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    self.folder = folder
    self.log = logger.get()
    self.tf_saver = None
    # Persist whichever option dictionaries were supplied.
    if model_opt is not None:
        self.save_opt(os.path.join(folder, kModelOptFilename), model_opt)
    if data_opt is not None:
        self.save_opt(os.path.join(folder, kDatasetOptFilename), data_opt)
def __init__(self, q, batch_iter):
    """Producer thread that feeds batches from ``batch_iter`` into ``q``.

    Args:
        q: queue the fetched batches are pushed onto.
        batch_iter: iterator yielding the batches.
    """
    # The cooperative super() call already runs threading.Thread.__init__;
    # the original's extra explicit threading.Thread.__init__(self) was a
    # redundant double initialization and has been dropped.
    super(BatchProducer, self).__init__()
    self.q = q
    self.batch_iter = batch_iter
    self.log = logger.get()
    self._stop = threading.Event()
    # Daemon thread: does not block interpreter exit.
    self.daemon = True
def next(self):
    """Return the next batch, handling producer shutdown.

    Raises:
        StopIteration: when the iterator was already stopped, or when a
            None sentinel is seen, the queue drains, and no fetcher
            thread is alive any more.
    """
    if self._stopped:
        raise StopIteration
    # Periodic queue inspection/logging every log_queue batches.
    self.scan(do_print=(self.counter % self.log_queue == 0))
    if self.counter % self.log_queue == 0:
        self.counter = 0
    batch = self.q.get()
    self.q.task_done()
    self.counter += 1
    # A None batch is the producers' end-of-data sentinel.
    while batch is None:
        self.info("Got an empty batch. Ending iteration.")
        self.relaunch = False
        try:
            # Non-blocking get: drain whatever is still queued.
            batch = self.q.get(False)
            self.q.task_done()
            qempty = False
        except queue.Empty:
            qempty = True
            pass
        if qempty:
            self.info("Queue empty. Scanning for alive thread.")
            # Scan for alive thread.
            found_alive = False
            for ff in self.fetchers:
                if ff.is_alive():
                    found_alive = True
                    break
            self.info("No alive thread found. Joining.")
            # If no alive thread, join all.
            if not found_alive:
                for ff in self.fetchers:
                    ff.join()
                self._stopped = True
                raise StopIteration
        else:
            self.info("Got another batch from the queue.")
    return batch
def __init__(self, folder, name):
    """Prepare a YAML option file named ``<name>.yaml`` under ``folder``.

    Args:
        folder: output directory; created if it does not exist.
        name: basename of the YAML file (without extension).
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    self.log = logger.get()
    self.folder = folder
    self.fname = os.path.join(folder, name + '.yaml')
def __init__(self, num, batch_size=1, log_epoch=10, get_fn=None,
             cycle=False, shuffle=True, stagnant=False, seed=2):
    """Construct a batch iterator over ``num`` examples.

    Args:
        num: total number of examples.
        batch_size: number of examples per batch.
        log_epoch: log every this many epochs.
        get_fn: callable used to materialize batches -- presumably
            called with example indices; not visible here.
        cycle: whether to wrap around after a full pass.
        shuffle: whether to shuffle the example order up front.
        stagnant: see iterator implementation (stored as a flag).
        seed: RNG seed for the deterministic shuffle.
    """
    self._num = num
    self._batch_size = batch_size
    self._step = 0
    self._num_steps = int(np.ceil(self._num / float(batch_size)))
    self._variables = None
    self._get_fn = get_fn
    self.get_fn = get_fn
    self._cycle = cycle
    self._shuffle_idx = np.arange(self._num)
    self._shuffle = shuffle
    self._random = np.random.RandomState(seed)
    # Shuffle once, eagerly; _shuffle_flag stays False afterwards.
    if shuffle:
        self._random.shuffle(self._shuffle_idx)
    self._shuffle_flag = False
    self._stagnant = stagnant
    self._log_epoch = log_epoch
    self._log = logger.get()
    self._epoch = 0
    self._mutex = threading.Lock()
def run(self):
    """Drain the queue until the thread is signalled to stop.

    Repeatedly does a non-blocking get and marks the item done;
    an empty queue is simply ignored and polled again.
    """
    while True:
        if self.stopped():
            break
        try:
            self.q.get(False)
            self.q.task_done()
        except Queue.Empty:
            pass
def next(self):
    """Return the next batch, handling producer shutdown.

    Raises:
        StopIteration: when a None sentinel is seen, the queue drains,
            and no fetcher thread is alive any more.
    """
    # Periodic queue inspection/logging every log_queue batches.
    self.scan(do_print=(self.counter % self.log_queue == 0))
    if self.counter % self.log_queue == 0:
        self.counter = 0
    batch = self.q.get()
    self.q.task_done()
    self.counter += 1
    # A None batch is the producers' end-of-data sentinel.
    while batch is None:
        self.info("Got an empty batch. Ending iteration.")
        self.relaunch = False
        try:
            # Non-blocking get: drain whatever is still queued.
            batch = self.q.get(False)
            self.q.task_done()
            qempty = False
        except Queue.Empty:
            qempty = True
            pass
        if qempty:
            self.info("Queue empty. Scanning for alive thread.")
            # Scan for alive thread.
            found_alive = False
            for ff in self.fetchers:
                if ff.is_alive():
                    found_alive = True
                    break
            self.info("No alive thread found. Joining.")
            # If no alive thread, join all.
            if not found_alive:
                for ff in self.fetchers:
                    ff.join()
                raise StopIteration
        else:
            self.info("Got another batch from the queue.")
    return batch
def __init__(self, h5_fname):
    """Open an h5-backed dataset and read its image ID list.

    Args:
        h5_fname: path to the h5 dataset file.
    """
    self.h5_fname = h5_fname
    self.log = logger.get()
    self.log.info("Reading image IDs")
    self.img_ids = self._read_ids()
def __init__(self, num, batch_size=1, progress_bar=False, log_epoch=10,
             get_fn=None, cycle=False, shuffle=True, stagnant=False,
             seed=2, num_batches=-1):
    """Construct a batch iterator over ``num`` examples.

    Args:
        num: total number of examples.
        batch_size: number of examples per batch.
        progress_bar: whether to attach a progress bar.
        log_epoch: log every this many epochs.
        get_fn: callable used to materialize batches -- presumably
            called with example indices; not visible here.
        cycle: whether to wrap around after a full pass.
        shuffle: whether to shuffle the example order up front.
        stagnant: see iterator implementation (stored as a flag).
        seed: RNG seed for the deterministic shuffle.
        num_batches: optional cap on the number of steps (-1 = no cap).
    """
    self._num = num
    self._batch_size = batch_size
    self._step = 0
    steps = int(np.ceil(self._num / float(batch_size)))
    if num_batches > 0:
        steps = min(steps, num_batches)
    self._num_steps = steps
    self._variables = None
    self._get_fn = get_fn
    self.get_fn = get_fn
    self._cycle = cycle
    self._shuffle_idx = np.arange(self._num)
    self._shuffle = shuffle
    self._random = np.random.RandomState(seed)
    # Shuffle once, eagerly; _shuffle_flag stays False afterwards.
    if shuffle:
        self._random.shuffle(self._shuffle_idx)
    self._shuffle_flag = False
    self._stagnant = stagnant
    self._log_epoch = log_epoch
    self._log = logger.get()
    self._epoch = 0
    self._pb = pb.get(self._num_steps) if progress_bar else None
    self._mutex = threading.Lock()