The following 50 code examples, extracted from open-source Python projects, illustrate how to use progressbar.Bar().
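Before the extracted examples, here is a minimal, self-contained sketch of the usual pattern. It assumes the classic progressbar API (maxval/start/update/finish); the newer progressbar2 package spells the limit max_value but follows the same shape:

import time
import progressbar

# Bar() draws the growing bar itself; Percentage() and ETA() are companion widgets.
widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()]
pbar = progressbar.ProgressBar(widgets=widgets, maxval=100).start()
for i in range(100):
    time.sleep(0.01)  # stand-in for real work
    pbar.update(i + 1)
pbar.finish()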
def render(self, length=None, progress=False):
    """
    Render this signal into a numpy array of floats. Return the array.

    :param length: The length to render, in seconds. Optional.
    :param progress: Whether to show a progress bar for rendering
    """
    if progress and not progressbar:
        print('Install the progressbar module to see a progress bar for rendering')
        progress = False

    duration = self.duration if length is None else length * SAMPLE_RATE
    if duration == float('inf'):
        duration = 3 * SAMPLE_RATE
    else:
        duration = int(duration)

    out = numpy.empty((duration, 1))
    pbar = progressbar.ProgressBar(widgets=['Rendering: ', progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=duration - 1).start() if progress else None
    for i in range(duration):
        out[i] = self.amplitude(i)
        if pbar:
            pbar.update(i)
    if pbar:
        pbar.finish()
    return out
def main():
    uri, outfile, dataset = get_arguments()
    fd = tempfile.NamedTemporaryFile()
    progress = ProgressBar(widgets=[Percentage(), ' ', Bar(), ' ', ETA(),
                                    ' ', FileTransferSpeed()])

    def update(count, blockSize, totalSize):
        if progress.maxval is None:
            progress.maxval = totalSize
            progress.start()
        progress.update(min(count * blockSize, totalSize))

    urllib.urlretrieve(uri, fd.name, reporthook=update)
    if dataset == 'zinc12':
        df = pandas.read_csv(fd.name, delimiter='\t')
        df = df.rename(columns={'SMILES': 'structure'})
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
    elif dataset == 'chembl22':
        df = pandas.read_table(fd.name, compression='gzip')
        df = df.rename(columns={'canonical_smiles': 'structure'})
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
    else:
        df = pandas.read_csv(fd.name, delimiter='\t')
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
def knn_masked_data(trX, trY, missing_data_dir, input_shape, k):
    raw_im_data = np.loadtxt(join(script_dir, missing_data_dir, 'index.txt'),
                             delimiter=' ', dtype=str)
    raw_mask_data = np.loadtxt(join(script_dir, missing_data_dir, 'index_mask.txt'),
                               delimiter=' ', dtype=str)
    # Using 'brute' method since we only want to do one query per classifier
    # so this will be quicker as it avoids overhead of creating a search tree
    knn_m = KNeighborsClassifier(algorithm='brute', n_neighbors=k)
    prob_Y_hat = np.zeros((raw_im_data.shape[0], int(np.max(trY) + 1)))
    total_images = raw_im_data.shape[0]
    pbar = progressbar.ProgressBar(widgets=[progressbar.FormatLabel('\rProcessed %(value)d of %(max)d Images '),
                                            progressbar.Bar()],
                                   maxval=total_images, term_width=50).start()
    for i in range(total_images):
        mask_im = load_image(join(script_dir, missing_data_dir, raw_mask_data[i][0]),
                             input_shape, 1).reshape(np.prod(input_shape))
        mask = np.logical_not(mask_im > eps)  # since mask is 1 at missing locations
        v_im = load_image(join(script_dir, missing_data_dir, raw_im_data[i][0]),
                          input_shape, 255).reshape(np.prod(input_shape))
        rep_mask = np.tile(mask, (trX.shape[0], 1))
        # Corrupt whole training set according to the current mask
        corr_trX = np.multiply(trX, rep_mask)
        knn_m.fit(corr_trX, trY)
        prob_Y_hat[i, :] = knn_m.predict_proba(v_im.reshape(1, -1))
        pbar.update(i)
    pbar.finish()
    return prob_Y_hat
def preprocess(self, questions: List[QASetting],
               answers: Optional[List[List[Answer]]] = None,
               is_eval: bool = False) -> List[XQAAnnotation]:
    if answers is None:
        answers = [None] * len(questions)
    preprocessed = []
    if len(questions) > 1000:
        bar = progressbar.ProgressBar(
            max_value=len(questions),
            widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(),
                     ' (', progressbar.ETA(), ') '])
        for q, a in bar(zip(questions, answers)):
            preprocessed.append(self.preprocess_instance(q, a))
    else:
        for q, a in zip(questions, answers):
            preprocessed.append(self.preprocess_instance(q, a))
    return preprocessed
def preprocess(self, questions: List[QASetting],
               answers: Optional[List[List[Answer]]] = None,
               is_eval: bool = False) -> List[MCAnnotation]:
    if answers is None:
        answers = [None] * len(questions)
    preprocessed = []
    if len(questions) > 1000:
        bar = progressbar.ProgressBar(
            max_value=len(questions),
            widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(),
                     ' (', progressbar.ETA(), ') '])
        for i, (q, a) in bar(enumerate(zip(questions, answers))):
            preprocessed.append(self.preprocess_instance(i, q, a))
    else:
        for i, (q, a) in enumerate(zip(questions, answers)):
            preprocessed.append(self.preprocess_instance(i, q, a))
    return preprocessed
def collect_mailids(server):
    folders = server.list_folders()

    # construct progressbar
    progressbar_widgets = [
        '[Searching for mails on server] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets,
                                                   maxval=len(folders)).start()

    # collect all mailids for all folders
    folder_contents = {}
    folder_progress = 0
    for flags, delimiter, folder in folders:
        # read all mailids for the folder
        server.select_folder(folder, readonly=True)
        folder_contents[folder] = server.search()

        # update progressbar
        folder_progress += 1
        progressbar_instance.update(folder_progress)

    progressbar_instance.finish()
    return folder_contents
def download(download_list, total_download_size):
    progressbar_widgets = [
        '[Downloading mails ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
        progressbar.ETA(), ' ',
        bitmath.integrations.BitmathFileTransferSpeed()]
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets,
                                                   maxval=int(total_download_size)).start()
    downloaded_size = bitmath.Byte(0)

    for folder, mails in download_list.items():
        server.select_folder(folder, readonly=True)
        for mailid, mailfilename, mailsize in mails:
            # make parent directory
            if not os.path.isdir(os.path.dirname(mailfilename)):
                os.makedirs(os.path.dirname(mailfilename))

            # download mail
            with open(mailfilename, 'wb') as mailfile:
                mailfile.write(server.fetch([mailid], ['RFC822'])[mailid][b'RFC822'])

            # update progressbar
            downloaded_size += mailsize
            progressbar_instance.update(int(downloaded_size))

    progressbar_instance.finish()
def bruteforce():
    import progressbar
    from time import sleep
    bar = progressbar.ProgressBar(maxval=60,
                                  widgets=[progressbar.Bar('==', '[', ']'), ' ',
                                           progressbar.Percentage()])
    bar.start()
    for i in xrange(10):
        bar.update(i + 1)
        sleep(0.05)
    wordlist = "/root/2fassassin/crack/wordlist/2fa-wordlist.txt"
    target = "/root/2fassassin/loot/*.pfx"
    sign = ""
    sign += "crackpkcs12 -v -b"
    sign += " "
    sign += target
    sign += "| tee crack.log"
    os.system(sign)
    bar.finish()
    sys.exit()
def bruteforce():
    import progressbar
    from time import sleep
    bar = progressbar.ProgressBar(maxval=60,
                                  widgets=[progressbar.Bar('=', '[', ']'), ' ',
                                           progressbar.Percentage()])
    bar.start()
    for i in xrange(10):
        bar.update(i + 1)
        sleep(0.05)
    wordlist = "/root/2fassassin/crack/wordlist/2fa-wordlist.txt"
    target = "/root/2fassassin/loot/*.pfx"
    sign = ""
    sign += "crackpkcs12 -v -b"
    sign += " "
    sign += target
    sign += "| tee crack.log"
    os.system(sign)
    bar.finish()
    sys.exit()
def Steg_brute(ifile, dicc):
    i = 0
    ofile = ifile.split('.')[0] + "_flag.txt"
    nlines = len(open(dicc).readlines())
    with open(dicc, 'r') as passFile:
        pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=nlines).start()
        for line in passFile.readlines():
            password = line.strip('\n')
            r = commands.getoutput("steghide extract -sf %s -p '%s' -xf %s"
                                   % (ifile, password, ofile))
            if not "no pude extraer" in r and not "could not extract" in r:
                print(color.GREEN + "\n\n " + r + color.ENDC)
                print("\n\n [+] " + color.INFO + "Information obtained with password:" +
                      color.GREEN + " %s\n" % password + color.ENDC)
                if check_file(ofile):
                    with open(ofile, 'r') as outfile:
                        for line in outfile.readlines():
                            print(line)
                break
            pbar.update(i + 1)
            i += 1
def progress_bar(n):
    import progressbar
    return progressbar.ProgressBar(
        max_value=n,
        widgets=[
            progressbar.Percentage(),
            ' ',
            '(', progressbar.SimpleProgress(), ')',
            ' ',
            progressbar.Bar(),
            ' ',
            progressbar.AdaptiveETA(),
        ])


# http://code.activestate.com/recipes/577058/
def __call__(self, progress, data, width):
    if progress.end_time:
        return self.finish_msg

    if progress.max_value is progressbar.UnknownLength:
        bar = progressbar.BouncingBar
    else:
        bar = progressbar.Bar
    line = bar.__call__(self, progress, data, width)

    if data["percentage"] is None:
        msg = self.msg
    else:
        msg = "%s (%d%%)" % (self.msg, data["percentage"])

    offset = width // 2 - len(msg) // 2
    return line[:offset] + msg + line[offset + len(msg):]
def __init__(self, *args, **kwargs):
    self.dld = FileDownloader()
    self.dld.stage(self.cmd_name)
    load_continents()
    load_oceans()
    load_currencies()
    load_languages()
    self.widgets = [
        MemoryUsage(),
        progressbar.ETA(),
        ' |Processed: ',
        progressbar.Counter(),
        ' |Done: ',
        progressbar.Percentage(),
        progressbar.Bar(),
    ]
    return super().__init__(*args, **kwargs)
def __init__(self, name, max_value=100, history_len=5, display=True,
             display_data={'train': ['loss', 'accuracy'], 'test': ['loss', 'accuracy']},
             level=logging.INFO, train_log_mode='TRAIN_PROGRESS',
             test_log_mode='TEST_PROGRESS'):
    super(ProgressbarLogger, self).__init__(
        name, level=level, display=display, logfile=None,
        train_log_mode=train_log_mode, test_log_mode=test_log_mode)

    self.train_log_data = {}
    self.test_log_data = {}
    self.max_value = max_value
    self.history_len = history_len
    self.display_data = display_data

    self.mode['TRAIN_PROGRESS'] = self.log_train_progress
    self.mode['TEST_PROGRESS'] = self.log_test_progress

    # create logging format
    self.widgets = [progressbar.FormatLabel('(%(value)d of %(max)s)'), ' ',
                    progressbar.Percentage(), ' ', progressbar.Bar()]
    self.dynamic_data = {k + '_' + kk: 0.0 for k in display_data.keys()
                         for kk in display_data[k]}
    diff_data = {'diff_' + k + '_' + kk: 0.0 for k in display_data.keys()
                 for kk in display_data[k]}
    self.dynamic_data.update(diff_data)
    for t in display_data.keys():
        ddstr = ' [' + t + ']'
        for s in display_data[t]:
            value_name = t + '_' + s
            ddstr = ddstr + ' ' + s + ':' + '%(' + value_name + ').3f (%(diff_' + value_name + ').3f)'
        self.widgets.append(progressbar.FormatLabel(ddstr))
    self.widgets.extend(['|', progressbar.FormatLabel('Time: %(elapsed)s'), '|',
                         progressbar.AdaptiveETA()])
def train(self, epochs, batch_size, learning_rate, save_to=None):
    self.train_step = pt.apply_optimizer(tf.train.AdamOptimizer(learning_rate, epsilon=1),
                                         losses=[self.error_function])
    init = tf.initialize_all_variables()
    self.sess.run(init)
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=epochs).start()
    while self.get_epoch() < epochs:
        input_data = self.hdf5reader.next()
        _, loss_value = self.sess.run(
            [self.train_step, self.error_function],
            {
                self.encoder.input_data: input_data
            }
        )
        pbar.update(self.get_epoch())
    pbar.finish()
def images_to_hdf5(dir_path, output_hdf5, size=(112, 112), channels=3, resize_to=None):
    files = sorted(os.listdir(dir_path))
    nr_of_images = len(files)
    if resize_to:
        size = resize_to
    i = 0
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=nr_of_images).start()
    data = np.empty(shape=(nr_of_images, size[0], size[1], channels), dtype=np.uint8)
    for f in files:
        datum = imread(dir_path + '/' + f)
        if resize_to:
            datum = np.asarray(Image.fromarray((datum), 'RGB')
                               .resize((size[0], size[1]), PIL.Image.ANTIALIAS))
        data[i, :, :, :] = datum
        i = i + 1
        pbar.update(i)
    pbar.finish()
    with h5py.File(output_hdf5, 'w') as hf:
        hf.create_dataset('data', data=data)
def load_corpus(self, corenlpserver, process=True):
    """
    Use the PubMed web services to retrieve the title and abstract of each PMID
    :param corenlpserver:
    :param process:
    :return:
    """
    time_per_abs = []
    widgets = [pb.Percentage(), ' ', pb.Bar(), ' ', pb.AdaptiveETA(), ' ', pb.Timer()]
    pbar = pb.ProgressBar(widgets=widgets, maxval=len(self.pmids),
                          redirect_stdout=True).start()
    for i, pmid in enumerate(self.pmids):
        t = time.time()
        newdoc = PubmedDocument(pmid)
        if newdoc.abstract == "":
            logging.info("ignored {} due to the fact that no abstract was found".format(pmid))
            continue
        newdoc.process_document(corenlpserver, "biomedical")
        self.documents["PMID" + pmid] = newdoc
        abs_time = time.time() - t
        time_per_abs.append(abs_time)
        pbar.update(i + 1)
    pbar.finish()
    abs_avg = sum(time_per_abs) * 1.0 / len(time_per_abs)
    logging.info("average time per abstract: %ss" % abs_avg)
def getProgress(self, url, fileSize):
    status = json.loads(urllib.urlopen(url).read())
    if len(status["data"]) == 0:
        logger.info(url + " upload done ")
        return True
    widgets = ['Progress: ', Percentage(), ' ', Bar(marker=RotatingMarker('>-=')), ' ',
               ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=fileSize).start()
    upload_size = 0
    while upload_size < fileSize:
        _response = self.doGet(url)
        _data = json.loads(_response)
        upload_size = long(_data["data"]["upload_size"])
        total_size = long(_data["data"]["total_size"])
        if upload_size == 0 and total_size == 0:
            break
        pbar.update(upload_size)
        time.sleep(1)
    pbar.finish()
    logger.info(url + " upload done")
    return True
def scrape_mlb_odds_range(min_date=None, max_date=None):
    min_date = min_date or datetime.datetime.today() - datetime.timedelta(days=1)
    max_date = max_date or datetime.datetime.today()
    if isinstance(min_date, basestring):
        min_date = parser.parse(min_date)
    if isinstance(max_date, basestring):
        max_date = parser.parse(max_date)
    date = min_date
    pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=int((max_date - min_date).total_seconds() / (60 * 60 * 24)) + 1)
    pbar.start()
    saved = 0
    hit = 0
    while date <= max_date:
        day_odds = load_odds_for_day(date)
        if day_odds is not None and len(day_odds) > 0:
            save_sbr_odds_info('mlb', date, day_odds)
            saved += 1
        hit += 1
        date += datetime.timedelta(days=1)
        pbar.update(value=hit)
    pbar.finish()
    return saved
def scrape_nba_odds_range(min_date=None, max_date=None):
    min_date = min_date or datetime.datetime.today() - datetime.timedelta(days=1)
    max_date = max_date or datetime.datetime.today()
    if isinstance(min_date, basestring):
        min_date = parser.parse(min_date)
    if isinstance(max_date, basestring):
        max_date = parser.parse(max_date)
    date = min_date
    pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=int((max_date - min_date).total_seconds() / (60 * 60 * 24)) + 1)
    pbar.start()
    saved = 0
    hit = 0
    while date <= max_date:
        day_odds = load_odds_for_day(date)
        if day_odds is not None and len(day_odds) > 0:
            save_sbr_odds_info('nba', date, day_odds)
            saved += 1
        hit += 1
        date += datetime.timedelta(days=1)
        pbar.update(value=hit)
    pbar.finish()
    return saved
def __iter__(self):
    if self.count != 0:
        widgets = [
            '%s: ' % (self.caption,),
            progressbar.Percentage(), ' ',
            progressbar.Bar(), ' ',
            progressbar.ETA(),
        ]
        pbar = progressbar.ProgressBar(widgets=widgets, maxval=self.count)
        pbar.start()
        for idx, item in enumerate(self.iterator):
            yield item
            pbar.update(idx)
        pbar.finish()
def _setup_progress(self, options):
    if options.progress:
        if self.beanstalk:
            # With Beanstalk C&C we don't know how many...
            self.progress = progressbar.ProgressBar(
                redirect_stdout=True,
                redirect_stderr=True,
                widgets=[
                    'Total: ',
                    progressbar.Counter(),
                    ', ',
                    progressbar.Timer()
                ])
        else:
            self.progress = progressbar.ProgressBar(
                redirect_stdout=True,
                redirect_stderr=True,
                widgets=[
                    progressbar.Percentage(),
                    progressbar.Bar(),
                    ' (', progressbar.ETA(), ') ',
                ])
    else:
        self.progress = None
def __init__(self, options):
    self.wildcards = []
    self.options = options
    self.domains = []
    if options.domains:
        self.domains += filter(None, options.domains.read().split("\n"))
    self.domains += options.domain
    self.domains = list(set(self.domains))
    random.shuffle(self.domains)
    self.resolvers = map(str.strip, filter(None, options.resolvers.read().split("\n")))
    random.shuffle(self.resolvers)
    self.names = [X for X in self._load_names(options.names)]
    if options.progress:
        self.progress = progressbar.ProgressBar(
            redirect_stdout=True,
            redirect_stderr=True,
            widgets=[
                progressbar.Percentage(),
                progressbar.Bar(),
                ' (', progressbar.ETA(), ') ',
            ])
    else:
        self.progress = None
    self.finished = 0
    LOG.info("%d names, %d resolvers, %d domains",
             len(self.names), len(self.resolvers), len(self.domains))
def compute_embeddings(images):
    """Runs inference on an image.

    Args:
      image: Image file names.

    Returns:
      Dict mapping image file name to embedding.
    """
    # Creates graph from saved GraphDef.
    create_graph()
    filename_to_emb = {}
    config = tf.ConfigProto(device_count={'GPU': 0})
    bar = progressbar.ProgressBar(widgets=[progressbar.Bar('=', '[', ']'), ' ',
                                           progressbar.Percentage()])
    with tf.Session(config=config) as sess:
        i = 0
        for image in bar(images):
            if not tf.gfile.Exists(image):
                tf.logging.fatal('File does not exist %s', image)
            image_data = tf.gfile.FastGFile(image, 'rb').read()
            # Some useful tensors:
            # 'softmax:0': A tensor containing the normalized prediction across
            #   1000 labels.
            # 'pool_3:0': A tensor containing the next-to-last layer containing 2048
            #   float description of the image.
            # 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
            #   encoding of the image.
            # Runs the softmax tensor by feeding the image_data as input to the graph.
            softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
            embedding_tensor = sess.graph.get_tensor_by_name('pool_3:0')
            embedding = sess.run(embedding_tensor, {'DecodeJpeg/contents:0': image_data})
            filename_to_emb[image] = embedding.reshape(2048)
            i += 1
            # print(image, i, len(images))
    return filename_to_emb


# temp_dir is a subdir of temp
def main(project_id, video_basename, sampling_rate=3):
    # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'  # or any {'0', '1', '2'}
    video_name = video_basename[:video_basename.index('.')]

    # extract video frames
    extracted_frame_dir = os.path.join('temp', project_id, video_name, 'frames')
    mkdir_p(extracted_frame_dir)
    if not os.path.isdir(extracted_frame_dir):
        os.mkdir(extracted_frame_dir)
    video_path = os.path.join('videos', project_id, video_basename)
    vidcap = cv2.VideoCapture(video_path)
    print('Extracting video frames...')
    bar = progressbar.ProgressBar(maxval=101,
                                  widgets=[progressbar.Bar('=', '[', ']'), ' ',
                                           progressbar.Percentage()])
    bar.start()
    fps = vidcap.get(CV_CAP_PROP_FPS)  # TODO
    fps = fps if fps != float('nan') else 25
    print 'actual fps', fps, 'sampling rate', sampling_rate
    success, image = vidcap.read()
    frames_to_extract = range(0, int(vidcap.get(CV_CAP_PROP_FRAME_COUNT)),
                              int(round(fps / sampling_rate)))
    frame_count = len(frames_to_extract)
    for frame_pos in bar(frames_to_extract):
        vidcap.set(CV_CAP_PROP_POS_FRAMES, frame_pos)
        success, image = vidcap.read()
        # print('Read a new frame: %f ms' % vidcap.get(CV_CAP_PROP_POS_MSEC), success)
        cv2.imwrite(os.path.join(extracted_frame_dir,
                                 "%09d.jpg" % vidcap.get(CV_CAP_PROP_POS_MSEC)),
                    image)  # TODO (might still work)
    bar.finish()
def download(number, save_dir='./'):
    """Download pre-trained word vector
    :param number: integer, default ``None``
    :param save_dir: str, default './'
    :return: file path for downloaded file
    """
    df = load_datasets()
    row = df.iloc[[number]]
    url = ''.join(row.URL)
    if not url:
        print('The word vector you specified was not found. Please specify correct name.')

    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets)

    def dlProgress(count, blockSize, totalSize):
        if pbar.max_value is None:
            pbar.max_value = totalSize
            pbar.start()
        pbar.update(min(count * blockSize, totalSize))

    file_name = url.split('/')[-1]
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, file_name)
    path, _ = urlretrieve(url, save_path, reporthook=dlProgress)
    pbar.finish()
    return path
def __enter__(self):
    self.bar = progressbar.ProgressBar(
        widgets=[
            progressbar.Percentage(),
            ' ',
            progressbar.Bar(),
            progressbar.FileTransferSpeed(),
            ' ',
            progressbar.ETA(),
        ],
        max_value=self.max_value,
    )
    self.fd = open(self.output_path, 'wb')
    return self
def train(self):
    data = Data(self.train_dat, self.train_lab)
    batch_num = self.length / self.batch_size if self.length % self.batch_size == 0 \
        else self.length / self.batch_size + 1
    model = self.add_model()
    with self.sess as sess:
        tf.initialize_all_variables().run()
        for ite in range(self.iterations):
            print "Iteration {}".format(ite)
            cost = 0.
            pbar = pb.ProgressBar(widgets=[pb.Percentage(), pb.Bar(), pb.ETA()],
                                  maxval=batch_num).start()
            for i in range(batch_num):
                batch_x, batch_y = data.next_batch(self.batch_size)
                c, _ = self.sess.run([model['loss'], model['optimizer']],
                                     feed_dict={model['train_x']: batch_x,
                                                model['train_y']: batch_y,
                                                model['p_keep_dens']: 0.75})
                cost += c / batch_num
                pbar.update(i + 1)
            pbar.finish()
            print ">>cost: {}".format(cost)
            t_acc, d_acc = self.eval(model, 3000)
            # early stop
            if t_acc >= 0.995 and d_acc >= 0.995:
                break
        self.predict(model)
def bar_update(self, epoch, logs):
    ologs = {}
    for k in self.custom_log_functions:
        ologs[k] = self.custom_log_functions[k]()
    for k in logs:
        if len(k) > 5:
            ologs[k[-5:]] = logs[k]
        else:
            ologs[k] = logs[k]

    if not hasattr(self, 'bar'):
        import progressbar
        widgets = [
            progressbar.Timer(format='%(elapsed)s'), ' ',
            progressbar.Counter(),
            progressbar.Bar(),
            progressbar.AbsoluteETA(format='%(eta)s'), ' ',
        ]
        keys = []
        for k in ologs:
            keys.append(k)
        keys.sort()
        for k in keys:
            widgets.append(progressbar.DynamicMessage(k))
            widgets.append(' ')
        self.bar = progressbar.ProgressBar(max_value=self.max_epoch, widgets=widgets)

    self.bar.update(epoch + 1, **ologs)
def __call__(self, epoch):
    if self._batches is None:
        logger.info("Preparing evaluation data...")
        self._batches = self.reader.input_module.batch_generator(
            self._dataset, self._batch_size, is_eval=True)
    logger.info("Started evaluation %s" % self._info)
    metrics = defaultdict(lambda: list())
    bar = progressbar.ProgressBar(
        max_value=len(self._dataset) // self._batch_size + 1,
        widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(),
                 ' (', progressbar.ETA(), ') '])
    for i, batch in bar(enumerate(self._batches)):
        inputs = self._dataset[i * self._batch_size:(i + 1) * self._batch_size]
        predictions = self.reader.model_module(batch, self._ports)
        m = self.apply_metrics(inputs, predictions)
        for k in self._metrics:
            metrics[k].append(m[k])

    metrics = self.combine_metrics(metrics)
    super().add_to_history(metrics, self._iter, epoch)

    printmetrics = sorted(metrics.keys())
    res = "Epoch %d\tIter %d\ttotal %d" % (epoch, self._iter, self._total)
    for m in printmetrics:
        res += '\t%s: %.3f' % (m, metrics[m])
        self.update_summary(self._iter, self._info + '_' + m, metrics[m])
        if self._write_metrics_to is not None:
            with open(self._write_metrics_to, 'a') as f:
                f.write("{0} {1} {2:.5}\n".format(datetime.now(), self._info + '_' + m,
                                                  np.round(metrics[m], 5)))
    res += '\t' + self._info
    logger.info(res)

    if self._side_effect is not None:
        self._side_effect_state = self._side_effect(metrics, self._side_effect_state)
def get_pbar(num, prefix=""): assert isinstance(prefix, str) pbar = pb.ProgressBar(widgets=[prefix, pb.Percentage(), pb.Bar(), pb.ETA()], maxval=num) return pbar
def __init__(self, nbytes, nfiles):
    self._total_bytes = nbytes
    self._pending_files = nfiles
    self._transferring_files = 0
    self._complete_files = 0
    self._lock = Lock()
    self._data = {}
    widgets = ['Progress: ', Percentage(), ' ', Bar(left='[', right=']'), ' ',
               Timer(format='Time: %s'), ' ', FileTransferSpeed()]
    if self._total_bytes > 0:
        self.pbar = ProgressBar(widgets=widgets, maxval=self._total_bytes).start()
    else:
        self.pbar = ProgressBar(widgets=widgets, maxval=nfiles).start()
def collect_mailinfos(server, folder_contents, outpath_format):
    # construct progressbar
    progressbar_widgets = [
        '[Choosing mails for download ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
        progressbar.ETA()]
    total_count = 0
    for folder, mailids in folder_contents.items():
        total_count += len(mailids)
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets,
                                                   maxval=total_count).start()

    # collect all mailinfos
    mailinfos = {}
    mailinfo_count = 0
    for folder, mailids in folder_contents.items():
        mailinfos[folder] = []

        # get mailinfo bit by bit
        server.select_folder(folder, readonly=True)
        for mailid in mailids:
            # fetch mail information
            mailinfo = server.fetch([mailid], ['ENVELOPE', 'INTERNALDATE',
                                               'RFC822.SIZE'])[mailid]
            mailsize = bitmath.Byte(mailinfo[b'RFC822.SIZE'])
            mailfilename = construct_mailfilename(outpath_format, mailinfo,
                                                  args.outdir, folder, mailid)

            # only add if mailfilename can be constructed
            if mailfilename:
                mailinfos[folder].append((mailid, mailfilename, mailsize))

            mailinfo_count += 1
            progressbar_instance.update(mailinfo_count)

    progressbar_instance.finish()
    return mailinfos
def cleanup(stored_files, stored_dirs, download_list, outdir):
    # create list of files to keep
    keep_list = []
    for folder, mails in download_list.items():
        for mailid, mailfilename, mailsize in mails:
            keep_list.append(mailfilename)

    progressbar_widgets = [
        '[Cleaning up outdir ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets,
                                                   maxval=len(stored_files)).start()
    file_count = 0

    # delete all files we don't need to keep
    for file in stored_files:
        # delete if not on server
        if not file in keep_list:
            os.remove(file)

        # progressbar
        file_count += 1
        progressbar_instance.update(file_count)

    progressbar_instance.finish()

    # remove empty folders
    possible_empty_folders = True
    while possible_empty_folders:
        # find all subfolders
        stored_dirs = []
        for root, dirs, files in os.walk(outdir):
            for name in dirs:
                stored_dirs.append(os.path.join(root, name))

        # delete empty folders, indicate next run if one folder was deleted
        possible_empty_folders = False
        for folder in stored_dirs:
            if not os.listdir(folder):
                shutil.rmtree(folder)
                possible_empty_folders = True
def create_app(load_db=True, populate_qr_cache=True, progressbar=False):
    # Set up logging
    log_level = os.environ.get('AF_LOGGING_LEVEL', None)
    if log_level is not None:
        log_levels = ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
        if log_level.upper() in log_levels:
            log_level = getattr(log, log_level)
            log.basicConfig(level=log_level)
        else:
            log.warning('Invalid log level: {}'.format(log_level.upper()))
    else:
        log.warning('No log level set, using default level.')

    log.info('Creating Flask application')
    app = Flask(__name__)
    app.register_blueprint(root)

    # Now load the database if requested
    if load_db:
        from . import database_handler as dh
        log.info('Loading database.')
        dh.get_database()       # This loads the database into memory.
        log.info('Database loaded.')

    if populate_qr_cache:
        if progressbar:
            from progressbar import ProgressBar, Bar, Timer, ETA
            pbar = ProgressBar(widgets=['Populating QR cache: ', Bar(),
                                        ' ', Timer(), ' ', ETA()])
            kwargs = {'pbar': pbar}
        else:
            log.info('Populating QR cache.')
            kwargs = {}

        from .cache_utils import populate_qr_cache
        populate_qr_cache(**kwargs)

    return app
def __init__(self, msg, maxval, widgets=None, extrapos=-1):
    self.msg = msg
    self.extrapos = extrapos
    if not widgets:
        widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
                   progressbar.ETA()]
        self.extrapos = 4

    try:
        self._resize_default = signal.getsignal(signal.SIGWINCH)
    except:
        self._resize_default = None
    progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets,
                                     fd=sys.stdout)
def getCurrentStatus(self, filename_detections):
    pbar = pb.ProgressBar(maxval=len(self._seq.data),
                          widgets=['Loading last status', pb.Percentage(), pb.Bar()])
    pbar.start()
    cache_str = ''
    with open(filename_detections, "r") as inputfile:
        cache_str = inputfile.readlines()

    for i in xrange(len(self._seq.data)):
        pbar.update(i)
        if len(self.subset_idxs) > 0:
            if i not in self.subset_idxs:
                break
        hd = HandDetector(numpy.zeros((1, 1)), 0., 0.)  # dummy object
        com = numpy.asarray(hd.detectFromCache(filename_detections,
                                               self._seq.data[i].fileName, cache_str))
        if numpy.allclose(com[2], 0.):
            self.curFrame = i
            break
        else:
            self._seq.data[i] = self._seq.data[i]._replace(
                com=self.importer.jointImgTo3D(com.reshape((3,))))

    # redo last pose, it might be set to default and saved
    if self.curFrame > 0:
        if len(self.subset_idxs) > 0:
            if self.subset_idxs.index(self.curFrame) - 1 >= 0:
                self.curFrame = self.subset_idxs[self.subset_idxs.index(self.curFrame) - 1]
        else:
            self.curFrame -= 1
def saveVideo3D(self, filename, sequence, showPC=True, showGT=False, niceColors=True,
                plotFrameNumbers=False, height=400, width=400):
    """
    Create a video with 3D annotations
    :param filename: name of file to save
    :param sequence: sequence data
    :return: None
    """
    txt = 'Saving {}'.format(filename)
    pbar = pb.ProgressBar(maxval=self.joints.shape[0],
                          widgets=[txt, pb.Percentage(), pb.Bar()])
    pbar.start()

    # Define the codec and create VideoWriter object
    fourcc = cv2.cv.CV_FOURCC(*'DIVX')
    video = cv2.VideoWriter('{}/depth_{}.avi'.format(self.subfolder, filename), fourcc,
                            self.fps, (height, width))
    if not video:
        raise EnvironmentError("Error in creating video writer")

    for i in range(self.joints.shape[0]):
        jt = self.joints[i]
        img = self.plotResult3D_OS(sequence.data[i].dpt, sequence.data[i].T,
                                   sequence.data[i].gt3Dorig, jt, showPC=showPC,
                                   showGT=showGT, niceColors=niceColors,
                                   width=width, height=height)
        img = numpy.flipud(img)
        img = img[:, :, [2, 1, 0]]  # change color channels for OpenCV
        img = cv2.resize(img, (height, width))
        if plotFrameNumbers:
            if sequence.data[i].subSeqName == 'ref':
                cv2.putText(img, "Reference Frame {}".format(i), (20, 20),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255))
            # plot frame number
            cv2.putText(img, "{}".format(i), (height - 50, width - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.4, (0, 0, 255))
        # write frame
        video.write(img)
        pbar.update(i)

    video.release()
    del video
    cv2.destroyAllWindows()
    pbar.finish()
def saveVideoFrames(self, filename, images):
    """
    Create a video with synthesized images
    :param filename: name of file to save
    :param images: video data
    :return: None
    """
    txt = 'Saving {}'.format(filename)
    pbar = pb.ProgressBar(maxval=images.shape[0],
                          widgets=[txt, pb.Percentage(), pb.Bar()])
    pbar.start()

    height = width = 128

    # Define the codec and create VideoWriter object
    fourcc = cv2.cv.CV_FOURCC(*'DIVX')
    video = cv2.VideoWriter('{}/synth_{}.avi'.format(self.subfolder, filename), fourcc,
                            self.fps, (height, width))
    if not video:
        raise EnvironmentError("Error in creating video writer")

    for i in range(images.shape[0]):
        img = images[i]
        img = cv2.normalize(img, alpha=0, beta=255, norm_type=cv2.cv.CV_MINMAX,
                            dtype=cv2.cv.CV_8UC1)
        img = cv2.cvtColor(img, cv2.cv.CV_GRAY2BGR)
        img = cv2.resize(img, (height, width))
        # write frame
        video.write(img)
        pbar.update(i)

    video.release()
    del video
    cv2.destroyAllWindows()
    pbar.finish()
def print_status_stream(title, stream):
    widgets = [title, FormatLabel(''), ' ', Percentage(), ' ', Bar(), ' ',
               RotatingMarker()]
    bar = None
    if sys.stderr.isatty():
        bar = progressbar.ProgressBar(widgets=widgets, max_value=255)

    def print_error(status):
        print(status['error'])

    def print_status(status):
        progress = status.get('progressDetail')
        if progress:
            widgets[1] = FormatLabel("%12s" % (status['status']))
            prog = int(round(255 * ((progress['current'] / progress['total']))))
            if bar is not None:
                bar.update(prog)

    def print_unknown(status):
        print(status)

    for line in stream:
        status = json.loads(line.decode('utf8'))
        if 'error' in status:
            print_error(status)
        elif 'status' in status:
            print_status(status)
        else:
            print_unknown(status)
def prepare_h5py(train_image, train_label, test_image, test_label, data_dir, shape=None):
    image = np.concatenate((train_image, test_image), axis=0).astype(np.uint8)
    label = np.concatenate((train_label, test_label), axis=0).astype(np.uint8)

    print('Preprocessing data...')

    import progressbar
    bar = progressbar.ProgressBar(
        maxval=100,
        widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()]
    )
    bar.start()

    f = h5py.File(os.path.join(data_dir, 'data.hy'), 'w')
    with open(os.path.join(data_dir, 'id.txt'), 'w') as data_id:
        for i in range(image.shape[0]):
            if i % (image.shape[0] / 100) == 0:
                bar.update(i / (image.shape[0] / 100))
            grp = f.create_group(str(i))
            data_id.write('{}\n'.format(i))
            if shape:
                grp['image'] = np.reshape(image[i], shape, order='F')
            else:
                grp['image'] = image[i]
            label_vec = np.zeros(10)
            label_vec[label[i] % 10] = 1
            grp['label'] = label_vec.astype(np.bool)
    bar.finish()
    f.close()
    return
def __init__(self):
    super().__init__()
    self.label = Label()

    # Got messes with sys.stdout and sys.stderr in ways that confuse progressbar and
    # cause it to output on the wrong one. This can be worked around by passing in a
    # new stream, but that stream can't be the same instance as sys.stdout or
    # sys.stderr, so I make a new one here that forwards everything.
    class StreamWrapper:
        def __getattr__(self, k):
            return getattr(sys.stdout, k)

    self.bar = progressbar.ProgressBar(fd=StreamWrapper(),
                                       widgets=[self.label, ' ', progressbar.Bar(), ' ',
                                                progressbar.Percentage(), ' '])
def _create_pbar(self, max_iter):
    """
    Creates a progress bar.
    """
    self.grad_iter = 0
    self.pbar = pb.ProgressBar()
    self.pbar.widgets = ["Optimizing: ", pb.Percentage(), " ",
                         pb.Bar(marker=pb.AnimatedMarker()), " ", pb.ETA()]
    self.pbar.maxval = max_iter
def convert_dataset(indices, name):
    # Open a TFRecordWriter
    filename = os.path.join(FLAGS.out, name + '.tfrecords')
    writeOpts = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
    writer = tf.python_io.TFRecordWriter(filename, options=writeOpts)

    # Load each data sample (image_a, image_b, flow) and write it to the TFRecord
    count = 0
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(indices)).start()
    for i in indices:
        image_a_path = os.path.join(FLAGS.data_dir, '%05d_img1.ppm' % (i + 1))
        image_b_path = os.path.join(FLAGS.data_dir, '%05d_img2.ppm' % (i + 1))
        flow_path = os.path.join(FLAGS.data_dir, '%05d_flow.flo' % (i + 1))

        image_a = imread(image_a_path)
        image_b = imread(image_b_path)

        # Convert from RGB -> BGR
        image_a = image_a[..., [2, 1, 0]]
        image_b = image_b[..., [2, 1, 0]]

        # Scale from [0, 255] -> [0.0, 1.0]
        image_a = image_a / 255.0
        image_b = image_b / 255.0

        image_a_raw = image_a.tostring()
        image_b_raw = image_b.tostring()
        flow_raw = open_flo_file(flow_path).tostring()

        example = tf.train.Example(features=tf.train.Features(feature={
            'image_a': _bytes_feature(image_a_raw),
            'image_b': _bytes_feature(image_b_raw),
            'flow': _bytes_feature(flow_raw)}))
        writer.write(example.SerializeToString())
        pbar.update(count + 1)
        count += 1
    writer.close()
def setup_progressbar(self):
    from progressbar import ProgressBar, Bar, Percentage
    return ProgressBar(widgets=[Bar(), ' ', Percentage()])
def setup_progressbar(self):
    from progressbar import ProgressBar, FileTransferSpeed, Bar, Percentage, ETA
    return ProgressBar(widgets=[FileTransferSpeed(), ' <<<', Bar(), '>>> ',
                                Percentage(), ' ', ETA()])
def __init__(self, widgets=None, **kwargs):
    import progressbar as pb
    logging.Handler.__init__(self)

    if widgets is None:
        class CommaProgress(pb.widgets.WidgetBase):
            def __call__(self, progress, data):
                return '{value:,} of {max_value:,}'.format(**data)

        widgets = [' ', CommaProgress(), ' (', pb.Percentage(), ') ',
                   pb.Bar(), ' ', pb.ETA()]

    self.pbar_args = {'widgets': widgets}
    self.pbar_args.update(kwargs)
def create_progress_bar(message):
    widgets = [
        message,
        progressbar.Counter(),
        ' ',
        progressbar.Percentage(),
        ' ',
        progressbar.Bar(),
        progressbar.AdaptiveETA()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets)
    return pbar
def _get_progress_widgets(self):
    """
    Returns the progress widgets for a file download.
    """
    format_custom_text = progressbar.FormatCustomText(
        'Fetching [ %(file)s ] :: ', dict(file=self.remote_file_name),
    )
    widgets = [
        format_custom_text,
        progressbar.ETA(),
        progressbar.Percentage(),
        progressbar.Bar(),
    ]
    return widgets
def transfer(self):
    image_reshape = np.ndarray(shape=(self.pre_images.shape[0], self.output_rows,
                                      self.output_cols, 3), dtype=np.float16)
    widgets = ['Transfer: ', pbar.Percentage(), ' ', pbar.Bar('>'), ' ', pbar.ETA()]
    image_bar = pbar.ProgressBar(widgets=widgets,
                                 maxval=self.pre_images.shape[0]).start()
    for i in range(0, self.pre_images.shape[0]):
        image = self.pre_images[i].reshape(self.pre_img_rows, self.pre_img_cols)
        image = image.astype('uint8')
        im = Image.fromarray(image)  # monochromatic image
        imrgb = im.convert('RGB')
        imrgb = imrgb.resize((self.output_rows, self.output_cols), Image.ANTIALIAS)

        im = np.array(imrgb, dtype=np.float16)
        im[:, :, 0] -= imagenet_mean['R']
        im[:, :, 1] -= imagenet_mean['G']
        im[:, :, 2] -= imagenet_mean['B']
        # 'RGB'->'BGR', historical reasons in OpenCV
        im = im[:, :, ::-1]
        image_reshape[i] = im

        # test for correct convert!
        # if i < 3:
        #     img = Image.fromarray(np.uint8(im))
        #     img.save(str(i) + '.jpeg', 'jpeg')

        image_bar.update(i + 1)
    image_bar.finish()

    print('image_reshape:', image_reshape.shape)
    return image_reshape