The following 50 code examples, extracted from open source Python projects, illustrate how to use progressbar.Percentage().
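Before the examples, here is a minimal, self-contained sketch of the pattern nearly all of them share: put Percentage() in a widget list, construct a ProgressBar with a maximum value, then drive it with start(), update(), and finish(). This is an illustration assuming the classic progressbar API (maxval); newer forks of the library spell it max_value, as a few examples below reflect.

import time
import progressbar

# A minimal sketch, assuming the classic progressbar API (maxval/start/update/finish).
widgets = ['Working: ', progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()]
pbar = progressbar.ProgressBar(widgets=widgets, maxval=100).start()
for i in range(100):
    time.sleep(0.01)    # stand-in for real work
    pbar.update(i + 1)  # Percentage() renders current value / maxval as a percent
pbar.finish()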
def render(self, length=None, progress=False):
    """
    Render this signal into a numpy array of floats. Return the array.

    :param length: The length to render, in seconds. Optional.
    :param progress: Whether to show a progress bar for rendering
    """
    if progress and not progressbar:
        print('Install the progressbar module to see a progress bar for rendering')
        progress = False

    duration = self.duration if length is None else length * SAMPLE_RATE
    if duration == float('inf'):
        duration = 3 * SAMPLE_RATE
    else:
        duration = int(duration)

    out = numpy.empty((duration, 1))
    pbar = progressbar.ProgressBar(widgets=['Rendering: ', progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=duration - 1).start() if progress else None
    for i in range(duration):
        out[i] = self.amplitude(i)
        if pbar:
            pbar.update(i)
    if pbar:
        pbar.finish()
    return out
def main():
    uri, outfile, dataset = get_arguments()
    fd = tempfile.NamedTemporaryFile()
    progress = ProgressBar(widgets=[Percentage(), ' ', Bar(), ' ', ETA(), ' ', FileTransferSpeed()])

    def update(count, blockSize, totalSize):
        if progress.maxval is None:
            progress.maxval = totalSize
            progress.start()
        progress.update(min(count * blockSize, totalSize))

    urllib.urlretrieve(uri, fd.name, reporthook=update)
    if dataset == 'zinc12':
        df = pandas.read_csv(fd.name, delimiter='\t')
        df = df.rename(columns={'SMILES': 'structure'})
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
    elif dataset == 'chembl22':
        df = pandas.read_table(fd.name, compression='gzip')
        df = df.rename(columns={'canonical_smiles': 'structure'})
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
    else:
        df = pandas.read_csv(fd.name, delimiter='\t')
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
def collect_mailids(server):
    folders = server.list_folders()

    # construct progressbar
    progressbar_widgets = [
        '[Searching for mails on server] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=len(folders)).start()

    # collect all mailids for all folders
    folder_contents = {}
    folder_progress = 0
    for flags, delimiter, folder in folders:
        # read all mailids for the folder
        server.select_folder(folder, readonly=True)
        folder_contents[folder] = server.search()

        # update progressbar
        folder_progress += 1
        progressbar_instance.update(folder_progress)

    progressbar_instance.finish()
    return folder_contents
def download(download_list, total_download_size):
    progressbar_widgets = [
        '[Downloading mails ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
        progressbar.ETA(), ' ',
        bitmath.integrations.BitmathFileTransferSpeed()]
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets,
                                                   maxval=int(total_download_size)).start()

    downloaded_size = bitmath.Byte(0)
    for folder, mails in download_list.items():
        server.select_folder(folder, readonly=True)
        for mailid, mailfilename, mailsize in mails:
            # make parent directory
            if not os.path.isdir(os.path.dirname(mailfilename)):
                os.makedirs(os.path.dirname(mailfilename))

            # download mail
            with open(mailfilename, 'wb') as mailfile:
                mailfile.write(server.fetch([mailid], ['RFC822'])[mailid][b'RFC822'])

            # update progressbar
            downloaded_size += mailsize
            progressbar_instance.update(int(downloaded_size))

    progressbar_instance.finish()
def progressbarize(iterable, progress=False):
    """Construct a progressbar for loops if one is requested, otherwise return the iterable directly.

    :param iterable: iterable to use
    :param progress: True if a progressbar should be printed
    """
    if progress:
        # The cast to list is needed because a yielded value would prevent
        # ProgressBar from computing the overall ETA
        return progressbar.ProgressBar(widgets=[
            progressbar.Timer(), ', ',
            progressbar.Percentage(), ', ',
            progressbar.SimpleProgress(), ', ',
            progressbar.ETA()
        ])(list(iterable))

    return iterable
def bruteforce():
    import progressbar
    from time import sleep
    bar = progressbar.ProgressBar(maxval=60,
                                  widgets=[progressbar.Bar('==', '[', ']'), ' ', progressbar.Percentage()])
    bar.start()
    for i in xrange(10):
        bar.update(i + 1)
        sleep(0.05)
    wordlist = "/root/2fassassin/crack/wordlist/2fa-wordlist.txt"
    target = "/root/2fassassin/loot/*.pfx"
    sign = ""
    sign += "crackpkcs12 -v -b"
    sign += " "
    sign += target
    sign += "| tee crack.log"
    os.system(sign)
    bar.finish()
    sys.exit()
def bruteforce():
    import progressbar
    from time import sleep
    bar = progressbar.ProgressBar(maxval=60,
                                  widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
    bar.start()
    for i in xrange(10):
        bar.update(i + 1)
        sleep(0.05)
    wordlist = "/root/2fassassin/crack/wordlist/2fa-wordlist.txt"
    target = "/root/2fassassin/loot/*.pfx"
    sign = ""
    sign += "crackpkcs12 -v -b"
    sign += " "
    sign += target
    sign += "| tee crack.log"
    os.system(sign)
    bar.finish()
    sys.exit()
def Steg_brute(ifile, dicc):
    i = 0
    ofile = ifile.split('.')[0] + "_flag.txt"
    nlines = len(open(dicc).readlines())
    with open(dicc, 'r') as passFile:
        pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=nlines).start()
        for line in passFile.readlines():
            password = line.strip('\n')
            r = commands.getoutput("steghide extract -sf %s -p '%s' -xf %s" % (ifile, password, ofile))
            if not "no pude extraer" in r and not "could not extract" in r:
                print(color.GREEN + "\n\n " + r + color.ENDC)
                print("\n\n [+] " + color.INFO + "Information obtained with password:" +
                      color.GREEN + " %s\n" % password + color.ENDC)
                if check_file(ofile):
                    with open(ofile, 'r') as outfile:
                        for line in outfile.readlines():
                            print(line)
                break
            pbar.update(i + 1)
            i += 1
def __init__(self, *args, **kwargs):
    self.dld = FileDownloader()
    self.dld.stage(self.cmd_name)
    load_continents()
    load_oceans()
    load_currencies()
    load_languages()
    self.widgets = [
        MemoryUsage(),
        progressbar.ETA(),
        ' |Processed: ',
        progressbar.Counter(),
        ' |Done: ',
        progressbar.Percentage(),
        progressbar.Bar(),
    ]
    return super().__init__(*args, **kwargs)
def __init__(self, name, max_value=100, history_len=5, display=True,
             display_data={'train': ['loss', 'accuracy'], 'test': ['loss', 'accuracy']},
             level=logging.INFO, train_log_mode='TRAIN_PROGRESS', test_log_mode='TEST_PROGRESS'):
    super(ProgressbarLogger, self).__init__(
        name, level=level, display=display, logfile=None,
        train_log_mode=train_log_mode, test_log_mode=test_log_mode)

    self.train_log_data = {}
    self.test_log_data = {}
    self.max_value = max_value
    self.history_len = history_len
    self.display_data = display_data
    self.mode['TRAIN_PROGRESS'] = self.log_train_progress
    self.mode['TEST_PROGRESS'] = self.log_test_progress

    # create logging format
    self.widgets = [progressbar.FormatLabel('(%(value)d of %(max)s)'), ' ',
                    progressbar.Percentage(), ' ', progressbar.Bar()]
    self.dynamic_data = {k + '_' + kk: 0.0 for k in display_data.keys() for kk in display_data[k]}
    diff_data = {'diff_' + k + '_' + kk: 0.0 for k in display_data.keys() for kk in display_data[k]}
    self.dynamic_data.update(diff_data)
    for t in display_data.keys():
        ddstr = ' [' + t + ']'
        for s in display_data[t]:
            value_name = t + '_' + s
            ddstr = ddstr + ' ' + s + ':' + '%(' + value_name + ').3f (%(diff_' + value_name + ').3f)'
        self.widgets.append(progressbar.FormatLabel(ddstr))
    self.widgets.extend(['|', progressbar.FormatLabel('Time: %(elapsed)s'), '|', progressbar.AdaptiveETA()])
def deleteHostsByHostgroup(groupname):
    hostgroup = zapi.hostgroup.get(output=['groupid'], filter={'name': groupname})
    if hostgroup.__len__() != 1:
        logger.error('Hostgroup not found: %s\n\tFound this: %s' % (groupname, hostgroup))
    groupid = int(hostgroup[0]['groupid'])
    hosts = zapi.host.get(output=['name', 'hostid'], groupids=groupid)
    total = len(hosts)
    logger.info('Hosts found: %d' % (total))
    if (args.run):
        x = 0
        bar = ProgressBar(maxval=total,
                          widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        logger.echo = False
        for host in hosts:
            x = x + 1
            bar.update(x)
            logger.debug('(%d/%d) >> Removing >> %s' % (x, total, host))
            out = zapi.globo.deleteMonitors(host['name'])
        bar.finish()
        logger.echo = True
    else:
        logger.info('No host removed due to --no-run arg. Full list of hosts:')
        for host in hosts:
            logger.info('%s' % host['name'])
    return
def hosts_disable_all():
    """
    host status 0 = enabled
    host status 1 = disabled
    """
    logger.info('Disabling all hosts, in blocks of 1000')
    hosts = zapi.host.get(output=['hostid'], search={'status': 0})
    maxval = int(ceil(hosts.__len__()) / 1000 + 1)
    bar = ProgressBar(maxval=maxval,
                      widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for i in xrange(maxval):
        block = hosts[:1000]
        del hosts[:1000]
        result = zapi.host.massupdate(hosts=[x for x in block], status=1)
        i += 1
        bar.update(i)
    bar.finish()
    logger.info('Done')
    return
def proxy_passive_to_active():
    """
    proxy status 5 = active
    proxy status 6 = passive
    """
    logger.info('Change all proxies to active')
    proxys = zapi.proxy.get(output=['shorten', 'host'], filter={'status': 6})
    if (proxys.__len__() == 0):
        logger.info('Done')
        return
    bar = ProgressBar(maxval=proxys.__len__(),
                      widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for x in proxys:
        i += 1
        proxyid = x['proxyid']
        result = zapi.proxy.update(proxyid=proxyid, status=5)
        logger.echo = False
        logger.debug('Changed from passive to active proxy: %s' % (x['host']))
        bar.update(i)
    bar.finish()
    logger.echo = True
    logger.info('Done')
    return
def log_stats(self, generation, population, selected, n_mutated, scores):
    """
    Stats to be logged:
        generation: the iteration you're on
        mutated: the mutated population
        mutation_percent: percentage of mutation
        fitness_ind: fitness of all individuals
        group_fitness: fitness of all groups
        population: the population at this time
        score: one score for the puzzle
        record: fitness_ind and fitness_group compiled by stats (min, max, avg)

    :param generation: The current iteration you're on
    :param population: The current population you're using
    :param n_mutated: The number of mutated elements.
    :return:
    """
    record = self.stats.compile(population)
    self.populations.append(population)
    self.logbook.record(generations=generation,
                        mutated=n_mutated,
                        mutation_percent=config.mutate_inpd,
                        nb_full_connected=len([x for x in population if x.fitness_ind.values[0] == 4]),
                        connections_completions=scores[0],
                        score=scores[1],
                        selected=selected,
                        **record)
    # In case we need to keep famous big scores.
    # self.famous.update(self.pop)
def train(self, epochs, batch_size, learning_rate, save_to=None):
    self.train_step = pt.apply_optimizer(tf.train.AdamOptimizer(learning_rate, epsilon=1),
                                         losses=[self.error_function])
    init = tf.initialize_all_variables()
    self.sess.run(init)
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=epochs).start()
    while self.get_epoch() < epochs:
        input_data = self.hdf5reader.next()
        _, loss_value = self.sess.run(
            [self.train_step, self.error_function],
            {
                self.encoder.input_data: input_data
            }
        )
        pbar.update(self.get_epoch())
    pbar.finish()
def images_to_hdf5(dir_path, output_hdf5, size=(112, 112), channels=3, resize_to=None):
    files = sorted(os.listdir(dir_path))
    nr_of_images = len(files)
    if resize_to:
        size = resize_to
    i = 0
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=nr_of_images).start()
    data = np.empty(shape=(nr_of_images, size[0], size[1], channels), dtype=np.uint8)
    for f in files:
        datum = imread(dir_path + '/' + f)
        if resize_to:
            datum = np.asarray(Image.fromarray((datum), 'RGB').resize((size[0], size[1]), PIL.Image.ANTIALIAS))
        data[i, :, :, :] = datum
        i = i + 1
        pbar.update(i)
    pbar.finish()
    with h5py.File(output_hdf5, 'w') as hf:
        hf.create_dataset('data', data=data)
def load_corpus(self, corenlpserver, process=True):
    """
    Use the PubMed web services to retrieve the title and abstract of each PMID

    :param corenlpserver:
    :param process:
    :return:
    """
    time_per_abs = []
    widgets = [pb.Percentage(), ' ', pb.Bar(), ' ', pb.AdaptiveETA(), ' ', pb.Timer()]
    pbar = pb.ProgressBar(widgets=widgets, maxval=len(self.pmids), redirect_stdout=True).start()
    for i, pmid in enumerate(self.pmids):
        t = time.time()
        newdoc = PubmedDocument(pmid)
        if newdoc.abstract == "":
            logging.info("ignored {} because no abstract was found".format(pmid))
            continue
        newdoc.process_document(corenlpserver, "biomedical")
        self.documents["PMID" + pmid] = newdoc
        abs_time = time.time() - t
        time_per_abs.append(abs_time)
        pbar.update(i + 1)
    pbar.finish()
    abs_avg = sum(time_per_abs) * 1.0 / len(time_per_abs)
    logging.info("average time per abstract: %ss" % abs_avg)
def getProgress(self, url, fileSize):
    status = json.loads(urllib.urlopen(url).read())
    if len(status["data"]) == 0:
        logger.info(url + " upload done")
        return True
    widgets = ['Progress: ', Percentage(), ' ', Bar(marker=RotatingMarker('>-=')), ' ',
               ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=fileSize).start()
    upload_size = 0
    while upload_size < fileSize:
        _response = self.doGet(url)
        _data = json.loads(_response)
        upload_size = long(_data["data"]["upload_size"])
        total_size = long(_data["data"]["total_size"])
        if upload_size == 0 and total_size == 0:
            break
        pbar.update(upload_size)
        time.sleep(1)
    pbar.finish()
    logger.info(url + " upload done")
    return True
def scrape_mlb_odds_range(min_date=None, max_date=None):
    min_date = min_date or datetime.datetime.today() - datetime.timedelta(days=1)
    max_date = max_date or datetime.datetime.today()
    if isinstance(min_date, basestring):
        min_date = parser.parse(min_date)
    if isinstance(max_date, basestring):
        max_date = parser.parse(max_date)
    date = min_date
    pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=int((max_date - min_date).total_seconds() / (60 * 60 * 24)) + 1)
    pbar.start()
    saved = 0
    hit = 0
    while date <= max_date:
        day_odds = load_odds_for_day(date)
        if day_odds is not None and len(day_odds) > 0:
            save_sbr_odds_info('mlb', date, day_odds)
            saved += 1
        hit += 1
        date += datetime.timedelta(days=1)
        pbar.update(value=hit)
    pbar.finish()
    return saved
def scrape_nba_odds_range(min_date=None, max_date=None):
    min_date = min_date or datetime.datetime.today() - datetime.timedelta(days=1)
    max_date = max_date or datetime.datetime.today()
    if isinstance(min_date, basestring):
        min_date = parser.parse(min_date)
    if isinstance(max_date, basestring):
        max_date = parser.parse(max_date)
    date = min_date
    pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=int((max_date - min_date).total_seconds() / (60 * 60 * 24)) + 1)
    pbar.start()
    saved = 0
    hit = 0
    while date <= max_date:
        day_odds = load_odds_for_day(date)
        if day_odds is not None and len(day_odds) > 0:
            save_sbr_odds_info('nba', date, day_odds)
            saved += 1
        hit += 1
        date += datetime.timedelta(days=1)
        pbar.update(value=hit)
    pbar.finish()
    return saved
def __iter__(self):
    if self.count != 0:
        widgets = [
            '%s: ' % (self.caption,),
            progressbar.Percentage(), ' ',
            progressbar.Bar(), ' ',
            progressbar.ETA(),
        ]
        pbar = progressbar.ProgressBar(widgets=widgets, maxval=self.count)
        pbar.start()
        for idx, item in enumerate(self.iterator):
            yield item
            pbar.update(idx)
        pbar.finish()
def _setup_progress(self, options):
    if options.progress:
        if self.beanstalk:
            # With Beanstalk C&C we don't know how many...
            self.progress = progressbar.ProgressBar(
                redirect_stdout=True,
                redirect_stderr=True,
                widgets=[
                    'Total: ',
                    progressbar.Counter(),
                    ', ',
                    progressbar.Timer()
                ])
        else:
            self.progress = progressbar.ProgressBar(
                redirect_stdout=True,
                redirect_stderr=True,
                widgets=[
                    progressbar.Percentage(),
                    progressbar.Bar(),
                    ' (', progressbar.ETA(), ') ',
                ])
    else:
        self.progress = None
def __init__(self, options):
    self.wildcards = []
    self.options = options
    self.domains = []
    if options.domains:
        self.domains += filter(None, options.domains.read().split("\n"))
    self.domains += options.domain
    self.domains = list(set(self.domains))
    random.shuffle(self.domains)
    self.resolvers = map(str.strip, filter(None, options.resolvers.read().split("\n")))
    random.shuffle(self.resolvers)
    self.names = [X for X in self._load_names(options.names)]
    if options.progress:
        self.progress = progressbar.ProgressBar(
            redirect_stdout=True,
            redirect_stderr=True,
            widgets=[
                progressbar.Percentage(),
                progressbar.Bar(),
                ' (', progressbar.ETA(), ') ',
            ])
    else:
        self.progress = None
    self.finished = 0
    LOG.info("%d names, %d resolvers, %d domains",
             len(self.names), len(self.resolvers), len(self.domains))
def compute_embeddings(images):
    """Runs inference on the given images.

    Args:
        images: Image file names.

    Returns:
        Dict mapping image file name to embedding.
    """
    # Creates graph from saved GraphDef.
    create_graph()
    filename_to_emb = {}
    config = tf.ConfigProto(device_count={'GPU': 0})
    bar = progressbar.ProgressBar(widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
    with tf.Session(config=config) as sess:
        i = 0
        for image in bar(images):
            if not tf.gfile.Exists(image):
                tf.logging.fatal('File does not exist %s', image)
            image_data = tf.gfile.FastGFile(image, 'rb').read()
            # Some useful tensors:
            # 'softmax:0': A tensor containing the normalized prediction across
            #   1000 labels.
            # 'pool_3:0': A tensor containing the next-to-last layer containing 2048
            #   float description of the image.
            # 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
            #   encoding of the image.
            # Runs the softmax tensor by feeding the image_data as input to the graph.
            softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
            embedding_tensor = sess.graph.get_tensor_by_name('pool_3:0')
            embedding = sess.run(embedding_tensor, {'DecodeJpeg/contents:0': image_data})
            filename_to_emb[image] = embedding.reshape(2048)
            i += 1
            # print(image, i, len(images))
    return filename_to_emb

# temp_dir is a subdir of temp
def main(project_id, video_basename, sampling_rate=3):
    # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'  # or any {'0', '1', '2'}
    video_name = video_basename[:video_basename.index('.')]

    # extract video frames
    extracted_frame_dir = os.path.join('temp', project_id, video_name, 'frames')
    mkdir_p(extracted_frame_dir)
    if not os.path.isdir(extracted_frame_dir):
        os.mkdir(extracted_frame_dir)
    video_path = os.path.join('videos', project_id, video_basename)
    vidcap = cv2.VideoCapture(video_path)
    print('Extracting video frames...')
    bar = progressbar.ProgressBar(maxval=101,
                                  widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
    bar.start()
    fps = vidcap.get(CV_CAP_PROP_FPS)  # TODO
    fps = fps if fps != float('nan') else 25
    print 'actual fps', fps, 'sampling rate', sampling_rate
    success, image = vidcap.read()
    frames_to_extract = range(0, int(vidcap.get(CV_CAP_PROP_FRAME_COUNT)), int(round(fps / sampling_rate)))
    frame_count = len(frames_to_extract)
    for frame_pos in bar(frames_to_extract):
        vidcap.set(CV_CAP_PROP_POS_FRAMES, frame_pos)
        success, image = vidcap.read()
        # print('Read a new frame: %f ms' % vidcap.get(CV_CAP_PROP_POS_MSEC), success)
        cv2.imwrite(os.path.join(extracted_frame_dir, "%09d.jpg" % vidcap.get(CV_CAP_PROP_POS_MSEC)),
                    image)  # TODO (might still work)
    bar.finish()
def download(number, save_dir='./'):
    """Download pre-trained word vector

    :param number: integer, default ``None``
    :param save_dir: str, default './'
    :return: file path for downloaded file
    """
    df = load_datasets()
    row = df.iloc[[number]]
    url = ''.join(row.URL)
    if not url:
        print('The word vector you specified was not found. Please specify a correct name.')
    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()), ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets)

    def dlProgress(count, blockSize, totalSize):
        if pbar.max_value is None:
            pbar.max_value = totalSize
            pbar.start()
        pbar.update(min(count * blockSize, totalSize))

    file_name = url.split('/')[-1]
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, file_name)
    path, _ = urlretrieve(url, save_path, reporthook=dlProgress)
    pbar.finish()
    return path
def __enter__(self):
    self.bar = progressbar.ProgressBar(
        widgets=[
            progressbar.Percentage(),
            ' ',
            progressbar.Bar(),
            progressbar.FileTransferSpeed(),
            ' ',
            progressbar.ETA(),
        ],
        max_value=self.max_value,
    )
    self.fd = open(self.output_path, 'wb')
    return self
def train(self):
    data = Data(self.train_dat, self.train_lab)
    batch_num = self.length / self.batch_size if self.length % self.batch_size == 0 \
        else self.length / self.batch_size + 1
    model = self.add_model()
    with self.sess as sess:
        tf.initialize_all_variables().run()
        for ite in range(self.iterations):
            print "Iteration {}".format(ite)
            cost = 0.
            pbar = pb.ProgressBar(widgets=[pb.Percentage(), pb.Bar(), pb.ETA()], maxval=batch_num).start()
            for i in range(batch_num):
                batch_x, batch_y = data.next_batch(self.batch_size)
                c, _ = self.sess.run([model['loss'], model['optimizer']],
                                     feed_dict={model['train_x']: batch_x,
                                                model['train_y']: batch_y,
                                                model['p_keep_dens']: 0.75})
                cost += c / batch_num
                pbar.update(i + 1)
            pbar.finish()
            print ">>cost: {}".format(cost)
            t_acc, d_acc = self.eval(model, 3000)
            # early stop
            if t_acc >= 0.995 and d_acc >= 0.995:
                break
    self.predict(model)
def get_pbar(num, prefix=""): assert isinstance(prefix, str) pbar = pb.ProgressBar(widgets=[prefix, pb.Percentage(), pb.Bar(), pb.ETA()], maxval=num) return pbar
def __init__(self, nbytes, nfiles):
    self._total_bytes = nbytes
    self._pending_files = nfiles
    self._transferring_files = 0
    self._complete_files = 0
    self._lock = Lock()
    self._data = {}
    widgets = ['Progress: ', Percentage(), ' ',
               Bar(left='[', right=']'), ' ',
               Timer(format='Time: %s'), ' ',
               FileTransferSpeed()]
    if self._total_bytes > 0:
        self.pbar = ProgressBar(widgets=widgets, maxval=self._total_bytes).start()
    else:
        self.pbar = ProgressBar(widgets=widgets, maxval=nfiles).start()
def collect_mailinfos(server, folder_contents, outpath_format):
    # construct progressbar
    progressbar_widgets = [
        '[Choosing mails for download ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
        progressbar.ETA()]
    total_count = 0
    for folder, mailids in folder_contents.items():
        total_count += len(mailids)
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets, maxval=total_count).start()

    # collect all mailinfos
    mailinfos = {}
    mailinfo_count = 0
    for folder, mailids in folder_contents.items():
        mailinfos[folder] = []

        # get mailinfo bit by bit
        server.select_folder(folder, readonly=True)
        for mailid in mailids:
            # fetch mail information
            mailinfo = server.fetch([mailid], ['ENVELOPE', 'INTERNALDATE', 'RFC822.SIZE'])[mailid]
            mailsize = bitmath.Byte(mailinfo[b'RFC822.SIZE'])
            mailfilename = construct_mailfilename(outpath_format, mailinfo, args.outdir, folder, mailid)

            # only add if mailfilename can be constructed
            if mailfilename:
                mailinfos[folder].append((mailid, mailfilename, mailsize))

            mailinfo_count += 1
            progressbar_instance.update(mailinfo_count)

    progressbar_instance.finish()
    return mailinfos
def cleanup(stored_files, stored_dirs, download_list, outdir):
    # create list of files to keep
    keep_list = []
    for folder, mails in download_list.items():
        for mailid, mailfilename, mailsize in mails:
            keep_list.append(mailfilename)

    progressbar_widgets = [
        '[Cleaning up outdir ] ',
        progressbar.Percentage(),
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ']
    progressbar_instance = progressbar.ProgressBar(widgets=progressbar_widgets,
                                                   maxval=len(stored_files)).start()
    file_count = 0

    # delete all files we don't need to keep
    for file in stored_files:
        # delete if not on server
        if not file in keep_list:
            os.remove(file)

        # progressbar
        file_count += 1
        progressbar_instance.update(file_count)

    progressbar_instance.finish()

    # remove empty folders
    possible_empty_folders = True
    while possible_empty_folders:
        # find all subfolders
        stored_dirs = []
        for root, dirs, files in os.walk(outdir):
            for name in dirs:
                stored_dirs.append(os.path.join(root, name))

        # delete empty folders; indicate another run if one folder was deleted
        possible_empty_folders = False
        for folder in stored_dirs:
            if not os.listdir(folder):
                shutil.rmtree(folder)
                possible_empty_folders = True
def __init__(self, msg, maxval, widgets=None, extrapos=-1):
    self.msg = msg
    self.extrapos = extrapos
    if not widgets:
        widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()]
        self.extrapos = 4
    try:
        self._resize_default = signal.getsignal(signal.SIGWINCH)
    except:
        self._resize_default = None
    progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets, fd=sys.stdout)
def getCurrentStatus(self, filename_detections):
    pbar = pb.ProgressBar(maxval=len(self._seq.data),
                          widgets=['Loading last status', pb.Percentage(), pb.Bar()])
    pbar.start()
    cache_str = ''
    with open(filename_detections, "r") as inputfile:
        cache_str = inputfile.readlines()
    for i in xrange(len(self._seq.data)):
        pbar.update(i)
        if len(self.subset_idxs) > 0:
            if i not in self.subset_idxs:
                break
        hd = HandDetector(numpy.zeros((1, 1)), 0., 0.)  # dummy object
        com = numpy.asarray(hd.detectFromCache(filename_detections, self._seq.data[i].fileName, cache_str))
        if numpy.allclose(com[2], 0.):
            self.curFrame = i
            break
        else:
            self._seq.data[i] = self._seq.data[i]._replace(com=self.importer.jointImgTo3D(com.reshape((3,))))

    # redo last pose, it might be set to default and saved
    if self.curFrame > 0:
        if len(self.subset_idxs) > 0:
            if self.subset_idxs.index(self.curFrame) - 1 >= 0:
                self.curFrame = self.subset_idxs[self.subset_idxs.index(self.curFrame) - 1]
        else:
            self.curFrame -= 1
def saveVideo3D(self, filename, sequence, showPC=True, showGT=False, niceColors=True,
                plotFrameNumbers=False, height=400, width=400):
    """
    Create a video with 3D annotations
    :param filename: name of file to save
    :param sequence: sequence data
    :return: None
    """
    txt = 'Saving {}'.format(filename)
    pbar = pb.ProgressBar(maxval=self.joints.shape[0], widgets=[txt, pb.Percentage(), pb.Bar()])
    pbar.start()

    # Define the codec and create VideoWriter object
    fourcc = cv2.cv.CV_FOURCC(*'DIVX')
    video = cv2.VideoWriter('{}/depth_{}.avi'.format(self.subfolder, filename), fourcc,
                            self.fps, (height, width))
    if not video:
        raise EnvironmentError("Error in creating video writer")

    for i in range(self.joints.shape[0]):
        jt = self.joints[i]
        img = self.plotResult3D_OS(sequence.data[i].dpt, sequence.data[i].T, sequence.data[i].gt3Dorig, jt,
                                   showPC=showPC, showGT=showGT, niceColors=niceColors,
                                   width=width, height=height)
        img = numpy.flipud(img)
        img = img[:, :, [2, 1, 0]]  # change color channels for OpenCV
        img = cv2.resize(img, (height, width))
        if plotFrameNumbers:
            if sequence.data[i].subSeqName == 'ref':
                cv2.putText(img, "Reference Frame {}".format(i), (20, 20),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255))
            # plot frame number
            cv2.putText(img, "{}".format(i), (height - 50, width - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.4, (0, 0, 255))
        # write frame
        video.write(img)
        pbar.update(i)

    video.release()
    del video
    cv2.destroyAllWindows()
    pbar.finish()
def saveVideoFrames(self, filename, images):
    """
    Create a video with synthesized images
    :param filename: name of file to save
    :param images: video data
    :return: None
    """
    txt = 'Saving {}'.format(filename)
    pbar = pb.ProgressBar(maxval=images.shape[0], widgets=[txt, pb.Percentage(), pb.Bar()])
    pbar.start()
    height = width = 128

    # Define the codec and create VideoWriter object
    fourcc = cv2.cv.CV_FOURCC(*'DIVX')
    video = cv2.VideoWriter('{}/synth_{}.avi'.format(self.subfolder, filename), fourcc,
                            self.fps, (height, width))
    if not video:
        raise EnvironmentError("Error in creating video writer")

    for i in range(images.shape[0]):
        img = images[i]
        img = cv2.normalize(img, alpha=0, beta=255, norm_type=cv2.cv.CV_MINMAX, dtype=cv2.cv.CV_8UC1)
        img = cv2.cvtColor(img, cv2.cv.CV_GRAY2BGR)
        img = cv2.resize(img, (height, width))
        # write frame
        video.write(img)
        pbar.update(i)

    video.release()
    del video
    cv2.destroyAllWindows()
    pbar.finish()
def print_status_stream(title, stream):
    widgets = [title, FormatLabel(''), ' ', Percentage(), ' ', Bar(), ' ', RotatingMarker()]
    bar = None
    if sys.stderr.isatty():
        bar = progressbar.ProgressBar(widgets=widgets, max_value=255)

    def print_error(status):
        print(status['error'])

    def print_status(status):
        progress = status.get('progressDetail')
        if progress:
            widgets[1] = FormatLabel("%12s" % (status['status']))
            prog = int(round(255 * ((progress['current'] / progress['total']))))
            if bar is not None:
                bar.update(prog)

    def print_unknown(status):
        print(status)

    for line in stream:
        status = json.loads(line.decode('utf8'))
        if 'error' in status:
            print_error(status)
        elif 'status' in status:
            print_status(status)
        else:
            print_unknown(status)
def prepare_h5py(train_image, train_label, test_image, test_label, data_dir, shape=None):
    image = np.concatenate((train_image, test_image), axis=0).astype(np.uint8)
    label = np.concatenate((train_label, test_label), axis=0).astype(np.uint8)

    print('Preprocessing data...')

    import progressbar
    bar = progressbar.ProgressBar(
        maxval=100,
        widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()]
    )
    bar.start()

    f = h5py.File(os.path.join(data_dir, 'data.hy'), 'w')
    with open(os.path.join(data_dir, 'id.txt'), 'w') as data_id:
        for i in range(image.shape[0]):
            if i % (image.shape[0] / 100) == 0:
                bar.update(i / (image.shape[0] / 100))
            grp = f.create_group(str(i))
            data_id.write('{}\n'.format(i))
            if shape:
                grp['image'] = np.reshape(image[i], shape, order='F')
            else:
                grp['image'] = image[i]
            label_vec = np.zeros(10)
            label_vec[label[i] % 10] = 1
            grp['label'] = label_vec.astype(np.bool)
    bar.finish()
    f.close()
    return
def __init__(self):
    super().__init__()
    self.label = Label()

    # Got messes with sys.stdout and sys.stderr in ways that confuse progressbar and cause it
    # to output on the wrong one. This can be worked around by passing in a new stream, but that
    # stream can't be the same instance as sys.stdout or sys.stderr, so I make a new one here
    # that forwards everything.
    class StreamWrapper:
        def __getattr__(self, k):
            return getattr(sys.stdout, k)

    self.bar = progressbar.ProgressBar(fd=StreamWrapper(),
                                       widgets=[self.label, ' ', progressbar.Bar(), ' ',
                                                progressbar.Percentage(), ' '])
def _create_pbar(self, max_iter):
    """
    Creates a progress bar.
    """
    self.grad_iter = 0
    self.pbar = pb.ProgressBar()
    self.pbar.widgets = ["Optimizing: ", pb.Percentage(), " ",
                         pb.Bar(marker=pb.AnimatedMarker()), " ", pb.ETA()]
    self.pbar.maxval = max_iter
def convert_dataset(indices, name):
    # Open a TFRecordWriter
    filename = os.path.join(FLAGS.out, name + '.tfrecords')
    writeOpts = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
    writer = tf.python_io.TFRecordWriter(filename, options=writeOpts)

    # Load each data sample (image_a, image_b, flow) and write it to the TFRecord
    count = 0
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(indices)).start()
    for i in indices:
        image_a_path = os.path.join(FLAGS.data_dir, '%05d_img1.ppm' % (i + 1))
        image_b_path = os.path.join(FLAGS.data_dir, '%05d_img2.ppm' % (i + 1))
        flow_path = os.path.join(FLAGS.data_dir, '%05d_flow.flo' % (i + 1))

        image_a = imread(image_a_path)
        image_b = imread(image_b_path)

        # Convert from RGB -> BGR
        image_a = image_a[..., [2, 1, 0]]
        image_b = image_b[..., [2, 1, 0]]

        # Scale from [0, 255] -> [0.0, 1.0]
        image_a = image_a / 255.0
        image_b = image_b / 255.0

        image_a_raw = image_a.tostring()
        image_b_raw = image_b.tostring()
        flow_raw = open_flo_file(flow_path).tostring()

        example = tf.train.Example(features=tf.train.Features(feature={
            'image_a': _bytes_feature(image_a_raw),
            'image_b': _bytes_feature(image_b_raw),
            'flow': _bytes_feature(flow_raw)}))
        writer.write(example.SerializeToString())
        pbar.update(count + 1)
        count += 1
    writer.close()
def setup_progressbar(self):
    from progressbar import ProgressBar, Bar, Percentage
    return ProgressBar(widgets=[Bar(), ' ', Percentage()])
def setup_progressbar(self):
    from progressbar import ProgressBar, FileTransferSpeed, Bar, Percentage, ETA
    return ProgressBar(widgets=[FileTransferSpeed(), ' <<<', Bar(), '>>> ', Percentage(), ' ', ETA()])
def __init__(self, widgets=None, **kwargs):
    import progressbar as pb
    logging.Handler.__init__(self)

    if widgets is None:
        class CommaProgress(pb.widgets.WidgetBase):
            def __call__(self, progress, data):
                return '{value:,} of {max_value:,}'.format(**data)

        widgets = [' ', CommaProgress(), ' (', pb.Percentage(), ') ', pb.Bar(), ' ', pb.ETA()]

    self.pbar_args = {'widgets': widgets}
    self.pbar_args.update(kwargs)
def create_progress_bar(message):
    widgets = [
        message,
        progressbar.Counter(),
        ' ',
        progressbar.Percentage(),
        ' ',
        progressbar.Bar(),
        progressbar.AdaptiveETA()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets)
    return pbar
def _get_progress_widgets(self):
    """
    Returns the progress widgets for a file download.
    """
    format_custom_text = progressbar.FormatCustomText(
        'Fetching [ %(file)s ] :: ', dict(file=self.remote_file_name),
    )
    widgets = [
        format_custom_text,
        progressbar.ETA(),
        progressbar.Percentage(),
        progressbar.Bar(),
    ]
    return widgets
def transfer(self):
    image_reshape = np.ndarray(shape=(self.pre_images.shape[0], self.output_rows, self.output_cols, 3),
                               dtype=np.float16)
    widgets = ['Transfer: ', pbar.Percentage(), ' ', pbar.Bar('>'), ' ', pbar.ETA()]
    image_bar = pbar.ProgressBar(widgets=widgets, maxval=self.pre_images.shape[0]).start()
    for i in range(0, self.pre_images.shape[0]):
        image = self.pre_images[i].reshape(self.pre_img_rows, self.pre_img_cols)
        image = image.astype('uint8')
        im = Image.fromarray(image)  # monochromatic image
        imrgb = im.convert('RGB')
        imrgb = imrgb.resize((self.output_rows, self.output_cols), Image.ANTIALIAS)

        im = np.array(imrgb, dtype=np.float16)
        im[:, :, 0] -= imagenet_mean['R']
        im[:, :, 1] -= imagenet_mean['G']
        im[:, :, 2] -= imagenet_mean['B']
        # 'RGB' -> 'BGR', historical reasons in OpenCV
        im = im[:, :, ::-1]
        image_reshape[i] = im

        # test for correct conversion:
        # if i < 3:
        #     img = Image.fromarray(np.uint8(im))
        #     img.save(str(i) + '.jpeg', 'jpeg')

        image_bar.update(i + 1)
    image_bar.finish()

    print('image_reshape:', image_reshape.shape)
    return image_reshape
def _generate_negative_patches(self, negative_image_files, window_size, step, pyramid_scale, threshold_prob):
    widgets = ["Generating negative samples which represent high probability: ",
               progressbar.Percentage(), " ", progressbar.Bar(), " ", progressbar.ETA()]
    pbar = progressbar.ProgressBar(maxval=len(negative_image_files), widgets=widgets).start()

    for i, image_file in enumerate(negative_image_files):
        image = cv2.imread(image_file)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

        # detect objects in the image
        (boxes, probs) = self.run(image, window_size, step, pyramid_scale, threshold_prob,
                                  do_nms=False, show_result=False, show_operation=False)
        pbar.update(i)

        for (y1, y2, x1, x2), prob in zip(boxes, probs):
            negative_patch = cv2.resize(image[y1:y2, x1:x2],
                                        (window_size[1], window_size[0]),
                                        interpolation=cv2.INTER_AREA)
            yield negative_patch, prob

    pbar.finish()

# todo: code review
def createSQL(table, values, name='insert'):
    '''
    Generate the SQL insert line, breaking each insert into up to ~1k values
    and up to ~1k inserts (~1M values total for each SQL file)
    '''
    logger.info('Generating SQL file')
    queryInsert = 'INSERT INTO %s (itemid,clock,num,value_min,value_avg,value_max) VALUES' % table
    i = 0  # Controls the progress bar
    x = 0  # Controls number of inserts in one line
    y = 0  # Controls number of lines in one file
    z = 0  # Controls number of file name
    valuesLen = values.__len__()
    sqlFile = '%s.sql.%d' % (name, z)
    logger.debug('Total items for %s: %d' % (name, valuesLen))

    if valuesLen > 0:
        bar = ProgressBar(maxval=valuesLen,
                          widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        for value in values:
            i += 1
            x += 1
            if x != 1:  # First line only
                sqlInsert = '%s,%s' % (sqlInsert, value)
            else:
                sqlInsert = value
            if y >= 1000:  # If there are more than 1k lines, write to a new file
                z += 1
                y = 0
            if x >= 1000 or i == valuesLen:  # If there are more than 1k values or we finished our list, write to file
                sqlFile = '%s.sql.%d' % (name, z)
                fileAppend(f=sqlFile, content='%s %s;\n' % (queryInsert, sqlInsert))
                x = 0
                y += 1
                sqlInsert = ''
            if args.loglevel.upper() != 'DEBUG':  # Don't print progressbar if in debug mode
                bar.update(i)
        bar.finish()
    else:
        logger.warning('No values received')
def discovery_disable_all(status=0):
    """
    Change the status of all *auto* discoveries
    Status 0 = enable
    Status 1 = disable
    """
    logger.info('Disabling all network discoveries')
    druleids = zapi.drule.get(output=['druleid', 'iprange', 'name', 'proxy_hostid', 'status'],
                              selectDChecks='extend',
                              filter={'status': 0})
    if (druleids.__len__() == 0):
        logger.info('Done')
        return
    bar = ProgressBar(maxval=druleids.__len__(),
                      widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for x in druleids:
        params_disable = {
            'druleid': x['druleid'],
            'iprange': x['iprange'],
            'name': x['name'],
            'dchecks': x['dchecks'],
            'status': 1
        }
        out = zapi.drule.update(**params_disable)
        logger.echo = False
        if out:
            logger.debug('\tNew status: %s (%s) --> %d' % (x['name'], out['druleids'], status))
        else:
            logger.warning('\tFAILED to change status: %s (%s) --> %d' % (x['name'], out['druleids'], status))
        i += 1
        bar.update(i)
    logger.echo = True
    bar.finish()
    logger.info('Done')
    return