The following 50 code examples, extracted from open-source Python projects, illustrate how to use sys.stdout.write(). The snippets are shown as they appear in each project, so they assume imports such as "from sys import stdout, stderr"; any other names come from the surrounding project code.
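Most of the examples share one core pattern: write a string without a trailing newline, often prefixed with '\r' so the cursor returns to the start of the line, then call flush() so the text bypasses Python's output buffering and appears immediately. As a quick baseline before the project examples, here is a minimal self-contained sketch of that pattern; the function name and timing are illustrative, not taken from any project below.

from sys import stdout
import time

def progress_demo(total=100):
    # Illustrative sketch: '\r' returns the cursor to the start of the line
    # and flush() forces the buffered text out, so each write redraws the
    # previous status in place.
    for i in range(1, total + 1):
        stdout.write("\rProgress: %3d%%" % (100 * i // total))
        stdout.flush()
        time.sleep(0.01)
    stdout.write("\n")  # move off the status line when finished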
def loadfromfile(filename, charset=None):
    """Loads data set from filename."""
    try:
        f = open(filename, 'rb')
    except OSError as e:
        stderr.write("Couldn't open data set file, error: {0}\n".format(e))
        return None
    # Close the file whether or not unpickling succeeds (the original
    # attached finally to the outer try, which raised NameError when
    # open() itself failed).
    try:
        dataset = pickle.load(f)
    except Exception as e:
        stderr.write("Couldn't load data set, error: {0}\n".format(e))
        return None
    else:
        stderr.write("Loaded data set from {0}\n".format(filename))
        return dataset
    finally:
        f.close()
def ask_username(config):
    stdout.write("Username: ".encode('utf-8'))
    raw_username = raw_input()
    try:
        get_user(config.temboard['users'], raw_username)
    except HTTPError:
        pass
    except ConfigurationError:
        pass
    else:
        stdout.write("User already exists.\n")
        return ask_username(config)
    try:
        username = raw_username
        validate_parameters({'username': username},
                            [('username', T_USERNAME, False)])
    except HTTPError:
        stdout.write("Invalid username.\n")
        return ask_username(config)
    return username
def print_status(progress, file_size, start):
    """
    This function - when passed as `on_progress` to `Video.download` -
    prints out the current download progress.

    :param progress: The length of the currently downloaded bytes.
    :param file_size: The total size of the video.
    :param start: The time when the download started.
    """
    percent_done = int(progress) * 100. / file_size
    done = int(50 * progress / int(file_size))
    dt = (clock() - start)
    if dt > 0:
        stdout.write("\r [%s%s][%3.2f%%] %s at %s/s " %
                     ('=' * done, ' ' * (50 - done), percent_done,
                      sizeof(file_size), sizeof(progress // dt)))
        stdout.flush()
def extract_all_features(save_dir, data_dir=DATA_DIR, extension=".cell"):
    from naive_bayes import extract_nb_features
    from random_forest import extract_rf_features
    from svc1 import extract_svc1_features
    from svc2 import extract_svc2_features
    import subprocess

    create_dir_if_not_exists(save_dir + '/knn_cells/')
    subprocess.run([
        'go', 'run', dirname + '/kNN.go',
        '-folder', data_dir + '/',
        '-new_path', save_dir + '/knn_cells/',
        '-extension', extension])

    # extract_features(extract_nb_features, save_dir + '/nb_cells',
    #                  data_dir=data_dir, extension=extension,
    #                  model_name="naive bayes")
    extract_features(extract_rf_features, save_dir + '/rf_cells',
                     data_dir=data_dir, extension=extension,
                     model_name="random forest")
    extract_features(extract_svc1_features, save_dir + '/svc1_cells',
                     data_dir=data_dir, extension=extension, model_name="svc1")
    extract_features(extract_svc2_features, save_dir + '/svc2_cells',
                     data_dir=data_dir, extension=extension, model_name="svc2")

    stdout.write("Finished extracting features\n")
def import_data(data_dir=DATA_DIR, in_memory=True, extension=".cell"):
    """
    Reads all of the files in the `data_dir` and returns all of the contents
    in a variable.

    @param data_dir is a string with the name of the data directory
    @param in_memory is a boolean value. If true, it pulls all the data into memory

    @return if in_memory == True:
                a tuple with the following format:
                    ([[size, incoming]], [webpage_label])
                where incoming is 1 for an incoming packet and -1 otherwise
            else:
                a tuple with the following format: ([paths], [webpage_label])
    """
    stdout.write("Starting data import\n")
    if in_memory:
        return pull_data_in_memory(data_dir, extension)
    else:
        return get_files(data_dir, extension)
def parallel_cone(pipe, cells, time, cone_input, cone_layer, Vis_dark,
                  Vis_resting_potential):
    # Initialize array of cone_response copying cone_input
    cone_response = cone_input

    for cell in cells:
        if multiprocessing.current_process().name == "root":
            progress = 100 * (cell - cells[0]) / len(cells)
            stdout.write("\r progress: %d %%" % progress)
            stdout.flush()

        # Time-driven simulation
        for t in np.arange(0, time):
            # Update dynamics of the model
            cone_layer[cell].feedInput(cone_input[cell, t])
            cone_layer[cell].update()
            # Record response
            cone_response[cell, t] = (cone_layer[cell].LF_taum.last_values[0] -
                                      cone_layer[cell].LF_tauh.last_values[0] -
                                      Vis_dark - Vis_resting_potential)

    pipe.send(cone_response[cells, :])
    pipe.close()

#! ================
#! Class runNetwork
#! ================
def update_web_event(self):
    web_event = os.path.join(_base_dir, "web", "events-%s.json" % self._username)
    if not os.path.exists(web_event):
        self.init_event_outfile()

    json_events = self.jsonify_events()
    # self.bot.logger.info('####### Writing %s' % json_events)
    try:
        with open(web_event, "w") as outfile:
            json.dump(json_events, outfile)
    except (IOError, ValueError) as e:
        self.bot.logger.info('[x] Error while opening events file for write: %s' % e, 'red')
    except:
        raise FileIOException("Unexpected error writing to {}".format(web_event))
def main(argv, environ):
    parser = ArgumentParser(
        prog='temboard-agent-adduser',
        description="Add a new temboard-agent user.",
        argument_default=UNDEFINED_ARGUMENT,
    )
    args = parser.parse_args(argv)
    # Load configuration from the configuration file.
    config = load_configuration(
        specs=list_options_specs(),
        args=args, environ=environ,
    )

    username = ask_username(config)
    password = ask_password()
    hash_ = hash_password(username, password).decode('utf-8')
    try:
        with open(config.temboard['users'], 'a') as fd:
            fd.write("%s:%s\n" % (username, hash_))
    except IOError as e:
        raise UserError(str(e))
    else:
        stdout.write("Done.\n")
def test(self, dataset, subset='test', name='Test'):
    global g_args
    train_writer = tf.summary.FileWriter(
        os.path.join(hparams.SUMMARY_DIR,
                     str(datetime.datetime.now().strftime("%m%d_%H%M%S")) +
                     ' ' + hparams.SUMMARY_TITLE), g_sess.graph)
    cli_report = {}
    for data_pt in dataset.epoch(
            subset, hparams.BATCH_SIZE * hparams.MAX_N_SIGNAL):
        # note: this disables dropout during test
        to_feed = dict(
            zip(self.train_feed_keys, (
                np.reshape(data_pt[0], [hparams.BATCH_SIZE,
                                        hparams.MAX_N_SIGNAL,
                                        -1,
                                        hparams.FEATURE_SIZE]),
                1.)))
        step_summary, step_fetch = g_sess.run(
            self.valid_fetches, to_feed)[:2]
        train_writer.add_summary(step_summary)
        stdout.write('.')
        stdout.flush()
        _dict_add(cli_report, step_fetch)
    stdout.write(name + ': %s\n' % (
        _dict_format(cli_report)))
def before_exit():
    lines_of_code = process_history()
    if not PySession.save or len(lines_of_code) == 0:
        stdout.write(DO_NOTHING)
        return

    filename = expanduser(os.getenv('PYSESSION_FILENAME', 'session.py'))

    if PySession.save_locally:
        stdout.write(SAVING_FILE.format(filename=filename))
        PySession.save_to_file('\n'.join(lines_of_code), filename)
        stdout.write(SUCCESS)
        return

    try:
        stdout.write(SAVING_GIST.format(filename=filename))
        gist_response = PySession.save_to_gist('\n'.join(lines_of_code), filename)
        gist_url = gist_response['html_url']
        PySession.save_gist_url(gist_url)
        webbrowser.open_new_tab(gist_url)
        stdout.write(SUCCESS)
    except:
        stdout.write(FAILED)
        PySession.save_to_file('\n'.join(lines_of_code), filename)
def notify(self, msg):
    if isinstance(msg, basestring):
        print msg
    elif msg[0] == "PROGRESSBAR":
        n = len(self.progressbars)
        name, action = msg[1:]
        if action == "start":
            stdout.write(term.CLRSCR())
            self.progressbars[name] = term.ProgressBar()
        elif action == "end":
            stdout.write(term.CLRSCR())
            del self.progressbars[name]
        else:
            self.progressbars[name].frac = action
        stdout.write(term.CUP(0, 0))
        for name, progressbar in self.progressbars.iteritems():
            stdout.write("%s : %s\n" % (progressbar, name))
def loadfromfile(cls, infile):
    """Load model parameters from file and rebuild model."""
    with np.load(infile) as f:
        # Extract hyperparams and position
        p = f['p']
        hparams = pickle.loads(p.tobytes())
        hyper, epoch, pos = hparams['hyper'], hparams['epoch'], hparams['pos']

        # Load matrices
        pvalues = {n: f[n] for n in cls.pnames}

    # Create instance
    if isinstance(infile, str):
        stdout.write("Loaded model parameters from {0}\n".format(infile))
    stdout.write("Rebuilding model...\n")
    model = cls(hyper, epoch, pos, pvalues)
    return model
def savetofile(self, outfile):
    """Save model parameters to file."""
    # Pickle non-matrix params into bytestring, then convert to numpy byte array
    pklbytes = pickle.dumps({'hyper': self.hyper, 'epoch': self.epoch,
                             'pos': self.pos},
                            protocol=pickle.HIGHEST_PROTOCOL)
    p = np.fromstring(pklbytes, dtype=np.uint8)

    # Gather parameter matrices and names
    pvalues = {n: m.get_value() for n, m in self.params.items()}

    # Now save params and matrices to file
    try:
        np.savez_compressed(outfile, p=p, **pvalues)
    except OSError as e:
        raise e
    else:
        if isinstance(outfile, str):
            stdout.write("Saved model parameters to {0}\n".format(outfile))
def build_onehots(self, vocab_size=None):
    """Build one-hot encodings of each sequence."""
    # If we're passed a charset size, great - if not, fall back to inferring vocab size
    if vocab_size:
        self.charsize = vocab_size
        vocab = vocab_size
    else:
        vocab = self.charsize

    stderr.write("Constructing one-hot vector data...")
    stderr.flush()

    time1 = time.time()

    # These can be large, so we don't necessarily want them on the GPU
    # Thus they're not Theano shared vars
    # Also, numpy fancy indexing is fun!
    self.x_onehots = np.eye(vocab, dtype=th.config.floatX)[self.x_array]
    self.y_onehots = np.eye(vocab, dtype=th.config.floatX)[self.y_array]

    time2 = time.time()

    stderr.write("done!\nTook {0:.4f} ms.\n".format((time2 - time1) * 1000.0))
def buildmodelparams(self, hyper, checkpointdir=None):
    """Builds model parameters from given hyperparameters and charset size.
    Optionally saves checkpoint immediately after building if path specified.
    """
    useclass = self.modeltypes[self.modeltype]
    self.model = useclass(hyper)

    if checkpointdir:
        # Compile training functions
        self.model._build_t()

        # Get initial loss estimate
        stderr.write("Calculating initial loss estimate...\n")

        # We don't need anything fancy or long, just a rough baseline
        data_len = self.valid.batchepoch(16)
        loss_len = 20 if data_len >= 20 else data_len
        loss = self.model.calc_loss(self.valid, 0, batchsize=8, num_examples=loss_len)

        stderr.write("Initial loss: {0:.3f}\n".format(loss))
        stderr.write("Initial log loss: {0:.3f}\n".format(log(loss)))

        # Take checkpoint
        self.newcheckpoint(loss, savedir=checkpointdir)
def _vis_graph(graph, points, worker, status):
    total_points = len(points)
    visible_edges = []
    if status:
        t0 = default_timer()
        points_done = 0
    for p1 in points:
        for p2 in visible_vertices(p1, graph, scan='half'):
            visible_edges.append(Edge(p1, p2))
        if status:
            points_done += 1
            avg_time = round((default_timer() - t0) / points_done, 3)
            time_stat = (points_done, total_points - points_done, avg_time)
            status = '\r\033[' + str(21 * worker) + 'C[{:4}][{:4}][{:5.3f}] \r'
            stdout.write(status.format(*time_stat))
            stdout.flush()
    return visible_edges
def download_file(self, url: str, filepath: str, fname="", progf=False):
    """Download a file from `url` to `filepath/name`"""
    r = self.session.get(url, stream=True)
    dlen = r.headers.get("content-length")
    step = (100 / int(dlen))
    prog = 0
    if not fname:
        fname = unquote(Path(r.url).name)
    with open(filepath + "/" + fname, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                prog += len(chunk)
                if progf:
                    progf(int(step * prog))
                f.write(chunk)
    if progf:
        progf(0)
    return filepath + "/" + fname
def bruteforce(host, port, uname, wordlist):
    try:
        lista = open(wordlist, "r")
    except IOError:
        stdout.write(colored(" [x] Error opening word list\n", "red", attrs=['bold']))
        exit()
    url = "http://" + host + ":" + port + "/"
    init = time()
    for l in lista:
        pwd = l.strip()
        try:
            r = get(url, auth=(uname, pwd), timeout=3)
        except:
            stdout.write(colored("\n [-] There was an error connecting to the router %s\n" % (host), "red", attrs=['bold']))
            exit()
        if r.status_code == 200:
            stdout.write(colored("\n\n [+] Cracked => %s:%s\n [+] Duration => %s seconds\n\n" % (uname, pwd, time() - init), "green", attrs=['bold']))
            lista.close()
            exit()
        else:
            stdout.write(colored("\r [-] Current login %s:%s" % (uname, pwd), "yellow", attrs=['bold']))
            stdout.flush()
    print ""
    lista.close()
def file_stats(max_lenght):
    Fl = 7 ** max_lenght
    Fb = (max_lenght + 2) * Fl
    if Fb >= 1000 and Fb < 1000000:
        Fbfinal = Fb / 1000
        stdout.write("[+] Lines : %s\n[+] File size : %i Kb\n\n" % (Fl, Fbfinal))
    elif Fb >= 1000000 and Fb < 1000000000:
        Fbfinal = Fb / 1000000
        stdout.write("[+] Lines : %s\n[+] File size : %i Mb\n\n" % (Fl, Fbfinal))
    elif Fb >= 1000000000:
        setlocale(LC_NUMERIC, '')
        Fbfinal = Fb / 1000000000
        formato = format("%.*f", (0, Fbfinal), True)
        # Parenthesize the concatenation so %-formatting applies to the
        # whole message, not just the last fragment
        stdout.write(("[+] Lines : %s\n[+] File size : " + formato + " Gb\n\n") % (Fl))
    else:
        stdout.write("[+] Lines : %s\n[+] File size : %i bytes\n\n" % (Fl, Fb))
    cmd = raw_input("[?] Do you want to continue [Y/N] : ")
    if cmd == "N" or cmd == "n":
        exit()
def generator(min_lenght, max_lenght, chars, name):
    lines = 0
    try:
        file = open(name, "w")
    except IOError:
        print "\n[x] Error : %s this path does not exist\n" % (name)
        exit()
    file_stats(max_lenght)
    print ""
    for n in range(min_lenght, max_lenght + 1):
        for xs in product(chars, repeat=n):
            lines = lines + 1
            string = ''.join(xs)
            file.write(string + "\n")
            stdout.write('\r[+] Saving character `%s`' % string)
            stdout.flush()
    print "\a"
    file.close()
def check_deploy_result(operation, console, appname, auth_header):
    i = 0
    while True:
        s = (i % 3 + 1) * '.'
        if len(s) < 3:
            s = s + (3 - len(s)) * ' '
        i += 1
        stdout.write("\r%s... %s " % (operation, s))
        stdout.flush()
        sleep(0.5)
        result = app_status(console, appname, auth_header)
        if result:
            stdout.write("\r%s... %s. " % (operation, result))
            stdout.flush()
            stdout.write("\n")
            return result
def download_and_uncompress_tarball(base_url, filename, data_dir):
    def _progress(count, block_size, total_size):
        stdout.write('\r>> Downloading %s %.1f%%' % (
            filename, float(count * block_size) / float(total_size) * 100.0))
        stdout.flush()

    tarball_url = base_url + filename
    filepath = osp.join(data_dir, filename)

    if not tf.gfile.Exists(osp.join(download_dir, model_dl)):
        filepath, _ = urllib.request.urlretrieve(tarball_url, filepath, _progress)
        print()
        statinfo = stat(filepath)
        print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
    else:
        print('{} tarball already exists -- not downloading'.format(filename))

    tarfile.open(filepath, 'r:*').extractall(data_dir)
def print_rule(self):
    """Print out rules"""
    stdout.write('======================================================\n')
    stdout.write('Rules:\n')
    for rule in self.rule:
        head = rule[0][0]
        tail = rule[0][1]
        confidence = rule[1]
        stdout.write('(')
        stdout.write(', '.join(head))
        stdout.write(')')
        stdout.write(' ==> ')
        stdout.write('(')
        stdout.write(', '.join(tail))
        stdout.write(')')
        stdout.write(' confidence = {0}\n'.format(round(confidence, 3)))
    stdout.write('======================================================\n')
def optimize(self, X, lmbd, Z=None, max_iter=1, tol=1e-5):
    if Z is None:
        batch_size = X.shape[0]
        K = self.D.shape[0]
        z_curr = np.zeros((batch_size, K))
    else:
        z_curr = np.copy(Z)

    self.train_cost, self.train_z = [], []
    feed = {self.X: X, self.Z: z_curr, self.lmbd: lmbd}
    for k in range(max_iter):
        z_curr[:], dz, cost = self.session.run(
            [self.step_optim, self.dz, self._cost], feed_dict=feed)
        self.train_cost += [cost]
        self.train_z += [np.copy(z_curr)]
        if dz < tol:
            print("\r{} reached optimal solution in {}-iteration"
                  .format(self.name, k))
            break
        out.write("\rIterative optimization ({}): {:7.1%} - {:.4e}"
                  "".format(self.name, k / max_iter, dz))
        out.flush()

    self.train_cost += [self.session.run(self._cost, feed_dict=feed)]
    print("\rIterative optimization ({}): {:7}".format(self.name, "done"))

    return z_curr
def print_progress(self, threshold=0, decimals=1, barLength=100):
    """Print a terminal progress bar."""
    # Based on @Greenstick's reply (https://stackoverflow.com/a/34325723)
    iteration = self.stream.tell()
    if iteration > self.file_size:
        return
    total = self.file_size
    if total == 0:
        return
    progress = 100.0 * iteration / total
    if self.progress and progress - self.progress < threshold:
        return
    self.progress = progress
    percents = ("%03." + str(decimals) + "f") % progress
    filledLength = int(round(barLength * iteration / float(total)))
    barText = '*' * filledLength + '-' * (barLength - filledLength)
    stdout.write('%s| %s%% Completed\r' % (barText, percents))
    stdout.flush()
def __init__(self, parallel):
    self.continuation_prompt = self.prompt
    self.parallel = parallel
    width, height = get_terminal_size() or MIN_TERM_SIZE
    if any(map((lambda s: s[0] < s[1]), zip((height, width), MIN_TERM_SIZE))):
        stdout.write("\x1b[8;{rows};{cols}t".format(rows=max(MIN_TERM_SIZE[0], height),
                                                    cols=max(MIN_TERM_SIZE[1], width)))
    if self.parallel:
        processes = cpu_count()
        self.__last_tasklist = None
        self.tasklist = {}
        self.pool = Pool(processes, lambda: signal(SIGINT, SIG_IGN))
        atexit.register(self.graceful_exit)
    self.reexec = ['status']
    self.__bind_commands()
    super(FrameworkConsole, self).__init__()
    self.do_loglevel('info')
    self.__start_docserver()
    self.do_clear('')
def wait_time(self, data={'waitSeconds': None}):
    def complete(i, wait):
        return ((100 * (float(i) / float(wait))) * 50) / 100

    if data['waitSeconds'] is not None:
        wait = data['waitSeconds'] + (random.randint(2, 4) / 3.33)
        print(I18n.get('Waiting %s seconds') % str(wait))
        c = i = 0
        while c < 50:
            c = complete(i, wait)
            time.sleep(wait - i if i == int(wait) else 1)
            out.write("[{}]\0\r".format('+' * int(c) + '-' * (50 - int(c))))
            out.flush()
            i += 1
        out.write("\n")
        out.flush()
        return data['waitSeconds']
    return 99999999
def __iter__(self):
    try:
        with open(self.__filename, 'rb') as file_in:
            while True:
                data = file_in.read(self.__chunksize)
                if not data:
                    if self.__progressbar:
                        stdout.write("\n")
                    break
                self.__readsofar += len(data)
                if self.__progressbar:
                    percent = self.__readsofar * 100 / self.__totalsize
                    stdout.write("\r{percent:3.0f}%".format(percent=percent))
                yield data
    except OSError as error:
        raise exception.SourceNotFound(error)
def create_admin(username):
    from eventit.eventit import db
    from eventit.models import User, Role
    import getpass
    from sys import stdout

    password = None
    password2 = None
    while not password or password != password2:
        password = getpass.getpass()
        password2 = getpass.getpass('Please, repeat your password: ')
        if not password or password != password2:
            stdout.write('Passwords do not match')
    user = User(username=username, email='', is_active=True, password=password)
    role_admin = Role.get_role_obj('admin')
    user.role = role_admin
    db.session.add(user)
    db.session.commit()
def LOG(message=None, type=None):
    if VERBOSITY <= 0:
        return
    elif VERBOSITY == 1:
        # minimal verbosity ... dot style output
        if type in MSGSCHEME_MIN:
            message = MSGSCHEME_MIN[type]
        if DO_COLOR and type in COLORSCHEME:
            message = COLORSCHEME[type] % message
        stdout.write("%s" % message)
        stdout.flush()
    else:
        if type in MSGSCHEME:
            message = MSGSCHEME[type] % message
        if DO_COLOR and type in COLORSCHEME:
            message = COLORSCHEME[type] % message
        if MODE_FUZZ:
            stdout.write("[FUZZ] %s\n" % (message))
        else:
            stdout.write("%s\n" % (message))
        stdout.flush()
def try_it(qu):
    stdout.write("\r{} ---> ".format(qu))
    stdout.flush()
    passed = 0
    req = None
    try:
        req = qu.run(c)
        if basic_test(req):
            passed = 1
            stdout.write("PASS\n")
        else:
            fails.append(err_format(qu, req))
            stdout.write("FAIL\n")
            print err_format(qu, req)
            exit()
    except (preqlerrors.TopologyError, preqlerrors.ValueTypeError,
            preqlerrors.NonexistenceError) as e:
        errors.append(err_format(qu, str(e.msg)))
        stdout.write("ERROR\n")
        stdout.flush()
    return passed, 1, req
def fetch():
    a = IPSet([])
    for blocklist in blocklists:
        r = requests.get(blocklist)
        for line in r.iter_lines():
            if linefilter(line):
                a.add(makeprefix(linefilter(line)))

    for prefix in b:
        if b.len() > 0 and b.__contains__(prefix) and not a.__contains__(prefix):
            a.discard(prefix)
            stdout.write('withdraw route ' + str(prefix) + nexthop)
            stdout.flush()

    for prefix in a:
        if a.__contains__(prefix) and not b.__contains__(prefix):
            stdout.write('announce route ' + str(prefix) + nexthop)
            stdout.flush()

    b.add(a)
def process_photos(photos):
    if 'error' in photos:
        print "Error = ", photos['error']
        raise Exception("Error in Response")

    no_of_photos = 0
    if 'data' not in photos:
        return
    while len(photos['data']) > 0:
        for photo in photos['data']:
            if 'tags' in photo:
                process_photo_tags(photo['tags'])
            if 'comments' in photo:
                process_photo_comments(photo['comments'])
            no_of_photos += 1
            stdout.write("\rNumber of Photos Processed = %d" % no_of_photos)
            stdout.flush()
        if 'paging' in photos and 'next' in photos['paging']:
            request_str = photos['paging']['next'].replace('https://graph.facebook.com/', '')
            request_str = request_str.replace('limit=25', 'limit=200')
            photos = graph.get(request_str)
        else:
            photos['data'] = []
def mangleException(self, pkt, reason=''):
    self.notifyBad('\nFENRIR PANIC : Process failed during MANGLING', 1, 1)
    if reason != '':
        self.notifyBad('Reason : ' + reason, 1)
    self.notify('Packet was logged to errorLogFile : FENRIR.err', 1)
    logfd = open('FENRIR.err', 'a')
    logfd.write('---DUMP BEGINS--------------------------------------------------------------------------------------\n')
    logfd.write('[*] Packet header SRC : ' + pkt[IP].src + ' (' + pkt[Ether].src +
                ')   DST : ' + pkt[IP].dst + ' (' + pkt[Ether].dst + ')\n')
    logfd.write('Packet dump :\n')
    logfd.write(str(ls(pkt)) + '\n')
    logfd.write('---DUMP ENDS----------------------------------------------------------------------------------------\n')
    logfd.close()

## fenrirPanic : unrecoverable exception handling ##
def init():
    height_term, width_term = get_terminal_size()
    height_min = COL_HEIGHT * HEIGHT + 2 + 9
    width_min = COL_WIDTH * WIDTH + 2 + 5
    if height_term < height_min or width_term < width_min:
        # resize the terminal to fit the minimum size to display the connect4 before exit
        stdout.write("\x1b[8;{h};{w}t".format(h=max(height_min, height_term),
                                              w=max(width_min, width_term)))
        exit('\033[91m' + 'The terminal was too small, you can now restart ' +
             '\033[1m' + 'Connect4' + '\033[0m')
    stdscr = curses.initscr()
    height, width = stdscr.getmaxyx()
    if height < height_min or width < width_min:
        # abort the program if the terminal can't be resized
        curses.endwin()
        exit('Please resize your terminal [%d%s%d] (minimum required %d%s%d)'
             % (width, 'x', height, width_min, 'x', height_min))
    curses.noecho()
    curses.cbreak()
    curses.curs_set(0)
    stdscr.keypad(1)
    # define the different colors
    if curses.can_change_color():
        defineColors()
    # return stdscr, width
    stdscr.clear()
    stdscr.border(0)
    return stdscr, width, height
def _usage(error_message=None):
    if error_message:
        stderr.write('ERROR: ' + error_message + linesep)
    stdout.write(linesep.join([
        'Usage:',
        '  list_versions.py [OPTION]... [DEPENDENCY]',
        'Examples:',
        '  list_versions.py go',
        '  list_versions.py -r docker',
        '  list_versions.py --rc docker',
        '  list_versions.py -l kubernetes',
        '  list_versions.py --latest kubernetes',
        'Options:',
        '-l/--latest  Include only the latest version of each major and'
        ' minor versions sub-tree.',
        '-r/--rc      Include release candidate versions.',
        '-h/--help    Prints this!',
        ''
    ]))
def __init__(self, file=None, stringio=False, encoding=None):
    if file is None:
        if stringio:
            self.stringio = file = py.io.TextIO()
        else:
            from sys import stdout as file
    elif py.builtin.callable(file) and not (
            hasattr(file, "write") and hasattr(file, "flush")):
        file = WriteFile(file, encoding=encoding)
    if hasattr(file, "isatty") and file.isatty() and colorama:
        file = colorama.AnsiToWin32(file).stream
    self.encoding = encoding or getattr(file, 'encoding', "utf-8")
    self._file = file
    self.hasmarkup = should_do_markup(file)
    self._lastlen = 0
    self._chars_on_current_line = 0
def write_out(fil, msg):
    # XXX sometimes "msg" is of type bytes, sometimes text which
    # complicates the situation.  Should we try to enforce unicode?
    try:
        # on py27 and above writing out to sys.stdout with an encoding
        # should usually work for unicode messages (if the encoding is
        # capable of it)
        fil.write(msg)
    except UnicodeEncodeError:
        # on py26 it might not work because stdout expects bytes
        if fil.encoding:
            try:
                fil.write(msg.encode(fil.encoding))
            except UnicodeEncodeError:
                # it might still fail if the encoding is not capable
                pass
            else:
                fil.flush()
                return
        # fallback: escape all unicode characters
        msg = msg.encode("unicode-escape").decode("ascii")
        fil.write(msg)
    fil.flush()
def _get(self, url, query=None, timeout=30):
    payload = self._generatePayload(query)
    a = 0
    while 1:
        try:
            a = self._session.get(url, headers=self._header,
                                  params=payload, timeout=timeout)
        except:
            # print(exc_info())
            a = a + 1
            if self.listening:
                stdout.write("_get " + url + " failed, retrying..." + str(a) + "\r")
                stdout.flush()
            continue
        break
    stdout.write("                                                  \r")
    stdout.flush()
    return a
def main():
    timings = False

    start = time.time()
    initialize()
    if timings:
        print('initialize {} s'.format(time.time() - start), file=stderr)

    start = time.time()
    command_table = load_command_table()
    if timings:
        print('load_command_table {} s'.format(time.time() - start), file=stderr)

    start = time.time()
    group_index = get_group_index(command_table)
    if timings:
        print('get_group_index {} s'.format(time.time() - start), file=stderr)

    start = time.time()
    snippets = get_snippets(command_table) if AUTOMATIC_SNIPPETS_ENABLED else []
    if timings:
        print('get_snippets {} s'.format(time.time() - start), file=stderr)

    while True:
        line = stdin.readline()
        start = time.time()
        request = json.loads(line)
        response_data = None
        if request['data'].get('request') == 'status':
            response_data = get_status()
            if timings:
                print('get_status {} s'.format(time.time() - start), file=stderr)
        elif request['data'].get('request') == 'hover':
            response_data = get_hover_text(group_index, command_table,
                                           request['data']['command'])
            if timings:
                print('get_hover_text {} s'.format(time.time() - start), file=stderr)
        else:
            response_data = get_completions(group_index, command_table,
                                            snippets, request['data'], True)
            if timings:
                print('get_completions {} s'.format(time.time() - start), file=stderr)
        response = {
            'sequence': request['sequence'],
            'data': response_data
        }
        output = json.dumps(response)
        stdout.write(output + '\n')
        stdout.flush()
        stderr.flush()
def __next__(self):
    """
    next overload. If display is true the latest statistics are displayed.

    :return: The next number in the iterator
    """
    if self.display:
        self.__restart_line()
        stdout.write(str(self))
        stdout.flush()
    if self.current >= self.end:
        raise StopIteration
    self.current += self.step
    return self.current - self.step
def __restart_line():
    """
    Writes a carriage return to stdout and flushes. This allows writing to
    the same line.

    :return: None
    """
    stdout.write('\r')
    stdout.flush()
def update(self, n):
    mem = ""
    if self.show_mem:
        m = mem_usage()
        if m > 0:
            mem = " [ %0.1fMb used ]" % m
    try:
        stdout.write((self.m + mem + "      \r") %
                     (100 * (n - self.start + 1e-50) / (self.range + 1e-50)))
    except:
        stdout.write(("%s%0.1f%% " + mem + "\r") %
                     (self.m, 100 * (n - self.start + 1e-50) / (self.range + 1e-50)))
def __del__(self):
    stdout.write('\n')
def monitor(s):
    r = s.get(url=progress_url)
    try:
        progress_data = json.loads(r.text)
    except ValueError:
        print """No JSON object could be decoded.
        Get progress failed to return expected data.
        Return code: %s
        """ % (r.status_code)
        result = ['No JSON object could be decoded - get progress failed '
                  'to return expected data. Return code: %s' % (r.status_code),
                  False]
    # Timeout waiting for remote backup to complete
    # (since it sometimes fails) in 5s multiples
    global timeout
    timeout_count = timeout * 12  # timeout x 12 = number of iterations of 5s
    time_left = timeout
    while 'fileName' not in progress_data or timeout_count > 0:
        # Clears the line before re-writing to avoid artifacts
        stdout.write("\r\x1b[2K")
        stdout.write("\r\x1b[2K%s. Timeout remaining: %sm"
                     % (progress_data['alternativePercentage'], str(time_left)))
        stdout.flush()
        r = s.get(url=progress_url)
        progress_data = json.loads(r.text)
        time.sleep(5)
        timeout_count = timeout_count - 5
        if timeout_count % 12 == 0:
            time_left = time_left - 1
    if 'fileName' in progress_data:
        result = [progress_data['fileName'], True]
    return result
def download(s, l):
    filename = get_filename(s)
    if not filename:
        return False
    print "Filename found: %s" % filename
    print "Checking if url is valid"
    r = s.get(url=download_url + filename, stream=True)
    print "Status code: %s" % str(r.status_code)
    if int(r.status_code) == 200:
        print "Url returned '200', downloading file"
        if not create_backup_location(l):
            result = ['Failed to create backup location', False]
            return result
        date_time = datetime.datetime.now().strftime("%Y%m%d")
        with open(l + '/' + application + '-' + date_time + '.zip', 'wb') as f:
            file_total = 0
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
                    file_total = file_total + 1024
                    file_total_m = float(file_total) / 1048576
                    # Clears the line before re-writing to avoid artifacts
                    stdout.write("\r\x1b[2K")
                    stdout.write("\r\x1b[2K%.2fMB downloaded" % file_total_m)
                    stdout.flush()
        stdout.write("\n")
        result = ['Backup downloaded successfully', True]
        return result
    else:
        print "Download file not found on remote server - response code %s" % \
            str(r.status_code)
        print "Download url: %s" % (download_url + filename)
        result = ['Download file not found on remote server', False]
        return result
def LoadingCallBack(j, k):
    stdout.write("\r [+] Files: [{}] (strings: [{}])".format(j, k))
    stdout.flush()
def create_character(n, c, ifont):
    size = ifont.getsize(c)
    image = Image.new('RGBA', size)
    draw = ImageDraw.Draw(image)
    draw.text((0, 0), c, font=ifont)
    data = list(image.getdata())
    print 'const PROGMEM unsigned char font%d_%02x[] = {' % (n, ord(c))
    for i in range(len(data)):
        stdout.write('0x%02x, ' % data[i][0])
    print '};'
    return size