The following 50 code examples, extracted from Python open source projects, illustrate how to use sys.stderr.write().
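Before the extracted examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the basic pattern they all share: writing diagnostics to standard error so they stay separate from normal program output.

import sys

def warn(message):
    # stderr is the conventional channel for diagnostics, so the message
    # still reaches the terminal even when stdout is piped elsewhere.
    sys.stderr.write("warning: %s\n" % message)

warn("disk space is low")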
def zipdir(archivename, basedir):
    '''Zip directory, from J.F. Sebastian http://stackoverflow.com/'''
    assert os.path.isdir(basedir)
    with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
        for root, dirs, files in os.walk(basedir):
            # NOTE: ignore empty directories
            for fn in files:
                if fn[-4:] != '.zip':
                    absfn = os.path.join(root, fn)
                    zfn = absfn[len(basedir) + len(os.sep):]  # XXX: relative path
                    z.write(absfn, zfn)

# ================ Inventory input data and create data structure =================
def loadfromfile(filename, charset=None):
    """Loads data set from filename."""
    try:
        f = open(filename, 'rb')
    except OSError as e:
        stderr.write("Couldn't open data set file, error: {0}\n".format(e))
        return None
    else:
        try:
            dataset = pickle.load(f)
        except Exception as e:
            stderr.write("Couldn't load data set, error: {0}\n".format(e))
            return None
        else:
            stderr.write("Loaded data set from {0}\n".format(filename))
            return dataset
        finally:
            f.close()
def printexpr(expr_string):
    """printexpr(expr) - print the value of the expression, along with
    line number and filename."""
    stack = extract_stack()[-2:][0]
    actualCall = stack[3]
    left = string.find(actualCall, '(')
    right = string.rfind(actualCall, ')')
    caller_globals, caller_locals = _caller_symbols()
    expr = eval(expr_string, caller_globals, caller_locals)
    varType = type(expr)
    stderr.write("%s:%d> %s == %s (%s)\n" % (
        stack[0], stack[1],
        string.strip(actualCall[left + 1:right])[1:-1],
        repr(expr), str(varType)[7:-2]))
def buildmodelparams(self, hyper, checkpointdir=None):
    """Builds model parameters from given hyperparameters and charset size.
    Optionally saves checkpoint immediately after building if path specified.
    """
    useclass = self.modeltypes[self.modeltype]
    self.model = useclass(hyper)
    if checkpointdir:
        # Compile training functions
        self.model._build_t()
        # Get initial loss estimate
        stderr.write("Calculating initial loss estimate...\n")
        # We don't need anything fancy or long, just a rough baseline
        data_len = self.valid.batchepoch(16)
        loss_len = 20 if data_len >= 20 else data_len
        loss = self.model.calc_loss(self.valid, 0, batchsize=8, num_examples=loss_len)
        stderr.write("Initial loss: {0:.3f}\n".format(loss))
        stderr.write("Initial log loss: {0:.3f}\n".format(log(loss)))
        # Take checkpoint
        self.newcheckpoint(loss, savedir=checkpointdir)
def integer_list_file(cls, filename, values, bits=None):
    """
    Write a list of integers to a file.
    If a file of the same name exists, its contents are replaced.

    See L{HexInput.integer_list_file} for a description of the file format.

    @type  filename: str
    @param filename: Name of the file to write.

    @type  values: list( int )
    @param values: List of integers to write to the file.

    @type  bits: int
    @param bits:
        (Optional) Number of bits of the target architecture.
        The default is platform dependent. See: L{HexOutput.integer_size}
    """
    fd = open(filename, 'w')
    for integer in values:
        print >> fd, cls.integer(integer, bits)
    fd.close()
def string_list_file(cls, filename, values):
    """
    Write a list of strings to a file.
    If a file of the same name exists, its contents are replaced.

    See L{HexInput.string_list_file} for a description of the file format.

    @type  filename: str
    @param filename: Name of the file to write.

    @type  values: list( str )
    @param values: List of strings to write to the file.
    """
    fd = open(filename, 'w')
    for string in values:
        print >> fd, string
    fd.close()
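Note that the two helpers above use the Python 2 `print >> fd` chevron syntax, which is a syntax error under Python 3. A minimal sketch of the same write-lines-to-a-file pattern in modern Python (the function name mirrors the example above but is otherwise illustrative):

def string_list_file(filename, values):
    # A 'with' block replaces the manual open()/close() pair and closes
    # the file even if a write fails.
    with open(filename, 'w') as fd:
        for line in values:
            print(line, file=fd)

string_list_file('example.txt', ['first', 'second'])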
def __logfile_error(self, e):
    """
    Shows an error message to standard error
    if the log file can't be written to.

    Used internally.

    @type  e: Exception
    @param e: Exception raised when trying to write to the log file.
    """
    from sys import stderr
    msg = "Warning, error writing log file %s: %s\n"
    msg = msg % (self.logfile, str(e))
    stderr.write(DebugLog.log_text(msg))
    self.logfile = None
    self.fd = None
def SaveToFile(self, filename, canvas):
    if hasattr(getattr(filename, "write", None), '__call__'):
        myfile = 0
        f = filename
        filename = makeFileName(getattr(filename, 'name', ''))
    else:
        myfile = 1
        filename = makeFileName(filename)
        f = open(filename, "wb")
    data = self.GetPDFData(canvas)
    if isUnicode(data):
        data = data.encode('latin1')
    f.write(data)
    if myfile:
        f.close()
        import os
        if os.name == 'mac':
            from reportlab.lib.utils import markfilename
            markfilename(filename)  # do platform specific file junk
    if getattr(canvas, '_verbosity', None):
        print('saved %s' % (filename,))
def _usage(error_message=None):
    if error_message:
        stderr.write('ERROR: ' + error_message + linesep)
    stdout.write(linesep.join([
        'Usage:',
        '  list_versions.py [OPTION]... [DEPENDENCY]',
        'Examples:',
        '  list_versions.py go',
        '  list_versions.py -r docker',
        '  list_versions.py --rc docker',
        '  list_versions.py -l kubernetes',
        '  list_versions.py --latest kubernetes',
        'Options:',
        '-l/--latest  Include only the latest version of each major and'
        ' minor versions sub-tree.',
        '-r/--rc      Include release candidate versions.',
        '-h/--help    Prints this!',
        ''
    ]))
def __init__(self, idx, field, filename):
    """
    idx: index of probing point. type: tuple-3
    field: field to probe. type: numpy.array
    filename: recording file name. type: str
    """
    self.idx = tuple(idx)
    self.field = field
    f_name = str(filename)
    if exists(f_name):
        stderr.write('Warning: ' + f_name + ' already exists.\n')
    try:
        self.f = open(f_name, 'w')
    except IOError:
        self.f = None
        print('Warning: Can\'t open file ' + f_name + '.\n')
def maximize_likelihood(self, data, responsibilities, weights, cmask=None):
    if not (cmask is None or cmask.shape == () or np.all(cmask)):  # cluster reduction
        responsibilities = responsibilities[:, cmask]
        self.names = list(compress(self.names, cmask))  # TODO: make self.names a numpy array?

    weights_combined = responsibilities * weights

    self.variables = np.dot(weights_combined.T, data.frequencies)
    with np.errstate(invalid='ignore'):  # if no training data is available for any class
        np.divide(self.variables,
                  weights_combined.sum(axis=0, keepdims=True, dtype=types.large_float_type).T,
                  out=self.variables)  # normalize before update, self.variables is types.prob_type

    dimchange = self.update()  # create cache for likelihood calculations

    # TODO: refactor this block
    ll = self.log_likelihood(data)
    std_per_class = common.weighted_std(ll, weights_combined)
    weight_per_class = weights_combined.sum(axis=0, dtype=types.large_float_type)
    weight_per_class /= weight_per_class.sum()
    std_per_class_mask = np.isnan(std_per_class)
    skipped_classes = std_per_class_mask.sum()
    self.stdev = np.ma.dot(np.ma.MaskedArray(std_per_class, mask=std_per_class_mask), weight_per_class)
    stderr.write("LOG %s: mean class likelihood standard deviation is %.2f (omitted %i/%i classes due to invalid or insufficient data)\n" % (
        self._short_name, self.stdev, skipped_classes, self.num_components - skipped_classes))
    return dimchange, ll
def load_model_tuples(inseq, **kwargs):  # TODO: make generic
    cols = []
    names = []
    try:
        for rec in inseq:
            names.append(rec[0])
            for i, data in enumerate(rec[1:]):
                vec = np.asarray(data, dtype=np.float64)  # allow for large numbers
                try:
                    cols[i].append(vec)
                except IndexError:
                    cols.append([vec])
    except TypeError:
        stderr.write("Could not parse model definition line\n")
        exit(1)
    return [Model(np.vstack(v), names, **kwargs) for v in cols]

# TODO: add load_data from generic with data-specific parse_line function
def maximize_likelihood(self, data, responsibilities, weights, cmask=None):
    if not (cmask is None or cmask.shape == () or np.all(cmask)):  # cluster reduction
        responsibilities = responsibilities[:, cmask]

    weights_combined = responsibilities * weights

    weighted_meancoverage_samples = np.dot(data.covmeans.T, weights_combined)  # TODO: use np.average?
    weights_normalization = weights_combined.sum(axis=0, keepdims=True)

    self.params = weighted_meancoverage_samples / weights_normalization
    dimchange = self.update()  # create cache for likelihood calculations

    # TODO: refactor this block
    ll = self.log_likelihood(data)
    std_per_class = common.weighted_std(ll, weights_combined)
    weight_per_class = weights_combined.sum(axis=0, dtype=types.large_float_type)
    weight_per_class /= weight_per_class.sum()
    std_per_class_mask = np.isnan(std_per_class)
    skipped_classes = std_per_class_mask.sum()
    self.stdev = np.ma.dot(np.ma.MaskedArray(std_per_class, mask=std_per_class_mask), weight_per_class)
    stderr.write("LOG %s: mean class likelihood standard deviation is %.2f (omitted %i/%i classes due to invalid or insufficient data)\n" % (
        self._short_name, self.stdev, skipped_classes, self.num_components - skipped_classes))
    return dimchange, ll
def configure(args):
    try:
        kubernetes.config.kube_config.load_kube_config(
            client_configuration=config,
            context=args.context)
    except:
        stderr.write("warning: could not load kubeconfig\n")
        args.server = 'http://localhost:8080'
    if args.server:
        config.host = args.server
    if args.token:
        config.api_key['authorization'] = "bearer " + args.token
    if args.ca_certificate:
        config.ssl_ca_cert = args.ca_certificate

# get_client: return a Kubernetes API client.
def rc(self, val=None):
    """
    Get/Set the return code
    @params:
        `val`: The return code to be set. If it is None, return the return code. Default: `None`
            If val == -1000: the return code will be negative of current one. 0 will be '-0'
    @returns:
        The return code if `val` is `None`
        If rcfile does not exist or is empty, return 9999, otherwise return -rc
        A negative rc (including -0) means output files not generated
    """
    if val is None:
        if not path.exists(self.rcfile):
            return -1
        with open(self.rcfile) as f:
            return int(f.read().strip())
    else:
        with open(self.rcfile, 'w') as f:
            f.write(str(val))
def _prepScript(self):
    """
    Build the script, interpret the placeholders
    """
    script = self.proc.script.render(self.data)
    write = True
    if path.exists(self.script):
        f = open(self.script)
        prevscript = f.read()
        f.close()
        # no change to happen? script change will cause a different uid for a proc
        if prevscript == script:
            write = False
            self.proc.log("Script file exists: %s" % self.script, 'debug', 'SCRIPT_EXISTS')
    if write:
        with open(self.script, 'w') as f:
            f.write(script)
def show_io(input_dir, output_dir):
    ''' show directory structure and inputs and outputs to scoring program'''
    swrite('\n=== DIRECTORIES ===\n\n')
    # Show this directory
    swrite("-- Current directory " + pwd() + ":\n")
    write_list(ls('.'))
    write_list(ls('./*'))
    write_list(ls('./*/*'))
    swrite("\n")

    # List input and output directories
    swrite("-- Input directory " + input_dir + ":\n")
    write_list(ls(input_dir))
    write_list(ls(input_dir + '/*'))
    write_list(ls(input_dir + '/*/*'))
    write_list(ls(input_dir + '/*/*/*'))
    swrite("\n")
    swrite("-- Output directory " + output_dir + ":\n")
    write_list(ls(output_dir))
    write_list(ls(output_dir + '/*'))
    swrite("\n")

    # write meta data to stderr
    swrite('\n=== METADATA ===\n\n')
    swrite("-- Current directory " + pwd() + ":\n")
    try:
        metadata = yaml.load(open('metadata', 'r'))
        for key, value in metadata.items():
            swrite(key + ': ')
            swrite(str(value) + '\n')
    except:
        swrite("none\n")
    swrite("-- Input directory " + input_dir + ":\n")
    try:
        metadata = yaml.load(open(os.path.join(input_dir, 'metadata'), 'r'))
        for key, value in metadata.items():
            swrite(key + ': ')
            swrite(str(value) + '\n')
        swrite("\n")
    except:
        swrite("none\n")
def write_scores(fp, scores):
    ''' Write scores to file opened under file pointer fp'''
    for key in scores.keys():
        fp.write("%s --> %s\n" % (key, scores[key]))
        print(key + " --> " + str(scores[key]))
def write(filename, predictions):
    ''' Write prediction scores in prescribed format'''
    with open(filename, "w") as output_file:
        for row in predictions:
            if type(row) is not np.ndarray and type(row) is not list:
                row = [row]
            for val in row:
                output_file.write('{:g} '.format(float(val)))
            output_file.write('\n')
def show_io(input_dir, output_dir):
    swrite('\n=== DIRECTORIES ===\n\n')
    # Show this directory
    swrite("-- Current directory " + pwd() + ":\n")
    write_list(ls('.'))
    write_list(ls('./*'))
    write_list(ls('./*/*'))
    swrite("\n")

    # List input and output directories
    swrite("-- Input directory " + input_dir + ":\n")
    write_list(ls(input_dir))
    write_list(ls(input_dir + '/*'))
    write_list(ls(input_dir + '/*/*'))
    write_list(ls(input_dir + '/*/*/*'))
    swrite("\n")
    swrite("-- Output directory " + output_dir + ":\n")
    write_list(ls(output_dir))
    write_list(ls(output_dir + '/*'))
    swrite("\n")

    # write meta data to stderr
    swrite('\n=== METADATA ===\n\n')
    swrite("-- Current directory " + pwd() + ":\n")
    try:
        metadata = yaml.load(open('metadata', 'r'))
        for key, value in metadata.items():
            swrite(key + ': ')
            swrite(str(value) + '\n')
    except:
        swrite("none\n")
    swrite("-- Input directory " + input_dir + ":\n")
    try:
        metadata = yaml.load(open(os.path.join(input_dir, 'metadata'), 'r'))
        for key, value in metadata.items():
            swrite(key + ': ')
            swrite(str(value) + '\n')
        swrite("\n")
    except:
        swrite("none\n")
def test():
    "Run a simple demo that shows evaluator's capability."
    from sys import exc_info, stderr
    from traceback import format_exception_only
    local = {}
    while True:
        try:
            evaluate(input('>>> '), local)
        except EOFError:
            break
        except:
            stderr.write(format_exception_only(*exc_info()[:2])[-1])
def warning(message):
    _flush_stdout()
    stderr.write("%s %s\n" % (_warning_str, message))
def error(message):
    _flush_stdout()
    stderr.write("%s %s\n" % (_error_str, message))
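The two helpers above rely on module-level names that the extracts do not show (`_flush_stdout`, `_warning_str`, `_error_str`). A self-contained sketch of the same pattern, with assumed placeholder definitions for those names:

import sys

_warning_str = "[WARNING]"  # assumed prefix; the project's actual value is not shown
_error_str = "[ERROR]"      # assumed prefix

def _flush_stdout():
    # Flush stdout first so output on the two streams interleaves in order
    # when both are attached to the same terminal.
    sys.stdout.flush()

def warning(message):
    _flush_stdout()
    sys.stderr.write("%s %s\n" % (_warning_str, message))

def error(message):
    _flush_stdout()
    sys.stderr.write("%s %s\n" % (_error_str, message))

warning("cache miss, rebuilding")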
def set_size(height, width):
    stdout.write("\x1b[8;{rows};{cols}t".format(rows=height, cols=width))
    return
def main():
    """Act like letsencrypt --version insofar as printing the version
    number to stderr."""
    if '--version' in argv:
        stderr.write('letsencrypt 99.9.9\n')
def run(song, num, bearer=None, folder=None):
    if song is None:
        song = input(
            "Enter the artist and song (e.g. The Beatles Let It Be): ")
    youtubeLinks = []
    if num == 1:
        youtubeLinks.append(getYoutubeURLFromSearch(song))
    elif bearer is None:
        youtubeLinks = useLastFM(song, num)
    else:
        print("Using spotify")
        youtubeLinks = useSpotify(song, num, bearer)
    if len(youtubeLinks) == 0:
        print("Could not find song recommendations for '%s'" % song)
        return

    # Start downloading and print out progress
    if folder is not None:
        chdir(folder)
    else:
        folder = abspath('.')
    p = multiprocessing.Pool(multiprocessing.cpu_count())
    print("\nStarting download...")
    for i, _ in enumerate(p.imap_unordered(downloadURL, youtubeLinks), 1):
        stderr.write(
            '\r...{0:2.1%} complete'.format(i / len(youtubeLinks)))
    print("\n\n%d tracks saved to %s\n" % (len(youtubeLinks), folder))
    return
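The `'\r...{0:2.1%} complete'` write above shows a common idiom: a leading carriage return moves the cursor back to the start of the line without emitting a newline, so each stderr write overwrites the previous one and forms an in-place progress indicator. A minimal standalone sketch of just that idiom:

import sys
import time

total = 20
for i in range(1, total + 1):
    # '\r' returns to the start of the line; no '\n' is written until the
    # loop finishes, so each update replaces the previous message.
    sys.stderr.write('\r...{0:2.1%} complete'.format(i / total))
    sys.stderr.flush()  # stderr may be buffered; force the update out now
    time.sleep(0.1)
sys.stderr.write('\n')  # finish the line once the loop is done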
def latex2png(snippet, outfile):
    """Compiles a LaTeX snippet to png"""
    pngimage = os.path.join(IMAGEDIR, outfile + '.png')
    environment = os.environ
    environment['openout_any'] = 'a'
    environment['shell_escape_commands'] = \
        "bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf,gregorio"
    proc = Popen(
        ["lualatex", '-output-directory=' + IMAGEDIR],
        stdin=PIPE,
        stdout=DEVNULL,
        env=environment
    )
    proc.stdin.write((LATEX_DOC % (snippet)).encode("utf-8"))
    proc.communicate()
    proc.stdin.close()
    call(["pdfcrop", os.path.join(IMAGEDIR, "texput.pdf")], stdout=DEVNULL)
    call(
        [
            "gs",
            "-sDEVICE=pngalpha",
            "-r144",
            "-sOutputFile=" + pngimage,
            os.path.join(IMAGEDIR, "texput-crop.pdf"),
        ],
        stdout=DEVNULL,
    )
def png(contents, latex_command):
    """Creates a png if needed."""
    outfile = sha(contents + latex_command)
    src = os.path.join(IMAGEDIR, outfile + '.png')
    if not os.path.isfile(src):
        try:
            os.mkdir(IMAGEDIR)
            stderr.write('Created directory ' + IMAGEDIR + '\n')
        except OSError:
            pass
        latex2png(latex_command + "{" + contents + "}", outfile)
        stderr.write('Created image ' + src + '\n')
    return src
def ly2png(lily, outfile, staffsize):
    p = Popen([
        "lilypond",
        "-dno-point-and-click",
        "-dbackend=eps",
        "-djob-count=2",
        "-ddelete-intermediate-files",
        "-o", outfile,
        "-"
    ], stdin=PIPE, stdout=-3)
    p.stdin.write(("\\paper{\n"
                   "indent=0\\mm\n"
                   "oddFooterMarkup=##f\n"
                   "oddHeaderMarkup=##f\n"
                   "bookTitleMarkup = ##f\n"
                   "scoreTitleMarkup = ##f\n"
                   "}\n"
                   "#(set-global-staff-size %s)\n" % staffsize +
                   lily).encode("utf-8"))
    p.communicate()
    p.stdin.close()
    call([
        "gs",
        "-sDEVICE=pngalpha",
        "-r144",
        "-sOutputFile=" + outfile + '.png',
        outfile + '.pdf',
    ], stdout=-3)
def png(contents, staffsize):
    """Creates a png if needed."""
    outfile = os.path.join(IMAGEDIR, sha(contents + str(staffsize)))
    src = outfile + '.png'
    if not os.path.isfile(src):
        try:
            os.mkdir(IMAGEDIR)
            stderr.write('Created directory ' + IMAGEDIR + '\n')
        except OSError:
            pass
        ly2png(contents, outfile, staffsize)
        stderr.write('Created image ' + src + '\n')
    return src
def ask_password():
    try:
        raw_pass = os.environ['TEMBOARD_UI_PASSWORD']
    except KeyError:
        raw_pass = getpass(" Password: ")

    try:
        password = raw_pass
        validate_parameters({'password': password},
                            [('password', T_PASSWORD, False)])
    except HTTPError:
        stdout.write("Invalid password.\n")
        return ask_password()
    return password
def ask_username():
    try:
        raw_username = os.environ['TEMBOARD_UI_USER']
    except KeyError:
        raw_username = raw_input(" Username: ")

    try:
        username = raw_username
        validate_parameters({'username': username},
                            [('username', T_USERNAME, False)])
    except HTTPError:
        stdout.write("Invalid username.\n")
        return ask_username()
    return username
def main():
    job_id = 0
    f_log = open('./dummy_cuckoo.log', 'a')
    while True:
        timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
        msg = '%s [lib.cuckoo.core.scheduler] INFO: Task #%d: reports generation completed ...\n' % (timestamp, job_id)
        f_log.write(msg)
        stderr.write(msg)
        sleep(1)
        # gcd(p, q) == 1
        # m = (p * q) - 1
        # n = (n + 5) % m
        job_id = (job_id + 5) % 14
        f_log.flush()
    f_log.close()