The following 50 code examples, extracted from open-source Python projects, illustrate how to use yaml.Loader().
def load_config():
    """Load the GPX upload configuration from ~/.gpx_upload.yaml.

    Values from the file overlay the built-in defaults. If the file does
    not exist, the defaults are written out (best effort) and returned.

    Returns:
        dict: the merged configuration.
    """
    obj = {
        'cache_dir': '%s/.cache/gpx' % os.environ['HOME'],
        'enable_upload': True,
        'overpass_server': 'http://overpass-api.de/api/interpreter',
    }
    config_file = '%s/.gpx_upload.yaml' % os.environ['HOME']
    try:
        with open(config_file, 'r') as f:
            # NOTE(review): yaml.Loader runs arbitrary constructors; fine for a
            # user-owned config file, never for untrusted input.
            loaded = yaml.load(f, Loader=yaml.Loader)
        # BUG FIX: an empty config file makes yaml.load return None, which
        # crashed the original `loaded.keys()` loop. Guard before merging.
        if loaded:
            obj.update(loaded)
    except IOError:
        try:
            with open(config_file, 'w') as f:
                f.write(yaml.dump(obj, Dumper=yaml.Dumper))
        except IOError:
            pass  # home dir may be read-only; defaults still work
    return obj
def __init__(self, stream):
    """Initialise Loader.

    Records the stream's directory (for resolving includes) and patches
    float resolution so scientific notation without a dot (e.g. "1e5") is
    recognised as a float — PyYAML's stock YAML 1.1 resolver would load
    such values as strings.
    """
    try:
        self._root = os.path.split(stream.name)[0]
    except AttributeError:
        # Nameless stream (e.g. StringIO): resolve relative to the cwd.
        self._root = os.path.curdir
    super().__init__(stream)
    self.add_implicit_resolver(
        u'tag:yaml.org,2002:float',
        re.compile(u'''^(?:
         [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
        |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
        |\\.[0-9_]+(?:[eE][-+][0-9]+)?
        |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
        |[-+]?\\.(?:inf|Inf|INF)
        |\\.(?:nan|NaN|NAN))$''', re.X),
        list(u'-+0123456789.'))
    # BUG FIX: the original read stream.name unconditionally here, raising
    # AttributeError for nameless streams despite the guard above.
    try:
        self.filenames = [os.path.abspath(stream.name)]
    except AttributeError:
        self.filenames = []
def load_app_from_config(path):
    """
    Generate app directly from config file, bypassing command line settings
    (useful for testing in ipython)

    :param path: path to the YAML settings file
    :raises ValueError: when no file exists at *path*
    """
    Setting.generate_missing_shorthands()
    defaults = Setting.generate_defaults_dict()
    if osp.isfile(path):
        # BUG FIX: use a context manager so the handle is closed even when
        # YAML parsing raises (the original leaked it on exceptions).
        with open(path, "r", encoding="utf-8") as file_stream:
            config_defaults = load(file_stream, Loader=Loader)
        for key, value in config_defaults.items():
            defaults[key] = value
    else:
        raise ValueError("Settings file not found at: {0:s}".format(path))
    args = ap.Namespace()
    for key, value in defaults.items():
        args.__dict__[key] = value
    if args.unsynced:
        app = ApplicationUnsynced(args)
    else:
        app = ApplicationSynced(args)
    return app
def load_app_from_config(path):
    """
    Generate app directly from config file, bypassing command line settings
    (useful for testing in ipython)

    :param path: path to the YAML settings file
    :raises ValueError: when no file exists at *path*
    """
    Setting.generate_missing_shorthands()
    defaults = Setting.generate_defaults_dict()
    if osp.isfile(path):
        # BUG FIX: use a context manager so the handle is closed even when
        # YAML parsing raises (the original leaked it on exceptions).
        with open(path, "r", encoding="utf-8") as file_stream:
            config_defaults = load(file_stream, Loader=Loader)
        for key, value in config_defaults.items():
            defaults[key] = value
    else:
        raise ValueError("Settings file not found at: {0:s}".format(path))
    args = ap.Namespace()
    for key, value in defaults.items():
        args.__dict__[key] = value
    app = StereoMatcherApp(args)
    return app
def get_module(filename_with_path):
    """
    Given a filename with an absolute path, load the contents,
    instantiate a Module, and set the Module's path attribute.

    Parameters:
        filename_with_path (str): the YAML filename with an absolute path

    Raises:
        ModulePathError: when the file cannot be opened
        ModuleConstraintParseError: when the YAML is malformed
    """
    try:
        with open(filename_with_path) as config_file:
            Module.temp_path = filename_with_path
            try:
                this_module = yaml.load(config_file, Loader=Loader)
            finally:
                # BUG FIX: always clear the class-level path, even when
                # parsing raises, so a failed load cannot leak state into
                # the next call (the original skipped the reset on error).
                Module.temp_path = ""
        return this_module
    except IOError:
        raise ModulePathError(filename_with_path)
    except yaml.scanner.ScannerError:
        raise ModuleConstraintParseError("Parsing of module {} failed. This is likely caused by a typo in the file."
                                         "".format(filename_with_path))

# Add the YAML Module constructor so that YAML knows to use it in situations where the tag matches.
def read_configuration(cligraph, custom_suffix=''):
    """Read configuration dict for the given tool.

    Merges three layers in order: an auto-generated layer, the tool's
    shared conf/<tool>.yaml, and the user's ~/.<tool>/<tool>.yaml<suffix>.
    Returns the merged AttrDict plus the raw layers for introspection.
    """
    cfg = {}
    layers = collections.OrderedDict()
    layers['auto'] = [automatic_configuration, None]
    layers['shared'] = [os.path.join(cligraph.tool_path,
                                     'conf/%s.yaml' % cligraph.tool_shortname), None]
    layers['custom'] = [os.path.join(
        os.path.abspath(os.path.expanduser('~/.' + cligraph.tool_shortname)),
        '%s.yaml%s' % (cligraph.tool_shortname, custom_suffix)), None]
    for name, entry in layers.items():
        source = entry[0]
        if callable(source):
            # Programmatic layer: call it to produce the dict.
            layer = source(cligraph, name)
        else:
            # File layer: silently skip missing files.
            if not os.path.exists(source):
                continue
            with open(source, 'r') as filep:
                layer = yaml.load(filep, Loader=Loader)
        entry[1] = layer  # stash the raw layer next to its source
        if layer:
            update_recursive(cfg, layer)
    resolve_config(cfg)
    return AttrDict(**cfg), layers
def get_dataset(filename):
    '''
    Iterator for dataset's items

    :param filename: Path to dataset's file
    :type filename: str
    :return: Dataset's items
    :raises OSError: if has problem with file
    :raises yaml.YAMLError: if has problem with format
    :raises ValueError: if has problem with content
    '''
    # IDIOM FIX: the original named the file handle `input`, shadowing the
    # builtin; renamed. The file is also fully parsed before yielding, so
    # the handle can be closed before iteration starts.
    with open(filename, 'rt', encoding='utf-8') as stream:
        package = load(stream, Loader=Loader)
    dataset = package.get('dataset')
    if not isinstance(dataset, list):
        raise ValueError('wrong format')
    yield from dataset
def _parse(self, text):
    '''
    Parse text with frontmatter, return metadata and content.

    If frontmatter is not found, returns an empty metadata dictionary
    and the original text content.
    '''
    text = str(text).strip()  # ensure unicode first
    if not text.startswith(DELIMITER):
        return {}, text
    try:
        _, fm, content = BOUNDARY.split(text, 2)
    except ValueError:
        # Delimiter present but no parsable frontmatter block — bail.
        return {}, text
    metadata = yaml.load(fm, Loader=self.loader_class)
    if not isinstance(metadata, dict):
        metadata = {}
    return metadata, content
def loader_class(self):
    """Build and return a frontmark-specific YAML Loader subclass."""
    class FrontmarkLoader(yaml.Loader):
        '''
        Custom YAML Loader for frontmark

        - Mapping order is respected (with OrderedDict)
        '''
        def construct_mapping(self, node, deep=False):
            '''Use OrderedDict as default for mappings'''
            return collections.OrderedDict(self.construct_pairs(node))

    FrontmarkLoader.add_constructor('!md', self.yaml_markdown_constructor)
    if self.settings.get('FRONTMARK_PARSE_LITERAL', True):
        FrontmarkLoader.add_constructor(STR_TAG, self.yaml_multiline_as_markdown_constructor)
    # Let plugins register additional (tag, handler) pairs via the signal.
    for _, pair in frontmark_yaml_register.send(self):
        if len(pair) != 2:
            log.warning('Ignoring YAML type (%s), expected a (tag, handler) tuple', pair)
            continue
        tag, constructor = pair
        FrontmarkLoader.add_constructor(tag, constructor)
    return FrontmarkLoader
def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """
    Wrapper function to assert ordered loading of yaml file
    """
    class _Ordered(Loader):
        pass

    def _mapping(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    _Ordered.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _mapping)
    return yaml.load(stream, _Ordered)

# Represent OrderedDict as normal yaml when dumping to file or rosbag
def from_yaml(stream, cls=None, loader_cls=yaml.Loader, object_pairs_hook=OrderedDict, **extras):
    """
    Convert a YAML stream into a class via the OrderedLoader class.
    """
    class _OrderedLoader(loader_cls):
        pass

    def _make_mapping(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    _OrderedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _make_mapping)
    parsed = yaml.load(stream, _OrderedLoader) or {}
    parsed.update(extras)  # keyword extras override file contents
    return cls(**parsed) if cls else parsed
def __init__(self, *args, **kwds):
    """Route both plain maps and ordered maps through construct_yaml_map."""
    yaml.Loader.__init__(self, *args, **kwds)
    for tag in (u'tag:yaml.org,2002:map', u'tag:yaml.org,2002:omap'):
        self.add_constructor(tag, type(self).construct_yaml_map)
def __ordered_load(self, stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """Load an ordered dictionary from a yaml file.

    Note
    ----
    Borrowed from John Schulman.
    http://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml-mappings-as-ordereddicts/21048064#21048064"
    """
    class _Ordered(Loader):
        pass

    # Note: unlike the canonical recipe, this variant does not call
    # flatten_mapping — preserved as-is.
    _Ordered.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: object_pairs_hook(loader.construct_pairs(node)))
    return yaml.load(stream, _Ordered)
def __parse_yaml(cls, yaml_string, variables):
    """
    :type yaml_string: str
    :type variables: dict
    :rtype dict
    """
    substituted = cls.__substitute_yaml(yaml_string, variables)
    # `or {}` coerces any falsy parse result (None, empty) to an empty dict.
    parsed = yaml.load(substituted, Loader=YamlLoader) or {}
    if not isinstance(parsed, dict):
        raise ValueError('bad formatted YAML; have to be dict on top level')
    return parsed
def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """Parse *stream* so YAML mappings keep their key order."""
    class _Loader(Loader):
        pass

    def _construct(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    _Loader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _construct)
    return yaml.load(stream, _Loader)
def process_measurements(self):
    """Process measurements"""
    loader = Loader(self.measurements_stream)
    # Expose collaborators as loader attributes so the YAML constructors
    # defined on the Loader can reach them.
    loader.collector = self.collector
    loader.system = self.system
    loader.config = self.config
    measurements = loader.get_data()
    for name, entries in measurements.items():
        logging.debug('Process "{}" measurements: {}'.format(name, entries))
        for measurement in entries:
            self.send_data(measurement)
def process_notify(self, notification):
    """Process events"""
    loader = Loader(self.events_stream)
    # Make the notification and system visible to the YAML constructors.
    loader.notification = notification
    loader.system = self.system
    notifications = loader.get_data()
    for name, payload in notifications.items():
        logging.debug('Process "{}" notification'.format(name))
        if payload is not None:
            self.send_data(payload)
def __init__(self, stream):
    """Remember the stream's directory for resolving relative includes."""
    self._root = os.path.dirname(stream.name)
    super(Loader, self).__init__(stream)
def include(self, node):
    """Handler for the !include tag: parse and embed another YAML file."""
    target = os.path.join(self._root, self.construct_scalar(node))
    with open(target, 'r') as handle:
        return yaml.load(handle, Loader)
def hunt_repeated_yaml_keys(data):
    """Parses yaml and returns a list of repeated variables and the line
    on which they occur
    """
    loader = yaml.Loader(data)

    def compose_node(parent, index):
        # loader.line is where the previous token ended (plus blank lines),
        # so +1 yields the 1-based line of this node.
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        # Instead of building the mapping, collect every duplicated key
        # with the full list of lines it appears on.
        seen = dict()
        duplicates = dict()
        for key_node, _value_node in node.value:
            key = key_node.value
            if key in seen:
                if key in duplicates:
                    duplicates[key].append(key_node.__line__)
                else:
                    duplicates[key] = [seen[key], key_node.__line__]
            seen[key] = key_node.__line__
        return duplicates

    # Monkey-patch this loader instance only.
    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    return loader.get_single_data()
def __init__(self, *args, **kwargs):
    """Install ordered-map handling for both map and omap tags."""
    yaml.Loader.__init__(self, *args, **kwargs)
    handler = type(self).construct_yaml_map
    self.add_constructor(u'tag:yaml.org,2002:map', handler)
    self.add_constructor(u'tag:yaml.org,2002:omap', handler)
def construct_include(self, node):
    """Include file referenced at node.

    YAML files are parsed recursively; any other extension is embedded
    as raw text. Every included path is tracked in self.filenames.
    """
    path = os.path.abspath(os.path.join(
        self._root, self.construct_scalar(node)))
    ext = os.path.splitext(path)[1].lstrip('.')
    self.filenames.append(path)
    with open(path, 'r') as handle:
        if ext in ('yaml', 'yml'):
            return yaml.load(handle, Loader)
        return handle.read()
def load_config(filename=None):
    """Load the measurement YAML file and cache selected settings in
    module-level globals. Returns the path of the file that was read."""
    global meas_file, AWGDir, plotBackground, gridColor, pulse_primitives_lib, cnot_implementation
    meas_file = filename if filename else find_meas_file()
    with open(meas_file, 'r') as FID:
        loader = Loader(FID)
        try:
            cfg = loader.get_single_data()
        finally:
            loader.dispose()
    # pull out the variables
    if 'AWGDir' not in cfg['config'].keys():
        raise KeyError("Could not find AWGDir in the YAML config section")
    # abspath allows the use of relative file names in the config file
    AWGDir = os.path.abspath(cfg['config']['AWGDir'])
    plotBackground = cfg['config'].get('PlotBackground', '#EAEAF2')
    gridColor = cfg['config'].get('GridColor', None)
    pulse_primitives_lib = cfg['config'].get('PulsePrimitivesLibrary', 'standard')
    cnot_implementation = cfg['config'].get('cnot_implementation', 'CNOT_simple')
    return meas_file
def parse_file(cls, path):
    """Parse a specification from a filesystem path or a file-like object.

    Raises InvalidSpecification when the YAML is malformed.
    """
    try:
        if callable(getattr(path, 'read', None)):
            # Already an open file-like object.
            text = path.read()
        else:
            with io.open(path) as f:
                text = f.read()
        conf = yaml.load(text, Loader=_Loader)
    except yaml.error.MarkedYAMLError as e:
        raise InvalidSpecification(str(e))
    return cls.parse(conf)
def __init__(self, stream, net):
    """Initialise the loader and keep a reference to the owning net."""
    yaml.Loader.__init__(self, stream)
    self.net = net
def load_yaml(document: Path, required=False):
    """Load a YAML document, returning {} when missing or unparsable.

    :param document: path to the YAML file
    :param required: when True, print an error if the file is missing
    """
    document = Path(document)
    if not document.exists():
        if required:
            click.secho('yaml document does not exists: {0}'.format(document), fg='red')
        return {}
    try:
        # BUG FIX: pathlib.Path has no .text() method (that is the path.py
        # API); read_text() is the standard-library equivalent.
        # NOTE(review): assumes `Path` here is pathlib — confirm the import.
        return yaml.load(document.read_text(), Loader=Loader)
    except yaml.YAMLError as exc:
        click.secho(str(exc), fg='red')
        return {}
def parse_annotations(self, ctx, symbol):
    """Copy the comment and parsed YAML tag block from *ctx* onto *symbol*."""
    assert ctx and symbol
    if ctx.comment:
        symbol.comment = ctx.comment.text
    tags = ctx.tagSymbol()
    if tags:
        # Drop the leading tag marker character from each line.
        lines = [tag.line.text[1:] for tag in tags]
        try:
            symbol._tags = yaml.load('\n'.join(lines), Loader=Loader)
        except yaml.YAMLError as exc:
            click.secho(exc, fg='red')
def read(self, stream):
    """
    Read the given stream and returns it as a dict.
    :param stream: The stream to read the configuration from.
    :return IgnoreCaseDict: The configuration read from the stream.
    """
    if stream is None:
        raise ValueError('stream cannot be None')
    loader = self._get_loader()
    return yaml.load(stream, Loader=loader)
def loads(content):
    """Decode *content* from UTF-8 bytes and parse it as YAML."""
    text = content.decode('utf-8')
    return yaml.load(text, Loader=Loader)
def read_yaml(file: str):
    """Read a UTF-8 YAML file and return the parsed data.

    BUG FIX: the original opened with mode 'rU'; the 'U' (universal
    newlines) flag has been deprecated since Python 3 and was removed in
    Python 3.11, where it raises ValueError. Plain text mode 'r' already
    performs universal newline translation.
    """
    with open(file, 'r', encoding="utf-8") as stream:
        return yaml.load(stream, Loader=Loader)
def load_config(path):
    """Load config from YAML (when available) or JSON, converting
    old-style settings. Returns {} when the file does not exist."""
    # TODO: support old-style setting names? i.e. pass them through ARGS_TO_SETTINGS ?
    data = {}
    yaml_err = ""
    if useYAML:
        try:
            # this assumes only one document
            with open(path, encoding='utf8') as infp:
                data = yaml_load(infp, Loader=YAMLLoader)
        except FileNotFoundError:
            return {}
        except YAMLError as e:
            yaml_err = 'YAML parsing error in file {}'.format(path)
            if hasattr(e, 'problem_mark'):
                mark = e.problem_mark
                # BUG FIX: the original built this string but discarded it
                # (`yaml_err + ...` without assignment); append it.
                yaml_err += '\nError on Line:{} Column:{}'.format(mark.line + 1, mark.column + 1)
        else:
            return _convert_old_config(data)
    # YAML unavailable or failed to parse: fall back to JSON.
    try:
        with open(path, encoding='utf8') as infp:
            data = json.load(infp)
    except FileNotFoundError:
        return {}
    except json.JSONDecodeError as e:
        if useYAML and yaml_err:
            print(yaml_err)
        else:
            print('JSON parsing error in file {}'.format(path),
                  'Error on Line: {} Column: {}'.format(e.lineno, e.colno), sep='\n')
    data = _convert_old_config(data)
    return data
def load(f):
    """Parse *f* with the FancyLoader."""
    return yaml.load(f, Loader=FancyLoader)
def yaml_load_ordered(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """Load YAML with mappings materialised through *object_pairs_hook*."""
    class _OrderedLoader(Loader):
        pass

    def _pairs_constructor(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    _OrderedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _pairs_constructor)
    return yaml.load(stream, _OrderedLoader)
def __init__(self, stream):
    # Delegate straight to the base yaml.Loader initialiser.
    yaml.Loader.__init__(self, stream)
def get_single_data(self):
    """Construct the document, then apply recursive-extension post-processing."""
    return self.recursive_extend(yaml.Loader.get_single_data(self))
def include(self, node):
    """Handle the `!include` directive by parsing the referenced YAML file."""
    included = os.path.join(self._root, self.construct_scalar(node))
    with open(included, 'r') as src:
        return yaml.load(src, Loader)
def __init__(self, rule_name, metadata):
    """Constructor of TreeLoader.

    Args:
        rule_name: rule file name in ./rules, excluding extension .yml
        metadata: data of any other information needed evaluating tree

    Raises:
        TreeLoaderException: when the rule file cannot be opened
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    rule_path = os.path.join(base_dir, 'rules', rule_name + '.yml')
    # Open yaml rule file
    try:
        # BUG FIX: use a context manager so the handle is closed even when
        # yaml.load raises (the original only closed on success).
        with open(rule_path, 'r', encoding='utf-8') as f:
            self.tree = yaml.load(f, Loader)
    except IOError:
        logger.error('Error opening file: ' + rule_name)
        raise TreeLoaderException()
    # Metadata
    self.metadata = metadata
    self.default_properties = {
        # if this value is True, falsy node will be hidden
        'hide_false': False,
        # [currently acquired credit, required credit for True, sum credit of falsy node or not]
        'credit_info': [0, 0, False],
        'main_node': False,
    }
    # base node is GRADUATE
    self.base_node = TreeNode(None, self.default_properties, self.metadata, and_func(), '!GRADUATE')
    # initiate recursive tree loading procedure
    self.load_tree(self.tree, self.base_node)
def __init__(self, *args, **kwargs):
    """Install a map constructor while remembering the one it replaces.

    The saved previous constructor lets construct_yaml_map delegate to the
    default behaviour.
    """
    yaml.Loader.__init__(self, *args, **kwargs)
    # IDIOM FIX: the original set an unused local `toplevel = True`; dropped.
    map_tag = u'tag:yaml.org,2002:map'
    self.prev_constructor = self.yaml_constructors[map_tag]
    self.add_constructor(map_tag, type(self).construct_yaml_map)
def __init__(self, stream):
    """Initialise Loader."""
    try:
        self._root = os.path.dirname(stream.name)
    except AttributeError:
        # Nameless streams (e.g. StringIO) resolve includes against the cwd.
        self._root = os.path.curdir
    super().__init__(stream)
def construct_include(self, node):
    """Include file referenced at node.

    YAML files are parsed recursively; other extensions are embedded as
    raw text.
    """
    # BUG FIX: the original computed an os.path.abspath() variant first and
    # then immediately overwrote it with this relative join; the dead
    # assignment is removed and the effective behaviour kept.
    filename = os.path.join(self._root, self.construct_scalar(node))
    extension = os.path.splitext(filename)[1].lstrip('.')
    with open(filename, 'r') as f:
        if extension in ('yaml', 'yml'):
            return yaml.load(f, Loader)
        else:
            return ''.join(f.readlines())
def load(filename):
    """Parse the YAML file at *filename* and return its data."""
    with open(filename, 'r') as src:
        return yaml.load(src, Loader)
def load(*args, **kwargs):
    """Delegate to yaml load, forcing this module's Loader.

    IDIOM FIX: the original guarded against ``kwargs is None`` — impossible,
    since ``**kwargs`` always binds a dict — so that dead check is removed.
    """
    kwargs['Loader'] = Loader
    return yaml.load(*args, **kwargs)
def load_all(*args, **kwargs):
    """Delegate to yaml loadall, forcing this module's Loader.

    IDIOM FIX: the original guarded against ``kwargs is None`` — impossible,
    since ``**kwargs`` always binds a dict — so that dead check is removed.
    """
    kwargs['Loader'] = Loader
    return yaml.load_all(*args, **kwargs)
def load(stream, safe=False, many=False):
    """Parse *stream* with the custom loader.

    safe=True selects the safe loader variant; many=True reads a
    multi-document stream and returns the documents as a tuple.
    """
    loader_cls = SafeCustomLoader if safe else CustomLoader
    if many:
        return tuple(yaml.load_all(stream, loader_cls))
    return yaml.load(stream, loader_cls)