我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用ujson.load()。
def read_json_file(filename, path):
    """Load ``<path><filename>.json`` and return its parsed content.

    Args:
        filename (str): Base name of the file, without the ".json" suffix.
        path (str): Directory prefix. NOTE(review): it is concatenated
            directly, so it must already end with a path separator.

    Returns:
        obj: Parsed JSON value, or [] when the file could not be opened.
    """
    result = []
    try:
        # The context manager closes the file; the original's explicit
        # entry.close() in an else-branch was redundant and is removed.
        with open(path + filename + ".json", "r") as entry:
            result = ujson.load(entry)
    except IOError as ex:
        # Single-argument print(...) is valid under both Python 2 and 3,
        # unlike the original Python-2-only print statement.
        print("I/O error({0}): {1}".format(ex.errno, ex.strerror))
    return result
def test_should_fail_missing_timestamp_in_body(self, bulk_processor):
    """Posting an event whose body lacks a timestamp must yield HTTP 422."""
    events_resource = _init_resource(self)
    events_resource._processor = bulk_processor
    # Resolve the fixture relative to this test module.
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'event_template_json/req_simple_event.json')
    with open(fixture_path, 'r') as fixture_file:
        events = json.load(fixture_file)['events']
    request_body = {'events': [events]}
    self.simulate_request(
        path=ENDPOINT,
        method='POST',
        headers={
            'Content-Type': 'application/json',
            'X_ROLES': 'monasca'
        },
        body=json.dumps(request_body)
    )
    self.assertEqual(falcon.HTTP_422, self.srmock.status)
def parse_postman_file(path: str, environment_vars: List[Dict]) -> APITest:
    """
    Parse a Postman collection file and return an APITest object instance.

    :param path: path to the Postman JSON export
    :type path: str
    :param environment_vars: Postman environment variables handed through to
        the parser (this parameter was undocumented in the original)
    :type environment_vars: List[Dict]
    :return: APITest instance
    :rtype: APITest
    """
    # NOTE(review): assert-based validation disappears under `python -O`;
    # kept so callers relying on AssertionError are unaffected.
    assert isinstance(path, str)
    with open(path, "r") as f:
        json_info = load(f)
    return postman_parser(json_info, environment_vars)
def postman_parser_form_file(file_path: str, environment_vars: dict = None):
    """
    Read a Postman collection (JSON format) from disk and parse it.

    NOTE(review): the name looks like a typo for ``postman_parser_from_file``;
    kept as-is because renaming would break callers.

    :param file_path: file path of the Postman JSON export
    :type file_path: str
    :param environment_vars: optional environment variables for the parser
        (this parameter was undocumented in the original)
    :type environment_vars: dict
    :return: a Postman object
    :rtype: APITest
    :raise ApitestInvalidFormatError: when the Postman JSON file has wrong format
    """
    assert file_path is not None
    with open(file_path, "r") as f:
        try:
            loaded_data = load(f)
        except (ValueError, TypeError) as e:
            # Chain the original decode error so debugging keeps the cause.
            raise ApitestInvalidFormatError from e
    return postman_parser(loaded_data, environment_vars)
def get_database_connection():
    """Open a psycopg2 connection using credentials fetched from S3.

    The pipeline metadata bucket holds a ``sources.json`` index whose entry
    for ``DB`` points at the per-source write credentials file.
    """
    global logger
    s3 = boto3.resource('s3')
    bucket = 'net-mozaws-prod-us-west-2-pipeline-metadata'
    sources = ujson.load(s3.Object(bucket, 'sources.json').get()['Body'])
    creds_key = '%s/write/credentials.json' % sources[DB]['metadata_prefix']
    creds = ujson.load(s3.Object(bucket, creds_key).get()['Body'])
    conn = psycopg2.connect(
        connection_factory=LoggingConnection,
        host=creds['host'],
        port=creds.get('port', 5432),
        user=creds['username'],
        password=creds['password'],
        dbname=creds['db_name'],
    )
    conn.initialize(logger)
    return conn
def setUp(self):
    """Create a permissive IIIFValidator and load the shared JSON fixtures."""
    validator = IIIFValidator()
    validator.logger.setLevel("CRITICAL")
    validator.fail_fast = False
    validator.collect_errors = True
    validator.collect_warnings = True
    validator.debug = True
    self.test_subject = validator
    self.base_dir = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(self.base_dir, 'fixtures/valid_manifest')) as f:
        self.valid_manifest = json.load(f)
    with open(os.path.join(self.base_dir, 'fixtures/error_collection.json')) as f:
        self.error_collection = json.load(f)
    # The last manifest in the collection is the one carrying both
    # warnings and errors.
    self.man_with_warnings_and_errors = self.error_collection['manifests'][-1]['manifest']
def read_vqa(vqa_dir, section):
    """Yield (question_text, options, answer) triples for one VQA section.

    Reads the annotations file first to index multiple-choice answers by
    question id, then streams the questions file.
    """
    annotations_name = 'mscoco_%s2014_annotations.json' % section
    with (vqa_dir / annotations_name).open() as handle:
        annotation_data = json.load(handle)
    answers_by_id = {
        annotation['question_id']: annotation['multiple_choice_answer']
        for annotation in annotation_data['annotations']
    }
    questions_name = 'MultipleChoice_mscoco_%s2014_questions.json' % section
    with (vqa_dir / questions_name).open() as handle:
        question_data = json.load(handle)
    for entry in question_data['questions']:
        yield (entry['question'],
               entry['multiple_choices'],
               answers_by_id[entry['question_id']])
def convert_image(options):
    """Convert the image named in ``options['filename']`` to ANSI art.

    Loads the color cache into ``options['cache']``, converts the frame, and
    either saves it (with a SAUCE record) or prints it to the console.
    """
    print('loading cache')
    # `with` closes the cache file; the original leaked the handle from a
    # bare open() call.  Single-arg print(...) is Python-2/3 compatible.
    with open('color_cache.json') as cache_file:
        options['cache'] = json.load(cache_file)
    print('beginning conversion')
    im = Image.open(options['filename'])
    o = convert_frame(im, options)
    o += ANSI_RESET + '\n\n'
    # Save to ANSI file and add SAUCE record
    if options['output_file'] is not sys.stdout:
        save_frame(o, options)
        add_sauce(o, options)
    # Output to console (unicode)
    else:
        print_frame(o, options)

# MAIN FUNCTION
def _set_asset_paths(self, app): """ Read in the manifest json file which acts as a manifest for assets. This allows us to get the asset path as well as hashed names. :param app: Sanic application :return: None """ webpack_stats = app.config['WEBPACK_MANIFEST_PATH'] try: with open(webpack_stats) as stats_json: stats = json.load(stats_json) if app.config['WEBPACK_ASSETS_URL']: self.assets_url = app.config['WEBPACK_ASSETS_URL'] else: self.assets_url = stats['publicPath'] self.assets = stats['assets'] except IOError: raise RuntimeError( "Sanic-Webpack requires 'WEBPACK_MANIFEST_PATH' to be set and " "it must point to a valid json file.")
def query(self, *args, onlyCached=False, **kwargs):
    """Answer a query, serving from the on-disk cache when possible.

    Builds a cache filename from the query hash; on a miss, throttles to
    ``__waitBetweenQueries`` seconds between downloads, fetches, and caches
    the raw result before converting it with ``_rawToResult``.

    NOTE(review): the statement nesting below was reconstructed from
    whitespace-mangled source — confirm against the original file.
    """
    queryString, hashString = self._queryString(*args, **kwargs)
    # Cache file name: <cacheDir>/<prefix>-<hash-of-query>.
    filename = self.__cacheDir + '/' + self._prefix + '-' + self.__hash(hashString)
    if not os.path.exists(self.__cacheDir):
        os.makedirs(self.__cacheDir)
    if os.path.exists(filename):
        # Cache hit: read the raw data back from disk.
        with open(filename, 'r') as file:
            data = ujson.load(file)
    elif onlyCached:
        # Caller asked for cached data only; report and bail out.
        print('[' + self._prefix + '] data not cached: ' + queryString)
        return None
    else:
        print('[' + self._prefix + '] downloading data: ' + queryString)
        # NOTE(review): _waitForReady() == None appears to mean "OK to
        # proceed" — confirm; nothing is assigned to `data` otherwise.
        if self._waitForReady() == None:
            # Rate limit: sleep out the remainder of the inter-query window.
            if self.__lastQuery and self.__waitBetweenQueries and time.time() - self.__lastQuery < self.__waitBetweenQueries:
                time.sleep(self.__waitBetweenQueries - time.time() + self.__lastQuery)
            self.__lastQuery = time.time()
            data = self.__query(queryString)
            # Persist the raw payload for future cache hits.
            with open(filename, 'w') as file:
                ujson.dump(data, file)
    result = self._rawToResult(data, queryString)
    if not self._isValid(result):
        raise(Exception('[' + self._prefix + '] error in result (' + filename + '): ' + queryString))
    return result
def _generate_fsm(language):
    """Build a language-specific FSM instance from the JSON config files."""
    cond_config_path = os.path.join(CONFIG_PATH, language, 'cond_config.json')
    with open(cond_config_path) as cond_file:
        condition_config = ujson.load(cond_file)
    fsm_cls = generate_fsm_cls(language + '_FSM', condition_config)
    fsm_config_path = os.path.join(CONFIG_PATH, 'FSM.json')
    with open(fsm_config_path, 'r') as fsm_config_file:
        # jsmin strips comments so the file can be parsed as strict JSON.
        fsm_config = ujson.loads(jsmin(fsm_config_file.read()))
    return fsm_cls(
        states=fsm_config['states'],
        transitions=fsm_config['transitions'],
        bot_client=line_bot_api,
        template_path=os.path.join(BOT_TEMPLATE_PATH, language),
    )
def testCase000(self): """Load persistent data from files into into memory. """ global jval global datafile # data datafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('testdata.json') if not os.path.isfile(datafile): raise BaseException("Missing JSON data:file="+str(datafile)) # load data with open(datafile) as data_file: jval = myjson.load(data_file) if jval == None: raise BaseException("Failed to load data:"+str(data_file)) jval = jval assert jval pass
def testCase001(self): """Load persistent schema from files into into memory. """ global sval global datafile global schemafile # schema schemafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('schema.jsd') if not os.path.isfile(schemafile): raise BaseException("Missing JSONschema:file="+str(schemafile)) with open(schemafile) as schema_file: sval = myjson.load(schema_file) if sval == None: raise BaseException("Failed to load schema:"+str(schema_file)) # # Create by object #
def testCase000(self): """Load persistent data from files into into memory. """ global jval global datafile # data datafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('testdata.json') if not os.path.isfile(datafile): raise BaseException("Missing JSON data:file="+str(datafile)) # load data with open(datafile) as data_file: jval = myjson.load(data_file) if jval == None: raise BaseException("Failed to load data:"+str(data_file)) jval = jval assert jval pass # # Create by object #
def testCase000(self): """Load a data file. """ global jval global datafile # data datafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('testdata.json') if not os.path.isfile(datafile): raise BaseException("Missing JSON data:file="+str(datafile)) # load data with open(datafile) as data_file: jval = myjson.load(data_file) if jval == None: raise BaseException("Failed to load data:"+str(data_file)) jval = jval assert jval pass
def testCase000(self): """Load a data file. """ global jval global datafile # data datafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('datafile.json') if not os.path.isfile(datafile): raise BaseException("Missing JSON data:file="+str(datafile)) # load data with open(datafile) as data_file: jval = myjson.load(data_file) if jval == None: raise BaseException("Failed to load data:"+str(data_file)) jval = jval assert jval pass
def testCase000(self): """Load a data file by 'json' module. """ global jval global datafile # data datafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('testdata.json') if not os.path.isfile(datafile): raise BaseException("Missing JSON data:file="+str(datafile)) # load data with open(datafile) as data_file: jval = myjson.load(data_file) if jval == None: raise BaseException("Failed to load data:"+str(data_file)) cdata = {u'phoneNumber': [{u'type': u'home', u'number': u'212 555-1234'}], u'address': {u'city': u'New York', u'streetAddress': u'21 2nd Street', u'houseNumber': 12}} assert cdata == jval pass
def testCase000(self): """Load a data file. """ global jval global datafile # data datafile = os.path.abspath(os.path.dirname(__file__))+os.sep+str('testdata.json') if not os.path.isfile(datafile): raise BaseException("Missing JSON data:file="+str(datafile)) # load data with open(datafile) as data_file: jval = myjson.load(data_file) if jval == None: raise BaseException("Failed to load data:"+str(data_file)) cdata = {u'phoneNumber': [{u'type': u'home', u'number': u'212 555-1234'}], u'address': {u'city': u'New York', u'streetAddress': u'21 2nd Street', u'houseNumber': 12}} assert cdata == jval pass