The following code examples, extracted from open-source Python projects, illustrate how to use tensorflow.load_op_library().
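All of the examples share the same basic pattern: tf.load_op_library() takes the path of a compiled shared library containing custom TensorFlow ops and returns a module-like object whose attributes are the generated Python op wrappers. The following is a minimal sketch of that pattern; the library path './zero_out.so' and the op name 'zero_out' are placeholder names (assuming a kernel built along the lines of TensorFlow's custom-op guide), not part of the examples below.

import tensorflow as tf

# Hypothetical library path and op name; substitute your own compiled kernel.
# tf.load_op_library() returns a module-like object exposing one Python
# wrapper per op registered in the shared library (op names are snake_cased).
zero_out_module = tf.load_op_library('./zero_out.so')
result = zero_out_module.zero_out([[1, 2], [3, 4]])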
def decode_jpeg(self, image_buffer, scope=None):
    if FLAGS.fast_jpeg_decode == "pyfunc":
        print("using ctypes jpeg decode...")
        lib_jpeg = ctypes.cdll.LoadLibrary(
            './data_providers/decode_jpeg_memory/decode_memory.so')
        global ctypes_jpeg
        ctypes_jpeg = lib_jpeg.decode_jpeg_memory_turbo
        return self.decode_jpeg_python(image_buffer, scope)
    elif FLAGS.fast_jpeg_decode == "tf":
        print("using tensorflow binary libjpeg turbo")
        decode_jpeg_batch = tf.load_op_library(
            './data_providers/decode_jpeg_memory/decode_jpeg_batch.so').decode_jpeg_batch
        assert FLAGS.decode_downsample_factor == 1
        ans = decode_jpeg_batch(image_buffer, FLAGS.IM_HEIGHT, FLAGS.IM_WIDTH)
        ans.set_shape([FLAGS.FRAMES_IN_SEG // FLAGS.temporal_downsample_factor,
                       FLAGS.IM_HEIGHT, FLAGS.IM_WIDTH, 3])
        return ans
    else:
        return self.decode_jpeg_original(image_buffer, scope)
def testShuffle(self):
    shuffle_module = tf.load_op_library('shuffle_op.so')
    shuffle = shuffle_module.shuffle

    input_tensor = np.arange(12).reshape((3, 4))
    desired_shape = np.array([6, -1])
    output_tensor = input_tensor.reshape((6, 2))

    with self.test_session():
        result = shuffle(input_tensor, desired_shape)
        self.assertAllEqual(result.eval(), output_tensor)

    input_tensor = np.arange(12).reshape((3, 4))
    desired_shape = np.array([5, -1])
    output_tensor = input_tensor.reshape((6, 2))[:-1]

    with self.test_session():
        result = shuffle(input_tensor, desired_shape)
        self.assertAllEqual(result.eval(), output_tensor)
def load_oplib(lib_name):
    """Load TensorFlow operator library."""
    # use absolute path so that ops.py can be called from other directory
    lib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            'lib{0}.so'.format(lib_name))
    # duplicate library with a random new name so that
    # a running program will not be interrupted when the original library is updated
    lib_copy_path = '/tmp/lib{0}_{1}.so'.format(str(uuid.uuid4())[:8], LIB_NAME)
    shutil.copyfile(lib_path, lib_copy_path)
    oplib = tf.load_op_library(lib_copy_path)
    return oplib
def f_segm_match(iou, s_gt):
    """Matching between segmentation output and groundtruth.

    Args:
        y_out: [B, T, H, W], output segmentations
        y_gt: [B, T, H, W], groundtruth segmentations
        s_gt: [B, T], groundtruth score sequence
    """
    global hungarian_module
    if hungarian_module is None:
        mod_name = './hungarian.so'
        hungarian_module = tf.load_op_library(mod_name)
        log.info('Loaded library "{}"'.format(mod_name))

    # Mask X, [B, M] => [B, 1, M]
    mask_x = tf.expand_dims(s_gt, dim=1)
    # Mask Y, [B, M] => [B, N, 1]
    mask_y = tf.expand_dims(s_gt, dim=2)
    iou_mask = iou * mask_x * mask_y

    # Keep certain precision so that we can get optimal matching within
    # reasonable time.
    eps = 1e-5
    precision = 1e6
    iou_mask = tf.round(iou_mask * precision) / precision
    match_eps = hungarian_module.hungarian(iou_mask + eps)[0]

    # [1, N, 1, 1]
    s_gt_shape = tf.shape(s_gt)
    num_segm_out = s_gt_shape[1]
    num_segm_out_mul = tf.pack([1, num_segm_out, 1])
    # Mask the graph algorithm output.
    match = match_eps * mask_x * mask_y
    return match
def Load():
    """Load the TopN ops library and return the loaded module."""
    with _ops_lock:
        global _topn_ops
        if not _topn_ops:
            ops_path = tf.resource_loader.get_path_to_datafile(TOPN_OPS_FILE)
            tf.logging.info('data path: %s', ops_path)
            _topn_ops = tf.load_op_library(ops_path)

            assert _topn_ops, 'Could not load topn_ops.so'
    return _topn_ops
def Load(library_base_dir=''):
    """Load the quantized ops library and return the loaded module."""
    with _kernels_lock:
        global _quantized_kernels
        if not _quantized_kernels:
            data_files_path = os.path.join(library_base_dir,
                                           tf.resource_loader.get_data_files_path())
            tf.logging.info('data path: %s', data_files_path)
            _quantized_kernels = tf.load_op_library(os.path.join(
                data_files_path, QUANTIZED_KERNELS_FILE))

            assert _quantized_kernels, 'Could not load _quantized_kernels.so'
    return _quantized_kernels
def Load(library_base_dir=''):
    """Load the quantized ops library and return the loaded module."""
    with _ops_lock:
        global _quantized_ops
        if not _quantized_ops:
            data_files_path = os.path.join(library_base_dir,
                                           tf.resource_loader.get_data_files_path())
            tf.logging.info('q:data path: %s', data_files_path)
            _quantized_ops = tf.load_op_library(os.path.join(
                data_files_path, QUANTIZED_OPS_FILE))

            assert _quantized_ops, 'Could not load quantized_ops.so'
    return _quantized_ops
def testLoadTwice(self):
    zero_out_loaded_again = tf.load_op_library(os.path.join(
        tf.resource_loader.get_data_files_path(), 'zero_out_op_kernel_1.so'))
    self.assertEqual(zero_out_loaded_again, zero_out_op_1._zero_out_module)
def is_word(word):
    for char in word:
        if char.isalpha() or char.isdigit():
            return True
    return False

# def word2id(word):
#     word = 'b\'' + word + '\''
#     with open("data/vocab.txt") as f:
#         for i, line in enumerate(f):
#             if line.split()[0] == word:
#                 return i
#     return -1

# def get_word_vector():
#     tf.load_op_library(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'word2vec_ops.so'))
#     metafile = str(tf.train.get_checkpoint_state("data").model_checkpoint_path) + ".meta"
#     sess = tf.Session()
#     new_saver = tf.train.import_meta_graph(metafile)
#     new_saver.restore(sess, tf.train.latest_checkpoint("data"))
#     all_vars = tf.trainable_variables()
#     init_op = tf.global_variables_initializer()
#     sess.run(init_op)
#     yield sess.run(all_vars[3])
def __init__(self, run_dir):
    self.name = 'linemove_2D'

    game_params = {
        'L': 2,
        'dt': 0.15,
        'v_0': 0.,
        'v_max': 0.5,
    }

    self._connect(game_params)
    self._train_params()

    self.run_dir = run_dir

    if self.collect_data:
        self.record_expert()
        sys.exit(0)

    self._init_display()

    # subprocess.Popen(self.run_dir + "./simulator")
    # self.pipe_module = tf.load_op_library(self.run_dir + 'pipe.so')

    plt.ion()
    plt.show()
def __init__(self, run_dir):
    r = 10.
    game_params = {
        'r': r,
        'dt': 1. / 9,
        'host_speed': 10 / 3.6,
        'target_speed': 5.,
        'num_of_targets': 5,
    }

    self._connect(game_params)
    self._train_params()

    self.fig = plt.figure()
    self.ax = plt.subplot2grid((2, 2), (0, 0), colspan=2, rowspan=2)

    self.run_dir = run_dir

    subprocess.Popen(self.run_dir + "./simulator")
    self.pipe_module = tf.load_op_library(self.run_dir + 'pipe.so')

    plt.ion()
    plt.show()