The following 48 code examples, extracted from open source Python projects, illustrate how to use random.seed().
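As a quick orientation before the extracted examples, here is a minimal sketch (not taken from any of the projects below) of the core behavior: seeding the module-level generator with a fixed value makes every subsequent draw reproducible, while calling random.seed() with no argument re-seeds from an operating-system entropy source (or the current time) so later draws are unpredictable again.

import random

# Seeding with a fixed value makes the sequence of draws reproducible.
random.seed(42)
first_run = [random.randint(0, 100) for _ in range(5)]

random.seed(42)  # same seed -> identical sequence
second_run = [random.randint(0, 100) for _ in range(5)]
assert first_run == second_run

# With no argument, random.seed() re-seeds from an OS entropy source
# (falling back to the current time), so runs are no longer repeatable.
random.seed()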
def test_uint_multi_port(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    do_ports = random.sample(
        [d for d in x_series_device.do_ports if d.do_port_width <= 16], 2)

    total_port_width = sum([d.do_port_width for d in do_ports])

    with nidaqmx.Task() as task:
        task.do_channels.add_do_chan(
            flatten_channel_string([d.name for d in do_ports]),
            line_grouping=LineGrouping.CHAN_FOR_ALL_LINES)

        # Generate random values to test.
        values_to_test = [int(random.getrandbits(total_port_width))
                          for _ in range(10)]

        values_read = []
        for value_to_test in values_to_test:
            task.write(value_to_test)
            time.sleep(0.001)
            values_read.append(task.read())

        assert values_read == values_to_test
def test_one_sample_one_line(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    do_line = random.choice(x_series_device.do_lines).name

    with nidaqmx.Task() as task:
        task.do_channels.add_do_chan(
            do_line, line_grouping=LineGrouping.CHAN_PER_LINE)

        writer = DigitalSingleChannelWriter(task.out_stream)
        reader = DigitalSingleChannelReader(task.in_stream)

        # Generate random values to test.
        values_to_test = [bool(random.getrandbits(1)) for _ in range(10)]

        values_read = []
        for value_to_test in values_to_test:
            writer.write_one_sample_one_line(value_to_test)
            time.sleep(0.001)
            values_read.append(reader.read_one_sample_one_line())

        numpy.testing.assert_array_equal(values_read, values_to_test)
def test_one_sample_port_byte(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    do_port = random.choice(
        [d for d in x_series_device.do_ports if d.do_port_width <= 8])

    with nidaqmx.Task() as task:
        task.do_channels.add_do_chan(
            do_port.name, line_grouping=LineGrouping.CHAN_FOR_ALL_LINES)

        # Generate random values to test.
        values_to_test = [int(random.getrandbits(do_port.do_port_width))
                          for _ in range(10)]

        writer = DigitalSingleChannelWriter(task.out_stream)
        reader = DigitalSingleChannelReader(task.in_stream)

        values_read = []
        for value_to_test in values_to_test:
            writer.write_one_sample_port_byte(value_to_test)
            time.sleep(0.001)
            values_read.append(reader.read_one_sample_port_byte())

        numpy.testing.assert_array_equal(values_read, values_to_test)
def test_one_sample_port_uint32(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    do_port = random.choice(
        [do for do in x_series_device.do_ports if do.do_port_width <= 32])

    with nidaqmx.Task() as task:
        task.do_channels.add_do_chan(
            do_port.name, line_grouping=LineGrouping.CHAN_FOR_ALL_LINES)

        # Generate random values to test.
        values_to_test = [int(random.getrandbits(do_port.do_port_width))
                          for _ in range(10)]

        writer = DigitalSingleChannelWriter(task.out_stream)
        reader = DigitalSingleChannelReader(task.in_stream)

        values_read = []
        for value_to_test in values_to_test:
            writer.write_one_sample_port_uint32(value_to_test)
            time.sleep(0.001)
            values_read.append(reader.read_one_sample_port_uint32())

        numpy.testing.assert_array_equal(values_read, values_to_test)
def test_insufficient_numpy_write_data(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    # Randomly select physical channels to test.
    number_of_channels = random.randint(
        2, len(x_series_device.ao_physical_chans))
    channels_to_test = random.sample(
        x_series_device.ao_physical_chans, number_of_channels)

    with nidaqmx.Task() as task:
        task.ao_channels.add_ao_voltage_chan(
            flatten_channel_string([c.name for c in channels_to_test]),
            max_val=10, min_val=-10)

        number_of_samples = random.randint(1, number_of_channels - 1)
        values_to_test = numpy.float64([
            random.uniform(-10, 10) for _ in range(number_of_samples)])

        with pytest.raises(DaqError) as e:
            task.write(values_to_test, auto_start=True)

        assert e.value.error_code == -200524
def test_create_ai_voltage_chan(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    ai_phys_chan = random.choice(x_series_device.ai_physical_chans).name

    with nidaqmx.Task() as task:
        ai_channel = task.ai_channels.add_ai_voltage_chan(
            ai_phys_chan, name_to_assign_to_channel="VoltageChannel",
            terminal_config=TerminalConfiguration.NRSE,
            min_val=-20.0, max_val=20.0,
            units=VoltageUnits.FROM_CUSTOM_SCALE,
            custom_scale_name="double_gain_scale")

        assert ai_channel.physical_channel.name == ai_phys_chan
        assert ai_channel.name == "VoltageChannel"
        assert ai_channel.ai_term_cfg == TerminalConfiguration.NRSE
        assert ai_channel.ai_min == -20.0
        assert ai_channel.ai_max == 20.0
        assert (ai_channel.ai_voltage_units ==
                VoltageUnits.FROM_CUSTOM_SCALE)
        assert (ai_channel.ai_custom_scale.name == "double_gain_scale")
def test_create_ai_resistance_chan(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    ai_phys_chan = random.choice(x_series_device.ai_physical_chans).name

    with nidaqmx.Task() as task:
        ai_channel = task.ai_channels.add_ai_resistance_chan(
            ai_phys_chan, name_to_assign_to_channel="ResistanceChannel",
            min_val=-1000.0, max_val=1000.0, units=ResistanceUnits.OHMS,
            resistance_config=ResistanceConfiguration.TWO_WIRE,
            current_excit_source=ExcitationSource.EXTERNAL,
            current_excit_val=0.002, custom_scale_name="")

        assert ai_channel.physical_channel.name == ai_phys_chan
        assert ai_channel.name == "ResistanceChannel"
        assert numpy.isclose(ai_channel.ai_min, -1000.0, atol=1)
        assert numpy.isclose(ai_channel.ai_max, 1000.0, atol=1)
        assert ai_channel.ai_resistance_units == ResistanceUnits.OHMS
        assert (ai_channel.ai_resistance_cfg ==
                ResistanceConfiguration.TWO_WIRE)
        assert ai_channel.ai_excit_src == ExcitationSource.EXTERNAL
        assert ai_channel.ai_excit_val == 0.002
def test_watchdog_expir_state(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    do_line = random.choice(x_series_device.do_lines)

    with nidaqmx.system.WatchdogTask(
            x_series_device.name, timeout=0.1) as task:
        expir_states = [DOExpirationState(
            physical_channel=do_line.name,
            expiration_state=Level.TRISTATE)]

        task.cfg_watchdog_do_expir_states(expir_states)

        expir_state_obj = task.expiration_states[do_line.name]
        assert expir_state_obj.expir_states_do_state == Level.TRISTATE

        expir_state_obj.expir_states_do_state = Level.LOW
        assert expir_state_obj.expir_states_do_state == Level.LOW
def test_arm_start_trigger(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    counter = random.choice(self._get_device_counters(x_series_device))

    with nidaqmx.Task() as task:
        task.co_channels.add_co_pulse_chan_freq(counter)

        task.triggers.arm_start_trigger.trig_type = (
            TriggerType.DIGITAL_EDGE)
        assert (task.triggers.arm_start_trigger.trig_type ==
                TriggerType.DIGITAL_EDGE)

        task.triggers.arm_start_trigger.trig_type = (
            TriggerType.NONE)
        assert (task.triggers.arm_start_trigger.trig_type ==
                TriggerType.NONE)
def test_pause_trigger(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    counter = random.choice(self._get_device_counters(x_series_device))

    with nidaqmx.Task() as task:
        task.co_channels.add_co_pulse_chan_freq(counter)
        task.timing.cfg_implicit_timing(
            sample_mode=AcquisitionType.CONTINUOUS)

        task.triggers.pause_trigger.trig_type = (
            TriggerType.DIGITAL_LEVEL)
        assert (task.triggers.pause_trigger.trig_type ==
                TriggerType.DIGITAL_LEVEL)

        task.triggers.pause_trigger.trig_type = (
            TriggerType.NONE)
        assert (task.triggers.pause_trigger.trig_type ==
                TriggerType.NONE)
def test_int_property(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    with nidaqmx.Task() as task:
        task.ci_channels.add_ci_count_edges_chan(
            x_series_device.ci_physical_chans[0].name)

        # Test property default value.
        assert task.in_stream.offset == 0

        # Test property setter and getter.
        value_to_test = random.randint(0, 100)
        task.in_stream.offset = value_to_test
        assert task.in_stream.offset == value_to_test

        value_to_test = random.randint(-100, 0)
        task.in_stream.offset = value_to_test
        assert task.in_stream.offset == value_to_test

        # Test property deleter.
        del task.in_stream.offset
        assert task.in_stream.offset == 0
def test_uint_property(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    with nidaqmx.Task() as task:
        task.ai_channels.add_ai_voltage_chan(
            x_series_device.ai_physical_chans[0].name)
        task.timing.cfg_samp_clk_timing(1000)

        # Test property initial value.
        assert task.timing.samp_clk_timebase_div == 100000

        # Test property setter and getter.
        value_to_test = random.randint(500, 10000)
        task.timing.samp_clk_timebase_div = value_to_test
        assert task.timing.samp_clk_timebase_div == value_to_test

        # Test property deleter.
        del task.timing.samp_clk_timebase_div
        assert task.timing.samp_clk_timebase_div == 100000
def test_list_of_floats_property(self, bridge_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    with nidaqmx.Task() as task:
        ai_channel = task.ai_channels.add_ai_bridge_chan(
            bridge_device.ai_physical_chans[0].name)

        # Test default property value.
        assert isinstance(ai_channel.ai_bridge_poly_forward_coeff, list)
        assert len(ai_channel.ai_bridge_poly_forward_coeff) == 0

        # Test property setter and getter.
        value_to_test = [random.randint(-10, 10)
                         for _ in range(random.randint(2, 5))]
        ai_channel.ai_bridge_poly_forward_coeff = value_to_test
        assert ai_channel.ai_bridge_poly_forward_coeff == value_to_test

        # Test property deleter.
        del ai_channel.ai_bridge_poly_forward_coeff
        assert isinstance(ai_channel.ai_bridge_poly_forward_coeff, list)
        assert len(ai_channel.ai_bridge_poly_forward_coeff) == 0
def __init__(self, database, windows, left, right, cluster, table_output,
             consensus_threshold, v_error_rate, downsample,
             cluster_subsample_size, approx_columns, max_n_bases,
             exact_copies, d_coverage, d_evalue, seed):
    self.database = database
    self.windows = windows
    self.left = left
    self.right = right
    self.cluster = cluster
    self.table_output = table_output
    self.consensus_threshold = consensus_threshold
    self.v_error_rate = v_error_rate
    self.downsample = downsample
    self.cluster_subsample_size = cluster_subsample_size
    self.approx_columns = approx_columns
    self.max_n_bases = max_n_bases
    self.exact_copies = exact_copies
    self.d_coverage = d_coverage
    self.d_evalue = d_evalue
    self.seed = seed
def minutes_for_days():
    """
    500 randomly selected days.
    This is used to make sure our test coverage is unbiased towards any
    rules. We use a random sample because testing on all the trading days
    took around 180 seconds on my laptop, which is far too much for normal
    unit testing.

    We manually set the seed so that this will be deterministic. Results of
    multiple runs were compared to make sure that this is actually true.

    This returns a generator of tuples each wrapping a single generator.
    Iterating over this yields a single day, iterating over the day yields
    the minutes for that day.
    """
    env = TradingEnvironment()

    random.seed('deterministic')
    return ((env.market_minutes_for_day(random.choice(env.trading_days)),)
            for _ in range(500))
def loadLogoSet(path, rows, cols, test_data_rate=0.15):
    random.seed(612)
    _, imgID = readItems('data.txt')
    y, _ = modelDict(path)
    nPics = len(y)
    faceassset = np.zeros((nPics, rows, cols), dtype=np.uint8)  # gray images
    noImg = []
    for i in range(nPics):
        temp = cv2.imread(path + 'logo/' + imgID[i] + '.jpg', 0)
        if temp is None:  # cv2.imread returns None when the file can't be read
            noImg.append(i)
        elif temp.size < 1000:
            noImg.append(i)
        else:
            temp = cv2.resize(temp, (cols, rows),
                              interpolation=cv2.INTER_CUBIC)
            faceassset[i, :, :] = temp
    y = np.delete(y, noImg, 0)
    faceassset = np.delete(faceassset, noImg, 0)
    nPics = len(y)
    # random.sample needs a sequence; use range rather than an ndarray.
    index = random.sample(range(nPics), int(nPics * test_data_rate))
    x_test = faceassset[index, :, :]
    x_train = np.delete(faceassset, index, 0)
    y_test = y[index]
    y_train = np.delete(y, index, 0)
    return (x_train, y_train), (x_test, y_test)
def plot_labeled_images_random(image_list, label_list, categories, n,
                               title_str, ypixels, xpixels, seed, filename):
    random.seed(seed)
    index_sample = random.sample(range(len(image_list)), n)
    plt.figure(figsize=(2 * n, 2))
    # plt.suptitle(title_str)
    for i, ind in enumerate(index_sample):
        ax = plt.subplot(1, n, i + 1)
        plt.imshow(image_list[ind].reshape(ypixels, xpixels))
        plt.gray()
        ax.set_title(categories[label_list[ind]], fontsize=20)
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
    if 1:
        pylab.savefig(filename, bbox_inches='tight')
    else:
        plt.show()

# plot_unlabeled_images_random: plots unlabeled images at random
def plot_unlabeled_images_random(image_list, n, title_str, ypixels, xpixels,
                                 seed, filename):
    random.seed(seed)
    index_sample = random.sample(range(len(image_list)), n)
    plt.figure(figsize=(2 * n, 2))
    plt.suptitle(title_str)
    for i, ind in enumerate(index_sample):
        ax = plt.subplot(1, n, i + 1)
        plt.imshow(image_list[ind].reshape(ypixels, xpixels))
        plt.gray()
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
    if 1:
        pylab.savefig(filename, bbox_inches='tight')
    else:
        plt.show()

# plot_compare: given test images and their reconstruction, we plot them
# for visual comparison
def generate_graphs(self, n_edges_list, use_seed=True):
    """For each number of edges (n_edges) in n_edges_list create an
    Erdos Renyi Precision Graph that allows us to sample from later.

    Parameters
    ----------
    n_edges_list : list[int] or int
        list of number of edges for each graph or scalar if only one
        graph is wanted
    use_seed : bool
        indicates if seed shall be reset
    """
    if use_seed and self.seed is not None:
        random.seed(self.seed)

    n_edges = n_edges_list if type(n_edges_list) is list \
        else [n_edges_list]

    self.graphs = [ErdosRenyiPrecisionGraph(self.n_vertices, n_es)
                   for n_es in n_edges]
def make_list(args):
    image_list = list_image(args.root, args.recursive, args.exts)
    image_list = list(image_list)
    if args.shuffle:
        random.seed(100)
        random.shuffle(image_list)
    N = len(image_list)
    # Integer division so the chunk boundaries are valid list indices.
    chunk_size = (N + args.chunks - 1) // args.chunks
    for i in range(args.chunks):
        chunk = image_list[i * chunk_size:(i + 1) * chunk_size]
        if args.chunks > 1:
            str_chunk = '_%d' % i
        else:
            str_chunk = ''
        sep = int(chunk_size * args.train_ratio)
        sep_test = int(chunk_size * args.test_ratio)
        if args.train_ratio == 1.0:
            write_list(args.prefix + str_chunk + '.lst', chunk)
        else:
            if args.test_ratio:
                write_list(args.prefix + str_chunk + '_test.lst',
                           chunk[:sep_test])
            if args.train_ratio + args.test_ratio < 1.0:
                write_list(args.prefix + str_chunk + '_val.lst',
                           chunk[sep_test + sep:])
            write_list(args.prefix + str_chunk + '_train.lst',
                       chunk[sep_test:sep_test + sep])
def get_random_string(length=12,
                      allowed_chars='abcdefghijklmnopqrstuvwxyz'
                                    'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
    """
    Returns a securely generated random string.

    The default length of 12 with the a-z, A-Z, 0-9 character set returns
    a 71-bit value. log_2((26+26+10)^12) =~ 71 bits
    """
    if not using_sysrandom:
        # This is ugly, and a hack, but it makes things better than
        # the alternative of predictability. This re-seeds the PRNG
        # using a value that is hard for an attacker to predict, every
        # time a random string is required. This may change the
        # properties of the chosen random sequence slightly, but this
        # is better than absolute predictability.
        random.seed(
            hashlib.sha256(
                ("%s%s" % (random.getstate(), time.time())).encode('utf-8')
            ).digest())
    return ''.join(random.choice(allowed_chars) for i in range(length))
def write_parameter_log(options, output_dir):
    """
    Write parameter values to a log file, named by current time.
    """
    merge_method_dict = {1: 'narrowPeak', 2: 'broadPeak'}
    correction_method_dict = {1: 'Bonferroni', 2: 'BH_FDR'}
    with open(output_dir + '/CLAM_Peaker.Parameters.' +
              strftime("%Y%m%d_%H%M") + '.txt', 'w') as log:
        log.write('CLAM Peaker ' + __version__ + '\n')
        log.write('resume: ' + str(options.resume) + '\n')
        log.write('verbose: ' + str(options.verbose) + '\n')
        log.write('output_dir: ' + str(options.output_dir) + '\n')
        log.write('tmp_dir: ' + str(options.tmp_dir) + '\n')
        log.write('peak_file: ' + str(options.peak_file) + '\n')
        log.write('is_stranded: ' + str(options.is_stranded) + '\n')
        log.write('extend: ' + str(options.extend) + '\n')
        log.write('pval_cutoff: ' + str(options.pval_cutoff) + '\n')
        log.write('merge_size: ' + str(options.merge_size) + '\n')
        log.write('max_iter: ' + str(options.max_iter) + '\n')
        log.write('gtf: ' + str(options.gtf) + '\n')
        log.write('seed: ' + str(options.seed) + '\n')
        log.write('merge_method: ' +
                  merge_method_dict[options.merge_method] + '\n')
        log.write('correction_method: ' +
                  correction_method_dict[options.correction_method] + '\n')
        log.write('thread: ' + str(options.nb_proc) + '\n')
def split_keys(profiles, bin_sites, random_state=1234):
    """Balanced split over binding/non-binding sequences."""
    random.seed(random_state)

    # list() so the keys can be shuffled and sliced under Python 3.
    pos_keys = list(bin_sites.keys())
    neg_keys = list(set(profiles.keys()) - set(pos_keys))

    random.shuffle(pos_keys)
    random.shuffle(neg_keys)

    len_pos = len(pos_keys)
    pos_keys1 = pos_keys[:len_pos // 2]
    pos_keys2 = pos_keys[len_pos // 2:]

    len_neg = len(neg_keys)
    neg_keys1 = neg_keys[:len_neg // 2]
    neg_keys2 = neg_keys[len_neg // 2:]

    return [pos_keys1, pos_keys2, neg_keys1, neg_keys2]
def get_batches(data, batch_size, vocabulary, pos_vocabulary):
    '''
    Get batches without any restrictions on number of antecedents and
    negative candidates.
    '''
    random.seed(24)
    random.shuffle(data)
    data_size = len(data)
    if data_size % float(batch_size) == 0:
        num_batches = int(data_size / float(batch_size))
    else:
        num_batches = int(data_size / float(batch_size)) + 1
    batches = []
    for batch_num in range(num_batches):
        start_index = batch_num * batch_size
        end_index = min((batch_num + 1) * batch_size, data_size)
        batch = pad_batch(data[start_index:end_index],
                          vocabulary, pos_vocabulary)
        batches.append(batch)
    logging.info('Data size: %s' % len(data))
    logging.info('Number of batches: %s' % len(batches))
    return batches
def split(flags):
    if os.path.exists(flags.split_path):
        return np.load(flags.split_path).item()
    folds = flags.folds
    path = flags.input_path
    random.seed(6)
    img_list = ["%s/%s" % (path, img) for img in os.listdir(path)]
    random.shuffle(img_list)
    dic = {}
    n = len(img_list)
    num = (n + folds - 1) // folds
    for i in range(folds):
        s, e = i * num, min(i * num + num, n)
        dic[i] = img_list[s:e]
    np.save(flags.split_path, dic)
    return dic
def randomly_grouped_by(key_from_example: Callable[[LabeledExample], Any],
                        training_share: float = .9) -> Callable[
        [List[LabeledExample]],
        Tuple[List[LabeledExample], List[LabeledExample]]]:
    def split(examples: List[LabeledExample]) -> \
            Tuple[List[LabeledExample], List[LabeledExample]]:
        examples_by_directory = group(examples, key=key_from_example)
        directories = examples_by_directory.keys()

        # split must be the same every time:
        random.seed(42)
        # random.sample requires a sequence, so convert the dict view.
        keys = set(random.sample(list(directories),
                                 int(training_share * len(directories))))

        training_examples = [example for example in examples
                             if key_from_example(example) in keys]
        test_examples = [example for example in examples
                         if key_from_example(example) not in keys]

        return training_examples, test_examples

    return split
def gen_training_data(
    num_features,
    num_training_samples,
    num_outputs,
    noise_scale=0.1,
):
    np.random.seed(0)
    random.seed(1)
    input_distribution = stats.norm()
    training_inputs = input_distribution.rvs(
        size=(num_training_samples, num_features)
    ).astype(np.float32)
    weights = np.random.normal(
        size=(num_outputs, num_features)
    ).astype(np.float32).transpose()
    noise = np.multiply(
        np.random.normal(size=(num_training_samples, num_outputs)),
        noise_scale
    )
    training_outputs = (np.dot(training_inputs, weights) +
                        noise).astype(np.float32)
    return training_inputs, training_outputs, weights, input_distribution
def test(args=None, BSTtype=BST):
    import random, sys
    random.seed(19920206)
    if not args:
        args = sys.argv[1:]
    if not args:
        print('usage: %s <number-of-random-items | item item item ...>' %
              sys.argv[0])
        sys.exit()
    elif len(args) == 1:
        items = (random.randrange(100) for i in range(int(args[0])))
    else:
        items = [int(i) for i in args]

    tree = BSTtype()

    source = []
    for item in items:
        tree.insert(item)
        source += [str(item)]
    print(' '.join(source))
    print(tree)
def generate():
    import random, sys
    random.seed(19920206)
    Lmin = 2 ** 2 - 1
    Lmax = 2 ** 4 - 1
    Xnum = 1000000
    voc = 26

    wfile = open('/home/thoma/Work/Dial-DRL/dataset/BST_1M.txt', 'w')
    for id in range(Xnum):
        tree = BST()
        items = (random.randrange(voc)
                 for i in range(random.randint(Lmin, Lmax)))
        source = []
        for item in items:
            item = chr(item + 65)
            tree.insert(item)
            source += [str(item)]
        source = ' '.join(source)
        target = str(tree)
        line = '{0} -> {1}'.format(source, target)
        wfile.write(line + '\n')
        if id % 10000 == 0:
            print(id)
def add_cmdline_args(argparser):
    """Parameters of agent and default values"""
    group = argparser.add_argument_group('Coreference Teacher')
    group.add_argument('--language', type=str, default='ru')
    group.add_argument('--predictions_folder', type=str, default='predicts',
                       help='folder where to dump conll predictions, '
                            'scorer will use this folder')
    group.add_argument('--scorer_path', type=str,
                       default='scorer/reference-coreference-scorers/'
                               'v8.01/scorer.pl',
                       help='path to CoNLL scorer perl script')
    group.add_argument('--valid_ratio', type=float, default=0.2,
                       help='valid_set ratio')
    group.add_argument('--test_ratio', type=float, default=0.2,
                       help='test_set ratio')
    group.add_argument('--teacher_seed', type=int, default=42, help='seed')
    group.add_argument('--raw-dataset-path', type=str, default=None,
                       help='Path to folder with two subfolders: dataset '
                            'and scorer. These two folders are extracted '
                            'rucoref_29.10.2015.zip and '
                            'reference-coreference-scorers.v8.01.tar.gz')
def __init__(self, opt, shared=None):
    """Initialize the parameters of the DefaultTeacher"""
    assert opt['train_part'] + opt['test_part'] + opt['valid_part'] == 1
    self.parts = [opt['train_part'], opt['valid_part'], opt['test_part']]
    # store datatype
    self.dt = opt['datatype'].split(':')[0]
    self.opt = opt
    opt['datafile'] = _path(opt)

    # store identifier for the teacher in the dialog
    self.id = 'ner_teacher'

    random_state = random.getstate()
    random.seed(opt.get('teacher_seed'))
    self.random_state = random.getstate()
    random.setstate(random_state)

    if shared and shared.get('metrics'):
        self.metrics = shared['metrics']
    else:
        self.metrics = CoNLLClassificationMetrics(opt['model_file'])

    # define standard question, since it doesn't change for this task
    super().__init__(opt, shared)
def _child_process(self, service):
    self._child_process_handle_signal()

    # Reopen the eventlet hub to make sure we don't share an epoll
    # fd with parent and/or siblings, which would be bad
    eventlet.hubs.use_hub()

    # Close write to ensure only parent has it open
    os.close(self.writepipe)
    # Create greenthread to watch for parent to close pipe
    eventlet.spawn_n(self._pipe_watcher)

    # Reseed random number generator
    random.seed()

    launcher = Launcher()
    launcher.launch_service(service)
    return launcher
def get_random_string(length=12,
                      allowed_chars='abcdefghijklmnopqrstuvwxyz'
                                    'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
    """
    Returns a securely generated random string.

    The default length of 12 with the a-z, A-Z, 0-9 character set returns
    a 71-bit value. log_2((26+26+10)^12) =~ 71 bits
    """
    if not using_sysrandom:
        # This is ugly, and a hack, but it makes things better than
        # the alternative of predictability. This re-seeds the PRNG
        # using a value that is hard for an attacker to predict, every
        # time a random string is required. This may change the
        # properties of the chosen random sequence slightly, but this
        # is better than absolute predictability.
        random.seed(
            hashlib.sha256(
                ("%s%s%s" % (
                    random.getstate(),
                    time.time(),
                    settings.SECRET_KEY)).encode('utf-8')
            ).digest())
    return ''.join(random.choice(allowed_chars) for i in range(length))
def run(self, vacuum_class, max_steps=10000):
    scores = []
    for seed in seeds:
        logging.debug('Running seed {}...'.format(seed))
        random.seed(seed)
        dve = RandomDirtyVacuumEnvironment()
        dve.add_to_random_empty_square(vacuum_class())
        step = 0
        steps_to_clean = 0
        while step < max_steps:
            dve.step()
            step += 1
            if not dve.is_clean():
                steps_to_clean += 1
        scores.append(Result(seed, dve.agents[0].performance,
                             steps_to_clean))
    return scores
def test_sample_two_dimensions_one_cuboid_property(self):
    random.seed(42)
    doms = {0: [0], 1: [1]}
    dom = {0: [0]}
    cs.init(2, doms)
    s = Core([Cuboid([0.5, float("-inf")], [0.7, float("inf")], dom)], dom)
    w = Weights({0: 1}, {0: {0: 1}})
    f = Concept(s, 1.0, 10.0, w)

    expected_samples = [
        [0.671077246097072, -1.1182375118372132],
        [0.7223363669989505, 0.8182873448596939],
        [0.8341255198319808, 0.43652402266795276],
        [0.4349365229310276, 1.658190358962174],
        [0.6150663198218392, -1.6363623513048244],
        [0.47689201330881126, -1.7458891753921715],
        [0.5268116788866108, 1.8152637100843205],
        [0.8197557203077108, 0.43588084575268926],
        [0.6480058823075816, -1.997712415488226],
        [0.5778432024671717, -1.7231499261264656],
        [0.6787669258743846, -0.9397734842397636],
        [0.47843872817402455, -1.1528071782316718],
        [0.6277970899463485, -1.5159832165269371],
        [0.7123582792556478, -0.10931589475282344],
        [0.4909539247388911, -0.3056855079203169],
        [0.5187297023218571, -0.31247344066238325],
        [0.5772907067965353, -1.1450108032032733],
        [0.6882004507621521, 0.873633101185304],
        [0.6667338652830263, 0.9919022415162564],
        [0.4722500795674033, 0.3346891571648989]]
    samples = f.sample(20)
    self.assertEqual(samples, expected_samples)
def generate_maze(bm, maze_params):
    """
    generate the maze on the bm bmesh
    """
    bm.verts.ensure_lookup_table()
    bm.edges.ensure_lookup_table()

    sel_geom, inner_edges = get_inner_edges(bm, maze_params['boundary_type'])

    if maze_params['maze_update']:
        all_edges = sorted(bm.edges, key=lambda edge: edge.index)
        full_mesh = inner_edges == all_edges

        random.seed(maze_params['rseed'])
        maze_path, maze_verts = recursive_back_tracker_maze(
            inner_edges, full_mesh)

        if maze_params['braid'] > 0.0:
            maze_path = do_braid(maze_path, maze_verts,
                                 maze_params['braid'])

        link_centers, vert_centers = get_maze_centers(maze_path, maze_verts)
    else:
        link_centers = maze_params['link_centers']
        vert_centers = maze_params['vert_centers']

    bevel_extrude(bm, sel_geom, maze_params, link_centers, vert_centers)

    return bm, link_centers, vert_centers
def get_random_string(length=12,
                      allowed_chars='abcdefghijklmnopqrstuvwxyz'
                                    'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
    """
    Return a securely generated random string.

    The default length of 12 with the a-z, A-Z, 0-9 character set returns
    a 71-bit value. log_2((26+26+10)^12) =~ 71 bits
    """
    if not using_sysrandom:
        # This is ugly, and a hack, but it makes things better than
        # the alternative of predictability. This re-seeds the PRNG
        # using a value that is hard for an attacker to predict, every
        # time a random string is required. This may change the
        # properties of the chosen random sequence slightly, but this
        # is better than absolute predictability.
        random.seed(
            hashlib.sha256(
                ('%s%s%s' % (
                    random.getstate(),
                    time.time(),
                    settings.SECRET_KEY)).encode()
            ).digest()
        )
    return ''.join(random.choice(allowed_chars) for i in range(length))
def __init__(self):
    Module.__init__(self)
    self.add_method('+', self.addition)
    self.add_method('-', self.subtraction)
    self.add_method('*', self.multiplication)
    self.add_method('/', self.division)
    self.add_method('%', self.module)
    self.add_method('**', self.power)
    self.add_method('abs', self.abs)
    self.add_method('min', self.minimum)
    self.add_method('max', self.maximum)
    self.add_method('strcat', self.strcat)
    self.add_method('substr', self.substr)
    self.add_method('strlen', self.strlen)
    self.add_method('strindex', self.strindex)
    self.add_method('symcat', self.symcat)
    self.add_method('randint', self.randint)
    random.seed(time.time())
def retrieve_lsb(data, magic):
    '''
    A method that retrieves the least significant bits of the picture

    Args:
        data (list): The list representation of the image
        magic (str): The password

    Returns:
        The list representation of the image with retrieved text from
        random lsb's
    '''
    print('[*] Retrieving message from image')

    retrieve_range = range(data.size)
    if magic is not None:
        random.seed(generate_seed(magic))
        retrieve_range = random_ints(data.size)

    return retrieve(data, retrieve_range)
def __init__(self, location=None):
    random.seed()
    self.name = generate_name()
    self.location = location
    self.cities = []
def __init__(self, seed):
    random.seed(seed)
    self.ISLAND_FACTOR = 1.13  # 1.0 means no small islands; 2.0 leads to a lot
    self.bumps = random.randrange(1, 7)
    self.startAngle = random.uniform(0, 2 * math.pi)
    self.dipAngle = random.uniform(0, 2 * math.pi)
    self.dipWidth = random.uniform(0.2, 0.7)
def HOCcat(data_, mvmodel, seed):
    response = data_.ix[:, 10:25]
    preditors = []
    preditors.append(data_.ix[:, 10:15])
    preditors.append(data_.ix[:, 15:20])
    preditors.append(data_.ix[:, 20:25])

    plsr_ = None
    for i in range(3):
        res_ = plsr2(preditors[i], response, seed=seed)[0]
        plsr_ = res_ if plsr_ is None else np.hstack((plsr_, res_))

    plsr_ = pd.DataFrame(plsr_)
    plsr_.index = range(len(plsr_))
    cols = list(plsr_.columns)
    for s in range(len(cols)):
        cols[cols.index(s)] = 'T' + str(s)
    plsr_.columns = cols
    data_ = pd.concat([data_, plsr_], axis=1)

    Variables = pd.read_csv(mvmodel)
    Variables = Variables[
        Variables.latent.str.contains("Humanização") == False]
    for i in range(len(cols)):
        df_ = pd.DataFrame([['Humanização', cols[i], 'A']],
                           columns=Variables.columns)
        Variables = Variables.append(df_)
    Variables.index = range(len(Variables))
    mvmodel = Variables

    return [data_, mvmodel]
def construct(params, seed=0, render=False, out_path=None):
    """Construct the tree"""
    if seed == 0:
        seed = int(random.random() * 9999999)
    # print('Seed: ', seed)
    random.seed(seed)
    Tree(TreeParam(params)).make()

    if render:
        bpy.data.scenes['Scene'].render.filepath = out_path
        bpy.ops.render.render(write_still=True)

# mod = __import__('ch_trees.parametric.tree_params.quaking_aspen',
#                  fromlist=[''])
# reload(mod)
# construct(mod.params)
def test_one_sample_uint32(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    number_of_pulses = random.randint(2, 50)
    frequency = random.uniform(1000, 10000)

    # Select random counters from the device.
    counters = random.sample(self._get_device_counters(x_series_device), 2)

    with nidaqmx.Task() as write_task, nidaqmx.Task() as read_task:
        write_task.co_channels.add_co_pulse_chan_freq(
            counters[0], freq=frequency)
        write_task.timing.cfg_implicit_timing(
            samps_per_chan=number_of_pulses)

        read_task.ci_channels.add_ci_count_edges_chan(counters[1])
        read_task.ci_channels.all.ci_count_edges_term = (
            '/{0}InternalOutput'.format(counters[0]))

        reader = CounterReader(read_task.in_stream)

        read_task.start()
        write_task.start()
        write_task.wait_until_done(timeout=2)

        value_read = reader.read_one_sample_uint32()
        assert value_read == number_of_pulses
def test_one_sample_double(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    frequency = random.uniform(1000, 10000)

    # Select random counters from the device.
    counters = random.sample(
        self._get_device_counters(x_series_device), 2)

    with nidaqmx.Task() as write_task, nidaqmx.Task() as read_task:
        write_task.co_channels.add_co_pulse_chan_freq(
            counters[0], freq=frequency)
        write_task.timing.cfg_implicit_timing(
            sample_mode=AcquisitionType.CONTINUOUS)

        actual_frequency = write_task.co_channels.all.co_pulse_freq

        read_task.ci_channels.add_ci_freq_chan(
            counters[1], min_val=1000, max_val=10000)
        read_task.ci_channels.all.ci_freq_term = (
            '/{0}InternalOutput'.format(counters[0]))

        reader = CounterReader(read_task.in_stream)

        read_task.start()
        write_task.start()

        value_read = reader.read_one_sample_double()
        numpy.testing.assert_allclose(
            [value_read], [actual_frequency], rtol=0.05)
def test_multi_sample_double(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    number_of_samples = random.randint(2, 50)
    frequency = random.uniform(1000, 10000)

    # Select random counters from the device.
    counters = random.sample(
        self._get_device_counters(x_series_device), 3)

    with nidaqmx.Task() as write_task, nidaqmx.Task() as read_task:
        write_task.co_channels.add_co_pulse_chan_freq(
            counters[1], freq=frequency)
        write_task.timing.cfg_implicit_timing(
            samps_per_chan=number_of_samples + 1)

        read_task.ci_channels.add_ci_freq_chan(
            counters[2], min_val=1000, max_val=10000, edge=Edge.RISING)
        read_task.ci_channels.all.ci_freq_term = (
            '/{0}InternalOutput'.format(counters[1]))
        read_task.timing.cfg_implicit_timing(
            samps_per_chan=number_of_samples)

        read_task.start()
        write_task.start()
        write_task.wait_until_done(timeout=2)

        reader = CounterReader(read_task.in_stream)

        values_read = numpy.zeros(number_of_samples, dtype=numpy.float64)
        reader.read_many_sample_double(
            values_read, number_of_samples_per_channel=number_of_samples,
            timeout=2)

        expected_values = [frequency for _ in range(number_of_samples)]
        numpy.testing.assert_allclose(
            values_read, expected_values, rtol=0.05)
def test_one_sample_pulse_freq(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    frequency = random.uniform(1000, 10000)
    duty_cycle = random.uniform(0.2, 0.8)

    # Select random counters from the device.
    counters = random.sample(self._get_device_counters(x_series_device), 2)

    with nidaqmx.Task() as write_task, nidaqmx.Task() as read_task:
        write_task.co_channels.add_co_pulse_chan_freq(
            counters[0], freq=frequency, duty_cycle=duty_cycle)
        write_task.timing.cfg_implicit_timing(
            sample_mode=AcquisitionType.CONTINUOUS)

        read_task.ci_channels.add_ci_pulse_chan_freq(
            counters[1], min_val=1000, max_val=10000)
        read_task.ci_channels.all.ci_pulse_freq_term = (
            '/{0}InternalOutput'.format(counters[0]))

        read_task.start()
        write_task.start()

        reader = CounterReader(read_task.in_stream)

        value_read = reader.read_one_sample_pulse_frequency()
        write_task.stop()

        assert numpy.isclose(value_read.freq, frequency, rtol=0.05)
        assert numpy.isclose(value_read.duty_cycle, duty_cycle, rtol=0.05)
def test_one_sample_pulse_time(self, x_series_device, seed):
    # Reset the pseudorandom number generator with seed.
    random.seed(seed)

    high_time = random.uniform(0.0001, 0.001)
    low_time = random.uniform(0.0001, 0.001)

    # Select random counters from the device.
    counters = random.sample(self._get_device_counters(x_series_device), 2)

    with nidaqmx.Task() as write_task, nidaqmx.Task() as read_task:
        write_task.co_channels.add_co_pulse_chan_time(
            counters[0], high_time=high_time, low_time=low_time)
        write_task.timing.cfg_implicit_timing(
            sample_mode=AcquisitionType.CONTINUOUS)

        read_task.ci_channels.add_ci_pulse_chan_time(
            counters[1], min_val=0.0001, max_val=0.001)
        read_task.ci_channels.all.ci_pulse_time_term = (
            '/{0}InternalOutput'.format(counters[0]))

        read_task.start()
        write_task.start()

        reader = CounterReader(read_task.in_stream)

        value_read = reader.read_one_sample_pulse_time()
        write_task.stop()

        assert numpy.isclose(value_read.high_time, high_time, rtol=0.05)
        assert numpy.isclose(value_read.low_time, low_time, rtol=0.05)