We have extracted the following 50 code examples from open-source Python projects to illustrate how to use django.conf.settings.BASE_DIR.
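All of the snippets below share the same basic pattern: join settings.BASE_DIR (the project root directory defined in the project's settings module) with a relative path to build an absolute filesystem path. Here is a minimal, self-contained sketch of that pattern, assuming a configured Django project; the file name fixtures/example.json is only a hypothetical illustration and does not come from any of the examples below.

import os
from django.conf import settings

# settings.BASE_DIR is conventionally defined in settings.py as the project root,
# e.g. os.path.dirname(os.path.dirname(os.path.abspath(__file__))).
# 'fixtures/example.json' is a hypothetical project-relative file.
fixture_path = os.path.join(settings.BASE_DIR, 'fixtures', 'example.json')

# Read the file via the absolute path rooted at the project directory.
with open(fixture_path) as f:
    data = f.read()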
def handle(self, *args, **options):
    """"""
    ovpn = Ovpn.objects.filter(activated=True)
    if ovpn.exists():
        self._kill_old_process()
        ovpn = ovpn[0]
        print >> sys.stdout, "Config: {0.path}".format(ovpn.file)
        auth_filepath = os.path.join(settings.BASE_DIR, "vpn{0.vpn.pk}.auth.txt".format(ovpn))
        with open(auth_filepath, "w") as auth:
            auth.write(ovpn.vpn.username + '\n')
            auth.write(ovpn.vpn.password + '\n')
        # get file content
        with open(ovpn.file.path, "r") as vpn:
            vpn_file_content = vpn.readlines()
        # change file
        for index, line in enumerate(vpn_file_content):
            if re.match(self.vpn_param + '.*', line):
                vpn_file_content[index] = "{0.vpn_param} {1:s}\n".format(self, auth_filepath)
                break
        # write new data
        with open(ovpn.file.path, "w") as vpn:
            vpn.write(''.join(vpn_file_content))
        # vpn activate
        sh.openvpn(ovpn.file.path, _out=sys.stdout)
def archive_replicas(self):
    filename = safe_join(settings.BASE_DIR, "replica.sqlite3")
    has_file = os.path.exists(filename)
    if not has_file:
        return
    dirpath = safe_join(settings.BASE_DIR, "replicas")
    replicas = os.path.exists(dirpath)
    if not replicas is True:
        try:
            print("Creating replicas archive directory ...")
            os.makedirs(safe_join(settings.BASE_DIR, "replicas"))
        except OSError as exc:
            # Guard against race condition
            if exc.errno != errno.EEXIST:
                raise
    dst = safe_join(settings.BASE_DIR, "replicas")
    ts = str(int(time.time()))
    newname = "replica." + ts + ".sqlite3"
    os.rename("replica.sqlite3", newname)
    src = safe_join(settings.BASE_DIR, newname)
    print("Archiving current replica ...")
    shutil.move(src, dst)
def test_templates_render_successfully():
    template_list = []
    template_dirs = [
        os.path.join(settings.BASE_DIR, 'enrolment/templates'),
        os.path.join(settings.BASE_DIR, 'supplier/templates'),
    ]
    for template_dir in template_dirs:
        for dir, dirnames, filenames in os.walk(template_dir):
            for filename in filenames:
                path = os.path.join(dir, filename).replace(template_dir, '')
                template_list.append(path.lstrip('/'))
    default_context = {
        'supplier': None,
        'form': Form(),
    }
    assert template_list
    for template in template_list:
        render_to_string(template, default_context)
def create_folders():
    """
    Creates required directories.
    """
    folders = ["data", "data/incoming", "data/garch", "data/incoming_pickled",
               "data/incoming_pickled/csv", "data/indicators", "data/indicators/csv",
               "data/monte_carlo", "data/monte_carlo/indicators", "data/monte_carlo/systems",
               "data/monte_carlo/performance", "data/monte_carlo/avg", "data/performance",
               "data/performance/csv", "data/portfolios", "data/portfolios/csv",
               "data/quandl", "data/portfolios/csv", "data/quandl/csv",
               "data/systems", "data/systems/csv", "data/systems/json"]
    for folder in folders:
        try:
            Popen("mkdir {0}/{1}".format(settings.BASE_DIR, folder))
        except Exception as err:
            print(colored.red("create_folders {}".format(err)))
def create_fixture(self):
    """Create the fixture using the dumpdata command"""
    excluded = [
        'admin',
        'auth.Permission',
        'contenttypes',
        'sessions',
        'wagtailcore.grouppagepermission',
        'wagtailcore.groupcollectionpermission',
    ]
    path = os.path.join(settings.BASE_DIR, 'tests/fixtures/basic_site.json')
    call_command(
        'dumpdata',
        exclude=excluded,
        natural_foreign=True,
        indent=2,
        output=path
    )
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['ImageData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'ImageData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'ImageData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'ImageData')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Data']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Data')
    # Test 2
    net['l0']['info']['phase'] = 0
    net['l0']['params']['mean_value'] = ''
    net['l0']['params']['mean_file'] = '/path/to/mean/file'
    net['l0']['params']['backend'] = "LEVELDB"
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Data')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Data')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['HDF5Data']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'HDF5Data')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'HDF5Data')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'HDF5Data')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['HDF5Output']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'HDF5Output')
    # Test 2
    net['l1']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'HDF5Output')
    # Test 3
    net['l1']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'HDF5Output')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['WindowData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'WindowData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'WindowData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'WindowData')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['MemoryData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'MemoryData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'MemoryData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'MemoryData')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['DummyData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'DummyData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'DummyData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'DummyData')

# ********** Vision Layers Test **********
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['Pooling']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Pooling')
    # Test 2
    net['l1']['params']['pool'] = 'AVE'
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Pooling')
    # Test 3
    net['l1']['params']['pool'] = 'STOCHASTIC'
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Pooling')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['LRN']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'LRN')
    # Test 2
    net['l1']['params']['norm_region'] = 'ACROSS_CHANNELS'
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'LRN')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['Accuracy']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Accuracy')
    # Test 2
    net['l1']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Accuracy')
    # Test 3
    net['l1']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Accuracy')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['PythonData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Python')
    # Test 2
    net['l0']['params']['endPoint'] = "1, 0"
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Python')
def test_caffe_export(self):
    data = L.Input(shape={'dim': [10, 3, 16, 224, 224]})
    top = L.Convolution(data, kernel_size=3, pad=1, stride=1, num_output=128,
                        dilation=1, weight_filler={'type': 'xavier'},
                        bias_filler={'type': 'constant'})
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    response['net']['l0']['params']['caffe'] = True
    response['net']['l1']['params']['layer_type'] = '3D'
    response['net']['l1']['params']['caffe'] = False
    response = self.client.post(reverse('caffe-export'), {'net': json.dumps(response['net']),
                                                          'net_name': ''})
    response = json.loads(response.content)
    self.assertEqual(response['result'], 'error')

# ********** Data Layers Test **********
def test_caffe_import(self):
    data, label = L.WindowData(source='/dummy/source/', batch_size=32, ntop=2,
                               fg_threshold=0.5, bg_threshold=0.5, fg_fraction=0.25,
                               context_pad=0, crop_mode='warp', cache_images=False,
                               root_folder='/dummy/folder/',
                               transform_param=dict(crop_size=227, mean_value=[104, 117, 123],
                                                    mirror=True, force_color=False,
                                                    force_gray=False))
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(data, label)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 14)
    self.assertEqual(response['result'], 'success')
def test_caffe_import(self):
    # Test 1
    top = L.Pooling(kernel_size=2, pad=0, stride=2, pool=1)
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 4)
    self.assertEqual(response['result'], 'success')
    # Test 2
    top = L.Pooling(kernel_size=2, pad=0, stride=2, pool=2)
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 4)
    self.assertEqual(response['result'], 'success')
def test_caffe_import(self):
    # Test 1
    data = L.Input(shape={'dim': [10, 3, 224, 224]})
    top = L.Python(data, module='pyloss', layer='EuclideanLossLayer', loss_weight=1,
                   name='eucLoss')
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l1']['params']), 4)
    self.assertEqual(response['result'], 'success')
    # Test 2
    top = L.Python(module='pascal_multilabel_datalayers', layer='PascalMultilabelDataLayerSync',
                   param_str="{\'pascal_root\': \'../data/pascal/VOC2007\', "
                             "\'im_shape\': [227, 227], \'split\': \'train\', \'batch_size\': 128}")
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 6)
    self.assertEqual(response['result'], 'success')
def test_keras_import(self):
    model = Sequential()
    model.add(BatchNormalization(center=True, scale=True,
                                 beta_regularizer=regularizers.l2(0.01),
                                 gamma_regularizer=regularizers.l2(0.01),
                                 beta_constraint='max_norm', gamma_constraint='max_norm',
                                 input_shape=(10, 16)))
    model.build()
    json_string = Model.to_json(model)
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
        json.dump(json.loads(json_string), out, indent=4)
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
    response = self.client.post(reverse('keras-import'), {'file': sample_file})
    response = json.loads(response.content)
    layerId = sorted(response['net'].keys())
    self.assertEqual(response['result'], 'success')
    self.assertEqual(response['net'][layerId[0]]['info']['type'], 'Scale')
    self.assertEqual(response['net'][layerId[1]]['info']['type'], 'BatchNorm')

# ********** Noise Layers **********
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input2'], 'l1': net['InnerProduct']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = dense(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Dense')
    # Test 2
    net['l1']['params']['weight_filler'] = 'glorot_normal'
    net['l1']['params']['bias_filler'] = 'glorot_normal'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = dense(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Dense')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['ReLU']}
    # Test 1
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    temp = activation(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'Activation')
    # Test 2
    net['l1']['params']['negative_slope'] = 1
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    temp = activation(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'LeakyReLU')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['Deconvolution']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = deconvolution(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Conv2DTranspose')
    # Test 2
    net['l1']['params']['weight_filler'] = 'xavier'
    net['l1']['params']['bias_filler'] = 'xavier'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = deconvolution(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Conv2DTranspose')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input3'], 'l1': net['Embed']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = embed(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'Embedding')
    # Test 2
    net['l1']['params']['input_length'] = None
    net['l1']['params']['weight_filler'] = 'VarianceScaling'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = embed(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'Embedding')

# ********** Merge Layers Test **********
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['GaussianNoise']}
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    net = gaussian_noise(net['l1'], [inp], 'l1')
    model = Model(inp, net['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'GaussianNoise')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['BatchNorm'], 'l2': net['Scale']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = batch_norm(net['l1'], [inp], 'l1', 'l2', net['l2'])
    model = Model(inp, temp['l2'])
    self.assertEqual(model.layers[1].__class__.__name__, 'BatchNormalization')
    # Test 2
    net['l2']['params']['filler'] = 'VarianceScaling'
    net['l2']['params']['bias_filler'] = 'VarianceScaling'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = batch_norm(net['l1'], [inp], 'l1', 'l2', net['l2'])
    model = Model(inp, temp['l2'])
    self.assertEqual(model.layers[1].__class__.__name__, 'BatchNormalization')
    # Test 3
    inp = data(net['l0'], '', 'l0')['l0']
    temp = batch_norm(net['l1'], [inp], 'l1', 'l0', net['l0'])
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'BatchNormalization')
def find_files(self, root):
    a4js_paths = super().find_files(path.join(
        settings.BASE_DIR, 'node_modules', 'adhocracy4', 'adhocracy4'
    ))
    a4_paths = super().find_files(get_module_dir('adhocracy4'))
    mbjs_paths = super().find_files(path.join(
        settings.BASE_DIR, 'node_modules', 'a4-meinberlin', 'meinberlin'
    ))
    mb_paths = super().find_files(get_module_dir('meinberlin'))
    liqd_product_paths = super().find_files(
        path.relpath(get_module_dir('liqd_product'))
    )
    return a4js_paths + a4_paths + \
        mbjs_paths + mb_paths + \
        liqd_product_paths
def get_resetable_apps(app_labels=()):
    """
    Return the local applications whose data can be reset.
    """
    local_apps = {}
    for app in apps.get_apps():
        app_path = apps._get_app_path(app)
        if app_path.startswith(settings.BASE_DIR):
            app_name = app.__name__.rsplit('.', 1)[0]
            local_apps[app_name] = app_path
    if app_labels:
        result_apps = {}
        for app_label in app_labels:
            if app_label in local_apps:
                result_apps[app_label] = local_apps[app_label]
            else:
                raise CommandError('application %s not found' % app_label)
        else:
            return result_apps
    else:
        return local_apps
def verification_token(request, file):
    """
    Handles the request for SSL token.
    """
    with open(settings.BASE_DIR + '/Plamber/{}'.format(file), 'r') as data:
        return HttpResponse(data.read(), content_type='text/plain')
def setUpClass(cls):
    super(TestMJMLTCPServer, cls).setUpClass()
    root_dir = os.path.dirname(settings.BASE_DIR)
    tcpserver_path = os.path.join(root_dir, 'mjml', 'node', 'tcpserver.js')
    env = os.environ.copy()
    env['NODE_PATH'] = root_dir
    for host, port in mjml_settings.MJML_TCPSERVERS:
        p = subprocess.Popen(['node', tcpserver_path, str(port), host],
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)
        cls.processes.append(p)
    time.sleep(5)
def delete(self, request, *args, **kwargs):
    """
    This does not actually delete the file, only the database record.
    But that is easy to implement.
    """
    self.object = self.get_object()
    individual_id = self.object.id
    if self.object.user:
        username = self.object.user.username
    else:
        username = 'public'
    # delete files
    if self.object.vcf_file:
        self.object.vcf_file.delete()
    # if self.object.strs_file:
    #     self.object.strs_file.delete()
    # if self.object.cnvs_file:
    #     self.object.cnvs_file.delete()
    os.system('rm -rf %s/genomes/%s/%s' % (settings.BASE_DIR, username, individual_id))
    self.object.delete()
    # response = JSONResponse(True, {}, response_mimetype(self.request))
    # response['Content-Disposition'] = 'inline; filename=files.json'
    # return response
    messages.add_message(request, messages.INFO, "Individual deleted with success!")
    # return redirect('individuals_list')
    return redirect('individuals_list')
def clean_individuals():
    print("Running periodic task!")
    individuals = Individual.objects.filter(user=None)
    for individual in individuals:
        time_difference = datetime.datetime.now() - individual.creation_date
        if time_difference.days > 0:
            # delete individuals (use the current individual's id when removing its directory)
            os.system('rm -rf %s/genomes/public/%s' % (settings.BASE_DIR, individual.id))
            individual.delete()
def get_upload_path(self, filename):
    if self.user != None:
        string = "%s/genomes/%s/%s/%s" % (settings.BASE_DIR, slugify(self.user.username),
                                          self.id, filename)  # .replace(' ', '_')
    else:
        string = "%s/genomes/public/%s/%s" % (settings.BASE_DIR, self.id, filename)  # .replace(' ', '_')
    print('string', string)
    return string
def download_files(self):
    print('Download Files')
    file_list = open('%s/data/files/all_files.txt' % (settings.BASE_DIR), 'w')
    s3credentials = S3Credential.objects.all()
    for s3credential in s3credentials:
        print(s3credential.name)
        for bucket_name in s3credential.buckets.splitlines():
            session = boto3.Session(
                aws_access_key_id=s3credential.access_key,
                aws_secret_access_key=s3credential.secret_key
            )
            s3 = session.resource('s3')
            bucket = s3.Bucket(bucket_name)
            print(bucket)
            for key in bucket.objects.all():
                if key.size != 0:
                    file = [str(key.last_modified), str(key.size), bucket.name, key.key]
                    file_list.writelines('%s\n' % ('\t'.join(file)))
    self.stdout.write(self.style.SUCCESS('Successfully downloaded files!'))
def django_db_setup():
    # Django database settings use the uppercase 'NAME' key.
    settings.DATABASES['default']['NAME'] = os.path.join(
        settings.BASE_DIR, 'db.sqlite3')
def handle(self, *args, **options):
    call_subprocess('./node_modules/.bin/webpack --config webpack.config.js')
    for each in settings.WEB_PACK_FILES:
        directory = settings.BASE_DIR + '/static/webpack_bundles/'
        css_file = max([os.path.join(directory, d) for d in os.listdir(directory)
                        if d.startswith(each['webpack_js']) and d.endswith('css')],
                       key=os.path.getmtime)
        js_file = max([os.path.join(directory, d) for d in os.listdir(directory)
                       if d.startswith(each['webpack_js']) and d.endswith('js')],
                      key=os.path.getmtime)
        if settings.ENABLE_DJANGO_WEBPACK_S3_STORAGES:
            upload_to_s3(css_file)
            upload_to_s3(js_file)
        import re
        regex = r'(.*?<link rel="stylesheet" type="text/css" href=")(.*?)(" id="packer_css"/>.*?<script id="packer_js" src=")(.*?)(" type="text/javascript"></script>.*)'
        with open(each['html_file_name'], 'r+') as f:
            content = f.read()
            m = re.match(regex, content, re.DOTALL)
            href = settings.STATIC_URL + css_file.split('/static/')[-1]
            src = settings.STATIC_URL + js_file.split('/static/')[-1]
            content = m.group(1) + href + m.group(3) + src + m.group(5)
        with open(each['html_file_name'], 'w') as f:
            f.write(content)
    result = {'message': "Successfully Created Compressed CSS, JS Files"}
    return json.dumps(result)
def get_next_migration_filename(app_name, connection=None, migration_type='data'):
    '''
    Return the name (including the absolute path) of the next migration to insert for this app
    '''
    latest_migration_name = get_latest_migration(app_name)
    next_migration_name = '{0:04d}_i18n_{1}_migration.py'.format(
        int(latest_migration_name[0:4]) + 1, migration_type
    )
    app_path = os.path.join(*apps.get_app_config(app_name).name.split('.'))
    return os.path.join(settings.BASE_DIR, app_path, 'migrations', next_migration_name)
def webpack_dev_server(config_path=None):
    config_path = config_path or 'webpack.config.js'
    with open(config_path, 'r') as f:
        config = f.read()
    munged = get_munged_config(config)
    handle, name = tempfile.mkstemp(prefix='webpack-config')
    with open(name, 'w') as f:
        f.write(munged)
    result = subprocess.run(['npm', 'bin'], stdout=subprocess.PIPE)
    bin_path = result.stdout.decode().rstrip()
    dev_server_path = os.path.join(bin_path, 'webpack-dev-server')
    args = [dev_server_path, '--config', name, '--hot']
    return subprocess.Popen(
        args,
        cwd=settings.BASE_DIR,
        stdout=subprocess.PIPE,
        env={
            'NODE_PATH': os.path.join(settings.BASE_DIR, 'node_modules')
        }
    )
def contribute_json(request):
    """The advantage of having our own custom view over using
    django.view.static.serve is that we get the right content-type,
    and as a view we can write a unit test that checks that the JSON
    is valid and can be deserialized."""
    with open(os.path.join(settings.BASE_DIR, 'contribute.json')) as f:
        contribute_json_dict = json.load(f)
    return http.JsonResponse(
        contribute_json_dict,
        json_dumps_params={'indent': 3}
    )
def current_versions(request):
    """Return a JSON dict of a selection of keys and their versions."""
    context = {
        'versions': []
    }
    with connection.cursor() as cursor:
        cursor.execute('select version()')
        row = cursor.fetchone()
        value, = row
        context['versions'].append({
            'key': 'PostgreSQL',
            'value': value.split(' on ')[0].replace('PostgreSQL', '').strip()
        })
    context['versions'].append({
        'key': 'Tecken',
        'value': dockerflow_get_version(settings.BASE_DIR)
    })
    context['versions'].append({
        'key': 'Django',
        'value': get_version(),
    })
    redis_store_info = get_redis_connection('store').info()
    context['versions'].append({
        'key': 'Redis Store',
        'value': redis_store_info['redis_version']
    })
    try:
        redis_cache_info = get_redis_connection('default').info()
    except NotImplementedError:
        redis_cache_info = {'redis_version': 'fakeredis'}
    context['versions'].append({
        'key': 'Redis Cache',
        'value': redis_cache_info['redis_version']
    })
    context['versions'].sort(key=lambda x: x['key'])
    return http.JsonResponse(context)
def get_python_rpc_source_path():
    return os.path.join(settings.BASE_DIR, "tasks", "rpc.py")
def get_markdown_directory(self):
    return os.path.join(
        settings.BASE_DIR,
        'apps',
        'api',
        'documentation',
    )
def get_markdown_directory(self):
    return os.path.join(
        settings.BASE_DIR,
        'apps',
        'staticpages',
        'pages',
    )
def get_default_text(file_name):
    TEMPLATE_DIR = os.path.join(settings.BASE_DIR, 'templates')
    with open(os.path.join(TEMPLATE_DIR, file_name), 'r') as template:
        output = join_as_compacted_paragraphs(template.readlines())
    return output

# Model Fields
def create_states(apps, schema_editor):
    State = apps.get_model('core', 'State')
    fixture_file = ('InternetSemLimites', 'core', 'fixtures', 'states.csv')
    fixture_path = path.join(settings.BASE_DIR, *fixture_file)
    with open(fixture_path, encoding='utf-8') as fh:
        for line in reader(fh):
            State.objects.create(name=line[1], abbr=line[0])
def remove_media_archive(self):
    filename = safe_join(settings.BASE_DIR, "media.zip")
    has_file = os.path.exists(filename)
    if has_file is True:
        print("Removing old media archive ...")
        os.remove(filename)
def zipdir(self):
    shutil.make_archive("media", 'zip', safe_join(
        settings.BASE_DIR, "media"))
def get_country_income_thresholds_data():
    """
    Returns a list of dictionaries of data imported from
    financialaid/fixtures/country_income_threshold_data.json.
    """
    fixture_path = os.path.join(settings.BASE_DIR,
                                "financialaid/fixtures/country_income_threshold_data.json")
    with open(fixture_path, "r") as f:
        country_data = json.loads(f.read())
    return [
        {
            "country_code": country["fields"]["country_code"],
            "income_threshold": DEFAULT_INCOME_THRESHOLD
        }
        for country in country_data
    ]
def test_exam_read_no_shows(self):
    """Test that a typical no-show result from Pearson does not result in any errors"""
    test_file_path = '{}/exams/pearson/test_resources/noshow.dat'.format(settings.BASE_DIR)
    reader = EXAMReader()
    with open(test_file_path, 'r') as test_file:
        results = reader.read(test_file)
    # Assert that there are no error messages in the results tuple
    assert len(results[1]) == 0