def handle(self, *args, **options):
    """Activate the first enabled OpenVPN config and launch openvpn."""
    ovpn = Ovpn.objects.filter(activated=True)
    if ovpn.exists():
        self._kill_old_process()
        ovpn = ovpn[0]
        print("Config: {0.path}".format(ovpn.file), file=sys.stdout)
        auth_filepath = os.path.join(settings.BASE_DIR, "vpn{0.vpn.pk}.auth.txt".format(ovpn))
        # write the credentials file referenced by the config
        with open(auth_filepath, "w") as auth:
            auth.write(ovpn.vpn.username + '\n')
            auth.write(ovpn.vpn.password + '\n')
        # get file content
        with open(ovpn.file.path, "r") as vpn:
            vpn_file_content = vpn.readlines()
        # point the auth parameter at the credentials file
        for index, line in enumerate(vpn_file_content):
            if re.match(self.vpn_param + '.*', line):
                vpn_file_content[index] = "{0.vpn_param} {1:s}\n".format(self, auth_filepath)
                break
        # write new data
        with open(ovpn.file.path, "w") as vpn:
            vpn.write(''.join(vpn_file_content))
        # vpn activate
        sh.openvpn(ovpn.file.path, _out=sys.stdout)
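Assuming this handle() belongs to a Django management command, it can also be invoked programmatically; the command name below is hypothetical, since the real name is the filename of the module under management/commands/:

from django.core.management import call_command

# 'activate_vpn' is a hypothetical name; substitute the actual module name
call_command('activate_vpn')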
def archive_replicas(self):
    filename = safe_join(settings.BASE_DIR, "replica.sqlite3")
    if not os.path.exists(filename):
        return
    dirpath = safe_join(settings.BASE_DIR, "replicas")
    if not os.path.exists(dirpath):
        try:
            print("Creating replicas archive directory ...")
            os.makedirs(dirpath)
        except OSError as exc:  # guard against race condition
            if exc.errno != errno.EEXIST:
                raise
    # timestamp the replica so successive archives do not collide
    newname = "replica." + str(int(time.time())) + ".sqlite3"
    src = safe_join(settings.BASE_DIR, newname)
    # rename via absolute paths so this works regardless of the cwd
    os.rename(filename, src)
    print("Archiving current replica ...")
    shutil.move(src, dirpath)
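safe_join works like os.path.join but raises SuspiciousFileOperation when the result would escape the base directory, which is why it is paired with BASE_DIR here. A quick illustration:

from django.utils._os import safe_join

safe_join('/srv/app', 'replicas')       # '/srv/app/replicas'
safe_join('/srv/app', '../etc/passwd')  # raises SuspiciousFileOperation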
def test_templates_render_successfully():
    template_list = []
    template_dirs = [
        os.path.join(settings.BASE_DIR, 'enrolment/templates'),
        os.path.join(settings.BASE_DIR, 'supplier/templates'),
    ]
    for template_dir in template_dirs:
        for dirpath, dirnames, filenames in os.walk(template_dir):
            for filename in filenames:
                path = os.path.join(dirpath, filename).replace(template_dir, '')
                template_list.append(path.lstrip('/'))
    default_context = {
        'supplier': None,
        'form': Form(),
    }
    assert template_list
    for template in template_list:
        render_to_string(template, default_context)
def create_folders():
    """
    Creates required directories.
    """
    folders = ["data", "data/incoming", "data/garch", "data/incoming_pickled",
               "data/incoming_pickled/csv", "data/indicators", "data/indicators/csv",
               "data/monte_carlo", "data/monte_carlo/indicators", "data/monte_carlo/systems",
               "data/monte_carlo/performance", "data/monte_carlo/avg", "data/performance",
               "data/performance/csv", "data/portfolios", "data/portfolios/csv",
               "data/quandl", "data/quandl/csv", "data/systems", "data/systems/csv",
               "data/systems/json"]
    for folder in folders:
        try:
            # pass an argument list; a bare string without shell=True would be
            # treated as the program name itself
            Popen(["mkdir", "{0}/{1}".format(settings.BASE_DIR, folder)])
        except Exception as err:
            print(colored.red("create_folders {}".format(err)))
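Shelling out to mkdir spawns a process per directory and is not portable; a stdlib-only sketch of the same job (make_folders is a hypothetical name, and exist_ok assumes Python 3.2+):

import os

def make_folders(folders, base_dir):
    # create each directory, silently skipping ones that already exist
    for folder in folders:
        os.makedirs(os.path.join(base_dir, folder), exist_ok=True)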
def create_fixture(self):
    """Create the fixture using the dumpdata command"""
    excluded = [
        'admin',
        'auth.Permission',
        'contenttypes',
        'sessions',
        'wagtailcore.grouppagepermission',
        'wagtailcore.groupcollectionpermission',
    ]
    path = os.path.join(settings.BASE_DIR, 'tests/fixtures/basic_site.json')
    call_command(
        'dumpdata',
        exclude=excluded,
        natural_foreign=True,
        indent=2,
        output=path
    )
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['ImageData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'ImageData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'ImageData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'ImageData')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Data']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Data')
    # Test 2
    net['l0']['info']['phase'] = 0
    net['l0']['params']['mean_value'] = ''
    net['l0']['params']['mean_file'] = '/path/to/mean/file'
    net['l0']['params']['backend'] = "LEVELDB"
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Data')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Data')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['HDF5Data']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'HDF5Data')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'HDF5Data')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'HDF5Data')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['HDF5Output']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'HDF5Output')
    # Test 2
    net['l1']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'HDF5Output')
    # Test 3
    net['l1']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'HDF5Output')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['WindowData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'WindowData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'WindowData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'WindowData')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['MemoryData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'MemoryData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'MemoryData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'MemoryData')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['DummyData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'DummyData')
    # Test 2
    net['l0']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'DummyData')
    # Test 3
    net['l0']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'DummyData')
# ********** Vision Layers Test **********
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['Pooling']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Pooling')
    # Test 2
    net['l1']['params']['pool'] = 'AVE'
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Pooling')
    # Test 3
    net['l1']['params']['pool'] = 'STOCHASTIC'
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Pooling')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['LRN']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'LRN')
    # Test 2
    net['l1']['params']['norm_region'] = 'ACROSS_CHANNELS'
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'LRN')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['Accuracy']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Accuracy')
    # Test 2
    net['l1']['info']['phase'] = 0
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Accuracy')
    # Test 3
    net['l1']['info']['phase'] = 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l1']['info']['type'], 'Accuracy')
def test_json_to_prototxt(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                              'caffe_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['PythonData']}
    # Test 1
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Python')
    # Test 2
    net['l0']['params']['endPoint'] = "1, 0"
    prototxt, input_dim = json_to_prototxt(net, response['net_name'])
    self.assertGreater(len(prototxt), 9)
    self.assertEqual(net['l0']['info']['type'], 'Python')
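The fixture-loading preamble is identical in every json_to_prototxt test above; a hypothetical helper such as load_test_net could factor it out:

import json
import os

import yaml
from django.conf import settings

def load_test_net(filename):
    """Load a JSON test fixture and return (net dict, net name)."""
    path = os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide', filename)
    with open(path, 'r') as f:
        response = json.load(f)
    net = yaml.safe_load(json.dumps(response['net']))
    return net, response['net_name']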
def test_caffe_export(self):
    data = L.Input(shape={'dim': [10, 3, 16, 224, 224]})
    top = L.Convolution(data, kernel_size=3, pad=1, stride=1, num_output=128, dilation=1,
                        weight_filler={'type': 'xavier'}, bias_filler={'type': 'constant'})
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    response['net']['l0']['params']['caffe'] = True
    response['net']['l1']['params']['layer_type'] = '3D'
    response['net']['l1']['params']['caffe'] = False
    response = self.client.post(reverse('caffe-export'), {'net': json.dumps(response['net']),
                                                          'net_name': ''})
    response = json.loads(response.content)
    self.assertEqual(response['result'], 'error')
# ********** Data Layers Test **********
def test_caffe_import(self):
    data, label = L.WindowData(source='/dummy/source/', batch_size=32, ntop=2,
                               fg_threshold=0.5, bg_threshold=0.5, fg_fraction=0.25,
                               context_pad=0, crop_mode='warp', cache_images=False,
                               root_folder='/dummy/folder/',
                               transform_param=dict(crop_size=227, mean_value=[104, 117, 123],
                                                    mirror=True, force_color=False,
                                                    force_gray=False))
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(data, label)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 14)
    self.assertEqual(response['result'], 'success')
def test_caffe_import(self):
    # Test 1
    top = L.Pooling(kernel_size=2, pad=0, stride=2, pool=1)
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 4)
    self.assertEqual(response['result'], 'success')
    # Test 2
    top = L.Pooling(kernel_size=2, pad=0, stride=2, pool=2)
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 4)
    self.assertEqual(response['result'], 'success')
def test_caffe_import(self):
    # Test 1
    data = L.Input(shape={'dim': [10, 3, 224, 224]})
    top = L.Python(data, module='pyloss', layer='EuclideanLossLayer', loss_weight=1, name='eucLoss')
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l1']['params']), 4)
    self.assertEqual(response['result'], 'success')
    # Test 2
    top = L.Python(module='pascal_multilabel_datalayers', layer='PascalMultilabelDataLayerSync',
                   param_str="{'pascal_root': '../data/pascal/VOC2007', 'im_shape': [227, 227], "
                             "'split': 'train', 'batch_size': 128}")
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'w') as f:
        f.write(str(to_proto(top)))
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'), 'r')
    response = self.client.post(reverse('caffe-import'), {'file': sample_file})
    response = json.loads(response.content)
    os.remove(os.path.join(settings.BASE_DIR, 'media', 'test.prototxt'))
    self.assertGreaterEqual(len(response['net']['l0']['params']), 6)
    self.assertEqual(response['result'], 'success')
def test_keras_import(self):
    model = Sequential()
    model.add(BatchNormalization(center=True, scale=True, beta_regularizer=regularizers.l2(0.01),
                                 gamma_regularizer=regularizers.l2(0.01),
                                 beta_constraint='max_norm', gamma_constraint='max_norm',
                                 input_shape=(10, 16)))
    model.build()
    json_string = Model.to_json(model)
    with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
        json.dump(json.loads(json_string), out, indent=4)
    sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
    response = self.client.post(reverse('keras-import'), {'file': sample_file})
    response = json.loads(response.content)
    layerId = sorted(response['net'].keys())
    self.assertEqual(response['result'], 'success')
    self.assertEqual(response['net'][layerId[0]]['info']['type'], 'Scale')
    self.assertEqual(response['net'][layerId[1]]['info']['type'], 'BatchNorm')
# ********** Noise Layers **********
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input2'], 'l1': net['InnerProduct']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = dense(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Dense')
    # Test 2
    net['l1']['params']['weight_filler'] = 'glorot_normal'
    net['l1']['params']['bias_filler'] = 'glorot_normal'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = dense(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Dense')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['ReLU']}
    # Test 1
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    temp = activation(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'Activation')
    # Test 2
    net['l1']['params']['negative_slope'] = 1
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    temp = activation(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'LeakyReLU')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['Deconvolution']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = deconvolution(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Conv2DTranspose')
    # Test 2
    net['l1']['params']['weight_filler'] = 'xavier'
    net['l1']['params']['bias_filler'] = 'xavier'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = deconvolution(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[2].__class__.__name__, 'Conv2DTranspose')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input3'], 'l1': net['Embed']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = embed(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'Embedding')
    # Test 2
    net['l1']['params']['input_length'] = None
    net['l1']['params']['weight_filler'] = 'VarianceScaling'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = embed(net['l1'], [inp], 'l1')
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'Embedding')
# ********** Merge Layers Test **********
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['GaussianNoise']}
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    net = gaussian_noise(net['l1'], [inp], 'l1')
    model = Model(inp, net['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'GaussianNoise')
def test_keras_export(self):
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    net = {'l0': net['Input'], 'l1': net['BatchNorm'], 'l2': net['Scale']}
    net['l0']['connection']['output'].append('l1')
    # Test 1
    inp = data(net['l0'], '', 'l0')['l0']
    temp = batch_norm(net['l1'], [inp], 'l1', 'l2', net['l2'])
    model = Model(inp, temp['l2'])
    self.assertEqual(model.layers[1].__class__.__name__, 'BatchNormalization')
    # Test 2
    net['l2']['params']['filler'] = 'VarianceScaling'
    net['l2']['params']['bias_filler'] = 'VarianceScaling'
    inp = data(net['l0'], '', 'l0')['l0']
    temp = batch_norm(net['l1'], [inp], 'l1', 'l2', net['l2'])
    model = Model(inp, temp['l2'])
    self.assertEqual(model.layers[1].__class__.__name__, 'BatchNormalization')
    # Test 3
    inp = data(net['l0'], '', 'l0')['l0']
    temp = batch_norm(net['l1'], [inp], 'l1', 'l0', net['l0'])
    model = Model(inp, temp['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'BatchNormalization')
def find_files(self, root):
    a4js_paths = super().find_files(path.join(
        settings.BASE_DIR, 'node_modules', 'adhocracy4', 'adhocracy4'
    ))
    a4_paths = super().find_files(get_module_dir('adhocracy4'))
    mbjs_paths = super().find_files(path.join(
        settings.BASE_DIR, 'node_modules', 'a4-meinberlin', 'meinberlin'
    ))
    mb_paths = super().find_files(get_module_dir('meinberlin'))
    liqd_product_paths = super().find_files(
        path.relpath(get_module_dir('liqd_product'))
    )
    return a4js_paths + a4_paths + \
        mbjs_paths + mb_paths + \
        liqd_product_paths
def get_resetable_apps(app_labels=()):
    """Return the local applications whose data may be reset."""
    local_apps = {}
    for app in apps.get_apps():
        app_path = apps._get_app_path(app)
        if app_path.startswith(settings.BASE_DIR):
            app_name = app.__name__.rsplit('.', 1)[0]
            local_apps[app_name] = app_path
    if not app_labels:
        return local_apps
    result_apps = {}
    for app_label in app_labels:
        if app_label in local_apps:
            result_apps[app_label] = local_apps[app_label]
        else:
            raise CommandError('application %s not found' % app_label)
    return result_apps
def verification_token(request, file):
    """
    Handles the request for the SSL verification token file.
    """
    with open(settings.BASE_DIR + '/Plamber/{}'.format(file), 'r') as data:
        return HttpResponse(data.read(), content_type='text/plain')
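A sketch of how this view might be routed, assuming a pre-Django-4 URLconf in the same app; the pattern restricts file to verification-style filenames so the view cannot be pointed at arbitrary paths:

from django.conf.urls import url

from . import views

urlpatterns = [
    # hypothetical route: serves e.g. /google1234abcd.html for domain verification
    url(r'^(?P<file>[\w-]+\.(?:html|txt))$', views.verification_token),
]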