def __init__(self, module, rootPupyPath):
    """
    Cache the local/remote paths and constants used by the UAC bypass.

    module       -- pupy module instance; its client connection resolves
                    remote environment variables and builds remote paths.
    rootPupyPath -- local filesystem path to the pupy project root.
    """
    self.module = module
    self.rootPupyPath = rootPupyPath
    # Constants. Both paths now use escaped backslashes consistently; the
    # original x86 path relied on "\W" happening not to be an escape.
    self.x86PowershellPath = "syswow64\\WindowsPowerShell\\v1.0\\powershell.exe"
    self.x64PowershellPath = "system32\\WindowsPowerShell\\v1.0\\powershell.exe"
    # Remote paths, resolved on the target machine.
    remote_os_path = self.module.client.conn.modules['os.path']
    self.remoteTempFolder = remote_os_path.expandvars("%TEMP%")
    self.systemRoot = remote_os_path.expandvars("%SYSTEMROOT%")
    # Random remote file names. BUGFIX: use "{0}{1}" (not "{0}.{1}")
    # because the extension strings already start with a dot; the old
    # format produced names like "abcdef..txt".
    self.invokeReflectivePEInjectionRemotePath = "{0}{1}".format(
        remote_os_path.join(self.remoteTempFolder, next(_get_candidate_names())), '.txt')
    self.mainPowershellScriptRemotePath = "{0}{1}".format(
        remote_os_path.join(self.remoteTempFolder, next(_get_candidate_names())), '.ps1')
    self.pupyDLLRemotePath = "{0}{1}".format(
        remote_os_path.join(self.remoteTempFolder, next(_get_candidate_names())), '.txt')
    self.invokeBypassUACRemotePath = "{0}{1}".format(
        remote_os_path.join(self.remoteTempFolder, next(_get_candidate_names())), '.ps1')
    # Local paths.
    self.pupyDLLLocalPath = os.path.join(gettempdir(), 'dllFile.txt')
    self.mainPowerShellScriptPrivilegedLocalPath = os.path.join(gettempdir(), 'mainPowerShellScriptPrivileged.txt')
    self.invokeReflectivePEInjectionLocalPath = os.path.join(self.rootPupyPath, "pupy", "external", "PowerSploit", "CodeExecution", "Invoke-ReflectivePEInjection.ps1")
    self.invokeBypassUACLocalPath = os.path.join(rootPupyPath, "pupy", "external", "Empire", "privesc", "Invoke-BypassUAC.ps1")
    # Others.
    self.HKCU = self.module.client.conn.modules['_winreg'].HKEY_CURRENT_USER
    # BUGFIX: the 32-bit branch previously assigned a bare local
    # ("powershellPath" without "self."), leaving the attribute unset.
    if "64" in self.module.client.desc['proc_arch']:
        self.powershellPath = remote_os_path.join(self.systemRoot, self.x64PowershellPath)
    else:
        self.powershellPath = remote_os_path.join(self.systemRoot, self.x86PowershellPath)
Collected Python usage examples of tempfile._get_candidate_names().
def upx_unpack(self, seed=None):
    """
    Try to unpack self.bytez with the external ``upx -d`` tool.

    Returns the unpacked bytes on success, or the original ``self.bytez``
    unchanged when UPX fails (e.g. the input was not UPX-packed).
    ``seed`` is unused; kept for interface compatibility with upx_pack.
    """
    # mkstemp creates the file atomically, closing the TOCTOU race of
    # _get_candidate_names() followed by open().
    fd, tmpfilename = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as outfile:
            outfile.write(self.bytez)
        with open(os.devnull, 'w') as devnull:
            retcode = subprocess.call(
                ['upx', tmpfilename, '-d', '-o', tmpfilename + '_unpacked'],
                stdout=devnull, stderr=devnull)
    finally:
        # The input copy is always removed, even if upx cannot be spawned.
        os.unlink(tmpfilename)
    if retcode == 0:  # successfully unpacked
        with open(tmpfilename + '_unpacked', 'rb') as result:
            self.bytez = result.read()
        os.unlink(tmpfilename + '_unpacked')
    return self.bytez
def test_validate_file_or_dict(self):
    """validate_file_or_dict must expand '~' in file paths but leave
    inline JSON-ish payloads untouched."""
    # A file referenced via '~' should be expanded before loading.
    name = next(tempfile._get_candidate_names())
    tilde_path = '~/' + name
    expanded = os.path.expanduser(tilde_path)
    with open(expanded, 'w') as handle:
        handle.write('{"prop":"val"}')
    try:
        loaded = validate_file_or_dict(tilde_path)
        self.assertEqual(loaded['prop'], "val")
    finally:
        os.remove(expanded)
    # Inline JSON containing '~' must not be expanded (double quotes)...
    self.assertEqual(validate_file_or_dict('{"~d": "~/haha"}')['~d'], '~/haha')
    # ...nor when single quotes are used.
    self.assertEqual(validate_file_or_dict("{'~d': '~/haha'}")['~d'], '~/haha')
def test_file_loading1(self):
    """ChainConsumer must load chain data from a .txt file written by np.savetxt."""
    data = self.data[:1000]
    directory = tempfile._get_default_tempdir()
    filename = os.path.join(directory, next(tempfile._get_candidate_names()) + ".txt")
    np.savetxt(filename, data)
    try:
        consumer = ChainConsumer()
        consumer.add_chain(filename)
        summary = consumer.analysis.get_summary()
        actual = np.array(list(summary.values())[0])
        assert np.abs(actual[1] - 5.0) < 0.5
    finally:
        # BUGFIX: the temporary chain file used to be left behind.
        os.remove(filename)
def test_file_loading2(self):
    """ChainConsumer must load chain data from a .npy file written by np.save."""
    data = self.data[:1000]
    directory = tempfile._get_default_tempdir()
    filename = os.path.join(directory, next(tempfile._get_candidate_names()) + ".npy")
    np.save(filename, data)
    try:
        consumer = ChainConsumer()
        consumer.add_chain(filename)
        summary = consumer.analysis.get_summary()
        actual = np.array(list(summary.values())[0])
        assert np.abs(actual[1] - 5.0) < 0.5
    finally:
        # BUGFIX: the temporary chain file used to be left behind.
        os.remove(filename)
def get_temp_file_name(tmp_dir=None, extension=''):
    """Return an available name for a temporary file.

    tmp_dir   -- directory for the file; falls back to the system temp dir.
    extension -- optional extension, without the leading dot.

    BUGFIX: an empty extension (the default) no longer yields a name with
    a trailing dot. NOTE: the name is not reserved on disk, so this is
    race-prone by design, like the private tempfile APIs it uses.
    """
    tmp_name = next(tempfile._get_candidate_names())
    if not tmp_dir:
        tmp_dir = tempfile._get_default_tempdir()
    if extension:
        tmp_name = tmp_name + '.' + extension
    return os.path.join(tmp_dir, tmp_name)
def get_temp_file_name(tmp_dir=None, extension=''):
    """Return an available name for a temporary file.

    Prefers iCount.TMP_ROOT when no directory is given, falling back to
    the system temp dir if TMP_ROOT is itself empty/None.
    BUGFIX: an empty extension (the default) no longer produces a name
    with a trailing dot.
    """
    if tmp_dir is None:
        tmp_dir = iCount.TMP_ROOT
    # pylint: disable=protected-access
    tmp_name = next(tempfile._get_candidate_names())
    if not tmp_dir:
        # pylint: disable=protected-access
        tmp_dir = tempfile._get_default_tempdir()
    if extension:
        tmp_name = tmp_name + '.' + extension
    return os.path.join(tmp_dir, tmp_name)
def process(self, fuzzresult):
    """Screenshot fuzzresult.url with cutycapt and record the output path.

    BUGFIXES: the URL is now passed verbatim — shell-quoting it with
    pipes.quote is wrong when subprocess receives an argument list (no
    shell is involved) and corrupted URLs containing special characters.
    Also fixed the "Screnshot" typo in the user-visible message.
    """
    temp_name = next(tempfile._get_candidate_names())
    default_tmp_dir = tempfile._get_default_tempdir()
    filename = os.path.join(default_tmp_dir, temp_name + ".png")
    subprocess.call(['cutycapt', '--url=%s' % fuzzresult.url, '--out=%s' % filename])
    self.add_result("Screenshot taken, output at %s" % filename)
def upx_pack(self, seed=None):
    """
    Pack self.bytez with the external ``upx`` tool (tested with UPX 3.91),
    choosing randomized compression options driven by ``seed``.

    Returns the packed bytes on success, or the original ``self.bytez``
    unchanged when UPX fails.
    """
    random.seed(seed)  # NOTE: reseeds the global RNG as a side effect
    # Randomized UPX options. Order of randint() calls is preserved so a
    # given seed still selects the same options as before.
    options = ['--force', '--overlay=copy']
    # compression levels -1 .. -9
    options += ['-{}'.format(random.randint(1, 9))]
    # optional knobs:
    #   --compress-exports=0/1, --compress-icons=0/1/2/3,
    #   --compress-resources=0/1, --strip-relocs=0/1
    options += ['--compress-exports={}'.format(random.randint(0, 1))]
    options += ['--compress-icons={}'.format(random.randint(0, 3))]
    options += ['--compress-resources={}'.format(random.randint(0, 1))]
    options += ['--strip-relocs={}'.format(random.randint(0, 1))]
    # mkstemp creates the file atomically, closing the TOCTOU race of
    # _get_candidate_names() followed by open().
    fd, tmpfilename = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as outfile:
            outfile.write(self.bytez)
        with open(os.devnull, 'w') as devnull:
            retcode = subprocess.call(
                ['upx'] + options + [tmpfilename, '-o', tmpfilename + '_packed'],
                stdout=devnull, stderr=devnull)
    finally:
        # The input copy is always removed, even if upx cannot be spawned.
        os.unlink(tmpfilename)
    if retcode == 0:  # successfully packed
        with open(tmpfilename + '_packed', 'rb') as infile:
            self.bytez = infile.read()
        os.unlink(tmpfilename + '_packed')
    return self.bytez
Source: check-google-java-format.py — project: run-google-java-format (author: plume-lib).
def temporary_file_name():
    """Build a fresh candidate file name inside the module-level temp_dir."""
    candidate = next(tempfile._get_candidate_names())
    return os.path.join(temp_dir, candidate)
def test_retval(self):
    """_get_candidate_names must hand back a _RandomNameSequence."""
    names = tempfile._get_candidate_names()
    self.assertIsInstance(names, tempfile._RandomNameSequence)
def test_same_thing(self):
    """_get_candidate_names must return one shared generator object."""
    first = tempfile._get_candidate_names()
    second = tempfile._get_candidate_names()
    self.assertTrue(first is second)
def test_retval(self):
    """The candidate-name source must be a _RandomNameSequence instance."""
    sequence = tempfile._get_candidate_names()
    self.assertIsInstance(sequence, tempfile._RandomNameSequence)
def test_same_thing(self):
    """Repeated calls must yield the identical singleton object."""
    call_one = tempfile._get_candidate_names()
    call_two = tempfile._get_candidate_names()
    self.assertTrue(call_one is call_two)
def _mock_candidate_names(*names):
    """Context manager that makes tempfile's name generator yield *names*."""
    fake_source = lambda: iter(names)
    return support.swap_attr(tempfile, '_get_candidate_names', fake_source)
def test_retval(self):
    """Verify the return type of tempfile._get_candidate_names()."""
    result = tempfile._get_candidate_names()
    self.assertIsInstance(result, tempfile._RandomNameSequence)
def test_same_thing(self):
    """tempfile caches a single candidate-name object across calls."""
    obj_a = tempfile._get_candidate_names()
    obj_b = tempfile._get_candidate_names()
    self.assertTrue(obj_a is obj_b)
def _mock_candidate_names(*names):
    """Temporarily replace tempfile's candidate-name generator with *names*."""
    def replacement():
        return iter(names)
    return support.swap_attr(tempfile, '_get_candidate_names', replacement)
Source: bigip_config.py — project: f5-automation-workflows-multicloud (author: f5devcentral).
def merge(self, verify=True):
    """Upload a config payload to the BIG-IP and merge it via the REST API.

    Returns True with no side effects when running in check mode;
    otherwise returns the device's merge response.

    NOTE(review): the file is uploaded under /var/config/rest/downloads
    and then move_on_device() is called with that path, while the merge
    and cleanup operate on '/tmp/<name>' — presumably move_on_device
    relocates the upload to /tmp. Confirm against the helper methods.
    """
    # Random name avoids clobbering other uploads on the device.
    temp_name = next(tempfile._get_candidate_names())
    remote_path = "/var/config/rest/downloads/{0}".format(temp_name)
    temp_path = '/tmp/' + temp_name
    if self.client.check_mode:
        return True
    self.upload_to_device(temp_name)
    self.move_on_device(remote_path)
    response = self.merge_on_device(
        remote_path=temp_path, verify=verify
    )
    # Best-effort cleanup of the staged file after the merge.
    self.remove_temporary_file(remote_path=temp_path)
    return response
def merge(self, verify=True):
    """Stage a config file on the BIG-IP and merge it into the running config.

    verify -- forwarded to merge_on_device (verification flag).
    Returns True immediately in check mode, otherwise the merge response.

    NOTE(review): upload lands in /var/config/rest/downloads and
    move_on_device() receives that path, yet merge/cleanup use
    '/tmp/<name>' — this only works if move_on_device moves the file to
    /tmp. Verify against the helper implementations.
    """
    temp_name = next(tempfile._get_candidate_names())
    remote_path = "/var/config/rest/downloads/{0}".format(temp_name)
    temp_path = '/tmp/' + temp_name
    if self.client.check_mode:
        return True
    self.upload_to_device(temp_name)
    self.move_on_device(remote_path)
    response = self.merge_on_device(
        remote_path=temp_path, verify=verify
    )
    # Clean up the staged file once the merge has been attempted.
    self.remove_temporary_file(remote_path=temp_path)
    return response
def src(self):
    """Return the configured source name, or a fresh random candidate when unset."""
    configured = self._values['src']
    if configured is not None:
        return configured
    return next(tempfile._get_candidate_names())
def merge(self, verify=True):
    """Upload and merge a configuration file on the managed BIG-IP device.

    In check mode nothing is sent and True is returned; otherwise the
    device's merge response is returned.

    NOTE(review): remote_path (/var/config/rest/downloads/...) is passed
    to move_on_device while merge/cleanup use temp_path (/tmp/...) —
    presumably the move relocates the upload into /tmp. TODO confirm.
    """
    temp_name = next(tempfile._get_candidate_names())
    remote_path = "/var/config/rest/downloads/{0}".format(temp_name)
    temp_path = '/tmp/' + temp_name
    if self.client.check_mode:
        return True
    self.upload_to_device(temp_name)
    self.move_on_device(remote_path)
    response = self.merge_on_device(
        remote_path=temp_path, verify=verify
    )
    # Remove the staged file after attempting the merge.
    self.remove_temporary_file(remote_path=temp_path)
    return response
def test_retval(self):
    """_get_candidate_names() is expected to return a _RandomNameSequence."""
    candidate_source = tempfile._get_candidate_names()
    self.assertIsInstance(candidate_source, tempfile._RandomNameSequence)
def test_same_thing(self):
    """Every call to _get_candidate_names() returns the same cached object."""
    left = tempfile._get_candidate_names()
    right = tempfile._get_candidate_names()
    self.assertTrue(left is right)
def _mock_candidate_names(*names):
    """Swap tempfile's candidate-name generator for one yielding *names*."""
    return support.swap_attr(
        tempfile, '_get_candidate_names', lambda: iter(names))
def py_func(func, inp, Tout, name=None, grad=None):
    """Wrapper around tf.py_func that also registers `grad` as its gradient.

    A uniquely named gradient op is registered, then PyFunc is remapped to
    it inside a gradient_override_map so the returned op uses `grad`.
    """
    suffix = next(tempfile._get_candidate_names())
    grad_name = 'PyFuncGrad%s' % suffix
    tf.RegisterGradient(grad_name)(grad)
    graph = tf.get_default_graph()
    with graph.gradient_override_map({"PyFunc": grad_name}):
        return tf.py_func(func, inp, Tout, name=name)
def _find_attachment_ids_email(self):
    """Extend the e-mail attachments with a rendered DANFE PDF.

    Only invoices whose model is '009' get the extra PDF (rendered from
    self.url_danfe via wkhtmltopdf); other models return the parent
    result untouched. Raises UserError when wkhtmltopdf fails.
    """
    atts = super(InvoiceEletronic, self)._find_attachment_ids_email()
    attachment_obj = self.env['ir.attachment']
    # BUGFIX: ('009') is just the string '009', so `not in ('009')` was a
    # substring test (e.g. model '0' wrongly passed). A one-element tuple
    # is what was intended.
    if self.model not in ('009',):
        return atts
    tmp = tempfile._get_default_tempdir()
    temp_name = os.path.join(tmp, next(tempfile._get_candidate_names()))
    command_args = ["--dpi", "84", str(self.url_danfe), temp_name]
    wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args
    process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    out, err = process.communicate()
    # wkhtmltopdf uses 1 for "finished with warnings"; treat it as success.
    if process.returncode not in [0, 1]:
        raise UserError(_('Wkhtmltopdf failed (error code: %s). '
                          'Message: %s') % (str(process.returncode), err))
    tmpDanfe = None
    # BUGFIX: read the PDF in binary mode — text mode corrupts binary data
    # and base64.b64encode requires bytes on Python 3.
    with open(temp_name, 'rb') as f:
        tmpDanfe = f.read()
    try:
        os.unlink(temp_name)
    except (OSError, IOError):
        _logger.error('Error when trying to remove file %s' % temp_name)
    if tmpDanfe:
        danfe_id = attachment_obj.create(dict(
            name="Danfe-%08d.pdf" % self.numero,
            datas_fname="Danfe-%08d.pdf" % self.numero,
            datas=base64.b64encode(tmpDanfe),
            mimetype='application/pdf',
            res_model='account.invoice',
            res_id=self.invoice_id.id,
        ))
        atts.append(danfe_id.id)
    return atts
def process(self, fuzzresult):
    """Capture a screenshot of fuzzresult.url via cutycapt and report the file.

    BUGFIXES: dropped pipes.quote — subprocess receives an argument list
    (no shell), so shell-quoting the URL corrupted it for URLs containing
    special characters. Fixed the "Screnshot" typo in the result message.
    """
    temp_name = next(tempfile._get_candidate_names())
    default_tmp_dir = tempfile._get_default_tempdir()
    filename = os.path.join(default_tmp_dir, temp_name + ".png")
    subprocess.call(['cutycapt', '--url=%s' % fuzzresult.url, '--out=%s' % filename])
    self.add_result("Screenshot taken, output at %s" % filename)
def write_s3(bucket, key, rekognition_faces_response_json, rekognition_faces_response_csv):
    """Persist Rekognition results to S3 as csv/<key>.csv and json/<key>.json.

    Each payload is written to a scratch file under /tmp (Lambda's only
    writable path) and that file is uploaded with upload_fileobj.
    """
    print('inside write s3 function (bucket: {}, key: {}, json: {}, csv: {})'
          .format(bucket, key, rekognition_faces_response_json, rekognition_faces_response_csv))
    tmp_filename = 'tmp{}'.format(next(tempfile._get_candidate_names()))
    print('tmp filename: {}'.format(tmp_filename))
    csv_path = '/tmp/{}.csv'.format(tmp_filename)
    json_path = '/tmp/{}.json'.format(tmp_filename)
    # write csv file, then upload it.
    with open(csv_path, 'w') as f:
        f.write(rekognition_faces_response_csv)
    # BUGFIX: boto3 upload_fileobj expects a binary-mode file object; the
    # original reopened the files in text mode ('r').
    with open(csv_path, 'rb') as f:
        s3client.upload_fileobj(f, Bucket=bucket, Key='csv/' + key + '.csv')
    # write json file, then upload it.
    with open(json_path, 'w') as f:
        f.write(rekognition_faces_response_json)
    with open(json_path, 'rb') as f:
        s3client.upload_fileobj(f, Bucket=bucket, Key='json/' + key + '.json')
# --------------- Main handler ------------------
def generate_tmp_filename(extension):
    """Return a random file path in the default temp dir with the given
    extension (passed without the leading dot).

    Uses os.path.join instead of manual "/" concatenation for portability.
    NOTE: the name is not reserved on disk, so it is race-prone by design.
    """
    directory = tempfile._get_default_tempdir()
    name = next(tempfile._get_candidate_names())
    return os.path.join(directory, "{0}.{1}".format(name, extension))