def english_g2p(self, text):
    """Convert English text to IPA using flite's ``lex_lookup`` tool.

    The text is normalized and lower-cased first. If ``lex_lookup`` is
    missing or exits non-zero, an empty ARPAbet string is used so the
    caller always receives a (possibly empty) IPA string.
    """
    normalized = self.normalize(text).lower()
    arpa = ''
    try:
        arpa = subprocess.check_output(['lex_lookup', normalized]).decode('utf-8')
    except OSError:
        logging.warning('lex_lookup (from flite) is not installed.')
    except subprocess.CalledProcessError:
        logging.warning('Non-zero exit status from lex_lookup.')
    return self.arpa_to_ipa(arpa)
# Example source snippets for Python's subprocess.check_output()
def devices(self):
    '''get a dict of attached devices. key is the device serial, value is device name.'''
    out = self.run_cmd('devices')
    # A stale adb server prints a notice and restarts; query once more.
    if 'adb server is out of date' in out:
        out = self.run_cmd('devices')
    header = "List of devices attached"
    pos = out.find(header)
    if pos < 0:
        raise EnvironmentError("adb is not working.")
    result = {}
    for raw in out[pos + len(header):].strip().splitlines():
        stripped = raw.strip()
        # Skip blank lines and daemon-status lines such as "* daemon ...".
        if not stripped or stripped.startswith('*'):
            continue
        serial, name = raw.split("\t")
        result[serial] = name
    return result
def run_nmap(net):
    """Run an nmap version scan of ports 22-443 against *net*.

    Returns the XML report (``-oX -``) as bytes. On a non-zero nmap
    exit status an error is printed and the process exits with code 1.
    """
    try:
        out = subprocess.check_output(["nmap", "-oX", "-", "-R", "-p", "22-443", "-sV", net])
    # Bug fix: the exception class must be referenced through the
    # subprocess module; a bare ``CalledProcessError`` is a NameError
    # here unless it was imported separately.
    except subprocess.CalledProcessError:
        print("Error in caller\n")
        exit(1)
    return out
def parent_identical_or_crashes(self, crash, parent):
    """Return True when *parent* is unusable as a non-crashing ancestor of *crash*.

    That is the case when the parent's filename itself carries crash
    signature info, when the two files are byte-identical, or when a
    coverage dry run on the parent crashes the target binary.
    """
    cbasename = os.path.basename(crash)
    pbasename = os.path.basename(parent)
    # Queue filenames that embed crash signature info cannot be parents.
    if self.find_crash_parent_regex.match(pbasename):
        self.logr("Parent ({}) looks like crashing input!".format(pbasename))
        return True
    try:
        # Bug fix: arg-list invocation instead of a shell-interpolated
        # string (robust against spaces/metacharacters in paths), and
        # Python-3 ``except ... as`` syntax narrowed to the exception
        # diff actually raises on differing files.
        diff_out = subprocess.check_output(["diff", "-q", crash, parent],
                                           stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        diff_out = e.output
    # ``diff -q`` prints nothing for identical files.
    if not diff_out.decode('utf-8', 'replace').rstrip("\n"):
        self.logr("Crash file ({}) and parent ({}) are identical!"
                  .format(cbasename, pbasename))
        return True
    cov_cmd = self.coverage_cmd.replace('AFL_FILE', parent)
    # Dry-run to make sure parent doesn't cause a crash.
    if self.does_dry_run_throw_error(cov_cmd):
        self.logr("Parent ({}) crashes binary!".format(pbasename))
        return True
    return False
def does_dry_run_throw_error(self, cmd):
    """Run *cmd* in a shell and report whether it crashed.

    Returns True when the child was signal-terminated (negative return
    code) or exited with a status above 128 (shell convention for a
    signal death); an ordinary non-zero exit or success returns False.
    """
    env = os.environ.copy()
    if self.sanitizer == 'asan':
        # Inject ASAN-specific environment options before the dry run.
        spectrum_asan_options(env)
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True, env=env)
    # Bug fix: Python-3 ``except ... as`` syntax, narrowed to the only
    # exception whose ``returncode`` attribute we inspect.
    except subprocess.CalledProcessError as e:
        # OR condition is bug fix for compute shell returning negative
        # instead of positive code.
        return (e.returncode > 128 or e.returncode < 0)
    return False
def subproc_call(cmd, timeout=None):
    """Execute *cmd* in a shell and return its combined stdout/stderr.

    Parameters
    ----------
    cmd : str
        shell command line to execute.
    timeout : float or None
        seconds to wait before giving up; None waits indefinitely.

    Returns
    -------
    bytes or None
        the command output, or None when the command timed out or
        failed (the error is logged rather than raised).
    """
    try:
        output = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT,
            shell=True, timeout=timeout)
        return output
    except subprocess.TimeoutExpired as e:
        # ``Logger.warn`` is a deprecated alias; use ``warning``.
        logger.warning("Command timeout!")
        logger.warning(e.output)
    except subprocess.CalledProcessError as e:
        # Bug fix: corrected "Commnad" typo in the log message.
        logger.warning("Command failed: {}".format(e.returncode))
        logger.warning(e.output)
def get_video_meta(self, video_path):
    '''
    get video meta information
    :param video_path: the absolute path of video file
    :return: a dictionary
        {
            'width': integer,
            'height': integer,
            'duration': integer (millisecond)
        }
        if an error occurred, this method will return None
    '''
    try:
        output = subprocess.check_output([
            'ffprobe',
            '-v', 'error',
            '-show_entries', 'format=duration:stream=width:stream=height',
            '-select_streams', 'v:0',
            '-of', 'json',
            # Bug fix: ``b'{0}'.format(...)`` fails on Python 3 because
            # bytes has no .format(); the argument list takes the path
            # string directly.
            video_path,
        ])
        meta = json.loads(output)
        result = {}
        if 'format' in meta and 'duration' in meta['format']:
            # ffprobe reports duration in seconds; convert to milliseconds.
            result['duration'] = int(float(meta['format']['duration']) * 1000)
        if 'streams' in meta and len(meta['streams']) and 'width' in meta['streams'][0] and 'height' in meta['streams'][0]:
            result['width'] = meta['streams'][0]['width']
            result['height'] = meta['streams'][0]['height']
        return result
    except subprocess.CalledProcessError as error:
        logger.error(error)
        return None
def initdb(datadir, prefix='', echo=False):
    """Bootstrap a PostgreSQL data directory with ``initdb``.

    Runs the ``initdb`` binary found under *prefix*, creating *datadir*
    with superuser ``postgres`` and trust authentication. When *echo*
    is true the tool's combined output is printed.
    """
    cmd = [
        os.path.join(prefix, 'initdb'),
        '-D', datadir,
        '-U', 'postgres',
        '--auth=trust',
    ]
    captured = subprocess.check_output(cmd, close_fds=True, stderr=subprocess.STDOUT)
    if echo:
        print(captured.decode('utf-8'))
def get_parent(self, filepath, isCrash=True):
    """Locate the AFL queue entry that *filepath* was derived from.

    For crash files the fuzzing session is parsed from the filename and
    that session's queue directory is searched; for queue files the
    source id (and optional sync peer) is parsed instead. Returns the
    parent's absolute path, or None when no parent can be determined.
    """
    dirname, basename = os.path.split(filepath)
    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        (_, _, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.args.afl_fuzzing_dir
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None
        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'
    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)
    # Bug fix for Python 3: check_output returns bytes and filter()
    # returns an iterator (no len()); decode and materialize the list.
    parent_list = [line for line in parent_fname.decode('utf-8').split("\n") if line]
    if len(parent_list) == 0:
        self.logr("No parents found for file {}".format(basename))
        return None
    if len(parent_list) > 1:
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))
    return os.path.abspath(parent_list[0].rstrip("\n"))
def get_parent(self, filepath, isCrash=True):
    """Locate the AFL queue entry that *filepath* was derived from.

    For crash files the fuzzing session is parsed from the filename and
    that session's queue directory is searched; for queue files the
    source id (and optional sync peer) is parsed instead. Returns the
    parent's absolute path, or None when no parent can be determined.
    """
    dirname, basename = os.path.split(filepath)
    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        (_, _, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.args.afl_fuzzing_dir
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None
        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'
    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)
    # Bug fix for Python 3: check_output returns bytes and filter()
    # returns an iterator (no len()); decode and materialize the list.
    parent_list = [line for line in parent_fname.decode('utf-8').split("\n") if line]
    if len(parent_list) == 0:
        self.logr("No parents found for file {}".format(basename))
        return None
    if len(parent_list) > 1:
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))
    return os.path.abspath(parent_list[0].rstrip("\n"))
def run_and_wait(args, timeout=None, logfile=None, append=False,
                 env=None, cwd=None):
    """Run a command in a subprocess, then wait for it to finish.

    Parameters
    ----------
    args : string or list[string]
        the command to run, either as one command string or as a list
        of the program and its argument strings (the list form is
        preferred; see the Python subprocess documentation).
    timeout : float or None
        seconds to wait for the command to finish; None waits forever.
    logfile : string or None
        if given, stdout and stderr are also written to this file.
    append : bool
        True to append to *logfile* instead of overwriting it.
    env : dict[string, any] or None
        if not None, replaces the subprocess environment entirely
        instead of inheriting the current process's.
    cwd : string or None
        working directory for the subprocess.

    Returns
    -------
    output : string
        combined standard output and standard error of the command.

    Raises
    ------
    subprocess.CalledProcessError
        if the subprocess exits with a non-zero status.
    """
    raw = subprocess.check_output(args, stderr=subprocess.STDOUT,
                                  timeout=timeout, env=env, cwd=cwd)
    text = raw.decode(encoding=bag_encoding, errors=bag_codec_error)
    if logfile is not None:
        write_file(logfile, text, append=append)
    return text
def get_parent(self, filepath, isCrash=True):
    """Locate the AFL queue entry that *filepath* was derived from.

    For crash files the fuzzing session is parsed from the filename and
    that session's queue directory is searched; for queue files the
    source id (and optional sync peer) is parsed instead. Returns the
    parent's absolute path, or None when no parent can be determined.
    """
    dirname, basename = os.path.split(filepath)
    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        (_, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.afl_fuzzing_dir
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None
        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'
    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)
    # Bug fix for Python 3: check_output returns bytes and filter()
    # returns an iterator (no len()); decode and materialize the list.
    parent_list = [line for line in parent_fname.decode('utf-8').split("\n") if line]
    if len(parent_list) == 0:
        self.logr("No parents found for file {}".format(basename))
        return None
    if len(parent_list) > 1:
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))
    return os.path.abspath(parent_list[0].rstrip("\n"))
def generate_processor_script(events_file, log_file=None):
    # Write a temporary, executable Python script that runs a KCL record
    # processor forwarding Kinesis records to the UNIX socket at
    # *events_file*. Returns the script path; the file is registered in
    # TMP_FILES for later cleanup.
    script_file = os.path.join(tempfile.gettempdir(), 'kclipy.%s.processor.py' % short_uid())
    if log_file:
        # Quote the value so it lands in the template as a string literal.
        log_file = "'%s'" % log_file
    else:
        log_file = 'None'
    # NOTE(review): the template body below appears to have lost its
    # indentation in this copy; verify the generated script against the
    # upstream source before relying on it.
    content = """#!/usr/bin/env python
import os, sys, glob, json, socket, time, logging, tempfile
import subprocess32 as subprocess
logging.basicConfig(level=logging.INFO)
for path in glob.glob('%s/lib/python*/site-packages'):
sys.path.insert(0, path)
sys.path.insert(0, '%s')
from localstack.config import DEFAULT_ENCODING
from localstack.utils.kinesis import kinesis_connector
from localstack.utils.common import timestamp
events_file = '%s'
log_file = %s
error_log = os.path.join(tempfile.gettempdir(), 'kclipy.error.log')
if __name__ == '__main__':
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
num_tries = 3
sleep_time = 2
error = None
for i in range(0, num_tries):
try:
sock.connect(events_file)
error = None
break
except Exception as e:
error = e
if i < num_tries:
msg = '%%s: Unable to connect to UNIX socket. Retrying.' %% timestamp()
subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
time.sleep(sleep_time)
if error:
print("WARN: Unable to connect to UNIX socket after retrying: %%s" %% error)
raise error
def receive_msg(records, checkpointer, shard_id):
try:
# records is a list of amazon_kclpy.messages.Record objects -> convert to JSON
records_dicts = [j._json_dict for j in records]
message_to_send = {'shard_id': shard_id, 'records': records_dicts}
string_to_send = '%%s\\n' %% json.dumps(message_to_send)
bytes_to_send = string_to_send.encode(DEFAULT_ENCODING)
sock.send(bytes_to_send)
except Exception as e:
msg = "WARN: Unable to forward event: %%s" %% e
print(msg)
subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
kinesis_connector.KinesisProcessor.run_processor(log_file=log_file, processor_func=receive_msg)
""" % (LOCALSTACK_VENV_FOLDER, LOCALSTACK_ROOT_FOLDER, events_file, log_file)
    save_file(script_file, content)
    # Mark executable and remember the file for cleanup.
    run('chmod +x %s' % script_file)
    TMP_FILES.append(script_file)
    return script_file
def generate_processor_script(events_file, log_file=None):
    # Write a temporary, executable Python script that runs a KCL record
    # processor forwarding Kinesis records to the UNIX socket at
    # *events_file*. Returns the script path; the file is registered in
    # TMP_FILES for later cleanup.
    script_file = os.path.join(tempfile.gettempdir(), 'kclipy.%s.processor.py' % short_uid())
    if log_file:
        # Quote the value so it lands in the template as a string literal.
        log_file = "'%s'" % log_file
    else:
        log_file = 'None'
    # NOTE(review): the template body below appears to have lost its
    # indentation in this copy; verify the generated script against the
    # upstream source before relying on it.
    content = """#!/usr/bin/env python
import os, sys, glob, json, socket, time, logging, tempfile
import subprocess32 as subprocess
logging.basicConfig(level=logging.INFO)
for path in glob.glob('%s/lib/python*/site-packages'):
sys.path.insert(0, path)
sys.path.insert(0, '%s')
from localstack.config import DEFAULT_ENCODING
from localstack.utils.kinesis import kinesis_connector
from localstack.utils.common import timestamp
events_file = '%s'
log_file = %s
error_log = os.path.join(tempfile.gettempdir(), 'kclipy.error.log')
if __name__ == '__main__':
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
num_tries = 3
sleep_time = 2
error = None
for i in range(0, num_tries):
try:
sock.connect(events_file)
error = None
break
except Exception as e:
error = e
if i < num_tries:
msg = '%%s: Unable to connect to UNIX socket. Retrying.' %% timestamp()
subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
time.sleep(sleep_time)
if error:
print("WARN: Unable to connect to UNIX socket after retrying: %%s" %% error)
raise error
def receive_msg(records, checkpointer, shard_id):
try:
# records is a list of amazon_kclpy.messages.Record objects -> convert to JSON
records_dicts = [j._json_dict for j in records]
message_to_send = {'shard_id': shard_id, 'records': records_dicts}
string_to_send = '%%s\\n' %% json.dumps(message_to_send)
bytes_to_send = string_to_send.encode(DEFAULT_ENCODING)
sock.send(bytes_to_send)
except Exception as e:
msg = "WARN: Unable to forward event: %%s" %% e
print(msg)
subprocess.check_output('echo "%%s" >> %%s' %% (msg, error_log), shell=True)
kinesis_connector.KinesisProcessor.run_processor(log_file=log_file, processor_func=receive_msg)
""" % (LOCALSTACK_VENV_FOLDER, LOCALSTACK_ROOT_FOLDER, events_file, log_file)
    save_file(script_file, content)
    # Mark executable and remember the file for cleanup.
    run('chmod +x %s' % script_file)
    TMP_FILES.append(script_file)
    return script_file