def run_cmd(self, cmd, collect, env=None):
    out = []

    if self.args.verbose:
        self.logr(" CMD: %s" % cmd)

    # Send command output to a temp file when it needs to be collected,
    # otherwise discard it.
    if self.args.disable_cmd_redirection or collect == self.Want_Output:
        fh = open(self.cov_paths['tmp_out'], 'w')
    else:
        fh = open(os.devnull, 'w')

    if env is None:
        subprocess.call(cmd, stdin=None,
                stdout=fh, stderr=subprocess.STDOUT,
                shell=True, executable='/bin/bash')
    else:
        subprocess.call(cmd, stdin=None,
                stdout=fh, stderr=subprocess.STDOUT,
                shell=True, env=env, executable='/bin/bash')

    fh.close()

    if self.args.disable_cmd_redirection or collect == self.Want_Output:
        with open(self.cov_paths['tmp_out'], 'r') as f:
            for line in f:
                out.append(line.rstrip('\n'))

    return out
Python subprocess.STDOUT usage examples
def subproc_call(cmd, timeout=None):
    try:
        output = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT,
            shell=True, timeout=timeout)
        return output
    except subprocess.TimeoutExpired as e:
        logger.warn("Command timeout!")
        logger.warn(e.output)
    except subprocess.CalledProcessError as e:
        logger.warn("Command failed: {}".format(e.returncode))
        logger.warn(e.output)
    # On timeout or failure the function logs and implicitly returns None.
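A minimal usage sketch: the command string and timeout below are illustrative, and `logger` is assumed to be configured by the surrounding module.

# Hypothetical call to subproc_call; the command and timeout are not from the original snippet.
result = subproc_call("uname -a", timeout=5)
if result is not None:
    print(result.decode("utf-8"))
else:
    print("command timed out or failed")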
def initdb(datadir, prefix='', echo=False):
    init_args = [
        os.path.join(prefix, 'initdb'),
        '-D', datadir,
        '-U', 'postgres',
        '--auth=trust',
    ]
    output = subprocess.check_output(
        init_args,
        close_fds=True,
        stderr=subprocess.STDOUT,
    )
    if echo:
        print(output.decode('utf-8'))
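A hypothetical invocation, assuming the `initdb` binary is reachable under the given `prefix` (the PostgreSQL path below is only an example) and that a scratch directory is acceptable as the data directory:

# Illustrative usage of initdb; the prefix path is an assumption, not from the snippet.
import tempfile
scratch = tempfile.mkdtemp(prefix='pgdata-')
initdb(scratch, prefix='/usr/lib/postgresql/14/bin', echo=True)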
test_subscription_transport.py (project: graphql-python-subscriptions, author: hballard)
def test_correctly_sets_the_context_in_on_subscribe(server):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient =
            require('subscriptions-transport-ws').SubscriptionClient
        const CTX = 'testContext';
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.subscribe({{
            query: `subscription context {{
                context
            }}`,
            variables: {{}},
            context: CTX,
        }}, (error, result) => {{
            client.unsubscribeAll();
            if (error) {{
                console.log(JSON.stringify(error));
            }}
            if (result) {{
                console.log(JSON.stringify({{
                    client: {{
                        result: result,
                    }}
                }}));
            }} else {{
                // pass
            }}
        }}
        );
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    p = subprocess.Popen(
        ['node', '-e', node_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    time.sleep(.2)
    requests.post(
        'http://localhost:{0}/publish'.format(TEST_PORT), json=['context', {}])
    q = queue.Queue()
    t = threading.Thread(target=enqueue_output, args=(p.stdout, q))
    t.daemon = True
    t.start()
    time.sleep(.2)
    ret_values = {}
    while True:
        try:
            _line = q.get_nowait()
            if isinstance(_line, bytes):
                line = _line.decode()
            line = json.loads(line)
            ret_values[list(line.keys())[0]] = line[list(line.keys())[0]]
        except ValueError:
            pass
        except queue.Empty:
            break
    client = ret_values['client']
    assert client['result']['context']
    assert client['result']['context'] == 'testContext'
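The tests in this file hand the node process's stdout to an `enqueue_output` helper running in a daemon thread, but the helper itself is not part of the excerpt. A plausible minimal implementation (an assumption, not the project's actual code) simply drains the pipe line by line into the queue so the test can poll it with `get_nowait()`:

# Assumed helper, not shown in the excerpted file: feed a subprocess pipe into a queue.
def enqueue_output(pipe, q):
    for raw_line in iter(pipe.readline, b''):  # read until the pipe reaches EOF
        q.put(raw_line)                        # each raw line is consumed by the test's polling loop
    pipe.close()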
def test_rejects_unparsable_message(server):
    node_script = '''
        module.paths.push('{0}');
        WebSocket = require('ws');
        const GRAPHQL_SUBSCRIPTIONS = 'graphql-subscriptions';
        const client = new WebSocket('ws://localhost:{1}/socket',
            GRAPHQL_SUBSCRIPTIONS);
        client.onmessage = (message) => {{
            let msg = JSON.parse(message.data)
            console.log(JSON.stringify({{[msg.type]: msg}}))
            client.close();
        }};
        client.onopen = () => {{
            client.send('HI');
        }}
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    p = subprocess.Popen(
        ['node', '-e', node_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    q = queue.Queue()
    t = threading.Thread(target=enqueue_output, args=(p.stdout, q))
    t.daemon = True
    t.start()
    time.sleep(.2)
    ret_values = {}
    while True:
        try:
            _line = q.get_nowait()
            if isinstance(_line, bytes):
                line = _line.decode()
            line = json.loads(line)
            ret_values[list(line.keys())[0]] = line[list(line.keys())[0]]
        except ValueError:
            pass
        except queue.Empty:
            break
    assert ret_values['subscription_fail']
    assert len(ret_values['subscription_fail']['payload']['errors']) > 0
def test_rejects_nonsense_message(server):
    node_script = '''
        module.paths.push('{0}');
        WebSocket = require('ws');
        const GRAPHQL_SUBSCRIPTIONS = 'graphql-subscriptions';
        const client = new WebSocket('ws://localhost:{1}/socket',
            GRAPHQL_SUBSCRIPTIONS);
        client.onmessage = (message) => {{
            let msg = JSON.parse(message.data)
            console.log(JSON.stringify({{[msg.type]: msg}}))
            client.close();
        }};
        client.onopen = () => {{
            client.send(JSON.stringify({{}}));
        }}
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    p = subprocess.Popen(
        ['node', '-e', node_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    q = queue.Queue()
    t = threading.Thread(target=enqueue_output, args=(p.stdout, q))
    t.daemon = True
    t.start()
    time.sleep(.2)
    ret_values = {}
    while True:
        try:
            _line = q.get_nowait()
            if isinstance(_line, bytes):
                line = _line.decode()
            line = json.loads(line)
            ret_values[list(line.keys())[0]] = line[list(line.keys())[0]]
        except ValueError:
            pass
        except queue.Empty:
            break
    assert ret_values['subscription_fail']
    assert len(ret_values['subscription_fail']['payload']['errors']) > 0
def test_sends_back_any_type_of_error(server):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient =
            require('subscriptions-transport-ws').SubscriptionClient
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.subscribe({{
            query: `invalid useInfo {{
                error
            }}`,
            variables: {{}},
        }}, function (errors, result) {{
            client.unsubscribeAll();
            if (errors) {{
                console.log(JSON.stringify({{'errors': errors}}))
            }}
            if (result) {{
                console.log(JSON.stringify({{'result': result}}))
            }}
        }}
        );
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    p = subprocess.Popen(
        ['node', '-e', node_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    time.sleep(5)
    q = queue.Queue()
    t = threading.Thread(target=enqueue_output, args=(p.stdout, q))
    t.daemon = True
    t.start()
    time.sleep(.2)
    ret_values = {}
    while True:
        try:
            _line = q.get_nowait()
            if isinstance(_line, bytes):
                line = _line.decode()
            line = json.loads(line)
            ret_values[list(line.keys())[0]] = line[list(line.keys())[0]]
        except ValueError:
            pass
        except queue.Empty:
            break
    assert len(ret_values['errors']) > 0
def test_handles_errors_prior_to_graphql_execution(server_with_on_sub_handler):
    node_script = '''
        module.paths.push('{0}')
        WebSocket = require('ws')
        const SubscriptionClient =
            require('subscriptions-transport-ws').SubscriptionClient
        const client = new SubscriptionClient('ws://localhost:{1}/socket')
        client.subscribe({{
            query: `subscription context {{
                context
            }}`,
            variables: {{}},
            context: {{}},
        }}, function (errors, result) {{
            client.unsubscribeAll();
            if (errors) {{
                console.log(JSON.stringify({{'errors': errors}}))
            }}
            if (result) {{
                console.log(JSON.stringify({{'result': result}}))
            }}
        }}
        );
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    p = subprocess.Popen(
        ['node', '-e', node_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    time.sleep(.2)
    requests.post(
        'http://localhost:{0}/publish'.format(TEST_PORT), json=['context', {}])
    q = queue.Queue()
    t = threading.Thread(target=enqueue_output, args=(p.stdout, q))
    t.daemon = True
    t.start()
    time.sleep(.2)
    ret_values = {}
    while True:
        try:
            _line = q.get_nowait()
            if isinstance(_line, bytes):
                line = _line.decode()
            line = json.loads(line)
            ret_values[list(line.keys())[0]] = line[list(line.keys())[0]]
        except ValueError:
            pass
        except queue.Empty:
            break
    assert isinstance(ret_values['errors'], list)
    assert ret_values['errors'][0]['message'] == 'bad'
def test_sends_a_keep_alive_signal_in_the_socket(server_with_keep_alive):
    node_script = '''
        module.paths.push('{0}');
        WebSocket = require('ws');
        const GRAPHQL_SUBSCRIPTIONS = 'graphql-subscriptions';
        const KEEP_ALIVE = 'keepalive';
        const client = new WebSocket('ws://localhost:{1}/socket',
            GRAPHQL_SUBSCRIPTIONS);
        let yieldCount = 0;
        client.onmessage = (message) => {{
            let msg = JSON.parse(message.data)
            if (msg.type === KEEP_ALIVE) {{
                yieldCount += 1;
                if (yieldCount > 1) {{
                    let returnMsg = {{'type': msg.type, 'yieldCount': yieldCount}}
                    console.log(JSON.stringify(returnMsg))
                    client.close();
                }}
            }}
        }};
    '''.format(
        os.path.join(os.path.dirname(__file__), 'node_modules'), TEST_PORT)
    p = subprocess.Popen(
        ['node', '-e', node_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    q = queue.Queue()
    t = threading.Thread(target=enqueue_output, args=(p.stdout, q))
    t.daemon = True
    t.start()
    time.sleep(.5)
    while True:
        try:
            _line = q.get_nowait()
            if isinstance(_line, bytes):
                line = _line.decode()
            ret_value = json.loads(line)
        except ValueError:
            pass
        except queue.Empty:
            break
    assert ret_value['type'] == 'keepalive'
    assert ret_value['yieldCount'] > 1
def run_cmd(cmd, args=None, tool=None):
    stats = {'timed_out': False,
             'output': ''}
    timer = None
    out = None
    out_file = None
    friendly_cmd = ' '.join(cmd)
    if args and args.verbose and args.log_to_stderr:
        out = sys.stderr
    elif tool:
        out_file = os.path.join(args.output_directory, tool + ".log")
        out = open(out_file, 'a')

    def output(line):
        if out:
            out.write(line)
            out.flush()

    def kill_proc(proc, stats):
        output("Timed out after {} seconds on {}\n".format(args.timeout, friendly_cmd))
        stats['timed_out'] = True
        proc.kill()

    output("Running {}\n\n".format(friendly_cmd))
    try:
        start_time = timeit.default_timer()
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        if args and args.timeout:
            timer = Timer(args.timeout, kill_proc, [process, stats])
            timer.start()
        for line in iter(process.stdout.readline, b''):
            stats['output'] = stats['output'] + line
            output(line)
        process.stdout.close()
        process.wait()
        stats['time'] = timeit.default_timer() - start_time
        stats['return_code'] = process.returncode
        if timer:
            timer.cancel()
    except:
        output('calling {cmd} failed\n{trace}\n'.format(cmd=friendly_cmd, trace=traceback.format_exc()))
    if out_file:
        out.close()
    return stats
def get_parent(self, filepath, isCrash=True):
    dirname, basename = os.path.split(filepath)

    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        # (_, _, session, _, syncname, src_id) = match.groups()
        (_, _, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.args.afl_fuzzing_dir
        # if syncname:
        #     searchdir += '/' + syncname + '/queue'
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None
        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'

    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)
    parent_list = filter(None, parent_fname.split("\n"))
    if (len(parent_list) == 0):
        self.logr("No parents found for file {}".format(basename))
        return None
    if (len(parent_list) > 1):
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))
    return os.path.abspath(parent_list[0].rstrip("\n"))
def run_and_wait(args, timeout=None, logfile=None, append=False,
                 env=None, cwd=None):
    """Run a command in a subprocess, then wait for it to finish.

    Parameters
    ----------
    args : string or list[string]
        the command to run.  Should be either a command string or a list
        of command string and its arguments as strings.  A list is preferred;
        see Python subprocess documentation.
    timeout : float or None
        the amount of time to wait for the command to finish, in seconds.
        If None, waits indefinitely.
    logfile : string or None
        If given, stdout and stderr will be written to this file.
    append : bool
        True to append to the logfile.  Defaults to False.
    env : dict[string, any]
        If not None, environment variables of the subprocess will be set
        according to this dictionary instead of inheriting from current
        process.
    cwd : string or None
        The current working directory of the subprocess.

    Returns
    -------
    output : string
        the standard output and standard error from the command.

    Raises
    ------
    subprocess.CalledProcessError
        if any error occurred in the subprocess.
    """
    output = subprocess.check_output(args, stderr=subprocess.STDOUT,
                                     timeout=timeout, env=env, cwd=cwd)
    output = output.decode(encoding=bag_encoding, errors=bag_codec_error)
    if logfile is not None:
        write_file(logfile, output, append=append)
    return output
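A short usage sketch; `bag_encoding`, `bag_codec_error`, and `write_file` come from the surrounding module, and the command and log-file name below are illustrative only.

# Hypothetical call: capture combined stdout/stderr and mirror it into a log file.
out = run_and_wait(['python', '--version'], timeout=10, logfile='run.log')
print(out)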
def get_parent(self, filepath, isCrash=True):
    dirname, basename = os.path.split(filepath)

    if isCrash:
        match = self.find_crash_parent_regex.match(basename)
        # (_, _, session, _, syncname, src_id) = match.groups()
        (_, _, session, _, syncname, src_id) = match.groups()
        searchdir = self.afl_fuzzing_dir
        # if syncname:
        #     searchdir += '/' + syncname + '/queue'
        if session:
            searchdir += '/' + session + '/queue'
        else:
            assert False, "Parent of crash file {} cannot be found".format(basename)
    else:
        match = self.find_queue_parent_regex.match(basename)
        if not match:
            self.logr("No parent could be found for {}".format(basename))
            return None
        (_, syncname, src_id) = match.groups()
        searchdir = dirname
        if syncname:
            searchdir += '/../../' + syncname + '/queue'

    search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*"
    parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True)
    parent_list = filter(None, parent_fname.split("\n"))
    if (len(parent_list) == 0):
        self.logr("No parents found for file {}".format(basename))
        return None
    if (len(parent_list) > 1):
        self.logr("Multiple parents found for file {}. Selecting first.".format(basename))
    return os.path.abspath(parent_list[0].rstrip("\n"))
def server_process(datadir, host='127.0.0.1', port=9201, prefix='', echo=False):
    args = [
        os.path.join(prefix, 'elasticsearch'),
        '-Enetwork.host=%s' % host,
        '-Ehttp.port=%d' % port,
        '-Epath.data=%s' % os.path.join(datadir, 'data'),
        '-Epath.logs=%s' % os.path.join(datadir, 'logs'),
    ]
    if os.environ.get('TRAVIS'):
        print('IN TRAVIS')
        echo = True
        args.append('-Epath.conf=%s/conf' % os.environ['TRAVIS_BUILD_DIR'])
    elif os.path.exists('/etc/elasticsearch'):
        print('NOT IN TRAVIS')
        args.append('-Epath.conf=/etc/elasticsearch')
    print(args)
    process = subprocess.Popen(
        args,
        close_fds=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    SUCCESS_LINE = b'started\n'
    lines = []
    for line in iter(process.stdout.readline, b''):
        if echo:
            sys.stdout.write(line.decode('utf-8'))
        lines.append(line)
        if line.endswith(SUCCESS_LINE):
            print('detected start, broke')
            break
    else:
        code = process.wait()
        msg = ('Process return code: %d\n' % code) + b''.join(lines).decode('utf-8')
        raise Exception(msg)
    if not echo:
        process.stdout.close()
    print('returning process')
    return process
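One possible way to drive it, assuming the Elasticsearch binary lives under the `prefix` shown (an illustrative path) and that the caller is responsible for shutting the node down:

# Illustrative usage of server_process; the data directory, port, and prefix are assumptions.
proc = server_process('/tmp/es-data', port=9201, prefix='/usr/share/elasticsearch/bin')
try:
    pass  # talk to the node at http://127.0.0.1:9201 here
finally:
    proc.terminate()
    proc.wait()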