def test_line_cell_info():
"""%%foo and %foo magics are distinguishable to inspect"""
ip = get_ipython()
ip.magics_manager.register(FooFoo)
oinfo = ip.object_inspect('foo')
nt.assert_true(oinfo['found'])
nt.assert_true(oinfo['ismagic'])
oinfo = ip.object_inspect('%%foo')
nt.assert_true(oinfo['found'])
nt.assert_true(oinfo['ismagic'])
nt.assert_equal(oinfo['docstring'], FooFoo.cell_foo.__doc__)
oinfo = ip.object_inspect('%foo')
nt.assert_true(oinfo['found'])
nt.assert_true(oinfo['ismagic'])
nt.assert_equal(oinfo['docstring'], FooFoo.line_foo.__doc__)
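# FooFoo is referenced by the test above but not defined in this excerpt. A minimal
# sketch, assuming the standard IPython magic decorators, of what such a magics
# class could look like (line_foo/cell_foo mirror the attributes the test inspects):
from IPython.core.magic import Magics, cell_magic, line_magic, magics_class

@magics_class
class FooFoo(Magics):
    """Magics class exposing both a %foo line magic and a %%foo cell magic."""

    @line_magic("foo")
    def line_foo(self, line):
        """I am line foo."""

    @cell_magic("foo")
    def cell_foo(self, line, cell):
        """I am cell foo."""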
def test_alias_magic():
"""Test %alias_magic."""
ip = get_ipython()
mm = ip.magics_manager
# Basic operation: both cell and line magics are created, if possible.
ip.run_line_magic('alias_magic', 'timeit_alias timeit')
nt.assert_in('timeit_alias', mm.magics['line'])
nt.assert_in('timeit_alias', mm.magics['cell'])
# --cell is specified, line magic not created.
ip.run_line_magic('alias_magic', '--cell timeit_cell_alias timeit')
nt.assert_not_in('timeit_cell_alias', mm.magics['line'])
nt.assert_in('timeit_cell_alias', mm.magics['cell'])
# Test that line alias is created successfully.
ip.run_line_magic('alias_magic', '--line env_alias env')
nt.assert_equal(ip.run_line_magic('env', ''),
ip.run_line_magic('env_alias', ''))
def test_save():
"""Test %save."""
ip = get_ipython()
ip.history_manager.reset() # Clear any existing history.
cmds = [u"a=1", u"def b():\n return a**2", u"print(a, b())"]
for i, cmd in enumerate(cmds, start=1):
ip.history_manager.store_inputs(i, cmd)
with TemporaryDirectory() as tmpdir:
file = os.path.join(tmpdir, "testsave.py")
ip.run_line_magic("save", "%s 1-10" % file)
with open(file) as f:
content = f.read()
nt.assert_equal(content.count(cmds[0]), 1)
nt.assert_in('coding: utf-8', content)
ip.run_line_magic("save", "-a %s 1-10" % file)
with open(file) as f:
content = f.read()
nt.assert_equal(content.count(cmds[0]), 2)
nt.assert_in('coding: utf-8', content)
def test_store():
"""Test %store."""
ip = get_ipython()
ip.run_line_magic('load_ext', 'storemagic')
# make sure the storage is empty
ip.run_line_magic('store', '-z')
ip.user_ns['var'] = 42
ip.run_line_magic('store', 'var')
ip.user_ns['var'] = 39
ip.run_line_magic('store', '-r')
nt.assert_equal(ip.user_ns['var'], 42)
ip.run_line_magic('store', '-d var')
ip.user_ns['var'] = 39
ip.run_line_magic('store' , '-r')
nt.assert_equal(ip.user_ns['var'], 39)
def quick_completer(cmd, completions):
""" Easily create a trivial completer for a command.
    Takes either a list of completions, or all completions in a single string
    (which will be split on whitespace).
Example::
[d:\ipython]|1> import ipy_completers
[d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
[d:\ipython]|3> foo b<TAB>
bar baz
[d:\ipython]|3> foo ba
"""
if isinstance(completions, string_types):
completions = completions.split()
def do_complete(self, event):
return completions
get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
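# Illustrative use of quick_completer above; the 'deploy' and 'serve' commands and
# their completions are hypothetical. Completions may be passed as a list or as a
# whitespace-separated string.
quick_completer('deploy', ['staging', 'production'])
quick_completer('serve', 'start stop restart')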
def _attached_to_ipy_notebook_with_widgets():
try:
# check for widgets
import ipywidgets
if ipywidgets.version_info[0] < 4:
raise ImportError()
# check for ipython kernel
from IPython import get_ipython
ip = get_ipython()
if ip is None:
return False
if not getattr(ip, 'kernel', None):
return False
# No further checks are feasible
return True
except ImportError:
return False
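# Illustrative guard built on the check above; make_progress_bar is a hypothetical
# helper. It only constructs an ipywidgets progress bar when a kernel with widget
# support is attached, and otherwise signals the caller to fall back to text output.
def make_progress_bar(total):
    if _attached_to_ipy_notebook_with_widgets():
        from ipywidgets import IntProgress
        from IPython.display import display
        bar = IntProgress(min=0, max=total)
        display(bar)
        return bar
    return None  # caller falls back to e.g. printing percentages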
def get_config():
ip = get_ipython()
if ip is None:
profile_dir = paths.locate_profile()
else:
profile_dir = ip.profile_dir.location
json_path = path.join(profile_dir, "ipython_config.json")
try:
with open(json_path, 'r') as f:
config = json.load(f)
except (FileNotFoundError, json.decoder.JSONDecodeError):
config = {}
return config, json_path
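# Hedged usage sketch for get_config() above; save_config is a hypothetical
# companion helper that writes updated settings back to the returned
# ipython_config.json path.
import json

def save_config(updates):
    config, json_path = get_config()
    config.update(updates)
    with open(json_path, 'w') as f:
        json.dump(config, f, indent=2)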
def test_create_cell_debug(self, mock_default_context, mock_notebook_environment):
env = {}
mock_default_context.return_value = TestCases._create_context()
mock_notebook_environment.return_value = env
IPython.get_ipython().user_ns = env
    # cell output is only produced when debug is True
output = google.datalab.contrib.pipeline.commands._pipeline._create_cell(
{'name': 'foo_pipeline', 'debug': True}, self.sample_cell_body)
self.assertTrue(len(output) > 0)
output = google.datalab.contrib.pipeline.commands._pipeline._create_cell(
{'name': 'foo_pipeline', 'debug': False}, self.sample_cell_body)
self.assertTrue(output is None)
output = google.datalab.contrib.pipeline.commands._pipeline._create_cell(
{'name': 'foo_pipeline'}, self.sample_cell_body)
self.assertTrue(output is None)
def _view(args, cell):
csv = datalab.data.Csv(args['input'])
num_lines = int(args['count'] or 5)
headers = None
if cell:
ipy = IPython.get_ipython()
config = _utils.parse_config(cell, ipy.user_ns)
if 'columns' in config:
headers = [e.strip() for e in config['columns'].split(',')]
df = pd.DataFrame(csv.browse(num_lines, headers))
if args['profile']:
# TODO(gram): We need to generate a schema and type-convert the columns before this
# will be useful for CSV
return _utils.profile_df(df)
else:
return IPython.core.display.HTML(df.to_html(index=False))
def _view(args, cell):
csv = google.datalab.data.CsvFile(args['input'])
num_lines = int(args['count'] or 5)
headers = None
if cell:
ipy = IPython.get_ipython()
config = _utils.parse_config(cell, ipy.user_ns)
if 'columns' in config:
headers = [e.strip() for e in config['columns'].split(',')]
df = pd.DataFrame(csv.browse(num_lines, headers))
if args['profile']:
# TODO(gram): We need to generate a schema and type-convert the columns before this
# will be useful for CSV
return _utils.profile_df(df)
else:
return IPython.core.display.HTML(df.to_html(index=False))
def page(data, start=0, screen_lines=0, pager_cmd=None):
"""Display content in a pager, piping through a pager after a certain length.
data can be a mime-bundle dict, supplying multiple representations,
keyed by mime-type, or text.
Pager is dispatched via the `show_in_pager` IPython hook.
If no hook is registered, `pager_page` will be used.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
# first, try the hook
ip = get_ipython()
if ip:
try:
ip.hooks.show_in_pager(data, start=start, screen_lines=screen_lines)
return
except TryNext:
pass
# fallback on default pager
return pager_page(data, start, screen_lines, pager_cmd)
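# Hedged sketch of the dispatch described in the docstring above: registering a
# custom handler for the show_in_pager hook (a real IPython hook name) so that
# page() routes output through it. my_show_in_pager and its body are illustrative.
def my_show_in_pager(self, data, start, screen_lines):
    # data may be plain text or a mime-bundle dict keyed by mime-type
    text = data.get('text/plain', '') if isinstance(data, dict) else data
    print(text)

ip = get_ipython()
if ip is not None:
    ip.set_hook('show_in_pager', my_show_in_pager)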
def test_latex_completions():
from IPython.core.latex_symbols import latex_symbols
import random
ip = get_ipython()
# Test some random unicode symbols
    keys = random.sample(sorted(latex_symbols), 10)  # sample needs a sequence on recent Pythons
for k in keys:
text, matches = ip.complete(k)
nt.assert_equal(len(matches),1)
nt.assert_equal(text, k)
nt.assert_equal(matches[0], latex_symbols[k])
# Test a more complex line
text, matches = ip.complete(u'print(\\alpha')
    nt.assert_equal(text, u'\\alpha')
    nt.assert_equal(matches[0], latex_symbols['\\alpha'])
# Test multiple matching latex symbols
text, matches = ip.complete(u'\\al')
nt.assert_in('\\alpha', matches)
nt.assert_in('\\aleph', matches)
def test_abspath_file_completions():
ip = get_ipython()
with TemporaryDirectory() as tmpdir:
prefix = os.path.join(tmpdir, 'foo')
suffixes = ['1', '2']
names = [prefix+s for s in suffixes]
for n in names:
open(n, 'w').close()
# Check simple completion
c = ip.complete(prefix)[1]
nt.assert_equal(c, names)
# Now check with a function call
cmd = 'a = f("%s' % prefix
c = ip.complete(prefix, cmd)[1]
comp = [prefix+s for s in suffixes]
nt.assert_equal(c, comp)
def test_local_file_completions():
ip = get_ipython()
with TemporaryWorkingDirectory():
prefix = './foo'
suffixes = ['1', '2']
names = [prefix+s for s in suffixes]
for n in names:
open(n, 'w').close()
# Check simple completion
c = ip.complete(prefix)[1]
nt.assert_equal(c, names)
# Now check with a function call
cmd = 'a = f("%s' % prefix
c = ip.complete(prefix, cmd)[1]
comp = set(prefix+s for s in suffixes)
nt.assert_true(comp.issubset(set(c)))
def find_module(self, fullname, path=None):
if self._called:
# already handled
return
if fullname not in ('pylab', 'matplotlib.pyplot'):
# not matplotlib
return
# don't call me again
self._called = True
try:
# remove myself from the import hooks
sys.meta_path = [loader for loader in sys.meta_path if loader is not self]
except ValueError:
pass
ip = get_ipython()
if ip is None:
# not in an interactive environment
return
if ip.pylab_gui_select:
# backend already selected
return
if hasattr(ip, 'kernel'):
# default to inline in kernel environments
ip.enable_matplotlib('inline')
else:
print('enabling matplotlib')
ip.enable_matplotlib()
# install the finder immediately
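# The snippet above ends with a dangling "install the finder immediately" comment;
# the installing statement itself is not shown. A hedged completion, reusing the
# find_module hook defined above on a hypothetical finder class:
import sys

class _PylabAutoEnableFinder(object):
    _called = False
    find_module = find_module  # the legacy-style import hook defined above

sys.meta_path.insert(0, _PylabAutoEnableFinder())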
def __init__(self, nb_path, ns=None):
self.nb_path = nb_path
if ns is None:
self.ns = dict()
else:
self.ns = ns
if 'get_ipython' not in self.ns:
        # not sure if that's really needed
self.ns['get_ipython'] = get_ipython
self.shell = InteractiveShell.instance()
self.refresh()
self.run_tag('__init__', strict=False)
def __init__(self, id=None, on_msg=None):
"""
Initializes a Comms object
"""
self.id = id if id else uuid.uuid4().hex
self._on_msg = on_msg
self._comm = None
from IPython import get_ipython
self.manager = get_ipython().kernel.comm_manager
self.manager.register_target(self.id, self._handle_open)
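# Hedged sketch of the _handle_open callback registered above (not shown in this
# excerpt): the kernel's comm manager calls it with the opened comm and the
# comm_open message, so it stores the comm and wires up the user's message handler.
def _handle_open(self, comm, open_msg):
    self._comm = comm
    if self._on_msg is not None:
        comm.on_msg(self._on_msg)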
def is_kernel():
"""Detects if running in an IPython session
"""
if 'IPython' not in sys.modules:
# IPython hasn't been imported, definitely not
return False
from IPython import get_ipython
# check for `kernel` attribute on the IPython instance
return getattr(get_ipython(), 'kernel', None) is not None
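# Illustrative (hypothetical) use of is_kernel() above: prefer rich display when an
# IPython kernel is attached, and fall back to plain printing otherwise.
def show(obj):
    if is_kernel():
        from IPython.display import display
        display(obj)
    else:
        print(obj)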
def time_dense_solvers():
instructions = {
solver: "u = solve_qp(P_array, q, G_array, h, solver='%s')" % solver
for solver in dense_solvers}
print "\nDense solvers",
print "\n-------------"
for solver, instr in instructions.iteritems():
print "%s: " % solver,
get_ipython().magic(u'timeit %s' % instr)
def time_sparse_solvers():
instructions = {
solver: "u = solve_qp(P, q, G, h, solver='%s')" % solver
for solver in sparse_solvers}
print "\nSparse solvers",
print "\n--------------"
for solver, instr in instructions.iteritems():
print "%s: " % solver,
get_ipython().magic(u'timeit %s' % instr)