def refine_to_chain(g, from_attr, to_attr):
    """Expand every node of *g* into a chain of per-element nodes.

    Dual of contract_chains(): assumes ``g.node[n][from_attr]`` is a list.
    Each node ``n`` becomes a directed path ``(n, 0) -> ... -> (n, k-1)``,
    one chain node per list element; the last chain node is wired to the
    first chain node of every successor of ``n``. Each new node stores its
    corresponding list element under ``to_attr``.
    """
    chains = []
    for node in g.nodes_iter():
        items = g.node[node][from_attr]
        chain = nx.path_graph(len(items), create_using=nx.DiGraph())
        nx.relabel_nodes(chain,
                         mapping={i: (node, i) for i in chain.nodes()},
                         copy=False)
        tail = (node, len(items) - 1)
        chain.add_edges_from((tail, (succ, 0))
                             for succ in g.successors_iter(node))
        chains.append(chain)
    attrs = {(node, i): item
             for node in g.nodes_iter()
             for i, item in enumerate(g.node[node][from_attr])}
    refined = nx.compose_all(chains)
    # NetworkX 1.x argument order: (graph, attribute name, values dict).
    nx.set_node_attributes(refined, to_attr, attrs)
    return refined
# Example source code for networkx path_graph() usage (scraped section header)
def test_sampler():
    """Two sampling steps from a relabeled 4-node path hit the known score."""
    grammar = lsgg_test.get_grammar()
    graph = lsgg_test.edenize(nx.path_graph(4))
    graph.node[3]['label'] = '5'
    estimator = sde(lsgg_test.edenize(nx.path_graph(4)))
    sampler = sample.Sampler(grammar=grammar, score_estimator=estimator, n_steps=2)
    for _ in range(2):
        # Python 2 generator protocol: take the first yielded (graph, score).
        graph, score = sampler.transform(graph).next()
    assert abs(0.319274373045 - score) < 0.000001
def get_grammar():
    """Return an lsgg grammar fitted on three copies of a 4-node path graph."""
    grammar = lsgg.lsgg()
    g = prep_cip_extract(nx.path_graph(4))
    grammar.fit([g, g, g])
    return grammar
def test_extract_core_and_interface():
    """CIP extraction at node 3 (radius 1, thickness 1) has a stable repr."""
    graph = nx.path_graph(4)
    prep_cip_extract(graph)
    cip = lcu.extract_core_and_interface(root_node=3, graph=graph,
                                         radius=1, thickness=1)
    assert str(cip) == "cip: int:16931, cor:695036, rad:1, thi:1, rot:3"
def test_neighbors():
    """A relabeled 4-node path graph has exactly 6 grammar neighbors."""
    grammar = get_grammar()
    graph = edenize(nx.path_graph(4))
    graph.node[3]['label'] = '5'
    neighbors = list(grammar.neighbors(graph))
    assert len(neighbors) == 6
def get_grammar():
    """Build an lsgg grammar and fit it on three copies of a 4-node path."""
    grammar = lsgg()
    g = prep_cip_extract(nx.path_graph(4))
    grammar.fit([g, g, g])
    return grammar
def communitySplits(self, graph, weight=None):
"""
Compute the splits for the formation of communities.
Parameters
----------
graph - A networkx graph of digraph.
weight (string) - If None, all edge weights are considered equal.
Otherwise holds the name of the edge attribute used as weight
Returns
-------
The graph with weak edges removed.
Usage
-----
>>> G = nx.path_graph(10)
>>> out = GirvanNewman(G)
>>> comm = out.communities(G, weight=None)
>>> for x in comm:
print x
"""
nConnComp = nx.number_connected_components(graph)
nComm = nConnComp
while (nComm <= nConnComp):
betweenness = nx.edge_betweenness_centrality(graph, weight=weight)
if (len(betweenness.values()) != 0 ):
max_betweenness = max(betweenness.values())
else:
break
for u,v in betweenness.iteritems():
if float(v) == max_betweenness:
# print u,v
graph.remove_edge(u[0], u[1])
nComm = nx.number_connected_components(graph)
return graph
def test_simulation():
    """Smoke test: a simulation embedded on a 5-node path runs to completion."""
    embedding = nx.path_graph(5)
    sim = Simulation(embedding_graph=embedding, process_type=Process,
                     channel_type=Channel)
    sim.run()
def circular_layout(G, scale=1, center=None, dim=2, direction='CCW'):
    """Position nodes evenly on a circle.

    Parameters
    ----------
    G : NetworkX graph or list of nodes
    scale : float
        Scale factor for positions.
    center : array-like or None
        Coordinate pair around which to center the layout.
    dim : int
        Dimension of layout; only dim=2 is supported.
    direction : 'CCW' or 'CW'
        Place nodes counter-clockwise (default) or clockwise.

    Returns
    -------
    pos : dict
        A dictionary of positions keyed by node.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> pos = nx.circular_layout(G)

    Notes
    -----
    This algorithm currently only works in two dimensions and does not
    try to minimize edge crossings.
    """
    G, center = _process_params(G, center, dim)
    count = len(G)
    if count == 0:
        return {}
    if count == 1:
        return {nx.utils.arbitrary_element(G): center}
    # Evenly spaced angles; the linspace endpoint duplicates 0 radians,
    # so it is dropped before scaling to the full circle.
    angles = (np.linspace(0, 1, count + 1)[:-1] * 2 * np.pi).astype(np.float32)
    if direction == 'CCW':
        coords = np.column_stack([np.cos(angles), np.sin(angles)])
    else:
        coords = np.column_stack([np.sin(angles), np.cos(angles)])
    coords = rescale_layout(coords, scale=scale) + center
    return dict(zip(G, coords))
def read_gml(path, relabel=False):
    """Read a graph in GML format from *path*.

    Parameters
    ----------
    path : filehandle
        Open handle (iterable of byte lines) to read from; each line is
        ASCII-decoded and HTML-unescaped before parsing.
    relabel : bool, optional
        If True, use the GML node label attribute for node names;
        otherwise use the node id.

    Returns
    -------
    G : MultiGraph or MultiDiGraph

    Raises
    ------
    ImportError
        If the pyparsing module is not available.

    See Also
    --------
    write_gml, parse_gml

    Notes
    -----
    Requires pyparsing: http://pyparsing.wikispaces.com/
    The GML specification says that files should be ASCII encoded, with any
    extended ASCII characters (iso8859-1) appearing as HTML character
    entities.

    References
    ----------
    GML specification:
    http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html

    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> nx.write_gml(G,'test.gml')
    >>> H=nx.read_gml('test.gml')
    """
    # Decode byte lines lazily, translating HTML character entities back
    # to extended-ASCII characters before handing off to the parser.
    decoded_lines = (unescape(raw.decode('ascii')) for raw in path)
    return parse_gml(decoded_lines, relabel=relabel)
def write_gml(G, path):
    """Write the graph G in GML format to *path*.

    Parameters
    ----------
    path : filehandle
        Writable binary file handle. NOTE(review): this body only calls
        ``path.write`` — any filename handling or .gz/.bz2 compression
        must happen in a wrapper elsewhere; confirm before passing a
        plain filename.

    See Also
    --------
    read_gml, parse_gml

    Notes
    -----
    GML specifications indicate that the file should only use 7-bit ASCII
    text encoding (iso8859-1 / latin-1 via character entities).
    This implementation does not support all Python data types as GML
    data. Nodes, node attributes, edge attributes, and graph attributes
    must be either dictionaries or single strings or numbers; otherwise
    an attempt is made to represent them as strings. For example, a list
    as edge data ``G[1][2]['somedata']=[1,2,3]`` will be represented in
    the GML file as::

        edge [
            source 1
            target 2
            somedata "[1, 2, 3]"
        ]

    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> nx.write_gml(G,"test.gml")
    """
    for record in generate_gml(G):
        # Non-ASCII characters are emitted as numeric character entities.
        path.write((record + '\n').encode('ascii', 'xmlcharrefreplace'))