def depart(self, G, rng):
    n1 = G.order()
    # number of vertices leaving the pool this round
    n2 = self.env.embedding.depart_number(G, rng)
    if G.order() <= n2:
        old = G.nodes()
    else:
        old = rng.choice(G.nodes(), n2, replace=False)
        old = old.tolist()
    # tally departures by the vertex's 'bp' / 'bd' attributes
    for v in old:
        self.stats["%s_patient_departed" % G.node[v]["bp"]] += 1
        self.stats["%s_donor_departed" % G.node[v]["bd"]] += 1
    G.remove_nodes_from(old)
    self.stats["departed"] += n2
    # relabel the remaining vertices 0..n-1
    return nx.convert_node_labels_to_integers(G)
Example source code for Python's convert_node_labels_to_integers()
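For reference, a minimal, self-contained sketch of what the function does (the toy graph and the 'orig' attribute name are illustrative, not taken from any of the projects below): it returns a copy of the graph whose nodes are relabelled as consecutive integers, optionally stashing the old labels in a node attribute.

import networkx as nx

# toy graph with string labels (illustrative only)
G = nx.Graph()
G.add_edges_from([("a", "b"), ("b", "c")])

# relabel nodes as consecutive integers starting at 0,
# keeping the original labels in the 'orig' node attribute
H = nx.convert_node_labels_to_integers(G, first_label=0, label_attribute="orig")
print(H.nodes(data=True))
# e.g. [(0, {'orig': 'a'}), (1, {'orig': 'b'}), (2, {'orig': 'c'})]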
def depart(self, G, rng):
    n1 = G.order()
    n2 = rng.binomial(n1, 1.0 / self.k)
    if G.order() <= n2:
        old = G.nodes()
    else:
        old = rng.choice(G.nodes(), n2, replace=False)
        old = old.tolist()
    for v in old:
        self.stats["%s_patient_departed" % G.node[v]["bp"]] += 1
        self.stats["%s_donor_departed" % G.node[v]["bd"]] += 1
    G.remove_nodes_from(old)
    self.stats["departed"] += n2
    return nx.convert_node_labels_to_integers(G)
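The depart() variants above all follow the same pattern: draw a binomial number of departures, remove that many randomly chosen vertices, and renumber the survivors so the labels stay contiguous. A hedged, standalone sketch of that pattern (the function name, the stats dict, and the toy graph are illustrative, not the original simulation environment):

import networkx as nx
import numpy as np

def depart_sketch(G, rng, k=10, stats=None):
    """Remove roughly 1/k of the vertices at random and relabel 0..n-1."""
    stats = stats if stats is not None else {"departed": 0}
    n2 = rng.binomial(G.order(), 1.0 / k)      # number of vertices leaving
    nodes = list(G.nodes())
    old = nodes if len(nodes) <= n2 else list(rng.choice(nodes, n2, replace=False))
    G.remove_nodes_from(old)
    stats["departed"] += n2
    return nx.convert_node_labels_to_integers(G)   # keep labels contiguous

rng = np.random.RandomState(0)
G = depart_sketch(nx.gnp_random_graph(50, 0.1, seed=0), rng)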
def get_couling_derivate_matrix(self, h, twist_number, s):
    if type(twist_number) == InPhase or type(twist_number) == Twist2D:
        if type(twist_number) == InPhase:
            mx = 0
            my = 0
        else:
            mx = twist_number.get_mx()
            my = twist_number.get_my()
        dhdx = h.get_derivative()
        dphi_x = (2 * np.pi * mx) / self.nx
        dphi_y = (2 * np.pi * my) / self.nx
        g = networkx.grid_2d_graph(self.ny, self.nx, periodic=True)
        g = networkx.convert_node_labels_to_integers(g, ordering='sorted')
        c = _networkx2mat(g, self.n)
        a = _build_2d_dhdx_matrix(dhdx, self.nx, self.ny, self.n, dphi_x, dphi_y, s)
        d = c * a
        return d
    else:
        raise Exception('Topology not compatible with state')
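This snippet depends on grid_2d_graph labelling nodes with (row, col) tuples and on ordering='sorted' mapping them to integers in row-major order, which the downstream matrix construction presumably assumes. A minimal standalone sketch of that relabelling step (grid size chosen arbitrarily):

import networkx as nx

ny, nx_ = 3, 4                                   # 3 rows, 4 columns
g = nx.grid_2d_graph(ny, nx_, periodic=True)     # nodes are (row, col) tuples
g = nx.convert_node_labels_to_integers(g, ordering='sorted')
# sorted tuple order (0,0), (0,1), ..., (2,3) becomes 0, 1, ..., 11
print(sorted(g.nodes()))                         # [0, 1, 2, ..., 11]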
def depart(self, G, rng):
    n1 = G.order()
    n2 = rng.binomial(n1, 1.0 / self.k)
    old = rng.choice(G.nodes(), n2, replace=False).tolist()
    G.remove_nodes_from(old)
    self.stats["departed"] += n2
    return nx.convert_node_labels_to_integers(G)
def __load_edgelist(self):
    """
    Load the graph.
    """
    self.G = nx.read_edgelist(self.params["edgelist_path"], create_using=nx.DiGraph())
    # self.G = nx.convert_node_labels_to_integers(self.G, first_label=0, ordering="sorted")
    return
def __getitem__(self, index):
    # TODO: manually verify the behaviour of convert_node_labels_to_integers here
    g = nx.convert_node_labels_to_integers(nx.read_graphml(os.path.join(self.root, self.ids[index])))
    target = self.classes[index]
    h = self.vertex_transform(g)
    g, e = self.edge_transform(g)
    target = self.target_transform(target)
    return (g, h, e), target
def get_couling_derivate_matrix(self, h, twist_number, s):
    if type(twist_number) == InPhase:
        dhdx = h.get_derivative()
        dphi_x = 0.0
        dphi_y = 0.0
        g = networkx.grid_2d_graph(self.ny, self.nx, periodic=False)
        g = networkx.convert_node_labels_to_integers(g, ordering='sorted')
        c = _networkx2mat(g, self.n)
        a = _build_2d_dhdx_matrix(dhdx, self.nx, self.ny, self.n, dphi_x, dphi_y, s)
        d = c * a
        return d
    else:
        raise Exception('Topology not compatible with state')
def core_substitution(graph, orig_cip_graph, new_cip_graph):
    """
    graph is the whole graph.
    orig_cip_graph is the interface region into which we will transplant
    new_cip_graph, which consists of the interface and the new core.
    """
    graph = _edge_to_vertex(graph)
    assert set(orig_cip_graph.nodes()) - set(graph.nodes()) == set([]), 'orig_cip_graph not in graph'
    # select only the interfaces of the cips
    new_graph_interface_nodes = [n for n, d in new_cip_graph.nodes(data=True) if 'core' not in d]
    new_cip_interface_graph = nx.subgraph(new_cip_graph, new_graph_interface_nodes)
    original_graph_interface_nodes = [n for n, d in orig_cip_graph.nodes(data=True) if 'core' not in d]
    original_interface_graph = nx.subgraph(orig_cip_graph, original_graph_interface_nodes)
    # get isomorphism between interfaces; if none is found we return None
    iso = get_good_isomorphism(graph,
                               orig_cip_graph,
                               new_cip_graph,
                               original_interface_graph,
                               new_cip_interface_graph)
    if len(iso) != len(original_interface_graph):
        # print iso
        # draw.display(orig_cip_graph)
        # draw.display(new_cip_graph)
        # draw.graphlearn([orig_cip_graph, new_cip_graph], size=10)
        logger.log(5, "grammar hash collision, discovered in 'core_substitution'")
        return None
    # ok, we got an isomorphism, so let's do the merging
    graph = nx.union(graph, new_cip_graph, rename=('', '-'))
    # removing old core
    original_graph_core_nodes = [n for n, d in orig_cip_graph.nodes(data=True) if 'core' in d]
    for n in original_graph_core_nodes:
        graph.remove_node(str(n))
    # merge interfaces; explicitly marking the interface (commented out below) is only
    # needed for the backflow probability calculation in graphlearn and is also done in merge()
    for k, v in iso.iteritems():
        # graph.node[str(k)]['interface'] = True
        merge(graph, str(k), '-' + str(v))
    # the union renamed the nodes to strings, so we need to relabel
    graph = eg._revert_edge_to_vertex_transform(graph)
    re = nx.convert_node_labels_to_integers(graph)
    graph_clean(re)
    return re
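The relabelling at the end is needed because nx.union(..., rename=('', '-')) turns every node label into a string, prefixing the nodes of the second graph with '-'. A small standalone sketch of that rename-then-renumber step (two toy path graphs instead of CIP graphs):

import networkx as nx

a = nx.path_graph(3)             # nodes 0, 1, 2
b = nx.path_graph(2)             # nodes 0, 1
merged = nx.union(a, b, rename=('', '-'))
print(sorted(merged.nodes()))    # ['-0', '-1', '0', '1', '2'], all labels are strings now
clean = nx.convert_node_labels_to_integers(merged)
print(sorted(clean.nodes()))     # [0, 1, 2, 3, 4]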
def plot_nx_graph_3d(graph, radii=None, colormap='jet', line_width=2, opacity=.9):
    """Plot a 3d graph of the skeleton

    Arguments:
        radii: radii of the edges used in the color code; if None, uniform color

    Returns:
        mayavi scene
    """
    # get graph positions
    g2 = nx.convert_node_labels_to_integers(graph, label_attribute='xyz')
    xyz = np.array([x['xyz'] for x in g2.node.values()], dtype='int32')
    # scalar colors
    if radii is not None:
        scalars = np.array([radii[tuple(x)] for x in xyz], dtype='float32')
    else:
        # scalars = np.arange(5, xyz.shape[0] + 5)
        scalars = np.ones(xyz.shape[0], dtype='float32')
    # pts = mlab.points3d(xyz[:, 0], xyz[:, 1], xyz[:, 2],
    #                     scalars,
    #                     scale_factor=node_size,
    #                     scale_mode='none',
    #                     colormap=graph_colormap,
    #                     resolution=20)
    pts = mlab.pipeline.scalar_scatter(xyz[:, 0], xyz[:, 1], xyz[:, 2], scalars)
    pts.mlab_source.dataset.lines = np.array(g2.edges(), dtype='int32')
    pts.update()
    # tube = mlab.pipeline.tube(pts, tube_radius=edge_size)
    # mlab.pipeline.surface(tube, color=edge_color)
    lines = mlab.pipeline.stripper(pts)
    mlab.pipeline.surface(lines, colormap=colormap, line_width=line_width, opacity=opacity)
    if radii is not None:
        mlab.colorbar(orientation='vertical', title='Radius [pix]')
    mlab.axes()
    return lines
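The key detail here is label_attribute='xyz': the relabelled copy keeps each original voxel-coordinate label as a node attribute, which the plotting code then reads back to build the point and line arrays. A minimal, mayavi-free sketch of that step (the coordinates are made up):

import networkx as nx
import numpy as np

g = nx.Graph()
g.add_edge((0, 0, 0), (1, 2, 3))   # skeleton nodes labelled by voxel coordinate

g2 = nx.convert_node_labels_to_integers(g, label_attribute='xyz')
# node labels are now 0..n-1; the original tuples live in the 'xyz' attribute
xyz = np.array([d['xyz'] for _, d in g2.nodes(data=True)], dtype='int32')
edges = np.array(list(g2.edges()), dtype='int32')   # integer index pairs, usable as line indices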
def reduce_graph_rings(self):
    '''
    :return:
    '''
    cycle_name_format = "R_{:}"
    index = 0
    cycle = self.get_cycle()
    while cycle:
        cycle_name = cycle_name_format.format(index)
        self.graph.add_node(cycle_name)
        # ebunch = zip(cycle, (cycle[1:] + cycle[:1]))
        self.graph.remove_edges_from(cycle)
        for node1, node2 in cycle:
            if isinstance(node1, six.string_types):
                self.graph.add_edge(node1, cycle_name,
                                    attr_dict={"bond_features": Molecule.bond_features_between_contract_rings()})
                continue
            neighbours = self.graph.neighbors(node1)
            if not neighbours:
                continue
            for neighbour in neighbours:
                edge_attrs = self.get_bond_features(neighbour, node1)
                self.graph.add_edge(neighbour, cycle_name, attr_dict={
                    "bond_features": edge_attrs})
                self.graph.remove_edge(node1, neighbour)
        nx.set_node_attributes(self.graph, "atom_features",
                               values={cycle_name: Molecule.atom_features_of_contract_rings(0)})
        for node1, node2 in cycle:
            if not isinstance(node1, six.string_types):
                self.graph.remove_node(node1)
        index += 1
        cycle = self.get_cycle()
    self.graph = nx.convert_node_labels_to_integers(self.graph,
                                                    first_label=0)
    nx.draw(self.graph)
    self.no_of_atoms = len(self.graph)
def docs_to_networkx(dataset, cats, window_size=2, vocabulary_creation=True):
    ds = './datasets/%s/' % dataset
    Gs = []
    labels = []
    type_ = 2
    vocab_creation = vocabulary_creation
    words = []  # for vocabulary

    for doc in os.listdir(ds):
        if 'train.txt' in doc:
            type_ = 1

    if type_ == 1:
        if os.path.exists("ds/vocab.txt"):
            vocab_creation = False
        with open(ds + '/train.txt', 'r', encoding='iso-8859-1') as doc:
            dc = 1
            for line in doc:
                label = line[0]
                labels.append(label)
                terms = extract_terms_from_sentence(line[1:],
                                                    stopwords=stopwords.words('english'),
                                                    lemmatize=True,
                                                    stem=True,
                                                    only_N_J=True)
                if vocab_creation:
                    words.extend(terms)
                graph = terms_to_graph(terms, window_size)
                G = graph_to_networkx(graph, name=label + '_' + str(dc))
                # G = nx.convert_node_labels_to_integers(G, first_label=1, label_attribute='label')
                nx.set_node_attributes(G, 'label', dict(zip(G.nodes(), G.nodes())))
                Gs.append(G)
                dc += 1
    else:
        if os.path.exists("ds/vocab.txt"):
            vocab_creation = False
        for cat in cats.keys():
            for doc in os.listdir(ds + cat):
                terms = extract_terms_from_file(ds + cat + '/' + doc,
                                                stopwords=stopwords.words('english'),
                                                lemmatize=True,
                                                stem=True,
                                                only_N_J=True)
                if vocab_creation:
                    words.extend(terms)
                graph = terms_to_graph(terms, window_size)
                G = graph_to_networkx(graph, name=cat + doc.split('.')[0])
                # G = nx.convert_node_labels_to_integers(G, first_label=1, label_attribute='label')
                nx.set_node_attributes(G, name='label', values=dict(zip(G.nodes(), G.nodes())))
                Gs.append(G)
                labels.append(cats[cat])

    if vocab_creation:
        vocab = dict(Counter(words))
        create_vocabulary_file(fname, vocab)

    return Gs, labels
# needs fix or discard