from heapq import heappush

import networkx


def ordered_neighbors(nx_graph, our_address, target_address):
    """Return a heap of (hop_count, neighbor) pairs for the neighbors of
    `our_address`, keyed by their shortest-path distance to `target_address`."""
    paths = list()
    try:
        all_neighbors = networkx.all_neighbors(nx_graph, our_address)
    except networkx.NetworkXError:
        # If `our_address` is not in the graph, there are no channels open
        # with that address.
        return []
    for neighbor in all_neighbors:
        try:
            length = networkx.shortest_path_length(
                nx_graph,
                neighbor,
                target_address,
            )
            heappush(paths, (length, neighbor))
        except (networkx.NetworkXNoPath, networkx.NodeNotFound):
            # Neighbors that cannot reach the target are skipped.
            pass
    return paths
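A minimal usage sketch (the toy graph and the string addresses below are made up for illustration, not taken from the original project):

import networkx

graph = networkx.Graph()
graph.add_edges_from([
    ("us", "a"), ("us", "b"),        # our two channel partners
    ("a", "target"),                 # "a" is one hop from the target
    ("b", "c"), ("c", "target"),     # "b" is two hops from the target
])
print(ordered_neighbors(graph, "us", "target"))
# heap-ordered result, e.g. [(1, 'a'), (2, 'b')]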
Python networkx.all_neighbors() example source code
import networkx as nx


def local_path_index(G, ebunch=None):
    """Local Path (LP) index: for each node pair, count the simple paths of
    length 2 and 3 between them and combine the counts as A2 + 0.001 * A3."""
    if ebunch is None:
        ebunch = nx.non_edges(G)

    def predict(u, v):
        # NeighborSet = nx.all_neighbors(G, u)
        # len(sorted(nx.common_neighbors(G, u, v)))
        paths = list(nx.all_simple_paths(G, source=u, target=v, cutoff=3))
        # print(paths)  # debug
        A2 = 0.0  # number of paths with 3 nodes (length 2)
        A3 = 0.0  # number of paths with 4 nodes (length 3)
        for path in paths:
            if len(path) == 3:
                A2 = A2 + 1.0
            elif len(path) == 4:
                A3 = A3 + 1.0
        return A2 + 0.001 * A3  # coefficient = 0.001

    Rank_List = []
    for u, v in ebunch:
        Rank_List.append((u, v, predict(u, v)))
    return Rank_List
    # Alternatively, return a generator: ((u, v, predict(u, v)) for u, v in ebunch)
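A quick sanity check on a four-node cycle (the graph and expected values are illustrative; the order of the non-edge pairs may vary):

import networkx as nx

G = nx.cycle_graph(4)            # edges 0-1, 1-2, 2-3, 3-0
print(local_path_index(G))
# e.g. [(0, 2, 2.0), (1, 3, 2.0)]: each missing edge is bridged by two length-2 paths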
multi_objective_pseudo_connection_scan_profiler.py (file source, project: gtfspy, author: CxAalto)
def _finalize_profiles(self):
    """
    Deal with the first walks by joining profiles to other stops within walking distance.
    """
    for stop, stop_profile in self._stop_profiles.items():
        assert (isinstance(stop_profile, NodeProfileMultiObjective))
        neighbor_label_bags = []
        walk_durations_to_neighbors = []
        departure_arrival_stop_pairs = []
        if stop_profile.get_walk_to_target_duration() != 0 and stop in self._walk_network.nodes:
            neighbors = networkx.all_neighbors(self._walk_network, stop)
            for neighbor in neighbors:
                neighbor_profile = self._stop_profiles[neighbor]
                assert (isinstance(neighbor_profile, NodeProfileMultiObjective))
                neighbor_real_connection_labels = neighbor_profile.get_labels_for_real_connections()
                neighbor_label_bags.append(neighbor_real_connection_labels)
                # Walking time to the neighbor: edge length divided by walking speed.
                walk_durations_to_neighbors.append(
                    int(self._walk_network.get_edge_data(stop, neighbor)["d_walk"] / self._walk_speed)
                )
                departure_arrival_stop_pairs.append((stop, neighbor))
        stop_profile.finalize(neighbor_label_bags, walk_durations_to_neighbors, departure_arrival_stop_pairs)
def get_neighbours(self):
    """ Get all neighbours adjacent to self.our_address. """
    try:
        return networkx.all_neighbors(self.graph, self.our_address)
    except networkx.NetworkXError:
        # self.our_address is not in the graph, so it has no neighbours.
        return []
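To exercise the method in isolation, the hypothetical wrapper below supplies only the two attributes it reads (`graph` and `our_address`); the real class in the source project holds much more state:

import networkx


class ChannelGraphView:                      # hypothetical minimal container
    get_neighbours = get_neighbours          # attach the function above as a method

    def __init__(self, graph, our_address):
        self.graph = graph
        self.our_address = our_address


g = networkx.Graph([("us", "peer1"), ("us", "peer2")])
print(list(ChannelGraphView(g, "us").get_neighbours()))   # ['peer1', 'peer2']
print(ChannelGraphView(g, "unknown").get_neighbours())    # [] -- address not in the graph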
def node_strength(G, w):
    """Node strength: the sum of the weights of all edges incident to node w."""
    Sum_of_weight = 0.0
    neighbors = list(nx.all_neighbors(G, w))
    for node in neighbors:
        Sum_of_weight = Sum_of_weight + G[w][node]['weight']
    return Sum_of_weight
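A small weighted example (the edge weights are chosen arbitrarily):

import networkx as nx

G = nx.Graph()
G.add_edge("a", "b", weight=2.0)
G.add_edge("a", "c", weight=0.5)
print(node_strength(G, "a"))   # 2.5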
def weighted_local_path_index(G, ebunch=None):
    """Weighted LP index: like local_path_index, but each path contributes the
    sum of its edge weights instead of a simple count."""
    if ebunch is None:
        ebunch = nx.non_edges(G)

    def predict(u, v):
        paths = list(nx.all_simple_paths(G, source=u, target=v, cutoff=3))
        # print(paths)  # debug
        A2_weight = 0.0  # total weight of paths with 3 nodes (length 2)
        A3_weight = 0.0  # total weight of paths with 4 nodes (length 3)
        for path in paths:
            if len(path) == 3:
                for node in range(0, len(path) - 1):
                    A2_weight = A2_weight + G[path[node]][path[node + 1]]['weight']
            elif len(path) == 4:
                for node in range(0, len(path) - 1):
                    A3_weight = A3_weight + G[path[node]][path[node + 1]]['weight']
        # value = sum(G[w][u]['weight'] + G[w][v]['weight'] for w in nx.common_neighbors(G, u, v))
        return A2_weight + 0.001 * A3_weight  # coefficient = 0.001

    Rank_List = []
    for u, v in ebunch:
        Rank_List.append((u, v, predict(u, v)))
    return Rank_List
    # Alternatively, return a generator: ((u, v, predict(u, v)) for u, v in ebunch)
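The weighted variant on the same four-node cycle, with every edge weight set to 1.0 (values are illustrative; pair order may vary):

import networkx as nx

G = nx.cycle_graph(4)
nx.set_edge_attributes(G, 1.0, "weight")
print(weighted_local_path_index(G))
# e.g. [(0, 2, 4.0), (1, 3, 4.0)]: two length-2 paths per pair, each contributing 1.0 + 1.0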
def solve(new_graph, solution=None, maxAcceptable=0, orig=None):
    """Depth-first search for a sequence of at most `maxAcceptable` recolouring
    moves that collapses the graph to a single node (see reduceGraph below)."""
    path = len(solution)
    if path > maxAcceptable:
        return
    colours_left = len({node.colour for node in new_graph.nodes()})
    distinct_groups = len(new_graph.nodes())
    # We accept at most N moves.
    # If we have gotten to a path of length N, then colours_left must == 1.
    if not (maxAcceptable - path > colours_left - 2):
        return
    # Start out by reducing same-coloured nodes.
    if len(new_graph.nodes()) == 1:
        return solution
    # Now we mutate.
    for node in list(sorted(new_graph.nodes())):
        # Get the neighbours ...
        neighbours = list(nx.all_neighbors(new_graph, node))
        # ... and their colours.
        neigh_colours = [x.colour for x in neighbours]
        # We can be assured their colours differ from ours thanks to reduceGraph.
        for colour in neigh_colours:
            # Take a copy of our graph and toggle the colour.
            tmp_graph = new_graph.copy()
            xnode = [x for x in tmp_graph if x.idx == node.idx][0]
            xnode.colour = colour
            tmp_graph = reduceGraph(tmp_graph)
            x = solve(tmp_graph, solution=solution + [(node.idx, colour)],
                      maxAcceptable=maxAcceptable, orig=orig)
            if x is not None:
                return x
import math


def structure_dependent_index(G, ebunch=None):
    """Structure-dependent index: count simple paths of each length up to the
    (rounded-up) average shortest path length and weight them by 0.6**(length - 2)."""
    if ebunch is None:
        ebunch = nx.non_edges(G)
    # C = nx.average_clustering(G)
    # d = nx.average_shortest_path_length(G)
    path_range = max(2, math.ceil(nx.average_shortest_path_length(G)))

    def predict(u, v):
        SD_Index = {}
        # Generate all simple paths in the graph G from source to target, length <= cutoff.
        paths = list(nx.all_simple_paths(G, source=u, target=v, cutoff=path_range))
        # print(paths)  # debug
        for path in paths:
            if len(path) in SD_Index:
                SD_Index[len(path)] = SD_Index[len(path)] + 1.0
            else:
                SD_Index[len(path)] = 1.0
        # print(SD_Index)  # debug
        # Sum up the number of paths of each length.
        Coefficient = 0.6
        SD_Value = 0.0
        key_Sequence = list(sorted(SD_Index.keys()))
        for key in key_Sequence:
            if key != 2:
                SD_Value = SD_Value + math.pow(Coefficient, key - 2.0) * SD_Index[key]
        return SD_Value  # Coefficient = 0.6

    Rank_List = []
    for u, v in ebunch:
        Rank_List.append((u, v, predict(u, v)))
    return Rank_List
    # Alternatively, return a generator: ((u, v, predict(u, v)) for u, v in ebunch)
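On the same four-node cycle the average shortest path length is 4/3, so the cutoff is 2 and only length-2 paths contribute (values are illustrative; pair order may vary):

import networkx as nx

G = nx.cycle_graph(4)
print(structure_dependent_index(G))
# e.g. [(0, 2, 1.2), (1, 3, 1.2)]: two length-2 paths per pair, each weighted by 0.6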
##======================================================================##
import networkx as nx
import numpy as np


def motif_m7(g):
    """Build the motif co-occurrence (weight) matrix W: for every node triple
    (u, v, w) that forms the motif, each of the three node pairs gets +1."""
    n = nx.number_of_nodes(g)
    W = np.mat(np.zeros((n, n), dtype=float))
    # Nodes are assumed to be labelled 1..n, so node u maps to row/column u - 1.
    for u in range(1, n + 1):
        u_neighbors = set(nx.all_neighbors(g, u))
        for v in u_neighbors:
            if v < u:
                continue
            v_neighbors = set(nx.all_neighbors(g, v))
            for w in v_neighbors:
                if w < u or w < v:
                    continue
                # The three conditions are the three rotations of the same motif:
                # a bidirectional pair plus two edges pointing at the third node.
                if ((g.has_edge(u, v) and g.has_edge(v, u) and g.has_edge(u, w) and g.has_edge(v, w)) or
                        (g.has_edge(u, w) and g.has_edge(w, u) and g.has_edge(u, v) and g.has_edge(w, v)) or
                        (g.has_edge(v, w) and g.has_edge(w, v) and g.has_edge(w, u) and g.has_edge(v, u))):
                    W[u - 1, w - 1] += 1
                    W[w - 1, u - 1] += 1
                    W[u - 1, v - 1] += 1
                    W[v - 1, u - 1] += 1
                    W[v - 1, w - 1] += 1
                    W[w - 1, v - 1] += 1
    return W
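A three-node check; the graph is made up so that it contains exactly one instance matched by the first condition (a bidirectional 1-2 pair with both nodes pointing at node 3):

import networkx as nx

g = nx.DiGraph([(1, 2), (2, 1), (1, 3), (2, 3)])
print(motif_m7(g))
# roughly: [[0. 1. 1.]
#           [1. 0. 1.]
#           [1. 1. 0.]] -- each node pair of the single motif instance is counted once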
def count_motif(g):
    """Tally three-node motifs: for each pair (u, v) with u < v and each
    larger-labelled neighbour w of either node, classify the triple with the
    count_m1 ... count_m13 predicates (defined elsewhere in this module) and
    increment the matching counter."""
    count_number = [0] * 13
    n = nx.number_of_nodes(g)
    node_set = set()
    for u in range(1, n + 1):
        if not g.has_node(u):
            continue
        for v in range(u + 1, n + 1):
            if not g.has_node(v):
                continue
            # Candidate third nodes: neighbours of u or v with a larger label,
            # so that every unordered triple is examined only once.
            u_neighbors = list(set(nx.all_neighbors(g, u)))
            v_neighbors = list(set(nx.all_neighbors(g, v)))
            for i in u_neighbors:
                if i > v and i > u:
                    node_set.add(i)
            for i in v_neighbors:
                if i > v and i > u:
                    node_set.add(i)
            if len(node_set) <= 0:
                continue
            print(u, v, node_set)
            for w in node_set:
                if count_m1(g, u, v, w):
                    count_number[0] += 1
                elif count_m2(g, u, v, w):
                    count_number[1] += 1
                elif count_m3(g, u, v, w):
                    count_number[2] += 1
                elif count_m4(g, u, v, w):
                    count_number[3] += 1
                elif count_m5(g, u, v, w):
                    count_number[4] += 1
                elif count_m6(g, u, v, w):
                    count_number[5] += 1
                elif count_m7(g, u, v, w):
                    count_number[6] += 1
                elif count_m8(g, u, v, w):
                    count_number[7] += 1
                elif count_m9(g, u, v, w):
                    count_number[8] += 1
                elif count_m10(g, u, v, w):
                    count_number[9] += 1
                elif count_m11(g, u, v, w):
                    count_number[10] += 1
                elif count_m12(g, u, v, w):
                    count_number[11] += 1
                elif count_m13(g, u, v, w):
                    count_number[12] += 1
            node_set.clear()
    print(count_number)
    return count_number
def paper_figure1():
    """
    The graph g comes from Fig. 1 of Benson, Gleich and Leskovec (2016),
    "Higher-order organization of complex networks".
    :return: None
    """
    g = nx.DiGraph()
    g.add_edges_from([
        (1, 2), (1, 3), (1, 4), (1, 5), (1, 6),
        (2, 1), (2, 3), (2, 4), (2, 5),
        (6, 2), (6, 7),
        (7, 2), (7, 6),
        (8, 2), (8, 6), (8, 9), (8, 10),
        (9, 6), (9, 7), (9, 8), (9, 10),
    ])
    # W = HigherOrderNetwork.motif_m7(g)
    # cluster, condv, condc, order = HigherOrderNetwork.spectral_partitioning(W)
    # print("\n\npaper figure1's result")
    # print('condc: ', condc)
    # print('cluster\n', cluster)
    # print(list(nx.all_neighbors(g, 2)))
    # HigherOrderNetwork.count_m1(g) ... HigherOrderNetwork.count_m10(g)
    HigherOrderNetwork.count_motif(g)
def reduceGraph(graph):
    """Merge one pair of adjacent, same-coloured nodes (accumulating their
    points), then recurse until no such pair remains."""
    g = graph.copy()
    equivalent_node_a = None
    equivalent_node_b = None
    for (a, b) in sorted(g.edges()):
        # If we find two adjacent nodes of the same colour ...
        if a.colour == b.colour:
            equivalent_node_a = a
            equivalent_node_b = b
            # ... we quit immediately.
            break
    # If we didn't find such a pair, then we return our graph immediately.
    if equivalent_node_a is None:
        return g
    # Otherwise, we do the merge and then return reduceGraph(g) in case we
    # missed any nodes (since we broke on the first match).
    a_neighs = list(nx.all_neighbors(g, equivalent_node_a))
    b_neighs = list(nx.all_neighbors(g, equivalent_node_b))
    # Remove for ease of access.
    b_neighs.remove(equivalent_node_a)
    a_neighs.remove(equivalent_node_b)
    to_remove = []
    to_add = []
    # Rewire b's neighbours onto a so that b can be deleted.
    for b_neighbour in b_neighs:
        if b_neighbour not in a_neighs:
            to_add.append((equivalent_node_a, b_neighbour))
            to_remove.append((equivalent_node_b, b_neighbour))
    # if a_neighbour not in b_neighs and \
    #         a_neighbour != equivalent_node_b:
    #     to_add.append((a_neighbour, equivalent_node_b))
    #     to_remove.append((a_neighbour, equivalent_node_a))
    for (a, b) in to_remove:
        g.remove_edge(a, b)
    for (a, b) in to_add:
        g.add_edge(a, b)
    equivalent_node_a.points += equivalent_node_b.points
    g.remove_node(equivalent_node_b)
    return reduceGraph(g)
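A toy round-trip through reduceGraph and the solve function above. The Node class below is a hypothetical stand-in for the project's own node type; it only provides the idx, colour and points attributes plus the ordering that reduceGraph and solve rely on:

import networkx as nx


class Node:
    def __init__(self, idx, colour, points=1):
        self.idx = idx
        self.colour = colour
        self.points = points

    def __lt__(self, other):                      # solve() sorts nodes
        return self.idx < other.idx

    def __repr__(self):
        return "Node(%d, %r, points=%d)" % (self.idx, self.colour, self.points)


a, b, c = Node(1, "red"), Node(2, "red"), Node(3, "blue")
g = nx.Graph([(a, b), (b, c)])

reduced = reduceGraph(g)
print(sorted(reduced.nodes()))
# [Node(1, 'red', points=2), Node(3, 'blue', points=1)] -- the adjacent red nodes were merged

# solve() expects an explicit list for `solution` (the default None would fail).
print(solve(reduced, solution=[], maxAcceptable=1, orig=reduced))
# [(1, 'blue')] -- recolouring node 1 to blue collapses the graph in a single move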