Python connected_components() usage examples (source code)

env_generator.py (project: gym-extensions, author: Breakend)
def merge_rectangles_into_obstacles(self, centers, widths, heights, epsilon):
        """Merges rectangles defined by centers, widths, heights. Two rectangles
        with distance < epsilon are considered part of the same object."""

        G = nx.Graph()
        obstacles = {i: Obstacle(centers[i, :], widths[i, 0], heights[i, 0]) for i in range(len(centers))}
        G.add_nodes_from(obstacles.keys())

        for i in obstacles:
            for j in obstacles:
                if i != j and obstacles[i].distance_to_obstacle(obstacles[j]) < epsilon:
                    G.add_edge(i,j)

        merged_obstacles = {}
        conn_components = nx.connected_components(G)
        for cc in conn_components:
            cc = list(cc)
            new_obs = obstacles[cc[0]]
            for i in range(1, len(cc)):
                new_obs.merge(obstacles[cc[i]])

            merged_obstacles[cc[0]] = new_obs

        return merged_obstacles
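The merge-by-proximity pattern above does not depend on the project's Obstacle class; a self-contained sketch with plain intervals (made-up data) shows the same build-graph-then-collapse-components flow:

import networkx as nx

intervals = [(0, 2), (1, 3), (10, 12), (11, 13), (20, 21)]
epsilon = 0.5

G = nx.Graph()
G.add_nodes_from(range(len(intervals)))
for i in range(len(intervals)):
    for j in range(i + 1, len(intervals)):
        lo = max(intervals[i][0], intervals[j][0])
        hi = min(intervals[i][1], intervals[j][1])
        if lo - hi < epsilon:  # gap below epsilon (negative means overlap)
            G.add_edge(i, j)

merged = [(min(intervals[k][0] for k in cc), max(intervals[k][1] for k in cc))
          for cc in nx.connected_components(G)]
print(merged)  # [(0, 3), (10, 13), (20, 21)] in some order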
owner_graph_metrics.py (project: community-networks-monitoring-tools, author: netCommonsEU)
def getOwnerRobustness(self, graph):
        """Compute the "owner robustness"."""

        ownerNodes, nodeOwner = self.get_owner_distribution(graph)
        print("# owner".rjust(long_align_space), ",",
              "main C. size".rjust(long_align_space), ",",
              "number of components".rjust(long_align_space))
        for owner, nodes in sorted(ownerNodes.items(),
                                   key=lambda x: -len(x[1])):
            purged_graph = nx.Graph(graph)
            for n in nodes:
                purged_graph.remove_node(n)
            comp_list = list(nx.connected_components(purged_graph))
            main_comp = sorted(comp_list, key=len, reverse=True)[0]
            print(owner.rjust(long_align_space), ",",
                  str(len(main_comp)).rjust(long_align_space), ",",
                  str(len(comp_list)).rjust(long_align_space))
        print("")
        print("")

    #  ################# helper functions
    # These functions are needed to handle data structures from
    # other sources of data. You can use a database and dump the
    # data in XML from a db. You probably do not need these functions.
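Outside the class (and without the long_align_space formatting), the owner-robustness loop reduces to: remove each owner's nodes and measure the surviving giant component. A minimal standalone sketch, where the graph and the owner assignment are made up:

import networkx as nx

G = nx.barbell_graph(5, 2)
owners = {n: ("alice" if n < 6 else "bob") for n in G.nodes()}

by_owner = {}
for node, owner in owners.items():
    by_owner.setdefault(owner, set()).add(node)

for owner, nodes in sorted(by_owner.items(), key=lambda x: -len(x[1])):
    H = G.copy()
    H.remove_nodes_from(nodes)
    comps = list(nx.connected_components(H))
    giant = max(comps, key=len) if comps else set()
    print(owner, len(giant), len(comps))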
leaflet-dask-delayed.py (project: MDAnalysis-with-Dask, author: Becksteinlab)
def Leaflet_finder(traj, i, j, ii, jj, len_chunks, cutoff):
    g1 = np.load(os.path.abspath(os.path.normpath(os.path.join(os.getcwd(),traj))), mmap_mode='r')[i:i+len_chunks]
    g2 = np.load(os.path.abspath(os.path.normpath(os.path.join(os.getcwd(),traj))), mmap_mode='r')[j:j+len_chunks]

    block = np.zeros((len(g1),len(g2)),dtype=float)
    block[:,:] = cdist(g1, g2) <= cutoff
    adj_list = np.where(block[:,:] == True)        
    adj_list = np.vstack(adj_list)

    adj_list[0] = adj_list[0]+ii*len_chunks
    adj_list[1] = adj_list[1]+jj*len_chunks

    if adj_list.shape[1] == 0:
        adj_list=np.zeros((2,1))

    graph = nx.Graph()
    edges = [(adj_list[0,k],adj_list[1,k]) for k in range(0,adj_list.shape[1])]
    graph.add_edges_from(edges)
    leaflet = sorted(nx.connected_components(graph), key=len, reverse=True)
    l_connected = list(leaflet)  # this variant keeps every component, singletons included

    return l_connected
leaflet-dask-array.py (project: MDAnalysis-with-Dask, author: Becksteinlab)
def Leaflet_finder(block, traj, cutoff, len_atom, len_chunks, block_id=None):
    id_0 = block_id[0]
    id_1 = block_id[1]

    block[:,:] = cdist(np.load(traj, mmap_mode='r')[id_0*len_chunks:(id_0+1)*len_chunks], np.load(traj, mmap_mode='r')[id_1*len_chunks:(id_1+1)*len_chunks]) <= cutoff 
    adj_list = np.where(block[:,:] == True)        
    adj_list = np.vstack(adj_list)

    adj_list[0] = adj_list[0]+id_0*len_chunks
    adj_list[1] = adj_list[1]+id_1*len_chunks

    if adj_list.shape[1] == 0:
        adj_list=np.zeros((2,1))

    graph = nx.Graph()
    edges = [(adj_list[0,k],adj_list[1,k]) for k in range(0,adj_list.shape[1])]
    graph.add_edges_from(edges)
    l = np.array({i: item for i, item in enumerate(sorted(nx.connected_components(graph)))}, dtype=object).reshape(1,1)

    return l
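The closing line packs the per-block result into a 1x1 object array so that dask can reassemble the blocks into an array. The trick in isolation (no dask required), using np.empty, which on recent NumPy is the more robust idiom for storing an arbitrary Python object in an array cell:

import numpy as np

components = {0: {1, 2, 3}, 1: {7, 8}}   # toy per-block result
cell = np.empty((1, 1), dtype=object)
cell[0, 0] = components
print(cell.shape, cell[0, 0])            # (1, 1) {0: {1, 2, 3}, 1: {7, 8}}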
Network_Generator.py (project: PhD, author: wutaoadeny)
def ER_Generateor(N=1000, M=3000):
    G = nx.gnm_random_graph(N, M)
    #component of the network
    if not nx.is_connected(G):
        # Get the Greatest Component of Networks #####
        components = sorted(nx.connected_components(G), key=len, reverse=True)
        print("Component Number of the Generated Network:", len(components))

        for i in range(1, len(components)):
            for node in components[i]:
                G.remove_node(node)
        #end for
        print(nx.is_connected(G))
    #endif

    return G

#************************************************************************
Network_Generator.py (project: PhD, author: wutaoadeny)
def SF_Generateor(N=1000, m=3):
    G = nx.barabasi_albert_graph(N, m)
    #component of the network
    if not nx.is_connected(G):
        # Get the Greatest Component of Networks #####
        components = sorted(nx.connected_components(G), key=len, reverse=True)
        print("Component Number of the Generated Network:", len(components))

        for i in range(1, len(components)):
            for node in components[i]:
                G.remove_node(node)
        #end for
        print(nx.is_connected(G))
    #endif

    return G

#************************************************************************
Optimal_Network_Attack.py (project: PhD, author: wutaoadeny)
def Optimal_Percolation_Simultaneous_Attack(G, Centrality):
    #print "Optimal_Percolation_Simultaneous_Attack"
    Gn = G.copy()
    Ranking = Ranking_methods.Nodes_Ranking(Gn, Centrality)
    Ranking = sorted(Ranking.items(), key=lambda d: d[1], reverse=True)

    Giant_Component_Size_List = []
    Component_Num_List = []
    for nid in Ranking:
        G.remove_node(nid[0])
        ### Get the Greatest Component of Networks #####
        Components = sorted(nx.connected_components(G), key = len, reverse=True)
        if len(Components) >= 1:
            Giant_Component_Size = len(Components[0])
            if Giant_Component_Size > 1:
                Giant_Component_Size_List.append(Giant_Component_Size)
                Component_Num_List.append(len(Components))
    #end for
    return Giant_Component_Size_List,Component_Num_List

#************************************************************************
tsp_lazy.py (project: PySCIPOpt, author: SCIP-Interfaces)
def findSubtours(self, checkonly, sol):
        EPS = 1.e-6
        edges = []
        x = self.model.data
        for (i, j) in x:
            if self.model.getSolVal(sol, x[i, j]) > EPS:
                edges.append((i,j))

        G = networkx.Graph()
        G.add_edges_from(edges)
        Components = list(networkx.connected_components(G))

        if len(Components) == 1:
            return False
        elif checkonly:
            return True

        for S in Components:
            self.model.addCons(quicksum(x[i, j] for i in S for j in S if j > i) <= len(S) - 1)
            print("cut: len(%s) <= %s" % (S, len(S) - 1))

        return True
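For intuition: if the current solution contains the two cycles 1-2-3-1 and 4-5-6-4, connected_components returns {1, 2, 3} and {4, 5, 6}, and the cut for S = {1, 2, 3} reads x[1,2] + x[1,3] + x[2,3] <= 2, which forbids closing that subtour on its own.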
grouping.py (project: pyhiro, author: wanweiwei07)
def clusters(points, radius):
    '''
    Find clusters of points which have neighbours closer than radius

    Arguments
    ---------
    points: (n, d) points (of dimension d)
    radius: max distance between points in a cluster

    Returns:
    groups: (m) sequence of indices for points

    '''
    tree   = KDTree(points)
    pairs  = tree.query_pairs(radius)
    graph  = from_edgelist(pairs)
    groups = list(connected_components(graph))
    return groups
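A quick sanity check with two well-separated blobs (a hypothetical standalone run; in the source module KDTree is scipy.spatial.cKDTree and from_edgelist/connected_components come from networkx):

import numpy as np

rng = np.random.default_rng(0)
blob_a = rng.normal(0.0, 0.05, size=(20, 2))
blob_b = rng.normal(5.0, 0.05, size=(20, 2))
points = np.vstack([blob_a, blob_b])

groups = clusters(points, radius=0.5)
print(len(groups))  # expect 2 groups of point indices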
graph.py (project: pyhiro, author: wanweiwei07)
def smoothed(mesh, angle):
    '''
    Return a non-watertight version of the mesh which will
    render nicely with smooth shading.

    Arguments
    ---------
    mesh:  Trimesh object
    angle: float, angle in radians, adjacent faces which have normals
           below this angle will be smoothed.

    Returns
    ---------
    smooth: Trimesh object
    '''
    adjacency = adjacency_angle(mesh, angle)
    graph = nx.from_edgelist(adjacency)
    graph.add_nodes_from(np.arange(len(mesh.faces)))
    smooth = mesh.submesh(nx.connected_components(graph),
                          only_watertight = False,
                          append = True)
    return smooth
mabed.py (project: pyMABED, author: AdrienGuille)
def merge_redundant_events(self, events):
        # compute the connected components in the redundancy graph
        components = []
        for c in nx.connected_components(self.redundancy_graph):
            components.append(c)
        final_events = []

        # merge redundant events
        for event in events:
            main_word = event[2]
            main_term = main_word
            descriptions = []
            for component in components:
                if main_word in component:
                    main_term = ', '.join(component)
                    for node in component:
                        descriptions.append(self.redundancy_graph.node[node]['description'])
                    break
            if len(descriptions) == 0:
                related_words = event[3]
            else:
                related_words = self.merge_related_words(main_term, descriptions)
            final_event = (event[0], event[1], main_term, related_words, event[4])
            final_events.append(final_event)
        return final_events
multreelib.py (project: MultifurcationFeasibility, author: Mathagoris)
def get_conflicts(self):
        """Find irreconcilable connected components of leg."""
        conflicts = set()  # connected components with conflict
        for cc in nx.connected_components(self.leg):
            # key = species, val = set of loci in species for this cc
            loci_dct = collections.defaultdict(set)

            for label in cc:
                species, locus = label
                loci_dct[species].add(locus)

            for sp, loci in loci_dct.items():
                # conflict if a species has more than one locus in this cc
                if len(loci) >= 2:
                    conflicts.add(tuple(cc))
                    break
        return conflicts
plctlib.py (project: MultifurcationFeasibility, author: Mathagoris)
def get_conflicts(leg):
    """Find irreconcilable connected components of leg."""
    conflicts = set() # connected components with conflict
    for cc in nx.connected_components(leg):
        # key = species, val = set of loci in species for this cc
        loci_dct = collections.defaultdict(set)

        for label in cc:
            species, locus = label
            loci_dct[species].add(locus)

        for sp, loci in loci_dct.items():
            # conflict if a species has more than one locus in this cc
            if len(loci) >= 2:
                conflicts.add(tuple(cc))
                break
    return conflicts
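A toy leg graph shows when a conflict fires; labels are (species, locus) pairs, and the second edge puts two human loci into one component (made-up data):

import networkx as nx

leg = nx.Graph()
leg.add_edge(("human", 1), ("mouse", 1))   # fine: one locus per species
leg.add_edge(("human", 1), ("human", 2))   # conflict: two human loci, one cc

print(get_conflicts(leg))   # one tuple covering the conflicted component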
graphing.py (project: grocsvs, author: grocsvs)
def visualize_frags(outdir, graphs, options):
    from rpy2.robjects import r

    utilities.ensure_dir(outdir)

    for i, graph in enumerate(graphs):
        r.pdf(os.path.join(outdir, "fragments.cluster_{}.pdf".format(i)))

        for component in networkx.connected_components(graph):
            subgraph = graph.subgraph(component)

            ends = [node for node,degree in subgraph.degree_iter() if degree==1]
            breakends = [node for node in list(networkx.shortest_simple_paths(subgraph, ends[0], ends[1]))[0]]
            # breakends = [breakend_from_label(node) for node in breakends]
            breakends = breakends[:-1:2] + breakends[-1:]
            # print ")"*100, breakends

            for sample, dataset in sorted(options.iter_10xdatasets()):
                plot_frags(breakends, options, sample, dataset)
        # plot_frags(breakpoints, options, sample, dataset)
        r["dev.off"]()
gatherings_cores.py (project: openbadge-analysis, author: HumanDynamics)
def extract_groups(m2m):
    """Extracts a list of groups from a social network varying through time.

    Groups are defined as connected components of the social graph at a given
    time bin.

    Parameters
    ----------
    m2m : pd.DataFrame
        The social network, for instance, member-to-member bluetooth proximity
        data.  It must have the following columns: 'datetime', 'member1', and
        'member2'.

    Returns
    -------
    pd.DataFrame :
        The groups, as sets of members with a datetime.
    """
    groups = m2m.groupby('datetime').apply(
        lambda df:
        pd.Series([frozenset(c) for c in nx.connected_components(nx.from_pandas_dataframe(df.reset_index(), 'member1', 'member2'))])
    )
    groups.name = 'members'

    return groups.reset_index()[['datetime', 'members']]
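A toy frame makes the grouping visible. Note that nx.from_pandas_dataframe is the networkx 1.x spelling (2.x renamed it from_pandas_edgelist), so this sketch assumes the pandas/networkx versions the snippet was written for:

import pandas as pd

m2m = pd.DataFrame({
    'datetime': ['t0', 't0', 't1'],
    'member1':  ['a', 'c', 'a'],
    'member2':  ['b', 'd', 'c'],
})
print(extract_groups(m2m))
# expect three rows: frozenset({'a','b'}) and frozenset({'c','d'}) at t0,
# frozenset({'a','c'}) at t1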
actor.py (project: saapy, author: ashapochka)
def group_similar_actors(self):
        similar_actor_groups = [list(g) for g in
                                nx.connected_components(self.actor_graph)]
        return similar_actor_groups
git_authorship.py (project: saapy, author: ashapochka)
def _identify_actors(self, edges, nodes):
        author_graph = nx.Graph()
        author_graph.add_nodes_from(nodes)
        author_graph.add_edges_from(edges)
        same_actor_groups = [list(g) for g in
                             nx.connected_components(author_graph)]
        actors = [dict(primary_id=int(g[0]), group_ids=[int(gid) for gid in g])
                  for g in same_actor_groups]
        return actors
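The identity-merging idea in isolation: ids linked by a "same person" edge collapse into one actor, and unlinked ids stay their own group (hypothetical ids):

import networkx as nx

author_graph = nx.Graph()
author_graph.add_nodes_from([1, 2, 3, 4, 5])
author_graph.add_edges_from([(1, 2), (2, 3)])   # ids 1-3 are the same author

actors = [sorted(g) for g in nx.connected_components(author_graph)]
print(actors)   # [[1, 2, 3], [4], [5]] in some order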
leaflet-dask-delayed.py (project: MDAnalysis-with-Dask, author: Becksteinlab)
def Leaflet_finder(traj, i, j, len_atom, cutoff):
    # note: len_chunks is not a parameter here; this variant expects it at module scope
    atom = np.load(traj)
    g1 = atom[i:i+len_chunks]
    g2 = atom[j:j+len_chunks]
    block = np.zeros((len(g1),len(g2)),dtype=float)
    block[:,:] = cdist(g1, g2) <= cutoff
    S = scipy.sparse.dok_matrix((len_atom, len_atom))
    S[i:i+len_chunks, j:j+len_chunks] = block
    leaflet = sorted(nx.connected_components(nx.Graph(S>0)), key=len, reverse=True)
    l_connected = []  # keep only components with more than one member
    for comp in leaflet:
        if len(comp) > 1:
            l_connected.append(comp)

    return list(l_connected)
summarize.py (project: dnflow, author: DocNow)
def run(self):
        date_path = self.search['date_path']
        files = sorted(os.listdir('data/%s/media' % date_path))
        hashes = {}
        matches = []
        g = nx.Graph()
        update_block_size = get_block_size(len(files), 5)
        for i in range(len(files)):
            f = files[i]
            fn = 'data/%s/media/%s' % (date_path, f)
            ahash = imagehash.average_hash(Image.open(fn))
            dhash = imagehash.dhash(Image.open(fn))
            phash = imagehash.phash(Image.open(fn))
            hashes[f] = {'ahash': ahash, 'dhash': dhash, 'phash': phash}
            for j in range(0, i):
                f2name = files[j]
                f2 = hashes[f2name]
                sumhash = sum([ahash - f2['ahash'],
                               dhash - f2['dhash'],
                               phash - f2['phash']])
                # FIXME: 40 is a hard-coded arbitrary (eyeballed) threshold
                if sumhash <= 40:
                    matches.append([f, files[j],
                                    ahash - f2['ahash'],
                                    dhash - f2['dhash'],
                                    phash - f2['phash'],
                                    sumhash])
                    g.add_edge(f, f2name)
            if i % update_block_size == 0:
                self.update_job(
                    date_path=self.search['date_path'],
                    status="STARTED: %s - %s/%s" %
                           (self.task_family, i, len(files))
                )
        with self.output().open('w') as fp_graph:
            components = list(nx.connected_components(g))
            # Note: sets are not JSON serializable
            d = []
            for s in components:
                d.append(list(s))
            json.dump(d, fp_graph, indent=2)
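The near-duplicate test rests on imagehash's subtraction operator, which returns the Hamming distance between two hashes; in isolation (hypothetical file paths):

from PIL import Image
import imagehash

h1 = imagehash.phash(Image.open("a.png"))   # hypothetical paths
h2 = imagehash.phash(Image.open("b.png"))
print(h1 - h2)   # count of differing hash bits; small means near-duplicate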
Optimal_Network_Attack.py (project: PhD, author: wutaoadeny)
def Optimal_Percolation_Sequence_Attack(G, Centrality, r=0.025):
    print "Optimal_Percolation_Sequence_Attack"
    Step = int(r*G.number_of_nodes())
    print Step
    Gn = G.copy()
    Ranking = Ranking_methods.Nodes_Ranking(Gn, Centrality)
    Ranking = sorted(Ranking.items(), key=lambda d: d[1], reverse=True)
    #print Ranking
    G.remove_node(Ranking[0][0])

    ### Get the Greatest Component of Networks #####
    Giant_Component_Size_List = []
    Components = sorted(nx.connected_components(G), key = len, reverse=True)
    Giant_Component_Size = len(Components[0])
    Giant_Component_Size_List.append(Giant_Component_Size)
    #print "Components[0]:",Components[0]

    while Giant_Component_Size_List[-1] > 2 and Ranking:  # Ranking is a list; the original test against {} was always True
        Gn = G.copy()
        Ranking = Ranking_methods.Nodes_Ranking(Gn, Centrality)
        Ranking = sorted(Ranking.items(), key=lambda d: d[1], reverse=True)
        #print Ranking
        if len(Ranking) > Step:
            for i in range(0,Step):
                G.remove_node(Ranking[i][0])
        Components = sorted(nx.connected_components(G), key = len, reverse=True)
        Giant_Component_Size = len(Components[0])
        Giant_Component_Size_List.append(Giant_Component_Size)

        #print "Giant_Component_Size_List, Components[0], Ranking:",Centrality, Giant_Component_Size_List, Components,G.edges(), Ranking
        #print "Sequence_attack:", Centrality, Ranking[0][0]
    #end while

    return Giant_Component_Size_List
#===============================================================================================
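The loop above can be exercised without the project's Ranking_methods module by substituting plain degree ranking; a simplified stand-in (assumes networkx 2.x, where G.degree() iterates over (node, degree) pairs):

import networkx as nx

G = nx.barabasi_albert_graph(200, 3)
sizes = []
while G.number_of_nodes() > 0:
    comps = sorted(nx.connected_components(G), key=len, reverse=True)
    if not comps or len(comps[0]) <= 2:
        break
    sizes.append(len(comps[0]))
    top = max(G.degree(), key=lambda d: d[1])[0]   # re-rank after each removal
    G.remove_node(top)
print(sizes[:10])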
vrp_lazy.py (project: PySCIPOpt, author: SCIP-Interfaces)
def addCuts(self, checkonly):
        """add cuts if necessary and return whether model is feasible"""
        cutsadded = False
        edges = []
        x = self.model.data
        for (i, j) in x:
            if self.model.getVal(x[i, j]) > .5:
                if i != V[0] and j != V[0]:
                    edges.append((i, j))
        G = networkx.Graph()
        G.add_edges_from(edges)
        Components = list(networkx.connected_components(G))
        for S in Components:
            S_card = len(S)
            q_sum = sum(q[i] for i in S)
            NS = int(math.ceil(float(q_sum) / Q))
            S_edges = [(i, j) for i in S for j in S if i < j and (i, j) in edges]
            if S_card >= 3 and (len(S_edges) >= S_card or NS > 1):
                cutsadded = True
                if checkonly:
                    break
                else:
                    self.model.addCons(quicksum(x[i, j] for i in S for j in S if j > i) <= S_card - NS)
                    print("adding cut for", S_edges)

        return cutsadded
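Concretely: if S = {2, 5, 7} has demands q = (4, 5, 3) and the vehicle capacity is Q = 10, then NS = ceil(12/10) = 2, so the cut permits at most |S| - NS = 1 edge inside S, forcing those customers onto at least two routes.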
networkclustering.py (project: PyPSA, author: PyPSA)
def busmap_by_linemask(network, mask):
    mask = network.lines.loc[:,['bus0', 'bus1']].assign(mask=mask).set_index(['bus0','bus1'])['mask']
    G = nx.OrderedGraph()
    G.add_nodes_from(network.buses.index)
    G.add_edges_from(mask.index[mask])
    return pd.Series(OrderedDict((n, str(i))
                                 for i, g in enumerate(nx.connected_components(G))
                                 for n in g),
                     name='name')
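Stripped of the PyPSA objects, the core is "treat every masked line as an edge, then label each bus by its component"; a toy stand-in with made-up bus names:

from collections import OrderedDict
import networkx as nx
import pandas as pd

buses = ['b0', 'b1', 'b2', 'b3']
kept_lines = [('b0', 'b1'), ('b2', 'b3')]

G = nx.Graph()
G.add_nodes_from(buses)
G.add_edges_from(kept_lines)
busmap = pd.Series(OrderedDict((n, str(i))
                               for i, g in enumerate(nx.connected_components(G))
                               for n in g),
                   name='name')
print(busmap)   # b0/b1 share one label, b2/b3 another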
CommunityDetection.py (project: visa_free, author: BBischof)
def communities(self, nCommunities, weight=None):
        """
        Compute communities.

        Parameters
        ----------
        nCommunities - number of communities to be returned.
            This is added to simplify the process; the original GN algorithm doesn't
            need a predetermined number of communities.
            Other measures, such as a threshold on betweenness centrality, can be used instead.

        weight (string) - If None, all edge weights are considered equal.
            Otherwise holds the name of the edge attribute used as weight.


        Returns
        --------
        A list of communities where each community is a list of the nodes in the community.  
        """
        gr = self.g
        n = nx.number_connected_components(gr)
        components = nx.connected_components(gr)

        while (n < nCommunities):
            gr = self.communitySplits(gr, weight=weight)
            components = nx.connected_components(gr)
            n = nx.number_connected_components(gr)
            if gr.number_of_edges() == 0:
                break
        return components
graph.py (project: pyhiro, author: wanweiwei07)
def split(mesh, only_watertight=True, adjacency=None):
    '''
    Given a mesh, will split it up into a list of meshes based on face connectivity
    If only_watertight is true, it will only return meshes where each face has
    exactly 3 adjacent faces.

    Arguments
    ----------
    mesh: Trimesh 
    only_watertight: if True, only return watertight components
    adjacency: (n,2) list of face adjacency to override using the plain
               adjacency calculated automatically. 

    Returns
    ----------
    meshes: list of Trimesh objects
    '''

    def split_nx():
        adjacency_graph = nx.from_edgelist(adjacency)
        components = nx.connected_components(adjacency_graph)
        result = mesh.submesh(components, only_watertight=only_watertight)
        return result

    def split_gt():
        g = GTGraph()
        g.add_edge_list(adjacency)
        component_labels = label_components(g, directed=False)[0].a
        components = group(component_labels)
        result = mesh.submesh(components, only_watertight=only_watertight)
        return result

    if adjacency is None:
        adjacency = mesh.face_adjacency

    if _has_gt: 
        return split_gt()
    else:       
        return split_nx()
objectextraction.py (project: SyConn, author: StructuralNeurobiologyLab)
def make_merge_list(hdf5names, stitch_list, max_labels):
    """
    Creates a merge list from a stitch list by mapping all connected ids to
    one id

    Parameters
    ----------
    hdf5names: list of str
        List of names/labels to be extracted and processed from the prediction
        file
    stitch_list: dictionary
        Contains pairs of overlapping component ids for each hdf5name
    max_labels: dictionary
        Contains the number of different component ids for each hdf5name

    Returns
    -------
    merge_dict: dictionary
        Maps each component id to its merged (representative) id, per hdf5name
    merge_list_dict: dictionary
        Per hdf5name, an array such that merge_list_dict[name][id] gives the merged id
    """

    merge_dict = {}
    merge_list_dict = {}
    for hdf5_name in hdf5names:
        this_stitch_list = stitch_list[hdf5_name]
        max_label = max_labels[hdf5_name]
        graph = nx.from_edgelist(this_stitch_list)
        cc = nx.connected_components(graph)
        merge_dict[hdf5_name] = {}
        merge_list_dict[hdf5_name] = np.arange(max_label + 1)
        for this_cc in cc:
            this_cc = list(this_cc)
            for id in this_cc:
                merge_dict[hdf5_name][id] = this_cc[0]
                merge_list_dict[hdf5_name][id] = this_cc[0]

    return merge_dict, merge_list_dict
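A toy stitch list shows the mapping; representatives come from list(cc)[0], so which id wins within a component is arbitrary (made-up ids):

stitch_list = {'raw': [(1, 2), (2, 3), (5, 6)]}
max_labels = {'raw': 6}

merge_dict, merge_list_dict = make_merge_list(['raw'], stitch_list, max_labels)
print(merge_list_dict['raw'])
# ids 1, 2, 3 share one representative and 5, 6 another, e.g. [0 1 1 1 4 5 5]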
nxgraph.py (project: anomalous-vertices-detection, author: Kagandi)
def connected_components(self):
        """
        Returns
        -------
        generator
            Sets of nodes, one set per connected component of the wrapped graph.
        """
        return nx.connected_components(self._graph)

