Python examples: usage of networkx `path_graph()`, collected from several open-source projects.

graph_utils.py 文件源码 项目:pythia 作者: elazarg 项目源码 文件源码 阅读 34 收藏 0 点赞 0 评论 0
def refine_to_chain(g, from_attr, to_attr):
    '''can be used to refine basic blocks into blocks - the dual of contract_chains()
    assume g.node[n][attr] is a list
    returns a graph whose nodes are the refinement of the lists into paths
    the elements of the lists are held as to_attr
    the nodes become tuples (node_index, list_index)'''
    # NOTE(review): written against the networkx 1.x API (nodes_iter, g.node,
    # successors_iter, set_node_attributes(G, name, values)); will not run
    # unmodified under networkx 2.x -- confirm the pinned networkx version.
    paths = []
    for n in g.nodes_iter():
        block = g.node[n][from_attr]
        size = len(block)
        # one chain node per list element: 0 -> 1 -> ... -> size-1
        path = nx.path_graph(size, create_using=nx.DiGraph())
        # relabel chain node x to the pair (original node n, position x)
        nx.relabel_nodes(path, mapping={x:(n, x) for x in path.nodes()}, copy=False)
        # link this chain's tail to the head (index 0) of each successor's chain
        path.add_edges_from(((n, size - 1), (s, 0)) for s in g.successors_iter(n))
        paths.append(path)
    # per refined node (n, x): the x-th element of n's from_attr list
    values = {(n, x): block
              for n in g.nodes_iter()
              for x, block in enumerate(g.node[n][from_attr])}
    # merge the per-node chains; the cross-chain edges added above stitch them
    res = nx.compose_all(paths)
    nx.set_node_attributes(res, to_attr, values)
    return res
sample_test.py 文件源码 项目:GraphLearn 作者: smautner 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def test_sampler():
    """Run two sampling steps on a relabeled path graph and check the score."""
    grammar = lsgg_test.get_grammar()
    graph = lsgg_test.edenize(nx.path_graph(4))
    graph.node[3]['label'] = '5'
    estimator = sde(lsgg_test.edenize(nx.path_graph(4)))

    smp = sample.Sampler(grammar=grammar, score_estimator=estimator, n_steps=2)

    score = None
    for _ in range(2):
        graph, score = smp.transform(graph).next()
    # score after two rounds should match the known reference value
    assert abs(0.319274373045 - score) < 0.000001
lsgg_test.py 文件源码 项目:GraphLearn 作者: smautner 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def get_grammar():
    """Return an lsgg grammar fitted on three copies of a prepared 4-node path."""
    grammar = lsgg.lsgg()
    seed = prep_cip_extract(nx.path_graph(4))
    grammar.fit([seed] * 3)
    return grammar
lsgg_test.py 文件源码 项目:GraphLearn 作者: smautner 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def test_extract_core_and_interface():
    """CIP extraction from node 3 of a prepared path graph has a known signature."""
    g = nx.path_graph(4)
    prep_cip_extract(g)
    cip = lcu.extract_core_and_interface(root_node=3, graph=g, radius=1, thickness=1)
    #gprint(cip.graph)
    expected = "cip: int:16931, cor:695036, rad:1, thi:1, rot:3"
    assert str(cip) == expected
lsgg_test.py 文件源码 项目:GraphLearn 作者: smautner 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def test_neighbors():
    """A relabeled 4-node path graph should have exactly six grammar neighbors."""
    grammar = get_grammar()

    # build the query graph: edenized path with one label changed
    query = edenize(nx.path_graph(4))
    query.node[3]['label'] = '5'

    produced = list(grammar.neighbors(query))
    assert len(produced) == 6
some_neighbors_test.py 文件源码 项目:GraphLearn 作者: smautner 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def get_grammar():
    """Fit a fresh lsgg grammar on three references to one prepared path graph."""
    gram = lsgg()
    sample_graph = prep_cip_extract(nx.path_graph(4))
    gram.fit([sample_graph for _ in range(3)])
    return gram
CommunityDetection.py 文件源码 项目:visa_free 作者: BBischof 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def communitySplits(self, graph, weight=None):
        """
        Compute the splits for the formation of communities (one round of
        Girvan-Newman splitting): repeatedly remove the highest-betweenness
        edges until the number of connected components increases.

        Parameters
        ----------
        graph -  A networkx graph or digraph; it is modified in place.
        weight (string) - If None, all edge weights are considered equal. 
            Otherwise holds the name of the edge attribute used as weight


        Returns
        -------
        The graph with weak edges removed. 


        Usage
        -----
        >>> G = nx.path_graph(10)
        >>> out = GirvanNewman(G)
        >>> comm = out.communities(G, weight=None)
        >>> for x in comm:
                print x
        """

        # NOTE(review): Python 2 code (dict.iteritems, print statement in the
        # doctest above) -- needs porting before use on Python 3.
        nConnComp = nx.number_connected_components(graph)
        nComm = nConnComp

        # keep stripping edges until the component count rises above the start
        while (nComm <= nConnComp):
            betweenness = nx.edge_betweenness_centrality(graph, weight=weight)
            if (len(betweenness.values()) != 0 ):
                max_betweenness = max(betweenness.values())
            else:
                break   # no edges left to remove
            # u is an edge (endpoint pair), v its centrality score; remove
            # every edge tying for the maximum in this round
            for u,v in betweenness.iteritems():
                if float(v) == max_betweenness:
                    # print u,v
                    graph.remove_edge(u[0], u[1])
            nComm = nx.number_connected_components(graph)           
        return graph
test_skeleton.py 文件源码 项目:Distributed-Algorithms 作者: abinashmeher999 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def test_simulation():
    """Smoke test: a simulation embedded on a 5-node path runs to completion."""
    topology = nx.path_graph(5)
    sim = Simulation(embedding_graph=topology,
                     process_type=Process,
                     channel_type=Channel)
    sim.run()
graph.py 文件源码 项目:nelpy 作者: nelpy 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def circular_layout(G, scale=1, center=None, dim=2, direction='CCW'):
    # dim=2 only
    """Position nodes on a circle.

    Parameters
    ----------
    G : NetworkX graph or list of nodes

    scale : float
        Scale factor for positions

    center : array-like or None
        Coordinate pair around which to center the layout.

    dim : int
        Dimension of layout, currently only dim=2 is supported

    direction : str
        'CCW' places nodes counter-clockwise starting at angle 0;
        anything else places them clockwise starting at the top.

    Returns
    -------
    pos : dict
        A dictionary of positions keyed by node

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> pos = nx.circular_layout(G)

    Notes
    -----
    This algorithm currently only works in two dimensions and does not
    try to minimize edge crossings.

    """

    G, center = _process_params(G, center, dim)

    n = len(G)
    if n == 0:
        return {}
    if n == 1:
        # a single node sits exactly at the center
        return {nx.utils.arbitrary_element(G): center}

    # n evenly spaced angles on [0, 2*pi); the extra endpoint is dropped
    # because it coincides with angle 0
    angles = (np.linspace(0, 1, n + 1)[:-1] * 2 * np.pi).astype(np.float32)
    if direction == 'CCW':
        coords = np.column_stack([np.cos(angles), np.sin(angles)])
    else:
        coords = np.column_stack([np.sin(angles), np.cos(angles)])
    coords = rescale_layout(coords, scale=scale) + center
    return dict(zip(G, coords))
read.py 文件源码 项目:k-clique-graphs-dense-subgraphs 作者: giannisnik 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def read_gml(path, relabel=False):
    """Read graph in GML format from path.

    Parameters
    ----------
    path : filename or filehandle
       The filename or filehandle to read from.

    relabel : bool, optional
       If True use the GML node label attribute for node names otherwise use
       the node id.

    Returns
    -------
    G : MultiGraph or MultiDiGraph

    Raises
    ------
    ImportError
        If the pyparsing module is not available.

    See Also
    --------
    write_gml, parse_gml

    Notes
    -----
    Requires pyparsing: http://pyparsing.wikispaces.com/
    The GML specification says that files should be ASCII encoded, with any
    extended ASCII characters (iso8859-1) appearing as HTML character entities.

    References
    ----------
    GML specification:
    http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html

    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> nx.write_gml(G,'test.gml')
    >>> H=nx.read_gml('test.gml')
    """
    # decode each raw line as ASCII and expand HTML character entities lazily
    decoded = (unescape(raw.decode('ascii')) for raw in path)
    return parse_gml(decoded, relabel=relabel)
read.py 文件源码 项目:k-clique-graphs-dense-subgraphs 作者: giannisnik 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def write_gml(G, path):
    """
    Write the graph G in GML format to the file or file handle path.

    Parameters
    ----------
    path : filename or filehandle
       The filename or filehandle to write.  Filenames ending in
       .gz or .gz2 will be compressed.

    See Also
    --------
    read_gml, parse_gml

    Notes
    -----
    GML specifications indicate that the file should only use
    7bit ASCII text encoding.iso8859-1 (latin-1).

    This implementation does not support all Python data types as GML
    data.  Nodes, node attributes, edge attributes, and graph
    attributes must be either dictionaries or single stings or
    numbers.  If they are not an attempt is made to represent them as
    strings.  For example, a list as edge data
    G[1][2]['somedata']=[1,2,3], will be represented in the GML file
    as::

       edge [
         source 1
         target 2
         somedata "[1, 2, 3]"
       ]


    Examples
    ---------
    >>> G=nx.path_graph(4)
    >>> nx.write_gml(G,"test.gml")

    Filenames ending in .gz or .bz2 will be compressed.

    >>> nx.write_gml(G,"test.gml.gz")
    """
    # emit one newline-terminated record per generated GML line; non-ASCII
    # characters become XML character references per the GML spec
    for record in generate_gml(G):
        path.write((record + '\n').encode('ascii', 'xmlcharrefreplace'))


# fixture for nose tests


问题


面经


文章

微信
公众号

扫码关注公众号