
Python networkx.transitivity Function Code Examples


This article collects and organizes typical usage examples of the Python function networkx.transitivity. If you have been asking yourself what transitivity does, how to call it, or what it looks like in real code, the curated examples below may help.



The following sections show 20 code examples of the transitivity function, all taken from open-source projects and ordered by popularity.
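
For orientation before the examples: nx.transitivity(G) returns the global clustering coefficient of an undirected graph, defined as 3 * (number of triangles) / (number of connected triples of nodes). A minimal sketch (the toy graph is my own illustration, not taken from any of the projects cited below):

import networkx as nx

# A triangle (1-2-3) plus one pendant edge (3-4).
G = nx.Graph([(1, 2), (2, 3), (1, 3), (3, 4)])

# 1 triangle and 5 connected triples -> transitivity = 3*1/5 = 0.6
print(nx.transitivity(G))  # 0.6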

Example 1: smallWorldness

def smallWorldness(graph):
	return_values = []
	#Small-worldness criteria
	n = len(nx.nodes(graph))
	e = len(nx.edges(graph))
	#probability of edges: (number of edges in real graph)/possible edges
	p = e/float((n*(n-1)/2.0))	
	##
	#generate random graph using probability
	rand_graph = nx.fast_gnp_random_graph(n, p, seed=1)
	#calculate values for real graph and random graph
	Creal = nx.transitivity(graph) #float
	Crand = nx.transitivity(rand_graph) #float
	Lreal = 0
	Lrand = 0
	real_sum = 0
	rand_sum = 0
	splReal = shortest_path_lengths(graph)
	splRand = shortest_path_lengths(rand_graph)
	for i in range(len(splReal)):
		real_sum += splReal[i]
		rand_sum += splRand[i]
	Lreal = real_sum / len(splReal)
	Lrand = rand_sum / len(splRand)		
	#compare with actual graph
	if(Lreal != 0 and Lrand !=0 and Crand !=0):
		S = (Creal)/(Crand) / (float(Lreal)/(Lrand))
	else:
		S = 0
	return_values.append(S)
	return return_values
Author: hlhendy | Project: Protein-Structure-Prediction-ML | Lines: 31 | Source: GraphAttributes.py
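
Example 1 depends on a project-specific helper (shortest_path_lengths) and Python 2 semantics. The quantity it computes is the small-worldness score S = (C_real / C_rand) / (L_real / L_rand), where C is the transitivity, L is the characteristic path length, and the baseline is a G(n, p) random graph of matching density. A rough Python 3 sketch of the same calculation using only current networkx calls (the function name small_worldness and the assumption that both graphs are connected are mine, not the original author's):

import networkx as nx

def small_worldness(graph, seed=1):
    # Small-worldness S = (C / C_rand) / (L / L_rand).
    n = graph.number_of_nodes()
    e = graph.number_of_edges()
    p = e / (n * (n - 1) / 2.0)  # edge probability matching the real graph
    rand_graph = nx.fast_gnp_random_graph(n, p, seed=seed)

    c_real, c_rand = nx.transitivity(graph), nx.transitivity(rand_graph)
    # average_shortest_path_length raises an exception on disconnected graphs;
    # this sketch simply assumes both graphs are connected.
    l_real = nx.average_shortest_path_length(graph)
    l_rand = nx.average_shortest_path_length(rand_graph)

    if c_rand > 0 and l_rand > 0 and l_real > 0:
        return (c_real / c_rand) / (l_real / l_rand)
    return 0.0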


Example 2: draw_graph

def draw_graph(label_flag=True, remove_isolated=True, different_size=True, iso_level=10, node_size=40):
    G=build_graph(fb.get_friends_network())
    betweenness=nx.betweenness_centrality(G)
    degree=nx.degree_centrality(G)
    degree_num=[ degree[v] for v in G]
    maxdegree=max(degree_num);mindegree=min(degree_num);
    print maxdegree,mindegree
    clustering=nx.clustering(G)
    print nx.transitivity(G)
    # Judge whether remove the isolated point from graph
    if remove_isolated is True:
        H = nx.empty_graph()
        for SG in nx.connected_component_subgraphs(G):
            if SG.number_of_nodes() > iso_level:
                H = nx.union(SG, H)
        G = H
    # Adjust graph for better presentation
    if different_size is True:
        L = nx.degree(G)
        G.dot_size = {}
        for k, v in L.items():
            G.dot_size[k] = v
        #node_size = [betweenness[v] *1000 for v in G]
        node_size = [G.dot_size[v] * 10 for v in G]
        node_color= [((degree[v]-mindegree))/(maxdegree-mindegree) for v in G]
        #edge_width = [getcommonfriends(u,v) for u,v in G.edges()]
    pos = nx.spring_layout(G, iterations=15)
    nx.draw_networkx_edges(G, pos, alpha=0.05)
    nx.draw_networkx_nodes(G, pos, node_size=node_size, node_color=node_color, vmin=0.0,vmax=1.0, alpha=0.3)
    # Judge whether shows label
    if label_flag is True:
        nx.draw_networkx_labels(G, pos, font_size=6,alpha=0.1)
    #nx.draw_graphviz(G)
    plt.show()
    return G
Author: redswallow | Project: facebook-sna | Lines: 35 | Source: visualize.py


Example 3: gen_graph_stats

def gen_graph_stats (graph):
	G = nx.read_graphml(graph)
	stats = {}

	edges, nodes = 0,0
	for e in G.edges_iter(): edges += 1
	for n in G.nodes_iter(): nodes += 1
	stats['Edges'] = (edges,'The number of edges within the Graph')
	stats['Nodes'] = (nodes, 'The number of nodes within the Graph')
	print "%i edges, %i nodes" % (edges, nodes)


	# Accessing the highest degree node
	center, degree = sorted(G.degree().items(), key=itemgetter(1), reverse=True)[0]
	stats['Center Node'] = ('%s: %0.5f' % (center,degree),'The center most node in the graph. Which has the highest degree')


	hairball = nx.subgraph(G, [x for x in nx.connected_components(G)][0])
	print "Average shortest path: %0.4f" % nx.average_shortest_path_length(hairball)
	stats['Average Shortest Path Length'] = (nx.average_shortest_path_length(hairball), '')
	# print "Center: %s" % G[center]

	# print "Shortest Path to Center: %s" % p


	print "Degree: %0.5f" % degree
	stats['Degree'] = (degree,'The node degree is the number of edges adjacent to that node.')

	print "Order: %i" % G.number_of_nodes()
	stats['Order'] = (G.number_of_nodes(),'The number of nodes in the graph.')

	print "Size: %i" % G.number_of_edges()
	stats['Size'] = (G.number_of_edges(),'The number of edges in the graph.')

	print "Clustering: %0.5f" % nx.average_clustering(G)
	stats['Average Clustering'] = (nx.average_clustering(G),'The average clustering coefficient for the graph.')

	print "Transitivity: %0.5f" % nx.transitivity(G)
	stats['Transitivity'] = (nx.transitivity(G),'The fraction of all possible triangles present in the graph.')

	part = community.best_partition(G)
	# values = [part.get(node) for node in G.nodes()]

	# nx.draw_spring(G, cmap = plt.get_cmap('jet'), node_color = values, node_size=30, with_labels=False)
	# plt.show()

	mod = community.modularity(part,G)
	print "modularity: %0.5f" % mod
	stats['Modularity'] = (mod,'The modularity of a partition of a graph.')

	knn = nx.k_nearest_neighbors(G)
	print knn
	stats['K Nearest Neighbors'] = (knn,'the average degree connectivity of the graph.\nThe average degree connectivity is the average nearest neighbor degree of nodes with degree k. For weighted graphs, an analogous measure can be computed using the weighted average neighbors degree')


	return G, stats
Author: neviim | Project: Georgetown-Capstone | Lines: 56 | Source: Graph_stats.py


Example 4: get_small_worldness

def get_small_worldness(filename):
  import networkx as nx
  threshold = 0
  f = open(filename[:-4]+'_small_worldness.dat','w')
  for i in range(0,101):
    threshold = float(i)/100
    G = get_threshold_matrix(filename, threshold)
    ER_graph = nx.erdos_renyi_graph(nx.number_of_nodes(G), nx.density(G))

    cluster = nx.average_clustering(G)
    ER_cluster = nx.average_clustering(ER_graph)
    
    transi = nx.transitivity(G)
    ER_transi = nx.transitivity(ER_graph)

    print 'threshold: %f, average cluster coefficient: %f, random nw: %f, transitivity: %f, random nw: %f' %(threshold, cluster, ER_cluster, transi, ER_transi)

    f.write("%f\t%f\t%f" % (threshold, cluster, ER_cluster))
    components = nx.connected_component_subgraphs(G)
    ER_components = nx.connected_component_subgraphs(ER_graph)

    values = []
    ER_values = []
    for i in range(len(components)):
      if nx.number_of_nodes(components[i]) > 1:
        values.append(nx.average_shortest_path_length(components[i]))
    for i in range(len(ER_components)):
      if nx.number_of_nodes(ER_components[i]) > 1:
        ER_values.append(nx.average_shortest_path_length(ER_components[i]))
    if len(values) == 0:
      f.write("\t0.")
    else:
      f.write("\t%f" % (sum(values)/len(values)))

    if len(ER_values) == 0:
      f.write("\t0.")
    else:
      f.write("\t%f" % (sum(ER_values)/len(ER_values)))
    
    f.write("\t%f\t%f" % (transi, ER_transi))  
    
    if (ER_cluster*sum(values)*len(values)*sum(ER_values)*len(ER_values)) >0 :
      S_WS = (cluster/ER_cluster) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
    else:
      S_WS = 0.
    if (ER_transi*sum(values)*len(values)*sum(ER_values)*len(ER_values)) >0 :
      S_Delta = (transi/ER_transi) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
    else:
      S_Delta = 0.
    
    f.write("\t%f\t%f" % (S_WS, S_Delta))  
    f.write("\n")
    
  f.close()  
  print "1:threshold 2:cluster-coefficient 3:random-cluster-coefficient 4:shortest-pathlength 5:random-shortest-pathlength 6:transitivity 7:random-transitivity 8:S-Watts-Strogatz 9:S-transitivity" 
Author: sheyma | Project: lab_rot_berlin | Lines: 55 | Source: threshold_matrix.py
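
A compatibility note rather than a bug in the original script: nx.connected_component_subgraphs, used here and in Examples 5, 6 and 16 below, was removed in NetworkX 2.4. On current versions an equivalent list of component subgraphs can be built as follows (a small sketch of my own, not part of the quoted projects):

import networkx as nx

def connected_component_subgraphs(G):
    # Drop-in replacement for the removed nx.connected_component_subgraphs:
    # one independent subgraph copy per connected component.
    return [G.subgraph(c).copy() for c in nx.connected_components(G)]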


Example 5: get_small_worldness

def get_small_worldness(G, thr):
	f = open(out_prfx + 'small_worldness.dat', 'a')
	g = open(out_prfx + 'cc_trans_ER.dat', 'a')
	#g.write('r(thre.)\t\cc_A\tcc_ER\ttran_A\ttran_ER\n')
	ER_graph = nx.erdos_renyi_graph(nx.number_of_nodes(G), nx.density(G))
	# erdos-renyi, binomial random graph generator ...(N,D:density)	
	cluster = nx.average_clustering(G)   # clustering coef. of whole network
	ER_cluster = nx.average_clustering(ER_graph)	#cc of random graph
	
	transi = nx.transitivity(G)
	ER_transi = nx.transitivity(ER_graph)

	g.write("%f\t%f\t%f\t%f\t%f\n" % (thr, cluster,ER_cluster,transi,ER_transi ))
	
	f.write("%f\t%f\t%f" % (thr, cluster, ER_cluster))
	components = nx.connected_component_subgraphs(G)
	ER_components = nx.connected_component_subgraphs(ER_graph)

	values = []
	ER_values = []
	for i in range(len(components)):
		if nx.number_of_nodes(components[i]) > 1:
			values.append(nx.average_shortest_path_length(components[i]))
	for i in range(len(ER_components)):
		if nx.number_of_nodes(ER_components[i]) > 1:
			ER_values.append(nx.average_shortest_path_length(ER_components[i]))
	if len(values) == 0:
		f.write("\t0.")
	else:
		f.write("\t%f" % (sum(values)/len(values))) # pathlenght

	if len(ER_values) == 0:
		f.write("\t0.")
	else:
		f.write("\t%f" % (sum(ER_values)/len(ER_values)))

	f.write("\t%f\t%f" % (transi, ER_transi))  

	if (ER_cluster*sum(values)*len(values)*sum(ER_values)*len(ER_values)) >0 :
		S_WS = (cluster/ER_cluster) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))  
	else:
		S_WS = 0.
	if (ER_transi*sum(values)*len(values)*sum(ER_values)*len(ER_values)) >0 :
		S_Delta = (transi/ER_transi) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
	else:
		S_Delta = 0.

	f.write("\t%f\t%f" % (S_WS, S_Delta)) # S_WS ~ small worldness 
	f.write("\n")

	f.close() 
	g.close()	 
Author: rudimeier | Project: MSc_Thesis | Lines: 52 | Source: sb_randomization.py


Example 6: printStats

def printStats(filename):
	'''
	Converts json adjacency list into networkx to calculate and print the
	graph's
	  - average clustering coefficient
	  - overall clustering coefficient
	  - maximum diameter
	  - average diameter
	  - number of partitions using community.best_partition
	  - modularity of community.best_partition
	'''
	g = makeGraphFromJSON(filename)
	
	print "Average Clustering Coefficient: %f" % nx.average_clustering(g)
	print "Overall Clustering Coefficient: %f" % nx.transitivity(g)
	
	connected_subgraphs = list(nx.connected_component_subgraphs(g))
	largest = max(nx.connected_component_subgraphs(g), key=len)
	print "# Connected Components: %d" % len(connected_subgraphs)
	print "    Maximal Diameter: %d" % nx.diameter(largest)
	print "    Average Diameter: %f" % nx.average_shortest_path_length(largest)

	# Find partition that maximizes modularity using Louvain's algorithm
	part = community.best_partition(g)	
	print "# Partitions: %d" % (max(part.values()) + 1)
	print "Louvain Modularity: %f" % community.modularity(part, g)
Author: azercephi | Project: kraftwerk | Lines: 26 | Source: analyze.py
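
Example 6 prints two different clustering statistics, and the distinction matters: nx.average_clustering is the plain mean of the per-node clustering coefficients, while nx.transitivity ("overall clustering") is 3 * triangles / triads, which effectively gives high-degree nodes more weight. A small illustration on a toy graph of my own:

import networkx as nx

# Triangle 0-1-2 with a tail 2-3-4.
G = nx.Graph([(0, 1), (1, 2), (0, 2), (2, 3), (3, 4)])

# Per-node clustering coefficients are [1, 1, 1/3, 0, 0] -> mean is about 0.4667
print(nx.average_clustering(G))
# 1 triangle and 6 connected triples -> 3*1/6 = 0.5
print(nx.transitivity(G))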


Example 7: netstats_simple

def netstats_simple(graph):
    G = graph
    if nx.is_connected(G): 
        d = nx.diameter(G)
        r = nx.radius(G)
    else: 
        d = 'NA - graph is not connected' #should be calculatable on unconnected graph - see example code for hack
        r = 'NA - graph is not connected'
   
#using dictionary to pack values and variablesdot, eps, ps, pdf break equally
    result = {#"""single value measures"""  
              'nn': G.number_of_nodes(),
              'ne': G.number_of_edges(),
              'd': d,
              'r': r,
              'conn': nx.number_connected_components(G),
              'asp': nx.average_shortest_path_length(G), 
#              """number of the largest clique"""
              'cn': nx.graph_clique_number(G),
#              """number of maximal cliques"""
              'mcn': nx.graph_number_of_cliques(G),
#              """transitivity - """
              'tr': nx.transitivity(G),
              #cc = nx.clustering(G) """clustering coefficient"""
              'avgcc': nx.average_clustering(G) } 
#    result['d'] = nx.diameter(G)
    print result
    return result
Author: freyley | Project: nets | Lines: 28 | Source: views.py


Example 8: compute_singlevalued_measures

def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False):
    """
    Returns a single value per network
    """
    iflogger.info("Computing single valued measures:")
    measures = {}
    iflogger.info("...Computing degree assortativity (pearson number) ...")
    try:
        measures["degree_pearsonr"] = nx.degree_pearsonr(ntwk)
    except AttributeError:  # For NetworkX 1.6
        measures["degree_pearsonr"] = nx.degree_pearson_correlation_coefficient(ntwk)
    iflogger.info("...Computing degree assortativity...")
    try:
        measures["degree_assortativity"] = nx.degree_assortativity(ntwk)
    except AttributeError:
        measures["degree_assortativity"] = nx.degree_assortativity_coefficient(ntwk)
    iflogger.info("...Computing transitivity...")
    measures["transitivity"] = nx.transitivity(ntwk)
    iflogger.info("...Computing number of connected_components...")
    measures["number_connected_components"] = nx.number_connected_components(ntwk)
    iflogger.info("...Computing average clustering...")
    measures["average_clustering"] = nx.average_clustering(ntwk)
    if nx.is_connected(ntwk):
        iflogger.info("...Calculating average shortest path length...")
        measures["average_shortest_path_length"] = nx.average_shortest_path_length(ntwk, weighted)
    if calculate_cliques:
        iflogger.info("...Computing graph clique number...")
        measures["graph_clique_number"] = nx.graph_clique_number(ntwk)  # out of memory error
    return measures
Author: GaelVaroquaux | Project: nipype | Lines: 29 | Source: nx.py


Example 9: get_motifs

def get_motifs(filename):
  import networkx as nx
  from math import factorial
  threshold = 0
  f = open(filename[:-4]+'_motifs.dat','w')
  for i in range(0,101):
    threshold = float(i)/100
    G = get_threshold_matrix(filename, threshold)
    tri_dict = nx.triangles(G)
    summe = 0
    for node in tri_dict:
      summe += tri_dict[node]
    
    N = nx.number_of_nodes(G)
    ratio = summe / (3. * binomialCoefficient(N,3))
    
    transi = nx.transitivity(G)
    if transi > 0:
      triads = summe / transi 
      ratio_triads = triads / (3 * binomialCoefficient(N,3))
    else:
      triads = 0.
      ratio_triads = 0.
    
    print 'threshold: %f, number of triangles: %f, ratio: %f, triads: %f, ratio: %f' %(threshold, summe/3, ratio, triads, ratio_triads)
    f.write("%f\t%d\t%f\t%f\t%f\n" % (threshold, summe/3, ratio, triads, ratio_triads))
  f.close()
  print "1:threshold 2:#triangles 3:ratio-to-potential-triangles 4:triads 5:ratio-to-potential-triads"
Author: sheyma | Project: lab_rot_berlin | Lines: 28 | Source: threshold_matrix.py


Example 10: connected_components

    def connected_components(self):
        """
        Returns basic statistics about the connected components of the
        graph. This includes their number, order, size, diameter, radius,
        average clustering coefficient, transitivity, in addition to basic
        info about the largest and smallest connected components.
        """
        cc_stats = {}
        cc = nx.connected_components(self.graph.structure)

        for index, component in enumerate(cc):
            cc_stats[index] = {}
            this_cc = cc_stats[index]

            this_cc["order"] = len(component)
            this_cc["size"] = len(self.graph.structure.edges(component))

            subgraph = self.graph.structure.subgraph(component)
            this_cc["avg_cluster"] = nx.average_clustering(subgraph)
            this_cc["transitivity"] = nx.transitivity(subgraph)

            eccentricity = nx.eccentricity(subgraph)
            ecc_values = eccentricity.values()
            this_cc["diameter"] = max(ecc_values)
            this_cc["radius"] = min(ecc_values)

        return cc_stats
Author: jim-pansn | Project: sypy | Lines: 27 | Source: stats.py


Example 11: plot_distribution

def plot_distribution(distribution_type,legend,graph,list_communities,out=None):
	x = [i for i in range(0,len(list_communities[0]))]
	for communities in list_communities:
		if distribution_type.lower() == "nodes":
			y = list(map(len,communities))
		else:
			y = []
			for l in communities:
				H = graph.subgraph(l)
				if distribution_type.lower() == "density":
					y.append(nx.density(H))
				elif distribution_type.lower() == "transitivity":
					y.append(nx.transitivity(H))
				else:
					return None
		plt.plot(x,y,linewidth=2,alpha=0.8)
		#plt.yscale("log")

	plt.legend(legend, loc='upper left')
	plt.xlabel("Comunity ID")
	plt.ylabel(distribution_type)

	if out == None:
		plt.show()
	else:
		plt.savefig(out+".svg",bbox_inches="tight")
	plt.close()
Author: pigna90 | Project: lastfm_network_analysis | Lines: 27 | Source: community_discovery.py


Example 12: get_network_property

def get_network_property(graph):
    """Returns various property of the graph.

    It calculates the richness coefficient, triangles and transitivity
    coefficient. To do so, it removes self-loops *in-place*. So, there
    is a possibility that the graph passed as parameter has been
    changed.
    """

    remove_self_loop(graph)

    # If number of nodes is less than three
    # no point in calculating these property.
    if len(graph.nodes()) < 3:
        return ({0: 0.0}, 0, 0)

    try:
        richness = nx.rich_club_coefficient(graph)
    except nx.NetworkXAlgorithmError:
        # NetworkXAlgorithmError is raised when
        # it fails achieve desired swaps after
        # maximum number of attempts. It happened
        # for a really small graph. But, just to
        # guard against those cases.
        richness = nx.rich_club_coefficient(graph, False)

    triangle = nx.triangles(graph)
    transitivity = nx.transitivity(graph)

    return (richness, triangle, transitivity)
Author: saeed-abdullah | Project: github-social | Lines: 30 | Source: networkutil.py


Example 13: degree_statistics

def degree_statistics(G):
    n_nodes = G.number_of_nodes()
    
    start = time.clock()
    # list of sampled graphs
    g_list[:] = []
    for i in range(N_SAMPLES):
        g_list.append(generate_sample(G))
    print "Sampling graphs - Elapsed ", (time.clock() - start)
    
    #####
    # number of edges s_NE
    s_NE = sum(e[2]['p'] for e in G.edges_iter(data=True))
    
    # average degree s_AD
    s_AD = 2*s_NE /n_nodes
    
    # maximal degree s_MD
    sum_MD = 0.0
    for aG in g_list:
        max_deg = max(aG.degree().itervalues())
        sum_MD += max_deg
        
    s_MD = sum_MD/N_SAMPLES
    
    # degree variance s_DV
    sum_DV = 0.0
    for aG in g_list:
        deg_var = 1.0/n_nodes * sum((d - s_AD)*(d-s_AD) for d in aG.degree().itervalues())
        sum_DV += deg_var
    
    s_DV = sum_DV/N_SAMPLES
    
    # clustering coefficient s_CC
    sum_CC = 0.0
    for aG in g_list:
        cc = nx.transitivity(aG)
        sum_CC += cc
    
    s_CC = sum_CC/N_SAMPLES
    
    # degree distribution
    deg_list = [0 for i in range(MAX_DEG)]
    for aG in g_list:
        for d in aG.degree().itervalues():
            deg_list[d] += 1
            
    i = MAX_DEG-1
    while deg_list[i] == 0:
        i = i-1
    deg_list = deg_list[:i+1]
    print "len(deg_list) =", len(deg_list)
    print deg_list
    
    for i in range(len(deg_list)):
        deg_list[i] = float(deg_list[i])/N_SAMPLES
    
    #
    return s_NE, s_AD, s_MD, s_DV, s_CC, deg_list
Author: hiepbkhn | Project: itce2011 | Lines: 59 | Source: utility_measure.py


Example 14: cluster

def cluster():
	if created == 0:
		print 'No graph created!'
	elif created == 1:
		try:
			print 'The clustering coefficient for the whole graph is %0.4f.'%(nx.transitivity(G))
		except nx.NetworkXError, e:
			print e
Author: nod3x | Project: sns-analysis | Lines: 8 | Source: graph.py


Example 15: preferentialAttachment

def preferentialAttachment(G):
    n = G.number_of_nodes()
    m =  random.randrange(15,20)
    PG = nx.barabasi_albert_graph(n,m)
    plot(PG)
    l =  math.log(n)/math.log(math.log(n))
    print 'Global Clustering: {0}\t'.format(str(nx.transitivity(PG))),
    print 'Average path length : {0}\n'.format(str(l))
Author: akry1 | Project: SMM_Project1 | Lines: 8 | Source: P4.py


Example 16: get_my_small_worldness

def get_my_small_worldness(filename) :
	threshold = 0
	f = open(filename[:-4]+'_small_worldness.dat','w')
	f.write('thresh\t\taver_clus\t\tave_ER_clus\t\tcoup_coeff\t\tchar_path\t\ttransi\t\tER_transi\t\tS_WS\t\tS_delta\n')
	print f
	for i in range(0,101):
		threshold = float(i)/100
		G = get_my_threshold_matrix(filename, threshold)
		ER_graph = nx.erdos_renyi_graph(nx.number_of_nodes(G), nx.density(G)) # random graph
		cluster = nx.average_clustering(G)
		ER_cluster = nx.average_clustering(ER_graph)
		transi = nx.transitivity(G)
		ER_transi = nx.transitivity(ER_graph)
		f.write('%f\t%f\t%f'% (threshold,cluster,ER_cluster))
		components = nx.connected_component_subgraphs(G)
		ER_components = nx.connected_component_subgraphs(ER_graph)
		values = []
		ER_values = []
		for i in range(len(components)) :	
			if nx.number_of_nodes(components[i]) > 1:
				values.append(nx.average_shortest_path_length(components[i]))
		for i in range(len(ER_components)) :	
			if nx.number_of_nodes(ER_components[i]) > 1:
				ER_values.append(nx.average_shortest_path_length(ER_components[i]))
		if len(values) == 0 :
			f.write("\t0.")
		else : 
			f.write("\t%f" % (sum(values)/len(values)))
		if len(ER_values) == 0:
			f.write("\t0.")
		else:
			f.write("\t%f" % (sum(ER_values)/len(ER_values)))
		f.write("\t%f\t%f" % (transi, ER_transi))
		if (ER_cluster*sum(values)*len(values)*sum(ER_values)*len(ER_values)) >0 :
			S_WS = (cluster/ER_cluster) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
		else:
			S_WS = 0.
		if (ER_transi*sum(values)*len(values)*sum(ER_values)*len(ER_values)) >0 :
			S_Delta = (transi/ER_transi) / ((sum(values)/len(values)) / (sum(ER_values)/len(ER_values)))
		else:
			S_Delta = 0.
		f.write("\t%f\t%f" % (S_WS, S_Delta))  
		f.write("\n")
	f.close()
Author: sheyma | Project: lab_rot_berlin | Lines: 44 | Source: python_begin_03.py


Example 17: test_clustering_transitivity

 def test_clustering_transitivity(self):
     # check that weighted average of clustering is transitivity
     G = nx.complete_graph(5)
     G.remove_edge(1,2)
     t1=nx.transitivity(G)
     (cluster_d2,weights)=nx.clustering(G,weights=True)
     trans=[]
     for v in G.nodes():
         trans.append(cluster_d2[v]*weights[v])
     t2=sum(trans)
     assert_almost_equal(abs(t1-t2),0)
Author: rafaelpiresm | Project: projetos_gae | Lines: 11 | Source: test_cluster.py


Example 18: myglobalclust

def myglobalclust(corr):
    if not isinstance(corr,nx.DiGraph):
        return [nx.transitivity(corr)]  #3*triangles/triads
    corr = np.array(nx.to_numpy_matrix(corr))
    mat = np.dot(corr,corr)
    paths = np.sum(mat) - np.trace(mat)
    mat = np.dot(mat,corr)
    loops = np.trace(mat)
    if paths == 0:
        return [0]
    else:
        return [float(loops)/paths]
Author: Jason3424 | Project: Network-Motif | Lines: 12 | Source: mynetalgs.py
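
Example 18 handles directed graphs with an adjacency-matrix formula: trace(A^3) counts closed directed 3-cycles and sum(A^2) - trace(A^2) counts directed paths of length 2. For an undirected graph the same ratio reduces to 3 * triangles / triads, which is why the function can simply fall back to nx.transitivity in that case. A quick consistency check of that identity (my own sketch; it uses nx.to_numpy_array from current NetworkX rather than the older nx.to_numpy_matrix):

import networkx as nx
import numpy as np

G = nx.erdos_renyi_graph(30, 0.2, seed=42)
A = nx.to_numpy_array(G)
A2 = A @ A

# trace(A^3) = 6 * triangles and sum(A^2) - trace(A^2) = 2 * triads,
# so the ratio equals 3 * triangles / triads = nx.transitivity(G).
ratio = np.trace(A2 @ A) / (np.sum(A2) - np.trace(A2))
print(abs(ratio - nx.transitivity(G)) < 1e-12)  # True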


Example 19: calGraph

def calGraph(infile, mode = 1):
	#init Parameter
	inputpath = 'edge_list/'
	n = mode
	Data_G = inputpath+infile+'_'+str(n)+'.edgelist'
	
	#init Graph
	G = nx.read_edgelist(Data_G, create_using=nx.DiGraph())
	GU = nx.read_edgelist(Data_G)
	average_clustering = nx.average_clustering(GU)
	transitivity = nx.transitivity(G)
	return [average_clustering, transitivity]
Author: carlzhangxuan | Project: For_Recruit | Lines: 12 | Source: L3_NetworkX_cluster_daily.py


Example 20: graphAnalysis

def graphAnalysis(graph, top_number, save_file_path):
    """
        Do the essential analysis to the final combined graph
    """
    with io.open(save_file_path, 'w') as save_file:

        # centrality
        # degree centrality
        deg_central = nx.degree_centrality(graph)
        deg_central_sort = sorted(deg_central.items(), key = lambda x: x[1], reverse = True)
        top_deg_central_sort = deg_central_sort[:top_number]
        save_file.write('top %d degree centrality items,' % top_number)
        save_file.write(','.join('%s %s' % x for x in top_deg_central_sort))

        # clustering

        # number of triangles: triangles() is not defined for directed graphs
        triangle_num = nx.triangles(graph)
        triangle_num_sort = sorted(triangle_num.items(), key = lambda x: x[1], reverse = True)
        top_triangle_num_sort = triangle_num_sort[:top_number]
        save_file.write('\ntop %d number of triangles including a node as one vertex,' % top_number)
        save_file.write(','.join('%s %s' % x for x in top_triangle_num_sort))

        # clustering coefficient of node in the graph
        cluster_coefficient = nx.clustering(graph)
        cluster_coefficient_sort = sorted(cluster_coefficient.items(), key = lambda x: x[1], reverse = True)
        top_cluster_coefficient_sort = cluster_coefficient_sort[:top_number]
        save_file.write('\ntop %d clustering coefficient items,' % top_number)
        save_file.write(','.join('%s %s' % x for x in top_cluster_coefficient_sort))

        # transitivity of the graph
        triangle_transitivity = nx.transitivity(graph)
        save_file.write('\ntransitivity of the graph,%f' % triangle_transitivity)

        # average clustering coefficient of the graph
        avg_cluster = nx.average_clustering(graph)
        save_file.write('\naverage clustering coefficient of the graph,%f' % avg_cluster)

        # clique
        # size of the largest clique in the graph
        size_largest_clique = nx.graph_clique_number(graph)
        save_file.write('\nsize of the largest clique in the graph,%d' % size_largest_clique)
        
        # all the cliques in the graph
        
        all_clique = nx.find_cliques(graph) # a generator
        list_all_clique = list(all_clique)
        list_all_clique_sort = sorted(list_all_clique, key = lambda x: len(x), reverse = True)
        list_all_clique_sort = [' '.join(clique) for clique in list_all_clique_sort]
        # print list_all_clique_sort
        save_file.write('\ncliques,')
        save_file.write(','.join(x for x in list_all_clique_sort))
Author: BowenLou | Project: nlp_network_analysis | Lines: 52 | Source: co_occurrence_script.py



Note: The networkx.transitivity examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects and remain the copyright of their original authors; consult each project's license before redistributing or reusing the code. Do not reproduce this article without permission.

