Network Analysis by aj-geddes/useful-ai-prompts
npx skills add https://github.com/aj-geddes/useful-ai-prompts --skill 'Network Analysis'

此技能支持分析网络结构,以识别社区、测量中心性、检测有影响力的节点,并可视化社交网络、组织结构和互连系统中的复杂关系。
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
from collections import defaultdict, Counter
import seaborn as sns
# Build a sample undirected social network.
G = nx.Graph()

# Nodes carry 'role' and 'dept' attributes describing each person.
nodes = [
    ('Alice', {'role': 'Manager', 'dept': 'Sales'}),
    ('Bob', {'role': 'Engineer', 'dept': 'Tech'}),
    ('Carol', {'role': 'Designer', 'dept': 'Design'}),
    ('David', {'role': 'Engineer', 'dept': 'Tech'}),
    ('Eve', {'role': 'Analyst', 'dept': 'Sales'}),
    ('Frank', {'role': 'Manager', 'dept': 'HR'}),
    ('Grace', {'role': 'Designer', 'dept': 'Design'}),
    ('Henry', {'role': 'Engineer', 'dept': 'Tech'}),
    ('Iris', {'role': 'Analyst', 'dept': 'Sales'}),
    ('Jack', {'role': 'Manager', 'dept': 'Finance'}),
]
for node, attrs in nodes:
    G.add_node(node, **attrs)

# Edges (relationships).  The original list repeated five pairs (e.g.
# ('Eve', 'Alice') after ('Alice', 'Eve')); an undirected nx.Graph
# silently drops duplicates, so they are removed here for clarity.
# The resulting graph is identical: 10 nodes, 12 edges.
edges = [
    ('Alice', 'Bob'), ('Alice', 'Carol'), ('Alice', 'Eve'),
    ('Alice', 'Frank'), ('Bob', 'Carol'), ('Bob', 'David'),
    ('Bob', 'Henry'), ('Carol', 'David'), ('Carol', 'Grace'),
    ('David', 'Henry'), ('Eve', 'Iris'), ('Frank', 'Jack'),
]
G.add_edges_from(edges)
# Basic size/density summary, then four standard centrality rankings.
print("网络摘要:")
print(f"节点数: {G.number_of_nodes()}")
print(f"边数: {G.number_of_edges()}")
print(f"密度: {nx.density(G):.2%}")

def _top5(centrality):
    """Return the five highest-scoring (node, score) pairs."""
    return sorted(centrality.items(), key=lambda kv: kv[1], reverse=True)[:5]

# 1. Degree centrality: fraction of other nodes each node touches.
degree_centrality = nx.degree_centrality(G)
print("\n1. 度中心性 (前5名):")
for node, score in _top5(degree_centrality):
    print(f" {node}: {score:.3f}")

# 2. Betweenness centrality: how often a node sits on shortest paths
#    (a proxy for control over information flow).
betweenness_centrality = nx.betweenness_centrality(G)
print("\n2. 中介中心性 (前5名):")
for node, score in _top5(betweenness_centrality):
    print(f" {node}: {score:.3f}")

# 3. Closeness centrality: inverse of the average distance to all
#    other reachable nodes.
closeness_centrality = nx.closeness_centrality(G)
print("\n3. 接近中心性 (前5名):")
for node, score in _top5(closeness_centrality):
    print(f" {node}: {score:.3f}")

# 4. Eigenvector centrality: influence weighted by neighbors' influence.
#    Power iteration may fail to converge; catch only that specific
#    exception instead of the original bare `except:`, which would also
#    have swallowed KeyboardInterrupt/SystemExit and unrelated bugs.
try:
    eigenvector_centrality = nx.eigenvector_centrality(G, max_iter=100)
    print("\n4. 特征向量中心性 (前5名):")
    for node, score in _top5(eigenvector_centrality):
        print(f" {node}: {score:.3f}")
except nx.PowerIterationFailedConvergence:
    print("\n4. 特征向量中心性: 未收敛")
# 5. Community detection via greedy modularity maximization.
from networkx.algorithms import community

communities = list(community.greedy_modularity_communities(G))
print(f"\n5. 社区检测:")
print(f"社区数量: {len(communities)}")
for idx, members in enumerate(communities, start=1):
    print(f" 社区 {idx}: {list(members)}")

# 6. Whole-network statistics: degree spread, clustering, triangles.
degrees = [deg for _, deg in G.degree()]
print(f"\n6. 网络统计:")
print(f"平均度: {np.mean(degrees):.2f}")
print(f"最大度: {max(degrees)}")
print(f"最小度: {min(degrees)}")
print(f"聚类系数: {nx.average_clustering(G):.3f}")
# nx.triangles counts each triangle once per corner, hence the // 3.
print(f"三角形数量: {sum(nx.triangles(G).values()) // 3}")
# Four-panel visualization of the network.
fig, axes = plt.subplots(2, 2, figsize=(15, 12))

# One shared spring layout; fixed seed keeps the picture reproducible.
pos = nx.spring_layout(G, k=0.5, iterations=50, seed=42)

# Panel 1: network colored by degree centrality.
ax = axes[0, 0]
node_colors = [degree_centrality[node] for node in G.nodes()]
nx.draw_networkx_nodes(G, pos, node_color=node_colors, node_size=1000,
                       cmap='YlOrRd', ax=ax)
nx.draw_networkx_edges(G, pos, alpha=0.5, ax=ax)
nx.draw_networkx_labels(G, pos, font_size=8, ax=ax)
ax.set_title('网络图 (按度中心性着色)')
ax.axis('off')

# Panel 2: network colored by detected community.
ax = axes[0, 1]
colors = plt.cm.Set3(np.linspace(0, 1, len(communities)))
node_to_color = {node: colors[i]
                 for i, comm in enumerate(communities)
                 for node in comm}
color_map = [node_to_color[node] for node in G.nodes()]
nx.draw_networkx_nodes(G, pos, node_color=color_map, node_size=1000, ax=ax)
nx.draw_networkx_edges(G, pos, alpha=0.5, ax=ax)
nx.draw_networkx_labels(G, pos, font_size=8, ax=ax)
ax.set_title('网络图 (按社区着色)')
ax.axis('off')

# Panel 3: bar comparison of the top-8 nodes across three centralities.
ax = axes[1, 0]
# BUG FIX: the original used .head(8), which keeps the first 8 nodes in
# insertion order — not the 8 highest-ranked nodes the title promises.
# Sort by degree centrality (descending) before truncating.
centrality_df = (
    pd.DataFrame({
        'Degree': degree_centrality,
        'Betweenness': betweenness_centrality,
        'Closeness': closeness_centrality,
    })
    .sort_values('Degree', ascending=False)
    .head(8)
)
centrality_df.plot(kind='barh', ax=ax, width=0.8)
ax.set_xlabel('中心性分数')
ax.set_title('前8个节点 - 中心性比较')
ax.legend(loc='lower right')
ax.grid(True, alpha=0.3, axis='x')

# Panel 4: degree distribution histogram.
ax = axes[1, 1]
degree_count = Counter(d for _, d in G.degree())
degrees_unique = sorted(degree_count)
counts = [degree_count[d] for d in degrees_unique]
ax.bar(degrees_unique, counts, color='steelblue', edgecolor='black', alpha=0.7)
ax.set_xlabel('度')
ax.set_ylabel('计数')
ax.set_title('度分布')
ax.grid(True, alpha=0.3, axis='y')

plt.tight_layout()
plt.show()
# 7. Path analysis: shortest-path distance between two named people.
print(f"\n7. 路径分析:")
try:
    hops = nx.shortest_path_length(G, 'Alice', 'Jack')
except nx.NetworkXNoPath:
    print("节点之间不存在路径")
else:
    print(f"从 Alice 到 Jack 的最短路径长度: {hops}")

# 8. Connectivity: is the graph one piece, and how many components?
print(f"\n8. 连通性分析:")
print(f"是否连通: {nx.is_connected(G)}")
num_components = nx.number_connected_components(G)
print(f"连通分量数量: {num_components}")
# 9. Node similarity via Jaccard overlap of closed neighborhoods.
def jaccard_similarity(node1, node2):
    """Jaccard similarity of two nodes' closed neighborhoods in G.

    Each neighborhood contains the node itself plus its direct
    neighbors; the score is |intersection| / |union|, in [0, 1].
    """
    hood1 = set(G.neighbors(node1))
    hood1.add(node1)
    hood2 = set(G.neighbors(node2))
    hood2.add(node2)
    union = hood1 | hood2
    if not union:
        return 0
    return len(hood1 & hood2) / len(union)

print(f"\n9. 节点相似性 (Jaccard):")
print(f"Alice & Bob: {jaccard_similarity('Alice', 'Bob'):.3f}")
print(f"Alice & Jack: {jaccard_similarity('Alice', 'Jack'):.3f}")
# 10. Composite influence score: weighted blend of three centralities
#     (degree 40%, betweenness 30%, closeness 30%).
influence_score = {
    node: degree_centrality[node] * 0.4
    + betweenness_centrality[node] * 0.3
    + closeness_centrality[node] * 0.3
    for node in G.nodes()
}
print(f"\n10. 影响力分数 (前5名):")
ranked = sorted(influence_score.items(), key=lambda kv: kv[1], reverse=True)
for node, score in ranked[:5]:
    print(f" {node}: {score:.3f}")

# Final one-line takeaways.
divider = "=" * 50
print("\n" + divider)
print("网络分析摘要")
print(divider)
print(f"最具影响力: {max(influence_score, key=influence_score.get)}")
print(f"连接最多: {max(degree_centrality, key=degree_centrality.get)}")
print(f"网络瓶颈: {max(betweenness_centrality, key=betweenness_centrality.get)}")
print(f"最接近所有节点: {max(closeness_centrality, key=closeness_centrality.get)}")
print(divider)
广告位招租
在这里展示您的产品或服务
触达数万 AI 开发者,精准高效
每周安装量
0
代码仓库
GitHub 星标数
116
首次出现时间
1970年1月1日
安全审计
This skill enables analysis of network structures to identify communities, measure centrality, detect influential nodes, and visualize complex relationships in social networks, organizational structures, and interconnected systems.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
from collections import defaultdict, Counter
import seaborn as sns
# Build a sample undirected social network.
G = nx.Graph()

# Nodes carry 'role' and 'dept' attributes describing each person.
nodes = [
    ('Alice', {'role': 'Manager', 'dept': 'Sales'}),
    ('Bob', {'role': 'Engineer', 'dept': 'Tech'}),
    ('Carol', {'role': 'Designer', 'dept': 'Design'}),
    ('David', {'role': 'Engineer', 'dept': 'Tech'}),
    ('Eve', {'role': 'Analyst', 'dept': 'Sales'}),
    ('Frank', {'role': 'Manager', 'dept': 'HR'}),
    ('Grace', {'role': 'Designer', 'dept': 'Design'}),
    ('Henry', {'role': 'Engineer', 'dept': 'Tech'}),
    ('Iris', {'role': 'Analyst', 'dept': 'Sales'}),
    ('Jack', {'role': 'Manager', 'dept': 'Finance'}),
]
for node, attrs in nodes:
    G.add_node(node, **attrs)

# Edges (relationships).  The original list repeated five pairs (e.g.
# ('Eve', 'Alice') after ('Alice', 'Eve')); an undirected nx.Graph
# silently drops duplicates, so they are removed here for clarity.
# The resulting graph is identical: 10 nodes, 12 edges.
edges = [
    ('Alice', 'Bob'), ('Alice', 'Carol'), ('Alice', 'Eve'),
    ('Alice', 'Frank'), ('Bob', 'Carol'), ('Bob', 'David'),
    ('Bob', 'Henry'), ('Carol', 'David'), ('Carol', 'Grace'),
    ('David', 'Henry'), ('Eve', 'Iris'), ('Frank', 'Jack'),
]
G.add_edges_from(edges)
# Basic size/density summary, then four standard centrality rankings.
print("Network Summary:")
print(f"Nodes: {G.number_of_nodes()}")
print(f"Edges: {G.number_of_edges()}")
print(f"Density: {nx.density(G):.2%}")

def _top5(centrality):
    """Return the five highest-scoring (node, score) pairs."""
    return sorted(centrality.items(), key=lambda kv: kv[1], reverse=True)[:5]

# 1. Degree centrality: fraction of other nodes each node touches.
degree_centrality = nx.degree_centrality(G)
print("\n1. Degree Centrality (Top 5):")
for node, score in _top5(degree_centrality):
    print(f" {node}: {score:.3f}")

# 2. Betweenness centrality: how often a node sits on shortest paths
#    (a proxy for control over information flow).
betweenness_centrality = nx.betweenness_centrality(G)
print("\n2. Betweenness Centrality (Top 5):")
for node, score in _top5(betweenness_centrality):
    print(f" {node}: {score:.3f}")

# 3. Closeness centrality: inverse of the average distance to all
#    other reachable nodes.
closeness_centrality = nx.closeness_centrality(G)
print("\n3. Closeness Centrality (Top 5):")
for node, score in _top5(closeness_centrality):
    print(f" {node}: {score:.3f}")

# 4. Eigenvector centrality: influence weighted by neighbors' influence.
#    Power iteration may fail to converge; catch only that specific
#    exception instead of the original bare `except:`, which would also
#    have swallowed KeyboardInterrupt/SystemExit and unrelated bugs.
try:
    eigenvector_centrality = nx.eigenvector_centrality(G, max_iter=100)
    print("\n4. Eigenvector Centrality (Top 5):")
    for node, score in _top5(eigenvector_centrality):
        print(f" {node}: {score:.3f}")
except nx.PowerIterationFailedConvergence:
    print("\n4. Eigenvector Centrality: Not converged")
# 5. Community detection via greedy modularity maximization.
from networkx.algorithms import community

communities = list(community.greedy_modularity_communities(G))
print(f"\n5. Community Detection:")
print(f"Number of communities: {len(communities)}")
for idx, members in enumerate(communities, start=1):
    print(f" Community {idx}: {list(members)}")

# 6. Whole-network statistics: degree spread, clustering, triangles.
degrees = [deg for _, deg in G.degree()]
print(f"\n6. Network Statistics:")
print(f"Average Degree: {np.mean(degrees):.2f}")
print(f"Max Degree: {max(degrees)}")
print(f"Min Degree: {min(degrees)}")
print(f"Clustering Coefficient: {nx.average_clustering(G):.3f}")
# nx.triangles counts each triangle once per corner, hence the // 3.
print(f"Number of Triangles: {sum(nx.triangles(G).values()) // 3}")
# Four-panel visualization of the network.
fig, axes = plt.subplots(2, 2, figsize=(15, 12))

# One shared spring layout; fixed seed keeps the picture reproducible.
pos = nx.spring_layout(G, k=0.5, iterations=50, seed=42)

# Panel 1: network colored by degree centrality.
ax = axes[0, 0]
node_colors = [degree_centrality[node] for node in G.nodes()]
nx.draw_networkx_nodes(G, pos, node_color=node_colors, node_size=1000,
                       cmap='YlOrRd', ax=ax)
nx.draw_networkx_edges(G, pos, alpha=0.5, ax=ax)
nx.draw_networkx_labels(G, pos, font_size=8, ax=ax)
ax.set_title('Network Graph (Colored by Degree Centrality)')
ax.axis('off')

# Panel 2: network colored by detected community.
ax = axes[0, 1]
colors = plt.cm.Set3(np.linspace(0, 1, len(communities)))
node_to_color = {node: colors[i]
                 for i, comm in enumerate(communities)
                 for node in comm}
color_map = [node_to_color[node] for node in G.nodes()]
nx.draw_networkx_nodes(G, pos, node_color=color_map, node_size=1000, ax=ax)
nx.draw_networkx_edges(G, pos, alpha=0.5, ax=ax)
nx.draw_networkx_labels(G, pos, font_size=8, ax=ax)
ax.set_title('Network Graph (Colored by Community)')
ax.axis('off')

# Panel 3: bar comparison of the top-8 nodes across three centralities.
ax = axes[1, 0]
# BUG FIX: the original used .head(8), which keeps the first 8 nodes in
# insertion order — not the 8 highest-ranked nodes the title promises.
# Sort by degree centrality (descending) before truncating.
centrality_df = (
    pd.DataFrame({
        'Degree': degree_centrality,
        'Betweenness': betweenness_centrality,
        'Closeness': closeness_centrality,
    })
    .sort_values('Degree', ascending=False)
    .head(8)
)
centrality_df.plot(kind='barh', ax=ax, width=0.8)
ax.set_xlabel('Centrality Score')
ax.set_title('Top 8 Nodes - Centrality Comparison')
ax.legend(loc='lower right')
ax.grid(True, alpha=0.3, axis='x')

# Panel 4: degree distribution histogram.
ax = axes[1, 1]
degree_count = Counter(d for _, d in G.degree())
degrees_unique = sorted(degree_count)
counts = [degree_count[d] for d in degrees_unique]
ax.bar(degrees_unique, counts, color='steelblue', edgecolor='black', alpha=0.7)
ax.set_xlabel('Degree')
ax.set_ylabel('Count')
ax.set_title('Degree Distribution')
ax.grid(True, alpha=0.3, axis='y')

plt.tight_layout()
plt.show()
# 7. Path analysis: shortest-path distance between two named people.
print(f"\n7. Path Analysis:")
try:
    hops = nx.shortest_path_length(G, 'Alice', 'Jack')
except nx.NetworkXNoPath:
    print("No path exists between nodes")
else:
    print(f"Shortest path from Alice to Jack: {hops}")

# 8. Connectivity: is the graph one piece, and how many components?
print(f"\n8. Connectivity Analysis:")
print(f"Is connected: {nx.is_connected(G)}")
num_components = nx.number_connected_components(G)
print(f"Number of connected components: {num_components}")
# 9. Node similarity via Jaccard overlap of closed neighborhoods.
def jaccard_similarity(node1, node2):
    """Jaccard similarity of two nodes' closed neighborhoods in G.

    Each neighborhood contains the node itself plus its direct
    neighbors; the score is |intersection| / |union|, in [0, 1].
    """
    hood1 = set(G.neighbors(node1))
    hood1.add(node1)
    hood2 = set(G.neighbors(node2))
    hood2.add(node2)
    union = hood1 | hood2
    if not union:
        return 0
    return len(hood1 & hood2) / len(union)

print(f"\n9. Node Similarity (Jaccard):")
print(f"Alice & Bob: {jaccard_similarity('Alice', 'Bob'):.3f}")
print(f"Alice & Jack: {jaccard_similarity('Alice', 'Jack'):.3f}")
# 10. Composite influence score: weighted blend of three centralities
#     (degree 40%, betweenness 30%, closeness 30%).
influence_score = {
    node: degree_centrality[node] * 0.4
    + betweenness_centrality[node] * 0.3
    + closeness_centrality[node] * 0.3
    for node in G.nodes()
}
print(f"\n10. Influence Score (Top 5):")
ranked = sorted(influence_score.items(), key=lambda kv: kv[1], reverse=True)
for node, score in ranked[:5]:
    print(f" {node}: {score:.3f}")

# Final one-line takeaways.
divider = "=" * 50
print("\n" + divider)
print("NETWORK ANALYSIS SUMMARY")
print(divider)
print(f"Most influential: {max(influence_score, key=influence_score.get)}")
print(f"Most connected: {max(degree_centrality, key=degree_centrality.get)}")
print(f"Network bottleneck: {max(betweenness_centrality, key=betweenness_centrality.get)}")
print(f"Closest to all: {max(closeness_centrality, key=closeness_centrality.get)}")
print(divider)
Weekly Installs
0
Repository
GitHub Stars
116
First Seen
Jan 1, 1970
Security Audits
AI Elements:基于shadcn/ui的AI原生应用组件库,快速构建对话界面
53,500 周安装