def run(nodes: RDD[(Long, Long)], edges: RDD[(Long, Long, Double)], isDirected: Boolean, partitionNum: Int = 1): Double
参数名称 | 参数含义 | 取值类型
---|---|---
nodes | 从文件读入的节点与对应的社区信息 | RDD[(Long, Long)]
edges | 从文件读入的图边列表信息(权值大于0) | RDD[(Long, Long, Double)]
isDirected | 图的属性信息,是否为有向图 | Boolean
partitionNum | 数据分区个数 | Int(大于0的整型)
// Example: compute the modularity Q of a small 7-node, 3-community undirected graph.
val sparkconf = new SparkConf().setAppName("modularityCompute").setMaster(host)
val sc = new SparkContext(sparkconf)
// Nodes as (nodeId, communityId) pairs — the community assignment read from file.
val nodesRDD = sc.makeRDD(Array((1L, 1L), (2L, 1L), (3L, 2L), (4L, 2L), (5L, 2L), (6L, 3L), (7L, 3L)))
// Edges as (src, dst, weight) triples; per the API contract, weights must be > 0.
val edgesRDD = sc.makeRDD(Array(
  (1L, 2L, 1.0), (1L, 3L, 1.0), (1L, 4L, 1.0), (2L, 3L, 1.0),
  (3L, 4L, 1.0), (3L, 5L, 1.0), (4L, 5L, 1.0), (4L, 6L, 1.0),
  (5L, 1L, 1.0), (5L, 2L, 1.0), (5L, 6L, 1.0), (6L, 7L, 1.0)))
val isDirected = false
val partitionNum = 10
val modularityQ = Modularity.run(nodesRDD, edgesRDD, isDirected, partitionNum)
// NOTE: the original snippet used typographic quotes (“ ”) around the format
// string, which does not compile in Scala; replaced with straight quotes.
println("Modularity:\t%.5f".format(modularityQ))
Modularity:	0.01042