py2neo + Neo4j

Starting from a given node id, this script uses py2neo to query Neo4j over Bolt and repeatedly expands incoming relationships to the requested depth, from inside a PySpark driver program (the Spark context is only set up here, not used for the traversal itself).

#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys

# Environment-specific location of locally installed packages.
sys.path.append("/home/core_adm/wei_chen")

from graphframes import GraphFrame  # imported in the post but unused below
from pyspark import SparkConf, SparkContext
from pyspark.sql import HiveContext
from py2neo import Graph

# Connect to Neo4j over the Bolt protocol.
graph = Graph("bolt://10.2.5.20:7687", user="neo4j", password="6bc405faee9d7206196cc67fdbb555f6")

def FindDepthNode(spark, NodeSearch, Depth):
    # Return every incoming edge (n)-[r]->(m) of the node whose internal
    # id equals NodeSearch, as a list of {src, dst, relation} dicts.
    sql = ("MATCH (n)-[r]->(m) WHERE id(m) = $index "
           "RETURN id(n) AS src, id(m) AS dst, type(r) AS relation")
    return graph.run(sql, index=NodeSearch).data()
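graph.run(...).data() returns one dictionary per row, keyed by the RETURN aliases. For a starting id of 2, the result looks something like the following (the node ids and relationship type here are made up for illustration):

[{'src': 5, 'dst': 2, 'relation': 'KNOWS'},
 {'src': 7, 'dst': 2, 'relation': 'KNOWS'}]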


def Graph_ini(spark, NodeSearch, Depth):
    # Seed the frontier with the direct predecessors of the starting node.
    edges = FindDepthNode(spark, NodeSearch, Depth)
    print(edges)
    for i in range(Depth):
        # Expand one level outward: gather the incoming edges of every
        # source node on the current frontier, accumulating the results
        # so that no branch of the traversal is dropped.
        TempNode = []
        for node in edges:
            print(node['src'])
            TempNode.extend(FindDepthNode(spark, node['src'], Depth))
        edges = TempNode

    print(edges)
    print(NodeSearch)


if __name__ == "__main__":
    conf = SparkConf()
    conf.set("spark.port.maxRetries", "100")
    sc = SparkContext(conf=conf, appName="spark_Count_Operation")
    sqlContext = HiveContext(sc)
    spark = sqlContext.sparkSession
    sc.setCheckpointDir("hdfs:///tmp/chen")
    NodeSearch = 2  # internal Neo4j id of the node to start from
    Depth = 3       # number of levels to expand
    Graph_ini(spark, NodeSearch, Depth)
    sc.stop()
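The GraphFrame import at the top is never used in the script itself. As a minimal sketch, the edge dictionaries collected by FindDepthNode could be turned into a GraphFrame like this (the helper name, the column names, and deriving the vertex set from edge endpoints are assumptions, not part of the original post):

from graphframes import GraphFrame

def edges_to_graphframe(spark, edges):
    # edges: list of {'src', 'dst', 'relation'} dicts from FindDepthNode.
    e = spark.createDataFrame(
        [(row['src'], row['dst'], row['relation']) for row in edges],
        ["src", "dst", "relationship"])
    # GraphFrame needs a vertices DataFrame with an "id" column; derive it
    # from the endpoints that actually appear in the edge list.
    v = e.selectExpr("src AS id").union(e.selectExpr("dst AS id")).distinct()
    return GraphFrame(v, e)

GraphFrame's built-in operations (e.g. g.bfs or g.find) could then take over the level-by-level expansion that Graph_ini does by hand.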

Reposted from blog.csdn.net/sinat_19672945/article/details/82625519