# coding:utf-8
__author__ = 'chenhuachao'
'''
Connect PySpark to Kafka, count visitors' IPs over a sliding window, and build a simple real-time anti-scraping job.
'''
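# Launch sketch (assumptions: the package coordinates below must match your Spark/Scala build,
# and the file name is a placeholder; the script targets the Kafka 0.8 direct-stream API on Python 2):
#   spark-submit --packages org.apache.spark:spark-streaming-kafka-0-8_2.11:2.2.0 this_script.py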
import sys
reload(sys)
sys.setdefaultencoding('utf-8')  # Python 2 only: force UTF-8 as the default encoding
import redis
import datetime
from pyspark.streaming.kafka import KafkaUtils
from pyspark.streaming import StreamingContext
from pyspark import SparkConf, SparkContext
def parse(logstring):
    '''Return the visitor IP for "onload" events, or None if the record does not qualify.'''
    try:
        # The log line is a Python-literal dict; eval keeps the original behaviour here.
        infodict = eval(logstring.encode('utf-8'))
        ip = infodict.get('ip')
        assert infodict['tj-event'] == 'onload'
        assert ip
        return ip
    except Exception:
        return None
def insert_redis(rdd):
    '''Write IPs whose hit count in the current window passes the threshold to Redis.'''
    conn = redis.Redis(host='redis-IP', port=6380)
    key = 'cheating_ip_set_{0}'.format(datetime.datetime.now().strftime("%Y%m%d"))
    for ip, count in rdd.collect():
        print ip, count
        # 3 or more "onload" events from one IP inside the window => treat it as a scraper
        if count >= 3:
            conn.sadd(key, ip)
            conn.expire(key, 86400)  # keep today's blacklist set for 24 hours
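# Hypothetical consumer-side sketch (not part of the original job): a web front end or
# gateway could call something like this to decide whether to block a request.
def is_blacklisted(conn, ip):
    '''Return True if ip is in today's cheating-IP set written by insert_redis.'''
    key = 'cheating_ip_set_{0}'.format(datetime.datetime.now().strftime("%Y%m%d"))
    return conn.sismember(key, ip)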
if __name__ == "__main__":
    topic = 'statis-detailinfo-pageevent'
    sc = SparkContext(appName="pyspark_kafka_streaming_chc")
    ssc = StreamingContext(sc, 10)  # 10-second batch interval
    checkpointDirectory = '/tmp/checkpoint/cp3'
    ssc.checkpoint(checkpointDirectory)
    kvs = KafkaUtils.createDirectStream(ssc, [topic],
                                        kafkaParams={"auto.offset.reset": "largest",
                                                     "metadata.broker.list": "kafka-IP:9092,kafka-IP:9092"})
    # kvs.map(lambda line: line[1]).map(parse).pprint()
    # A sliding window is used here; see http://www.kancloud.cn/kancloud/spark-programming-guide/51567
    # Per-batch version (no window):
    # ipcount = kvs.map(lambda line: line[1]).map(parse).filter(lambda ip: ip).map(lambda ip: (ip, 1)).reduceByKey(lambda a, b: a + b)
    # Windowed version: per-IP counts over the last 30 seconds, recomputed every 10 seconds.
    # Note that reduceByKeyAndWindow's second argument is the optional inverse reduce function.
    ipcount = kvs.map(lambda line: line[1]).map(parse).filter(lambda ip: ip) \
                 .map(lambda ip: (ip, 1)).reduceByKeyAndWindow(lambda a, b: a + b, None, 30, 10)
    # Preprocessing note: cache ipcount if it will be consumed by more than one computation.
    # Each micro-batch RDD of the stream is passed to insert_redis, i.e. foreachRDD(insert_redis).
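    # For example (optional; only worthwhile if ipcount feeds more than one output):
    # ipcount = ipcount.cache()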
    ipcount.foreachRDD(insert_redis)
    # To loop over each RDD's records on the worker nodes instead of collecting to the driver:
    # wordCounts.foreachRDD(lambda rdd: rdd.foreach(sendRecord))
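    # Per-partition variant (sketch; insert_redis_partition is a hypothetical helper that would
    # open one Redis connection per partition and write that partition's (ip, count) pairs):
    # ipcount.foreachRDD(lambda rdd: rdd.foreachPartition(insert_redis_partition))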
    ssc.start()
    ssc.awaitTermination()  # keep the driver alive until the streaming context is stopped