BT Torrent Crawler and Torrent Parser (Written in Python)

Recently I watched Qvod (快播) get surrounded by a large contingent of police, and it was clear the company was bound to shut a great deal down, leaving many fans at a loose end. So I dove headlong into studying DHT network technology.

There is open-source code for this floating around, so I took it and gave it a second rewrite. Heh, on to the code:

#encoding: utf-8
import socket
from hashlib import sha1
from random import randint
from struct import unpack, pack
from socket import inet_aton, inet_ntoa
from bisect import bisect_left
from threading import Timer
from time import sleep
import MySQLdb
from datetime import *
import time

from bencode import bencode, bdecode

BOOTSTRAP_NODES = [
    ("router.bittorrent.com", 6881),
    ("dht.transmissionbt.com", 6881),
    ("router.utorrent.com", 6881)
]
TID_LENGTH = 4        # transaction-ID length for KRPC messages
KRPC_TIMEOUT = 10     # seconds between re-join checks
REBORN_TIME = 5 * 60  # seconds between node-ID resets
K = 8                 # Kademlia bucket size

def entropy(length):
    # Build a string of `length` random bytes.
    s = ""
    for i in range(length):
        s += chr(randint(0, 255))
    return s

def random_id():
    # A fresh 160-bit node ID: the SHA-1 digest of 20 random bytes.
    h = sha1()
    h.update(entropy(20))
    return h.digest()

def decode_nodes(nodes):
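    # Compact node format (BEP 5): each 26-byte entry packs a 20-byte node ID,
    # a 4-byte IPv4 address, and a 2-byte big-endian port.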
    n = []
    length = len(nodes)
    if (length % 26) != 0:
        return n
    for i in range(0, length, 26):
        nid = nodes[i:i+20]
        ip = inet_ntoa(nodes[i+20:i+24])
        port = unpack("!H", nodes[i+24:i+26])[0]
        n.append( (nid, ip, port) )
    return n

def encode_nodes(nodes):
    strings = []
    for node in nodes:
        s = "%s%s%s" % (node.nid, inet_aton(node.ip), pack("!H", node.port))
        strings.append(s)

    return "".join(strings)

def intify(hstr):
    # Interpret a 20-byte node ID / infohash as a 160-bit integer.
    return long(hstr.encode("hex"), 16)

def timer(t, f):
    Timer(t, f).start()

class BucketFull(Exception):
    pass

class KRPC(object):
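    # The DHT's bencoded, UDP-based RPC layer (BEP 5): incoming messages are
    # dispatched first by type ("q" = query, "r" = response), then by query name.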
    def __init__(self):
        self.types = {
            "r": self.response_received,
            "q": self.query_received
        }
        self.actions = {
            "ping": self.ping_received,
            "find_node": self.find_node_received,
            "get_peers": self.get_peers_received,
            "announce_peer": self.announce_peer_received,
        }

        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.bind(("0.0.0.0", self.port))

    def response_received(self, msg, address):
        self.find_node_handler(msg)

    def query_received(self, msg, address):
        try:
            self.actions[msg["q"]](msg, address)
        except KeyError:
            pass

    def send_krpc(self, msg, address):
        try:
            self.socket.sendto(bencode(msg), address)
        except Exception:
            pass

class Client(KRPC):
    def __init__(self, table):
        self.table = table

        timer(KRPC_TIMEOUT, self.timeout)
        timer(REBORN_TIME, self.reborn)
        KRPC.__init__(self)

    def find_node(self, address, nid=None):
        nid = self.get_neighbor(nid) if nid else self.table.nid
        tid = entropy(TID_LENGTH)

        msg = {
            "t": tid,
            "y": "q",
            "q": "find_node",
            "a": {"id": nid, "target": random_id()}
        }
        self.send_krpc(msg, address)

    def find_node_handler(self, msg):
        try:
            nodes = decode_nodes(msg["r"]["nodes"])
            for node in nodes:
                (nid, ip, port) = node
                if len(nid) != 20: continue
                if nid == self.table.nid: continue
                self.find_node( (ip, port), nid )
        except KeyError:
            pass

    def joinDHT(self):
        for address in BOOTSTRAP_NODES:
            self.find_node(address)

    def timeout(self):
        if len( self.table.buckets ) < 2:
            self.joinDHT()
        timer(KRPC_TIMEOUT, self.timeout)

    def reborn(self):
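        # Adopt a fresh random node ID and reset the routing table, so the
        # crawler keeps drifting into new regions of the 160-bit keyspace.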
        self.table.nid = random_id()
        self.table.buckets = [ KBucket(0, 2**160) ]
        timer(REBORN_TIME, self.reborn)

    def start(self):
        self.joinDHT()

        while True:
            try:
                (data, address) = self.socket.recvfrom(65536)
                msg = bdecode(data)
                self.types[msg["y"]](msg, address)
            except Exception:
                pass

    def get_neighbor(self, target):
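        # Fake an ID close to the target: keeping the target's first 10 bytes
        # makes other nodes treat us as a near neighbor and send traffic our way.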
        return target[:10]+random_id()[10:]

class Server(Client):
    def __init__(self, master, table, port):
        self.table = table
        self.master = master
        self.port = port
        Client.__init__(self, table)

    def ping_received(self, msg, address):
        try:
            nid = msg["a"]["id"]
            msg = {
                "t": msg["t"],
                "y": "r",
                "r": {"id": self.get_neighbor(nid)}
            }
            self.send_krpc(msg, address)
            self.find_node(address, nid)
        except KeyError:
            pass

    def find_node_received(self, msg, address):
        try:
            target = msg["a"]["target"]
            neighbors = self.table.get_neighbors(target)

            nid = msg["a"]["id"]
            msg = {
                "t": msg["t"],
                "y": "r",
                "r": {
                    "id": self.get_neighbor(target),
                    "nodes": encode_nodes(neighbors)
                }
            }
            self.table.append(KNode(nid, *address))
            self.send_krpc(msg, address)
            self.find_node(address, nid)
        except KeyError:
            pass

    def get_peers_received(self, msg, address):
        try:
            infohash = msg["a"]["info_hash"]

            neighbors = self.table.get_neighbors(infohash)

            nid = msg["a"]["id"]
            msg = {
                "t": msg["t"],
                "y": "r",
                "r": {
                    "id": self.get_neighbor(infohash),
                    "nodes": encode_nodes(neighbors)
                }
            }
            self.table.append(KNode(nid, *address))
            self.send_krpc(msg, address)
            self.master.log(infohash)
            self.find_node(address, nid)
        except KeyError:
            pass

    def announce_peer_received(self, msg, address):
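        # announce_peer means a peer is actually downloading this torrent, so
        # the infohash logged here is the most reliable harvest point.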
        try:
            infohash = msg["a"]["info_hash"]
            nid = msg["a"]["id"]

            msg = {
                "t": msg["t"],
                "y": "r",
                "r": {"id": self.get_neighbor(infohash)}
            }

            self.table.append(KNode(nid, *address))
            self.send_krpc(msg, address)
            self.master.log(infohash)
            self.find_node(address, nid)
        except KeyError:
            pass

class KTable(object):
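    # A flat, sorted list of KBuckets covering the whole 160-bit ID space.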
    def __init__(self, nid):
        self.nid = nid
        self.buckets = [ KBucket(0, 2**160) ]

    def append(self, node):
        index = self.bucket_index(node.nid)
        try:
            bucket = self.buckets[index]
            bucket.append(node)
        except IndexError:
            return
        except BucketFull:
            if not bucket.in_range(self.nid): return

            self.split_bucket(index)
            self.append(node)

    def get_neighbors(self, target):
        nodes = []
        if len(self.buckets) == 0: return nodes
        if len(target) != 20 : return nodes

        index = self.bucket_index(target)
        try:
            # Copy the bucket's node list so that extend() below does not
            # mutate the bucket itself.
            nodes = self.buckets[index].nodes[:]
            left = index - 1
            right = index + 1

            while len(nodes) < K and ((left >= 0) or (right < len(self.buckets))):
                if left >= 0:
                    nodes.extend(self.buckets[left].nodes)

                if right < len(self.buckets):
                    nodes.extend(self.buckets[right].nodes)

                left -= 1
                right += 1

            num = intify(target)
            # Sort by XOR distance to the target; the K closest win.
            nodes.sort(lambda a, b, num=num: cmp(num ^ intify(a.nid), num ^ intify(b.nid)))
            return nodes[:K]
        except IndexError:
            return nodes

    def bucket_index(self, target):
        return bisect_left(self.buckets, intify(target))

    def split_bucket(self, index):
        old = self.buckets[index]
        point = old.max - (old.max - old.min)/2
        new = KBucket(point, old.max)
        old.max = point
        self.buckets.insert(index + 1, new)
        for node in old.nodes[:]:
            if new.in_range(node.nid):
                new.append(node)
                old.remove(node)

    def __iter__(self):
        for bucket in self.buckets:
            yield bucket

class KBucket(object):
    __slots__ = ("min", "max", "nodes")

    def __init__(self, min, max):
        self.min = min
        self.max = max
        self.nodes = []

    def append(self, node):
        if node in self:
            self.remove(node)
            self.nodes.append(node)
        else:
            if len(self) < K:
                self.nodes.append(node)
            else:
                raise BucketFull

    def remove(self, node):
        self.nodes.remove(node)

    def in_range(self, target):
        return self.min <= intify(target) < self.max

    def __len__(self):
        return len(self.nodes)

    def __contains__(self, node):
        return node in self.nodes

    def __iter__(self):
        for node in self.nodes:
            yield node

    def __lt__(self, target):
        return self.max <= target

class KNode(object):
    __slots__ = ("nid", "ip", "port")

    def __init__(self, nid, ip, port):
        self.nid = nid
        self.ip = ip
        self.port = port

    def __eq__(self, other):
        return self.nid == other.nid

#using example
class Master(object):
    def __init__(self, f):
        self.f = f
        try:
            self.conn = MySQLdb.connect(host="localhost", user="root", passwd="", db="bt", port=3306)
            self.cur = self.conn.cursor()
        except MySQLdb.Error, e:
            print "Mysql Error %d: %s" % (e.args[0], e.args[1])

    def log(self, infohash):
        try:
            sql = "insert into bt_main_new(hash,name,length,date) values(%s,%s,%s,%s)"
            date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            # Store the infohash hex-encoded; the raw 20-byte string is binary.
            self.cur.execute(sql, (infohash.encode("hex"), "", "", date))
            self.conn.commit()
            # Keep the cursor/connection open: log() runs once per harvested
            # infohash for the crawler's whole lifetime.
        except MySQLdb.Error, e:
            print "Mysql Error %d: %s" % (e.args[0], e.args[1])
        self.f.write(infohash.encode("hex") + "\n")
        self.f.flush()
try:
    d = date.today()
    f = open("%s.log" % d, "a")
    m = Master(f)
    s = Server(m, KTable(random_id()), 8006)
    s.start()
except KeyboardInterrupt:
    s.socket.close()
    f.close()

This crawler automatically collects the BT torrents being shared across the network and writes them to a file and to a database. It only captures each torrent's infohash; you still have to fetch the torrent file itself from the network and parse it.
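Incidentally, a harvested infohash maps directly onto a magnet link that any BitTorrent client can open. A minimal sketch (the hex string below is a made-up placeholder, not a real torrent):

# Build a magnet URI from a 40-character hex infohash.
def to_magnet(hex_hash):
    return "magnet:?xt=urn:btih:%s" % hex_hash

print to_magnet("0123456789abcdef0123456789abcdef01234567")
# -> magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567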

As for downloading the torrents themselves: as most of you know, there are a few foreign sites that share torrent files for free, so you can fetch them by infohash and analyze them. Below is the torrent-parsing program I wrote:

#! /usr/bin/python

# -*- coding: utf-8 -*-

import MySQLdb
from datetime import *
import time
import re
from time import sleep
import bencode
import urllib2
import base64

try:
    conn = MySQLdb.connect(host="localhost", user="root", passwd="", db="bt", port=3306)
    cur = conn.cursor()
    sql = "select * from bt_main where name = '' order by id desc"
    count = cur.execute(sql)
    rows = cur.fetchall()
    for row in rows:

        if row[2].strip() != '':
            continue

        row_id = row[0]
        info_hash = row[1]
        url = "http://haofuli.duapp.com/go/info.php?hash=%s" % info_hash
        data = urllib2.urlopen(url).read()
        if data == "error!":
            try:
                sql = "update bt_main set isTrue = 0 where id = %s"
                cur.execute(sql, (row_id,))
                conn.commit()
            except MySQLdb.Error, e:
                print "Mysql Error %d: %s" % (e.args[0], e.args[1])
        else:
            # Decode the torrent; skip this row if it is not valid bencode.
            try:
                torrent = bencode.bdecode(data)
            except Exception, e:
                continue
            if "name.utf-8" in torrent["info"]:
                filename = torrent["info"]["name.utf-8"]
            else:
                filename = torrent["info"]["name"]
            # Single-file torrents carry a top-level "length"; multi-file ones don't.
            if "length" in torrent["info"]:
                length = torrent["info"]["length"]
            else:
                length = 0
            try:
                sql = "update bt_main set name = %s , length = %s , isTrue = 1 where id = %s"
                cur.execute(sql, (base64.b64encode(filename), length, row_id))
                conn.commit()
            except MySQLdb.Error, e:
                print "Mysql Error %d: %s" % (e.args[0], e.args[1])
except MySQLdb.Error, e:
    print "Mysql Error %d: %s" % (e.args[0], e.args[1])

The parsing above is only a first pass; multi-file torrents are not handled yet. Lately, while parsing torrents, I keep running into odd padding-file entries; possibly my library version is too old. I am still working through it.
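For what it is worth, here is a rough sketch of how multi-file torrents (and those padding entries) could be handled. Everything here is my own assumption rather than part of the program above: a multi-file torrent keeps its file list in info["files"] instead of a top-level "length", and padding entries typically show up as "_____padding_file..." names or with a "p" flag in an "attr" field (BEP 47):

# A sketch only -- assumes BEP 47 style padding files.
def total_length(info):
    if "length" in info:               # single-file torrent
        return info["length"]
    total = 0
    for f in info.get("files", []):    # multi-file torrent
        path = "/".join(f.get("path.utf-8", f.get("path", [])))
        if "_____padding_file" in path or "p" in f.get("attr", ""):
            continue                   # skip padding entries
        total += f["length"]
    return total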

As for the BT torrent site: I built one in PHP. I won't mention the domain here, since CSDN doesn't allow it; reply if you want it.
