#!/usr/bin/python
# evertracker.py

import socket
import urllib
import cgi
import bencode
import signal
import sys
import time
import types
import random
from pysqlite2 import dbapi2 as sqlite

# Name and versioning
NAME = "EverTracker"
VERSION = "0.1.3"

# Config options for networking
HOST = "hermit"
PORT = 6969
BACKLOG = 5
SIZE = 1024

# Config options for tracker
MAX_NUM_PEERS = 50

# Config options for SQL
DB_NAME = "evdb"

# Socket variable, for use globally
s = None

# Database variable, for use globally
con = None


# Called on startup to initialize the database connection and create the
# necessary tables if they don't already exist.
def init_database():
    global con
    try:
        # Create (or open) the sqlite database file
        con = sqlite.connect(DB_NAME)
        # Table of stats on the torrents the tracker is tracking.
        # "torrentID" is the primary key used to relate peers to torrents.
        con.execute("create table if not exists torrents (torrentID integer not null primary key, complete integer, downloaded integer, incomplete integer, name text, info_hash text)")
        # Table of the peers participating in the different torrents.
        # torrentID is a plain column here (not the primary key), so a torrent
        # can have many peers; the implicit rowid serves as the row key.
        con.execute("create table if not exists peers (torrentID integer, peer_id text, ip text, port integer, uploaded integer, downloaded integer, left integer, status integer, key text, trackerid text)")
        # Commit changes, because it auto-commits before, not after, queries
        con.commit()
    except:
        print "Error initializing the database " + DB_NAME
        cleanup()
        sys.exit(1)


# Generate a number of peers with random dummy data, just for testing purposes
def make_peers(numpeers):
    global con
    peers = []
    print "Inserting test peers into database....",
    sys.stdout.flush()
    for n in range(numpeers):
        peer = (
            "EVERCLIENT" + str(random.randint(0, 9)) * 10,
            ".".join([str(random.randint(0, 255)) for i in range(4)]),
            random.randint(1, 65535),
            random.randint(1, 1000000),
            random.randint(1, 1000000),
            random.randint(1, 1000000),
            random.randint(0, 4),
            "KEY" + str(random.randint(0, 9)) * 17,
            "EVERTRACKER",
        )
        peers.append(peer)
    con.executemany("insert into peers (peer_id,ip,port,uploaded,downloaded,left,status,key,trackerid) values (?,?,?,?,?,?,?,?,?)", peers)
    con.commit()
    print "Done\n"


# Called on startup to bind the socket for networking, using the config
# options defined at the top of the file.
def bind_socket():
    global s
    # Attempt to bind PORT on HOST
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind((HOST, PORT))
        s.listen(BACKLOG)
    # If that fails, except nicely by closing the attempt and displaying debugging output
    except socket.error, (value, message):
        if s:
            s.close()
        print "Could not open socket: " + message
        # Error code 1, unclean exit
        sys.exit(1)


# Cleanup method to call so the socket is properly closed
def cleanup():
    global s
    if s:
        s.close()


# Catch kill signals properly and call the cleanup method, instead of dying
# messily and leaving the socket bound
def signal_handler(sig, frame):
    print 'Quit'
    cleanup()
    sys.exit(0)


# Convert an announce URL to a scrape URL. Takes a string, returns a string,
# or None if the last path component does not start with "announce".
def announce_to_scrape(announce):
    pieces = announce.split("/")
    last = pieces[-1]
    if last[0:8] == "announce":
        return announce[:-len(last)] + "scrape" + last[8:]
    return None
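
# --- Illustrative self-check (not part of the original tracker; nothing calls
# this).  It only demonstrates the expected announce -> scrape mapping using
# placeholder URLs, not real trackers.
def _selftest_announce_to_scrape():
    assert announce_to_scrape("http://hermit:6969/announce") == "http://hermit:6969/scrape"
    assert announce_to_scrape("http://example.com/announce.php") == "http://example.com/scrape.php"
    assert announce_to_scrape("http://example.com/other") is None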

# Get a number of random peers, as determined by numpeers, for use by the
# client that is announcing. Takes an integer, returns a list of dictionaries.
def getpeerlist(numpeers):
    global con
    # Don't give the client more than it really needs: download performance
    # plateaus around 30 peers, and the official client stops accepting new
    # connections once it has 50, so a MAX_NUM_PEERS of about 50 is a good value.
    if numpeers > MAX_NUM_PEERS:
        numpeers = MAX_NUM_PEERS
    # Fetch numpeers rows starting from a random rowid.
    #query = "SELECT peer_id, ip, port FROM peers LIMIT %i" % numpeers
    # abs() is needed because sqlite's random() can return negative values.
    query = "SELECT peer_id,ip,port FROM peers WHERE rowid >= abs(random()) % (SELECT max(rowid) FROM peers) LIMIT ?"
    # cur iterates over tuples of the values from the query
    cur = con.execute(query, (numpeers,))
    # I love Python so, so much.
    # Inline function that attaches the keys tuple back to the values
    func = lambda x: dict(zip(("peer_id", "ip", "port"), x))
    # Now do it over the entire list, WITHOUT A FOR LOOP >:-OOOO
    peers = map(func, cur)
    return peers


# First step: separate out the different lines in the request.
def parse_request(raw):
    # Parse out the GET args properly, so the request data can be extracted.
    # Sample value for raw:
    # GET /announce/?uploaded=314159&compact=&numwant=50&ip=127.0.0.1&info_hash=abcdefghijklmnopqrstuvwxyz&event=started&downloaded=951413&trackerid=&key=AWANG&peer_id=evertestclient000000&port=6881&left=1 HTTP/1.0
    # Host: localhost:6969
    # User-agent: Python-urllib/1.16
    parts = {}
    # Put the lines into a more useable form
    for line in raw.split("\n"):
        # The GET line is where the urlencoded data from the client lives
        if line[:3] == "GET":
            # Parse out just the URL
            url = line.split(" ")[1]
            url = "".join(url.split("?")[1:])
            # Get the dictionary of data from the URL; keep blank values so
            # empty parameters like "compact=" and "trackerid=" survive
            data = cgi.parse_qs(url, keep_blank_values=True)
            # Cheap python tricks :) -- flatten the one-element value lists
            parts['get'] = dict([(x, data[x][0]) for x in data.keys()])
        # This gives the IP address and port of the client
        if line[:4] == "Host":
            parts['host'] = line
        # This says what the client is using to send the request
        if line[:10] == "User-agent":
            parts['user_agent'] = line
    return parts


# Take the output from parse_request and, assuming it's an announce request,
# parse it accordingly. Returns the request dictionary, or None if a required
# key is missing.
def parse_announce(data):
    # Do all the parsing
    parts = parse_request(data)
    data = parts.get('get', {})
    # Check that the request has all the required keys
    required_keys = ['info_hash', 'peer_id', 'port', 'uploaded', 'downloaded', 'left', 'compact', 'event']
    for key in required_keys:
        if not data.has_key(key):
            print "Missing key " + key
            return None
    return data
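
# --- Illustrative sketch, not wired into the tracker -------------------------
# Most real clients send compact=1 and will accept the "peers" value as a
# single packed byte string, 6 bytes per peer: a 4-byte big-endian IPv4
# address followed by a 2-byte big-endian port.  The helper below is an
# assumption-labelled example of how that could be layered on top of
# getpeerlist(); announce_response() below still returns the dictionary form
# and does not call it.
import struct

def compact_peer_list(peers):
    # peers: list of dicts with "ip" and "port" keys, as getpeerlist() returns.
    packed = ""
    for peer in peers:
        try:
            packed += socket.inet_aton(peer["ip"]) + struct.pack(">H", int(peer["port"]))
        except (socket.error, ValueError, KeyError):
            # Skip malformed rows rather than failing the whole response
            continue
    return packed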

# Build the bencoded response to an announce request. Takes the raw request
# text, returns a bencoded string.
def announce_response(data):
    # Takes care of parsing out all the data from the url and verifying it
    data = parse_announce(data)
    print data
    # If a required key was missing, tell the client instead of crashing
    if data is None:
        return bencode.bencode({"failure reason": "Invalid announce request"})
    # Optional keys that the tracker supports
    optional_keys = ['ip', 'numwant', 'key', 'trackerid']
    # If the client sends numwant, use that value, else default to 50
    if data.has_key("numwant"):
        try:
            numwant = int(data['numwant'])
        except ValueError:
            numwant = 50
    else:
        numwant = 50
    # Dummy value for now
    failure_reason = ""
    if failure_reason:
        # Human readable error message
        response = {"failure reason": failure_reason}
    else:
        response = {"warning message": "",          # Human readable warning message
                    "interval": 12 * 60,            # Seconds client should wait before sending another request
                    "min interval": 4 * 60,         # Seconds client must wait before sending another request
                    "tracker id": "EVERTRACKER",    # String client should send back on further announces
                    "complete": 1,                  # Num seeds
                    "incomplete": 0,                # Num leechers
                    "peers": getpeerlist(numwant),  # Peer list, one dict per peer. Defaults to 50; don't send seeds to seeds
                    }
    # Finally, bencode and return the response
    response = bencode.bencode(response)
    return response


# Build the bencoded response to a scrape request. Currently returns dummy
# data; main() does not call it yet.
def scrape_response(data):
    for line in data.split("\n"):
        if line[0:3] == "GET":
            get = line
            break
    get = get.split(" ")[1]
    get = "".join(get.split("?")[1:])
    print cgi.parse_qs(get)
    response = {
        "failure reason": "",               # human readable error, optional
        "flags": {                          # optional stuff
            "min_request_interval": "0",    # time to wait before scraping again
        },
        "files": {                          # keys are the 20-byte info hashes, values are dicts of info on each torrent
            "0" * 20: {                     # placeholder hash
                "complete": 0,              # number of seeds
                "downloaded": 0,            # total number of completed downloads
                "incomplete": 0,            # number of leechers
                "name": "EVERTORRENT",      # optional internal name of torrent
            },
        },
    }
    return bencode.bencode(response)


def main():
    global s, con
    # Initialize networking
    bind_socket()
    # Initialize database
    init_database()
    # Insert test data if there is no data in peers
    cur = con.execute("select * from peers limit 1")
    if cur.fetchone() is None:
        make_peers(5000)
    # We know we've started now
    print NAME + " " + VERSION + "\n Initialized as '" + HOST + "'\n Listening for new connections on port " + str(PORT) + "\n"
    # Attach signal handler
    signal.signal(signal.SIGINT, signal_handler)
    # Start listening for clients
    while 1:
        #try:
        client, address = s.accept()
        data = client.recv(SIZE)
        if data:
            # Send back the bencoded dictionary as a text/plain document, as is required
            body = announce_response(data)
            client.send("HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\nContent-Length: %d\r\n\r\n%s" % (len(body), body))
        client.close()
        #except:
        #    print "Unknown Error:", sys.exc_info()[0]
        #    cleanup()
        #    sys.exit(1)
    cleanup()
    sys.exit(0)


if __name__ == "__main__":
    main()
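
# --- Manual test sketch (comments only; values are dummy test data) ----------
# With the tracker running, a single announce can be simulated from another
# Python shell using the standard urllib module, e.g.:
#
#   import urllib
#   params = urllib.urlencode({
#       "info_hash": "A" * 20,              # placeholder 20-byte hash
#       "peer_id": "evertestclient000000",
#       "port": 6881,
#       "uploaded": 0, "downloaded": 0, "left": 1,
#       "compact": "", "event": "started",
#   })
#   print urllib.urlopen("http://localhost:6969/announce/?" + params).read()
#
# The URL assumes the HOST and PORT settings at the top of this file are
# reachable as localhost:6969; adjust it to match your configuration.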