diff options
Diffstat (limited to 'BitTorrent/track.py')
-rw-r--r-- | BitTorrent/track.py | 607 |
1 file changed, 304 insertions, 303 deletions
diff --git a/BitTorrent/track.py b/BitTorrent/track.py index b6ec2db..fe4788f 100644 --- a/BitTorrent/track.py +++ b/BitTorrent/track.py @@ -12,20 +12,17 @@ import sys import os -import signal import re -from threading import Event -from urlparse import urlparse -from traceback import print_exc from time import time, gmtime, strftime, localtime from random import shuffle from types import StringType, IntType, LongType, ListType, DictType -from binascii import b2a_hex -from cStringIO import StringIO + +from twisted.web import server +from twisted.web.resource import Resource +from twisted.internet import reactor +from twisted.python import log from BitTorrent.parseargs import parseargs, formatDefinitions -from BitTorrent.RawServer_magic import RawServer -from BitTorrent.HTTPHandler import HTTPHandler, months, weekdays from BitTorrent.parsedir import parsedir from BitTorrent.NatCheck import NatCheck from BitTorrent.bencode import bencode, bdecode, Bencached @@ -36,7 +33,7 @@ from BitTorrent import version defaults = [ ('port', 80, _("Port to listen on.")), - ('dfile', None, + ('dfile', '/tmp/dfile.txt', _("file to store recent downloader info in")), ('bind', '', _("ip to bind to locally")), @@ -76,8 +73,6 @@ defaults = [ ('allowed_controls', 0, _("allow special keys in torrents in the allowed_dir to affect " "tracker access")), - ('hupmonitor', 0, - _("whether to reopen the log file upon receipt of HUP signal")), ('show_infopage', 1, _("whether to display an info page when the tracker's root dir " "is loaded")), @@ -93,8 +88,6 @@ defaults = [ "local network IPs (0 = never, 1 = always, 2 = ignore if NAT " "checking is not enabled). 
HTTP proxy headers giving address " "of original client are treated the same as --ip.")), - ('logfile', '', - _("file to write the tracker logs, use - for stdout (default)")), ('allow_get', 0, _("use with allowed_dir; adds a /file?hash={hash} url that " "allows users to download the torrent file")), @@ -105,10 +98,6 @@ defaults = [ _("scrape access allowed (can be none, specific or full)")), ('max_give', 200, _("maximum number of peers to give with any one request")), - ('twisted', -1, - _("Use Twisted network libraries for network connections. 1 means use twisted, 0 means do not use twisted, -1 means autodetect, and prefer twisted")), - ('pid', '/var/run/bittorrent-tracker.pid', - "Path to PID file") ] def statefiletemplate(x): @@ -159,7 +148,6 @@ def statefiletemplate(x): raise ValueError dirkeys[y[1]] = 1 - alas = _("your file may exist elsewhere in the universe\nbut alas, not here\n") def isotime(secs = None): @@ -169,9 +157,9 @@ def isotime(secs = None): http_via_filter = re.compile(' for ([0-9.]+)\Z') -def _get_forwarded_ip(headers): - if headers.has_key('http_x_forwarded_for'): - header = headers['http_x_forwarded_for'] +def _get_forwarded_ip(request): + header = request.getHeader('X-Forwarded-For') + if header: try: x,y = header.split(',') except: @@ -179,20 +167,22 @@ def _get_forwarded_ip(headers): if not is_local_ip(x): return x return y - if headers.has_key('http_client_ip'): - return headers['http_client_ip'] - if headers.has_key('http_via'): - x = http_via_filter.search(headers['http_via']) - try: + header = request.getHeader('Client-IP') + if header: + return header + header = request.getHeader('Via') + if header: + x = http_via_filter.search(header) + if x: return x.group(1) - except: - pass - if headers.has_key('http_from'): - return headers['http_from'] + + header = request.getHeader('From') + if header: + return header return None -def get_forwarded_ip(headers): - x = _get_forwarded_ip(headers) +def get_forwarded_ip(request): + x = 
_get_forwarded_ip(request) if x is None or not is_valid_ipv4(x) or is_local_ip(x): return None return x @@ -222,16 +212,17 @@ def is_local_ip(ip): try: v = [int(x) for x in ip.split('.')] if v[0] == 10 or v[0] == 127 or v[:2] in ([192, 168], [169, 254]): - return 1 + return True if v[0] == 172 and v[1] >= 16 and v[1] <= 31: - return 1 + return True except ValueError: - return 0 - + return False class Tracker(object): - def __init__(self, config, rawserver): + def __init__(self): + + config, files = parseargs([], defaults, 0, 0) self.config = config self.response_size = config['response_size'] self.max_give = config['max_give'] @@ -245,8 +236,7 @@ class Tracker(object): self.favicon = h.read() h.close() except: - print _("**warning** specified favicon file -- %s -- does not exist.") % favicon - self.rawserver = rawserver + log.msg(_("**warning** specified favicon file -- %s -- does not exist.") % favicon) self.cached = {} # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]] self.cached_t = {} # format: infohash: [time, cache] self.times = {} @@ -268,8 +258,7 @@ class Tracker(object): statefiletemplate(tempstate) self.state = tempstate except: - print _("**warning** statefile %s corrupt; resetting") % \ - self.dfile + log.msg(_("**warning** statefile %s corrupt; resetting") % self.dfile) self.downloads = self.state.setdefault('peers', {}) self.completed = self.state.setdefault('completed', {}) @@ -293,32 +282,10 @@ class Tracker(object): self.reannounce_interval = config['reannounce_interval'] self.save_dfile_interval = config['save_dfile_interval'] self.show_names = config['show_names'] - rawserver.add_task(self.save_dfile, self.save_dfile_interval) + reactor.callLater(self.save_dfile_interval, self.save_dfile) self.prevtime = time() self.timeout_downloaders_interval = config['timeout_downloaders_interval'] - rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval) - self.logfile = None - self.log = None - if 
(config['logfile'] != '') and (config['logfile'] != '-'): - try: - self.logfile = config['logfile'] - self.log = open(self.logfile,'a') - sys.stdout = self.log - print _("# Log Started: "), isotime() - except: - print _("**warning** could not redirect stdout to log file: "), sys.exc_info()[0] - - if config['hupmonitor']: - def huphandler(signum, frame, self = self): - try: - self.log.close () - self.log = open(self.logfile,'a') - sys.stdout = self.log - print _("# Log reopened: "), isotime() - except: - print _("**warning** could not reopen logfile") - - signal.signal(signal.SIGHUP, huphandler) + reactor.callLater(self.timeout_downloaders_interval, self.expire_downloaders) self.allow_get = config['allow_get'] @@ -347,99 +314,6 @@ class Tracker(object): return is_valid_ipv4(given_ip) and ( not self.only_local_override_ip or is_local_ip(ip) ) - def get_infopage(self): - try: - if not self.config['show_infopage']: - return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas) - red = self.config['infopage_redirect'] - if red != '': - return (302, 'Found', {'Content-Type': 'text/html', 'Location': red}, - '<A HREF="'+red+'">Click Here</A>') - - s = StringIO() - s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \ - '<html><head><title>BitTorrent download info</title>\n') - if self.favicon is not None: - s.write('<link rel="shortcut icon" href="/favicon.ico">\n') - s.write('</head>\n<body>\n' \ - '<h3>BitTorrent download info</h3>\n'\ - '<ul>\n' - '<li><strong>tracker version:</strong> %s</li>\n' \ - '<li><strong>server time:</strong> %s</li>\n' \ - '</ul>\n' % (version, isotime())) - if self.allowed is not None: - if self.show_names: - names = [ (value['name'], infohash) - for infohash, value in self.allowed.iteritems()] - else: - names = [(None, infohash) for infohash in self.allowed] - else: - names = [ (None, infohash) for infohash in self.downloads] - if not names: - s.write('<p>not 
tracking any files yet...</p>\n') - else: - names.sort() - tn = 0 - tc = 0 - td = 0 - tt = 0 # Total transferred - ts = 0 # Total size - nf = 0 # Number of files displayed - if self.allowed is not None and self.show_names: - s.write('<table summary="files" border="1">\n' \ - '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n') - else: - s.write('<table summary="files">\n' \ - '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n') - for name, infohash in names: - l = self.downloads[infohash] - n = self.completed.get(infohash, 0) - tn = tn + n - c = self.seedcount[infohash] - tc = tc + c - d = len(l) - c - td = td + d - nf = nf + 1 - if self.allowed is not None and self.show_names: - if self.allowed.has_key(infohash): - sz = self.allowed[infohash]['length'] # size - ts = ts + sz - szt = sz * n # Transferred for this torrent - tt = tt + szt - if self.allow_get == 1: - linkname = '<a href="/file?info_hash=' + quote(infohash) + '">' + name + '</a>' - else: - linkname = name - s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \ - % (b2a_hex(infohash), linkname, size_format(sz), c, d, n, size_format(szt))) - else: - s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \ - % (b2a_hex(infohash), c, d, n)) - ttn = 0 - for i in self.completed.values(): - ttn = ttn + i - if self.allowed is not None and self.show_names: - s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n' 
- % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt))) - else: - s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n' - % (nf, tc, td, tn, ttn)) - s.write('</table>\n' \ - '<ul>\n' \ - '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \ - '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \ - '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \ - '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \ - '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \ - '</ul>\n') - - s.write('</body>\n' \ - '</html>\n') - return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue()) - except: - print_exc() - return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error') - def scrapedata(self, infohash, return_name = True): l = self.downloads[infohash] n = self.completed.get(infohash, 0) @@ -450,57 +324,6 @@ class Tracker(object): f['name'] = self.allowed[infohash]['name'] return (f) - def get_scrape(self, paramslist): - fs = {} - if paramslist.has_key('info_hash'): - if self.config['scrape_allowed'] not in ['specific', 'full']: - return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, - bencode({'failure reason': - _("specific scrape function is not available with this tracker.")})) - for infohash in paramslist['info_hash']: - if self.allowed is not None and infohash not in self.allowed: - continue - if infohash in self.downloads: - fs[infohash] = self.scrapedata(infohash) - else: - if self.config['scrape_allowed'] != 'full': - return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, - bencode({'failure reason': - _("full scrape function is not available with this tracker.")})) - if 
self.allowed is not None: - hashes = self.allowed - else: - hashes = self.downloads - for infohash in hashes: - fs[infohash] = self.scrapedata(infohash) - - return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs})) - - def get_file(self, infohash): - if not self.allow_get: - return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, - _("get function is not available with this tracker.")) - if not self.allowed.has_key(infohash): - return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas) - fname = self.allowed[infohash]['file'] - fpath = self.allowed[infohash]['path'] - return (200, 'OK', {'Content-Type': 'application/x-bittorrent', - 'Content-Disposition': 'attachment; filename=' + fname}, - open(fpath, 'rb').read()) - - def check_allowed(self, infohash, paramslist): - if self.allowed is not None: - if not self.allowed.has_key(infohash): - return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, - bencode({'failure reason': - _("Requested download is not authorized for use with this tracker.")})) - if self.config['allowed_controls']: - if self.allowed[infohash].has_key('failure reason'): - return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, - bencode({'failure reason': self.allowed[infohash]['failure reason']})) - - return None - def add_data(self, infohash, event, ip, paramslist): peers = self.downloads.setdefault(infohash, {}) ts = self.times.setdefault(infohash, {}) @@ -558,7 +381,7 @@ class Tracker(object): peer['nat'] = 0 self.natcheckOK(infohash,myid,ip1,port,left) else: - NatCheck(self.connectback_result,infohash,myid,ip1,port,self.rawserver) + NatCheck(self.connectback_result,infohash,myid,ip1,port) else: peer['nat'] = 2**30 if event == 'completed': @@ -613,7 +436,7 @@ class Tracker(object): recheck = True if recheck: - NatCheck(self.connectback_result,infohash,myid,ip1,port,self.rawserver) + 
NatCheck(self.connectback_result,infohash,myid,ip1,port) return rsize @@ -682,74 +505,6 @@ class Tracker(object): data['peers'] = peerdata return data - def get(self, connection, path, headers): - ip = connection.get_ip() - - nip = get_forwarded_ip(headers) - if nip and not self.only_local_override_ip: - ip = nip - - paramslist = {} - def params(key, default = None, l = paramslist): - if l.has_key(key): - return l[key][0] - return default - - try: - (scheme, netloc, path, pars, query, fragment) = urlparse(path) - if self.uq_broken == 1: - path = path.replace('+',' ') - query = query.replace('+',' ') - path = unquote(path)[1:] - for s in query.split('&'): - if s != '': - i = s.index('=') - kw = unquote(s[:i]) - paramslist.setdefault(kw, []) - paramslist[kw] += [unquote(s[i+1:])] - - if path == '' or path == 'index.html': - return self.get_infopage() - if path == 'scrape': - return self.get_scrape(paramslist) - if (path == 'file'): - return self.get_file(params('info_hash')) - if path == 'favicon.ico' and self.favicon is not None: - return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon) - if path != 'announce': - return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas) - - # main tracker function - infohash = params('info_hash') - if not infohash: - raise ValueError, 'no info hash' - - notallowed = self.check_allowed(infohash, paramslist) - if notallowed: - return notallowed - - event = params('event') - - rsize = self.add_data(infohash, event, ip, paramslist) - - except ValueError, e: - return (400, 'Bad Request', {'Content-Type': 'text/plain'}, - 'you sent me garbage - ' + str(e)) - - if params('compact'): - return_type = 2 - elif params('no_peer_id'): - return_type = 1 - else: - return_type = 0 - - data = self.peerlist(infohash, event=='stopped', not params('left'), - return_type, rsize) - - if paramslist.has_key('scrape'): - data['scrape'] = self.scrapedata(infohash, False) - - return (200, 'OK', {'Content-Type': 
'text/plain', 'Pragma': 'no-cache'}, bencode(data)) def natcheckOK(self, infohash, peerid, ip, port, not_seed): bc = self.becache.setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]]) @@ -760,9 +515,9 @@ class Tracker(object): def natchecklog(self, peerid, ip, port, result): year, month, day, hour, minute, second, a, b, c = localtime(time()) - print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % ( - ip, quote(peerid), day, months[month], year, hour, minute, second, - ip, port, result) + log.msg('%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % ( + ip, quote(peerid), day, months[month], year, hour, minute, second, + ip, port, result)) def connectback_result(self, result, downloadid, peerid, ip, port): record = self.downloads.get(downloadid, {}).get(peerid) @@ -789,13 +544,16 @@ class Tracker(object): record['nat'] += 1 def save_dfile(self): - self.rawserver.add_task(self.save_dfile, self.save_dfile_interval) + # need to arrange for this to be called just before shutdown + log.msg('save_dfile') + reactor.callLater(self.save_dfile_interval, self.save_dfile) h = open(self.dfile, 'wb') h.write(bencode(self.state)) h.close() def parse_allowed(self): - self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval) + log.msg('parse_allowed') + reactor.callLater(self.parse_dir_interval, self.parse_allowed) # logging broken .torrent files would be useful but could confuse # programs parsing log files, so errors are just ignored for now @@ -828,6 +586,8 @@ class Tracker(object): del dls[peerid] def expire_downloaders(self): + log.msg('expire_downloaders') + reactor.callLater(self.timeout_downloaders_interval, self.expire_downloaders) for infohash, peertimes in self.times.items(): for myid, t in peertimes.items(): if t < self.prevtime: @@ -840,26 +600,267 @@ class Tracker(object): del self.times[key] del self.downloads[key] del self.seedcount[key] - self.rawserver.add_task(self.expire_downloaders, 
self.timeout_downloaders_interval) -def track(args): - if len(args) == 0: - print formatDefinitions(defaults, 80) - return - try: - config, files = parseargs(args, defaults, 0, 0) - except ValueError, e: - print _("error: ") + str(e) - print _("run with no arguments for parameter explanations") - return - file(config['pid'], 'w').write(str(os.getpid())) - r = RawServer(Event(), config) - t = Tracker(config, r) - s = r.create_serversocket(config['port'], config['bind'], True) - r.start_listening(s, HTTPHandler(t.get, config['min_time_between_log_flushes'])) - r.listen_forever() - t.save_dfile() - print _("# Shutting down: ") + isotime() +class InfoPage(Resource): + def __init__(self, tracker): + Resource.__init__(self) + self.tracker = tracker + + def getChild(self, name, request): + if name in ['', 'index.html', 'index.htm']: + return self + return Resource.getChild(self, name, request) + + def render_GET(self, request): + try: + if not self.tracker.config['show_infopage']: + request.setResponseCode(404, 'Not Found') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return alas + + red = self.tracker.config['infopage_redirect'] + if red != '': + request.redirect(red) + request.finish() + return server.NOT_DONE_YET + + request.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \ + '<html><head><title>BitTorrent download info</title>\n') + if self.tracker.favicon is not None: + request.write('<link rel="shortcut icon" href="/favicon.ico">\n') + request.write('</head>\n<body>\n' \ + '<h3>BitTorrent download info</h3>\n'\ + '<ul>\n' + '<li><strong>tracker version:</strong> %s</li>\n' \ + '<li><strong>server time:</strong> %s</li>\n' \ + '</ul>\n' % (version, isotime())) + if self.tracker.allowed is not None: + if self.tracker.show_names: + names = [ (value['name'], infohash) + for infohash, value in self.tracker.allowed.iteritems()] + else: + names = [(None, infohash) 
for infohash in self.tracker.allowed] + else: + names = [ (None, infohash) for infohash in self.tracker.downloads] + if not names: + request.write('<p>not tracking any files yet...</p>\n') + else: + names.sort() + tn = 0 + tc = 0 + td = 0 + tt = 0 # Total transferred + ts = 0 # Total size + nf = 0 # Number of files displayed + if self.tracker.allowed is not None and self.tracker.show_names: + request.write('<table summary="files" border="1">\n' \ + '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n') + else: + request.write('<table summary="files">\n' \ + '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n') + for name, infohash in names: + l = self.tracker.downloads[infohash] + n = self.tracker.completed.get(infohash, 0) + tn = tn + n + c = self.tracker.seedcount[infohash] + tc = tc + c + d = len(l) - c + td = td + d + nf = nf + 1 + if self.tracker.allowed is not None and self.tracker.show_names: + if self.tracker.allowed.has_key(infohash): + sz = self.tracker.allowed[infohash]['length'] # size + ts = ts + sz + szt = sz * n # Transferred for this torrent + tt = tt + szt + if self.tracker.allow_get == 1: + linkname = '<a href="/file?info_hash=' + quote(infohash) + '">' + name + '</a>' + else: + linkname = name + request.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \ + % (infohash.encode('hex'), linkname, size_format(sz), c, d, n, size_format(szt))) + else: + request.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \ + % (infohash.encode('hex'), c, d, n)) + ttn = 0 + for i in 
self.tracker.completed.values(): + ttn = ttn + i + if self.tracker.allowed is not None and self.tracker.show_names: + request.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n' + % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt))) + else: + request.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n' + % (nf, tc, td, tn, ttn)) + request.write('</table>\n' \ + '<ul>\n' \ + '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \ + '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \ + '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \ + '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \ + '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \ + '</ul>\n') + + request.write('</body>\n' \ + '</html>\n') + request.finish() + return server.NOT_DONE_YET + + except: + request.setResponseCode(500, 'Internal Server Error') + log.err() + return 'Server Error' + +class Scrape(Resource): + isLeaf = True + + def __init__(self, tracker): + Resource.__init__(self) + self.tracker = tracker + + def render_GET(self, request): + fs = {} + if request.args.has_key('info_hash'): + if self.tracker.config['scrape_allowed'] not in ['specific', 'full']: + request.setResponseCode(400, 'Not Authorized') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return bencode({'failure reason': + _("specific scrape function is not available with this tracker.")}) + for infohash in request.args['info_hash']: + if self.tracker.allowed is not None and infohash not in self.tracker.allowed: + continue + if infohash in self.tracker.downloads: + fs[infohash] = 
self.tracker.scrapedata(infohash) + else: + if self.tracker.config['scrape_allowed'] != 'full': + request.setResponseCode(400, 'Not Authorized') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return bencode({'failure reason': + _("full scrape function is not available with this tracker.")}) + if self.tracker.allowed is not None: + hashes = self.tracker.allowed + else: + hashes = self.tracker.downloads + for infohash in hashes: + fs[infohash] = self.tracker.scrapedata(infohash) + + request.setHeader('Content-Type', 'text/plain') + return bencode({'files': fs}) + +class File(Resource): + isLeaf = True + + def __init__(self, tracker): + Resource.__init__(self) + self.tracker = tracker + + def render_GET(self, request): + if not self.tracker.allow_get: + request.setResponseCode(400, 'Not Authorized') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return _("get function is not available with this tracker.") + + infohash = None + if request.args.has_key('info_hash'): + infohash = request.args['info_hash'][0] + + if not self.tracker.allowed.has_key(infohash): + request.setResponseCode(400, 'Not Authorized') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return alas + + fname = self.tracker.allowed[infohash]['file'] + fpath = self.tracker.allowed[infohash]['path'] + + request.setHeader('Content-Type', 'application/x-bittorrent') + request.setHeader('Content-Disposition', 'attachment; filename=' + fname) + return open(fpath, 'rb').read() + +class Announce(Resource): + isLeaf = True + + def __init__(self, tracker): + Resource.__init__(self) + self.tracker = tracker + + def render_GET(self, request): + ip = request.getClientIP() + + nip = get_forwarded_ip(request) + if nip and not self.tracker.only_local_override_ip: + ip = nip + + infohash = request.args.get('info_hash', [None])[0] + + if infohash is None: + request.setResponseCode(400,
'Bad Request') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return 'info_hash not specified' + + if self.tracker.allowed is not None: + if not self.tracker.allowed.has_key(infohash): + # is 200 really right? + request.setResponseCode(200, 'Not Authorized') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return bencode({'failure reason': + _("Requested download is not authorized for use with this tracker.")}) + + if self.tracker.config['allowed_controls']: + if self.tracker.allowed[infohash].has_key('failure reason'): + # is 200 really right? + request.setResponseCode(200, 'Not Authorized') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return bencode({'failure reason': self.tracker.allowed[infohash]['failure reason']}) + + event = request.args.get('event', [None])[0] + + rsize = self.tracker.add_data(infohash, event, ip, request.args) + + compact = request.args.get('compact', [None])[0] + + no_peer_id = request.args.get('no_peer_id', [None])[0] + + if compact: + return_type = 2 + elif no_peer_id: + return_type = 1 + else: + return_type = 0 + + left = request.args.get('left', [None])[0] + + data = self.tracker.peerlist(infohash, event == 'stopped', not left, return_type, rsize) + + if request.args.has_key('scrape'): + data['scrape'] = self.tracker.scrapedata(infohash, False) + + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return bencode(data) + +class FavIcon(Resource): + isLeaf = True + + def __init__(self, tracker): + Resource.__init__(self) + self.tracker = tracker + + def render_GET(self, request): + if self.tracker.favicon is None: + request.setResponseCode(404, 'Not Found') + request.setHeader('Content-Type', 'text/plain') + request.setHeader('Pragma', 'no-cache') + return 'Not Found!' 
+ + request.setHeader('Content-Type', 'image/x-icon') + return self.tracker.favicon def size_format(s): if (s < 1024): |