diff options
| author | Izhar Firdaus <izhar@mitsuko.kagesenshi.org> | 2009-10-18 12:23:44 +0800 |
|---|---|---|
| committer | Izhar Firdaus <izhar@mitsuko.kagesenshi.org> | 2009-10-18 12:23:44 +0800 |
| commit | ea29f099a3aafd9783f768554341a9d1566605e0 (patch) | |
| tree | 4032cefbd06bde340ed2a274008bed92274d447f | |
| parent | a06c5d77e380f9ee163da8561aba47acd1786ff5 (diff) | |
| download | hack-patches-ea29f099a3aafd9783f768554341a9d1566605e0.tar.gz hack-patches-ea29f099a3aafd9783f768554341a9d1566605e0.tar.xz hack-patches-ea29f099a3aafd9783f768554341a9d1566605e0.zip | |
* modified customgrabber.py; it's now a yum plugin
| -rw-r--r-- | customgrabber.py | 123 | ||||
| -rw-r--r-- | fastestmirror-repomd-download-speed-timing.patch | 5 |
2 files changed, 75 insertions, 53 deletions
diff --git a/customgrabber.py b/customgrabber.py index 3c76d3a..9b9c3ab 100644 --- a/customgrabber.py +++ b/customgrabber.py @@ -1,54 +1,75 @@ # file: /usr/lib/python2.5/site-packages/urlgrabber/customgrabber.py -import grabber, sys, os -import subprocess -import urllib2 - -def get_filesize(url): - usock = urllib2.urlopen(url) - size = usock.info().get('Content-Length') - if size is None: - size = 0 - return float(size) - -class AxelGrabber(grabber.URLGrabber,object): - def urlgrab(self, url, filename=None, **kwargs): - """grab the file at and make a local copy at - If filename is none, the basename of the url is used. - urlgrab returns the filename of the local file, which may be - different from the passed-in filename if copy_local == 0. - """ - - opts = self.opts.derive(**kwargs) - (url,parts) = opts.urlparser.parse(url, opts) - (scheme, host, path, parm, query, frag) = parts - fsize = get_filesize(url) - if (fsize/1024) < 100: - parts = 1 - elif (fsize/1024) < 500: - parts = 2 - elif (fsize/1024/1024) < 1: - parts = 3 - elif (fsize/1024/1024) < 5: - parts = 4 - elif (fsize/1024/1024) < 10: - parts = 6 - elif (fsize/1024/1024) < 15: - parts = 8 - else: - parts = 10 - - if parts == 1: - return super (AxelGrabber, self).urlgrab(url, filename, **kwargs) - - def retryfunc(opts, url, filename, parts): - if os.path.exists(filename): - if not os.path.exists("%s.st" % filename): - os.unlink(filename) - p = subprocess.Popen(['/usr/bin/axel','-n','%s' % parts,'-a','-o',filename,url],stdout=sys.stdout,stderr=sys.stderr) - o = p.wait() - if o: - raise grabber.URLGrabError(-1) - return filename +from yum.plugins import TYPE_CORE + +requires_api_version = '2.5' +plugin_type = (TYPE_CORE,) + +def axel_grabber_patch(): + import urlgrabber.grabber as grabber + + if getattr(grabber,'_axel_grabber_patched',False): + return + + print "Applying customgrabber patch" + + import sys, os + import subprocess + import urllib2 + + + grabber.URLGrabber._orig_urlgrab = 
grabber.URLGrabber.urlgrab + + def get_filesize(url): + usock = urllib2.urlopen(url) + size = usock.info().get('Content-Length') + if size is None: + size = 0 + return float(size) + + def urlgrab(self, url, filename=None, **kwargs): + """grab the file at and make a local copy at + If filename is none, the basename of the url is used. + urlgrab returns the filename of the local file, which may be + different from the passed-in filename if copy_local == 0. + """ + + opts = self.opts.derive(**kwargs) + (url,parts) = opts.urlparser.parse(url, opts) + (scheme, host, path, parm, query, frag) = parts + fsize = get_filesize(url) + if (fsize/1024) < 100: + parts = 1 + elif (fsize/1024) < 500: + parts = 2 + elif (fsize/1024/1024) < 1: + parts = 3 + elif (fsize/1024/1024) < 5: + parts = 4 + elif (fsize/1024/1024) < 10: + parts = 6 + elif (fsize/1024/1024) < 15: + parts = 8 + else: + parts = 10 + + if parts == 1: + return self._orig_urlgrab(url, filename, **kwargs) - return self._retry(opts, retryfunc, url, filename, parts) + def retryfunc(opts, url, filename, parts): + if os.path.exists(filename): + if not os.path.exists("%s.st" % filename): + os.unlink(filename) + p = subprocess.Popen(['/usr/bin/axel','-n','%s' % parts,'-a','-o',filename,url],stdout=sys.stdout,stderr=sys.stderr) + o = p.wait() + if o: + raise grabber.URLGrabError(-1) + return filename + + return self._retry(opts, retryfunc, url, filename, parts) + + grabber.URLGrabber.urlgrab = urlgrab + grabber._axel_grabber_patched = True + +axel_grabber_patch() + diff --git a/fastestmirror-repomd-download-speed-timing.patch b/fastestmirror-repomd-download-speed-timing.patch index 3768fe2..167422f 100644 --- a/fastestmirror-repomd-download-speed-timing.patch +++ b/fastestmirror-repomd-download-speed-timing.patch @@ -1,5 +1,6 @@ ---- fastestmirror.py.old 2008-11-12 23:03:22.000000000 +0800 -+++ fastestmirror.py 2008-12-25 16:27:29.000000000 +0800 +diff -ruN yum-utils-1.1.18.old/plugins/fastestmirror/fastestmirror.py 
yum-utils-1.1.18/plugins/fastestmirror/fastestmirror.py +--- yum-utils-1.1.18.old/plugins/fastestmirror/fastestmirror.py 2008-12-25 16:45:42.000000000 +0800 ++++ yum-utils-1.1.18/plugins/fastestmirror/fastestmirror.py 2008-12-25 16:46:30.000000000 +0800 @@ -45,6 +45,7 @@ import urlparse import datetime |
