author     Hedayat Vatankhah <hedayat.fwd@gmail.com>  2014-03-30 02:40:00 +0430
committer  Hedayat Vatankhah <hedayat.fwd@gmail.com>  2014-03-30 02:40:00 +0430
commit     43c60dbb5b5f57596e1559c7f52b0ef519399671 (patch)
tree       1cf16b167754f76fa92204724e364f033e318a9f
parent     2958f9662962d9d2e72aef72c7eb51684563b016 (diff)
download   yum-fast-downloader-43c60dbb5b5f57596e1559c7f52b0ef519399671.tar.gz
           yum-fast-downloader-43c60dbb5b5f57596e1559c7f52b0ef519399671.tar.xz
           yum-fast-downloader-43c60dbb5b5f57596e1559c7f52b0ef519399671.zip
Integrate with URLGrabber and support latest Yum versions
- support recent Yum versions
- integrate with urlgrabber to download all files
- new configuration option for specifying arbitrary arguments for aria2c
-rw-r--r--   README                      12
-rwxr-xr-x   urlgrabber-ext-down-yfd    126
-rw-r--r--   yum-fast-downloader.conf     2
-rw-r--r--   yum-fast-downloader.py     204
-rw-r--r--   yum-fast-downloader.spec    10

5 files changed, 175 insertions, 179 deletions
diff --git a/README b/README
index 313e1e1..60a5bcb 100644
--- a/README
+++ b/README
@@ -2,24 +2,22 @@ Yum Fast Downloader Plugin
==========================
This plugin intends to speedup yum downloads by using a download manager
application (currently Aria 2) for downloading files instead of URLGrabber
-which is used by yum.
+which is used by yum.
Currently, it provides the following features:
* Downloading repository metadata from several mirrors simultaneously
* Downloading multiple packages in parallel from several mirrors
-* Supports HTTP/HTTPS proxies
+* Downloading delta rpms
* Using fastest mirrors for downloading
-Currently, it doesn't support downloading delta rpms and other kinds of files
-
Installation
============
All you need to do is to put yum-fast-downloader.py into /usr/lib/yum-plugins
-and yum-fast-downloader.conf in /etc/yum/pluginconf.d/
+and yum-fast-downloader.conf in /etc/yum/pluginconf.d/.
+Also put yum-fast-downloader-finalize and urlgrabber-ext-down-yfd into /usr/libexec/.
Future Plans
============
-* Support downloading delta packages
-* Support downloading other files
+* Support for HTTP/HTTPS proxies
* Support other download managers
* What else?!
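
The installation steps in the README above amount to copying four files into place. A minimal sketch, assuming it is run as root from the unpacked source tree (the file names and destinations are the ones named in the README and in the spec file at the end of this commit):

    #!/usr/bin/python
    # Sketch: install the plugin files to the paths the README names.
    # shutil.copy also copies the permission bits, so the two helper
    # scripts keep their executable mode.
    import shutil

    shutil.copy("yum-fast-downloader.py", "/usr/lib/yum-plugins/")
    shutil.copy("yum-fast-downloader.conf", "/etc/yum/pluginconf.d/")
    shutil.copy("yum-fast-downloader-finalize", "/usr/libexec/")
    shutil.copy("urlgrabber-ext-down-yfd", "/usr/libexec/")
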
diff --git a/urlgrabber-ext-down-yfd b/urlgrabber-ext-down-yfd
new file mode 100755
index 0000000..92e061c
--- /dev/null
+++ b/urlgrabber-ext-down-yfd
@@ -0,0 +1,126 @@
+#! /usr/bin/python -tt
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+#
+# Copyright Hedayat Vatankhah, 2012.
+#
+# Author: Hedayat Vatankhah <hedayat.fwd@gmail.com>
+#
+# * Used the code of original external downloader of urlgrabber:
+# Copyright 2011-2012 Zdenek Pavlas
+
+import time, errno, sys
+import os
+import shutil
+import yum
+import subprocess
+from urlgrabber.grabber import _ExternalDownloader
+
+from urlgrabber.grabber import \
+ _readlines, URLGrabberOptions, _loads, \
+ URLGrabError
+
+# variables
+global_cache_dir='/var/cache/yum'
+downloader_common_args = []
+
+# downloader functions
+
+def _getAria2CArgs(urls, remote_path, local_path, proxies = None):
+    args = []
+    args += downloader_common_args
+    args.append("--out={0}.yfd".format(os.path.basename(local_path)))
+    args.append("--dir={0}".format(os.path.dirname(local_path)))
+    args.append("--split={0}".format(max(5, len(urls))))
+    if proxies:
+        for (scheme, proxy) in proxies.items():
+            if not proxy.startswith('ftp://'):
+                args.append("--{0}-proxy={1}".format(scheme, proxy))
+
+    for url in urls:
+        args.append(url + remote_path)
+
+    return args
+
+def downloadFile(urls, remote_path, local_path, proxies = None):
+    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
+        args = _getAria2CArgs(urls, remote_path, local_path)
+        popen = subprocess.Popen(args, stdout=2)
+        popen.wait()
+        i = 0
+        while not os.path.exists(local_path):
+            time.sleep(0.1)
+            i += 1
+            if i > 50:
+                os.write(2, "Error in downloading file: {0}".format(local_path))
+                raise URLGrabError(2, "Download failed")
+    else:
+        shutil.copyfile(urls[0][len("file:"):] + remote_path, local_path)
+
+def configure():
+    global global_cache_dir
+    global downloader_common_args
+
+    global_cache_dir = sys.argv[1]
+    downloader_common_args = sys.argv[2:]
+
+def write(fmt, *arg):
+    try: os.write(1, fmt % arg)
+    except OSError, e:
+        if e.args[0] != errno.EPIPE: raise
+        sys.exit(1)
+
+class ProxyProgress:
+    def start(self, *d1, **d2):
+        self.next_update = 0
+    def update(self, _amount_read):
+        t = time.time()
+        if t < self.next_update: return
+        self.next_update = t + 0.31
+        write('%d %d\n', self._id, _amount_read)
+
+def main():
+    import signal
+    signal.signal(signal.SIGINT, lambda n, f: sys.exit(1))
+    configure()
+    cnt = 0
+    while True:
+        lines = _readlines(0)
+        if not lines: break
+        for line in lines:
+            cnt += 1
+            opts = URLGrabberOptions()
+            opts._id = cnt
+            for k in line.split(' '):
+                k, v = k.split('=', 1)
+                setattr(opts, k, _loads(v))
+            if opts.progress_obj:
+                opts.progress_obj = ProxyProgress()
+                opts.progress_obj._id = cnt
+
+            dlsz = dltm = 0
+            try:
+                downloadFile(opts.urls, opts.relative_url, opts.filename)
+                size = 0
+                # dlsz = fo._tm_last[0] - fo._tm_first[0]
+                # dltm = fo._tm_last[1] - fo._tm_first[1]
+                ug_err = 'OK'
+            except URLGrabError, e:
+                size = 0
+                ug_err = '%d %d %s' % (e.errno, getattr(e, 'code', 0), e.strerror)
+            write('%d %d %d %.3f %s\n', opts._id, size, dlsz, dltm, ug_err)
+
+if __name__ == '__main__':
+    main()
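
For context: urlgrabber's external-downloader protocol, which the script above implements, passes one request per line on the child's stdin as space-separated key=value pairs, and expects one whitespace-separated result line per download on stdout. A hand-driven sketch, assuming urlgrabber's internal _dumps helper (the counterpart of the _loads imported above) and illustrative URLs and paths:

    # Sketch: drive urlgrabber-ext-down-yfd by hand, the way urlgrabber's
    # _ExternalDownloader does. argv[1] is the cache dir read by configure();
    # the remaining arguments become downloader_common_args (the aria2c
    # command line). The mirror URL and file names are illustrative.
    import subprocess
    from urlgrabber.grabber import _dumps

    p = subprocess.Popen(
        ["/usr/libexec/urlgrabber-ext-down-yfd", "/var/cache/yum",
         "aria2c", "--continue"],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    request = ' '.join([
        "urls=" + _dumps(["http://mirror.example.org/fedora/"]),
        "relative_url=" + _dumps("repodata/repomd.xml"),
        "filename=" + _dumps("/tmp/repomd.xml"),
    ])
    p.stdin.write(request + "\n")
    p.stdin.flush()
    print p.stdout.readline()   # "<id> <size> <dlsz> <dltm> <status>", e.g. "1 0 0 0.000 OK"
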
diff --git a/yum-fast-downloader.conf b/yum-fast-downloader.conf
index 0393122..fe6002c 100644
--- a/yum-fast-downloader.conf
+++ b/yum-fast-downloader.conf
@@ -1,9 +1,11 @@
[main]
enabled=1
+[aria2c]
# Options controlling the behavior:
#
# max-concurrent-downloads=10
# min_split_size=1M
# max_connection_per_server=5
+# additional_args=-q
#
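
Spelled out, a full configuration using the new [aria2c] section might look like this; the first three values are the defaults that postconfig_hook reads (see the plugin diff below), and additional_args is illustrative:

    [main]
    enabled=1

    [aria2c]
    max-concurrent-downloads=10
    min_split_size=1M
    max_connection_per_server=5
    additional_args=-q
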
diff --git a/yum-fast-downloader.py b/yum-fast-downloader.py
index 010be25..c3fe043 100644
--- a/yum-fast-downloader.py
+++ b/yum-fast-downloader.py
@@ -24,163 +24,39 @@
#
import os
-import shutil
import yum
+import subprocess
from yum.plugins import TYPE_CORE
-from yum.yumRepo import YumRepository
-from yum import Errors
-from yum import URLGrabError
-from time import sleep
+from urlgrabber.grabber import _ExternalDownloader
requires_api_version = '2.6'
plugin_type = (TYPE_CORE,)
# variables
-originalRetrieveMD = YumRepository._retrieveMD
-downloader_app = ''
+originalExtStart = _ExternalDownloader.start
+downloader_app = 'aria2c'
global_cache_dir=''
downloader_common_args = {
    'aria2c' : ["aria2c", "--continue", "--check-certificate=false",
                "--on-download-complete=/usr/libexec/yum-fast-downloader-finalize"]
}
-# downloader functions
-
-def _getAria2CArgs(urls, remote_path, local_path, proxies = None):
-    args = []
-    args += downloader_common_args['aria2c']
-    args.append("--out={0}.yfd".format(os.path.basename(local_path)))
-    args.append("--dir={0}".format(os.path.dirname(local_path)))
-    args.append("--split={0}".format(max(5, len(urls))))
-    if proxies:
-        for (scheme, proxy) in proxies.items():
-            if not proxy.startswith('ftp://'):
-                args.append("--{0}-proxy={1}".format(scheme, proxy))
-
-    for url in urls:
-        args.append(url + remote_path)
-
-    return args;
-
-def downloadFile(urls, remote_path, local_path, proxies = None):
-    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
-        repo_name = os.path.basename(os.path.dirname(local_path))
-        print "\n============================================================" \
-            "====================\nDownloading ({0}) {1}\n=========="\
-            "==================================================================="\
-            "===".format(repo_name, remote_path)
-        args = _getAria2CArgs(urls, remote_path, local_path)
-        ret = os.spawnvp(os.P_WAIT, downloader_app, args)
-        if ret:
-            raise yum.plugins.PluginYumExit(
-                "{0} exited with exit code: {1}".format(downloader_app, ret))
-        i=0
-        while not os.path.exists(local_path):
-            sleep(0.1)
-            i+=1
-            if i > 50:
-                raise yum.plugins.PluginYumExit(
-                    "Error in downloading file: {0}".format(local_path))
-
-def queueDownload(inputFile, urls, remote_path, local_path, proxies = None):
-    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
-        h = ""
-        for url in urls:
-            h = h + url + remote_path + "\t"
-            if len(h) > 4080:
-                break;
-            inputFile.write(url + remote_path + "\t")
-        inputFile.write("\n")
-        inputFile.write(" out={0}.yfd\n".format(os.path.basename(local_path)))
-        inputFile.write(" dir={0}\n".format(os.path.dirname(local_path)))
-        inputFile.write(" split={0}\n".format(max(5, len(urls))))
-
-        if proxies:
-            for (scheme, proxy) in proxies.items():
-                if not proxy.startswith('ftp://'):
-                    inputFile.write(" {0}-proxy={1}\n".format(scheme, proxy))
-
-def downloadQueuedFiles(inputFileName):
-    args = []
-    args += downloader_common_args['aria2c']
-    args.append("--input-file={0}".format(inputFileName))
-    ret = os.spawnvp(os.P_WAIT, "aria2c", args)
-    sleep(1)
-    if ret:
-        raise yum.plugins.PluginYumExit(
-            "{0} exited with exit code: {1}".format(downloader_app, ret))
-
# Hooks!
-# Written looking at yum's _retrieveMD implementation. Most parts have
-# beed directly grabbed from that.
-def myRetrieveMD(self, mdtype, retrieve_can_fail=False, **kwargs):
-    """ replace's yum's default _retrieveMD function to use a downloader """
-    # calls original function on return to make sure that everything is OK
-    thisdata = self.repoXML.getData(mdtype)
-
-    (r_base, remote) = thisdata.location
-    fname = os.path.basename(remote)
-    local = self.cachedir + '/' + fname
-
-    if self.retrieved.get(mdtype):
-        # got it, move along #maybe "local"
-        return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
-
-    if self.cache == 1:
-        if os.path.exists(local):
-            try:
-                self.checkMD(local, mdtype)
-            except URLGrabError, e:
-                raise Errors.RepoError, \
-                    "Caching enabled and local cache: %s does not match checksum" % local
-            else:
-                return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
-
-        else: # ain't there - raise
-            raise Errors.RepoError, \
-                "Caching enabled but no local cache of %s from %s" % (local,
-                                                                      self)
-
-    if (os.path.exists(local) or
-        self._preload_md_from_system_cache(os.path.basename(local))):
-        if self._checkMD(local, mdtype, check_can_fail=True):
-            self.retrieved[mdtype] = 1
-            # it's the same return the local one
-            return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
-
-    try:
-        def checkfunc(obj):
-            self.checkMD(obj, mdtype)
-            self.retrieved[mdtype] = 1
-        text = "%s/%s" % (self.id, mdtype)
-        if thisdata.size is None:
-            reget = None
-        else:
-            reget = 'simple'
-            if os.path.exists(local):
-                if os.stat(local).st_size >= int(thisdata.size):
-                    misc.unlink_f(local)
-        downloadFile(self.urls, remote, local, self.proxy_dict)
-#        local = self._getFile(relative=remote,
-#                              local=local,
-#                              copy_local=1,
-#                              reget=reget,
-#                              checkfunc=checkfunc,
-#                              text=text,
-#                              cache=self.http_caching == 'all',
-#                              size=thisdata.size,
-#                              **kwargs)
-    except Errors.RepoError:
-        if retrieve_can_fail:
-            return None
-        raise
-    except URLGrabError, e:
-        if retrieve_can_fail:
-            return None
-        raise Errors.RepoError, \
-            "Could not retrieve %s matching remote checksum from %s" % (local, self)
-    else:
-        return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
+def myExtDownloaderInit(self):
+    self.popen = subprocess.Popen(
+        ['/usr/libexec/urlgrabber-ext-down-yfd', global_cache_dir] +
+        downloader_common_args[downloader_app],
+        stdin = subprocess.PIPE,
+        stdout = subprocess.PIPE,
+    )
+    self.stdin = self.popen.stdin.fileno()
+    self.stdout = self.popen.stdout.fileno()
+    self.running = {}
+    self.cnt = 0
+
+def myExtDownloaderStart(self, opts):
+    opts.urls = [m['mirror'] for m in opts.mirror_group[0].mirrors]
+    originalExtStart(self, opts)
def postconfig_hook(conduit):
    global downloader_app
@@ -188,12 +64,14 @@ def postconfig_hook(conduit):
    global downloader_common_args
    downloader_app = conduit.confString('main', 'downloader', default='aria2c')
    global_cache_dir = conduit.getConf().cachedir
-    max_concurrent_downloads = conduit.confString('main',
+    max_concurrent_downloads = conduit.confString('aria2c',
                                  'max-concurrent-downloads', default='10')
-    min_split_size = conduit.confString('main',
+    min_split_size = conduit.confString('aria2c',
                         'min_split_size', default='1M')
-    max_connection_per_server = conduit.confString('main',
+    max_connection_per_server = conduit.confString('aria2c',
                                    'max_connection_per_server', default='5')
+    aria2_additional_args = conduit.confString('aria2c', 'additional_args',
+                                               default='')
    # append aria2c options
    downloader_common_args['aria2c'].append(
@@ -206,35 +84,19 @@ def postconfig_hook(conduit):
        "--min-split-size={0}".format(min_split_size))
    downloader_common_args['aria2c'].append(
        "--max-connection-per-server={0}".format(max_connection_per_server))
+    if aria2_additional_args:
+        downloader_common_args['aria2c'] += aria2_additional_args.split(' ')

def prereposetup_hook(conduit):
-    ret_insmethod = type(YumRepository._retrieveMD)
-    YumRepository._retrieveMD = ret_insmethod(myRetrieveMD, None, YumRepository)
+    downloader_insmethod = type(_ExternalDownloader.__init__)
+    _ExternalDownloader.__init__ = downloader_insmethod(myExtDownloaderInit, None, _ExternalDownloader)
+    _ExternalDownloader._options += ('urls', 'relative_url')
+
+    start_method = type(_ExternalDownloader.start)
+    _ExternalDownloader.start = start_method(myExtDownloaderStart, None, _ExternalDownloader)
+    _ExternalDownloader.repos = conduit.getRepos().listEnabled()

#    repos = conduit.getRepos()
#    for repo in repos.listEnabled():
#        print "name: %s" % repo.name
#        print "URLS: %s" % repo.urls
#        repo._retrieveMD = testRetrieveMD
-
-def predownload_hook(conduit):
-    # it is also called on remove!
-    if not conduit.getDownloadPackages():
-        return
-    inputFileName = "{0}/aria.input".format(global_cache_dir)
-    ariaInputFile = open(inputFileName, "w")
-    for pkg in conduit.getDownloadPackages():
-        local = pkg.localPkg()
-        if os.path.exists(local):
-            if pkg.verifyLocalPkg():
-                conduit.info(5, "Already have package for %s.%s."
-                             % (pkg.name, pkg.arch))
-                continue
-
-#        download_dir = os.path.join(pkg.repo.cachedir, 'alternative_downloader_cache')
-#        conduit.info(2, "Getting: {0} into {1}".format(pkg.remote_url, local))
-#        downloadFile(pkg.repo.urls, pkg.remote_path, local, pkg.repo.proxy_dict)
-        queueDownload(ariaInputFile, pkg.repo.urls, pkg.remote_path,
-                      local, pkg.repo.proxy_dict)
-    ariaInputFile.close()
-    downloadQueuedFiles(inputFileName)
-    os.unlink(inputFileName)
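
A note on prereposetup_hook above: on Python 2, type(_ExternalDownloader.start) is the instancemethod type, so calling it with (function, None, class) builds an unbound method that can be assigned back onto the class, swapping the implementation while a module-level reference (originalExtStart) keeps the old one callable. A minimal self-contained sketch of the idiom, with illustrative names:

    # Sketch of the Python 2 method-rebinding idiom used by the plugin.
    class Downloader(object):
        def start(self, job):
            print "original start:", job

    original_start = Downloader.start        # keep the old unbound method

    def my_start(self, job):
        print "patched start, delegating"
        original_start(self, job)            # call through to the original

    start_method = type(Downloader.start)    # <type 'instancemethod'>
    Downloader.start = start_method(my_start, None, Downloader)

    Downloader().start("repomd.xml")
    # patched start, delegating
    # original start: repomd.xml
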
diff --git a/yum-fast-downloader.spec b/yum-fast-downloader.spec
index 5a53da1..698c210 100644
--- a/yum-fast-downloader.spec
+++ b/yum-fast-downloader.spec
@@ -1,6 +1,6 @@
Summary: A fast downloader plugin for yum
Name: yum-fast-downloader
-Version: 0.5.12
+Version: 0.6.0
Release: 1
License: GPLv2+
Group: System Environment/Base
@@ -16,6 +16,7 @@ Currently, it provides the following features:
* Using fastest mirrors for downloading
* Downloading repository metadata from several mirrors simultaneously
* Downloading multiple packages in parallel from several mirrors
+* Downloading delta rpms
%prep
%setup -q
@@ -26,6 +27,7 @@ install -d -m 755 $RPM_BUILD_ROOT/usr/lib/yum-plugins \
install -m 644 %{name}.py $RPM_BUILD_ROOT/usr/lib/yum-plugins/
install -m 644 %{name}.conf $RPM_BUILD_ROOT/etc/yum/pluginconf.d/
install -m 755 %{name}-finalize $RPM_BUILD_ROOT/usr/libexec/
+install -m 755 urlgrabber-ext-down-yfd $RPM_BUILD_ROOT/usr/libexec/
%files
%config(noreplace) /etc/yum/pluginconf.d/%{name}.conf
@@ -34,6 +36,12 @@ install -m 755 %{name}-finalize $RPM_BUILD_ROOT/usr/libexec/
/usr/libexec/*
%changelog
+* Sat Mar 29 2014 Hedayat Vatankhah <hedayat.fwd+rpmchlog@gmail.com> - 0.6.0-1
+- Support recent versions of yum which integrate presto
+- Integrate with urlgrabber and register as an external downloader for it.
+ Now everything (except the initial repomd.xml files) is handled by YFD
+- Additional arguments for aria2c can be specified in config now
+
* Fri Jan 25 2013 Hedayat Vatankhah <hedayat.fwd+rpmchlog@gmail.com> - 0.5.12-1
- Import URLGrabError