author     Hedayat Vatankhah <hedayat.fwd@gmail.com>   2014-03-30 02:40:00 +0430
committer  Hedayat Vatankhah <hedayat.fwd@gmail.com>   2014-03-30 02:40:00 +0430
commit     43c60dbb5b5f57596e1559c7f52b0ef519399671 (patch)
tree       1cf16b167754f76fa92204724e364f033e318a9f /yum-fast-downloader.py
parent     2958f9662962d9d2e72aef72c7eb51684563b016 (diff)
download   yum-fast-downloader-43c60dbb5b5f57596e1559c7f52b0ef519399671.tar.gz
           yum-fast-downloader-43c60dbb5b5f57596e1559c7f52b0ef519399671.tar.xz
           yum-fast-downloader-43c60dbb5b5f57596e1559c7f52b0ef519399671.zip
Integrate with URLGrabber and support latest Yum versions
- support recent Yum versions
- integrate with urlgrabber to download all files
- new configuration option for specifying arbitrary arguments for aria2c (an example configuration is sketched below)
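As a rough illustration of the new option: the aria2c tuning knobs read in postconfig_hook now live in an [aria2c] section of the plugin configuration, alongside the new additional_args key. A minimal configuration sketch follows; the file path /usr/... er, /etc/yum/pluginconf.d/yum-fast-downloader.conf and the sample aria2c flags are assumptions for illustration, not part of this commit.

# /etc/yum/pluginconf.d/yum-fast-downloader.conf (assumed conventional path)
[main]
enabled=1
downloader=aria2c

[aria2c]
max-concurrent-downloads=10
min_split_size=1M
max_connection_per_server=5
# new in this commit: extra arguments appended to the aria2c command line,
# split on single spaces before being passed through
additional_args=--file-allocation=none --timeout=60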
Diffstat (limited to 'yum-fast-downloader.py')
-rw-r--r--  yum-fast-downloader.py  204
1 file changed, 33 insertions, 171 deletions
diff --git a/yum-fast-downloader.py b/yum-fast-downloader.py
index 010be25..c3fe043 100644
--- a/yum-fast-downloader.py
+++ b/yum-fast-downloader.py
@@ -24,163 +24,39 @@
#
import os
-import shutil
import yum
+import subprocess
from yum.plugins import TYPE_CORE
-from yum.yumRepo import YumRepository
-from yum import Errors
-from yum import URLGrabError
-from time import sleep
+from urlgrabber.grabber import _ExternalDownloader
requires_api_version = '2.6'
plugin_type = (TYPE_CORE,)
# variables
-originalRetrieveMD = YumRepository._retrieveMD
-downloader_app = ''
+originalExtStart = _ExternalDownloader.start
+downloader_app = 'aria2c'
global_cache_dir=''
downloader_common_args = {
'aria2c' : ["aria2c", "--continue", "--check-certificate=false",
"--on-download-complete=/usr/libexec/yum-fast-downloader-finalize"]
}
-# downloader functions
-
-def _getAria2CArgs(urls, remote_path, local_path, proxies = None):
- args = []
- args += downloader_common_args['aria2c']
- args.append("--out={0}.yfd".format(os.path.basename(local_path)))
- args.append("--dir={0}".format(os.path.dirname(local_path)))
- args.append("--split={0}".format(max(5, len(urls))))
- if proxies:
- for (scheme, proxy) in proxies.items():
- if not proxy.startswith('ftp://'):
- args.append("--{0}-proxy={1}".format(scheme, proxy))
-
- for url in urls:
- args.append(url + remote_path)
-
- return args;
-
-def downloadFile(urls, remote_path, local_path, proxies = None):
- if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
- repo_name = os.path.basename(os.path.dirname(local_path))
- print "\n============================================================" \
- "====================\nDownloading ({0}) {1}\n=========="\
- "==================================================================="\
- "===".format(repo_name, remote_path)
- args = _getAria2CArgs(urls, remote_path, local_path)
- ret = os.spawnvp(os.P_WAIT, downloader_app, args)
- if ret:
- raise yum.plugins.PluginYumExit(
- "{0} exited with exit code: {1}".format(downloader_app, ret))
- i=0
- while not os.path.exists(local_path):
- sleep(0.1)
- i+=1
- if i > 50:
- raise yum.plugins.PluginYumExit(
- "Error in downloading file: {0}".format(local_path))
-
-def queueDownload(inputFile, urls, remote_path, local_path, proxies = None):
- if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
- h = ""
- for url in urls:
- h = h + url + remote_path + "\t"
- if len(h) > 4080:
- break;
- inputFile.write(url + remote_path + "\t")
- inputFile.write("\n")
- inputFile.write(" out={0}.yfd\n".format(os.path.basename(local_path)))
- inputFile.write(" dir={0}\n".format(os.path.dirname(local_path)))
- inputFile.write(" split={0}\n".format(max(5, len(urls))))
-
- if proxies:
- for (scheme, proxy) in proxies.items():
- if not proxy.startswith('ftp://'):
- inputFile.write(" {0}-proxy={1}\n".format(scheme, proxy))
-
-def downloadQueuedFiles(inputFileName):
- args = []
- args += downloader_common_args['aria2c']
- args.append("--input-file={0}".format(inputFileName))
- ret = os.spawnvp(os.P_WAIT, "aria2c", args)
- sleep(1)
- if ret:
- raise yum.plugins.PluginYumExit(
- "{0} exited with exit code: {1}".format(downloader_app, ret))
-
# Hooks!
-# Written looking at yum's _retrieveMD implementation. Most parts have
-# beed directly grabbed from that.
-def myRetrieveMD(self, mdtype, retrieve_can_fail=False, **kwargs):
- """ replace's yum's default _retrieveMD function to use a downloader """
- # calls original function on return to make sure that everything is OK
- thisdata = self.repoXML.getData(mdtype)
-
- (r_base, remote) = thisdata.location
- fname = os.path.basename(remote)
- local = self.cachedir + '/' + fname
-
- if self.retrieved.get(mdtype):
- # got it, move along #maybe "local"
- return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
-
- if self.cache == 1:
- if os.path.exists(local):
- try:
- self.checkMD(local, mdtype)
- except URLGrabError, e:
- raise Errors.RepoError, \
- "Caching enabled and local cache: %s does not match checksum" % local
- else:
- return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
-
- else: # ain't there - raise
- raise Errors.RepoError, \
- "Caching enabled but no local cache of %s from %s" % (local,
- self)
-
- if (os.path.exists(local) or
- self._preload_md_from_system_cache(os.path.basename(local))):
- if self._checkMD(local, mdtype, check_can_fail=True):
- self.retrieved[mdtype] = 1
- # it's the same return the local one
- return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
-
- try:
- def checkfunc(obj):
- self.checkMD(obj, mdtype)
- self.retrieved[mdtype] = 1
- text = "%s/%s" % (self.id, mdtype)
- if thisdata.size is None:
- reget = None
- else:
- reget = 'simple'
- if os.path.exists(local):
- if os.stat(local).st_size >= int(thisdata.size):
- misc.unlink_f(local)
- downloadFile(self.urls, remote, local, self.proxy_dict)
-# local = self._getFile(relative=remote,
-# local=local,
-# copy_local=1,
-# reget=reget,
-# checkfunc=checkfunc,
-# text=text,
-# cache=self.http_caching == 'all',
-# size=thisdata.size,
-# **kwargs)
- except Errors.RepoError:
- if retrieve_can_fail:
- return None
- raise
- except URLGrabError, e:
- if retrieve_can_fail:
- return None
- raise Errors.RepoError, \
- "Could not retrieve %s matching remote checksum from %s" % (local, self)
- else:
- return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)
+def myExtDownloaderInit(self):
+ self.popen = subprocess.Popen(
+ ['/usr/libexec/urlgrabber-ext-down-yfd', global_cache_dir] +
+ downloader_common_args[downloader_app],
+ stdin = subprocess.PIPE,
+ stdout = subprocess.PIPE,
+ )
+ self.stdin = self.popen.stdin.fileno()
+ self.stdout = self.popen.stdout.fileno()
+ self.running = {}
+ self.cnt = 0
+
+def myExtDownloaderStart(self, opts):
+ opts.urls = [m['mirror'] for m in opts.mirror_group[0].mirrors]
+ originalExtStart(self, opts)
def postconfig_hook(conduit):
global downloader_app
@@ -188,12 +64,14 @@ def postconfig_hook(conduit):
global downloader_common_args
downloader_app = conduit.confString('main', 'downloader', default='aria2c')
global_cache_dir = conduit.getConf().cachedir
- max_concurrent_downloads = conduit.confString('main',
+ max_concurrent_downloads = conduit.confString('aria2c',
'max-concurrent-downloads', default='10')
- min_split_size = conduit.confString('main',
+ min_split_size = conduit.confString('aria2c',
'min_split_size', default='1M')
- max_connection_per_server = conduit.confString('main',
+ max_connection_per_server = conduit.confString('aria2c',
'max_connection_per_server', default='5')
+ aria2_additional_args = conduit.confString('aria2c', 'additional_args',
+ default='')
# append aria2c options
downloader_common_args['aria2c'].append(
@@ -206,35 +84,19 @@ def postconfig_hook(conduit):
"--min-split-size={0}".format(min_split_size))
downloader_common_args['aria2c'].append(
"--max-connection-per-server={0}".format(max_connection_per_server))
+ if aria2_additional_args:
+ downloader_common_args['aria2c'] += aria2_additional_args.split(' ')
def prereposetup_hook(conduit):
- ret_insmethod = type(YumRepository._retrieveMD)
- YumRepository._retrieveMD = ret_insmethod(myRetrieveMD, None, YumRepository)
+ downloader_insmethod = type(_ExternalDownloader.__init__)
+ _ExternalDownloader.__init__ = downloader_insmethod(myExtDownloaderInit, None, _ExternalDownloader)
+ _ExternalDownloader._options+=('urls', 'relative_url')
+
+ start_method = type(_ExternalDownloader.start)
+ _ExternalDownloader.start = start_method(myExtDownloaderStart, None, _ExternalDownloader)
+ _ExternalDownloader.repos = conduit.getRepos().listEnabled()
# repos = conduit.getRepos()
# for repo in repos.listEnabled():
# print "name: %s" % repo.name
# print "URLS: %s" % repo.urls
# repo._retrieveMD = testRetrieveMD
-
-def predownload_hook(conduit):
- # it is also called on remove!
- if not conduit.getDownloadPackages():
- return
- inputFileName = "{0}/aria.input".format(global_cache_dir)
- ariaInputFile = open(inputFileName, "w")
- for pkg in conduit.getDownloadPackages():
- local = pkg.localPkg()
- if os.path.exists(local):
- if pkg.verifyLocalPkg():
- conduit.info(5, "Already have package for %s.%s."
- % (pkg.name, pkg.arch))
- continue
-
-# download_dir = os.path.join(pkg.repo.cachedir, 'alternative_downloader_cache')
-# conduit.info(2, "Getting: {0} into {1}".format(pkg.remote_url, local))
-# downloadFile(pkg.repo.urls, pkg.remote_path, local, pkg.repo.proxy_dict)
- queueDownload(ariaInputFile, pkg.repo.urls, pkg.remote_path,
- local, pkg.repo.proxy_dict)
- ariaInputFile.close()
- downloadQueuedFiles(inputFileName)
- os.unlink(inputFileName)
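The core of this change: instead of replacing YumRepository._retrieveMD and spawning aria2c itself, the plugin now rebinds methods on urlgrabber's _ExternalDownloader so that every download yum performs, metadata and packages alike, is handed to the aria2c-based helper. A minimal Python 2 sketch of the rebinding idiom used in prereposetup_hook, assuming urlgrabber exposes _ExternalDownloader as shown in the diff (the names patched_start and _original_start are illustrative only):

# type(cls.method)(func, None, cls) wraps a plain function as an unbound
# method of cls, so the replacement takes effect for every instance that
# urlgrabber creates afterwards.
from urlgrabber.grabber import _ExternalDownloader

_original_start = _ExternalDownloader.start

def patched_start(self, opts):
    # expose the full mirror list to the external downloader, not just the
    # single URL urlgrabber would otherwise pass along
    opts.urls = [m['mirror'] for m in opts.mirror_group[0].mirrors]
    _original_start(self, opts)

_ExternalDownloader.start = type(_ExternalDownloader.start)(
    patched_start, None, _ExternalDownloader)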