#!/usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
# Copyright Hedayat Vatankhah, 2011.
#
# Author: Hedayat Vatankhah
#
# * Used the local and presto plugins and yum's code to learn more about yum
#   internals.
#

import os
import shutil
import yum
from yum import misc
from yum.plugins import TYPE_CORE
from yum.yumRepo import YumRepository
from yum import Errors
from time import sleep
from urlgrabber.grabber import URLGrabError

requires_api_version = '2.6'
plugin_type = (TYPE_CORE,)

# variables

originalRetrieveMD = YumRepository._retrieveMD
downloader_app = ''
global_cache_dir = ''

downloader_common_args = {
    'aria2c': ["aria2c", "--continue", "--check-certificate=false",
               "--on-download-complete=/usr/libexec/yum-fast-downloader-finalize"]
    }

# downloader functions

def _getAria2CArgs(urls, remote_path, local_path, proxies=None):
    """Builds the aria2c command line for a single download."""
    args = []
    args += downloader_common_args['aria2c']
    # download into a temporary .yfd file; the on-download-complete hook
    # renames it to the final name
    args.append("--out={0}.yfd".format(os.path.basename(local_path)))
    args.append("--dir={0}".format(os.path.dirname(local_path)))
    args.append("--split={0}".format(max(5, len(urls))))
    if proxies:
        for (scheme, proxy) in proxies.items():
            if not proxy.startswith('ftp://'):
                args.append("--{0}-proxy={1}".format(scheme, proxy))
    for url in urls:
        args.append(url + remote_path)
    return args

def downloadFile(urls, remote_path, local_path, proxies=None):
    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
        repo_name = os.path.basename(os.path.dirname(local_path))
        print "\n" + "=" * 80 + \
            "\nDownloading ({0}) {1}\n".format(repo_name, remote_path) + \
            "=" * 80
        args = _getAria2CArgs(urls, remote_path, local_path, proxies)
        ret = os.spawnvp(os.P_WAIT, downloader_app, args)
        if ret:
            raise yum.plugins.PluginYumExit(
                "{0} exited with exit code: {1}".format(downloader_app, ret))
        # wait up to ~5 seconds for the finalize hook to rename the
        # temporary .yfd file to its final name
        i = 0
        while not os.path.exists(local_path):
            sleep(0.1)
            i += 1
            if i > 50:
                raise yum.plugins.PluginYumExit(
                    "Error in downloading file: {0}".format(local_path))

def queueDownload(inputFile, urls, remote_path, local_path, proxies=None):
    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
        # write tab-separated mirror URIs, capping the line at ~4080 chars
        h = ""
        for url in urls:
            h = h + url + remote_path + "\t"
            if len(h) > 4080:
                break
            inputFile.write(url + remote_path + "\t")
        inputFile.write("\n")
        inputFile.write(" out={0}.yfd\n".format(os.path.basename(local_path)))
        inputFile.write(" dir={0}\n".format(os.path.dirname(local_path)))
        inputFile.write(" split={0}\n".format(max(5, len(urls))))
        if proxies:
            for (scheme, proxy) in proxies.items():
                if not proxy.startswith('ftp://'):
                    inputFile.write(" {0}-proxy={1}\n".format(scheme, proxy))

def downloadQueuedFiles(inputFileName):
    args = []
    args += downloader_common_args['aria2c']
    args.append("--input-file={0}".format(inputFileName))
    ret = os.spawnvp(os.P_WAIT, "aria2c", args)
    sleep(1)
    if ret:
        raise yum.plugins.PluginYumExit(
            "{0} exited with exit code: {1}".format(downloader_app, ret))
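# For reference, a sketch of the --input-file contents that queueDownload()
# produces and downloadQueuedFiles() hands to aria2c; the mirror URLs and
# package name below are made up:
#
#   http://mirror1.example/foo.rpm<TAB>http://mirror2.example/foo.rpm
#    out=foo.rpm.yfd
#    dir=/var/cache/yum/updates/packages
#    split=5
#
# Each non-indented line lists tab-separated mirror URIs for one download;
# the indented option=value lines below it apply to that download only.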
# Hooks!

# Written looking at yum's _retrieveMD implementation; most parts have
# been taken directly from it.
def myRetrieveMD(self, mdtype, retrieve_can_fail=False, **kwargs):
    """Replaces yum's default _retrieveMD method to use an external downloader.

    Calls the original function on return to make sure that everything is OK.
    """
    thisdata = self.repoXML.getData(mdtype)

    (r_base, remote) = thisdata.location
    fname = os.path.basename(remote)
    local = self.cachedir + '/' + fname

    if self.retrieved.get(mdtype):
        # already got it; the original function will return the local copy
        return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)

    if self.cache == 1:
        if os.path.exists(local):
            try:
                self.checkMD(local, mdtype)
            except URLGrabError, e:
                raise Errors.RepoError, \
                    "Caching enabled and local cache: %s does not match checksum" % local
            else:
                return originalRetrieveMD(self, mdtype, retrieve_can_fail,
                                          **kwargs)
        else: # ain't there - raise
            raise Errors.RepoError, \
                "Caching enabled but no local cache of %s from %s" % (local,
                                                                      self)

    if (os.path.exists(local) or
        self._preload_md_from_system_cache(os.path.basename(local))):
        if self._checkMD(local, mdtype, check_can_fail=True):
            self.retrieved[mdtype] = 1
            # it's the same; return the local one
            return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)

    try:
        def checkfunc(obj):
            self.checkMD(obj, mdtype)
            self.retrieved[mdtype] = 1
        text = "%s/%s" % (self.id, mdtype)
        if thisdata.size is None:
            reget = None
        else:
            reget = 'simple'
            if os.path.exists(local):
                if os.stat(local).st_size >= int(thisdata.size):
                    # a reget on a local file this big would fail, so drop it
                    misc.unlink_f(local)
        downloadFile(self.urls, remote, local, self.proxy_dict)
#        local = self._getFile(relative=remote,
#                              local=local,
#                              copy_local=1,
#                              reget=reget,
#                              checkfunc=checkfunc,
#                              text=text,
#                              cache=self.http_caching == 'all',
#                              size=thisdata.size,
#                              **kwargs)
    except Errors.RepoError:
        if retrieve_can_fail:
            return None
        raise
    except URLGrabError, e:
        if retrieve_can_fail:
            return None
        raise Errors.RepoError, \
            "Could not retrieve %s matching remote checksum from %s" % (local,
                                                                        self)
    else:
        return originalRetrieveMD(self, mdtype, retrieve_can_fail, **kwargs)

def postconfig_hook(conduit):
    global downloader_app
    global global_cache_dir
    downloader_app = conduit.confString('main', 'downloader', default='aria2c')
    global_cache_dir = conduit.getConf().cachedir
    max_concurrent_downloads = conduit.confString(
        'main', 'max-concurrent-downloads', default='10')
    min_split_size = conduit.confString('main', 'min_split_size', default='1M')
    max_connection_per_server = conduit.confString(
        'main', 'max_connection_per_server', default='5')

    # append aria2c options
    downloader_common_args['aria2c'].append(
        "--server-stat-if={0}/aria2c_server_stats".format(global_cache_dir))
    downloader_common_args['aria2c'].append(
        "--server-stat-of={0}/aria2c_server_stats".format(global_cache_dir))
    downloader_common_args['aria2c'].append(
        "--max-concurrent-downloads={0}".format(max_concurrent_downloads))
    downloader_common_args['aria2c'].append(
        "--min-split-size={0}".format(min_split_size))
    downloader_common_args['aria2c'].append(
        "--max-connection-per-server={0}".format(max_connection_per_server))

def prereposetup_hook(conduit):
    # rebind myRetrieveMD as an (unbound) instance method of YumRepository so
    # that every repository object fetches metadata through the downloader
    ret_insmethod = type(YumRepository._retrieveMD)
    YumRepository._retrieveMD = ret_insmethod(myRetrieveMD, None, YumRepository)
#    repos = conduit.getRepos()
#    for repo in repos.listEnabled():
#        print "name: %s" % repo.name
#        print "URLS: %s" % repo.urls
#        repo._retrieveMD = testRetrieveMD
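# A minimal sketch of the plugin configuration consumed by postconfig_hook()
# above. The file name is an assumption (yum reads plugin configuration from
# /etc/yum/pluginconf.d/); "enabled" is the standard yum plugin switch, the
# other keys are the ones queried above:
#
#   [main]
#   enabled=1
#   downloader=aria2c
#   max-concurrent-downloads=10
#   min_split_size=1M
#   max_connection_per_server=5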
def predownload_hook(conduit):
    # note: this hook is also called on package removal!
    if not conduit.getDownloadPackages():
        return
    inputFileName = "{0}/aria.input".format(global_cache_dir)
    ariaInputFile = open(inputFileName, "w")
    for pkg in conduit.getDownloadPackages():
        local = pkg.localPkg()
        if os.path.exists(local):
            if pkg.verifyLocalPkg():
                conduit.info(5, "Already have package for %s.%s."
                             % (pkg.name, pkg.arch))
                continue
#        download_dir = os.path.join(pkg.repo.cachedir,
#                                    'alternative_downloader_cache')
#        conduit.info(2, "Getting: {0} into {1}".format(pkg.remote_url, local))
#        downloadFile(pkg.repo.urls, pkg.remote_path, local,
#                     pkg.repo.proxy_dict)
        queueDownload(ariaInputFile, pkg.repo.urls, pkg.remote_path, local,
                      pkg.repo.proxy_dict)
    ariaInputFile.close()
    downloadQueuedFiles(inputFileName)
    os.unlink(inputFileName)
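# For reference: downloads are written as "<name>.yfd", and aria2c runs the
# --on-download-complete hook (/usr/libexec/yum-fast-downloader-finalize) for
# each finished file. A hypothetical stand-in for that helper, assuming
# aria2c's documented hook arguments (GID, number of files, file path), would
# simply strip the ".yfd" suffix so the polling loop in downloadFile() and
# yum itself see the final path:
#
#   #!/usr/bin/python
#   import os, sys
#   path = sys.argv[3]
#   if path.endswith('.yfd'):
#       os.rename(path, path[:-len('.yfd')])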