#! /usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
# Copyright Hedayat Vatankhah, 2011.
#
# Author: Hedayat Vatankhah
#
# * Used local and presto plugins and yum's code to learn more about yum
#   internals.
#

import os
import shutil

import yum
from yum.plugins import TYPE_CORE
from yum.yumRepo import YumRepository

requires_api_version = '2.6'
plugin_type = (TYPE_CORE,)

# Module state, filled in by postconfig_hook.
originalRetrieveMD = YumRepository._retrieveMD
downloader_app = ''
global_cache_dir = ''
# Template command lines, keyed by downloader name. Treat the lists as
# read-only templates: always copy before appending per-call options.
downloader_common_args = {
    'aria2c': ["aria2c", "--continue", "--check-certificate=false"]
}


# downloader functions

def _getAria2CArgs(urls, remote_path, local_path, proxies=None):
    """Build the aria2c argument vector for downloading *remote_path*.

    urls        -- list of mirror base URLs; remote_path is appended to each
    remote_path -- path of the file relative to every mirror base URL
    local_path  -- destination file path; split into --dir/--out options
    proxies     -- optional mapping of scheme -> proxy URL

    Returns the complete argv list (argv[0] included) for os.spawnvp.
    """
    # Copy the shared template: the original code aliased the module-level
    # list and mutated it, so every call grew the global template.
    args = list(downloader_common_args['aria2c'])
    args.append("--out={0}".format(os.path.basename(local_path)))
    args.append("--dir={0}".format(os.path.dirname(local_path)))
    for url in urls:
        args.append(url + remote_path)
    # Use at least 5 connections, or one per mirror when more are available.
    args.append("--split={0}".format(max(5, len(urls))))
    if proxies:
        for (scheme, proxy) in proxies.items():
            # ftp:// proxies are not expressible as aria2c --<scheme>-proxy.
            if not proxy.startswith('ftp://'):
                args.append("--{0}-proxy={1}".format(scheme, proxy))
    return args


def downloadFile(urls, remote_path, local_path, proxies=None):
    """Download a single file from the given mirror list using aria2c.

    Only http:// and ftp:// mirror lists are handled; anything else
    (e.g. file:// media repos) is silently left for yum itself.

    Raises yum.plugins.PluginYumExit if the downloader exits non-zero.
    """
    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
        # Bug fix: forward proxies; the original dropped this argument.
        args = _getAria2CArgs(urls, remote_path, local_path, proxies)
        ret = os.spawnvp(os.P_WAIT, downloader_app, args)
        if ret:
            raise yum.plugins.PluginYumExit(
                "{0} exited with exit code: {1}".format(downloader_app, ret))


def queueDownload(inputFile, urls, remote_path, local_path, proxies=None):
    """Append one download entry to an aria2c --input-file.

    The entry lists all mirrors tab-separated on one line, followed by
    indented per-download options (out/dir/split and proxies), matching
    aria2c's input-file format. Non-http/ftp mirror lists are skipped.
    """
    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
        for url in urls:
            inputFile.write(url + remote_path + "\t")
        inputFile.write("\n")
        inputFile.write(" out={0}\n".format(os.path.basename(local_path)))
        inputFile.write(" dir={0}\n".format(os.path.dirname(local_path)))
        inputFile.write(" split={0}\n".format(max(5, len(urls))))
        if proxies:
            for (scheme, proxy) in proxies.items():
                # ftp:// proxies are not expressible as aria2c options.
                if not proxy.startswith('ftp://'):
                    inputFile.write(" {0}-proxy={1}\n".format(scheme, proxy))


def downloadQueuedFiles(inputFileName):
    """Run aria2c over a previously built --input-file.

    Raises yum.plugins.PluginYumExit if the downloader exits non-zero.
    """
    # Copy the shared template instead of mutating the module-level list.
    args = list(downloader_common_args['aria2c'])
    args.append("--input-file={0}".format(inputFileName))
    # NOTE(review): spawns "aria2c" directly while the error message names
    # downloader_app; the input-file format is aria2c-specific anyway.
    ret = os.spawnvp(os.P_WAIT, "aria2c", args)
    if ret:
        raise yum.plugins.PluginYumExit(
            "{0} exited with exit code: {1}".format(downloader_app, ret))


# Hooks!
# Written looking at yum's _retrieveMD implementation. Most parts have
# been directly grabbed from that.
def myRetrieveMD(self, mdtype, retrieve_can_fail=False):
    """Replace yum's default _retrieveMD to pre-fetch metadata with aria2c.

    Mirrors the early-exit logic of yum's own _retrieveMD (already
    retrieved, cache-only mode, valid local copy) and in each of those
    cases simply delegates to the original implementation. Otherwise it
    downloads the metadata file itself and then calls the original
    function, which finds the file already present and validates it.
    """
    thisdata = self.repoXML.getData(mdtype)
    (r_base, remote) = thisdata.location
    fname = os.path.basename(remote)
    local = self.cachedir + '/' + fname
    if self.retrieved.get(mdtype):
        # Already fetched during this run; let yum handle the rest.
        return originalRetrieveMD(self, mdtype, retrieve_can_fail)
    if self.cache == 1:
        # Cache-only operation (-C): no downloading allowed.
        return originalRetrieveMD(self, mdtype, retrieve_can_fail)
    if (os.path.exists(local) or
            self._preload_md_from_system_cache(os.path.basename(local))):
        if self._checkMD(local, mdtype, check_can_fail=True):
            # Local copy verifies fine; nothing to download.
            return originalRetrieveMD(self, mdtype, retrieve_can_fail)
    if thisdata.size and os.path.exists(local):
        # A stale file at (or beyond) full size cannot be resumed by
        # --continue; remove it so aria2c starts fresh.
        if os.stat(local).st_size >= int(thisdata.size):
            yum.misc.unlink_f(local)
    downloadFile(self.urls, remote, local, self.proxy_dict)
    return originalRetrieveMD(self, mdtype, retrieve_can_fail)


def postconfig_hook(conduit):
    """Read plugin configuration and finish building the aria2c options."""
    global downloader_app
    global global_cache_dir
    global downloader_common_args

    downloader_app = conduit.confString('main', 'downloader',
                                        default='aria2c')
    global_cache_dir = conduit.getConf().cachedir
    max_concurrent_downloads = conduit.confString(
        'main', 'max-concurrent-downloads', default='10')

    # Persist server statistics between runs so aria2c can pick fast
    # mirrors, and bound the number of parallel downloads.
    downloader_common_args['aria2c'].append(
        "--server-stat-if={0}/aria2c_server_stats".format(global_cache_dir))
    downloader_common_args['aria2c'].append(
        "--server-stat-of={0}/aria2c_server_stats".format(global_cache_dir))
    downloader_common_args['aria2c'].append(
        "--max-concurrent-downloads={0}".format(max_concurrent_downloads))


def prereposetup_hook(conduit):
    """Install myRetrieveMD as YumRepository._retrieveMD (unbound method)."""
    ret_insmethod = type(YumRepository._retrieveMD)
    YumRepository._retrieveMD = ret_insmethod(myRetrieveMD, None,
                                              YumRepository)


def predownload_hook(conduit):
    """Download all queued packages in one aria2c batch run.

    Builds an aria2c input file listing every package that is not already
    present and valid locally, runs aria2c once over it, and removes the
    input file afterwards.
    """
    # NOTE: this hook is also called on package removal, where there is
    # nothing to download.
    if not conduit.getDownloadPackages():
        return

    inputFileName = "{0}/aria.input".format(global_cache_dir)
    ariaInputFile = open(inputFileName, "w")
    try:
        for pkg in conduit.getDownloadPackages():
            local = pkg.localPkg()
            if os.path.exists(local):
                if pkg.verifyLocalPkg():
                    conduit.info(5, "Already have package for %s.%s."
                                 % (pkg.name, pkg.arch))
                    continue
            queueDownload(ariaInputFile, pkg.repo.urls, pkg.remote_path,
                          local, pkg.repo.proxy_dict)
    finally:
        # Bug fix: close the input file even if queuing raises, so the
        # handle is not leaked and the file is flushed before aria2c runs.
        ariaInputFile.close()
    downloadQueuedFiles(inputFileName)
    os.unlink(inputFileName)