summaryrefslogtreecommitdiffstats
path: root/yum-fast-downloader.py
blob: f5f2dd2ec1ab5a3b8e3555f615277ceecc953ede (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
#! /usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
# Copyright Hedayat Vatankhah, 2011.
#
# Author: Hedayat Vatankhah <hedayat.fwd@gmail.com>
#
# * Used local and presto plugins and yum's code to learn more about yum 
# internals.
# 
#

import os
import shutil
import yum
from yum.plugins import TYPE_CORE
from yum.yumRepo import YumRepository

# Minimum yum plugin API version this plugin was written against.
requires_api_version = '2.6'
# Core plugin: participates in all yum operations, not just interactive ones.
plugin_type = (TYPE_CORE,)

# variables
# Saved reference to yum's original metadata fetcher; myRetrieveMD delegates
# to it after (possibly) pre-downloading the file with aria2c.
originalRetrieveMD = YumRepository._retrieveMD
# Name of the external downloader binary; set from config in postconfig_hook.
downloader_app = ''
# yum's global cache directory; set from config in postconfig_hook.
global_cache_dir=''
# Base command-line template per supported downloader; postconfig_hook
# appends more options to the 'aria2c' entry.
downloader_common_args = {
    'aria2c' : ["aria2c", "--continue", "--check-certificate=false"]
}

# downloader functions

def _getAria2CArgs(urls, remote_path, local_path, proxies = None):
    """Build the aria2c argument vector for downloading one file.

    :param urls: list of mirror base URLs; remote_path is appended to each
    :param remote_path: path of the file relative to each mirror base
    :param local_path: full local destination path (split into --dir/--out)
    :param proxies: optional mapping of scheme -> proxy URL; ftp proxies are
                    skipped because aria2c does not support them here
    :return: list of arguments, starting with the aria2c executable name
    """
    # Bug fix: copy the shared template instead of aliasing it. The old code
    # appended to downloader_common_args['aria2c'] itself, so every call
    # permanently grew the shared list with stale --out/--dir/URL entries.
    args = list(downloader_common_args['aria2c'])
    args.append("--out={0}".format(os.path.basename(local_path)))
    args.append("--dir={0}".format(os.path.dirname(local_path)))
    # Each mirror URL is offered to aria2c as an alternative source.
    for url in urls:
        args.append(url + remote_path)
    args.append("--split={0}".format(max(5, len(urls))))

    if proxies:
        for (scheme, proxy) in proxies.items():
            if not proxy.startswith('ftp://'):
                args.append("--{0}-proxy={1}".format(scheme, proxy))
    return args

def downloadFile(urls, remote_path, local_path, proxies = None):
    """Download a single file with the configured external downloader.

    Only http:// and ftp:// mirror lists are handled; anything else (e.g.
    file:// media repos) is left for yum's built-in fetcher.

    :raises yum.plugins.PluginYumExit: if the downloader exits non-zero
    """
    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
        # Bug fix: forward the proxies argument; it was previously accepted
        # but silently dropped, so proxy configuration was ignored.
        args = _getAria2CArgs(urls, remote_path, local_path, proxies)
        ret = os.spawnvp(os.P_WAIT, downloader_app, args)
        if ret:
            raise yum.plugins.PluginYumExit(
                   "{0} exited with exit code: {1}".format(downloader_app, ret))

def queueDownload(inputFile, urls, remote_path, local_path, proxies = None):
    """Append one download entry to an aria2c input file.

    The entry lists every mirror URL on one tab-separated line, followed by
    indented per-download options (output name, directory, split count and
    any usable proxies). Non-http/ftp mirror lists are ignored.
    """
    scheme_supported = (urls[0].startswith("http://")
                        or urls[0].startswith("ftp://"))
    if not scheme_supported:
        return

    # All mirrors for the same file go on a single line.
    for mirror in urls:
        inputFile.write(mirror + remote_path + "\t")
    inputFile.write("\n")

    option_lines = [
        "  out={0}\n".format(os.path.basename(local_path)),
        "  dir={0}\n".format(os.path.dirname(local_path)),
        "  split={0}\n".format(max(5, len(urls))),
    ]
    for line in option_lines:
        inputFile.write(line)

    if proxies:
        for scheme, proxy in proxies.items():
            # aria2c has no ftp proxy option of this form; skip those.
            if proxy.startswith('ftp://'):
                continue
            inputFile.write("  {0}-proxy={1}\n".format(scheme, proxy))

def downloadQueuedFiles(inputFileName):
    """Run aria2c over a previously written input file of queued downloads.

    :param inputFileName: path of the aria2c --input-file to process
    :raises yum.plugins.PluginYumExit: if aria2c exits non-zero
    """
    # Bug fix: copy the shared template instead of aliasing it; the old code
    # appended --input-file to downloader_common_args['aria2c'] itself, so
    # each call left a stale --input-file option in the shared list.
    args = list(downloader_common_args['aria2c'])
    args.append("--input-file={0}".format(inputFileName))
    # NOTE(review): this spawns "aria2c" directly (the input-file format is
    # aria2c-specific) even though the error message names downloader_app.
    ret = os.spawnvp(os.P_WAIT, "aria2c", args)
    if ret:
        raise yum.plugins.PluginYumExit(
                   "{0} exited with exit code: {1}".format(downloader_app, ret))

# Hooks! 

# Written looking at yum's _retrieveMD implementation. Most parts have
# been taken directly from it.
def myRetrieveMD(self, mdtype, retrieve_can_fail=False):
    """Replacement for YumRepository._retrieveMD that pre-fetches repository
    metadata with the external downloader, then delegates to yum's original
    implementation so all of yum's own validation/bookkeeping still runs.

    :param mdtype: metadata type name (e.g. 'primary', 'filelists')
    :param retrieve_can_fail: passed through to the original _retrieveMD
    """
    # calls original function on return to make sure that everything is OK
    thisdata = self.repoXML.getData(mdtype)

    # Remote location of this metadata file and its local cache destination.
    (r_base, remote) = thisdata.location
    fname = os.path.basename(remote)
    local = self.cachedir + '/' + fname

    if self.retrieved.get(mdtype):
        # got it, move along: already fetched this session, nothing to download
        return originalRetrieveMD(self, mdtype, retrieve_can_fail)

    # Cache-only mode (yum -C): never download, let the original handle it.
    if self.cache == 1:
        return originalRetrieveMD(self, mdtype, retrieve_can_fail)

    # If a local copy exists (or can be preloaded from the system cache) and
    # passes the checksum test, no download is needed.
    if (os.path.exists(local) or
        self._preload_md_from_system_cache(os.path.basename(local))):
        if self._checkMD(local, mdtype, check_can_fail=True):
            return originalRetrieveMD(self, mdtype, retrieve_can_fail)

    # A local file at least as large as the expected size at this point is
    # corrupt (it failed the checksum test above); remove it so the
    # downloader's --continue resume logic doesn't choke on it.
    if thisdata.size and os.path.exists(local):
        if os.stat(local).st_size >= int(thisdata.size):
            yum.misc.unlink_f(local)
    downloadFile(self.urls, remote, local, self.proxy_dict)
    # Delegate to yum for final verification and bookkeeping of the file.
    return originalRetrieveMD(self, mdtype, retrieve_can_fail)

def postconfig_hook(conduit):
    """Read plugin configuration and finish building the aria2c argument set.

    Runs once after yum's configuration is parsed; fills in the module-level
    downloader name and cache directory, and extends the shared aria2c
    argument template with server-statistics and concurrency options.
    """
    global downloader_app
    global global_cache_dir
    global downloader_common_args

    downloader_app = conduit.confString('main', 'downloader', default='aria2c')
    global_cache_dir = conduit.getConf().cachedir
    max_concurrent_downloads = conduit.confString('main',
             'max-concurrent-downloads', default='10')

    # append aria2c options: reuse server statistics across runs (read and
    # write the same stats file) and cap parallel downloads.
    downloader_common_args['aria2c'].extend([
        "--server-stat-if={0}/aria2c_server_stats".format(global_cache_dir),
        "--server-stat-of={0}/aria2c_server_stats".format(global_cache_dir),
        "--max-concurrent-downloads={0}".format(max_concurrent_downloads),
    ])


def prereposetup_hook(conduit):
    """Monkey-patch YumRepository._retrieveMD with our downloader-aware
    version before repositories are set up.

    type() of the existing method yields Python 2's instancemethod type;
    calling it with (func, None, cls) builds an unbound method bound to the
    class, so myRetrieveMD is installed exactly like a normal method.
    """
    ret_insmethod = type(YumRepository._retrieveMD)
    YumRepository._retrieveMD = ret_insmethod(myRetrieveMD, None, YumRepository)
#    repos = conduit.getRepos()
#    for repo in repos.listEnabled():
#       print "name: %s" % repo.name
#       print "URLS: %s" % repo.urls
#       repo._retrieveMD = testRetrieveMD

def predownload_hook(conduit):
    """Download all pending packages with aria2c before yum's own fetcher.

    Builds an aria2c input file listing every package that is not already
    present and valid in the local cache, runs aria2c over it, and removes
    the input file on success. Packages already downloaded are skipped.
    """
    # This hook is also called on remove-only transactions, where there is
    # nothing to download.
    if not conduit.getDownloadPackages():
        return
    inputFileName = "{0}/aria.input".format(global_cache_dir)
    # Bug fix: use a context manager so the input file is closed even if
    # verifyLocalPkg() or queueDownload() raises (it was leaked before).
    with open(inputFileName, "w") as ariaInputFile:
        for pkg in conduit.getDownloadPackages():
            local = pkg.localPkg()
            if os.path.exists(local):
                if pkg.verifyLocalPkg():
                    conduit.info(5, "Already have package for %s.%s."
                                     % (pkg.name, pkg.arch))
                    continue

            queueDownload(ariaInputFile, pkg.repo.urls, pkg.remote_path,
                    local, pkg.repo.proxy_dict)
    downloadQueuedFiles(inputFileName)
    # Remove the input file only after a successful run; on failure it is
    # intentionally left behind (matches original behavior, aids debugging).
    os.unlink(inputFileName)