Diffstat (limited to 'urlgrabber-ext-down-yfd')
-rwxr-xr-x  urlgrabber-ext-down-yfd  126
1 file changed, 126 insertions, 0 deletions
diff --git a/urlgrabber-ext-down-yfd b/urlgrabber-ext-down-yfd
new file mode 100755
index 0000000..92e061c
--- /dev/null
+++ b/urlgrabber-ext-down-yfd
@@ -0,0 +1,126 @@
+#! /usr/bin/python -tt
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Library General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+#
+# Copyright Hedayat Vatankhah, 2012.
+#
+# Author: Hedayat Vatankhah <hedayat.fwd@gmail.com>
+#
+# * Used the code of original external downloader of urlgrabber:
+# Copyright 2011-2012 Zdenek Pavlas
+
+import time, errno, sys
+import os
+import shutil
+import yum
+import subprocess
+from urlgrabber.grabber import _ExternalDownloader, _readlines, \
+    URLGrabberOptions, _loads, URLGrabError
+
+# variables
+global_cache_dir='/var/cache/yum'
+downloader_common_args = []
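+# (both values are overwritten in configure() from the command line arguments)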
+
+# downloader functions
+
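+# Build the aria2c argument list: download into a temporary "<name>.yfd" file
+# in the target directory, split the transfer across the given mirror URLs,
+# and pass any non-FTP proxies through to aria2c.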
+def _getAria2CArgs(urls, remote_path, local_path, proxies = None):
+    args = []
+    args += downloader_common_args
+    args.append("--out={0}.yfd".format(os.path.basename(local_path)))
+    args.append("--dir={0}".format(os.path.dirname(local_path)))
+    args.append("--split={0}".format(max(5, len(urls))))
+    if proxies:
+        for (scheme, proxy) in proxies.items():
+            if not proxy.startswith('ftp://'):
+                args.append("--{0}-proxy={1}".format(scheme, proxy))
+
+    for url in urls:
+        args.append(url + remote_path)
+
+    return args
+
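+# Download remote_path from the given mirror URLs into local_path.
+# http/ftp URLs are handed to aria2c; file:// URLs are copied directly.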
+def downloadFile(urls, remote_path, local_path, proxies = None):
+    if urls[0].startswith("http://") or urls[0].startswith("ftp://"):
+        args = _getAria2CArgs(urls, remote_path, local_path, proxies)
+        popen = subprocess.Popen(args, stdout=2)
+        popen.wait()
+        i = 0
+        # aria2c writes into "<local_path>.yfd"; wait up to ~5 seconds for the
+        # final file to appear at local_path
+        while not os.path.exists(local_path):
+            time.sleep(0.1)
+            i += 1
+            if i > 50:
+                os.write(2, "Error in downloading file: {0}\n".format(local_path))
+                raise URLGrabError(2, "Download failed")
+    else:
+        shutil.copyfile(urls[0][len("file:"):] + remote_path, local_path)
+
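+# Read configuration from the command line: argv[1] is the yum cache
+# directory, argv[2:] is the downloader command line prefix (presumably
+# the aria2c executable and its common options).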
+def configure():
+    global global_cache_dir
+    global downloader_common_args
+
+    global_cache_dir = sys.argv[1]
+    downloader_common_args = sys.argv[2:]
+
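+# Write a formatted line to stdout (the pipe back to the parent process);
+# exit quietly if the parent has closed the pipe.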
+def write(fmt, *arg):
+    try: os.write(1, fmt % arg)
+    except OSError, e:
+        if e.args[0] != errno.EPIPE: raise
+        sys.exit(1)
+
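+# Minimal progress object that forwards throttled "<id> <bytes read>" updates
+# to the parent process instead of drawing a progress bar.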
+class ProxyProgress:
+    def start(self, *d1, **d2):
+        self.next_update = 0
+    def update(self, _amount_read):
+        t = time.time()
+        if t < self.next_update: return
+        self.next_update = t + 0.31
+        write('%d %d\n', self._id, _amount_read)
+
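+# Read download requests from stdin (one per line, as space-separated
+# key=value pairs) and report each result on stdout as
+# "<id> <size> <downloaded bytes> <download time> <status>".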
+def main():
+    import signal
+    signal.signal(signal.SIGINT, lambda n, f: sys.exit(1))
+    configure()
+    cnt = 0
+    while True:
+        lines = _readlines(0)
+        if not lines: break
+        for line in lines:
+            cnt += 1
+            opts = URLGrabberOptions()
+            opts._id = cnt
+            for k in line.split(' '):
+                k, v = k.split('=', 1)
+                setattr(opts, k, _loads(v))
+            if opts.progress_obj:
+                opts.progress_obj = ProxyProgress()
+                opts.progress_obj._id = cnt
+
+            dlsz = dltm = 0
+            try:
+                downloadFile(opts.urls, opts.relative_url, opts.filename)
+                size = 0
+                # dlsz = fo._tm_last[0] - fo._tm_first[0]
+                # dltm = fo._tm_last[1] - fo._tm_first[1]
+                ug_err = 'OK'
+            except URLGrabError, e:
+                size = 0
+                ug_err = '%d %d %s' % (e.errno, getattr(e, 'code', 0), e.strerror)
+            write('%d %d %d %.3f %s\n', opts._id, size, dlsz, dltm, ug_err)
+
+if __name__ == '__main__':
+    main()