[yum-commits] 2 commits - scripts/urlgrabber-ext-down urlgrabber/grabber.py
zpavlas at osuosl.org
zpavlas at osuosl.org
Wed Aug 29 13:00:48 UTC 2012
scripts/urlgrabber-ext-down | 11 +++++++----
urlgrabber/grabber.py | 19 +++++++++++++++----
2 files changed, 22 insertions(+), 8 deletions(-)
New commits:
commit 971e8f42f1922278e0a949cf0e80bd84fc23c7c4
Author: Zdeněk Pavlas <zpavlas at redhat.com>
Date: Wed Aug 29 11:50:46 2012 +0200
mirror selection: private flag overrides est. speed. BZ 851178.
diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
index 5df0436..f750a62 100644
--- a/urlgrabber/grabber.py
+++ b/urlgrabber/grabber.py
@@ -2263,8 +2263,9 @@ def parallel_wait(meter=None):
speed = _TH.estimate(key)
speed /= 1 + host_con.get(key, 0)
- # 2-tuple to select mirror with least failures
- speed = -failed.get(key, 0), speed
+ # order by: least failures, private flag, best speed
+ private = mirror.get('kwargs', {}).get('private', False)
+ speed = -failed.get(key, 0), private, speed
if best is None or speed > best_speed:
best = mirror
best_speed = speed
commit 4db7b23949ce3628f0e0e65c7c56900fef2ea78a
Author: Zdeněk Pavlas <zpavlas at redhat.com>
Date: Mon Aug 27 14:07:39 2012 +0200
timedhosts: Measure bandwidth, ignore latency
Use only file offsets and timestamps at the first and last call
of PyCurlFileObject._retrieve(). Should help 851178.
diff --git a/scripts/urlgrabber-ext-down b/scripts/urlgrabber-ext-down
index 3da55a4..3dafb12 100755
--- a/scripts/urlgrabber-ext-down
+++ b/scripts/urlgrabber-ext-down
@@ -55,18 +55,21 @@ def main():
if opts.progress_obj:
opts.progress_obj = ProxyProgress()
opts.progress_obj._id = cnt
- tm = time.time()
+
+ dlsz = dltm = 0
try:
fo = PyCurlFileObject(opts.url, opts.filename, opts)
fo._do_grab()
fo.fo.close()
size = fo._amount_read
- dlsz = size - fo._reget_length
+ if fo._tm_last:
+ dlsz = fo._tm_last[0] - fo._tm_first[0]
+ dltm = fo._tm_last[1] - fo._tm_first[1]
ug_err = 'OK'
except URLGrabError, e:
- size = dlsz = 0
+ size = 0
ug_err = '%d %s' % e.args
- write('%d %d %d %.3f %s\n', opts._id, size, dlsz, time.time() - tm, ug_err)
+ write('%d %d %d %.3f %s\n', opts._id, size, dlsz, dltm, ug_err)
if __name__ == '__main__':
main()
diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
index 0d67b1f..5df0436 100644
--- a/urlgrabber/grabber.py
+++ b/urlgrabber/grabber.py
@@ -1126,11 +1126,13 @@ class URLGrabber(object):
return filename
def retryfunc(opts, url, filename):
- tm = time.time()
fo = PyCurlFileObject(url, filename, opts)
try:
fo._do_grab()
- _TH.update(url, fo._amount_read - fo._reget_length, time.time() - tm, None)
+ if fo._tm_last:
+ dlsz = fo._tm_last[0] - fo._tm_first[0]
+ dltm = fo._tm_last[1] - fo._tm_first[1]
+ _TH.update(url, dlsz, dltm, None)
if not opts.checkfunc is None:
obj = CallbackObject(filename=filename, url=url)
_run_callback(opts.checkfunc, obj)
@@ -1223,6 +1225,8 @@ class PyCurlFileObject(object):
self._error = (None, None)
self.size = 0
self._hdr_ended = False
+ self._tm_first = None
+ self._tm_last = None
self._do_open()
@@ -1237,6 +1241,12 @@ class PyCurlFileObject(object):
def _retrieve(self, buf):
try:
+ tm = self._amount_read + len(buf), time.time()
+ if self._tm_first is None:
+ self._tm_first = tm
+ else:
+ self._tm_last = tm
+
if not self._prog_running:
if self.opts.progress_obj:
size = self.size + self._reget_length
More information about the Yum-commits
mailing list