[Yum-devel] [PATCH 2/4] python3-compat: except

Zdeněk Pavlas zpavlas at redhat.com
Thu Jul 26 11:37:56 UTC 2012
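
Replace the old-style "except ExceptionType, name:" syntax with
"except ExceptionType as name:", which Python 2.6+ accepts and Python 3
requires.  Where the bound value was never used (the ImportError handlers),
the binding is dropped entirely.  In parallel_wait() the caught exception is
copied into ug_err explicitly, because Python 3 unbinds the "as" target when
the handler exits.

A minimal standalone illustration of the change (the names below are made up
for the example, not taken from urlgrabber):

    try:
        raise ValueError('demo')
    # Python 2 only, a syntax error on Python 3:
    #     except ValueError, e:
    except ValueError as e:      # accepted by Python 2.6+ and Python 3
        saved = e                # copy the value out if it is needed later;
                                 # Python 3 deletes 'e' when the block ends
    print(saved)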


---
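Not part of the patch: a rough way to scan the tree for any old-style except
clauses the series may have missed.  The regex is an approximation and only
looks at *.py files, so scripts/urlgrabber and scripts/urlgrabber-ext-down
would need a separate look.

    import os, re

    # Matches old-style handlers like "except URLGrabError, e:" but not the
    # new "except URLGrabError as e:" form.
    OLD_EXCEPT = re.compile(r'^\s*except\s+[\w.()\s,]+,\s*\w+\s*:')

    for root, _, files in os.walk('.'):
        for name in files:
            if not name.endswith('.py'):
                continue
            path = os.path.join(root, name)
            with open(path) as f:
                for lineno, line in enumerate(f, 1):
                    if OLD_EXCEPT.match(line):
                        print('%s:%d: %s' % (path, lineno, line.rstrip()))
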
 scripts/urlgrabber          |    6 ++--
 scripts/urlgrabber-ext-down |    4 +-
 urlgrabber/byterange.py     |   10 ++++----
 urlgrabber/grabber.py       |   51 ++++++++++++++++++++++---------------------
 urlgrabber/mirror.py        |    4 +-
 5 files changed, 38 insertions(+), 37 deletions(-)

diff --git a/scripts/urlgrabber b/scripts/urlgrabber
index 09cd896..a23a11c 100644
--- a/scripts/urlgrabber
+++ b/scripts/urlgrabber
@@ -178,7 +178,7 @@ class client_options:
         try:
             optlist, args = getopt.getopt(sys.argv[1:], short_options,
                                           long_options + ug_long)
-        except getopt.GetoptError, e:
+        except getopt.GetoptError as e:
             print >>sys.stderr, "Error:", e
             self.help([], ret=1)
 
@@ -218,7 +218,7 @@ class client_options:
             if o in ug_dash:
                 try:
                     val = eval(v)
-                except Exception, e:
+                except Exception as e:
                     print "error processing option value: %s" % v
                     print e
                     sys.exit(1)
@@ -299,7 +299,7 @@ class ugclient:
                 for i in range(0, self.op.repeat):
                     f = self.g.urlgrab(url, self.op.outputfile)
                 if self.op.localfile: print f
-            except URLGrabError, e:
+            except URLGrabError as e:
                 print e
         
     def set_debug_logger(self, dbspec):
diff --git a/scripts/urlgrabber-ext-down b/scripts/urlgrabber-ext-down
index 3da55a4..3fcc8e2 100755
--- a/scripts/urlgrabber-ext-down
+++ b/scripts/urlgrabber-ext-down
@@ -25,7 +25,7 @@ from urlgrabber.grabber import \
 
 def write(fmt, *arg):
     try: os.write(1, fmt % arg)
-    except OSError, e:
+    except OSError as e:
         if e.args[0] != errno.EPIPE: raise
         sys.exit(1)
 
@@ -63,7 +63,7 @@ def main():
                 size = fo._amount_read
                 dlsz = size - fo._reget_length
                 ug_err = 'OK'
-            except URLGrabError, e:
+            except URLGrabError as e:
                 size = dlsz = 0
                 ug_err = '%d %s' % e.args
             write('%d %d %d %.3f %s\n', opts._id, size, dlsz, time.time() - tm, ug_err)
diff --git a/urlgrabber/byterange.py b/urlgrabber/byterange.py
index e30cd48..f7caec4 100644
--- a/urlgrabber/byterange.py
+++ b/urlgrabber/byterange.py
@@ -28,7 +28,7 @@ DEBUG = None
 
 try:    
     from cStringIO import StringIO
-except ImportError, msg: 
+except ImportError:
     from StringIO import StringIO
 
 class RangeError(IOError):
@@ -285,7 +285,7 @@ class FTPRangeHandler(urllib2.FTPHandler):
         
         try:
             host = socket.gethostbyname(host)
-        except socket.error, msg:
+        except socket.error as msg:
             raise urllib2.URLError(msg)
         path, attrs = splitattr(req.get_selector())
         dirs = path.split('/')
@@ -338,7 +338,7 @@ class FTPRangeHandler(urllib2.FTPHandler):
             sf = StringIO(headers)
             headers = mimetools.Message(sf)
             return addinfourl(fp, headers, req.get_full_url())
-        except ftplib.all_errors, msg:
+        except ftplib.all_errors as msg:
             raise IOError('ftp error', msg)
 
     def connect_ftp(self, user, passwd, host, port, dirs):
@@ -364,7 +364,7 @@ class ftpwrapper(urllib.ftpwrapper):
             # Use nlst to see if the file exists at all
             try:
                 self.ftp.nlst(file)
-            except ftplib.error_perm, reason:
+            except ftplib.error_perm as reason:
                 raise IOError('ftp error', reason)
             # Restore the transfer mode!
             self.ftp.voidcmd(cmd)
@@ -372,7 +372,7 @@ class ftpwrapper(urllib.ftpwrapper):
             try:
                 cmd = 'RETR ' + file
                 conn = self.ftp.ntransfercmd(cmd, rest)
-            except ftplib.error_perm, reason:
+            except ftplib.error_perm as reason:
                 if str(reason)[:3] == '501':
                     # workaround for REST not supported error
                     fp, retrlen = self.retrfile(file, type)
diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
index 02898cb..9e1c6ee 100644
--- a/urlgrabber/grabber.py
+++ b/urlgrabber/grabber.py
@@ -502,7 +502,7 @@ except:
 try:
     # this part isn't going to do much - need to talk to gettext
     from i18n import _
-except ImportError, msg:
+except ImportError:
     def _(st): return st
     
 ########################################################################
@@ -1020,10 +1020,10 @@ class URLGrabber(object):
                 r = apply(func, (opts,) + args, {})
                 if DEBUG: DEBUG.info('success')
                 return r
-            except URLGrabError, e:
+            except URLGrabError as e:
                 exception = e
                 callback = opts.failure_callback
-            except KeyboardInterrupt, e:
+            except KeyboardInterrupt as e:
                 exception = e
                 callback = opts.interrupt_callback
                 if not callback:
@@ -1124,7 +1124,7 @@ class URLGrabber(object):
         
         try:
             return self._retry(opts, retryfunc, url, filename)
-        except URLGrabError, e:
+        except URLGrabError as e:
             _TH.update(url, 0, 0, e)
             opts.exception = e
             return _run_callback(opts.failfunc, opts)
@@ -1407,7 +1407,7 @@ class PyCurlFileObject(object):
         
         try:
             self.curl_obj.perform()
-        except pycurl.error, e:
+        except pycurl.error as e:
             # XXX - break some of these out a bit more clearly
             # to other URLGrabErrors from 
             # http://curl.haxx.se/libcurl/c/libcurl-errors.html
@@ -1614,22 +1614,22 @@ class PyCurlFileObject(object):
             else:
                 fo = opener.open(req)
             hdr = fo.info()
-        except ValueError, e:
+        except ValueError as e:
             err = URLGrabError(1, _('Bad URL: %s : %s') % (self.url, e, ))
             err.url = self.url
             raise err
 
-        except RangeError, e:
+        except RangeError as e:
             err = URLGrabError(9, _('%s on %s') % (e, self.url))
             err.url = self.url
             raise err
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             new_e = URLGrabError(14, _('%s on %s') % (e, self.url))
             new_e.code = e.code
             new_e.exception = e
             new_e.url = self.url
             raise new_e
-        except IOError, e:
+        except IOError as e:
             if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
                 err = URLGrabError(12, _('Timeout on %s: %s') % (self.url, e))
                 err.url = self.url
@@ -1639,12 +1639,12 @@ class PyCurlFileObject(object):
                 err.url = self.url
                 raise err
 
-        except OSError, e:
+        except OSError as e:
             err = URLGrabError(5, _('%s on %s') % (e, self.url))
             err.url = self.url
             raise err
 
-        except HTTPException, e:
+        except HTTPException as e:
             err = URLGrabError(7, _('HTTP Exception (%s) on %s: %s') % \
                             (e.__class__.__name__, self.url, e))
             err.url = self.url
@@ -1671,7 +1671,7 @@ class PyCurlFileObject(object):
                                  (self.filename, mode))
             try:
                 self.fo = open(self.filename, mode)
-            except IOError, e:
+            except IOError as e:
                 err = URLGrabError(16, _(\
                   'error opening local file from %s, IOError: %s') % (self.url, e))
                 err.url = self.url
@@ -1690,7 +1690,7 @@ class PyCurlFileObject(object):
 
         try:            
             self._do_perform()
-        except URLGrabError, e:
+        except URLGrabError as e:
             self.fo.flush()
             self.fo.close()
             raise e
@@ -1713,7 +1713,7 @@ class PyCurlFileObject(object):
             if mod_time != -1:
                 try:
                     os.utime(self.filename, (mod_time, mod_time))
-                except OSError, e:
+                except OSError as e:
                     err = URLGrabError(16, _(\
                       'error setting timestamp on file %s from %s, OSError: %s') 
                               % (self.filename, self.url, e))
@@ -1722,7 +1722,7 @@ class PyCurlFileObject(object):
             # re open it
             try:
                 self.fo = open(self.filename, 'r')
-            except IOError, e:
+            except IOError as e:
                 err = URLGrabError(16, _(\
                   'error opening file from %s, IOError: %s') % (self.url, e))
                 err.url = self.url
@@ -1768,17 +1768,17 @@ class PyCurlFileObject(object):
             else:           readamount = min(amt, self._rbufsize)
             try:
                 new = self.fo.read(readamount)
-            except socket.error, e:
+            except socket.error as e:
                 err = URLGrabError(4, _('Socket Error on %s: %s') % (self.url, e))
                 err.url = self.url
                 raise err
 
-            except socket.timeout, e:
+            except socket.timeout as e:
                 raise URLGrabError(12, _('Timeout on %s: %s') % (self.url, e))
                 err.url = self.url
                 raise err
 
-            except IOError, e:
+            except IOError as e:
                 raise URLGrabError(4, _('IOError on %s: %s') %(self.url, e))
                 err.url = self.url
                 raise err
@@ -2161,7 +2161,8 @@ def parallel_wait(meter = 'text'):
             if ug_err is None:
                 if opts.checkfunc:
                     try: _run_callback(opts.checkfunc, opts)
-                    except URLGrabError, ug_err: pass
+                    except URLGrabError as e:
+                        ug_err = e
                 if ug_err is None:
                     continue
 
@@ -2169,7 +2170,7 @@ def parallel_wait(meter = 'text'):
             if opts.failure_callback:
                 opts.exception = ug_err
                 try: _run_callback(opts.failure_callback, opts)
-                except URLGrabError, ug_err:
+                except URLGrabError:
                     retry = 0 # no retries
             if opts.tries < retry and ug_err.errno in opts.retrycodes:
                 start(opts, opts.tries + 1) # simple retry
@@ -2254,7 +2255,7 @@ def parallel_wait(meter = 'text'):
             while host_con.get(key, 0) >= limit:
                 perform()
             start(opts, 1)
-    except IOError, e:
+    except IOError as e:
         if e.errno != 4: raise
         raise KeyboardInterrupt
 
@@ -2363,11 +2364,11 @@ def _main_test():
                                                         default_grabber.bandwidth)
 
     try: from progress import text_progress_meter
-    except ImportError, e: pass
+    except ImportError: pass
     else: kwargs['progress_obj'] = text_progress_meter()
 
     try: name = apply(urlgrab, (url, filename), kwargs)
-    except URLGrabError, e: print e
+    except URLGrabError as e: print e
     else: print 'LOCAL FILE:', name
 
 
@@ -2384,7 +2385,7 @@ def _retry_test():
         kwargs[k] = int(v)
 
     try: from progress import text_progress_meter
-    except ImportError, e: pass
+    except ImportError: pass
     else: kwargs['progress_obj'] = text_progress_meter()
 
     def cfunc(filename, hello, there='foo'):
@@ -2402,7 +2403,7 @@ def _retry_test():
         
     kwargs['checkfunc'] = (cfunc, ('hello',), {'there':'there'})
     try: name = apply(retrygrab, (url, filename), kwargs)
-    except URLGrabError, e: print e
+    except URLGrabError as e: print e
     else: print 'LOCAL FILE:', name
 
 def _file_object_test(filename=None):
diff --git a/urlgrabber/mirror.py b/urlgrabber/mirror.py
index ac78b34..75d9cae 100644
--- a/urlgrabber/mirror.py
+++ b/urlgrabber/mirror.py
@@ -400,7 +400,7 @@ class MirrorGroup:
             if DEBUG: DEBUG.info('MIRROR: trying %s -> %s', url, fullurl)
             try:
                 return func_ref( *(fullurl,), **kwargs )
-            except URLGrabError, e:
+            except URLGrabError as e:
                 if DEBUG: DEBUG.info('MIRROR: failed')
                 obj = CallbackObject()
                 obj.exception = e
@@ -422,7 +422,7 @@ class MirrorGroup:
         func = 'urlgrab'
         try:
             return self._mirror_try(func, url, kw)
-        except URLGrabError, e:
+        except URLGrabError as e:
             obj = CallbackObject(url=url, filename=filename, exception=e, **kwargs)
             return _run_callback(kwargs.get('failfunc', _do_raise), obj)
     
-- 
1.7.4.4


