TLD Linux GIT Repositories - tld-builder.git/commitdiff
- python 3.x fixes
author Marcin Krol <hawk@tld-linux.org>
Mon, 22 Mar 2021 00:29:59 +0000 (01:29 +0100)
committer Marcin Krol <hawk@tld-linux.org>
Mon, 22 Mar 2021 00:29:59 +0000 (01:29 +0100)
21 files changed:
TLD_Builder/acl.py
TLD_Builder/blacklist.py
TLD_Builder/bqueue.py
TLD_Builder/config.py
TLD_Builder/deps.py
TLD_Builder/file_sender.py
TLD_Builder/get_br.py
TLD_Builder/gpg.py
TLD_Builder/install.py
TLD_Builder/load_balancer.py
TLD_Builder/log.py
TLD_Builder/maintainer.py
TLD_Builder/poldek.py
TLD_Builder/request.py
TLD_Builder/request_fetcher.py
TLD_Builder/request_handler.py
TLD_Builder/request_handler_server
TLD_Builder/request_handler_server.py
TLD_Builder/rpm_builder.py
TLD_Builder/srpm_builder.py
TLD_Builder/util.py

diff --git a/TLD_Builder/acl.py b/TLD_Builder/acl.py
index cf69fe8fee133f764597b906af06b10d6be5ca5d..0d45634fe4e79972938da93333be616253d52af2 100644 (file)
@@ -25,7 +25,7 @@ class User:
         self.change_requester = False
 
         if p.has_option(login, "gpg_emails"):
-            self.gpg_emails = string.split(p.get(login, "gpg_emails"))
+            self.gpg_emails = p.get(login, "gpg_emails").split()
         else:
             log.panic("acl: [%s] has no gpg_emails" % login)
 
@@ -39,8 +39,8 @@ class User:
             self.change_requester = True
 
         if p.has_option(login, "privs"):
-            for p in string.split(p.get(login, "privs")):
-                l = string.split(p, ":")
+            for p in p.get(login, "privs").split():
+                l = p.split(":")
                 if len(l) == 2:
                     p+=":*"
                 if len(l) not in (2,3) or l[0] == "" or l[1] == "":
@@ -113,12 +113,12 @@ class ACL_Conf:
         p.readfp(open(path.acl_conf))
         self.users = {}
         for login in p.sections():
-            if self.users.has_key(login):
+            if login in self.users:
                 log.panic("acl: duplicate login: %s" % login)
                 continue
             user = User(p, login)
             for e in user.gpg_emails:
-                if self.users.has_key(e):
+                if e in self.users:
                     log.panic("acl: user email colision %s <-> %s" % \
                               (self.users[e].login, login))
                 else:
@@ -128,7 +128,7 @@ class ACL_Conf:
 
     def user_by_email(self, ems):
         for e in ems:
-            if self.users.has_key(e):
+            if e in self.users:
                 return self.users[e]
         return None
 
@@ -136,7 +136,7 @@ class ACL_Conf:
         return self.users[l]
 
     def user(self, l):
-        if not self.users.has_key(l):
+        if not l in self.users:
             log.panic("no such user: %s" % l)
         return self.users[l]
 
diff --git a/TLD_Builder/blacklist.py b/TLD_Builder/blacklist.py
index 40367c261e425d6bbf3e2c8e7daab423ef357817..3f552386548f348b9ce406847028ace475a31eb0 100644 (file)
@@ -39,7 +39,7 @@ class Blacklist_File:
         status.pop()
 
     def package(self, p):
-#       log.notice("blacklist check: %s (%d)" % (p, self.blacklist.has_key(p)))
+#       log.notice("blacklist check: %s (%d)" % (p, p in self.blacklist))
         if p in self.blacklist:
             return True
         return False
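
The two changes above recur throughout this commit: Python 3 drops the string module's function forms (string.split(s), string.strip(s)) in favour of str methods, and removes dict.has_key() in favour of the in operator. A minimal standalone sketch of the old and new spellings (the values below are illustrative, not taken from the builder code):

    # Python 2 spellings removed in Python 3 -> their replacements:
    #   string.split(s)  ->  s.split()
    #   d.has_key(k)     ->  k in d
    emails = "hawk@tld-linux.org other@example.org"   # illustrative value
    gpg_emails = emails.split()                       # str method replaces string.split()
    users = {"hawk@tld-linux.org": "hawk"}
    if "hawk@tld-linux.org" in users:                 # membership test replaces has_key()
        print(users["hawk@tld-linux.org"])
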
diff --git a/TLD_Builder/bqueue.py b/TLD_Builder/bqueue.py
index e8877524f6e3a373257d652e6243bf9239591574..eab166d18c1dfdb9ce6c47cf6a0f7e281a32cad9 100644 (file)
@@ -35,7 +35,7 @@ class B_Queue:
         f.flush()
         os.fsync(f.fileno())
         f.close()
-        os.chmod(tmpfname, 0644)
+        os.chmod(tmpfname, 0o0644)
         os.rename(tmpfname, fname)
 
     def dump_html(self, fname):
@@ -66,7 +66,7 @@ class B_Queue:
         f.flush()
         os.fsync(f.fileno())
         f.close()
-        os.chmod(tmpfname, 0644)
+        os.chmod(tmpfname, 0o0644)
         os.rename(tmpfname, fname)
 
     # read possibly compressed, signed queue
@@ -90,7 +90,7 @@ class B_Queue:
         self._open()
         self.signers = []
         body = self.fd.read()
-        if string.strip(body) == "":
+        if body.strip() == "":
             # empty file, don't choke
             self.requests = []
             return
@@ -144,7 +144,7 @@ class B_Queue:
         f.flush()
         os.fsync(f.fileno())
         f.close()
-        os.chmod(tmpname, 0644)
+        os.chmod(tmpname, 0o0644)
         os.rename(tmpname, name)
 
     def add(self, req):
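
Python 3 also rejects the bare leading-zero octal notation used for file modes, so 0644 becomes 0o644 (written 0o0644 in this commit; both spell the same value). A small sketch of the pattern, with a hypothetical temporary file name:

    import os
    tmpfname = "/tmp/queue.tmp"      # hypothetical path, for illustration only
    open(tmpfname, "w").close()
    os.chmod(tmpfname, 0o644)        # bare 0644 is a SyntaxError in Python 3
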
diff --git a/TLD_Builder/config.py b/TLD_Builder/config.py
index 3bede1b8718b0e7276512fe3654ee4bfd7cf736b..11ea547a764d038ce70d7e46ef1c9a2f401adfd8 100644 (file)
@@ -44,11 +44,11 @@ class Builder_Conf:
         p = ConfigParser.ConfigParser()
         def get(o, d = None, sec=None):
             if p.has_option(sec, o):
-                return string.strip(p.get(sec, o))
+                return p.get(sec, o).strip()
             elif p.has_option(builder, o):
-                return string.strip(p.get(builder, o))
+                return p.get(builder, o).strip()
             elif p.has_option("all", o):
-                return string.strip(p.get("all", o))
+                return p.get("all", o).strip()
             elif d != None:
                 return d
             else:
@@ -62,7 +62,7 @@ class Builder_Conf:
         if p.has_option("all", "syslog"):
             f = p.get("all", "syslog")
             if f != "":
-                if syslog_facilities.has_key(f):
+                if f in syslog_facilities:
                     log.open_syslog("builder", syslog_facilities[f])
                 else:
                     log.panic("no such syslog facility: %s" % f)
@@ -71,8 +71,8 @@ class Builder_Conf:
             builder = get("src_builder", builder)
         self.builder = builder
 
-        self.binary_builders = string.split(get("binary_builders"))
-        self.src_builder = string.strip(get("src_builder", ""))
+        self.binary_builders = get("binary_builders").split()
+        self.src_builder = get("src_builder", "").strip()
         self.max_keep_time = int(get("max_keep_time", 168))*60*60
         self.bot_email = get("bot_email", "")
         self.control_url = get("control_url")
@@ -116,7 +116,7 @@ class Builder_Conf:
 
         f = get("syslog", "")
         if f != "":
-            if syslog_facilities.has_key(f):
+            if f in syslog_facilities:
                 log.open_syslog(self.builder, syslog_facilities[f])
             else:
                 log.panic("no such syslog facility: %s" % f)
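
The nested get() helper above resolves an option by falling back from the requested section through the builder's own section to the shared "all" section, and finally to a supplied default. A rough sketch of that lookup order using Python 3's configparser (the section names, option names and values here are made up):

    import configparser

    p = configparser.ConfigParser()
    p.read_string(
        "[all]\n"
        "bot_email = builder@example.org\n"
        "[src]\n"
        "max_keep_time = 24\n"
    )

    builder = "src"                       # hypothetical builder name

    def get(option, default=None, sec=builder):
        # try the requested section, then this builder's section, then [all]
        for section in (sec, builder, "all"):
            if section and p.has_option(section, option):
                return p.get(section, option).strip()
        return default

    print(get("max_keep_time"))           # "24", from the builder section
    print(get("bot_email"))               # falls back to [all]
    print(get("job_slots", default="1"))  # falls back to the supplied default
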
diff --git a/TLD_Builder/deps.py b/TLD_Builder/deps.py
index 404b25cac7ed3a2edb4c47d0293cb35edfd91049..d8cc9c4cc6050d3f69983352cfa07d4043ca7ee4 100644 (file)
@@ -26,9 +26,9 @@ def compute_deps():
         while 1:
             l = f.readline()
             if l == "": break
-            l = string.strip(l)
+            l = l.strip()
             if l == "@":
-                cur_pkg = string.strip(f.readline())
+                cur_pkg = f.readline().strip()
                 rpm_req[cur_pkg] = []
                 continue
             rpm_req[cur_pkg].append(l)
@@ -38,7 +38,7 @@ def compute_deps():
         msg("done\n")
 
     def add_provides(pkg, what):
-        if rpm_prov.has_key(what):
+        if what in rpm_prov:
             msg("[%s: %s, %s] " % (what, rpm_prov[what], pkg))
         else:
             rpm_prov[what] = pkg
@@ -50,9 +50,9 @@ def compute_deps():
         while 1:
             l = f.readline()
             if l == "": break
-            l = string.strip(l)
+            l = l.strip()
             if l == "@":
-                cur_pkg = string.strip(f.readline())
+                cur_pkg = f.readline().strip()
                 continue
             add_provides(cur_pkg, l)
             if l[0] == '/':
@@ -68,11 +68,11 @@ def compute_deps():
         while 1:
             l = f.readline()
             if l == "": break
-            l = string.strip(l)
+            l = l.strip()
             if l == "@":
-                cur_pkg = string.strip(f.readline())
+                cur_pkg = f.readline().strip()
                 continue
-            if req_files.has_key(l):
+            if l in req_files:
                 add_provides(cur_pkg, l)
         f.close()
         msg("done\n")
@@ -83,7 +83,7 @@ def compute_deps():
             pkg_reqs = []
             for req in reqs:
                 if req[0:7] == "rpmlib(": continue
-                if rpm_prov.has_key(req):
+                if req in rpm_prov:
                     if rpm_prov[req] not in pkg_reqs:
                         pkg_reqs.append(rpm_prov[req])
                 else:
@@ -109,13 +109,13 @@ def remove_list(req, need):
     """
     need_m = {}
     def close(n):
-        if need_m.has_key(n): return
+        if n in need_m: return
         need_m[n] = 1
-        if not req.has_key(n): return
+        if not n in req: return
         for k in req[n]:
             close(k)
     for n in need: close(n)
     rm = []
     for p in req.keys():
-        if not need_m.has_key(p): rm.append(p)
+        if not p in need_m: rm.append(p)
     return rm
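
remove_list() above walks the requirement graph: starting from the packages in need it marks everything transitively required, and whatever is left in req can be removed. A compact restatement of the same logic using a set (the package names are invented for illustration):

    def remove_list(req, need):
        # req maps package -> list of packages it requires,
        # need lists the packages that must stay installed
        keep = set()
        def close(pkg):
            if pkg in keep:
                return
            keep.add(pkg)
            for dep in req.get(pkg, []):
                close(dep)
        for pkg in need:
            close(pkg)
        return [p for p in req if p not in keep]

    req = {"app": ["libfoo"], "libfoo": [], "orphan": []}
    print(remove_list(req, ["app"]))   # ['orphan']
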
diff --git a/TLD_Builder/file_sender.py b/TLD_Builder/file_sender.py
index d09affb0e5d2adaba4558bdb663af15eca945dc7..77a9848bc2d97c214d0764e261eddb9ecfc48e01 100644 (file)
@@ -30,7 +30,7 @@ def read_name_val(file):
             return r
         m = rx.search(l)
         if m:
-            r[m.group(1)] = string.strip(m.group(2))
+            r[m.group(1)] = m.group(2).strip()
         else:
             break
     f.close()
@@ -61,7 +61,7 @@ def rsync_file(src, target, host):
     p = open(path.rsync_password_file, "r")
     password = ""
     for l in p:
-        l = string.split(l)
+        l = l.split()
         if len(l) >= 2 and l[0] == host:
             password = l[1]
     p.close()
@@ -133,8 +133,8 @@ def maybe_flush_queue(dir):
     retry_delay = 0
     try:
         f = open(dir + "/retry-at")
-        last_retry = int(string.strip(f.readline()))
-        retry_delay = int(string.strip(f.readline()))
+        last_retry = int(f.readline().strip())
+        retry_delay = int(f.readline().strip())
         f.close()
         if last_retry + retry_delay > time.time():
             return
@@ -164,7 +164,7 @@ def flush_queue(dir):
         if d != None: q.append(d)
     def mycmp(x, y):
         rc = cmp(x['Time'], y['Time'])
-        if rc == 0 and x.has_key('Type') and y.has_key('Type'):
+        if rc == 0 and 'Type' in x and 'Type' in y:
             return cmp(x['Type'], y['Type'])
         else:
             return rc
@@ -193,7 +193,7 @@ def flush_queue(dir):
         for src, msg in problems.items():
             pr = pr + "[src: %s]\n\n%s\n" % (src, msg)
         for d in remaining:
-            if d.has_key('Requester'):
+            if 'Requester' in d:
                 emails[d['Requester']] = 1
         e = emails.keys()
         m = mailer.Message()
diff --git a/TLD_Builder/get_br.py b/TLD_Builder/get_br.py
index 933548c172d60c81a6c5034172fefcef0fd9f58b..867e233af1cf1383447bd7256139ca0a18e61df2 100644 (file)
@@ -13,10 +13,10 @@ def get_build_requires(spec, bconds_with, bconds_without):
     def expand_conds(l):
         def expand_one(m):
             if m.group(1) == "?":
-                if macros.has_key(m.group(2)):
+                if m.group(2) in macros:
                     return m.group(3)
             else:
-                if not macros.has_key(m.group(2)):
+                if not m.group(2) in macros:
                     return m.group(3)
             return ""
 
@@ -29,8 +29,8 @@ def get_build_requires(spec, bconds_with, bconds_without):
     macro_rx = re.compile(r"%\{([a-zA-Z0-9_+]+)\}")
     def expand_macros(l):
         def expand_one(m):
-            if macros.has_key(m.group(1)):
-                return string.strip(macros[m.group(1)])
+            if m.group(1) in macros:
+                return macros[m.group(1)].strip()
             else:
                 return m.group(0) # don't change
 
@@ -63,7 +63,7 @@ def get_build_requires(spec, bconds_with, bconds_without):
 
     f = open(spec)
     for l in f:
-        l = string.strip(l)
+        l = l.strip()
         if l == "%changelog": break
 
         # %bcond_with..
@@ -71,10 +71,10 @@ def get_build_requires(spec, bconds_with, bconds_without):
         if m:
             bcond = m.group(2)
             if m.group(1) == "with":
-                if macros.has_key("_with_%s" % bcond):
+                if "_with_%s" % bcond in macros:
                     macros["with_%s" % bcond] = 1
             else:
-                if not macros.has_key("_without_%s" % bcond):
+                if not "_without_%s" % bcond in macros:
                     macros["with_%s" % bcond] = 1
             continue
 
diff --git a/TLD_Builder/gpg.py b/TLD_Builder/gpg.py
index 055c39083d286eb95263b4c7c271bcb5b8753809..edb8dff81e3ad76839764813253072d09e5e7b7e 100644 (file)
@@ -20,7 +20,7 @@ def get_keys(buf):
 
     if not os.path.isfile('/usr/bin/gpg'):
         log.error("missing gnupg binary: /usr/bin/gpg")
-        raise OSError, 'Missing gnupg binary'
+        raise OSError('Missing gnupg binary')
 
     d_stdout = None
     d_stderr = None
@@ -52,7 +52,7 @@ def verify_sig(buf):
 
     if not os.path.isfile('/usr/bin/gpg'):
         log.error("missing gnupg binary: /usr/bin/gpg")
-        raise OSError, 'Missing gnupg binary'
+        raise OSError('Missing gnupg binary')
 
     d_stdout = None
     d_stderr = None
@@ -77,7 +77,7 @@ def verify_sig(buf):
 def sign(buf):
     if not os.path.isfile('/usr/bin/gpg'):
         log.error("missing gnupg binary: /usr/bin/gpg")
-        raise OSError, 'Missing gnupg binary'
+        raise OSError('Missing gnupg binary')
 
     d_stdout = None
     d_stderr = None
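
gpg.py, log.py, request_fetcher.py, srpm_builder.py and util.py all switch from the Python 2 statement form raise ExceptionClass, "message" to the call form, which is the only spelling Python 3 accepts. A tiny runnable illustration:

    try:
        # Python 2 also allowed: raise OSError, 'Missing gnupg binary'
        raise OSError('Missing gnupg binary')
    except OSError as e:
        print(e)
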
diff --git a/TLD_Builder/install.py b/TLD_Builder/install.py
index e57ad2a7fe41e1e6789729770d9add2760185f52..21f80a44ef793d3aa1eed06bd27d251323b23e3a 100644 (file)
@@ -183,7 +183,7 @@ def install_br(r, b):
     nbr = ""
     for bre in needed.keys():
         nbr = nbr + " " + re.escape(bre)
-    br = string.strip(nbr)
+    br = nbr.strip()
     b.log_line("updating poldek cache...")
     chroot.run("poldek --up --upa", user = "root", logfile = b.logfile)
     # check conflicts in BRed packages
@@ -215,7 +215,7 @@ def install_br(r, b):
     nbr = ""
     for bre in needed.keys():
         nbr = nbr + " " + re.escape(bre)
-    br = string.strip(nbr)
+    br = nbr.strip()
 
     b.log_line("installing BR: %s" % br)
     res = chroot.run("set -x; poldek --noask --caplookup -Q -v %s --upgrade %s" % (b.ignores(), br),
diff --git a/TLD_Builder/load_balancer.py b/TLD_Builder/load_balancer.py
index a530f816f24b9ace40850ef833e3cb67e3670bf7..019c1844fb786c69e8cb94e45a9a366149ffdea4 100644 (file)
@@ -31,8 +31,8 @@ def builders_order():
 
     for l in f:
         line_no += 1
-        b = string.strip(l)
-        if bs.has_key(b):
+        b = l.strip()
+        if b in bs:
             bs[b] = line_no
         else:
             log.alert("found strange lock in got-lock: %s" % b)
diff --git a/TLD_Builder/log.py b/TLD_Builder/log.py
index 54a6c672e21a8c32b01085f528b36e22623f839e..f4529fac83f683070fae70bc0d272e4fa96e0d20 100644 (file)
@@ -29,7 +29,7 @@ def log(p, s):
 
 def panic(s):
     log(syslog.LOG_ALERT, "PANIC: %s" % s)
-    raise Exception, "PANIC: %s" % str(s)
+    raise Exception("PANIC: %s" % str(s))
 
 def alert(s):
     log(syslog.LOG_ALERT, "alert: %s" % s)
diff --git a/TLD_Builder/maintainer.py b/TLD_Builder/maintainer.py
index 5ba3deb8669b3055aecc151706f70edcd78cbfe6..6b1b45a4d887ad4b653087a4dd64bbce4cbf389e 100644 (file)
@@ -26,7 +26,7 @@ def send_rpmqa():
     log = tmp + config.rpmqa_filename
     open(log, 'a').write("Query done at: %s\n" % datetime.datetime.now().isoformat(' '))
     chroot.run("rpm -qa|sort", logfile=log)
-    os.chmod(log,0644)
+    os.chmod(log,0o0644)
     ftp.init(rpmqa=True)
     ftp.add(log)
     ftp.flush()
diff --git a/TLD_Builder/poldek.py b/TLD_Builder/poldek.py
index df9df9ba6bddb2925ba7fc8e0c3385d3f441522e..e8fe7402b129ea918711ca4ed6863035d72b8aa5 100644 (file)
@@ -60,7 +60,7 @@ def get_poldek_requires():
             if cur_pkg:
                 pkg_reqs[cur_pkg] = cur_pkg_reqs
             cur_pkg = m.groups(1)
-            if pkg_reqs.has_key(cur_pkg):
+            if cur_pkg in pkg_reqs:
                 cur_pkg = None
                 cur_pkg_reqs = None
             else:
@@ -69,7 +69,7 @@ def get_poldek_requires():
         m = req_rx.match(l)
         if m:
             reqs = []
-            for x in string.split(m.group(1)):
+            for x in m.group(1).split():
                 if x in ["RPMLIB_CAP", "NOT", "FOUND", "UNMATCHED"]: continue
                 m = pkg_name_rx.match(x)
                 if m:
diff --git a/TLD_Builder/request.py b/TLD_Builder/request.py
index 4ce68e33093d5f20ee430487c44eb0840b415c5e..2e1e7c3d094e951bfe44401bd331e9bbfb68ebb1 100644 (file)
@@ -59,7 +59,7 @@ def iso8601(ts, timezone='UTC'):
     return dt.isoformat()
 
 def is_blank(e):
-    return e.nodeType == Element.TEXT_NODE and string.strip(e.nodeValue) == ""
+    return e.nodeType == Element.TEXT_NODE and e.nodeValue.strip() == ""
 
 class Group:
     def __init__(self, e):
@@ -72,7 +72,7 @@ class Group:
         self.requester = ""
         self.max_jobs = 0
         self.requester_email = ""
-        self.flags = string.split(attr(e, "flags", ""))
+        self.flags = attr(e, "flags", "").split()
         for c in e.childNodes:
             if is_blank(c): continue
 
@@ -97,7 +97,7 @@ class Group:
             deps = []
             m[b.b_id] = b
             for dep in b.depends_on:
-                if m.has_key(dep):
+                if dep in m:
                     # avoid self-deps
                     if id(m[dep]) != id(b):
                         deps.append(m[dep])
@@ -206,7 +206,7 @@ class Batch:
         self.skip = []
         self.gb_id = ""
         self.b_id = attr(e, "id")
-        self.depends_on = string.split(attr(e, "depends-on"))
+        self.depends_on = attr(e, "depends-on").split()
         self.upgraded = True
 
         self.parse_xml(e)
@@ -233,7 +233,7 @@ class Batch:
             elif c.nodeName == "command":
                 self.spec = "COMMAND"
                 self.command = text(c).strip()
-                self.command_flags = string.split(attr(c, "flags", ""))
+                self.command_flags = attr(c, "flags", "").split()
             elif c.nodeName == "info":
                 self.info = text(c)
             elif c.nodeName == "kernel":
@@ -441,7 +441,7 @@ class Batch:
         ignores = []
 
         # add php version based ignores
-        if self.defines.has_key('php_suffix'):
+        if 'php_suffix' in self.defines:
             # current version if -D php_suffix is present
             php_version = php_name_to_ver(self.defines['php_suffix'])
         else:
@@ -501,7 +501,7 @@ class Batch:
            <spec>%s</spec>
            <branch>%s</branch>
            <info>%s</info>\n""" % (self.b_id,
-                 string.join(list(map(lambda (b): b.b_id, self.depends_on))),
+                 string.join(list(map(lambda b: b.b_id, self.depends_on))),
                  escape(self.src_rpm),
                  escape(' '.join(self.command_flags)), escape(self.command),
                  escape(self.spec), escape(self.branch), escape(self.info)))
@@ -516,7 +516,7 @@ class Batch:
         if self.defines:
             f.write("           %s\n" % self.defines_xml())
         for b in self.builders:
-            if self.builders_status_buildtime.has_key(b):
+            if b in self.builders_status_buildtime:
                 t = self.builders_status_buildtime[b]
             else:
                 t = "0"
@@ -578,7 +578,7 @@ class Notification:
         for r in q.requests:
             if r.kind == "group":
                 for b in r.batches:
-                    if self.batches.has_key(b.b_id):
+                    if b.b_id in self.batches:
                         b.builders_status[self.builder] = self.batches[b.b_id]
                         b.builders_status_time[self.builder] = time.time()
                         b.builders_status_buildtime[self.builder] = "0" #self.batches_buildtime[b.b_id]
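
The map() call above also drops the parenthesised lambda parameter: lambda (b): b.b_id relied on tuple parameter unpacking, which PEP 3113 removed in Python 3, so a plain lambda b: b.b_id is required (the same fix appears in request_handler.py below). A tiny illustration with made-up batch objects:

    class Batch:
        def __init__(self, b_id):
            self.b_id = b_id

    depends_on = [Batch("101"), Batch("102")]
    # Python 2 accepted: map(lambda (b): b.b_id, depends_on)
    ids = list(map(lambda b: b.b_id, depends_on))
    print(" ".join(ids))   # 101 102
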
diff --git a/TLD_Builder/request_fetcher.py b/TLD_Builder/request_fetcher.py
index 6122612142a2b31bac411ee22b14e68da0e2d57c..609ec711bcf644425bc247a671e6df4a45e44339 100644 (file)
@@ -29,13 +29,13 @@ from config import config, init_conf
 last_count = 0
 
 def alarmalarm(signum, frame):
-    raise IOError, 'TCP connection hung'
+    raise IOError('TCP connection hung')
 
 def has_new(control_url):
     global last_count
     cnt_f = open(path.last_req_no_file)
     try:
-        last_count = int(string.strip(cnt_f.readline()))
+        last_count = int(cnt_f.readline().strip())
     except ValueError as e:
         last_count = 0
 
@@ -48,7 +48,7 @@ def has_new(control_url):
         headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
         req = urllib2.Request(url=control_url + "/max_req_no", headers=headers)
         f = urllib2.urlopen(req)
-        count = int(string.strip(f.readline()))
+        count = int(f.readline().strip())
         signal.alarm(0)
     except Exception as e:
         signal.alarm(0)
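
has_new() guards the fetch with a SIGALRM timer: alarmalarm() raises IOError if the transfer hangs, and signal.alarm(0) cancels the timer once the read completes. A minimal standalone sketch of that timeout pattern on a Unix system (the sleep stands in for a hung network read):

    import signal, time

    def alarmalarm(signum, frame):
        raise IOError('TCP connection hung')

    signal.signal(signal.SIGALRM, alarmalarm)
    signal.alarm(2)          # fire SIGALRM after 2 seconds
    try:
        time.sleep(10)       # stand-in for a blocking read
    except IOError as e:
        print(e)
    finally:
        signal.alarm(0)      # always cancel the timer
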
diff --git a/TLD_Builder/request_handler.py b/TLD_Builder/request_handler.py
index a002a496ec15acfe758870501acdba81519e8e09..e3c5ca1f6df7912510a1d04cecad42d65d5d777e 100644 (file)
@@ -159,7 +159,7 @@ def handle_notification(r, user):
     q = B_Queue(path.req_queue_file)
     q.lock(0)
     q.read()
-    not_fin = list(filter(lambda (r): not r.is_done(), q.requests))
+    not_fin = list(filter(lambda r: not r.is_done(), q.requests))
     r.apply_to(q)
     for r in not_fin:
         if r.is_done():
diff --git a/TLD_Builder/request_handler_server b/TLD_Builder/request_handler_server
index 0c1f2658a5dbbee0e3276fe8ca8a644224e18382..ed50abbb0ccc23daaec8d8d9f219d23ec86d659a 100755 (executable)
@@ -27,8 +27,8 @@ if [ "$daemon" = 1 ]; then
        echo "Starting request_handler_server(s)"
        rm -f $sock
        cd $(dirname $sock)
-       tmux -S $sock new-session -d -s "request_handler_server" -n "http" "python request_handler_server.py" && \
-               exec tmux -S $sock new-window -d -t "request_handler_server" -n "https" "python request_handler_server.py ssl"
+       tmux -S $sock new-session -d -s "request_handler_server" -n "http" "python3 request_handler_server.py" && \
+               exec tmux -S $sock new-window -d -t "request_handler_server" -n "https" "python3 request_handler_server.py ssl"
        exit 1
 fi
 
diff --git a/TLD_Builder/request_handler_server.py b/TLD_Builder/request_handler_server.py
index 5ffa1aa7ca8069c36a6b2d797132efe8d4d9f132..8f9200fe6586f3f17e10dd76a473ee289cd3b907 100644 (file)
@@ -11,7 +11,7 @@ import traceback
 import os
 from config import config, init_conf
 
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from http.server import BaseHTTPRequestHandler, HTTPServer
 
 import request_handler
 import path
@@ -58,7 +58,7 @@ def write_css():
     if os.path.exists(css_file) and os.stat(css_src).st_mtime < os.stat(css_file).st_mtime:
         return
 
-    old_umask = os.umask(0022)
+    old_umask = os.umask(0o0022)
     r = open(css_src, 'r')
     f = open(css_file, "w")
     f.write(r.read())
@@ -73,7 +73,7 @@ def write_js():
     if os.path.exists(js_file) and os.stat(js_src).st_mtime < os.stat(js_file).st_mtime:
         return
 
-    old_umask = os.umask(0022)
+    old_umask = os.umask(0o0022)
     r = open(js_src, 'r')
     f = open(js_file, 'w')
     f.write(r.read())
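
The import change above reflects the Python 3 reorganisation of the HTTP modules: the classes from BaseHTTPServer now live in http.server. A minimal sketch of a handler built on the renamed module (the port and response text are invented):

    from http.server import BaseHTTPRequestHandler, HTTPServer

    class Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)
            self.send_header("Content-Type", "text/plain; charset=utf-8")
            self.end_headers()
            self.wfile.write(b"request handler alive\n")

    if __name__ == "__main__":
        HTTPServer(("127.0.0.1", 8080), Handler).serve_forever()
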
diff --git a/TLD_Builder/rpm_builder.py b/TLD_Builder/rpm_builder.py
index 74c7faa776255abed312b0673c72a23dd23585c3..7da282c1d6098bf31450c40a793ae75d14bec5bd 100644 (file)
@@ -338,7 +338,7 @@ def check_load():
     do_exit = 0
     try:
         f = open("/proc/loadavg")
-        if float(string.split(f.readline())[2]) > config.max_load:
+        if float(f.readline().split()[2]) > config.max_load:
             do_exit = 1
     except:
         pass
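
check_load() reads /proc/loadavg, whose first three whitespace-separated fields are the 1-, 5- and 15-minute load averages; index [2] is therefore the 15-minute value compared against config.max_load. A small sketch of that parse (the threshold value is made up):

    MAX_LOAD = 10.0                      # stand-in for config.max_load
    with open("/proc/loadavg") as f:
        load_15min = float(f.readline().split()[2])
    if load_15min > MAX_LOAD:
        print("load too high, exiting")
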
diff --git a/TLD_Builder/srpm_builder.py b/TLD_Builder/srpm_builder.py
index d95dede5c914819c46c54c7d1a494adb1ae249e0..9e45e55e30614d8a35d190a87c3a3dfda27cc875 100644 (file)
@@ -36,7 +36,7 @@ from config import config, init_conf
 def pick_request(q):
     def mycmp(r1, r2):
         if r1.kind != 'group' or r2.kind != 'group':
-            raise Exception, "non-group requests"
+            raise Exception("non-group requests")
         pri_diff = cmp(r1.priority, r2.priority)
         if pri_diff == 0:
             return cmp(r1.time, r2.time)
@@ -55,7 +55,7 @@ def store_binary_request(r):
         return
     r.batches = new_b
     # store new queue and max_req_no for binary builders
-    num = int(string.strip(open(path.max_req_no_file, "r").read())) + 1
+    num = int(open(path.max_req_no_file, "r").read().strip()) + 1
 
     r.no = num
     q = B_Queue(path.req_queue_file)
@@ -75,7 +75,7 @@ def store_binary_request(r):
     cnt_f.flush()
     os.fsync(cnt_f.fileno())
     cnt_f.close()
-    os.chmod(tmpfname, 0644)
+    os.chmod(tmpfname, 0o0644)
     os.rename(tmpfname, path.max_req_no_file)
 
 def transfer_file(r, b):
@@ -83,7 +83,7 @@ def transfer_file(r, b):
     f = b.src_rpm_file
     # export files from chroot
     chroot.cp(f, outfile = local, rm = True)
-    os.chmod(local, 0644)
+    os.chmod(local, 0o0644)
     ftp.add(local)
 
     if config.gen_upinfo and 'test-build' not in r.flags:
@@ -146,7 +146,7 @@ def build_srpm(r, b):
 
 def handle_request(r):
     os.mkdir(path.srpms_dir + '/' + r.id)
-    os.chmod(path.srpms_dir + '/' + r.id, 0755)
+    os.chmod(path.srpms_dir + '/' + r.id, 0o0755)
     ftp.init(r)
     buildlogs.init(r)
     build.build_all(r, build_srpm)
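
store_binary_request() bumps max_req_no with the same atomic-update idiom used for the queue files earlier in this commit: write to a temporary file, flush and fsync, chmod with the new 0o prefix, then rename over the real file so readers never see a partial write. A generic sketch of that idiom (the helper name and paths are mine, not the builder's):

    import os

    def atomic_write(fname, data):
        tmpfname = fname + ".tmp"
        f = open(tmpfname, "w")
        f.write(data)
        f.flush()
        os.fsync(f.fileno())             # make sure the bytes hit the disk
        f.close()
        os.chmod(tmpfname, 0o644)
        os.rename(tmpfname, fname)       # atomic replace on the same filesystem

    atomic_write("/tmp/max_req_no", "42\n")
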
diff --git a/TLD_Builder/util.py b/TLD_Builder/util.py
index b4c859944cd447ff4a201336e28e7ae98717f3f3..67bd9a1be30d230f7be2c0bdf4d9a32b4a1c3ba3 100644 (file)
@@ -11,10 +11,10 @@ def uuid_python():
 
 def uuid_external():
     f = os.popen("uuidgen 2>&1")
-    u = string.strip(f.read())
+    u = f.read().strip()
     f.close()
     if len(u) != 36:
-        raise Exception, "uuid: fatal, cannot generate uuid: %s" % u
+        raise Exception("uuid: fatal, cannot generate uuid: %s" % u)
     return u
 
 # uuid module available in python >= 2.5
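
uuid_external() shells out to uuidgen; the trailing comment hints that the stdlib uuid module is preferred when importable. A rough sketch of such a preference-with-fallback, assuming that is how the dispatch works (the builder's actual dispatch code is not shown in this diff, and gen_uuid is a made-up name):

    import os

    def gen_uuid():
        try:
            import uuid
            return str(uuid.uuid4())                        # preferred: stdlib module
        except ImportError:
            u = os.popen("uuidgen 2>&1").read().strip()     # fallback: external tool
            if len(u) != 36:
                raise Exception("uuid: fatal, cannot generate uuid: %s" % u)
            return u
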