self.change_requester = False
if p.has_option(login, "gpg_emails"):
- self.gpg_emails = string.split(p.get(login, "gpg_emails"))
+ self.gpg_emails = p.get(login, "gpg_emails").split()
else:
log.panic("acl: [%s] has no gpg_emails" % login)
self.change_requester = True
if p.has_option(login, "privs"):
- for p in string.split(p.get(login, "privs")):
- l = string.split(p, ":")
+ for p in p.get(login, "privs").split():
+ l = p.split(":")
if len(l) == 2:
p+=":*"
if len(l) not in (2,3) or l[0] == "" or l[1] == "":
p.readfp(open(path.acl_conf))
self.users = {}
for login in p.sections():
- if self.users.has_key(login):
+ if login in self.users:
log.panic("acl: duplicate login: %s" % login)
continue
user = User(p, login)
for e in user.gpg_emails:
- if self.users.has_key(e):
+ if e in self.users:
log.panic("acl: user email colision %s <-> %s" % \
(self.users[e].login, login))
else:
def user_by_email(self, ems):
for e in ems:
- if self.users.has_key(e):
+ if e in self.users:
return self.users[e]
return None
return self.users[l]
def user(self, l):
- if not self.users.has_key(l):
+ if l not in self.users:
log.panic("no such user: %s" % l)
return self.users[l]
status.pop()
def package(self, p):
-# log.notice("blacklist check: %s (%d)" % (p, self.blacklist.has_key(p)))
+# log.notice("blacklist check: %s (%d)" % (p, p in self.blacklist))
if p in self.blacklist:
return True
return False
f.flush()
os.fsync(f.fileno())
f.close()
- os.chmod(tmpfname, 0644)
+ os.chmod(tmpfname, 0o0644)
os.rename(tmpfname, fname)
def dump_html(self, fname):
f.flush()
os.fsync(f.fileno())
f.close()
- os.chmod(tmpfname, 0644)
+ os.chmod(tmpfname, 0o0644)
os.rename(tmpfname, fname)
# read possibly compressed, signed queue
self._open()
self.signers = []
body = self.fd.read()
- if string.strip(body) == "":
+ if body.strip() == "":
# empty file, don't choke
self.requests = []
return
f.flush()
os.fsync(f.fileno())
f.close()
- os.chmod(tmpname, 0644)
+ os.chmod(tmpname, 0o0644)
os.rename(tmpname, name)
def add(self, req):
p = ConfigParser.ConfigParser()
def get(o, d = None, sec=None):
if p.has_option(sec, o):
- return string.strip(p.get(sec, o))
+ return p.get(sec, o).strip()
elif p.has_option(builder, o):
- return string.strip(p.get(builder, o))
+ return p.get(builder, o).strip()
elif p.has_option("all", o):
- return string.strip(p.get("all", o))
+ return p.get("all", o).strip()
elif d != None:
return d
else:
if p.has_option("all", "syslog"):
f = p.get("all", "syslog")
if f != "":
- if syslog_facilities.has_key(f):
+ if f in syslog_facilities:
log.open_syslog("builder", syslog_facilities[f])
else:
log.panic("no such syslog facility: %s" % f)
builder = get("src_builder", builder)
self.builder = builder
- self.binary_builders = string.split(get("binary_builders"))
- self.src_builder = string.strip(get("src_builder", ""))
+ self.binary_builders = get("binary_builders").split()
+ self.src_builder = get("src_builder", "").strip()
self.max_keep_time = int(get("max_keep_time", 168))*60*60
self.bot_email = get("bot_email", "")
self.control_url = get("control_url")
f = get("syslog", "")
if f != "":
- if syslog_facilities.has_key(f):
+ if f in syslog_facilities:
log.open_syslog(self.builder, syslog_facilities[f])
else:
log.panic("no such syslog facility: %s" % f)
while 1:
l = f.readline()
if l == "": break
- l = string.strip(l)
+ l = l.strip()
if l == "@":
- cur_pkg = string.strip(f.readline())
+ cur_pkg = f.readline().strip()
rpm_req[cur_pkg] = []
continue
rpm_req[cur_pkg].append(l)
msg("done\n")
def add_provides(pkg, what):
- if rpm_prov.has_key(what):
+ if what in rpm_prov:
msg("[%s: %s, %s] " % (what, rpm_prov[what], pkg))
else:
rpm_prov[what] = pkg
while 1:
l = f.readline()
if l == "": break
- l = string.strip(l)
+ l = l.strip()
if l == "@":
- cur_pkg = string.strip(f.readline())
+ cur_pkg = f.readline().strip()
continue
add_provides(cur_pkg, l)
if l[0] == '/':
while 1:
l = f.readline()
if l == "": break
- l = string.strip(l)
+ l = l.strip()
if l == "@":
- cur_pkg = string.strip(f.readline())
+ cur_pkg = f.readline().strip()
continue
- if req_files.has_key(l):
+ if l in req_files:
add_provides(cur_pkg, l)
f.close()
msg("done\n")
pkg_reqs = []
for req in reqs:
if req[0:7] == "rpmlib(": continue
- if rpm_prov.has_key(req):
+ if req in rpm_prov:
if rpm_prov[req] not in pkg_reqs:
pkg_reqs.append(rpm_prov[req])
else:
"""
need_m = {}
def close(n):
- if need_m.has_key(n): return
+ if n in need_m: return
need_m[n] = 1
- if not req.has_key(n): return
+ if n not in req: return
for k in req[n]:
close(k)
for n in need: close(n)
rm = []
for p in req.keys():
- if not need_m.has_key(p): rm.append(p)
+ if p not in need_m: rm.append(p)
return rm
return r
m = rx.search(l)
if m:
- r[m.group(1)] = string.strip(m.group(2))
+ r[m.group(1)] = m.group(2).strip()
else:
break
f.close()
p = open(path.rsync_password_file, "r")
password = ""
for l in p:
- l = string.split(l)
+ l = l.split()
if len(l) >= 2 and l[0] == host:
password = l[1]
p.close()
retry_delay = 0
try:
f = open(dir + "/retry-at")
- last_retry = int(string.strip(f.readline()))
- retry_delay = int(string.strip(f.readline()))
+ last_retry = int(f.readline().strip())
+ retry_delay = int(f.readline().strip())
f.close()
if last_retry + retry_delay > time.time():
return
if d != None: q.append(d)
def mycmp(x, y):
rc = cmp(x['Time'], y['Time'])
- if rc == 0 and x.has_key('Type') and y.has_key('Type'):
+ if rc == 0 and 'Type' in x and 'Type' in y:
return cmp(x['Type'], y['Type'])
else:
return rc
for src, msg in problems.items():
pr = pr + "[src: %s]\n\n%s\n" % (src, msg)
for d in remaining:
- if d.has_key('Requester'):
+ if 'Requester' in d:
emails[d['Requester']] = 1
e = emails.keys()
m = mailer.Message()
def expand_conds(l):
def expand_one(m):
if m.group(1) == "?":
- if macros.has_key(m.group(2)):
+ if m.group(2) in macros:
return m.group(3)
else:
- if not macros.has_key(m.group(2)):
+ if m.group(2) not in macros:
return m.group(3)
return ""
macro_rx = re.compile(r"%\{([a-zA-Z0-9_+]+)\}")
def expand_macros(l):
def expand_one(m):
- if macros.has_key(m.group(1)):
- return string.strip(macros[m.group(1)])
+ if m.group(1) in macros:
+ return macros[m.group(1)].strip()
else:
return m.group(0) # don't change
f = open(spec)
for l in f:
- l = string.strip(l)
+ l = l.strip()
if l == "%changelog": break
# %bcond_with..
if m:
bcond = m.group(2)
if m.group(1) == "with":
- if macros.has_key("_with_%s" % bcond):
+ if "_with_%s" % bcond in macros:
macros["with_%s" % bcond] = 1
else:
- if not macros.has_key("_without_%s" % bcond):
+ if "_without_%s" % bcond not in macros:
macros["with_%s" % bcond] = 1
continue
if not os.path.isfile('/usr/bin/gpg'):
log.error("missing gnupg binary: /usr/bin/gpg")
- raise OSError, 'Missing gnupg binary'
+ raise OSError('Missing gnupg binary')
d_stdout = None
d_stderr = None
if not os.path.isfile('/usr/bin/gpg'):
log.error("missing gnupg binary: /usr/bin/gpg")
- raise OSError, 'Missing gnupg binary'
+ raise OSError('Missing gnupg binary')
d_stdout = None
d_stderr = None
def sign(buf):
if not os.path.isfile('/usr/bin/gpg'):
log.error("missing gnupg binary: /usr/bin/gpg")
- raise OSError, 'Missing gnupg binary'
+ raise OSError('Missing gnupg binary')
d_stdout = None
d_stderr = None
nbr = ""
for bre in needed.keys():
nbr = nbr + " " + re.escape(bre)
- br = string.strip(nbr)
+ br = nbr.strip()
b.log_line("updating poldek cache...")
chroot.run("poldek --up --upa", user = "root", logfile = b.logfile)
# check conflicts in BRed packages
nbr = ""
for bre in needed.keys():
nbr = nbr + " " + re.escape(bre)
- br = string.strip(nbr)
+ br = nbr.strip()
b.log_line("installing BR: %s" % br)
res = chroot.run("set -x; poldek --noask --caplookup -Q -v %s --upgrade %s" % (b.ignores(), br),
for l in f:
line_no += 1
- b = string.strip(l)
- if bs.has_key(b):
+ b = l.strip()
+ if b in bs:
bs[b] = line_no
else:
log.alert("found strange lock in got-lock: %s" % b)
def panic(s):
log(syslog.LOG_ALERT, "PANIC: %s" % s)
- raise Exception, "PANIC: %s" % str(s)
+ raise Exception("PANIC: %s" % str(s))
def alert(s):
log(syslog.LOG_ALERT, "alert: %s" % s)
log = tmp + config.rpmqa_filename
open(log, 'a').write("Query done at: %s\n" % datetime.datetime.now().isoformat(' '))
chroot.run("rpm -qa|sort", logfile=log)
- os.chmod(log,0644)
+ os.chmod(log,0o0644)
ftp.init(rpmqa=True)
ftp.add(log)
ftp.flush()
if cur_pkg:
pkg_reqs[cur_pkg] = cur_pkg_reqs
cur_pkg = m.groups(1)
- if pkg_reqs.has_key(cur_pkg):
+ if cur_pkg in pkg_reqs:
cur_pkg = None
cur_pkg_reqs = None
else:
m = req_rx.match(l)
if m:
reqs = []
- for x in string.split(m.group(1)):
+ for x in m.group(1).split():
if x in ["RPMLIB_CAP", "NOT", "FOUND", "UNMATCHED"]: continue
m = pkg_name_rx.match(x)
if m:
return dt.isoformat()
def is_blank(e):
- return e.nodeType == Element.TEXT_NODE and string.strip(e.nodeValue) == ""
+ return e.nodeType == Element.TEXT_NODE and e.nodeValue.strip() == ""
class Group:
def __init__(self, e):
self.requester = ""
self.max_jobs = 0
self.requester_email = ""
- self.flags = string.split(attr(e, "flags", ""))
+ self.flags = attr(e, "flags", "").split()
for c in e.childNodes:
if is_blank(c): continue
deps = []
m[b.b_id] = b
for dep in b.depends_on:
- if m.has_key(dep):
+ if dep in m:
# avoid self-deps
if id(m[dep]) != id(b):
deps.append(m[dep])
self.skip = []
self.gb_id = ""
self.b_id = attr(e, "id")
- self.depends_on = string.split(attr(e, "depends-on"))
+ self.depends_on = attr(e, "depends-on").split()
self.upgraded = True
self.parse_xml(e)
elif c.nodeName == "command":
self.spec = "COMMAND"
self.command = text(c).strip()
- self.command_flags = string.split(attr(c, "flags", ""))
+ self.command_flags = attr(c, "flags", "").split()
elif c.nodeName == "info":
self.info = text(c)
elif c.nodeName == "kernel":
ignores = []
# add php version based ignores
- if self.defines.has_key('php_suffix'):
+ if 'php_suffix' in self.defines:
# current version if -D php_suffix is present
php_version = php_name_to_ver(self.defines['php_suffix'])
else:
<spec>%s</spec>
<branch>%s</branch>
<info>%s</info>\n""" % (self.b_id,
- string.join(list(map(lambda (b): b.b_id, self.depends_on))),
+ ' '.join(map(lambda b: b.b_id, self.depends_on)),
escape(self.src_rpm),
escape(' '.join(self.command_flags)), escape(self.command),
escape(self.spec), escape(self.branch), escape(self.info)))
if self.defines:
f.write(" %s\n" % self.defines_xml())
for b in self.builders:
- if self.builders_status_buildtime.has_key(b):
+ if b in self.builders_status_buildtime:
t = self.builders_status_buildtime[b]
else:
t = "0"
for r in q.requests:
if r.kind == "group":
for b in r.batches:
- if self.batches.has_key(b.b_id):
+ if b.b_id in self.batches:
b.builders_status[self.builder] = self.batches[b.b_id]
b.builders_status_time[self.builder] = time.time()
b.builders_status_buildtime[self.builder] = "0" #self.batches_buildtime[b.b_id]
last_count = 0
def alarmalarm(signum, frame):
- raise IOError, 'TCP connection hung'
+ raise IOError('TCP connection hung')
def has_new(control_url):
global last_count
cnt_f = open(path.last_req_no_file)
try:
- last_count = int(string.strip(cnt_f.readline()))
+ last_count = int(cnt_f.readline().strip())
except ValueError as e:
last_count = 0
headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
req = urllib2.Request(url=control_url + "/max_req_no", headers=headers)
f = urllib2.urlopen(req)
- count = int(string.strip(f.readline()))
+ count = int(f.readline().strip())
signal.alarm(0)
except Exception as e:
signal.alarm(0)
q = B_Queue(path.req_queue_file)
q.lock(0)
q.read()
- not_fin = list(filter(lambda (r): not r.is_done(), q.requests))
+ not_fin = list(filter(lambda r: not r.is_done(), q.requests))
r.apply_to(q)
for r in not_fin:
if r.is_done():
echo "Starting request_handler_server(s)"
rm -f $sock
cd $(dirname $sock)
- tmux -S $sock new-session -d -s "request_handler_server" -n "http" "python request_handler_server.py" && \
- exec tmux -S $sock new-window -d -t "request_handler_server" -n "https" "python request_handler_server.py ssl"
+ tmux -S $sock new-session -d -s "request_handler_server" -n "http" "python3 request_handler_server.py" && \
+ exec tmux -S $sock new-window -d -t "request_handler_server" -n "https" "python3 request_handler_server.py ssl"
exit 1
fi
import os
from config import config, init_conf
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from http.server import BaseHTTPRequestHandler, HTTPServer
import request_handler
import path
if os.path.exists(css_file) and os.stat(css_src).st_mtime < os.stat(css_file).st_mtime:
return
- old_umask = os.umask(0022)
+ old_umask = os.umask(0o0022)
r = open(css_src, 'r')
f = open(css_file, "w")
f.write(r.read())
if os.path.exists(js_file) and os.stat(js_src).st_mtime < os.stat(js_file).st_mtime:
return
- old_umask = os.umask(0022)
+ old_umask = os.umask(0o0022)
r = open(js_src, 'r')
f = open(js_file, 'w')
f.write(r.read())
do_exit = 0
try:
f = open("/proc/loadavg")
- if float(string.split(f.readline())[2]) > config.max_load:
+ if float(f.readline().split()[2]) > config.max_load:
do_exit = 1
except:
pass
def pick_request(q):
def mycmp(r1, r2):
if r1.kind != 'group' or r2.kind != 'group':
- raise Exception, "non-group requests"
+ raise Exception("non-group requests")
pri_diff = cmp(r1.priority, r2.priority)
if pri_diff == 0:
return cmp(r1.time, r2.time)
return
r.batches = new_b
# store new queue and max_req_no for binary builders
- num = int(string.strip(open(path.max_req_no_file, "r").read())) + 1
+ num = int(open(path.max_req_no_file, "r").read().strip()) + 1
r.no = num
q = B_Queue(path.req_queue_file)
cnt_f.flush()
os.fsync(cnt_f.fileno())
cnt_f.close()
- os.chmod(tmpfname, 0644)
+ os.chmod(tmpfname, 0o0644)
os.rename(tmpfname, path.max_req_no_file)
def transfer_file(r, b):
f = b.src_rpm_file
# export files from chroot
chroot.cp(f, outfile = local, rm = True)
- os.chmod(local, 0644)
+ os.chmod(local, 0o0644)
ftp.add(local)
if config.gen_upinfo and 'test-build' not in r.flags:
def handle_request(r):
os.mkdir(path.srpms_dir + '/' + r.id)
- os.chmod(path.srpms_dir + '/' + r.id, 0755)
+ os.chmod(path.srpms_dir + '/' + r.id, 0o0755)
ftp.init(r)
buildlogs.init(r)
build.build_all(r, build_srpm)
def uuid_external():
f = os.popen("uuidgen 2>&1")
- u = string.strip(f.read())
+ u = f.read().strip()
f.close()
if len(u) != 36:
- raise Exception, "uuid: fatal, cannot generate uuid: %s" % u
+ raise Exception("uuid: fatal, cannot generate uuid: %s" % u)
return u
# uuid module available in python >= 2.5