# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
-import ConfigParser
+import sys
+if sys.version_info[0] == 2:
+ import ConfigParser
+else:
+ import configparser as ConfigParser
import string
import fnmatch
import os
import re
import gzip
import time
-import StringIO
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import os
import fcntl
import string
return
try:
self.requests = request.parse_requests(body)
- except Exception, e:
+ except Exception as e:
log.panic("error parsing %s: %s" % (self.name, e))
pass
# TODO: the append done here via a shell hack should be done in Python
c = "(%s) >> %s 2>&1" % (command, batch.logfile)
f = os.popen(c)
- for l in f.xreadlines():
+ for l in f:
pass
r = f.close()
if r == None:
def cp(file, outfile, user="builder", rm=False):
m = md5()
- m.update(str(random.sample(xrange(100000), 500)))
+ m.update(str(random.sample(range(100000), 500)).encode('utf-8'))
digest = m.hexdigest()
marker_start = "--- FILE BEGIN DIGEST %s ---" % digest
# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
-import ConfigParser
+import sys
+if sys.version_info[0] == 2:
+ import ConfigParser
+else:
+ import configparser as ConfigParser
import string
import os
import syslog
f = open(file)
r = {'_file': file[:-5], '_desc': file}
rx = re.compile(r"^([^:]+)\s*:(.*)$")
- for l in f.xreadlines():
+ for l in f:
if l == "END\n":
f.close()
return r
p = open(path.rsync_password_file, "r")
password = ""
- for l in p.xreadlines():
+ for l in p:
l = string.split(l)
if len(l) >= 2 and l[0] == host:
password = l[1]
req.add_header('X-Filename', os.path.basename(src))
f = urllib2.urlopen(req)
f.close()
- except Exception, e:
+ except Exception as e:
problems[src] = e
return e
return 0
if m:
return not post_file(src, target)
log.alert("unsupported protocol: %s" % target)
- except OSError, e:
+ except OSError as e:
problems[src] = e
log.error("send_file(%s, %s): %s" % (src, target, e))
return False
emails = {}
emails[config.admin_email] = 1
pr = ""
- for src, msg in problems.iteritems():
+ for src, msg in problems.items():
pr = pr + "[src: %s]\n\n%s\n" % (src, msg)
for d in remaining:
if d.has_key('Requester'):
# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
+from __future__ import print_function
+
import re
import string
-import xreadlines
from util import *
build_req = []
f = open(spec)
- for l in xreadlines.xreadlines(f):
+ for l in f:
l = string.strip(l)
if l == "%changelog": break
msg("spec error (%s): %s\n" % (spec, l))
for x in build_req:
- print x
+ print(x)
import log
import subprocess
import re
-import StringIO
+import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import util
import os
gpg_run = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
try:
d_stdout, d_stderr = gpg_run.communicate(buf.encode('utf-8'))
- except OSError, e:
+ except OSError as e:
log.error("gnupg run, does gpg binary exist? : %s" % e)
raise
- rx = re.compile("^gpg: Signature made .*using [DR]SA key ID (.+)")
+ rx = re.compile(r"^gpg:.*using\s[DR]SA\skey\s(?:ID\s)?(\w+)")
keys = []
for l in d_stderr.split('\n'):
gpg_run = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
try:
d_stdout, d_stderr = gpg_run.communicate(buf.encode('utf-8'))
- except OSError, e:
+ except OSError as e:
log.error("gnupg run failed, does gpg binary exist? : %s" % e)
raise
gpg_run = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
try:
d_stdout, d_stderr = gpg_run.communicate(buf.encode('utf-8'))
- except OSError, e:
+ except OSError as e:
log.error("gnupg signing failed, does gpg binary exist? : %s" % e)
raise
import re, os
import string
-import StringIO
+import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import chroot
import util
'dev',
'poldek',
'rpm-build',
- 'pdksh',
'mksh',
- 'coreutils'
+ 'coreutils',
+ 'util-linux'
]
def close_killset(killset):
f = chroot.popen("poldek --noask --test --test --erase %s" % p, user = "root")
crucial = 0
e = []
- for l in f.xreadlines():
+ for l in f:
m = rx.search(l)
if m:
pkg = m.group('name')
f = chroot.popen("rpm --test -F %s 2>&1" % string.join(b.files), user = "root")
killset = {}
rx = re.compile(r' \(installed\) (?P<name>[^\s]+)-[^-]+-[^-]+$')
- for l in f.xreadlines():
+ for l in f:
m = rx.search(l)
if m: killset[m.group('name')] = 1
f.close()
b.log_line("package %s removal failed" % k)
return True
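+# rpm.org releases newer than 4.5 need a different rpmbuild invocation below,
+# so detect which rpm the build chroot carries by parsing "rpm --version".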
+def is_rpmorg():
+ f = chroot.popen("rpm --version 2>&1")
+ v = re.compile(r'(RPM version|rpm \(RPM\)) (?P<major>\d)\.(?P<minor>\d+)(\.\d+)?')
+ for l in f:
+ m = v.search(l)
+ if m:
+ major = int(m.group('major'))
+ minor = int(m.group('minor'))
+ if major == 4 and minor > 5:
+ f.close()
+ return True
+ f.close()
+ return False
+
def uninstall_self_conflict(b):
b.log_line("checking BuildConflict-ing packages")
- f = chroot.popen("set -e; TMPDIR=%(tmpdir)s " \
- "rpmbuild -br --nobuild %(rpmdefs)s %(topdir)s/%(spec)s 2>&1" % {
+ if is_rpmorg():
+ rpmcommand = "rpmbuild --nobuild -br"
+ else:
+ rpmcommand = "rpmbuild -bp --nobuild --short-circuit --define 'prep exit 0'"
+ f = chroot.popen("set -e; TMPDIR=%(tmpdir)s %(rpmcommand)s %(rpmdefs)s %(topdir)s/%(spec)s 2>&1" % {
+ 'rpmcommand': rpmcommand,
'tmpdir': b.tmpdir(),
'rpmdefs' : b.rpmbuild_opts(),
'topdir' : b.get_topdir(),
# java-sun conflicts with soprano-2.1.67-1.src
rx = re.compile(r"\s+(?P<name>[\w-]+)\s+.*conflicts with [^\s]+-[^-]+-[^-]+\.src($| .*)")
conflicting = {}
- for l in f.xreadlines():
+ for l in f:
m = rx.search(l)
if m:
b.log_line("rpmbuild: %s" % l.rstrip())
ignore_br = re.compile(r'^\s*(rpmlib|cpuinfo|getconf|uname|soname|user|group|mounted|diskspace|digest|gnupg|macro|envvar|running|sanitycheck|vcheck|signature|verify|exists|executable|readable|writable)\(.*')
tmpdir = b.tmpdir()
- cmd = "set -e; TMPDIR=%(tmpdir)s rpmbuild -br --nobuild %(rpmdefs)s %(topdir)s/%(spec)s 2>&1" % {
+ if is_rpmorg():
+ rpmcommand = "rpmbuild --nobuild -br"
+ else:
+ rpmcommand = "rpmbuild --nobuild"
+ cmd = "set -e; TMPDIR=%(tmpdir)s %(rpmcommand)s %(rpmdefs)s %(topdir)s/%(spec)s 2>&1" % {
+ 'rpmcommand': rpmcommand,
'tmpdir': tmpdir,
'topdir' : b.get_topdir(),
'rpmdefs' : b.rpmbuild_opts(),
rx = re.compile(r"^\s*(?P<name>[^\s]+) .*is needed by")
needed = {}
b.log_line("checking BR")
- for l in f.xreadlines():
+ for l in f:
b.log_line("rpm: %s" % l.rstrip())
m = rx.search(l)
if m and not ignore_br.match(l):
# jmx is needed by (installed) java-commons-modeler-2.0-1.noarch
rx = re.compile(r".*(conflicts with|is required by|is needed by)( installed| \(installed\)|) (?P<name>[^\s]+)-[^-]+-[^-]+($| .*)")
conflicting = {}
- for l in f.xreadlines():
+ for l in f:
b.log_line("poldek: %s" % l.rstrip())
m = rx.search(l)
if m: conflicting[m.group('name')] = 1
f = open(path.got_lock_file, "r+")
line_no = 0
- for l in f.xreadlines():
+ for l in f:
line_no += 1
b = string.strip(l)
if bs.has_key(b):
import time
import os
import sys
-import StringIO
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
from config import config
import util
# just head and tail
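+ # copy only roughly the first and last 100 lines of the log into the mail body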
f = open(log)
line_cnt = 0
- for l in f.xreadlines():
+ for l in f:
line_cnt += 1
f.seek(0)
line = 0
- for l in f.xreadlines():
+ for l in f:
if line < 100 or line > line_cnt - 100:
self.body.write(recode(l))
if line == line_cnt - 100:
f = os.popen(send_sendmail, "w")
try:
self.write_to(f)
- except IOError, e:
+ except IOError as e:
log.alert("sending email message failed: %s" % e)
f.close()
return False
--- /dev/null
+#!/usr/bin/python3
+# This file would be named fedmsg, but then 'import fedmsg' would pick up
+# this local module instead of the system fedmsg package.
+
+import fedmsg
+import fedmsg.config
+
+config = fedmsg.config.load_config([], None)
+config['active'] = True
+config['endpoints']['relay_inbound'] = config['relay_inbound']
+fedmsg.init(name='relay_inbound', cert_prefix='builder', **config)
+
+def notify(topic, **kwargs):
+ fedmsg.publish(
+ topic=topic,
+ msg=dict(kwargs),
+ modname="builder",
+ )
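+
+# Callers import this module as "messagebus"; example call (currently
+# commented out in the request handler):
+#   messagebus.notify(topic="request.group", user=user.login, **r.dump_json())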
# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
-import StringIO
+import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import mailer
import gpg
import select
import os
-import StringIO
+import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ import io as StringIO
def rw_pipe(buf_, infd, outfd):
buf = StringIO.StringIO()
import re
import types
import string
-import xreadlines
from chroot import *
from util import *
cur_pkg = None
f = chr_popen("poldek -v -v --verify --unique-pkg-names")
- for l in xreadlines.xreadlines(f):
+ for l in f:
m = name_rx.match(l)
if m:
if cur_pkg:
f = open(b.logfile)
copy_mode = 0
out = []
- for l in f.xreadlines():
+ for l in f:
if l == msg:
copy_mode = 1
out.append(l)
f = open(b.logfile)
copy_mode = 0
need_header = 1
- for l in f.xreadlines():
+ for l in f:
if l == beg:
if need_header:
need_header = 0
subject += ' '.join((s_failed, s_ok)).strip()
m = mailer.Message()
+ m.set_headers(to = r.requester_email,
+ cc = config.builder_list,
+ subject = subject[0:100])
+ if is_src:
+ m.set_header("Message-ID", "<%s@tld.src.builder>" % r.id)
+ else:
+ m.set_header("References", "<%s@tld.src.builder>" % r.id)
+ m.set_header("In-Reply-To", "<%s@tld.src.builder>" % r.id)
+
+ m.set_header("X-Entity-Ref-ID", "%s" % r.id)
- m.write("Request by: %s\n\n" % r.requester_email)
for b in r.batches:
if b.build_failed and b.logfile == None:
info = b.skip_reason
m.append_log(b.logfile)
m.write("\n\n")
- m.set_headers(to = r.requester_email,
- subject = subject[0:100])
- if is_src:
- m.set_header("Message-ID", "<req-%s@tld.src.builder>" % r.id)
- else:
- m.set_header("References", "<req-%s@tld.src.builder>" % r.id)
- m.set_header("In-Reply-To", "<req-%s@tld.src.builder>" % r.id)
- m.send()
-
- m.remove_header("To")
- m.remove_header("Cc")
- m.set_header("To", config.builder_list)
- # reset Message-ID
- m.set_std_headers()
- if is_src:
- m.set_header("Message-ID", "<%s@tld.src.builder>" % r.id)
- else:
- m.set_header("References", "<%s@tld.src.builder>" % r.id)
- m.set_header("In-Reply-To", "<%s@tld.src.builder>" % r.id)
m.send()
def send_cia_report(r, is_src = False):
import log
from acl import acl
from config import config
from subprocess import call
__all__ = ['parse_request', 'parse_requests']
def escape(s):
return xml.sax.saxutils.escape(s)
+# return a timestamp with timezone information
+# so that it can be parsed in JavaScript
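+# e.g. (illustrative, on a UTC+1 host): tzdate(0) -> "Thu Jan 01 1970 01:00:00 +0100"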
+def tzdate(t):
+ # strftime %z is unofficial and does not work here, so build the numeric offset ourselves
+ date = time.strftime("%a %b %d %Y %H:%M:%S", time.localtime(t))
+ # NOTE: altzone/timezone describe the CURRENT timezone, not the one in effect at time "t"
+ # NOTE: when DST is off, timezone is correct and altzone is not
+ if time.daylight:
+ tzoffset = time.altzone
+ else:
+ tzoffset = time.timezone
+ tz = '%+05d' % (-tzoffset // 3600 * 100)
+ return date + ' ' + tz
+
# return date in iso8601 format
def iso8601(ts, timezone='UTC'):
tz = pytz.timezone(timezone)
b.dump(f)
f.write("\n")
+ # return structure usable for json encoding
+ def dump_json(self):
+ batches = []
+ for b in self.batches:
+ batches.append(b.dump_json())
+
+ return dict(
+ no=self.no,
+ id=self.id,
+ time=self.time,
+ requester=self.requester,
+ priority=self.priority,
+ max_jobs=self.max_jobs,
+ flags=self.flags,
+ batches=batches,
+ )
+
def dump_html(self, f):
f.write(
"<div id=\"%(no)d\" class=\"request %(flags)s\">\n"
"<a href=\"#%(no)d\">%(no)d</a>. "
- "<time class=\"timeago\" datetime=\"%(datetime)s\">%(time)s</time> "
+ "<time class=\"timeago\" title=\"%(datetime)s\" datetime=\"%(datetime)s\">%(time)s</time> "
"from <b class=requester>%(requester)s</b> "
"<small>%(id)s, prio=%(priority)d, jobs=%(max_jobs)d, %(flags)s</small>\n"
% {
'no': self.no,
'id': '<a href="srpms/%(id)s">%(id)s</a>' % {'id': self.id},
- 'time': escape(time.strftime("%a %b %d %Y %H:%M:%S %z", time.localtime(self.time))),
+ 'time': escape(tzdate(self.time)),
'datetime': escape(iso8601(self.time)),
'requester': escape(self.requester),
'priority': self.priority,
ok = 0
return ok
+# transform php package name (52) to version (5.2)
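+# e.g. php_name_to_ver('74') -> '7.4'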
+def php_name_to_ver(v):
+ return '.'.join(list(v))
+
+# transform php version (5.2) to package name (52)
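+# e.g. php_ver_to_name('7.4') -> '74'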
+def php_ver_to_name(v):
+ return v.replace('.', '')
+
class Batch:
+ DEFAULT_PHP = '5.3'
+
def __init__(self, e):
self.bconds_with = []
self.bconds_without = []
def is_command(self):
return self.command != ""
+ # return structure usable for json encoding
+ def dump_json(self):
+ return dict(
+ command=self.command,
+ command_flags=self.command_flags,
+
+ spec=self.spec,
+ branch=self.branch,
+ package=self.spec[:-5],
+ src_rpm=self.src_rpm,
+
+ bconds_with=self.bconds_with,
+ bconds_without=self.bconds_without,
+
+ kernel=self.kernel,
+ target=self.target,
+ defines=self.defines,
+
+ builders=self.builders,
+ builders_status=self.builders_status,
+ builders_status_time=self.builders_status_time,
+ builders_status_buildtime=self.builders_status_buildtime,
+ )
+
def dump_html(self, f, rid):
f.write("<li>\n")
if self.is_command():
desc = "SH: <pre>%s</pre> flags: [%s]" % (self.command, ' '.join(self.command_flags))
else:
- cmd = "/usr/bin/git ls-remote --heads git://git.tld-linux.org/packages/%s 1>/dev/null 2>&1" % (self.spec[:-5])
- r = call(cmd, shell=True)
- if r == 0:
- dist = "tld"
- else:
- dist = "pld"
- package_url = "http://git.%(dist)s-linux.org/?p=packages/%(package)s.git;a=blob;f=%(spec)s;hb=%(branch)s" % {
- 'dist': dist,
- 'spec': urllib.quote(self.spec),
- 'branch': urllib.quote(self.branch),
- 'package': urllib.quote(self.spec[:-5]),
- }
+ cmd = "/usr/bin/git ls-remote --heads git://git.tld-linux.org/packages/%s 1>/dev/null 2>&1" % (self.spec[:-5])
+ r = call(cmd, shell=True)
+ if r == 0:
+ package_url = "http://git.tld-linux.org/?p=packages/%(package)s.git;a=blob;f=%(spec)s;hb=%(branch)s" % {
+ 'spec': urllib.quote(self.spec),
+ 'branch': urllib.quote(self.branch),
+ 'package': urllib.quote(self.spec[:-5]),
+ }
+ else:
+ package_url = "http://git.pld-linux.org/gitweb.cgi?p=packages/%(package)s.git;f=%(spec)s;h=%(branch)s;a=shortlog" % {
+ 'spec': urllib.quote(self.spec),
+ 'branch': urllib.quote(self.branch),
+ 'package': urllib.quote(self.spec[:-5]),
+ }
desc = "%(src_rpm)s (<a href=\"%(package_url)s\">%(spec)s -r %(branch)s</a>%(rpmopts)s)" % {
'src_rpm': self.src_rpm,
'spec': self.spec,
"--define '_builddir %{_topdir}/BUILD' "
return rpmdefs + rpmopts
- def php_ignores(self):
- # transform php package name (52) to version (5.2)
- def php_name_to_ver(v):
- return '.'.join(list(v))
-
- # transform php version (5.2) to package name (52)
- def php_ver_to_name(v):
- return v.replace('.', '')
-
+ def php_ignores(self, php_version):
# available php versions in distro
- php_versions = ['4', '5.2', '5.3', '5.4', '5.5', '5.6', '7.0']
-
- # current version if -D php_suffix is present
- php_version = php_name_to_ver(self.defines['php_suffix'])
+ php_versions = ['7.2', '7.3', '7.4', '8.0']
# remove current php version
try:
# map them to poldek ignores
# always ignore hhvm
res = ['hhvm-*']
- for v in map(php_ver_to_name, php_versions):
+ for v in list(map(php_ver_to_name, php_versions)):
res.append("php%s-*" % v)
return res
# add php version based ignores
if self.defines.has_key('php_suffix'):
- ignores.extend(self.php_ignores())
+ # current version if -D php_suffix is present
+ php_version = php_name_to_ver(self.defines['php_suffix'])
+ else:
+ php_version = self.DEFAULT_PHP
+
+ ignores.extend(self.php_ignores(php_version))
# return empty string if the list is empty
if len(ignores) == 0:
def add_ignore(s):
return "--ignore=%s" % s
- return " ".join(map(add_ignore, ignores))
+ return " ".join(list(map(add_ignore, ignores)))
def kernel_string(self):
r = ""
<spec>%s</spec>
<branch>%s</branch>
<info>%s</info>\n""" % (self.b_id,
- string.join(map(lambda (b): b.b_id, self.depends_on)),
+ string.join(list(map(lambda b: b.b_id, self.depends_on))),
escape(self.src_rpm),
escape(' '.join(self.command_flags)), escape(self.command),
escape(self.spec), escape(self.branch), escape(self.info)))
else:
log.panic("xml: evil notification child (%s)" % c.nodeName)
+ # return structure usable for json encoding
+ def dump_json(self):
+ return dict(
+ id=self.group_id,
+ builder=self.builder,
+ batches=self.batches,
+ batches_buildtime=self.batches_buildtime,
+ )
+
def apply_to(self, q):
for r in q.requests:
if r.kind == "group":
import os
import urllib
import urllib2
-import StringIO
import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import gzip
import path
cnt_f = open(path.last_req_no_file)
try:
last_count = int(string.strip(cnt_f.readline()))
- except ValueError, e:
+ except ValueError as e:
last_count = 0
cnt_f.close()
f = urllib2.urlopen(req)
count = int(string.strip(f.readline()))
signal.alarm(0)
- except Exception, e:
+ except Exception as e:
signal.alarm(0)
log.error("can't fetch %s: %s" % (control_url + "/max_req_no", e))
sys.exit(1)
req = urllib2.Request(url=control_url + "/queue.gz", headers=headers)
f = urllib2.urlopen(req)
signal.alarm(0)
- except Exception, e:
+ except Exception as e:
signal.alarm(0)
log.error("can't fetch %s: %s" % (control_url + "/queue.gz", e))
sys.exit(1)
f = gzip.GzipFile(fileobj = sio)
try:
fdata = f.read()
- except struct.error, e:
+ except struct.error as e:
log.alert("corrupted fetched queue.gz file")
sys.exit(1)
(signers, body) = gpg.verify_sig(fdata)
q.read()
for r in reqs:
if r.kind != 'group':
- raise Exception, 'handle_reqs: fatal: huh? %s' % r.kind
+ raise Exception('handle_reqs: fatal: huh? %s' % r.kind)
need_it = 0
for b in r.batches:
if builder in b.builders:
import string
import time
import os
-import StringIO
import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import fnmatch
import gpg
from bqueue import B_Queue
from config import config, init_conf
from mailer import Message
+#import messagebus
def check_double_id(id):
id_nl = id + "\n"
ids = open(path.processed_ids_file)
- for i in ids.xreadlines():
+ for i in ids:
if i == id_nl:
# FIXME: security email here?
log.alert("request %s already processed" % id)
q = B_Queue(path.req_queue_file)
q.lock(0)
q.read()
- not_fin = filter(lambda (r): not r.is_done(), q.requests)
+ not_fin = list(filter(lambda r: not r.is_done(), q.requests))
r.apply_to(q)
for r in not_fin:
if r.is_done():
util.clean_tmp(path.srpms_dir + '/' + r.id)
return False
return True
- q.requests = filter(leave_it, q.requests)
+ q.requests = list(filter(leave_it, q.requests))
q.write()
q.dump(path.queue_stats_file)
q.dump_html(path.queue_html_stats_file)
status.push("request from %s" % user.login)
r = request.parse_request(body)
if r.kind == 'group':
+# messagebus.notify(topic="request.group", user=user.login, **r.dump_json())
handle_group(r, user)
elif r.kind == 'notification':
+# messagebus.notify(topic="request.notify", user=user.login, **r.dump_json())
handle_notification(r, user)
else:
msg = "%s: don't know how to handle requests of this kind '%s'" \
-#!/usr/bin/python
+#!/usr/bin/python3
import socket
import string
self.send_response(200)
self.end_headers()
- except Exception, e:
+ except Exception as e:
self.send_error(500, "%s: %s" % (filename, e))
self.end_headers()
log.error("request_handler_server: [%s]: exception: %s\n%s" % (self.client_address[0], e, traceback.format_exc()))
certfile = path.conf_dir + "/" + config.request_handler_server_ssl_cert,
ca_certs = "/etc/certs/ca-certificates.crt",
server_side=True)
- except Exception, e:
+ except Exception as e:
log.notice("request_handler_server: can't start server on [%s:%d], ssl=%s: %s" % (host, port, str(srv_ssl), e))
print >> sys.stderr, "ERROR: Can't start server on [%s:%d], ssl=%s: %s" % (host, port, str(srv_ssl), e)
sys.exit(1)
# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
+from __future__ import print_function
+
import sys
import os
import atexit
def pick_request(q):
def mycmp(r1, r2):
if r1.kind != 'group' or r2.kind != 'group':
- raise Exception, "non-group requests"
+ raise Exception("non-group requests")
pri_diff = cmp(r1.priority, r2.priority)
if pri_diff == 0:
return cmp(r1.time, r2.time)
req = urllib2.Request(url=src_url, headers=headers)
f = urllib2.urlopen(req)
good = True
- except urllib2.HTTPError, error:
+ except urllib2.HTTPError as error:
return False
- except urllib2.URLError, error:
+ except urllib2.URLError as error:
# see errno.h
try:
errno = error.errno
req = urllib2.Request(url=src_url, headers=headers)
f = urllib2.urlopen(req)
good = True
- except urllib2.HTTPError, error:
+ except urllib2.HTTPError as error:
# fail in a way where cron job will retry
msg = "unable to fetch url %s, http code: %d" % (src_url, error.code)
b.log_line(msg)
queue_time = time.time() - r.time
# 6 hours
if error.code != 404 or (queue_time >= 0 and queue_time < (6 * 60 * 60)):
- raise IOError, msg
+ raise IOError(msg)
else:
msg = "in queue for more than 6 hours, download failing"
b.log_line(msg)
return False
- except urllib2.URLError, error:
+ except urllib2.URLError as error:
errno = 0
if isinstance(error.args[0], IOError):
errno = error.args[0].errno
continue
else:
try:
- print "error.errno: %s" % str(error.errno)
- except Exception, e:
- print "error.errno: exception %s" % e
+ print("error.errno: %s" % str(error.errno))
+ except Exception as e:
+ print("error.errno: exception %s" % e)
try:
- print "error.reason %s" % str(error.reason)
- except Exception, e:
- print "error.reason exception %s" % e
+ print("error.reason %s" % str(error.reason))
+ except Exception as e:
+ print("error.reason exception %s" % e)
raise
o = chroot.popen("cat > %s" % b.src_rpm, mode = "w")
try:
bytes = util.sendfile(f, o)
- except IOError, e:
+ except IOError as e:
b.log_line("error: unable to write to `%s': %s" % (b.src_rpm, e))
raise
return res
b.log_line("started at: %s" % time.asctime())
+
+ b.log_line("killing old processes on a builder")
+ chroot.run("/bin/kill --verbose -9 -1", logfile = b.logfile)
+
+ b.log_line("cleaning up /tmp")
+ chroot.run("rm -rf /tmp/B.*", logfile = b.logfile)
+
fetch_src(r, b)
b.log_line("installing srpm: %s" % b.src_rpm)
res = chroot.run("""
if r.max_jobs > 0:
max_jobs = max(min(config.max_jobs, r.max_jobs), 1)
cmd = "set -ex; : build-id: %(r_id)s; TMPDIR=%(tmpdir)s exec nice -n %(nice)s " \
- "rpmbuild -bb --define '_smp_mflags -j%(max_jobs)d' --define '_tld_builder 1' %(rpmdefs)s %(topdir)s/%(spec)s" % {
+ "rpmbuild -bb --define '_smp_mflags -j%(max_jobs)d' --define '_make_opts -Otarget' --define '_tld_builder 1' %(rpmdefs)s %(topdir)s/%(spec)s" % {
'r_id' : r.id,
'tmpdir': tmpdir,
'nice' : config.nice,
q.lock(0)
q.read()
previouslen=len(q.requests)
- q.requests=filter(otherreqs, q.requests)
+ q.requests=list(filter(otherreqs, q.requests))
if len(q.requests)<previouslen:
q.write()
q.unlock()
def main():
if len(sys.argv) < 2:
- raise Exception, "fatal: need to have builder name as first arg"
+ raise Exception("fatal: need to have builder name as first arg")
return main_for(sys.argv[1])
if __name__ == '__main__':
import string
import time
import os
-import StringIO
import sys
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import re
import shutil
import atexit
import status
import build
import report
+#import messagebus
from lock import lock
from bqueue import B_Queue
return "FAIL"
status.push("building %s" % b.spec)
+# messagebus.notify(topic="build_srpm.start", spec=b.spec, flags=r.flags, batch=b, request=r)
b.src_rpm = ""
builder_opts = "-nu -nm --nodeps --http --define \'_tld_builder 1\'"
util.append_to(b.logfile, "error: No files produced.")
res = "FAIL"
if res == 0 and not "test-build" in r.flags:
- util.append_to(b.logfile, "Writing pkgrev")
+ util.append_to(b.logfile, "Writing package revision")
res = chroot.run("cd rpm/packages; ./builder -bs %s -r %s --pkgrev %s %s" % \
(b.bconds_string(), b.branch, b.defines_string(), b.spec), logfile = b.logfile)
if res == 0:
if res:
res = "FAIL"
+
+# messagebus.notify(topic="build_srpm.finish", spec=b.spec)
return res
def handle_request(r):
def collect_files(log, basedir = "/home"):
f = open(log, 'r')
rx = re.compile(r"^Wrote: (%s.*\.rpm)$" % basedir)
- proc = re.compile(r"^Processing (files):.*$")
+ proc = re.compile(r"^Processing files:.*$")
files = []
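+ # scan the build log backwards, collecting "Wrote: ...rpm" paths until the
+ # last "Processing files:" line is reached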
- for l in reversed(list(f.xreadlines())):
+ for l in reversed(list(f)):
if proc.match(l):
break
m = rx.search(l)
import sys
import log
import traceback
-import StringIO
+if sys.version_info[0] == 2:
+ import StringIO
+else:
+ from io import StringIO
import os
import time
kde4-kdeplasma-addons.spec \
kde4-kdesdk.spec \
kde4-kdeadmin.spec \
-kde4-kdetoys.spec"
+kde4-kdetoys.spec \
+kde4-kget.spec \
+kde4-kppp.spec \
+kde4-krfb.spec"
KOFFICE="kde4-koffice.spec kde4-koffice-l10n.spec"
L10N="kde4-l10n.spec"
KDEVELOP="kde4-kdevplatform.spec \
req = urllib2.Request(url, data)
f = urllib2.urlopen(req)
f.close()
-except Exception, e:
+except Exception as e:
print >> sys.stderr, "Problem while sending request via HTTP: %s: %s" % (url, e)
sys.exit(1)
print >> sys.stdout, "Request queued via HTTP."
# htmlspecialchars: escape <, > and &
hsc() {
local input=$1
- echo -E "$input" | sed -e 's,&,\&amp;,g;s,<,\&lt;,g;s,>,\&gt;,g'
+ printf "%s\n" "$input" | sed -e 's,&,\&amp;,g;s,<,\&lt;,g;s,>,\&gt;,g'
}
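+# e.g. hsc 'a <b> & c' should print 'a &lt;b&gt; &amp; c'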
# simple df_fetcher, based on packages/fetchsrc_request
if [ "$command" ]; then
bid=$(uuidgen)
- echo -E >&2 "* Command: $command"
+ printf "%s\n" "* Command: $command" >&2
echo " <batch id='$bid' depends-on=''>"
echo " <command flags='$command_flags'>"
hsc "$command"
if [ "$no_depend" = yes ]; then
depend=
fi
- echo -E >&2 "* Post-Command: $post_command"
+ printf "%s\n" "* Post-Command: $post_command" >&2
echo " <batch id='$bid' depends-on='$depend'>"
echo " <command flags='$command_flags'>"
hsc "$post_command"
-#! /usr/bin/python
+#! /usr/bin/python3
# As omta segfaults and the rest is too huge and doesn't work out of the box
# mailer="./smtpwrapper.py" or whatever the path is