import time
import datetime
import string
-import urllib
-import urllib2
+import urllib.request
+import urllib.parse
+import urllib.error
from config import config, init_conf
from bqueue import B_Queue
import lock
import util
+import shutil
import loop
import path
import status
def mycmp(r1, r2):
if r1.kind != 'group' or r2.kind != 'group':
raise Exception("non-group requests")
- pri_diff = cmp(r1.priority, r2.priority)
+ pri_diff = util.cmp(r1.priority, r2.priority)
if pri_diff == 0:
- return cmp(r1.time, r2.time)
+ return util.cmp(r1.time, r2.time)
else:
return pri_diff
- q.requests.sort(mycmp)
+ q.requests.sort(key=util.cmp_to_key(mycmp))
ret = q.requests[0]
return ret
while not good:
try:
headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
- req = urllib2.Request(url=src_url, headers=headers)
- f = urllib2.urlopen(req)
+ req = urllib.request.Request(url=src_url, headers=headers)
+ f = urllib.request.urlopen(req)
good = True
- except urllib2.HTTPError as error:
+ except urllib.error.HTTPError as error:
return False
- except urllib2.URLError as error:
+ except urllib.error.URLError as error:
# see errno.h
try:
errno = error.errno
return False
def fetch_src(r, b):
- src_url = config.control_url + "/srpms/" + r.id + "/" + urllib.quote(b.src_rpm)
+ src_url = config.control_url + "/srpms/" + r.id + "/" + urllib.parse.quote(b.src_rpm)
b.log_line("fetching %s" % src_url)
start = time.time()
good = False
while not good:
try:
headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
- req = urllib2.Request(url=src_url, headers=headers)
- f = urllib2.urlopen(req)
+ req = urllib.request.Request(url=src_url, headers=headers)
+ f = urllib.request.urlopen(req)
good = True
- except urllib2.HTTPError as error:
+ except urllib.error.HTTPError as error:
# fail in a way where cron job will retry
msg = "unable to fetch url %s, http code: %d" % (src_url, error.code)
b.log_line(msg)
msg = "in queue for more than 6 hours, download failing"
b.log_line(msg)
return False
- except urllib2.URLError as error:
+ except urllib.error.URLError as error:
errno = 0
if isinstance(error.args[0], IOError):
errno = error.args[0].errno
o = chroot.popen("cat > %s" % b.src_rpm, mode = "w")
try:
- bytes = util.sendfile(f, o)
+ shutil.copyfileobj(f, o)
except IOError as e:
b.log_line("error: unable to write to `%s': %s" % (b.src_rpm, e))
raise
+ bytes = float(f.headers.get('content-length', 0))
f.close()
o.close()
t = time.time() - start
b.log_line("started at: %s" % time.asctime())
- b.log_line("killing old processes on a builder")
- chroot.run("/bin/kill --verbose -9 -1", logfile = b.logfile)
-
b.log_line("cleaning up /tmp")
chroot.run("rm -rf /tmp/B.*", logfile = b.logfile)
b.log_line("copy rpm files to cache_dir: %s" % rpm_cache_dir)
chroot.run(
"cp -f %s %s && poldek --mo=nodiff --mkidxz -s %s/" % \
- (b.files.join(), rpm_cache_dir, rpm_cache_dir),
+ (' '.join(b.files), rpm_cache_dir, rpm_cache_dir),
logfile = b.logfile, user = "root"
)
else: