X-Git-Url: https://git.tld-linux.org/?a=blobdiff_plain;f=TLD_Builder%2Frpm_builder.py;h=c2ac3634da4217b3e0657b925af51a47eb1ef93d;hb=a68953c88fba6e657febf9cb06273f44288f759c;hp=80677d52de68a0146c26e04745b4ba95ed46a47e;hpb=efb29ebd19be1db746873c582469d897b76f50ef;p=tld-builder.git diff --git a/TLD_Builder/rpm_builder.py b/TLD_Builder/rpm_builder.py index 80677d5..c2ac363 100644 --- a/TLD_Builder/rpm_builder.py +++ b/TLD_Builder/rpm_builder.py @@ -9,12 +9,12 @@ import time import datetime import string import urllib -import urllib2 from config import config, init_conf from bqueue import B_Queue import lock import util +import shutil import loop import path import status @@ -46,12 +46,12 @@ def pick_request(q): def mycmp(r1, r2): if r1.kind != 'group' or r2.kind != 'group': raise Exception("non-group requests") - pri_diff = cmp(r1.priority, r2.priority) + pri_diff = util.cmp(r1.priority, r2.priority) if pri_diff == 0: - return cmp(r1.time, r2.time) + return util.cmp(r1.time, r2.time) else: return pri_diff - q.requests.sort(key=mycmp) + q.requests.sort(key=util.cmp_to_key(mycmp)) ret = q.requests[0] return ret @@ -62,12 +62,12 @@ def check_skip_build(r, b): while not good: try: headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' } - req = urllib2.Request(url=src_url, headers=headers) - f = urllib2.urlopen(req) + req = urllib.request.Request(url=src_url, headers=headers) + f = urllib.request.urlopen(req) good = True - except urllib2.HTTPError as error: + except urllib.error.HTTPError as error: return False - except urllib2.URLError as error: + except urllib.error.URLError as error: # see errno.h try: errno = error.errno @@ -85,17 +85,17 @@ def check_skip_build(r, b): return False def fetch_src(r, b): - src_url = config.control_url + "/srpms/" + r.id + "/" + urllib.quote(b.src_rpm) + src_url = config.control_url + "/srpms/" + r.id + "/" + urllib.parse.quote(b.src_rpm) b.log_line("fetching %s" % src_url) start = time.time() good = False while not good: try: headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' } - req = urllib2.Request(url=src_url, headers=headers) - f = urllib2.urlopen(req) + req = urllib.request.Request(url=src_url, headers=headers) + f = urllib.request.urlopen(req) good = True - except urllib2.HTTPError as error: + except urllib.error.HTTPError as error: # fail in a way where cron job will retry msg = "unable to fetch url %s, http code: %d" % (src_url, error.code) b.log_line(msg) @@ -107,7 +107,7 @@ def fetch_src(r, b): msg = "in queue for more than 6 hours, download failing" b.log_line(msg) return False - except urllib2.URLError as error: + except urllib.error.URLError as error: errno = 0 if isinstance(error.args[0], IOError): errno = error.args[0].errno @@ -129,11 +129,12 @@ def fetch_src(r, b): o = chroot.popen("cat > %s" % b.src_rpm, mode = "w") try: - bytes = util.sendfile(f, o) + shutil.copyfileobj(f, o) except IOError as e: b.log_line("error: unable to write to `%s': %s" % (b.src_rpm, e)) raise + bytes = f.tell() f.close() o.close() t = time.time() - start