import shutil
import sys
import traceback
-import urllib2
-
+import urllib.request
from config import config, init_conf
import mailer
import path
global problems
try:
    f = open(src, 'r')
-    data = f.read()
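+    # urlopen() only accepts a bytes body in Python 3, hence the explicit encode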
+    data = f.read().encode('utf-8')
    f.close()
-    req = urllib2.Request(url, data)
-    req.add_header('X-Filename', os.path.basename(src))
-    f = urllib2.urlopen(req)
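+    # passing data= makes this a POST; the original filename travels in the X-Filename header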
+    headers = { 'X-Filename': os.path.basename(src) }
+    req = urllib.request.Request(url, data=data, headers=headers)
+    f = urllib.request.urlopen(req)
    f.close()
except Exception as e:
    problems[src] = e
import signal
import os
import urllib
-import urllib2
+import urllib.request
import sys
from io import StringIO
import gzip
signal.alarm(300)
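+# SIGALRM bounds the fetch at 300 s; alarm(0) below cancels the watchdog once the read succeeds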
try:
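+    # no-cache headers keep proxies from serving a stale copy of the control data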
    headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
-    req = urllib2.Request(url=control_url + "/max_req_no", headers=headers)
-    f = urllib2.urlopen(req)
+    req = urllib.request.Request(url=control_url + "/max_req_no", headers=headers)
+    f = urllib.request.urlopen(req)
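+    # max_req_no is a single line holding the newest request number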
    count = int(f.readline().strip())
    signal.alarm(0)
except Exception as e:
signal.alarm(300)
try:
    headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
-    req = urllib2.Request(url=control_url + "/queue.gz", headers=headers)
-    f = urllib2.urlopen(req)
+    req = urllib.request.Request(url=control_url + "/queue.gz", headers=headers)
+    f = urllib.request.urlopen(req)
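+    # queue.gz arrives as gzip-compressed bytes; note that under Python 3 any gzip
+    # decompression downstream needs an io.BytesIO buffer rather than StringIO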
    signal.alarm(0)
except Exception as e:
    signal.alarm(0)
import datetime
import string
import urllib
-import urllib2
+import urllib.request, urllib.error
from config import config, init_conf
from bqueue import B_Queue
while not good:
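+    # retry until the download succeeds; HTTP-level errors abort outright,
+    # socket-level errors are classified by errno below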
    try:
        headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
-        req = urllib2.Request(url=src_url, headers=headers)
-        f = urllib2.urlopen(req)
+        req = urllib.request.Request(url=src_url, headers=headers)
+        f = urllib.request.urlopen(req)
        good = True
-    except urllib2.HTTPError as error:
+    except urllib.error.HTTPError as error:
        return False
-    except urllib2.URLError as error:
+    except urllib.error.URLError as error:
        # see errno.h
        try:
            errno = error.errno
while not good:
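+    # same fetch, but on failure log the reason so the cron job can retry later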
    try:
        headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
-        req = urllib2.Request(url=src_url, headers=headers)
-        f = urllib2.urlopen(req)
+        req = urllib.request.Request(url=src_url, headers=headers)
+        f = urllib.request.urlopen(req)
        good = True
-    except urllib2.HTTPError as error:
+    except urllib.error.HTTPError as error:
        # fail in a way where cron job will retry
        msg = "unable to fetch url %s, http code: %d" % (src_url, error.code)
        b.log_line(msg)
        msg = "in queue for more than 6 hours, download failing"
        b.log_line(msg)
        return False
-    except urllib2.URLError as error:
+    except urllib.error.URLError as error:
        errno = 0
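+        # URLError usually wraps the low-level socket IOError; recover its errno
+        # so transient failures (connection refused, timeout, ...) can be retried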
        if isinstance(error.args[0], IOError):
            errno = error.args[0].errno