import string
import signal
import os
+import urllib
import urllib2
import StringIO
import sys
signal.signal(signal.SIGALRM, alarmalarm)
signal.alarm(300)
try:
- f = urllib2.urlopen(control_url + "/max_req_no")
+ # Send the no-cache directives as real HTTP headers via a Request object.
+ # Passing an urlencoded string as urlopen()'s second argument would send it
+ # as POST data, turning this GET into a POST with a bogus body instead of
+ # setting any headers.
+ headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+ f = urllib2.urlopen(urllib2.Request(control_url + "/max_req_no", None, headers))
count = int(string.strip(f.readline()))
signal.alarm(0)
except Exception, e:
socket.setdefaulttimeout(240)
signal.alarm(300)
try:
- f = urllib2.urlopen(control_url + "/queue.gz")
+ # Use a Request with a headers dict; urlopen(url, data) would POST the
+ # urlencoded string as the request body rather than send any headers.
+ headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+ f = urllib2.urlopen(urllib2.Request(control_url + "/queue.gz", None, headers))
signal.alarm(0)
except Exception, e:
signal.alarm(0)
import atexit
import time
import string
+import urllib
import urllib2
from config import config, init_conf
b.log_line("checking if we should skip the build")
while not good:
try:
- f = urllib2.urlopen(src_url)
+ # Attach the no-cache headers through urllib2.Request; the second positional
+ # argument of urlopen() is POST data, not headers, so the original patch line
+ # silently changed the request method and sent no cache-control at all.
+ headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+ f = urllib2.urlopen(urllib2.Request(src_url, None, headers))
good = True
except urllib2.HTTPError, error:
return False
good = False
while not good:
try:
- f = urllib2.urlopen(src_url)
+ # Same fix as the other fetch sites: headers must go in a Request object;
+ # urlopen(url, data) would issue a POST with the urlencoded pairs as body.
+ headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+ f = urllib2.urlopen(urllib2.Request(src_url, None, headers))
good = True
except urllib2.HTTPError, error:
# fail in a way where cron job will retry