git.pld-linux.org Git - projects/pld-builder.new.git/commitdiff
- never trust cache/proxy
author      Arkadiusz Miśkiewicz <arekm@maven.pl>
            Tue, 1 Sep 2009 09:16:39 +0000 (09:16 +0000)
committer   cvs2git <feedback@pld-linux.org>
            Sun, 24 Jun 2012 12:13:13 +0000 (12:13 +0000)
Changed files:
    PLD_Builder/request_fetcher.py -> 1.27
    PLD_Builder/rpm_builder.py -> 1.106

PLD_Builder/request_fetcher.py

index fcce56487c3b43f063694a95a09b1409169461ba..ce886e0df3635ae42722df289a59a2f7ade4a389 100644 (file)
@@ -3,6 +3,7 @@
 import string
 import signal
 import os
+import urllib
 import urllib2
 import StringIO
 import sys
@@ -40,7 +41,8 @@ def has_new(control_url):
     signal.signal(signal.SIGALRM, alarmalarm)
     signal.alarm(300)
     try:
-        f = urllib2.urlopen(control_url + "/max_req_no")
+        headers = urllib.urlencode( { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' } )
+        f = urllib2.urlopen(control_url + "/max_req_no", headers)
         count = int(string.strip(f.readline()))
         signal.alarm(0)
     except Exception, e:
@@ -58,7 +60,8 @@ def fetch_queue(control_url):
     socket.setdefaulttimeout(240)
     signal.alarm(300)
     try:
-        f = urllib2.urlopen(control_url + "/queue.gz")
+        headers = urllib.urlencode( { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' } )
+        f = urllib2.urlopen(control_url + "/queue.gz", headers)
         signal.alarm(0)
     except Exception, e:
         signal.alarm(0)
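
Note on the change above: in Python 2, the second positional argument to
urllib2.urlopen() is the request body, not a set of headers, so passing the
urllib.urlencode() string turns these GETs into POSTs carrying form data
along the lines of "Cache-Control=no-cache&Pragma=no-cache". A minimal
sketch of attaching the no-cache hint as real request headers via
urllib2.Request (the open_nocache() helper name is hypothetical and not
part of this commit):

    import urllib2

    # Hypothetical helper, not part of this commit: send the no-cache hint
    # as actual request headers instead of POST data.
    def open_nocache(url):
        # Request(url, data, headers): data stays None so the request
        # remains a GET; the headers dict is what a cache/proxy sees.
        req = urllib2.Request(url, None, {
            'Cache-Control': 'no-cache',
            'Pragma': 'no-cache',
        })
        return urllib2.urlopen(req)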
PLD_Builder/rpm_builder.py

index 1523bcc38625928dfcfb46970055a018551ea795..442bb17a5115105357291e3399ad54ca047ede50 100644 (file)
@@ -5,6 +5,7 @@ import os
 import atexit
 import time
 import string
+import urllib
 import urllib2
 
 from config import config, init_conf
@@ -57,7 +58,8 @@ def check_skip_build(r, b):
     b.log_line("checking if we should skip the build")
     while not good:
         try:
-            f = urllib2.urlopen(src_url)
+            headers = urllib.urlencode( { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' } )
+            f = urllib2.urlopen(src_url, headers)
             good = True
         except urllib2.HTTPError, error:
                 return False
@@ -79,7 +81,8 @@ def fetch_src(r, b):
     good = False
     while not good:
         try:
-            f = urllib2.urlopen(src_url)
+            headers = urllib.urlencode( { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' } )
+            f = urllib2.urlopen(src_url, headers)
             good = True
         except urllib2.HTTPError, error:
             # fail in a way where cron job will retry
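
For comparison, a hedged sketch of how the retry loop in check_skip_build()
would look on top of the open_nocache() helper sketched above (the
fetch_with_retry() name is hypothetical; the committed code keeps the
urlopen(src_url, headers) form shown in the diff):

    import urllib2

    # Hedged sketch, not the committed code: fetch src_url with the
    # no-cache headers attached, mirroring check_skip_build()'s loop.
    def fetch_with_retry(src_url):
        good = False
        while not good:
            try:
                f = open_nocache(src_url)   # GET with no-cache headers
                good = True
            except urllib2.HTTPError, error:
                return False
        return f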