Typo.
diff --git a/PLD_Builder/request_fetcher.py b/PLD_Builder/request_fetcher.py
index cd39090fbb9ff95dac9e3693c9d04d22b36e0640..9877cde771572ba4e7a23ca73fa2e2251a639ee2 100644
--- a/PLD_Builder/request_fetcher.py
+++ b/PLD_Builder/request_fetcher.py
@@ -4,6 +4,7 @@ import string
 import signal
 import os
 import urllib
+import urllib2
 import StringIO
 import sys
 import gzip
@@ -16,6 +17,8 @@ import util
 import gpg
 import request
 import loop
+import socket
+import struct
 from acl import acl
 from bqueue import B_Queue
 from config import config, init_conf
@@ -28,40 +31,56 @@ def alarmalarm(signum, frame):
 def has_new(control_url):
     global last_count
     cnt_f = open(path.last_req_no_file)
-    last_count = int(string.strip(cnt_f.readline()))
+    try:
+        last_count = int(string.strip(cnt_f.readline()))
+    except ValueError, e:
+        last_count = 0
+
     cnt_f.close()
     f = None
+    socket.setdefaulttimeout(240)
     signal.signal(signal.SIGALRM, alarmalarm)
+    signal.alarm(300)
     try:
-        signal.alarm(240)
-        f = urllib.urlopen(control_url + "/max_req_no")
+        headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+        req = urllib2.Request(url=control_url + "/max_req_no", headers=headers)
+        f = urllib2.urlopen(req)
+        count = int(string.strip(f.readline()))
         signal.alarm(0)
-    except:
+    except Exception, e:
         signal.alarm(0)
-        log.error("can't fetch %s" % (control_url + "/max_req_no"))
+        log.error("can't fetch %s: %s" % (control_url + "/max_req_no", e))
         sys.exit(1)
     res = 0
-    if int(string.strip(f.readline())) != last_count:
+    if count != last_count:
         res = 1
     f.close()
     return res
 
 def fetch_queue(control_url):
     signal.signal(signal.SIGALRM, alarmalarm)
+    socket.setdefaulttimeout(240)
+    signal.alarm(300)
     try:
-        signal.alarm(240)
-        f = urllib.urlopen(control_url + "/queue.gz")
+        headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+        req = urllib2.Request(url=control_url + "/queue.gz", headers=headers)
+        f = urllib2.urlopen(req)
         signal.alarm(0)
-    except:
+    except Exception, e:
         signal.alarm(0)
-        log.error("can't fetch %s" % (control_url + "/queue.gz"))
+        log.error("can't fetch %s: %s" % (control_url + "/queue.gz", e))
         sys.exit(1)
     sio = StringIO.StringIO()
     util.sendfile(f, sio)
     f.close()
     sio.seek(0)
     f = gzip.GzipFile(fileobj = sio)
-    (signers, body) = gpg.verify_sig(f)
+    try:
+        fdata = f.read()
+    except struct.error, e:
+        log.alert("corrupted fetched queue.gz file")
+        sys.exit(1)
+    (signers, body) = gpg.verify_sig(fdata)
     u = acl.user_by_email(signers)
     if u == None:
         log.alert("queue.gz not signed with signature of valid user: %s" % signers)
@@ -69,7 +88,6 @@ def fetch_queue(control_url):
     if not u.can_do("sign_queue", "all"):
         log.alert("user %s is not allowed to sign my queue" % u.login)
         sys.exit(1)
-    body.seek(0)
     return request.parse_requests(body)
 
 def handle_reqs(builder, reqs):
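A note on the struct.error guard added above: Python 2's gzip module parses the CRC and length trailer with struct, so a truncated queue.gz surfaces as struct.error rather than IOError, and catching it lets the fetcher bail out cleanly before handing the decompressed text to gpg.verify_sig(). The decompression step in isolation, with an illustrative helper name:

import gzip
import struct
import StringIO

def gunzip_or_none(raw):
    # Decompress an in-memory gzip blob; a truncated stream
    # typically shows up as struct.error or IOError here.
    try:
        return gzip.GzipFile(fileobj = StringIO.StringIO(raw)).read()
    except (struct.error, IOError), e:
        return None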
@@ -80,8 +98,8 @@ def handle_reqs(builder, reqs):
     q.lock(0)
     q.read()
     for r in reqs:
-        if r.kind != 'group': 
-            raise 'handle_reqs: fatal: huh? %s' % r.kind
+        if r.kind != 'group':
+            raise Exception, 'handle_reqs: fatal: huh? %s' % r.kind
         need_it = 0
         for b in r.batches:
             if builder in b.builders:
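Context for the raise change in this hunk: raising a bare string was deprecated in Python 2.5 and rejected with a TypeError from Python 2.6 on, so the old form would have masked the real problem instead of reporting the unexpected request kind. Raising a proper Exception instance, as the new line does, keeps the message, for example:

def check_kind(kind):
    # Illustrative only: a real Exception instance is required,
    # since raising a plain string fails on Python >= 2.6.
    if kind != 'group':
        raise Exception('handle_reqs: fatal: huh? %s' % kind)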
@@ -96,15 +114,16 @@ def main():
     lck = lock.lock("request_fetcher", non_block = True)
     if lck == None:
         sys.exit(1)
-    init_conf("")
-    
+    init_conf()
+    acl.try_reload()
+
     status.push("fetching requests")
     if has_new(config.control_url):
         q = fetch_queue(config.control_url)
         max_no = 0
         q_new = []
         for r in q:
-            if r.no > max_no: 
+            if r.no > max_no:
                 max_no = r.no
             if r.no > last_count:
                 q_new.append(r)
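The loop above does two pieces of bookkeeping at once: it records the highest request number seen and collects only the requests newer than last_count for further handling. Pulled out into a standalone sketch (helper name is illustrative):

def split_new_requests(reqs, last_no):
    # Return the requests newer than last_no together with the
    # highest request number encountered.
    max_no = 0
    fresh = []
    for r in reqs:
        if r.no > max_no:
            max_no = r.no
        if r.no > last_no:
            fresh.append(r)
    return fresh, max_no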
@@ -115,8 +134,8 @@ def main():
         f.close()
     status.pop()
     lck.close()
-    
+
 if __name__ == '__main__':
-    # http connection is established (and few bytes transferred through it) 
+    # http connection is established (and few bytes transferred through it)
     # each $secs seconds.
     loop.run_loop(main, secs = 10)
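The closing comment refers to loop.run_loop(), the project-internal scheduler that re-runs main() roughly every secs seconds. Assuming it is a plain poll-and-sleep loop (the loop module is not shown in this diff), its behaviour would be approximately:

import time

def run_loop(fn, secs = 10):
    # Assumed behaviour: call fn(), sleep, repeat; the real
    # PLD_Builder.loop module may add locking or error handling
    # on top of this.
    while True:
        fn()
        time.sleep(secs)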