import signal
import os
import urllib
+import urllib2
import StringIO
import sys
import gzip
import gpg
import request
import loop
+import socket
from acl import acl
from bqueue import B_Queue
from config import config, init_conf
# NOTE(review): this file is a unified-diff fragment — lines prefixed with
# '+'/'-' are patch add/remove markers; hunk headers and indentation were
# stripped, so context lines between hunks may be missing.  The comments
# below describe the post-patch code.
#
# Check whether the control server has requests newer than the locally
# recorded counter.  NOTE(review): the function body is truncated in this
# fragment — the 'if count != last_count:' branch and the return of 'res'
# are not visible; presumably res is set to 1 and returned.  TODO confirm
# against the full file.
def has_new(control_url):
global last_count
# Read the last processed request number from the local state file.
cnt_f = open(path.last_req_no_file)
- last_count = int(string.strip(cnt_f.readline()))
# Patch: tolerate an empty/corrupt counter file by falling back to 0
# instead of crashing on ValueError (Python 2 'except ..., e' syntax).
+ try:
+ last_count = int(string.strip(cnt_f.readline()))
+ except ValueError, e:
+ last_count = 0
+
cnt_f.close()
f = None
# Patch: add a 240 s socket-level default timeout plus a 300 s SIGALRM
# watchdog (previously a single 240 s alarm guarded the fetch).
+ socket.setdefaulttimeout(240)
signal.signal(signal.SIGALRM, alarmalarm)
+ signal.alarm(300)
try:
- signal.alarm(240)
- f = urllib.urlopen(control_url + "/max_req_no")
# Patch: switch urllib -> urllib2 so explicit no-cache headers can be
# sent (avoids reading a stale proxy-cached counter).
+ headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+ req = urllib2.Request(url=control_url + "/max_req_no", headers=headers)
+ f = urllib2.urlopen(req)
count = int(string.strip(f.readline()))
signal.alarm(0)
# Patch: narrow the bare 'except:' to 'except Exception' and include the
# exception text in the error log before exiting.
- except:
+ except Exception, e:
signal.alarm(0)
- log.error("can't fetch %s" % (control_url + "/max_req_no"))
+ log.error("can't fetch %s: %s" % (control_url + "/max_req_no", e))
sys.exit(1)
res = 0
if count != last_count:
# Fetch the gzipped, GPG-signed request queue from the control server,
# verify the signature and the signer's ACL permissions, and parse it
# into request objects.
# NOTE(review): diff context lines between hunks may be omitted here —
# e.g. there is no visible exit after the first log.alert, yet the code
# goes on to dereference 'u'; presumably a sys.exit(1) line was elided.
# TODO confirm against the full file.
def fetch_queue(control_url):
signal.signal(signal.SIGALRM, alarmalarm)
# Patch: socket-level 240 s timeout plus a longer (300 s) alarm window.
+ socket.setdefaulttimeout(240)
+ signal.alarm(300)
try:
- signal.alarm(240)
- f = urllib.urlopen(control_url + "/queue.gz")
# Patch: urllib -> urllib2 so Cache-Control/Pragma no-cache headers are
# sent with the request (avoids stale proxy-cached queue data).
+ headers = { 'Cache-Control': 'no-cache', 'Pragma': 'no-cache' }
+ req = urllib2.Request(url=control_url + "/queue.gz", headers=headers)
+ f = urllib2.urlopen(req)
signal.alarm(0)
# Patch: narrow bare 'except:' and log the exception detail.
- except:
+ except Exception, e:
signal.alarm(0)
- log.error("can't fetch %s" % (control_url + "/queue.gz"))
+ log.error("can't fetch %s: %s" % (control_url + "/queue.gz", e))
sys.exit(1)
# Buffer the gzipped response in memory, then wrap it for decompression.
sio = StringIO.StringIO()
util.sendfile(f, sio)
f.close()
sio.seek(0)
f = gzip.GzipFile(fileobj = sio)
# Patch: gpg.verify_sig now takes the document contents (f.read())
# rather than a file object; correspondingly the body.seek(0) below is
# dropped — presumably 'body' is now returned already positioned/as a
# string.  TODO confirm against gpg.verify_sig's updated signature.
- (signers, body) = gpg.verify_sig(f)
+ (signers, body) = gpg.verify_sig(f.read())
u = acl.user_by_email(signers)
if u == None:
log.alert("queue.gz not signed with signature of valid user: %s" % signers)
if not u.can_do("sign_queue", "all"):
log.alert("user %s is not allowed to sign my queue" % u.login)
sys.exit(1)
- body.seek(0)
return request.parse_requests(body)
# Merge incoming 'group' requests into the local queue 'q'.
# NOTE(review): the body is truncated in this fragment — the loop over
# r.batches is cut off right after 'if builder in b.builders:'.
def handle_reqs(builder, reqs):
q.lock(0)
q.read()
for r in reqs:
# Patch: whitespace re-indent, and raise a real Exception instead of a
# string exception (string exceptions are deprecated; Python 2 comma
# 'raise' syntax).
- if r.kind != 'group':
- raise 'handle_reqs: fatal: huh? %s' % r.kind
+ if r.kind != 'group':
+ raise Exception, 'handle_reqs: fatal: huh? %s' % r.kind
need_it = 0
for b in r.batches:
if builder in b.builders:
# Body of the periodic fetch entry point.  NOTE(review): the enclosing
# 'def main():' line is not visible in this fragment, and lines between
# hunks are omitted — e.g. 'f' is closed below without a visible open,
# and nothing visible persists max_no or calls handle_reqs.  TODO review
# against the full file.
#
# Take a non-blocking singleton lock so only one fetcher runs at a time.
lck = lock.lock("request_fetcher", non_block = True)
if lck == None:
# Another request_fetcher instance holds the lock; bail out quietly.
sys.exit(1)
# Patch: call init_conf() without the empty-builder argument, and reload
# the ACL each iteration so permission changes are picked up.
- init_conf("")
-
+ init_conf()
+ acl.try_reload()
+
status.push("fetching requests")
if has_new(config.control_url):
q = fetch_queue(config.control_url)
max_no = 0
q_new = []
# Track the highest request number seen and keep only the requests newer
# than the locally recorded last_count.
for r in q:
# Patch: whitespace-only re-indent of this line.
- if r.no > max_no:
+ if r.no > max_no:
max_no = r.no
if r.no > last_count:
q_new.append(r)
f.close()
status.pop()
lck.close()
-
+
if __name__ == '__main__':
# Patch below is whitespace-only (re-indent of the comment line).
- # http connection is established (and few bytes transferred through it)
+ # http connection is established (and few bytes transferred through it)
# each $secs seconds.
# Re-run main() every 10 seconds via the project's loop helper.
loop.run_loop(main, secs = 10)