self.email = get("email")
self.buildlogs_url = get("buildlogs_url", "/dev/null")
self.ftp_url = get("ftp_url")
+ self.notify_url = get("notify_url")
self.test_ftp_url = get("test_ftp_url", "/dev/null")
self.rpmqa_url = get("rpmqa_url", "/dev/null")
self.rpmqa_filename = get("rpmqa_filename")
import shutil
import sys
import traceback
+import urllib2
from config import config, init_conf
import mailer
res = f.close()
if password != None: os.unlink(".rsync.pass")
return f.close()
-
+
+def post_file(src, url):
+    # HTTP POST the whole contents of file `src` to `url`.
+    # Returns 0 on HTTP 200, the status code on any other response, or
+    # the exception object when reading/posting fails (the exception is
+    # also stashed in the module-global `problem` for the caller).
+    global problem
+    try:
+        f = open(src, 'r')
+        data = f.read()
+        f.close()
+        # urllib2 issues a POST because a request body is supplied.
+        req = urllib2.Request(url, data)
+        f = urllib2.urlopen(req)
+        code = f.code
+        f.close()
+    except Exception, e:
+        problem = e
+        return e
+    if code == 200:
+        return 0
+    else:
+        return code
+
def send_file(src, target):
log.notice("sending %s (size %d bytes)" % (target, os.stat(src).st_size))
m = re.match('rsync://([^/]+)/.*', target)
m = re.match('scp://([^@:]+@[^/:]+)(:|)(.*)', target)
if m:
return scp_file(src, m.group(1) + ":" + m.group(3))
+ m = re.match('http://.*', target)
+ if m:
+ return post_file(src, target)
log.alert("unsupported protocol: %s" % target)
# pretend everything went OK, so file is removed from queue,
# and doesn't cause any additional problems
if lock.lock("sending-files", non_block = 1) == None:
return
init_conf()
+ maybe_flush_queue(path.notify_queue_dir)
maybe_flush_queue(path.buildlogs_queue_dir)
maybe_flush_queue(path.ftp_queue_dir)
import mailer
import gpg
import util
+import notifyq
from config import config
class Notifier:
self.xml.write("<notification group-id='%s' builder='%s'>\n" % \
(g.id, config.builder))
- def send(self):
+ def send(self, r):
+ sio = StringIO.StringIO()
self.xml.write("</notification>\n")
- msg = mailer.Message()
- msg.set_headers(to = config.notify_email, subject = "status notification")
- msg.set_header("X-New-PLD-Builder", "status-notification")
self.xml.seek(0)
- util.sendfile(gpg.sign(self.xml), msg)
- msg.send()
+ util.sendfile(gpg.sign(self.xml), sio)
self.xml = None
+ sio.seek(0)
+ notifyq.init(r)
+ notifyq.add(sio)
+ notifyq.flush()
def add_batch(self, b, s):
self.xml.write(" <batch id='%s' status='%s' />\n" % (b.b_id, s))
def add_batch(batch, status):
n.add_batch(batch, status)
-def send():
- n.send()
+def send(r):
+ n.send(r)
--- /dev/null
+# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
+
+import path
+import os
+import shutil
+import time
+
+from config import config
+import util
+
+class Notify_Queue:
+ def __init__(self):
+ self.queue = None
+ self.some_failed = 0
+
+ def init(self, g=None):
+ self.queue = []
+ self.requester_email = g.requester_email
+ self.notify_url = config.notify_url
+
+ def add(self, file):
+ id = util.uuid()
+ f = open(path.notify_queue_dir + id, 'w')
+ f.write(file.read())
+ f.close()
+ self.queue.append({'id': id})
+
+ def flush(self):
+ def desc(l):
+ return """Target: %s
+Id: %s
+Builder: %s
+Time: %d
+Requester: %s
+END
+""" % (self.notify_url, l['id'], config.builder, time.time(), self.requester_email)
+
+ for l in self.queue:
+ f = open(path.notify_queue_dir + l['id'] + ".desc", "w")
+ f.write(desc(l))
+ f.close()
+
+ def kill(self):
+ for l in self.queue:
+ os.unlink(path.notify_queue_dir + l)
+
+# Module-level singleton queue plus thin convenience wrappers, matching
+# the interface style of the sibling queue modules (buildlogs, ftp).
+queue = Notify_Queue()
+
+def add(notify):
+    # Spool one notification payload (a file-like object).
+    queue.add(notify)
+
+def flush():
+    # Write the .desc control files so the sender daemon picks them up.
+    queue.flush()
+
+def kill():
+    # Discard everything spooled so far without sending.
+    queue.kill()
+
+def init(r):
+    # Begin a fresh queue for request/group `r`.
+    queue.init(r)
+
+def status():
+    # NOTE(review): Notify_Queue.__init__ never sets `status`, so this
+    # raises AttributeError until clear_status() has run — confirm intent.
+    return queue.status
+
+def clear_status():
+    queue.status = ""
queue_file = spool_dir + "queue"
req_queue_file = spool_dir + "req_queue"
processed_ids_file = spool_dir + "processed_ids"
+notify_queue_dir = spool_dir + "notify/"
buildlogs_queue_dir = spool_dir + "buildlogs/"
ftp_queue_dir = spool_dir + "ftp/"
build_dir = spool_dir + "builds/"
status.pop()
return True
-def main():
+def handle_request_main(stream):
+    # Handle one e-mail build request read from `stream`; returns the
+    # handler's success flag instead of exiting, so callers other than
+    # the CLI entry point can reuse it.
     init_conf("src")
     status.push("handling email request")
-    ret = handle_request(sys.stdin)
+    ret = handle_request(stream)
     status.pop()
-    sys.exit(not ret)
+    return ret
+
+def main():
+    # CLI entry point: read the request from stdin; exit 0 on success.
+    sys.exit(not handle_request_main(sys.stdin))
if __name__ == '__main__':
wrap.wrap(main)
build.build_all(r, build_rpm)
report.send_report(r, is_src = False)
ftp.flush()
- notify.send()
+ notify.send(r)
def check_load():
do_exit = 0
exit 1
fi
-mkdir -p spool/{builds,buildlogs,ftp} www/srpms lock
+mkdir -p spool/{builds,buildlogs,notify,ftp} www/srpms lock
echo 0 > www/max_req_no
echo 0 > spool/last_req_no
echo -n > spool/processed_ids
chroot = /home/pld/builderth/chroots/chroot-src/
buildlogs_url = rsync://blogs-th@buildlogs.pld-linux.org/pld-buildlogs-th-SRPMS/
ftp_url = scp://pldth@ep09.pld-linux.org:ftp/.incoming/SRPMS/
+notify_url = http://ep09.pld-linux.org:1234/
test_ftp_url = scp://pldth@ep09.pld-linux.org:ftp/.test-builds/SRPMS/
rpmqa_url = scp://pldth@ep09.pld-linux.org:ftp/.stat/builder/th/
rpmqa_filename = rpmqa-SRPMS.txt
arch = athlon
chroot = /home/users/builderth/chroot-athlon/
buildlogs_url = /dev/null
+notify_url = http://ep09.pld-linux.org:1234/
ftp_url = scp://pldth@ep09.pld-linux.org:ftp/.incoming/athlon/
test_ftp_url = /dev/null
rpmqa_url = /dev/null