# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
-import os, config, string, urllib, re, rpm
+from __future__ import print_function
+
+import os
+import config
+import string
+try:
+ import urllib.request as urlmess
+except ImportError:
+ import urllib as urlmess
+ pass
+import re
+import rpm
from common import fileexists, noarchcachedir
from baseftptree import BasePkg, BaseFtpTree
from sign import is_signed
return
def __str__(self):
- print "","An Error occured!"
+ return "An Error occured!"
def bailoutonerror():
    """Abort the run by raising SomeError if any errors were reported.

    Reads the module-level ``errnum`` counter (incremented by perror());
    does nothing when no errors have been recorded.
    """
    # 'errnum != 0' instead of the roundabout 'not errnum == 0'
    if errnum != 0:
        print("%d error(s) encountered... aborting" % errnum)
        raise SomeError()
def pinfo(msg):
    """Emit an informational message on stdout, prefixed with 'INFO: '."""
    line = 'INFO: ' + msg
    print(line)
def perror(msg):
    """Report an error on stdout and bump the module-level error counter."""
    global errnum
    errnum += 1
    print('ERR: ' + msg)
def pwarning(msg):
    """Emit a warning message on stdout, prefixed with 'WARN: '."""
    text = 'WARN: ' + msg
    print(text)
def rm(file, test = False):
if test:
else:
try:
os.remove(file)
- except OSError, e:
+ except OSError as e:
pinfo("os.remove(%s): %s" % (file, e))
#raise
fsrc = src
fdst = dst + '/' + src.split('/')[-1]
if test:
- if not os.path.exists(src):
+ if not os.path.exists(fsrc):
pinfo("TEST os.rename(%s, %s): source doesn't exists" % (fsrc, fdst))
if not os.path.exists(dst):
pinfo("TEST destination doesn't exist: %s" % dst)
else:
try:
os.rename(fsrc, fdst)
- except OSError, e:
+ except OSError as e:
pinfo("os.rename(%s, %s): %s" % (fsrc, fdst, e))
raise
class Pkg(BasePkg):
def __init__(self, nvr, tree):
BasePkg.__init__(self, nvr, tree)
- self.name = string.join(nvr.split('-')[:-2], '-')
+ self.name = '-'.join(nvr.split('-')[:-2])
self.version = nvr.split('-')[-2]
self.release = nvr.split('-')[-1]
self.marked4removal = False
def load(self, content=None):
BasePkg.load(self, content)
- if self.info.has_key('move'):
+ if 'move' in self.info:
self.mark4moving()
def writeinfo(self):
for bid in self.build.keys():
f.write("info:build:%s:requester:%s\ninfo:build:%s:requester_email:%s\n" % (bid, self.build[bid].requester, bid, self.build[bid].requester_email))
for key in self.info.keys():
- f.write("info:%s:%s\n" % (key, string.join(self.info[key], ':')))
+ f.write("info:%s:%s\n" % (key, ':'.join(self.info[key])))
for arch in self.files.keys():
for rpm in self.files[arch]:
f.write("file:%s:%s\n" % (arch, rpm))
-
+
def remove(self, test = False):
"""
Remove package from ftp
"""
def rpmhdr(pkg):
ts = rpm.ts()
- ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
+ ts.setVSFlags(rpm.RPMVSF_NODSAHEADER)
fdno = os.open(pkg, os.O_RDONLY)
hdr = ts.hdrFromFdno(fdno)
os.close(fdno)
self.do_checkbuild = True
def __getitem__(self, key):
- if self.loadedpkgs.has_key(key):
+ if key in self.loadedpkgs:
return self.loadedpkgs[key]
elif key in self.pkgnames:
pkg=Pkg(key, self)
self.loadedpkgs[key]=pkg
return pkg
else:
- raise KeyError, key
+ raise KeyError(key)
def has_key(self, key):
if key in self.pkgnames:
def testmove(self, dsttree, archivetree = None):
self.__checkbuild(self.marked4moving)
self.__checkarchs(dsttree, self.marked4moving)
- self.__checkduplicates(self.marked4moving)
+ if not dsttree.treename.count("archive"):
+ self.__checkduplicates(self.marked4moving)
self.__checksigns(dsttree, self.marked4moving, test = True)
self.__checkforobsoletes(dsttree, self.marked4moving, test = True)
self.__checkforrelease(dsttree, self.marked4moving, test = True)
-
+
if not self.treename.count("archive"):
self.__rmolderfromsrc(test = True)
-
- self.__rmotherfromdst(dsttree, test = True, archivetree = archivetree)
+ if not dsttree.treename.count("archive"):
+ self.__rmotherfromdst(dsttree, test = True, archivetree = archivetree)
for pkg in self.marked4moving:
pkg.move(dsttree, test = True)
if not self.treename.count("archive"):
self.__rmolderfromsrc()
-
- self.__rmotherfromdst(dsttree, archivetree = archivetree)
+ if not dsttree.treename.count("archive"):
+ self.__rmotherfromdst(dsttree, archivetree = archivetree)
for pkg in self.marked4moving:
pkg.move(dsttree)
def mark4moving(self, wannabepkgs):
self.__mark4something(wannabepkgs, Pkg.mark4moving)
-
# Internal functions below
def __arch_stringify(self, list):
ret = []
- # XXX: is dist line in any config?
- dist = 'ac'
+ dist = config.ftp_dist;
for arch in list:
ret.append(dist + '-' + arch)
return ' '.join(ret)
Reads config.builderqueue to grab the info
"""
- f = urllib.urlopen(config.builderqueue)
+ f = urlmess.urlopen(config.builderqueue)
requests = {}
reid = re.compile(r'^.*id=(.*) pri.*$')
regb = re.compile(r'^group:.*$|builders:.*$', re.M)
- for i in re.findall(regb, f.read()):
+ for i in re.findall(regb, f.read().decode('utf-8')):
if i[0] == 'g':
id = reid.sub(r'\1', i)
requests[id] = ""
for pkg in marked:
for bid in pkg.build.keys():
- if requests.has_key(bid) and not requests[bid].find('?') == -1:
+ if bid in requests and not requests[bid].find('?') == -1:
pkg.error("(buildid %s) building not finished" % bid)
def __checkarchs(self, dsttree, marked):
pkg.error('moving would remove archs: %s' % self.__arch_stringify(missingarchs))
else:
# warn if a package isn't built for all archs
- if (config.separate_noarch and 'noarch' in pkg.files.keys() and len(pkg.files.keys()) == 2):
- continue
- elif len(pkg.files.keys()) != len(config.ftp_archs) + 1:
+ # ftp_archs + SRPMS
+ ftp_archs_num = len(config.ftp_archs) + 1
+ if (config.separate_noarch and 'noarch' in pkg.files.keys()):
+ # ftp_archs + SRPMS + noarch subpackages
+ ftp_archs_num += 1
+ # plain simple noarch package
+ if (len(pkg.files.keys()) == 2):
+ continue
+
+ if len(pkg.files.keys()) != ftp_archs_num:
missingarchs = []
for arch in config.ftp_archs:
if arch not in pkg.files.keys():