# vi: encoding=utf-8 ts=8 sts=4 sw=4 et
-import os, config, string, urllib, re, rpm
+from __future__ import print_function
+
+import os
+import config
+import string
+try:
+ import urllib.request as urlmess
+except ImportError:
+ import urllib as urlmess
+ pass
+import re
+import rpm
from common import fileexists, noarchcachedir
from baseftptree import BasePkg, BaseFtpTree
-errnum=0
+from sign import is_signed
-quietmode=False
+errnum = 0
+quietmode = False
class SomeError(Exception):
def __init__(self):
return
def __str__(self):
- print "","An Error occured!"
+ return "An Error occured!"
def bailoutonerror():
if not errnum == 0:
- print "%d error(s) encountered... aborting" % errnum
- raise SomeError
+ print("%d error(s) encountered... aborting" % errnum)
+ raise SomeError()
def pinfo(msg):
- print 'INFO: ' + msg
+ print('INFO: ' + msg)
def perror(msg):
global errnum
- errnum=errnum+1
- print 'ERR: ' + msg
+ errnum = errnum + 1
+ print('ERR: ' + msg)
def pwarning(msg):
- print 'WARN: ' + msg
+ print('WARN: ' + msg)
-def rm(file, test=False):
+def rm(file, test = False):
if test:
if not os.path.exists(file):
pinfo("TEST os.remove(%s): file doesn't exists" % file)
else:
try:
os.remove(file)
- except OSError, e:
+ except OSError as e:
pinfo("os.remove(%s): %s" % (file, e))
#raise
-def mv(src, dst, test=False):
+def mv(src, dst, test = False):
fsrc = src
- fdst = dst+'/'+src.split('/')[-1]
+ fdst = dst + '/' + src.split('/')[-1]
if test:
- if not os.path.exists(src):
+ if not os.path.exists(fsrc):
pinfo("TEST os.rename(%s, %s): source doesn't exists" % (fsrc, fdst))
+ if not os.path.exists(dst):
+ pinfo("TEST destination doesn't exist: %s" % dst)
else:
try:
os.rename(fsrc, fdst)
- except OSError, e:
+ except OSError as e:
pinfo("os.rename(%s, %s): %s" % (fsrc, fdst, e))
raise
class Pkg(BasePkg):
def __init__(self, nvr, tree):
BasePkg.__init__(self, nvr, tree)
- self.name = string.join(nvr.split('-')[:-2], '-')
+ self.name = '-'.join(nvr.split('-')[:-2])
self.version = nvr.split('-')[-2]
self.release = nvr.split('-')[-1]
self.marked4removal = False
return rpm.labelCompare(('0', self.version, self.release),
('0', pkg.version, pkg.release))
+
+ # unfortunately can't do new Pkg(NVR), and have no "tree" in this pkg context
+ # so this method takes the NVR explicitly instead of using self.nvr
+ def is_debuginfo(self, nvr):
+ """
+ returns true if NVR is debuginfo package and separate debuginfo is enabled
+ """
+ if not config.separate_debuginfo:
+ return False
+ pkg = nvr.split('-')[:-2]
+ return pkg[-1] == 'debuginfo'
+
+ def is_sourcefile(self, file):
+ """
+ returns true if file is source package
+ """
+ return file[-8:] == '.src.rpm'
+
+ # returns true if package build is integer
+ def is_release(self):
+ """
+ To account Release tags with subver macros, we consider integer release
+ if it contains an even number of dots:
+
+ 1 -> True
+ 0.1 -> False
+ 0.%{subver}.%{rel}, %{rel} = 1 -> 0.20010.1 -> True
+ 0.%{subver}.%{rel}, %{rel} = 0.1 -> 0.20010.0.1 -> False
+ """
+ return self.release.count('.') % 2 == 0
+
def mark4moving(self):
if not self.marked4moving:
# Only one pkg in this pool can be marked for moving
def load(self, content=None):
BasePkg.load(self, content)
- if self.info.has_key('move'):
+ if 'move' in self.info:
self.mark4moving()
def writeinfo(self):
for bid in self.build.keys():
f.write("info:build:%s:requester:%s\ninfo:build:%s:requester_email:%s\n" % (bid, self.build[bid].requester, bid, self.build[bid].requester_email))
for key in self.info.keys():
- f.write("info:%s:%s\n" % (key, string.join(self.info[key], ':')))
+ f.write("info:%s:%s\n" % (key, ':'.join(self.info[key])))
for arch in self.files.keys():
for rpm in self.files[arch]:
f.write("file:%s:%s\n" % (arch, rpm))
-
+
def remove(self, test = False):
"""
Remove package from ftp
"""
for arch in self.files.keys():
for rpm in self.files[arch]:
- rm(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, test)
+ if self.is_debuginfo(rpm):
+ rm(self.tree.basedir + '/' + arch + '/debuginfo/' + rpm, test)
+ else:
+ rm(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, test)
if arch == 'noarch':
if fileexists(noarchcachedir + rpm + '.filelist'):
rm(noarchcachedir + rpm + '.filelist', test)
rm(noarchcachedir + rpm + '.reqlist', test)
rm(self.tree.basedir + '/SRPMS/.metadata/' + self.nvr + '.src.rpm.info', test)
- def move(self, dsttree, test=False):
+ def rpmfiles(self, debugfiles = True, sourcefiles = True):
+ """
+ Return rpm files related to this package
+ """
+ files = []
+ for arch, rpms in self.files.items():
+ for nvr in rpms:
+ if self.is_debuginfo(nvr):
+ if debugfiles:
+ files.append(self.tree.basedir + '/' + arch + '/debuginfo/' + nvr)
+ else:
+ if self.is_sourcefile(nvr):
+ if sourcefiles:
+ files.append(self.tree.basedir + '/' + arch + '/RPMS/' + nvr)
+ else:
+ files.append(self.tree.basedir + '/' + arch + '/RPMS/' + nvr)
+ return files
+
+ def obsoletes(self):
+ """
+ Return obsoletes for all packages in Pkg:
+
+ {'php-geshi': set(['geshi'])}
+
+ """
+ def rpmhdr(pkg):
+ ts = rpm.ts()
+ ts.setVSFlags(rpm.RPMVSF_NODSAHEADER)
+ fdno = os.open(pkg, os.O_RDONLY)
+ hdr = ts.hdrFromFdno(fdno)
+ os.close(fdno)
+ return hdr
+
+ obsoletes = {}
+ for rpmfile in self.rpmfiles():
+ if not os.path.exists(rpmfile):
+ continue
+ hdr = rpmhdr(rpmfile)
+ if not hdr[rpm.RPMTAG_OBSOLETES]:
+ continue
+
+ name = hdr[rpm.RPMTAG_NAME]
+ if not name in obsoletes:
+ obsoletes[name] = set()
+
+ for tag in hdr[rpm.RPMTAG_OBSOLETES]:
+ obsoletes[name].add(tag)
+
+ return obsoletes
+
+ def move(self, dsttree, test = False):
if dsttree.has_key(self.nvr):
movedany = False
for arch in self.files.keys():
msg = "TEST "
pinfo("%sArch %s for %s is already present in dest tree; removing from srctree" % (msg, arch, self.nvr))
for rpm in self.files[arch]:
- rm(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, test)
+ if self.is_debuginfo(rpm):
+ rm(self.tree.basedir + '/' + arch + '/debuginfo/' + rpm, test)
+ else:
+ rm(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, test)
else:
movedany = True
dsttree[self.nvr].files[arch] = self.files[arch]
for rpm in self.files[arch]:
- mv(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, dsttree.basedir + '/' + arch + '/RPMS/', test)
- if not test and movedany:
+ if self.is_debuginfo(rpm):
+ mv(self.tree.basedir + '/' + arch + '/debuginfo/' + rpm, dsttree.basedir + '/' + arch + '/debuginfo/', test)
+ else:
+ mv(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, dsttree.basedir + '/' + arch + '/RPMS/', test)
+ if not test and movedany:
for bid in self.build.keys():
dsttree[self.nvr].build[bid] = self.build[bid]
dsttree[self.nvr].writeinfo()
# move files
for arch in self.files.keys():
for rpm in self.files[arch]:
- mv(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, dsttree.basedir + '/' + arch + '/RPMS/', test)
+ if self.is_debuginfo(rpm):
+ mv(self.tree.basedir + '/' + arch + '/debuginfo/' + rpm, dsttree.basedir + '/' + arch + '/debuginfo/', test)
+ else:
+ mv(self.tree.basedir + '/' + arch + '/RPMS/' + rpm, dsttree.basedir + '/' + arch + '/RPMS/', test)
# move metadata
mv(self.tree.basedir + '/SRPMS/.metadata/' + self.nvr + '.src.rpm.info', dsttree.basedir + '/SRPMS/.metadata/', test)
self.do_checkbuild = True
def __getitem__(self, key):
- if self.loadedpkgs.has_key(key):
+ if key in self.loadedpkgs:
return self.loadedpkgs[key]
elif key in self.pkgnames:
pkg=Pkg(key, self)
self.loadedpkgs[key]=pkg
return pkg
else:
- raise KeyError, key
+ raise KeyError(key)
def has_key(self, key):
if key in self.pkgnames:
self.__checkbuild(self.loadedpkgs.values())
self.__checkarchs(dsttree, self.loadedpkgs.values())
- def testmove(self, dsttree):
+ def testmove(self, dsttree, archivetree = None):
self.__checkbuild(self.marked4moving)
self.__checkarchs(dsttree, self.marked4moving)
-
- self.__rmolderfromsrc(test = True)
- self.__rmotherfromdst(dsttree, test = True)
+ if not dsttree.treename.count("archive"):
+ self.__checkduplicates(self.marked4moving)
+
+ self.__checksigns(dsttree, self.marked4moving, test = True)
+ self.__checkforobsoletes(dsttree, self.marked4moving, test = True)
+ self.__checkforrelease(dsttree, self.marked4moving, test = True)
+
+ if not self.treename.count("archive"):
+ self.__rmolderfromsrc(test = True)
+ if not dsttree.treename.count("archive"):
+ self.__rmotherfromdst(dsttree, test = True, archivetree = archivetree)
for pkg in self.marked4moving:
pkg.move(dsttree, test = True)
- def movepkgs(self, dsttree):
+ def movepkgs(self, dsttree, archivetree = None):
if self.do_checkbuild:
self.__checkbuild(self.marked4moving)
bailoutonerror()
+
self.__checkarchs(dsttree, self.marked4moving)
bailoutonerror()
- self.__rmolderfromsrc()
- self.__rmotherfromdst(dsttree)
+
+ self.__checksigns(dsttree, self.marked4moving)
+ bailoutonerror()
+
+ if not self.treename.count("archive"):
+ self.__rmolderfromsrc()
+ if not dsttree.treename.count("archive"):
+ self.__rmotherfromdst(dsttree, archivetree = archivetree)
for pkg in self.marked4moving:
pkg.move(dsttree)
+ def rpmfiles(self, debugfiles = True, sourcefiles = True):
+ if self.do_checkbuild:
+ self.__checkbuild(self.marked4moving)
+
+ files = []
+ for pkg in self.marked4moving:
+ files += pkg.rpmfiles(debugfiles = debugfiles, sourcefiles = sourcefiles)
+ return files
+
def removepkgs(self):
if self.do_checkbuild:
self.__checkbuild(self.marked4removal)
def mark4moving(self, wannabepkgs):
self.__mark4something(wannabepkgs, Pkg.mark4moving)
-
# Internal functions below
+ def __arch_stringify(self, list):
+ ret = []
+ dist = config.ftp_dist;
+ for arch in list:
+ ret.append(dist + '-' + arch)
+ return ' '.join(ret)
def __loadpkgnames(self):
def checkfiletype(name):
def __mark4something(self, wannabepkgs, markfunction):
def chopoffextension(pkg):
- found=pkg.find('.src.rpm')
- if found==-1:
+ found = pkg.find('.src.rpm')
+ if found == -1:
return pkg
else:
return pkg[:found]
+
for wannabepkg in wannabepkgs:
- pkgname=chopoffextension(wannabepkg)
+ pkgname = chopoffextension(wannabepkg)
if pkgname in self.pkgnames:
if not pkgname in self.loadedpkgs.keys():
self.loadedpkgs[pkgname]=Pkg(pkgname, self)
Reads config.builderqueue to grab the info
"""
- f = urllib.urlopen(config.builderqueue)
+ f = urlmess.urlopen(config.builderqueue)
requests = {}
reid = re.compile(r'^.*id=(.*) pri.*$')
regb = re.compile(r'^group:.*$|builders:.*$', re.M)
- for i in re.findall(regb, f.read()):
+ for i in re.findall(regb, f.read().decode('utf-8')):
if i[0] == 'g':
id = reid.sub(r'\1', i)
requests[id] = ""
for pkg in marked:
for bid in pkg.build.keys():
- if requests.has_key(bid) and not requests[bid].find('?') == -1:
+ if bid in requests and not requests[bid].find('?') == -1:
pkg.error("(buildid %s) building not finished" % bid)
def __checkarchs(self, dsttree, marked):
if arch not in pkg.files.keys():
missingarchs.append(arch)
if missingarchs:
- pkg.error('moving would remove archs: %s' % missingarchs)
+ pkg.error('moving would remove archs: %s' % self.__arch_stringify(missingarchs))
else:
# warn if a package isn't built for all archs
- if (config.separate_noarch and 'noarch' in pkg.files.keys() and len(pkg.files.keys()) == 2):
- continue
- elif len(pkg.files.keys()) != len(config.ftp_archs) + 1:
+ # ftp_archs + SRPMS
+ ftp_archs_num = len(config.ftp_archs) + 1
+ if (config.separate_noarch and 'noarch' in pkg.files.keys()):
+ # ftp_archs + SRPMS + noarch subpackages
+ ftp_archs_num += 1
+ # plain simple noarch package
+ if (len(pkg.files.keys()) == 2):
+ continue
+
+ if len(pkg.files.keys()) != ftp_archs_num:
missingarchs = []
for arch in config.ftp_archs:
if arch not in pkg.files.keys():
missingarchs.append(arch)
- pkg.warning('not built for archs: %s' % missingarchs)
+ pkg.warning('not built for archs: %s' % self.__arch_stringify(missingarchs))
- def __rmolderfromsrc(self, test=False):
+ def __checkduplicates(self, marked):
+ """
+ Checks if marked packages contain duplicate packages (with different versions)
+ """
+ for pkg in marked:
+ olderpkgnames = self.__find_older_pkgs(pkg)
+ for i in olderpkgnames:
+ markednames = [str(x) for x in marked]
+ if i in markednames:
+ pkg.error('duplicate package: %s' % i)
+
+ def __rmolderfromsrc(self, test = False):
for pkg in self.marked4moving:
olderpkgnames = self.__find_older_pkgs(pkg)
for i in olderpkgnames:
Pkg(i, self).remove(test)
- def __rmotherfromdst(self, dsttree, test = False):
+ def __rmotherfromdst(self, dsttree, test = False, archivetree = None):
for pkg in self.marked4moving:
pkgnames = self.__find_other_pkgs(pkg, dsttree)
for i in pkgnames:
- Pkg(i, dsttree).remove(test)
+ if archivetree == None:
+ Pkg(i, dsttree).remove(test)
+ else:
+ Pkg(i, dsttree).move(archivetree, test = test)
# Used more than once filter functions
def __find_other_pkgs(self, pkg, tree):
return False
return filter(filter_older_pkgs, self.__find_other_pkgs(pkg, self))
+ def __checksigns(self, tree, pkgs, test = False):
+ """
+ Checks if pkgs in tree are all signed.
+
+ in case of test = True only a warning is printed; otherwise the error flag is set for unsigned packages
+ """
+ if not tree.treename in config.signed_trees:
+ return
+
+ for pkg in pkgs:
+ unsigned = 0
+ for file in pkg.rpmfiles():
+ if not is_signed(file):
+ unsigned += 1
+
+ if unsigned != 0:
+ if test == True:
+ if not quietmode:
+ pkg.warning('%d files not signed' % unsigned)
+ else:
+ pkg.error('%d files not signed' % unsigned)
+
+ def __checkforobsoletes(self, tree, pkgs, test = False):
+ """
+ Checks whether a package obsoletes something in the destination tree and suggests it for removal.
+
+ Only NAME tag is compared, i.e virtual packages do not get reported.
+
+ """
+ if test != True:
+ return
+
+ def findbyname(name):
+ def x(nvr):
+ return '-'.join(nvr.split('-')[:-2]) == name
+ return filter(x, tree.pkgnames)
+
+ for pkg in pkgs:
+ obsoletes = pkg.obsoletes()
+ if not obsoletes:
+ continue
+
+ for pn, setlist in obsoletes.items():
+ for item in setlist:
+ p = findbyname(item)
+ if p:
+ pkg.warning('obsoletes %s (via %s) in dest tree, perhaps you want rmpkg' % (p,pn))
+
+ def __checkforrelease(self, tree, pkgs, test = False):
+ """
+ Checks queue file if package release is non integer.
+
+ """
+ if test != True:
+ return
+
+ for pkg in pkgs:
+ if not pkg.is_release():
+ pkg.warning('non-integer release: %s' % pkg.release)