# Copyright (C) 2006 Novell Inc.  All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.
import os, re, sys, glob, shutil, subprocess, urllib2

from tempfile import NamedTemporaryFile, mkdtemp
from shutil import rmtree
from osc.fetch import *
from osc.core import get_buildinfo, store_read_apiurl, store_read_project, store_read_package, meta_exists, quote_plus, get_buildconfig, is_package_dir
from osc.core import get_binarylist, get_binary_file
from osc.util import rpmquery, debquery
import osc.core
import oscerr

try:
    from xml.etree import cElementTree as ET
except ImportError:
    import cElementTree as ET

import conf
from conf import config, cookiejar
# maps a build arch to the personality wrapper needed to run a 32bit build
# on this (64bit) host
change_personality = {
            'i686':  'linux32',
            'i586':  'linux32',
            'i386':  'linux32',
            'ppc':   'powerpc32',
            's390':  's390',
        }

# which target architectures a given host architecture can build for
can_also_build = {
             'armv4l': [ 'armv4l' ],
             'armv5el':[ 'armv4l', 'armv5el' ],
             'armv6el':[ 'armv4l', 'armv5el', 'armv6el' ],
             'armv6l' :[ 'armv4l', 'armv5el', 'armv6el' ],
             'armv7el':[ 'armv4l', 'armv5el', 'armv6el', 'armv7el' ],
             'armv7l' :[ 'armv4l', 'armv5el', 'armv6el', 'armv7el' ],
             'armv8el':[ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ],
             'armv8l' :[ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ],
             'ppc64':  [ 'ppc', 'ppc64' ],
             'i386':   [ 'i586', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
             'i586':   [ 'i386', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
             'i686':   [ 'i586', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
             'x86_64': [ 'i686', 'i586', 'i386', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
             'sparc64': [ 'sparc64v', 'sparcv9v', 'sparcv9', 'sparcv8', 'sparc' ],
             }
# real arch of this machine
hostarch = os.uname()[4]
if hostarch == 'i686': # FIXME
    hostarch = 'i586'

class Buildinfo:
    """represent the contents of a buildinfo file"""

    def __init__(self, filename, apiurl, buildtype='spec', localpkgs=[]):
        try:
            tree = ET.parse(filename)
        except:
            print >>sys.stderr, 'could not parse the buildinfo:'
            print >>sys.stderr, open(filename).read()
            sys.exit(1)

        root = tree.getroot()

        if root.find('error') is not None:
            sys.stderr.write('buildinfo is broken... it says:\n')
            error = root.find('error').text
            sys.stderr.write(error + '\n')
            sys.exit(1)

        if not (apiurl.startswith('https://') or apiurl.startswith('http://')):
            raise urllib2.URLError('invalid protocol for the apiurl: \'%s\'' % apiurl)
        self.buildtype = buildtype

        # are we building .rpm or .deb?
        # XXX: shouldn't we deliver the type via the buildinfo?
        self.pacsuffix = 'rpm'
        if self.buildtype == 'dsc':
            self.pacsuffix = 'deb'

        self.buildarch = root.find('arch').text
        if root.find('release') is not None:
            self.release = root.find('release').text
        else:
            self.release = None
        self.downloadurl = root.get('downloadurl')
        self.debuginfo = 0
        if root.find('debuginfo') is not None:
            self.debuginfo = int(root.find('debuginfo').text)
        self.deps = []
        self.projects = {}
        for node in root.findall('bdep'):
            p = Pac(node, self.buildarch, self.pacsuffix,
                    apiurl, localpkgs)
            if p.project:
                self.projects[p.project] = 1
            self.deps.append(p)

        self.vminstall_list = [ dep.name for dep in self.deps if dep.vminstall ]
        self.cbinstall_list = [ dep.name for dep in self.deps if dep.cbinstall ]
        self.cbpreinstall_list = [ dep.name for dep in self.deps if dep.cbpreinstall ]
        self.preinstall_list = [ dep.name for dep in self.deps if dep.preinstall ]
        self.runscripts_list = [ dep.name for dep in self.deps if dep.runscripts ]
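
        # these name lists end up as the preinstall:, vminstall:, cbinstall:,
        # cbpreinstall: and runscripts: lines of the rpmlist written in main()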
    def has_dep(self, name):
        return name in [ i.name for i in self.deps ]

    def remove_dep(self, name):
        self.deps = [ i for i in self.deps if i.name != name ]

class Pac:
    """represent a package to be downloaded

    We build a map that's later used to fill our URL templates
    """
    def __init__(self, node, buildarch, pacsuffix, apiurl, localpkgs=[]):

        self.mp = {}
        for i in ['name', 'package',
                  'version', 'release',
                  'project', 'repository',
                  'preinstall', 'vminstall', 'noinstall', 'runscripts',
                  'cbinstall', 'cbpreinstall']:
            self.mp[i] = node.get(i)

        self.mp['buildarch'] = buildarch
        self.mp['pacsuffix'] = pacsuffix

        self.mp['arch'] = node.get('arch') or self.mp['buildarch']

        # this is not the ideal place to check if the package is a localdep or not
        localdep = self.mp['name'] in localpkgs
        if not localdep and not (node.get('project') and node.get('repository')):
            raise oscerr.APIError('incomplete information for package %s, may be caused by a broken project configuration.'
                                  % self.mp['name'])

        self.mp['extproject'] = node.get('project').replace(':', ':/')
        self.mp['extrepository'] = node.get('repository').replace(':', ':/')
        self.mp['repopackage'] = node.get('package') or '_repository'
        self.mp['repoarch'] = node.get('repoarch') or self.mp['buildarch']

        if pacsuffix == 'deb' and not (self.mp['name'] and self.mp['arch'] and self.mp['version']):
            raise oscerr.APIError(
                "buildinfo for package %s/%s/%s is incomplete"
                % (self.mp['name'], self.mp['arch'], self.mp['version']))

        self.mp['apiurl'] = apiurl

        if pacsuffix == 'deb':
            self.filename = debquery.DebQuery.filename(self.mp['name'], self.mp['version'], self.mp['release'], self.mp['arch'])
        else:
            self.filename = rpmquery.RpmQuery.filename(self.mp['name'], self.mp['version'], self.mp['release'], self.mp['arch'])

        self.mp['filename'] = self.filename
        if self.mp['repopackage'] == '_repository':
            self.mp['repofilename'] = self.mp['name']
        else:
            self.mp['repofilename'] = self.mp['filename']

        # make the content of the dictionary accessible as class attributes
        self.__dict__.update(self.mp)
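        # e.g. self.mp['name'] is now also available as self.name and
        # self.mp['project'] as self.project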

    def makeurls(self, cachedir, urllist):

        self.urllist = []

        # build up the local URL
        # by using the urlgrabber with local urls, we basically build up a cache.
        # the cache has no validation, since the package servers don't support etags,
        # or if-modified-since, so the caching is simply name-based (on the assumption
        # that the filename is suitable as identifier)
        self.localdir = '%s/%s/%s/%s' % (cachedir, self.project, self.repository, self.arch)
        self.fullfilename = os.path.join(self.localdir, self.filename)
        self.url_local = 'file://%s' % self.fullfilename

        # first, add the local URL
        self.urllist.append(self.url_local)

        # then the remote URLs
        for url in urllist:
            self.urllist.append(url % self.mp)
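            # each URL template, e.g.
            # '%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s',
            # is filled in with this package's attribute map self.mp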

    def __str__(self):
        return "%s" % self.name

def get_built_files(pacdir, pactype):
    if pactype == 'rpm':
        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'RPMS'),
                                    '-name', '*.rpm'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
        s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SRPMS'),
                                    '-name', '*.rpm'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
    elif pactype == 'kiwi':
        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'KIWI'),
                                    '-type', 'f'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
        # kiwi produces no separate source packages
        s_built = ''
    else:
        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'DEBS'),
                                    '-name', '*.deb'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
        s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SOURCES.DEB'),
                                    '-type', 'f'],
                                   stdout=subprocess.PIPE).stdout.read().strip()
    return s_built, b_built
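
# both return values are newline-separated file lists; main() splits them
# again and copies the files into the --keep-pkgs directory if requested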

def get_repo(path):
    """Walks up path looking for any repodata directories.

    @param path path to a directory
    @return str path to repository directory containing repodata directory
    """
    oldDirectory = None
    currentDirectory = os.path.abspath(path)
    repositoryDirectory = None

    # while there are still parent directories
    while currentDirectory != oldDirectory:
        children = os.listdir(currentDirectory)

        if "repodata" in children:
            repositoryDirectory = currentDirectory
            break

        oldDirectory = currentDirectory
        currentDirectory = os.path.abspath(os.path.join(oldDirectory,
                                                        os.pardir))

    return repositoryDirectory
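
# e.g. get_repo('/srv/mirror/repo/x86_64') returns the closest directory (starting
# with the given one) that contains a 'repodata' subdirectory, or None if there is none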

def get_prefer_pkgs(dirs, wanted_arch, type):
    # scan local directories (and repodata-based repositories) for packages
    # that should be preferred over the ones from the build service
    from util import repodata, packagequery, cpio

    paths = []
    repositories = []

    suffix = '*.rpm'
    if type == 'dsc':
        suffix = '*.deb'

    for dir in dirs:
        # check for a repodata-based repository first
        repository = get_repo(dir)
        if repository is None:
            paths += glob.glob(os.path.join(os.path.abspath(dir), suffix))
        else:
            repositories.append(repository)

    packageQueries = packagequery.PackageQueries(wanted_arch)

    for repository in repositories:
        repodataPackageQueries = repodata.queries(repository)

        for packageQuery in repodataPackageQueries:
            packageQueries.add(packageQuery)

    for path in paths:
        if path.endswith('src.rpm'):
            continue
        if path.find('-debuginfo-') > 0:
            continue
        packageQuery = packagequery.PackageQuery.query(path)
        packageQueries.add(packageQuery)

    prefer_pkgs = dict((name, packageQuery.path())
                       for name, packageQuery in packageQueries.iteritems())

    depfile = create_deps(packageQueries.values())
    cpio = cpio.CpioWrite()
    cpio.add('deps', '\n'.join(depfile))
    return prefer_pkgs, cpio

def create_deps(pkgqs):
    """
    creates a list of requires/provides which corresponds to build's internal
    dependency file format
    """
    depfile = []
    for p in pkgqs:
        id = '%s.%s-0/0/0: ' % (p.name(), p.arch())
        depfile.append('R:%s%s' % (id, ' '.join(p.requires())))
        depfile.append('P:%s%s' % (id, ' '.join(p.provides())))
    return depfile
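
# The generated 'deps' file contains one R: (requires) and one P: (provides)
# line per preferred package, for example (names purely illustrative):
#   R:foo.x86_64-0/0/0: /bin/sh libc.so.6
#   P:foo.x86_64-0/0/0: foo = 1.0-1 foo(x86-64)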

trustprompt = """Would you like to ...
0 - quit (default)
1 - trust packages from '%(project)s' always
2 - trust them just this time
"""

def check_trusted_projects(apiurl, projects):
    trusted = config['api_host_options'][apiurl]['trusted_prj']
    tlen = len(trusted)
    for prj in projects:
        if prj not in trusted:
            print "\nThe build root needs packages from project '%s'." % prj
            print "Note that malicious packages can compromise the build result or even your system."
            r = raw_input(trustprompt % {'project': prj})
            if r == '1':
                print "adding '%s' to ~/.oscrc: ['%s']['trusted_prj']" % (prj, apiurl)
                trusted.append(prj)
            elif r != '2':
                print "Well, goodbye then :-)"
                raise oscerr.UserAbort()

    if tlen != len(trusted):
        config['api_host_options'][apiurl]['trusted_prj'] = trusted
        conf.config_set_option(apiurl, 'trusted_prj', ' '.join(trusted))
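
# the accepted projects are persisted as the 'trusted_prj' option of the
# matching apiurl section in ~/.oscrc, so the question is asked only once per project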

def main(opts, argv):

    repo = argv[0]
    arch = argv[1]
    build_descr = argv[2]
    xp = []
    build_root = None

    vm_type = config['build-type']

    build_descr = os.path.abspath(build_descr)
    build_type = os.path.splitext(build_descr)[1][1:]
    if build_type not in ['spec', 'dsc', 'kiwi']:
        raise oscerr.WrongArgs(
            'Unknown build type: \'%s\'. Build description should end in .spec, .dsc or .kiwi.'
            % build_type)
    if not os.path.isfile(build_descr):
        raise oscerr.WrongArgs('Error: build description file named \'%s\' does not exist.' % build_descr)

    buildargs = []
    if not opts.userootforbuild:
        buildargs.append('--norootforbuild')
    if opts.clean:
        buildargs.append('--clean')
    if opts.noinit:
        buildargs.append('--noinit')
    if opts.no_checks:
        buildargs.append('--no-checks')
    if not opts.no_changelog:
        buildargs.append('--changelog')
    if opts.root:
        build_root = opts.root
    if opts.jobs:
        buildargs.append('--jobs %s' % opts.jobs)
    elif config['build-jobs'] > 1:
        buildargs.append('--jobs %s' % config['build-jobs'])
    if opts.icecream or config['icecream'] != '0':
        if opts.icecream:
            num = opts.icecream
        else:
            num = config['icecream']
        buildargs.append('--icecream %s' % num)
        xp.append('icecream')
    if opts.ccache:
        buildargs.append('--ccache')
    if opts.linksources:
        buildargs.append('--linksources')
    if opts.baselibs:
        buildargs.append('--baselibs')
    if opts.debuginfo:
        buildargs.append('--debug')
    for o in opts._with:
        buildargs.append('--with %s' % o)
    for o in opts.without:
        buildargs.append('--without %s' % o)
    # buildargs.append('--define "%s"' % opts.define)

    build_uid = ''
    if config['build-uid']:
        build_uid = config['build-uid']
    if opts.build_uid:
        build_uid = opts.build_uid
    if build_uid:
        buildidre = re.compile('^[0-9]{1,5}:[0-9]{1,5}$')
        if build_uid == 'caller':
            buildargs.append('--uid %s:%s' % (os.getuid(), os.getgid()))
        elif buildidre.match(build_uid):
            buildargs.append('--uid %s' % build_uid)
        else:
            print >>sys.stderr, 'Error: build-uid must be "caller" or two colon-separated numeric ids ("uid:gid")'
            return 1
    if opts.vm_type:
        vm_type = opts.vm_type

    if opts.alternative_project:
        prj = opts.alternative_project
        pac = '_repository'
    else:
        prj = store_read_project(os.curdir)
        if opts.local_package:
            pac = '_repository'
        else:
            pac = store_read_package(os.curdir)
    apiurl = store_read_apiurl(os.curdir)

    # make it possible to override configuration of the rc file
    for var in ['OSC_PACKAGECACHEDIR', 'OSC_SU_WRAPPER', 'OSC_BUILD_ROOT']:
        val = os.getenv(var)
        if val:
            if var.startswith('OSC_'): var = var[4:]
            var = var.lower().replace('_', '-')
            if var in config:
                print 'Overriding config value for %s=\'%s\' with \'%s\'' % (var, config[var], val)
            config[var] = val
    pacname = pac
    if pacname == '_repository':
        if not opts.local_package:
            try:
                pacname = store_read_package(os.curdir)
            except oscerr.NoWorkingCopy:
                opts.local_package = True
        if opts.local_package:
            pacname = os.path.splitext(build_descr)[0]

    if not build_root:
        # config['build-root'] may contain %(repo)s, %(arch)s, %(project)s and %(package)s placeholders
        build_root = config['build-root'] % { 'repo': repo, 'arch': arch,
                                              'project': prj, 'package': pacname }
    extra_pkgs = []
    if not opts.extra_pkgs:
        extra_pkgs = config['extra-pkgs']
    elif opts.extra_pkgs != ['']:
        extra_pkgs = opts.extra_pkgs
    if xp:
        extra_pkgs += xp

    prefer_pkgs = {}
    build_descr_data = open(build_descr).read()

    # XXX: dirty hack but there's no api to provide custom defines
    if opts.without:
        s = ''
        for i in opts.without:
            s += "%%define _without_%s 1\n" % i
            s += "%%define _with_%s 0\n" % i
        build_descr_data = s + build_descr_data
    if opts._with:
        s = ''
        for i in opts._with:
            s += "%%define _without_%s 0\n" % i
            s += "%%define _with_%s 1\n" % i
        build_descr_data = s + build_descr_data

    if opts.prefer_pkgs:
        print 'Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs)
        prefer_pkgs, cpio = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type)
        cpio.add(os.path.basename(build_descr), build_descr_data)
        build_descr_data = cpio.get()
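        # with --prefer-pkgs the build description and the generated 'deps'
        # file are sent to the server together as one cpio archive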

    # special handling for overlay and rsync-src/dest
    specialcmdopts = ''
    if opts.rsyncsrc or opts.rsyncdest:
        if not opts.rsyncsrc or not opts.rsyncdest:
            raise oscerr.WrongOptions('When using --rsync-{src,dest} both parameters have to be specified.')
        myrsyncsrc = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.rsyncsrc)))
        if not os.path.isdir(myrsyncsrc):
            raise oscerr.WrongOptions('--rsync-src %s is not a valid directory!' % opts.rsyncsrc)
        # can't check the destination - it's in the target chroot ;) - but we can check for sanity
        myrsyncdest = os.path.expandvars(opts.rsyncdest)
        if not os.path.isabs(myrsyncdest):
            raise oscerr.WrongOptions('--rsync-dest %s is not an absolute path (starting with \'/\')!' % opts.rsyncdest)
        specialcmdopts = '--rsync-src="%s" --rsync-dest="%s"' % (myrsyncsrc, myrsyncdest)
    if opts.overlay:
        myoverlay = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.overlay)))
        if not os.path.isdir(myoverlay):
            raise oscerr.WrongOptions('--overlay %s is not a valid directory!' % opts.overlay)
        specialcmdopts += ' --overlay="%s"' % myoverlay

    bi_filename = '_buildinfo-%s-%s.xml' % (repo, arch)
    bc_filename = '_buildconfig-%s-%s' % (repo, arch)
    if is_package_dir('.') and os.access(osc.core.store, os.W_OK):
        bi_filename = os.path.join(os.getcwd(), osc.core.store, bi_filename)
        bc_filename = os.path.join(os.getcwd(), osc.core.store, bc_filename)
    elif not os.access('.', os.W_OK):
        bi_file = NamedTemporaryFile(prefix=bi_filename)
        bi_filename = bi_file.name
        bc_file = NamedTemporaryFile(prefix=bc_filename)
        bc_filename = bc_file.name
    else:
        bi_filename = os.path.abspath(bi_filename)
        bc_filename = os.path.abspath(bc_filename)
    try:
        if opts.noinit:
            if not os.path.isfile(bi_filename):
                raise oscerr.WrongOptions('--noinit is not possible, no local buildinfo file')
            print 'Using local file \'%s\' as buildinfo' % bi_filename
            if not os.path.isfile(bc_filename):
                raise oscerr.WrongOptions('--noinit is not possible, no local buildconfig file')
            print 'Using local file \'%s\' as buildconfig' % bc_filename
        elif opts.offline:
            if not os.path.isfile(bi_filename):
                raise oscerr.WrongOptions('--offline is not possible, no local buildinfo file')
            print 'Using local file \'%s\' as buildinfo' % bi_filename
            if not os.path.isfile(bc_filename):
                raise oscerr.WrongOptions('--offline is not possible, no local buildconfig file')
        else:
            print 'Getting buildinfo from server and storing it to %s' % bi_filename
            bi_file = open(bi_filename, 'w')
            bi_text = ''.join(get_buildinfo(apiurl,
                                            prj,
                                            pac,
                                            repo,
                                            arch,
                                            specfile=build_descr_data,
                                            addlist=extra_pkgs))
            bi_file.write(bi_text)
            bi_file.close()

            print 'Getting buildconfig from server and storing it to %s' % bc_filename
            bc_file = open(bc_filename, 'w')
            bc_file.write(get_buildconfig(apiurl, prj, pac, repo, arch))
            bc_file.close()
    except urllib2.HTTPError, e:
        if e.code == 404:
            # check what caused the 404
            if meta_exists(metatype='prj', path_args=(quote_plus(prj), ),
                           template_args=None, create_new=False, apiurl=apiurl):
                # take care not to run into double trouble
                pkg_meta_e = meta_exists(metatype='pkg', path_args=(quote_plus(prj),
                                         quote_plus(pac)), template_args=None, create_new=False,
                                         apiurl=apiurl)
                if pac == '_repository' or pkg_meta_e:
                    print >>sys.stderr, 'ERROR: either the repo/arch parameters are wrong or the .spec/.dsc/.kiwi file could not be parsed due to a syntax error'
                else:
                    print >>sys.stderr, 'The package \'%s\' does not exist - please ' \
                        'rerun with \'--local-package\'' % pac
            else:
                print >>sys.stderr, 'The project \'%s\' does not exist - please ' \
                    'rerun with \'--alternative-project <alternative_project>\'' % prj
        else:
            raise
        return 1

    bi = Buildinfo(bi_filename, apiurl, build_type, prefer_pkgs.keys())

    if bi.debuginfo and not (opts.disable_debuginfo or '--debug' in buildargs):
        buildargs.append('--debug')

    if opts.release:
        bi.release = opts.release

    if bi.release:
        buildargs.append('--release %s' % bi.release)

    # real arch of this machine vs. the arch we are supposed to build for
    if hostarch != bi.buildarch:
        if bi.buildarch not in can_also_build.get(hostarch, []):
            print >>sys.stderr, 'Error: hostarch \'%s\' cannot build \'%s\'.' % (hostarch, bi.buildarch)
            return 1
    rpmlist_prefers = []
    if prefer_pkgs:
        print 'Evaluating preferred packages'
        for name, path in prefer_pkgs.iteritems():
            if bi.has_dep(name):
                # We remove a preferred package from the buildinfo, so that the
                # fetcher does not download it.
                # Instead, we put it in a list which is appended to the rpmlist later.
                # At the same time, this will make sure that these packages are
                # not verified.
                bi.remove_dep(name)
                rpmlist_prefers.append((name, path))
                print ' - %s (%s)' % (name, path)

    print 'Updating cache of required packages'

    urllist = []
    if not opts.download_api_only:
        # transform 'url1, url2, url3' form into a list
        if 'urllist' in config:
            if isinstance(config['urllist'], str):
                re_clist = re.compile('[, ]+')
                urllist = [ i.strip() for i in re_clist.split(config['urllist'].strip()) ]
            else:
                urllist = config['urllist']

        # OBS 1.5 and earlier have no downloadurl defined in the buildinfo
        if bi.downloadurl:
            urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
    if not opts.cpio_bulk_download:
        urllist.append( '%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s' )
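    # each Pac tries these URLs in order: its local cache copy first, then the
    # mirror templates collected above, and the API fallback last (see Pac.makeurls())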

    fetcher = Fetcher(cachedir = config['packagecachedir'],
                      urllist = urllist,
                      api_host_options = config['api_host_options'],
                      offline = opts.noinit or opts.offline,
                      http_debug = config['http_debug'],
                      enable_cpio = opts.cpio_bulk_download,
                      cookiejar = cookiejar)

    # implicitly trust the project we are building for
    check_trusted_projects(apiurl, [ i for i in bi.projects.keys() if i != prj ])

    # now update the package cache
    fetcher.run(bi)

    old_pkg_dir = None
    if opts.oldpackages:
        old_pkg_dir = opts.oldpackages
        if not old_pkg_dir.startswith('/') and not opts.offline:
            data = [ prj, pacname, repo, arch ]
            if old_pkg_dir == '_link':
                p = osc.core.findpacs(os.curdir)[0]
                if not p.islink():
                    raise oscerr.WrongOptions('package is not a link')
                data[0] = p.linkinfo.project
                data[1] = p.linkinfo.package
                repos = osc.core.get_repositories_of_project(apiurl, data[0])
                # hack for links to e.g. Factory
                if data[2] not in repos and 'standard' in repos:
                    data[2] = 'standard'
            elif old_pkg_dir != '' and old_pkg_dir != '_self':
                a = old_pkg_dir.split('/')
                for i in range(0, len(a)):
                    data[i] = a[i]

            destdir = os.path.join(config['packagecachedir'], data[0], data[2], data[3])

            print "Downloading previous build from %s ..." % '/'.join(data)
            try:
                binaries = get_binarylist(apiurl, data[0], data[2], data[3], package=data[1], verbose=True)
            except Exception, e:
                print "Error: failed to get binaries: %s" % str(e)
                binaries = []
            class mytmpdir:
                """temporary directory that removes itself"""
                def __init__(self, *args, **kwargs):
                    self.name = mkdtemp(*args, **kwargs)
                    self._rmtree = rmtree
                def __del__(self):
                    self._rmtree(self.name)
                def __str__(self):
                    return self.name

            old_pkg_dir = mytmpdir(prefix='.build.oldpackages', dir=os.path.abspath(os.curdir))
            if not os.path.exists(destdir):
                os.makedirs(destdir)
            for i in binaries:
                fname = os.path.join(destdir, i.name)
                os.symlink(fname, os.path.join(str(old_pkg_dir), i.name))
                if os.path.exists(fname):
                    st = os.stat(fname)
                    if st.st_mtime == i.mtime and st.st_size == i.size:
                        continue
                get_binary_file(apiurl,
                                data[0],
                                data[2],
                                data[3],
                                i.name,
                                package = data[1],
                                target_filename = fname,
                                target_mtime = i.mtime,
                                progress_meter = True)
    if old_pkg_dir is not None:
        buildargs.append('--oldpackages %s' % old_pkg_dir)

    # Make packages from buildinfo available as repos for kiwi
    if build_type == 'kiwi':
        if not os.path.exists('repos'):
            os.mkdir('repos')
        for i in bi.deps:
            # project
            pdir = str(i.extproject).replace(':/', ':')
            # repository
            rdir = str(i.extrepository).replace(':/', ':')
            # architecture
            adir = i.repoarch
            # project/repository
            prdir = "repos/" + pdir + "/" + rdir
            # project/repository/architecture
            pradir = prdir + "/" + adir
            # source fullfilename
            sffn = i.fullfilename
            print "Using package: " + sffn
            # target fullfilename
            tffn = pradir + "/" + sffn.split("/")[-1]
            if not os.path.exists(os.path.join(pradir)):
                os.makedirs(os.path.join(pradir))
            if not os.path.exists(tffn):
                os.symlink(sffn, tffn)

    if bi.pacsuffix == 'rpm':
        if opts.no_verify or opts.noinit:
            print 'Skipping verification of package signatures'
        else:
            print 'Verifying integrity of cached packages'
            verify_pacs([ i.fullfilename for i in bi.deps ], bi.keys)
    elif bi.pacsuffix == 'deb':
        if vm_type in ("xen", "kvm", "lxc"):
            print 'Skipping verification of package signatures due to secure VM build'
        elif opts.no_verify or opts.noinit:
            print 'Skipping verification of package signatures'
        else:
            print 'WARNING: deb packages are not verified, they can compromise your system!'
    else:
        print 'WARNING: unknown packages are not verified, they can compromise your system!'

    print 'Writing build configuration'
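    # the rpmlist tells the build script which cached package files to use:
    # one "name path" line per package, followed by preinstall:/vminstall:/
    # cbinstall:/cbpreinstall:/runscripts: lines naming special package sets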
    rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps if not i.noinstall ]
    rpmlist += [ '%s %s\n' % (i[0], i[1]) for i in rpmlist_prefers ]

    rpmlist.append('preinstall: ' + ' '.join(bi.preinstall_list) + '\n')
    rpmlist.append('vminstall: ' + ' '.join(bi.vminstall_list) + '\n')
    rpmlist.append('cbinstall: ' + ' '.join(bi.cbinstall_list) + '\n')
    rpmlist.append('cbpreinstall: ' + ' '.join(bi.cbpreinstall_list) + '\n')
    rpmlist.append('runscripts: ' + ' '.join(bi.runscripts_list) + '\n')

    rpmlist_file = NamedTemporaryFile(prefix='rpmlist.')
    rpmlist_filename = rpmlist_file.name
    rpmlist_file.writelines(rpmlist)
    rpmlist_file.flush()

    subst = { 'repo': repo, 'arch': arch, 'project': prj, 'package': pacname }

    # XXX check if build-device is present
    if config['build-device']:
        my_build_device = config['build-device'] % subst
    else:
        # obs worker uses /root here but that collides with the
        # /root directory if the build root was used without vm
        my_build_device = build_root + '/img'

    if config['build-swap']:
        my_build_swap = config['build-swap'] % subst
    else:
        my_build_swap = build_root + '/swap'

    vm_options = ''
    if vm_type:
        vm_options = '--vm-type=%s' % vm_type
        vm_options += ' --vm-disk=' + my_build_device
        vm_options += ' --vm-swap=' + my_build_swap
        vm_options += ' --logfile=%s/.build.log' % build_root

        if os.access(build_root, os.W_OK) and os.access('/dev/kvm', os.W_OK):
            # so let's hope there's also an fstab entry
            build_root += '/.mount'

        if config['build-memory']:
            vm_options += ' --memory ' + config['build-memory']
        if config['build-vmdisk-rootsize']:
            vm_options += ' --vmdisk-rootsize ' + config['build-vmdisk-rootsize']
        if config['build-vmdisk-swapsize']:
            vm_options += ' --vmdisk-swapsize ' + config['build-vmdisk-swapsize']

    if opts.preload:
        print "Preload done for selected repo/arch."
        sys.exit(0)

    print 'Running build'
    cmd = '"%s" --root="%s" --rpmlist="%s" --dist="%s" %s --arch=%s %s %s "%s"' \
                % (config['build-cmd'], build_root, rpmlist_filename, bc_filename,
                   specialcmdopts, bi.buildarch, vm_options, ' '.join(buildargs), build_descr)

    if config['su-wrapper'].startswith('su '):
        tmpl = '%s \'%s\''
    else:
        tmpl = '%s %s'
    cmd = tmpl % (config['su-wrapper'], cmd)

    # change personality, if needed
    if hostarch != bi.buildarch:
        cmd = (change_personality.get(bi.buildarch, '') + ' ' + cmd).strip()
        # e.g. prefixes the command with 'linux32' when building i586 on an x86_64 host

    rc = subprocess.call(cmd, shell=True)
    if rc:
        print 'The buildroot was:', build_root
        sys.exit(rc)

    pacdir = os.path.join(build_root, '.build.packages')
    if os.path.islink(pacdir):
        pacdir = os.readlink(pacdir)
        pacdir = os.path.join(build_root, pacdir)

    if os.path.exists(pacdir):
        (s_built, b_built) = get_built_files(pacdir, bi.pacsuffix)

        if s_built: print s_built
        print b_built

        if opts.keep_pkgs:
            for i in b_built.splitlines() + s_built.splitlines():
                shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(i)))