1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.130.1'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 new_project_templ = """\
39 <project name="%(name)s">
41 <title></title> <!-- Short title of NewProject -->
43 <!-- This is for a longer description of the purpose of the project -->
46 <person role="maintainer" userid="%(user)s" />
47 <person role="bugowner" userid="%(user)s" />
48 <!-- remove this block to publish your packages on the mirrors -->
59 <!-- remove this comment to enable one or more build targets
61 <repository name="openSUSE_Factory">
62 <path project="openSUSE:Factory" repository="standard" />
66 <repository name="openSUSE_11.2">
67 <path project="openSUSE:11.2" repository="standard"/>
71 <repository name="openSUSE_11.1">
72 <path project="openSUSE:11.1" repository="standard"/>
76 <repository name="Fedora_12">
77 <path project="Fedora:12" repository="standard" />
81 <repository name="SLE_11">
82 <path project="SUSE:SLE-11" repository="standard" />
91 new_package_templ = """\
92 <package name="%(name)s">
94 <title></title> <!-- Title of package -->
97 <!-- for long description -->
100 <person role="maintainer" userid="%(user)s"/>
101 <person role="bugowner" userid="%(user)s"/>
103 <url>PUT_UPSTREAM_URL_HERE</url>
107 use one of the examples below to disable building of this package
108 on a certain architecture, in a certain repository,
109 or a combination thereof:
111 <disable arch="x86_64"/>
112 <disable repository="SUSE_SLE-10"/>
113 <disable repository="SUSE_SLE-10" arch="x86_64"/>
115 Possible sections where you can use the tags above:
125 Please have a look at:
126 http://en.opensuse.org/Restricted_formats
127 Packages containing formats listed there are NOT allowed to
128 be packaged in the openSUSE Buildservice and will be deleted!
135 new_attribute_templ = """\
137 <attribute namespace="" name="">
143 new_user_template = """\
145 <login>%(user)s</login>
146 <email>PUT_EMAIL_ADDRESS_HERE</email>
147 <realname>PUT_REAL_NAME_HERE</realname>
149 <project name="home:%(user)s"/>
165 new_pattern_template = """\
166 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
172 buildstatus_symbols = {'succeeded': '.',
174 'expansion error': 'U', # obsolete with OBS 2.0
188 # our own xml writer function to write xml nice, but with correct syntax
189 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
190 from xml.dom import minidom
191 def fixed_writexml(self, writer, indent="", addindent="", newl=""):
192 # indent = current indentation
193 # addindent = indentation to add to higher levels
194 # newl = newline string
195 writer.write(indent+"<" + self.tagName)
197 attrs = self._get_attributes()
198 a_names = attrs.keys()
201 for a_name in a_names:
202 writer.write(" %s=\"" % a_name)
203 minidom._write_data(writer, attrs[a_name].value)
206 if len(self.childNodes) == 1 \
207 and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
209 self.childNodes[0].writexml(writer, "", "", "")
210 writer.write("</%s>%s" % (self.tagName, newl))
212 writer.write(">%s"%(newl))
213 for node in self.childNodes:
214 node.writexml(writer,indent+addindent,addindent,newl)
215 writer.write("%s</%s>%s" % (indent,self.tagName,newl))
217 writer.write("/>%s"%(newl))
218 # replace minidom's function with ours
219 minidom.Element.writexml = fixed_writexml
222 # os.path.samefile is available only under Unix
def os_path_samefile(path1, path2):
    """Return True if path1 and path2 refer to the same file.

    Uses os.path.samefile where available (Unix only); on platforms
    that lack it, falls back to comparing the canonicalized
    (realpath) forms of both paths.
    """
    try:
        return os.path.samefile(path1, path2)
    except (AttributeError, NotImplementedError, OSError):
        # samefile is unavailable (non-Unix) or stat() failed; fall
        # back to a purely lexical comparison of the resolved paths.
        # (Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit are no longer swallowed.)
        return os.path.realpath(path1) == os.path.realpath(path2)
230 """represent a file, including its metadata"""
231 def __init__(self, name, md5, size, mtime, skipped=False):
236 self.skipped = skipped
244 """Source service content
247 """creates an empty serviceinfo instance"""
250 def read(self, serviceinfo_node):
251 """read in the source services <services> element passed as
254 if serviceinfo_node == None:
257 services = serviceinfo_node.findall('service')
259 for service in services:
260 name = service.get('name')
262 for param in service.findall('param'):
263 option = param.get('name', None)
265 name += " --" + option + " '" + value + "'"
266 self.commands.append(name)
268 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
269 raise oscerr.APIError(msg)
def addVerifyFile(self, serviceinfo_node, filename):
    """Append a "verify_file" source service to *serviceinfo_node*.

    The service records the file name together with its sha256
    checksum so the server can verify the file's integrity.
    Returns the modified element.
    """
    import hashlib

    # read in binary mode: hashing text-mode data yields
    # platform-dependent checksums (newline translation) and the
    # handle is now closed even if read() raises
    f = open(filename, 'rb')
    try:
        digest = hashlib.sha256(f.read()).hexdigest()
    finally:
        f.close()

    r = serviceinfo_node
    s = ET.Element("service", name="verify_file")
    ET.SubElement(s, "param", name="file").text = filename
    ET.SubElement(s, "param", name="verifier").text = "sha256"
    ET.SubElement(s, "param", name="checksum").text = digest
    r.append(s)
    return r
288 def addDownloadUrl(self, serviceinfo_node, url_string):
289 from urlparse import urlparse
290 url = urlparse( url_string )
291 protocol = url.scheme
296 s = ET.Element( "service", name="download_url" )
297 ET.SubElement(s, "param", name="protocol").text = protocol
298 ET.SubElement(s, "param", name="host").text = host
299 ET.SubElement(s, "param", name="path").text = path
def addGitUrl(self, serviceinfo_node, url_string):
    """Append a "git_pull" source service for *url_string* to
    *serviceinfo_node* and return the modified element."""
    r = serviceinfo_node
    service = ET.Element("service", name="git_pull")
    ET.SubElement(service, "param", name="url").text = url_string
    r.append(service)
    return r
311 def execute(self, dir):
314 for call in self.commands:
315 temp_dir = tempfile.mkdtemp()
316 name = call.split(None, 1)[0]
317 if not os.path.exists("/usr/lib/obs/service/"+name):
318 msg = "ERROR: service is not installed!\n"
319 msg += "Maybe try this: zypper in obs-service-" + name
320 raise oscerr.APIError(msg)
321 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
322 if conf.config['verbose'] > 1:
323 print "Run source service:", c
324 ret = subprocess.call(c, shell=True)
326 print "ERROR: service call failed: " + c
327 # FIXME: addDownloadUrlService calls si.execute after
328 # updating _services.
329 print " (your _services file may be corrupt now)"
331 for filename in os.listdir(temp_dir):
332 shutil.move( os.path.join(temp_dir, filename), os.path.join(dir, "_service:"+name+":"+filename) )
336 """linkinfo metadata (which is part of the xml representing a directory
339 """creates an empty linkinfo instance"""
def read(self, linkinfo_node):
    """Populate this linkinfo from the <linkinfo> element passed as
    argument.

    If the passed element is None, the method does nothing.
    """
    if linkinfo_node is None:
        return
    # every field is a plain attribute copy from the XML element;
    # absent attributes come back as None
    for attr in ('project', 'package', 'xsrcmd5', 'lsrcmd5',
                 'srcmd5', 'error', 'rev', 'baserev'):
        setattr(self, attr, linkinfo_node.get(attr))
366 """returns True if the linkinfo is not empty, otherwise False"""
367 if self.xsrcmd5 or self.lsrcmd5:
def isexpanded(self):
    """returns True if the package is an expanded link"""
    # an expanded link carries lsrcmd5 but no xsrcmd5
    return bool(self.lsrcmd5 and not self.xsrcmd5)
378 """returns True if the link is in error state (could not be applied)"""
384 """return an informatory string representation"""
385 if self.islink() and not self.isexpanded():
386 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
387 % (self.project, self.package, self.xsrcmd5, self.rev)
388 elif self.islink() and self.isexpanded():
390 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
391 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
393 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
394 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
399 # http://effbot.org/zone/element-lib.htm#prettyprint
400 def xmlindent(elem, level=0):
403 if not elem.text or not elem.text.strip():
406 xmlindent(e, level+1)
407 if not e.tail or not e.tail.strip():
409 if not e.tail or not e.tail.strip():
412 if level and (not elem.tail or not elem.tail.strip()):
416 """represent a project directory, holding packages"""
417 REQ_STOREFILES = ('_project', '_apiurl')
418 if conf.config['do_package_tracking']:
419 REQ_STOREFILES += ('_packages',)
420 def __init__(self, dir, getPackageList=True, progress_obj=None, wc_check=True):
423 self.absdir = os.path.abspath(dir)
424 self.progress_obj = progress_obj
426 self.name = store_read_project(self.dir)
427 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
431 dirty_files = self.wc_check()
433 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
434 'Please run \'osc repairwc %s\' and check the state\n' \
435 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
436 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, dirty_files, msg)
439 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
441 self.pacs_available = []
443 if conf.config['do_package_tracking']:
444 self.pac_root = self.read_packages().getroot()
445 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
446 self.pacs_excluded = [ i for i in os.listdir(self.dir)
447 for j in conf.config['exclude_glob']
448 if fnmatch.fnmatch(i, j) ]
449 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
450 # store all broken packages (e.g. packages which where removed by a non-osc cmd)
451 # in the self.pacs_broken list
452 self.pacs_broken = []
453 for p in self.pacs_have:
454 if not os.path.isdir(os.path.join(self.absdir, p)):
455 # all states will be replaced with the '!'-state
456 # (except it is already marked as deleted ('D'-state))
457 self.pacs_broken.append(p)
459 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
461 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
466 for fname in Project.REQ_STOREFILES:
467 if not os.path.exists(os.path.join(self.absdir, store, fname)):
468 dirty_files.append(fname)
471 def wc_repair(self, apiurl=None):
473 if not os.path.exists(os.path.join(self.dir, store, '_apiurl')) or apiurl:
475 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
476 'no \'apiurl\' was passed to wc_repair'
477 # hmm should we raise oscerr.WrongArgs?
478 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
480 conf.parse_apisrv_url(None, apiurl)
481 store_write_apiurl(self.dir, apiurl)
482 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
484 def checkout_missing_pacs(self, expand_link=False):
485 for pac in self.pacs_missing:
487 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
488 # pac is not under version control but a local file/dir exists
489 msg = 'can\'t add package \'%s\': Object already exists' % pac
490 raise oscerr.PackageExists(self.name, pac, msg)
492 print 'checking out new package %s' % pac
493 checkout_package(self.apiurl, self.name, pac, \
494 pathname=getTransActPath(os.path.join(self.dir, pac)), \
495 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
def set_state(self, pac, state):
    """Set the tracking state of package *pac*, creating a fresh
    package entry if the package is not tracked yet."""
    node = self.get_package_node(pac)
    if node is None:
        self.new_package_entry(pac, state)
    else:
        node.attrib['state'] = state
def get_package_node(self, pac):
    """Return the <package> element for *pac* from the in-memory
    _packages document, or None if the package is not tracked."""
    for node in self.pac_root.findall('package'):
        if node.get('name') == pac:
            return node
    return None
def del_package_node(self, pac):
    """Remove every <package> element named *pac* from the
    in-memory _packages document."""
    # findall() returns a fresh list, so removing from pac_root
    # while iterating it is safe
    for node in self.pac_root.findall('package'):
        if node.get('name') == pac:
            self.pac_root.remove(node)
def get_state(self, pac):
    """Return the tracking state of package *pac*, or None if the
    package is not tracked."""
    node = self.get_package_node(pac)
    if node is None:
        return None
    return node.get('state')
def new_package_entry(self, name, state):
    """Append a <package> element with the given name and tracking
    state to the in-memory _packages document."""
    ET.SubElement(self.pac_root, 'package', name=name, state=state)
525 def read_packages(self):
528 packages_file = os.path.join(self.absdir, store, '_packages')
529 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
530 return ET.parse(packages_file)
532 # scan project for existing packages and migrate them
534 for data in os.listdir(self.dir):
535 pac_dir = os.path.join(self.absdir, data)
536 # we cannot use self.pacs_available because we cannot guarantee that the package list
537 # was fetched from the server
538 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
539 and Package(pac_dir).name == data:
540 cur_pacs.append(ET.Element('package', name=data, state=' '))
541 store_write_initial_packages(self.absdir, self.name, cur_pacs)
542 return ET.parse(os.path.join(self.absdir, store, '_packages'))
def write_packages(self):
    """Pretty-print the in-memory _packages document and write it
    back to the project's store."""
    xmlindent(self.pac_root)
    content = ET.tostring(self.pac_root)
    store_write_string(self.absdir, '_packages', content)
548 def addPackage(self, pac):
550 for i in conf.config['exclude_glob']:
551 if fnmatch.fnmatch(pac, i):
552 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
553 raise oscerr.OscIOError(None, msg)
554 state = self.get_state(pac)
555 if state == None or state == 'D':
556 self.new_package_entry(pac, 'A')
557 self.write_packages()
558 # sometimes the new pac doesn't exist in the list because
559 # it would take too much time to update all data structs regularly
560 if pac in self.pacs_unvers:
561 self.pacs_unvers.remove(pac)
563 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
565 def delPackage(self, pac, force = False):
566 state = self.get_state(pac.name)
568 if state == ' ' or state == 'D':
570 for filename in pac.filenamelist + pac.filenamelist_unvers:
571 filestate = pac.status(filename)
572 if filestate == 'M' or filestate == 'C' or \
573 filestate == 'A' or filestate == '?':
576 del_files.append(filename)
577 if can_delete or force:
578 for filename in del_files:
579 pac.delete_localfile(filename)
580 if pac.status(filename) != '?':
581 pac.delete_storefile(filename)
582 # this is not really necessary
583 pac.put_on_deletelist(filename)
584 print statfrmt('D', getTransActPath(os.path.join(pac.dir, filename)))
585 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
586 pac.write_deletelist()
587 self.set_state(pac.name, 'D')
588 self.write_packages()
590 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
593 delete_dir(pac.absdir)
594 self.del_package_node(pac.name)
595 self.write_packages()
596 print statfrmt('D', pac.name)
598 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
600 print 'package is not under version control'
602 print 'unsupported state'
604 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
607 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
609 # we need to make sure that the _packages file will be written (even if an exception
612 # update complete project
613 # packages which no longer exists upstream
614 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
616 for pac in upstream_del:
617 p = Package(os.path.join(self.dir, pac))
618 self.delPackage(p, force = True)
619 delete_storedir(p.storedir)
624 self.pac_root.remove(self.get_package_node(p.name))
625 self.pacs_have.remove(pac)
627 for pac in self.pacs_have:
628 state = self.get_state(pac)
629 if pac in self.pacs_broken:
630 if self.get_state(pac) != 'A':
631 checkout_package(self.apiurl, self.name, pac,
632 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
633 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
636 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
638 if expand_link and p.islink() and not p.isexpanded():
641 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
643 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
646 rev = p.linkinfo.xsrcmd5
647 print 'Expanding to rev', rev
648 elif unexpand_link and p.islink() and p.isexpanded():
649 rev = p.linkinfo.lsrcmd5
650 print 'Unexpanding to rev', rev
651 elif p.islink() and p.isexpanded():
653 print 'Updating %s' % p.name
654 p.update(rev, service_files)
658 # TODO: Package::update has to fixed to behave like svn does
659 if pac in self.pacs_broken:
660 checkout_package(self.apiurl, self.name, pac,
661 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
662 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
664 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
665 elif state == 'A' and pac in self.pacs_available:
666 # file/dir called pac already exists and is under version control
667 msg = 'can\'t add package \'%s\': Object already exists' % pac
668 raise oscerr.PackageExists(self.name, pac, msg)
673 print 'unexpected state.. package \'%s\'' % pac
675 self.checkout_missing_pacs(expand_link=not unexpand_link)
677 self.write_packages()
679 def validate_pacs(self, validators, verbose_validation=False, *pacs):
681 for pac in self.pacs_broken:
682 if self.get_state(pac) != 'D':
683 msg = 'validation failed: package \'%s\' is missing' % pac
684 raise oscerr.PackageMissing(self.name, pac, msg)
685 pacs = self.pacs_have
687 if pac in self.pacs_broken and self.get_state(pac) != 'D':
688 msg = 'validation failed: package \'%s\' is missing' % pac
689 raise oscerr.PackageMissing(self.name, pac, msg)
690 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
693 p = Package(os.path.join(self.dir, pac))
694 p.validate(validators, verbose_validation)
696 def commit(self, pacs = (), msg = '', files = {}, validators_dir = None, verbose_validation = False):
701 if files.has_key(pac):
703 state = self.get_state(pac)
705 self.commitNewPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
707 self.commitDelPackage(pac)
709 # display the correct dir when sending the changes
710 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
713 p = Package(os.path.join(self.dir, pac))
715 p.commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
716 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
717 print 'osc: \'%s\' is not under version control' % pac
718 elif pac in self.pacs_broken:
719 print 'osc: \'%s\' package not found' % pac
721 self.commitExtPackage(pac, msg, todo, validators_dir=validators_dir, verbose_validation=verbose_validation)
723 self.write_packages()
725 # if we have packages marked as '!' we cannot commit
726 for pac in self.pacs_broken:
727 if self.get_state(pac) != 'D':
728 msg = 'commit failed: package \'%s\' is missing' % pac
729 raise oscerr.PackageMissing(self.name, pac, msg)
731 for pac in self.pacs_have:
732 state = self.get_state(pac)
735 Package(os.path.join(self.dir, pac)).commit(msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
737 self.commitDelPackage(pac)
739 self.commitNewPackage(pac, msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
741 self.write_packages()
743 def commitNewPackage(self, pac, msg = '', files = [], validators_dir = None, verbose_validation = False):
744 """creates and commits a new package if it does not exist on the server"""
745 if pac in self.pacs_available:
746 print 'package \'%s\' already exists' % pac
748 user = conf.get_apiurl_usr(self.apiurl)
749 edit_meta(metatype='pkg',
750 path_args=(quote_plus(self.name), quote_plus(pac)),
755 # display the correct dir when sending the changes
757 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
761 p = Package(os.path.join(self.dir, pac))
763 print statfrmt('Sending', os.path.normpath(p.dir))
764 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
765 self.set_state(pac, ' ')
768 def commitDelPackage(self, pac):
769 """deletes a package on the server and in the working copy"""
771 # display the correct dir when sending the changes
772 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
775 pac_dir = os.path.join(self.dir, pac)
776 p = Package(os.path.join(self.dir, pac))
777 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
778 delete_storedir(p.storedir)
784 pac_dir = os.path.join(self.dir, pac)
785 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
786 print statfrmt('Deleting', getTransActPath(pac_dir))
787 delete_package(self.apiurl, self.name, pac)
788 self.del_package_node(pac)
790 def commitExtPackage(self, pac, msg, files = [], validators_dir=None, verbose_validation=False):
791 """commits a package from an external project"""
792 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
795 pac_path = os.path.join(self.dir, pac)
797 project = store_read_project(pac_path)
798 package = store_read_package(pac_path)
799 apiurl = store_read_apiurl(pac_path, defaulturl=False)
800 if not meta_exists(metatype='pkg',
801 path_args=(quote_plus(project), quote_plus(package)),
802 template_args=None, create_new=False, apiurl=apiurl):
803 user = conf.get_apiurl_usr(self.apiurl)
804 edit_meta(metatype='pkg',
805 path_args=(quote_plus(project), quote_plus(package)),
806 template_args=({'name': pac, 'user': user}), apiurl=apiurl)
807 p = Package(pac_path)
809 p.commit(msg=msg, validators_dir=validators_dir, verbose_validation=verbose_validation)
813 r.append('*****************************************************')
814 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
815 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
816 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
817 r.append('*****************************************************')
821 def init_project(apiurl, dir, project, package_tracking=True):
824 if not os.path.exists(dir):
825 # use makedirs (checkout_no_colon config option might be enabled)
827 elif not os.path.isdir(dir):
828 raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
829 if os.path.exists(os.path.join(dir, store)):
830 raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
832 os.mkdir(os.path.join(dir, store))
834 store_write_project(dir, project)
835 store_write_apiurl(dir, apiurl)
837 store_write_initial_packages(dir, project, [])
841 """represent a package (its directory) and read/keep/write its metadata"""
843 # should _meta be a required file?
844 REQ_STOREFILES = ('_project', '_package', '_apiurl', '_files', '_osclib_version')
845 OPT_STOREFILES = ('_to_be_added', '_to_be_deleted', '_in_conflict', '_in_update',
846 '_in_commit', '_meta', '_meta_mode', '_frozenlink', '_pulled', '_linkrepair',
847 '_size_limit', '_commit_msg')
849 def __init__(self, workingdir, progress_obj=None, size_limit=None, wc_check=True):
852 self.dir = workingdir
853 self.absdir = os.path.abspath(self.dir)
854 self.storedir = os.path.join(self.absdir, store)
855 self.progress_obj = progress_obj
856 self.size_limit = size_limit
857 if size_limit and size_limit == 0:
858 self.size_limit = None
860 check_store_version(self.dir)
862 self.prjname = store_read_project(self.dir)
863 self.name = store_read_package(self.dir)
864 self.apiurl = store_read_apiurl(self.dir, defaulturl=not wc_check)
866 self.update_datastructs()
869 dirty_files = self.wc_check()
871 msg = 'Your working copy \'%s\' is in an inconsistent state.\n' \
872 'Please run \'osc repairwc %s\' (Note this might _remove_\n' \
873 'files from the .osc/ dir). Please check the state\n' \
874 'of the working copy afterwards (via \'osc status %s\')' % (self.dir, self.dir, self.dir)
875 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, dirty_files, msg)
881 for fname in self.filenamelist:
882 if not os.path.exists(os.path.join(self.storedir, fname)) and not fname in self.skipped:
883 dirty_files.append(fname)
884 for fname in Package.REQ_STOREFILES:
885 if not os.path.isfile(os.path.join(self.storedir, fname)):
886 dirty_files.append(fname)
887 for fname in os.listdir(self.storedir):
888 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
889 fname.startswith('_build'):
891 elif fname in self.filenamelist and fname in self.skipped:
892 dirty_files.append(fname)
893 elif not fname in self.filenamelist:
894 dirty_files.append(fname)
895 for fname in self.to_be_deleted[:]:
896 if not fname in self.filenamelist:
897 dirty_files.append(fname)
898 for fname in self.in_conflict[:]:
899 if not fname in self.filenamelist:
900 dirty_files.append(fname)
903 def wc_repair(self, apiurl=None):
904 if not os.path.exists(os.path.join(self.storedir, '_apiurl')) or apiurl:
906 msg = 'cannot repair wc: the \'_apiurl\' file is missing but ' \
907 'no \'apiurl\' was passed to wc_repair'
908 # hmm should we raise oscerr.WrongArgs?
909 raise oscerr.WorkingCopyInconsistent(self.prjname, self.name, [], msg)
911 conf.parse_apisrv_url(None, apiurl)
912 store_write_apiurl(self.dir, apiurl)
913 self.apiurl = store_read_apiurl(self.dir, defaulturl=False)
914 # all files which are present in the filelist have to exist in the storedir
915 for f in self.filelist:
916 # XXX: should we also check the md5?
917 if not os.path.exists(os.path.join(self.storedir, f.name)) and not f.name in self.skipped:
918 # if get_source_file fails we're screwed up...
919 get_source_file(self.apiurl, self.prjname, self.name, f.name,
920 targetfilename=os.path.join(self.storedir, f.name), revision=self.rev,
922 for fname in os.listdir(self.storedir):
923 if fname in Package.REQ_STOREFILES or fname in Package.OPT_STOREFILES or \
924 fname.startswith('_build'):
926 elif not fname in self.filenamelist or fname in self.skipped:
927 # this file does not belong to the storedir so remove it
928 os.unlink(os.path.join(self.storedir, fname))
929 for fname in self.to_be_deleted[:]:
930 if not fname in self.filenamelist:
931 self.to_be_deleted.remove(fname)
932 self.write_deletelist()
933 for fname in self.in_conflict[:]:
934 if not fname in self.filenamelist:
935 self.in_conflict.remove(fname)
936 self.write_conflictlist()
939 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
940 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
943 def addfile(self, n):
944 if not os.path.exists(os.path.join(self.absdir, n)):
945 raise oscerr.OscIOError(None, 'error: file \'%s\' does not exist' % n)
946 if n in self.to_be_deleted:
947 self.to_be_deleted.remove(n)
948 # self.delete_storefile(n)
949 self.write_deletelist()
950 elif n in self.filenamelist or n in self.to_be_added:
951 raise oscerr.PackageFileConflict(self.prjname, self.name, n, 'osc: warning: \'%s\' is already under version control' % n)
952 # shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
954 pathname = os.path.join(self.dir, n)
957 self.to_be_added.append(n)
959 print statfrmt('A', pathname)
961 def delete_file(self, n, force=False):
962 """deletes a file if possible and marks the file as deleted"""
965 state = self.status(n)
969 if state in ['?', 'A', 'M', 'R', 'C'] and not force:
970 return (False, state)
971 self.delete_localfile(n)
972 was_added = n in self.to_be_added
973 if state in ('A', 'R') or state == '!' and was_added:
974 self.to_be_added.remove(n)
977 # don't remove "merge files" (*.r, *.mine...)
978 # that's why we don't use clear_from_conflictlist
979 self.in_conflict.remove(n)
980 self.write_conflictlist()
981 if not state in ('A', '?') and not (state == '!' and was_added):
982 self.put_on_deletelist(n)
983 self.write_deletelist()
def delete_storefile(self, n):
    """Best-effort removal of file *n* from the package's store
    directory; a missing file is silently ignored."""
    try:
        os.unlink(os.path.join(self.storedir, n))
    except OSError:
        # the store copy may legitimately be absent (e.g. never
        # committed); narrowed from a bare "except:" so unrelated
        # exceptions are no longer swallowed
        pass
def delete_localfile(self, n):
    """Best-effort removal of file *n* from the working copy;
    a missing file is silently ignored."""
    try:
        os.unlink(os.path.join(self.dir, n))
    except OSError:
        # the file may already be gone, which is fine for a delete;
        # narrowed from a bare "except:" so unrelated exceptions
        # are no longer swallowed
        pass
def put_on_deletelist(self, n):
    """Schedule file *n* for deletion; a no-op if it is already
    scheduled."""
    if n in self.to_be_deleted:
        return
    self.to_be_deleted.append(n)
def put_on_conflictlist(self, n):
    """Mark file *n* as conflicting; a no-op if it is already
    marked."""
    if n in self.in_conflict:
        return
    self.in_conflict.append(n)
def put_on_addlist(self, n):
    """Schedule file *n* for addition; a no-op if it is already
    scheduled."""
    if n in self.to_be_added:
        return
    self.to_be_added.append(n)
1006 def clear_from_conflictlist(self, n):
1007 """delete an entry from the file, and remove the file if it would be empty"""
1008 if n in self.in_conflict:
1010 filename = os.path.join(self.dir, n)
1011 storefilename = os.path.join(self.storedir, n)
1012 myfilename = os.path.join(self.dir, n + '.mine')
1013 if self.islinkrepair() or self.ispulled():
1014 upfilename = os.path.join(self.dir, n + '.new')
1016 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1019 os.unlink(myfilename)
1020 # the working copy may be updated, so the .r* ending may be obsolete...
1021 # then we don't care
1022 os.unlink(upfilename)
1023 if self.islinkrepair() or self.ispulled():
1024 os.unlink(os.path.join(self.dir, n + '.old'))
1028 self.in_conflict.remove(n)
1030 self.write_conflictlist()
1032 # XXX: this isn't used at all
1033 def write_meta_mode(self):
1034 # XXX: the "elif" is somehow a contradiction (with current and the old implementation
1035 # it's not possible to "leave" the metamode again) (except if you modify pac.meta
1036 # which is really ugly:) )
1038 store_write_string(self.absdir, '_meta_mode', '')
1039 elif self.ismetamode():
1040 os.unlink(os.path.join(self.storedir, '_meta_mode'))
1042 def write_sizelimit(self):
1043 if self.size_limit and self.size_limit <= 0:
1045 os.unlink(os.path.join(self.storedir, '_size_limit'))
1049 store_write_string(self.absdir, '_size_limit', str(self.size_limit) + '\n')
def write_addlist(self):
    """Persist the list of files scheduled for addition to the
    '_to_be_added' store file."""
    self.__write_storelist('_to_be_added', self.to_be_added)
def write_deletelist(self):
    """Persist the list of files scheduled for deletion to the
    '_to_be_deleted' store file."""
    self.__write_storelist('_to_be_deleted', self.to_be_deleted)
def delete_source_file(self, n):
    """Delete the local working-copy file *n* together with its
    store copy."""
    self.delete_localfile(n)
    self.delete_storefile(n)
1062 def delete_remote_source_file(self, n):
1063 """delete a remote source file (e.g. from the server)"""
1064 query = 'rev=upload'
1065 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1068 def put_source_file(self, n, copy_only=False):
1069 cdir = os.path.join(self.storedir, '_in_commit')
1071 if not os.path.isdir(cdir):
1073 query = 'rev=repository'
1074 tmpfile = os.path.join(cdir, n)
1075 shutil.copyfile(os.path.join(self.dir, n), tmpfile)
1076 # escaping '+' in the URL path (note: not in the URL query string) is
1077 # only a workaround for ruby on rails, which swallows it otherwise
1079 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
1080 http_PUT(u, file = os.path.join(self.dir, n))
1081 os.rename(tmpfile, os.path.join(self.storedir, n))
1083 if os.path.isdir(cdir):
1085 if n in self.to_be_added:
1086 self.to_be_added.remove(n)
1088 def __generate_commitlist(self, todo_send):
1089 root = ET.Element('directory')
1090 keys = todo_send.keys()
1093 ET.SubElement(root, 'entry', name=i, md5=todo_send[i])
    def __send_commitlog(self, msg, local_filelist):
        """send the commitlog and the local filelist to the server"""
        # POST a commitfilelist; the server answers with the resulting filelist
        query = {'cmd' : 'commitfilelist',
                 'user' : conf.get_apiurl_usr(self.apiurl),
        if self.islink() and self.isexpanded():
            # commit against the expanded sources but keep the link
            query['keeplink'] = '1'
        if conf.config['linkcontrol'] or self.isfrozen():
            query['linkrev'] = self.linkinfo.srcmd5
            # pulled link: repair it against the pulled srcmd5
            query['repairlink'] = '1'
            query['linkrev'] = self.get_pulled_srcmd5()
        if self.islinkrepair():
            query['repairlink'] = '1'
        u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
        f = http_POST(u, data=ET.tostring(local_filelist))
        root = ET.parse(f).getroot()
    def __get_todo_send(self, server_filelist):
        """parse todo from a previous __send_commitlog call"""
        error = server_filelist.get('error')
        elif error != 'missing':
            # only 'missing' is expected here; anything else is a server bug
            raise oscerr.PackageInternalError(self.prjname, self.name,
                '__get_todo_send: unexpected \'error\' attr: \'%s\'' % error)
        for n in server_filelist.findall('entry'):
            name = n.get('name')
                raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist))
            todo.append(n.get('name'))
    def validate(self, validators_dir, verbose_validation=False):
        """Run every source validator script found in *validators_dir*.

        Skipped when validators_dir is None or the package name starts
        with '_'. Raises ExtRuntimeError when a validator fails.
        """
        if validators_dir is None or self.name.startswith('_'):
        for validator in sorted(os.listdir(validators_dir)):
            if validator.startswith('.'):
            fn = os.path.join(validators_dir, validator)
            mode = os.stat(fn).st_mode
            if stat.S_ISREG(mode):
                # only regular files are executed as validators
                if verbose_validation:
                    print 'osc runs source validator: %s' % fn
                    p = subprocess.Popen([fn, '--verbose'], close_fds=True)
                    p = subprocess.Popen([fn], close_fds=True)
                    raise oscerr.ExtRuntimeError('ERROR: source_validator failed:\n%s' % p.stdout, validator)
    def commit(self, msg='', validators_dir=None, verbose_validation=False):
        """Commit local modifications of this working copy to the server.

        Aborts when the wc is outdated; optionally runs source validators
        first. Sends the commit filelist, uploads changed files (retried a
        few times), then refreshes the local store and metadata.
        """
        # commit only if the upstream revision is the same as the working copy's
        upstream_rev = self.latest_rev()
        if self.rev != upstream_rev:
            raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))

        if not validators_dir is None:
            self.validate(validators_dir, verbose_validation)

            # no explicit todo: commit everything (new adds first, then known files)
            self.todo = [i for i in self.to_be_added if not i in self.filenamelist] + self.filenamelist

        pathn = getTransActPath(self.dir)

        # classify every file: to send (with md5), to delete, or unchanged
        for filename in self.filenamelist + [i for i in self.to_be_added if not i in self.filenamelist]:
            if filename.startswith('_service:') or filename.startswith('_service_'):
            st = self.status(filename)
                print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
            elif filename in self.todo:
                if st in ('A', 'R', 'M'):
                    todo_send[filename] = dgst(os.path.join(self.absdir, filename))
                    real_send.append(filename)
                    print statfrmt('Sending', os.path.join(pathn, filename))
                elif st in (' ', '!', 'S'):
                    # unchanged/missing/skipped: referenced by stored md5 only
                    if st == '!' and filename in self.to_be_added:
                        print 'file \'%s\' is marked as \'A\' but does not exist' % filename
                    f = self.findfilebyname(filename)
                        raise oscerr.PackageInternalError(self.prjname, self.name,
                            'error: file \'%s\' with state \'%s\' is not known by meta' \
                    todo_send[filename] = f.md5
                    todo_delete.append(filename)
                    print statfrmt('Deleting', os.path.join(pathn, filename))
            elif st in ('R', 'M', 'D', ' ', '!', 'S'):
                # ignore missing new file (it's not part of the current commit)
                if st == '!' and filename in self.to_be_added:
                f = self.findfilebyname(filename)
                    raise oscerr.PackageInternalError(self.prjname, self.name,
                        'error: file \'%s\' with state \'%s\' is not known by meta' \
                todo_send[filename] = f.md5

        if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
            print 'nothing to do for package %s' % self.name

        print 'Transmitting file data ',
        filelist = self.__generate_commitlist(todo_send)
        sfilelist = self.__send_commitlog(msg, filelist)
        send = self.__get_todo_send(sfilelist)
        real_send = [i for i in real_send if not i in send]
        # abort after 3 tries
        while len(send) and tries:
            for filename in send[:]:
                sys.stdout.write('.')
                self.put_source_file(filename)
                send.remove(filename)
            sfilelist = self.__send_commitlog(msg, filelist)
            send = self.__get_todo_send(sfilelist)
            # server still reports missing files after retries: give up
            raise oscerr.PackageInternalError(self.prjname, self.name,
                'server does not accept filelist:\n%s\nmissing:\n%s\n' \
                % (ET.tostring(filelist), ET.tostring(sfilelist)))
        # these files already exist on the server
        # just copy them into the storedir
        for filename in real_send:
            self.put_source_file(filename, copy_only=True)

        self.rev = sfilelist.get('rev')
        print 'Committed revision %s.' % self.rev

            os.unlink(os.path.join(self.storedir, '_pulled'))
        if self.islinkrepair():
            os.unlink(os.path.join(self.storedir, '_linkrepair'))
            self.linkrepair = False
            # XXX: mark package as invalid?
            print 'The source link has been repaired. This directory can now be removed.'

        if self.islink() and self.isexpanded():
            # re-fetch the expanded filelist for the committed link
            li.read(sfilelist.find('linkinfo'))
            if li.xsrcmd5 is None:
                raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist))
            sfilelist = ET.fromstring(self.get_files_meta(revision=li.xsrcmd5))
        for i in sfilelist.findall('entry'):
            if i.get('name') in self.skipped:
                i.set('skipped', 'true')
        store_write_string(self.absdir, '_files', ET.tostring(sfilelist) + '\n')
        for filename in todo_delete:
            self.to_be_deleted.remove(filename)
            self.delete_storefile(filename)
        self.write_deletelist()
        self.write_addlist()
        self.update_datastructs()

        if self.filenamelist.count('_service'):
            print 'The package contains a source service.'
        for filename in self.todo:
            if filename.startswith('_service:') and os.path.exists(filename):
                os.unlink(filename) # remove local files
        print_request_list(self.apiurl, self.prjname, self.name)
    def __write_storelist(self, name, data):
        """Persist list *data* into store file *name*, one entry per line;
        an empty list removes the file instead."""
            os.unlink(os.path.join(self.storedir, name))
            store_write_string(self.absdir, name, '%s\n' % '\n'.join(data))
1277 def write_conflictlist(self):
1278 self.__write_storelist('_in_conflict', self.in_conflict)
    def updatefile(self, n, revision, mtime=None):
        """Fetch file *n* at *revision* into the store, then copy it into the wc.

        The current wc file (if any) is first preserved under
        '_in_update' so an interrupted update can be detected/resumed.
        """
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
        origfile = os.path.join(self.storedir, '_in_update', n)
        if os.path.isfile(filename):
            # save the wc copy via tmp+rename so the backup appears atomically
            shutil.copyfile(filename, origfile_tmp)
            os.rename(origfile_tmp, origfile)

        get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=storefilename,
                revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)

        shutil.copyfile(storefilename, filename)
        if not origfile is None:
    def mergefile(self, n, revision, mtime=None):
        """Merge local changes of file *n* with its upstream *revision*.

        Binary files cannot be merged and always become a conflict.
        On conflict the file is recorded in the conflict list and the
        '.mine'/'.r<rev>' helper files are kept in the wc.
        """
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        myfilename = os.path.join(self.dir, n + '.mine')
        upfilename = os.path.join(self.dir, n + '.r' + self.rev)
        origfile_tmp = os.path.join(self.storedir, '_in_update', '%s.copy' % n)
        origfile = os.path.join(self.storedir, '_in_update', n)
        # back up the wc file for resumability, then move it aside as '.mine'
        shutil.copyfile(filename, origfile_tmp)
        os.rename(origfile_tmp, origfile)
        os.rename(filename, myfilename)

        get_source_file(self.apiurl, self.prjname, self.name, n,
                        revision=revision, targetfilename=upfilename,
                        progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)

        if binary_file(myfilename) or binary_file(upfilename):
            # no text merge possible: take upstream and mark as conflict
            shutil.copyfile(upfilename, filename)
            shutil.copyfile(upfilename, storefilename)
            self.in_conflict.append(n)
            self.write_conflictlist()

            # diff3 OPTIONS... MINE OLDER YOURS
            merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
            # we would rather use the subprocess module, but it is not available before 2.4
            ret = subprocess.call(merge_cmd, shell=True)

            # "An exit status of 0 means `diff3' was successful, 1 means some
            # conflicts were found, and 2 means trouble."
                # merge was successful... clean up
                shutil.copyfile(upfilename, storefilename)
                os.unlink(upfilename)
                os.unlink(myfilename)
                # unsuccessful merge
                shutil.copyfile(upfilename, storefilename)
                self.in_conflict.append(n)
                self.write_conflictlist()
                raise oscerr.ExtRuntimeError('diff3 failed with exit code: %s' % ret, merge_cmd)
    def update_local_filesmeta(self, revision=None):
        """
        Update the local _files file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = self.get_files_meta(revision=revision)
        store_write_string(self.absdir, '_files', meta + '\n')
1355 def get_files_meta(self, revision='latest', skip_service=True):
1356 fm = show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, meta=self.meta)
1357 # look for "too large" files according to size limit and mark them
1358 root = ET.fromstring(fm)
1359 for e in root.findall('entry'):
1360 size = e.get('size')
1361 if size and self.size_limit and int(size) > self.size_limit \
1362 or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
1363 e.set('skipped', 'true')
1364 return ET.tostring(root)
    def update_datastructs(self):
        """
        Update the internal data structures if the local _files
        file has changed (e.g. update_local_filesmeta() has been
        called).
        """
        files_tree = read_filemeta(self.dir)
        files_tree_root = files_tree.getroot()

        self.rev = files_tree_root.get('rev')
        self.srcmd5 = files_tree_root.get('srcmd5')

        self.linkinfo = Linkinfo()
        self.linkinfo.read(files_tree_root.find('linkinfo'))

        self.filenamelist = []
        for node in files_tree_root.findall('entry'):
                f = File(node.get('name'),
                         int(node.get('size')),
                         int(node.get('mtime')))
                if node.get('skipped'):
                    self.skipped.append(f.name)
                # okay, a very old version of _files, which didn't contain any metadata yet...
                f = File(node.get('name'), '', 0, 0)
            self.filelist.append(f)
            self.filenamelist.append(f.name)

        self.to_be_added = read_tobeadded(self.absdir)
        self.to_be_deleted = read_tobedeleted(self.absdir)
        self.in_conflict = read_inconflict(self.absdir)
        self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
        self.size_limit = read_sizelimit(self.dir)
        self.meta = self.ismetamode()

        # gather unversioned files, but ignore some stuff
        for i in os.listdir(self.dir):
            for j in conf.config['exclude_glob']:
                if fnmatch.fnmatch(i, j):
                    self.excluded.append(i)
        self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
                                     if i not in self.excluded
                                     if i not in self.filenamelist ]
1419 """tells us if the package is a link (has 'linkinfo').
1420 A package with linkinfo is a package which links to another package.
1421 Returns True if the package is a link, otherwise False."""
1422 return self.linkinfo.islink()
1424 def isexpanded(self):
1425 """tells us if the package is a link which is expanded.
1426 Returns True if the package is expanded, otherwise False."""
1427 return self.linkinfo.isexpanded()
1429 def islinkrepair(self):
1430 """tells us if we are repairing a broken source link."""
1431 return self.linkrepair
1434 """tells us if we have pulled a link."""
1435 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1438 """tells us if the link is frozen."""
1439 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1441 def ismetamode(self):
1442 """tells us if the package is in meta mode"""
1443 return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
    def get_pulled_srcmd5(self):
        """Return the srcmd5 recorded in the store's '_pulled' file."""
        for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
            pulledrev = line.strip()
    def haslinkerror(self):
        """
        Returns True if the link is broken otherwise False.
        If the package is not a link it returns False.
        """
        return self.linkinfo.haserror()
    def linkerror(self):
        """
        Returns an error message if the link is broken otherwise None.
        If the package is not a link it returns None.
        """
        return self.linkinfo.error
    def update_local_pacmeta(self):
        """
        Update the local _meta file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
        store_write_string(self.absdir, '_meta', meta + '\n')
    def findfilebyname(self, n):
        """Return the File entry named *n* from self.filelist
        (presumably None when not found — TODO confirm)."""
        for i in self.filelist:
    def status(self, n):
        """Return the one-letter wc status of file *n*.

         file  storefile  file present  STATUS
        exists  exists     in _files
          x       -            -        'A' and listed in _to_be_added
          x       x            -        'R' and listed in _to_be_added
          x       x            x        ' ' if digest differs: 'M'
                                            and if in conflicts file: 'C'
          -       x            x        'D' and listed in _to_be_deleted
          x       x            x        'D' and listed in _to_be_deleted (e.g. if deleted file was modified)
          x       x            x        'C' and listed in _in_conflict
          x       -            x        'S' and listed in self.skipped
          -       -            x        'S' and listed in self.skipped
        """
        known_by_meta = False
        exists_in_store = False
        if n in self.filenamelist:
            known_by_meta = True
        if os.path.exists(os.path.join(self.absdir, n)):
        if os.path.exists(os.path.join(self.storedir, n)):
            exists_in_store = True

        # order of checks matters: deletion/conflict/skip take precedence
        if n in self.to_be_deleted:
        elif n in self.in_conflict:
        elif n in self.skipped:
        elif n in self.to_be_added and exists and exists_in_store:
        elif n in self.to_be_added and exists:
        elif exists and exists_in_store and known_by_meta:
            if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
        elif n in self.to_be_added and not exists:
        elif not exists and exists_in_store and known_by_meta and not n in self.to_be_deleted:
        elif exists and not exists_in_store and not known_by_meta:
        elif not exists_in_store and known_by_meta:
            # XXX: this codepath shouldn't be reached (we restore the storefile
            # in update_datastructs)
            raise oscerr.PackageInternalError(self.prjname, self.name,
                'error: file \'%s\' is known by meta but no storefile exists.\n'
                'This might be caused by an old wc format. Please backup your current\n'
                'wc and checkout the package again. Afterwards copy all files (except the\n'
                '.osc/ dir) into the new package wc.' % n)
            # this case shouldn't happen (except there was a typo in the filename etc.)
            raise oscerr.OscIOError(None, 'osc: \'%s\' is not under version control' % n)
    def get_diff(self, revision=None, ignoreUnversioned=False):
        """Yield unified diffs between the working copy and *revision*
        (wc base revision when *revision* is None), one list of diff
        lines per changed file."""
        diff_hdr = 'Index: %s\n'
        diff_hdr += '===================================================================\n'

        def diff_add_delete(fname, add, revision):
            # build a whole-file diff for an added (add=True) or deleted file
            diff.append(diff_hdr % fname)
                diff.append('--- %s\t(revision 0)\n' % fname)
                if revision and not fname in self.to_be_added:
                    rev = 'working copy'
                diff.append('+++ %s\t(%s)\n' % (fname, rev))
                fname = os.path.join(self.absdir, fname)
                diff.append('--- %s\t(revision %s)\n' % (fname, revision or self.rev))
                diff.append('+++ %s\t(working copy)\n' % fname)
                fname = os.path.join(self.storedir, fname)

            if revision is not None and not add:
                # fetch the remote version to diff against
                (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
                get_source_file(self.apiurl, self.prjname, self.name, origname, tmpfile, revision)
            if binary_file(fname):
                diff.append('Binary file \'%s\' %s.\n' % (origname, what))
                ltmpl = '@@ -0,0 +1,%d @@\n'
                ltmpl = '@@ -1,%d +0,0 @@\n'
                lines = [tmpl % i for i in open(fname, 'r').readlines()]
                diff.append(ltmpl % len(lines))
                if not lines[-1].endswith('\n'):
                    lines.append('\n\\ No newline at end of file\n')
            if tmpfile is not None:

        if revision is None:
            # diff against the wc base: classify each todo file
            todo = self.todo or [i for i in self.filenamelist if not i in self.to_be_added]+self.to_be_added
                if fname in self.to_be_added and self.status(fname) == 'A':
                elif fname in self.to_be_deleted:
                    deleted.append(fname)
                elif fname in self.filenamelist:
                    kept.append(self.findfilebyname(fname))
                elif fname in self.to_be_added and self.status(fname) == '!':
                    raise oscerr.OscIOError(None, 'file \'%s\' is marked as \'A\' but does not exist\n'\
                        '(either add the missing file or revert it)' % fname)
                elif not ignoreUnversioned:
                    raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % fname)
            fm = self.get_files_meta(revision=revision)
            root = ET.fromstring(fm)
            rfiles = self.__get_files(root)
            # swap added and deleted
            kept, deleted, added, services = self.__get_rev_changes(rfiles)
            added = [f.name for f in added]
            added.extend([f for f in self.to_be_added if not f in kept])
            deleted = [f.name for f in deleted]
            deleted.extend(self.to_be_deleted)

#        print kept, added, deleted
            state = self.status(f.name)
            if state in ('S', '?', '!'):
            elif state == ' ' and revision is None:
            elif revision and self.findfilebyname(f.name).md5 == f.md5 and state != 'M':
            yield [diff_hdr % f.name]
            if revision is None:
                yield get_source_file_diff(self.absdir, f.name, self.rev)
                (fd, tmpfile) = tempfile.mkstemp(prefix='osc_diff')
                get_source_file(self.apiurl, self.prjname, self.name, f.name, tmpfile, revision)
                diff = get_source_file_diff(self.absdir, f.name, revision,
                    os.path.basename(tmpfile), os.path.dirname(tmpfile), f.name)
                if tmpfile is not None:
            yield diff_add_delete(f, True, revision)
            yield diff_add_delete(f, False, revision)
1657 def merge(self, otherpac):
1658 self.todo += otherpac.todo
1672 '\n '.join(self.filenamelist),
    def read_meta_from_spec(self, spec = None):
        """Parse Summary, Url and %description from the package's specfile
        into self.summary / self.url / self.descr.

        Without *spec*, the wc is scanned and exactly one '*.spec' file
        is expected.
        """
            # scan for spec files
            speclist = glob.glob(os.path.join(self.dir, '*.spec'))
            if len(speclist) == 1:
                specfile = speclist[0]
            elif len(speclist) > 1:
                print 'the following specfiles were found:'
                for filename in speclist:
                print 'please specify one with --specfile'
                print 'no specfile was found - please specify one ' \

        # delegates to the module-level read_meta_from_spec() helper
        data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
        self.summary = data.get('Summary', '')
        self.url = data.get('Url', '')
        self.descr = data.get('%description', '')
    def update_package_meta(self, force=False):
        """
        for the updatepacmetafromspec subcommand
            argument force suppresses the confirm question
        """
        m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))

        root = ET.fromstring(m)
        root.find('title').text = self.summary
        root.find('description').text = ''.join(self.descr)
        url = root.find('url')
            url = ET.SubElement(root, 'url')

        u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
        mf = metafile(u, ET.tostring(root))

            # show old vs. new meta and ask before writing
            print '*' * 36, 'old', '*' * 36
            print '*' * 36, 'new', '*' * 36
            print ET.tostring(root)
            repl = raw_input('Write? (y/N/e) ')
    def mark_frozen(self):
        """Write the '_frozenlink' marker and tell the user the link is broken."""
        store_write_string(self.absdir, '_frozenlink', '')
        print "The link in this package is currently broken. Checking"
        print "out the last working version instead; please use 'osc pull'"
        print "to repair the link."
1750 def unmark_frozen(self):
1751 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1752 os.unlink(os.path.join(self.storedir, '_frozenlink'))
    def latest_rev(self):
        """Return the latest relevant upstream revision for this wc,
        taking link repair, link expansion, frozen and pulled state
        into account."""
        if self.islinkrepair():
            upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta)
        elif self.islink() and self.isexpanded():
            if self.isfrozen() or self.ispulled():
                # resolve against the frozen/pulled link revision
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
                    upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta)
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
                    upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta)
            # plain (non-link) package: use the ordinary revision number
            upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta)
    def __get_files(self, fmeta_root):
        """Build a list of File objects from a parsed _files XML root."""
        if fmeta_root.get('rev') is None and len(fmeta_root.findall('entry')) > 0:
            raise oscerr.APIError('missing rev attribute in _files:\n%s' % ''.join(ET.tostring(fmeta_root)))
        for i in fmeta_root.findall('entry'):
            skipped = i.get('skipped') is not None
            f.append(File(i.get('name'), i.get('md5'),
                     int(i.get('size')), int(i.get('mtime')), skipped))
    def __get_rev_changes(self, revfiles):
        """Compare *revfiles* (File objects of the target revision) with the
        wc state and return (kept, added, deleted, services) lists."""
            revfilenames.append(f.name)
            # treat skipped like deleted files
            if f.name.startswith('_service:'):
            # treat skipped like added files
            # problem: this overwrites existing files during the update
            # (because skipped files aren't in self.filenamelist_unvers)
            if f.name in self.filenamelist and not f.name in self.skipped:
        for f in self.filelist:
            if not f.name in revfilenames:
        return kept, added, deleted, services
    def update(self, rev = None, service_files = False, size_limit = None):
        """Update the wc to revision *rev*; resumes a broken update when
        the store's '_in_update' dir still contains a '_files' snapshot."""
        # size_limit is only temporary for this update
        old_size_limit = self.size_limit
        if not size_limit is None:
            self.size_limit = int(size_limit)
        if os.path.isfile(os.path.join(self.storedir, '_in_update', '_files')):
            print 'resuming broken update...'
            root = ET.parse(os.path.join(self.storedir, '_in_update', '_files')).getroot()
            rfiles = self.__get_files(root)
            kept, added, deleted, services = self.__get_rev_changes(rfiles)
            # check if we aborted in the middle of a file update
            broken_file = os.listdir(os.path.join(self.storedir, '_in_update'))
            broken_file.remove('_files')
            if len(broken_file) == 1:
                origfile = os.path.join(self.storedir, '_in_update', broken_file[0])
                wcfile = os.path.join(self.absdir, broken_file[0])
                origfile_md5 = dgst(origfile)
                origfile_meta = self.findfilebyname(broken_file[0])
                if origfile.endswith('.copy'):
                    # ok it seems we aborted at some point during the copy process
                    # (copy process == copy wcfile to the _in_update dir). remove file+continue
                elif self.findfilebyname(broken_file[0]) is None:
                    # should we remove this file from _in_update? if we don't
                    # the user has no chance to continue without removing the file manually
                    # NOTE(review): the '%s' placeholder below is never filled in —
                    # looks like a missing "% broken_file[0]"
                    raise oscerr.PackageInternalError(self.prjname, self.name,
                        '\'%s\' is not known by meta but exists in \'_in_update\' dir')
                elif os.path.isfile(wcfile) and dgst(wcfile) != origfile_md5:
                    (fd, tmpfile) = tempfile.mkstemp(dir=self.absdir, prefix=broken_file[0]+'.')
                    os.rename(wcfile, tmpfile)
                    os.rename(origfile, wcfile)
                    print 'warning: it seems you modified \'%s\' after the broken ' \
                        'update. Restored original file and saved modified version ' \
                        'to \'%s\'.' % (wcfile, tmpfile)
                elif not os.path.isfile(wcfile):
                    # this is strange... because it existed before the update. restore it
                    os.rename(origfile, wcfile)
                    # everything seems to be ok
            elif len(broken_file) > 1:
                raise oscerr.PackageInternalError(self.prjname, self.name, 'too many files in \'_in_update\' dir')
                if os.path.exists(os.path.join(self.storedir, f.name)):
                    if dgst(os.path.join(self.storedir, f.name)) == f.md5:
            if not service_files:
            self.__update(kept, added, deleted, services, ET.tostring(root), root.get('rev'))
            os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
            os.rmdir(os.path.join(self.storedir, '_in_update'))
        # ok everything is ok (hopefully)...
        fm = self.get_files_meta(revision=rev)
        root = ET.fromstring(fm)
        rfiles = self.__get_files(root)
        # snapshot the target filelist so a crash can be detected/resumed
        store_write_string(self.absdir, '_files', fm + '\n', subdir='_in_update')
        kept, added, deleted, services = self.__get_rev_changes(rfiles)
        if not service_files:
        self.__update(kept, added, deleted, services, fm, root.get('rev'))
        os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
        if os.path.isdir(os.path.join(self.storedir, '_in_update')):
            os.rmdir(os.path.join(self.storedir, '_in_update'))
        self.size_limit = old_size_limit
    def __update(self, kept, added, deleted, services, fm, rev):
        """Apply a computed change set (kept/added/deleted/services File
        lists) to the wc, then write the new '_files' and refresh meta."""
        pathn = getTransActPath(self.dir)
        # check for conflicts with existing files
            if f.name in self.filenamelist_unvers:
                raise oscerr.PackageFileConflict(self.prjname, self.name, f.name,
                    'failed to add file \'%s\' file/dir with the same name already exists' % f.name)
        # ok, the update can't fail due to existing files
            self.updatefile(f.name, rev, f.mtime)
            print statfrmt('A', os.path.join(pathn, f.name))
            # if the storefile doesn't exist we're resuming an aborted update:
            # the file was already deleted but we cannot know this
            # OR we're processing a _service: file (simply keep the file)
            if os.path.isfile(os.path.join(self.storedir, f.name)) and self.status(f.name) != 'M':
                self.delete_localfile(f.name)
                self.delete_storefile(f.name)
                print statfrmt('D', os.path.join(pathn, f.name))
                if f.name in self.to_be_deleted:
                    self.to_be_deleted.remove(f.name)
                    self.write_deletelist()

            state = self.status(f.name)
            if state == 'M' and self.findfilebyname(f.name).md5 == f.md5:
                # remote file didn't change
                # try to merge changes
                merge_status = self.mergefile(f.name, rev, f.mtime)
                print statfrmt(merge_status, os.path.join(pathn, f.name))
                self.updatefile(f.name, rev, f.mtime)
                print 'Restored \'%s\'' % os.path.join(pathn, f.name)
                get_source_file(self.apiurl, self.prjname, self.name, f.name,
                    targetfilename=os.path.join(self.storedir, f.name), revision=rev,
                    progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
                print 'skipping \'%s\' (this is due to conflicts)' % f.name
            elif state == 'D' and self.findfilebyname(f.name).md5 != f.md5:
                # XXX: in the worst case we might end up with f.name being
                # in _to_be_deleted and in _in_conflict... this needs to be checked
                if os.path.exists(os.path.join(self.absdir, f.name)):
                    merge_status = self.mergefile(f.name, rev, f.mtime)
                    print statfrmt(merge_status, os.path.join(pathn, f.name))
                    if merge_status == 'C':
                        # state changes from delete to conflict
                        self.to_be_deleted.remove(f.name)
                        self.write_deletelist()
                    # XXX: we cannot recover this case because we've no file
                    self.updatefile(f.name, rev, f.mtime)
                    print statfrmt('U', os.path.join(pathn, f.name))
            elif state == ' ' and self.findfilebyname(f.name).md5 != f.md5:
                self.updatefile(f.name, rev, f.mtime)
                print statfrmt('U', os.path.join(pathn, f.name))

        # checkout service files
            get_source_file(self.apiurl, self.prjname, self.name, f.name,
                targetfilename=os.path.join(self.absdir, f.name), revision=rev,
                progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
            print statfrmt('A', os.path.join(pathn, f.name))
        store_write_string(self.absdir, '_files', fm + '\n')
        self.update_local_pacmeta()
        self.update_datastructs()
        print 'At revision %s.' % self.rev
    def run_source_services(self):
        """Execute the package's source services when a '_service' file exists."""
        if self.filenamelist.count('_service'):
            service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
            si.execute(self.absdir)
    def prepare_filelist(self):
        """Prepare a list of files, which will be processed by process_filelist
        method. This allows easy modifications of a file list in commit
        phase.
        """
            self.todo = self.filenamelist + self.filenamelist_unvers

        for f in [f for f in self.todo if not os.path.isdir(f)]:
            status = self.status(f)
            ret += "%s %s %s\n" % (action, status, f)

# Edit a filelist for package \'%s\'
# l, leave = leave a file as is
# r, remove = remove a file
# a, add = add a file
# If you remove file from a list, it will be unchanged
# If you remove all, commit will be aborted""" % self.name
    def edit_filelist(self):
        """Opens a package list in editor for editing. This allows easy
        modifications of it just by simple text editing.
        """
        (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
        f = os.fdopen(fd, 'w')
        f.write(self.prepare_filelist())
        mtime_orig = os.stat(filename).st_mtime

            run_editor(filename)
            mtime = os.stat(filename).st_mtime
            if mtime_orig < mtime:
                # file was saved by the user: use the edited content
                filelist = open(filename).readlines()
                raise oscerr.UserAbort()

        return self.process_filelist(filelist)
    def process_filelist(self, filelist):
        """Process a filelist - it add/remove or leave files. This depends on
        user input. If no file is processed, it raises an ValueError.
        """
        # NOTE(review): this filter looks suspect — a '#' comment line is
        # non-empty, so it passes the 'or'; likely "and" was intended
        for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
            foo = line.split(' ')
                action, state, name = (foo[0], ' ', foo[3])
                action, state, name = (foo[0], foo[1], foo[2])
            action = action.lower()

            if action in ('r', 'remove'):
                if self.status(name) == '?':
                    if name in self.todo:
                        self.todo.remove(name)
                    self.delete_file(name, True)
            elif action in ('a', 'add'):
                if self.status(name) != '?':
                    print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
            elif action in ('l', 'leave'):
                raise ValueError("Unknow action `%s'" % action)
            raise ValueError("Empty filelist")
    def revert(self, filename):
        """Revert local modifications and scheduling state of *filename*."""
        if not filename in self.filenamelist and not filename in self.to_be_added:
            raise oscerr.OscIOError(None, 'file \'%s\' is not under version control' % filename)
        elif filename in self.skipped:
            # NOTE(review): the '%s' placeholder below is never filled in —
            # looks like a missing "% filename"
            raise oscerr.OscIOError(None, 'file \'%s\' is marked as skipped and cannot be reverted')
        if filename in self.filenamelist and not os.path.exists(os.path.join(self.storedir, filename)):
            # NOTE(review): other call sites pass (prjname, name, msg) to
            # PackageInternalError — confirm this single-argument call
            raise oscerr.PackageInternalError('file \'%s\' is listed in filenamelist but no storefile exists' % filename)
        state = self.status(filename)
        if not (state == 'A' or state == '!' and filename in self.to_be_added):
            # restore the pristine copy from the store
            shutil.copyfile(os.path.join(self.storedir, filename), os.path.join(self.absdir, filename))
            self.to_be_deleted.remove(filename)
            self.write_deletelist()
            self.clear_from_conflictlist(filename)
        elif state in ('A', 'R') or state == '!' and filename in self.to_be_added:
            self.to_be_added.remove(filename)
            self.write_addlist()
def init_package(apiurl, project, package, dir, size_limit=None, meta=False, progress_obj=None):
    """Initialize a fresh osc store in *dir* and return a Package for it.

    Raises OscIOError when *dir* is not a directory or is already an
    initialized working copy. ('dir' shadows the builtin; kept for API
    compatibility.)
    """
    if not os.path.exists(dir):
    elif not os.path.isdir(dir):
        raise oscerr.OscIOError(None, 'error: \'%s\' is no directory' % dir)
    if os.path.exists(os.path.join(dir, store)):
        raise oscerr.OscIOError(None, 'error: \'%s\' is already an initialized osc working copy' % dir)
        os.mkdir(os.path.join(dir, store))
    store_write_project(dir, project)
    store_write_string(dir, '_package', package + '\n')
    store_write_apiurl(dir, apiurl)
        store_write_string(dir, '_meta_mode', '')
        store_write_string(dir, '_size_limit', str(size_limit) + '\n')
    store_write_string(dir, '_files', '<directory />' + '\n')
    store_write_string(dir, '_osclib_version', __store_version__ + '\n')
    return Package(dir, progress_obj=progress_obj, size_limit=size_limit)
2103 """for objects to represent the review state in a request"""
    def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
        # review attributes as parsed from the request XML
        self.by_user = by_user
        self.by_group = by_group
        self.comment = comment
2113 """for objects to represent the "state" of a request"""
    def __init__(self, name=None, who=None, when=None, comment=None):
        # state attributes as parsed from the request XML
        self.comment = comment
2121 """represents an action"""
    def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update, role_person, role_group, role):
        # NOTE: 'type' shadows the builtin; kept for API compatibility
        self.src_project = src_project
        self.src_package = src_package
        self.src_rev = src_rev
        self.dst_project = dst_project
        self.dst_package = dst_package
        self.src_update = src_update
        self.role_person = role_person
        self.role_group = role_group
2135 """represent a request and holds its metadata
2136 it has methods to read in metadata from xml,
2137 different views, ..."""
2140 self.state = RequestState()
2143 self.last_author = None
2146 self.statehistory = []
2149 def read(self, root):
2150 self.reqid = int(root.get('id'))
2151 actions = root.findall('action')
2152 if len(actions) == 0:
2153 actions = [ root.find('submit') ] # for old style requests
2155 for action in actions:
2156 action_type = action.get('type', 'submit')
2158 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = role = role_person = role_group = None
2159 if action.findall('source'):
2160 n = action.find('source')
2161 src_prj = n.get('project', None)
2162 src_pkg = n.get('package', None)
2163 src_rev = n.get('rev', None)
2164 if action.findall('target'):
2165 n = action.find('target')
2166 dst_prj = n.get('project', None)
2167 dst_pkg = n.get('package', None)
2168 if action.findall('options'):
2169 n = action.find('options')
2170 if n.findall('sourceupdate'):
2171 src_update = n.find('sourceupdate').text.strip()
2172 if action.findall('person'):
2173 n = action.find('person')
2174 role_person = n.get('name', None)
2175 role = n.get('role', None)
2176 if action.findall('group'):
2177 n = action.find('add_role')
2178 role_group = n.get('name', None)
2179 role = n.get('role', None)
2180 self.add_action(action_type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update, role_person, role_group, role)
2182 msg = 'invalid request format:\n%s' % ET.tostring(root)
2183 raise oscerr.APIError(msg)
2186 n = root.find('state')
2187 self.state.name, self.state.who, self.state.when \
2188 = n.get('name'), n.get('who'), n.get('when')
2190 self.state.comment = n.find('comment').text.strip()
2192 self.state.comment = None
2194 # read the review states
2195 for r in root.findall('review'):
2197 s.state = r.get('state')
2198 s.by_user = r.get('by_user')
2199 s.by_group = r.get('by_group')
2200 s.who = r.get('who')
2201 s.when = r.get('when')
2203 s.comment = r.find('comment').text.strip()
2206 self.reviews.append(s)
2208 # read the state history
2209 for h in root.findall('history'):
2211 s.name = h.get('name')
2212 s.who = h.get('who')
2213 s.when = h.get('when')
2215 s.comment = h.find('comment').text.strip()
2218 self.statehistory.append(s)
2219 self.statehistory.reverse()
2221 # read a description, if it exists
2223 n = root.find('description').text
2228 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update, role_person, role_group, role):
2229 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
2230 dst_prj, dst_pkg, src_update, role_person, role_group, role)
2233 def get_creator(self):
2234 if len(self.statehistory):
2235 return self.statehistory[-1].who
2236 return self.state.who
2238 def list_view(self):
2239 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
2241 for a in self.actions:
2242 dst = "%s/%s" % (a.dst_project, a.dst_package)
2243 if a.src_package == a.dst_package:
2247 if a.type=="submit":
2248 sr_source="%s/%s -> " % (a.src_project, a.src_package)
2249 if a.type=="add_role":
2250 if a.role_person is not None:
2251 sr_source="%s as %s" % (a.role_person, a.role)
2252 if a.role_group is not None:
2253 sr_source="%s as %s" % (a.role_group, a.role)
2254 if a.type=="change_devel":
2255 dst = "developed in %s/%s" % (a.src_project, a.src_package)
2256 sr_source="%s/%s" % (a.dst_project, a.dst_package)
2258 ret += '\n %s: %-50s %-20s ' % \
2259 (a.type, sr_source, dst)
2261 if self.statehistory and self.statehistory[0]:
2263 for h in self.statehistory:
2264 who.append("%s(%s)" % (h.who,h.name))
2266 ret += "\n From: %s" % (' -> '.join(who))
2268 txt = re.sub(r'[^[:isprint:]]', '_', self.descr)
2270 lines = txt.splitlines()
2271 wrapper = textwrap.TextWrapper( width = 80,
2272 initial_indent=' Descr: ',
2273 subsequent_indent=' ')
2274 ret += "\n" + wrapper.fill(lines[0])
2275 wrapper.initial_indent = ' '
2276 for line in lines[1:]:
2277 ret += "\n" + wrapper.fill(line)
2283 def __cmp__(self, other):
2284 return cmp(self.reqid, other.reqid)
2288 for action in self.actions:
2289 action_list=action_list+" %s: " % (action.type)
2290 if action.type=="submit":
2293 r="(r%s)" % (action.src_rev)
2295 if action.src_update:
2296 m="(%s)" % (action.src_update)
2297 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
2298 if action.dst_package:
2299 action_list=action_list+"/%s" % ( action.dst_package )
2300 elif action.type=="delete":
2301 action_list=action_list+" %s" % ( action.dst_project )
2302 if action.dst_package:
2303 action_list=action_list+"/%s" % ( action.dst_package )
2304 elif action.type=="change_devel":
2305 action_list=action_list+" %s/%s developed in %s/%s" % \
2306 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
2307 action_list=action_list+"\n"
2322 self.state.name, self.state.when, self.state.who,
2325 if len(self.reviews):
2326 reviewitems = [ '%-10s %s %s %s %s %s' \
2327 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
2328 for i in self.reviews ]
2329 s += '\nReview: ' + '\n '.join(reviewitems)
2332 if len(self.statehistory):
2333 histitems = [ '%-10s %s %s' \
2334 % (i.name, i.when, i.who) \
2335 for i in self.statehistory ]
2336 s += '\nHistory: ' + '\n '.join(histitems)
2343 """format time as Apr 02 18:19
2345 depending on whether it is in the current year
2349 if time.localtime()[0] == time.localtime(t)[0]:
2351 return time.strftime('%b %d %H:%M',time.localtime(t))
2353 return time.strftime('%b %d %Y',time.localtime(t))
def is_project_dir(d):
    """Return True if *d* is an osc project working copy: its store has a
    '_project' marker but no '_package' marker."""
    project_marker = os.path.join(d, store, '_project')
    package_marker = os.path.join(d, store, '_package')
    return os.path.exists(project_marker) and not os.path.exists(package_marker)
def is_package_dir(d):
    """Return True if *d* is an osc package working copy: its store carries
    both a '_project' and a '_package' marker."""
    project_marker = os.path.join(d, store, '_project')
    package_marker = os.path.join(d, store, '_package')
    return os.path.exists(project_marker) and os.path.exists(package_marker)
def parse_disturl(disturl):
    """Parse a disturl, returns tuple (apiurl, project, source, repository,
    revision), else raises an oscerr.WrongArgs exception
    """
    m = DISTURL_RE.match(disturl)
    if m is None:
        raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)

    apiurl = m.group('apiurl')
    # rewrite non-api hosts to the conventional https://api.<domain> form
    if apiurl.split('.')[0] != 'api':
        apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
    return (apiurl, m.group('project'), m.group('source'),
            m.group('repository'), m.group('revision'))
def parse_buildlogurl(buildlogurl):
    """Parse a build log url, returns a tuple (apiurl, project, package,
    repository, arch), else raises oscerr.WrongArgs exception"""
    m = BUILDLOGURL_RE.match(buildlogurl)
    if m is None:
        raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
    return (m.group('apiurl'), m.group('project'), m.group('package'),
            m.group('repository'), m.group('arch'))
2398 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
2399 This is handy to allow copy/paste a project/package combination in this form.
2401 Trailing slashes are removed before the split, because the split would
2402 otherwise give an additional empty string.
2410 def expand_proj_pack(args, idx=0, howmany=0):
2411 """looks for occurance of '.' at the position idx.
2412 If howmany is 2, both proj and pack are expanded together
2413 using the current directory, or none of them, if not possible.
2414 If howmany is 0, proj is expanded if possible, then, if there
2415 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
2416 expanded, if possible.
2417 If howmany is 1, only proj is expanded if possible.
2419 If args[idx] does not exists, an implicit '.' is assumed.
2420 if not enough elements up to idx exist, an error is raised.
2422 See also parseargs(args), slash_split(args), findpacs(args)
2423 All these need unification, somehow.
2426 # print args,idx,howmany
2429 raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)
2431 if len(args) == idx:
2433 if args[idx+0] == '.':
2434 if howmany == 0 and len(args) > idx+1:
2435 if args[idx+1] == '.':
2437 # remove one dot and make sure to expand both proj and pack
2442 # print args,idx,howmany
2444 args[idx+0] = store_read_project('.')
2447 package = store_read_package('.')
2448 args.insert(idx+1, package)
2452 package = store_read_package('.')
2453 args.insert(idx+1, package)
2457 def findpacs(files, progress_obj=None):
2458 """collect Package objects belonging to the given files
2459 and make sure each Package is returned only once"""
2462 p = filedir_to_pac(f, progress_obj)
2465 if i.name == p.name:
def filedir_to_pac(f, progress_obj=None):
    """Takes a working copy path, or a path to a file inside a working copy,
    and returns a Package object instance

    If the argument was a filename, add it onto the "todo" list of the Package """
    if os.path.isdir(f):
        wd = f
        todo = None
    else:
        wd = os.path.dirname(f) or os.curdir
        todo = [os.path.basename(f)]
    p = Package(wd, progress_obj=progress_obj)
    if todo is not None:
        p.todo = todo
    return p
2491 def read_filemeta(dir):
2494 msg = '\'%s\' is not a valid working copy.\n' % dir
2495 if not is_package_dir(dir):
2496 raise oscerr.NoWorkingCopy(msg)
2498 filesmeta = os.path.join(dir, store, '_files')
2499 if not os.path.isfile(filesmeta):
2500 print "Warning: file _files is missing, creating a default one"
2501 store_write_string(os.path.join(dir, store), '_files', '<directory \>')
2504 r = ET.parse(filesmeta)
2505 except SyntaxError, e:
2506 raise oscerr.NoWorkingCopy(msg +
2507 'When parsing .osc/_files, the following error was encountered:\n'
def store_readlist(dir, name):
    """Return the stripped lines of store file *name* in working copy *dir*;
    an empty list when the file does not exist."""
    fname = os.path.join(dir, store, name)
    entries = []
    if os.path.exists(fname):
        entries = [line.strip() for line in open(fname, 'r')]
    return entries
def read_tobeadded(dir):
    """Return the filenames scheduled for addition (store file '_to_be_added')."""
    return store_readlist(dir, '_to_be_added')
def read_tobedeleted(dir):
    """Return the filenames scheduled for deletion (store file '_to_be_deleted')."""
    return store_readlist(dir, '_to_be_deleted')
def read_sizelimit(dir):
    """Return the size limit stored in working copy *dir* as an int, or None
    when no (valid, numeric) '_size_limit' store file exists."""
    fname = os.path.join(dir, store, '_size_limit')
    if not os.path.exists(fname):
        return None
    raw = open(fname).readline().strip()
    if not raw.isdigit():
        return None
    return int(raw)
def read_inconflict(dir):
    """Return the filenames currently in conflict (store file '_in_conflict')."""
    return store_readlist(dir, '_in_conflict')
def parseargs(list_of_args):
    """Convenience method for osc's commandline argument parsing.

    If called with an empty tuple (or list), return a list containing the current directory.
    Otherwise, return a list of the arguments."""
    if not list_of_args:
        return [os.curdir]
    return list(list_of_args)
def statfrmt(statusletter, filename):
    """Compose a one-line status display: the status letter, a space, the filename."""
    return ' '.join((statusletter, filename))
def pathjoin(a, *p):
    """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
    joined = os.path.join(a, *p)
    if joined.startswith('./'):
        return joined[2:]
    return joined
2564 def makeurl(baseurl, l, query=[]):
2565 """Given a list of path compoments, construct a complete URL.
2567 Optional parameters for a query string can be given as a list, as a
2568 dictionary, or as an already assembled string.
2569 In case of a dictionary, the parameters will be urlencoded by this
2570 function. In case of a list not -- this is to be backwards compatible.
2573 if conf.config['verbose'] > 1:
2574 print 'makeurl:', baseurl, l, query
2576 if type(query) == type(list()):
2577 query = '&'.join(query)
2578 elif type(query) == type(dict()):
2579 query = urlencode(query)
2581 scheme, netloc = urlsplit(baseurl)[0:2]
2582 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
2585 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2586 """wrapper around urllib2.urlopen for error handling,
2587 and to support additional (PUT, DELETE) methods"""
2591 if conf.config['http_debug']:
2592 print >>sys.stderr, '\n\n--', method, url
2594 if method == 'POST' and not file and not data:
2595 # adding data to an urllib2 request transforms it into a POST
2598 req = urllib2.Request(url)
2599 api_host_options = {}
2600 if conf.is_known_apiurl(url):
2601 # ok no external request
2602 urllib2.install_opener(conf._build_opener(url))
2603 api_host_options = conf.get_apiurl_api_host_options(url)
2604 for header, value in api_host_options['http_headers']:
2605 req.add_header(header, value)
2607 req.get_method = lambda: method
2609 # POST requests are application/x-www-form-urlencoded per default
2610 # since we change the request into PUT, we also need to adjust the content type header
2611 if method == 'PUT' or (method == 'POST' and data):
2612 req.add_header('Content-Type', 'application/octet-stream')
2614 if type(headers) == type({}):
2615 for i in headers.keys():
2617 req.add_header(i, headers[i])
2619 if file and not data:
2620 size = os.path.getsize(file)
2622 data = open(file, 'rb').read()
2625 filefd = open(file, 'rb')
2627 if sys.platform[:3] != 'win':
2628 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2630 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2632 except EnvironmentError, e:
2634 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2635 '\non a filesystem which does not support this.' % (e, file))
2636 elif hasattr(e, 'winerror') and e.winerror == 5:
2637 # falling back to the default io
2638 data = open(file, 'rb').read()
2642 if conf.config['debug']: print >>sys.stderr, method, url
2644 old_timeout = socket.getdefaulttimeout()
2645 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2646 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2647 socket.setdefaulttimeout(timeout)
2649 fd = urllib2.urlopen(req, data=data)
2651 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2652 socket.setdefaulttimeout(old_timeout)
2653 if hasattr(conf.cookiejar, 'save'):
2654 conf.cookiejar.save(ignore_discard=True)
2656 if filefd: filefd.close()
# Thin convenience wrappers: each issues an HTTP request with the
# corresponding verb via http_request() and returns its result.
def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
def check_store_version(dir):
    """Check that the store format of working copy *dir* matches this client.

    Old-but-compatible versions get their marker silently rewritten to the
    current __store_version__.  Raises oscerr.NoWorkingCopy when *dir* is not
    a working copy, oscerr.WorkingCopyWrongVersion on an incompatible format.
    """
    global store

    versionfile = os.path.join(dir, store, '_osclib_version')
    try:
        v = open(versionfile).read().strip()
    except IOError:
        # narrowed from a bare 'except': only a missing/unreadable marker
        # means "not a working copy"
        v = ''

    if v == '':
        msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
        if os.path.exists(os.path.join(dir, '.svn')):
            msg = msg + '\nTry svn instead of osc.'
        raise oscerr.NoWorkingCopy(msg)

    if v != __store_version__:
        if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
            # version is fine, no migration needed
            f = open(versionfile, 'w')
            f.write(__store_version__ + '\n')
            f.close()
            return
        msg = 'The osc metadata of your working copy "%s"' % dir
        msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
        msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
        # call form instead of the Python-2-only 'raise Exc, msg' statement
        raise oscerr.WorkingCopyWrongVersion(msg)
2695 def meta_get_packagelist(apiurl, prj, deleted=None):
2699 query['deleted'] = 1
2701 u = makeurl(apiurl, ['source', prj], query)
2703 root = ET.parse(f).getroot()
2704 return [ node.get('name') for node in root.findall('entry') ]
2707 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2708 """return a list of file names,
2709 or a list File() instances if verbose=True"""
2715 query['rev'] = revision
2717 query['rev'] = 'latest'
2719 u = makeurl(apiurl, ['source', prj, package], query=query)
2721 root = ET.parse(f).getroot()
2724 return [ node.get('name') for node in root.findall('entry') ]
2728 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2729 rev = root.get('rev')
2730 for node in root.findall('entry'):
2731 f = File(node.get('name'),
2733 int(node.get('size')),
2734 int(node.get('mtime')))
2740 def meta_get_project_list(apiurl, deleted=None):
2743 query['deleted'] = 1
2745 u = makeurl(apiurl, ['source'], query)
2747 root = ET.parse(f).getroot()
2748 return sorted([ node.get('name') for node in root if node.get('name')])
def show_project_meta(apiurl, prj):
    """Fetch the '_meta' document of project *prj* and return it as a list of lines."""
    url = makeurl(apiurl, ['source', prj, '_meta'])
    return http_GET(url).readlines()
def show_project_conf(apiurl, prj):
    """Fetch the '_config' (prjconf) of project *prj* and return it as a list of lines."""
    url = makeurl(apiurl, ['source', prj, '_config'])
    return http_GET(url).readlines()
2763 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2764 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2768 except urllib2.HTTPError, e:
2769 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2773 def show_package_meta(apiurl, prj, pac, meta=False):
2778 # packages like _pattern and _project do not have a _meta file
2779 if pac.startswith('_pattern') or pac.startswith('_project'):
2782 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
2785 return f.readlines()
2786 except urllib2.HTTPError, e:
2787 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2791 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2793 path.append('source')
2799 path.append('_attribute')
2801 path.append(attribute)
2804 query.append("with_default=1")
2806 query.append("with_project=1")
2807 url = makeurl(apiurl, path, query)
2810 return f.readlines()
2811 except urllib2.HTTPError, e:
2812 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
def show_develproject(apiurl, prj, pac):
    """Return the devel project of *prj*/*pac* from its meta, or None when no
    <devel> element is present."""
    m = show_package_meta(apiurl, prj, pac)
    try:
        return ET.fromstring(''.join(m)).find('devel').get('project')
    except AttributeError:
        # narrowed from a bare 'except': find() returned None, i.e. the
        # package meta has no <devel> element
        return None
2824 def show_package_disabled_repos(apiurl, prj, pac):
2825 m = show_package_meta(apiurl, prj, pac)
2826 #FIXME: don't work if all repos of a project are disabled and only some are enabled since <disable/> is empty
2828 root = ET.fromstring(''.join(m))
2829 elm = root.find('build')
2830 r = [ node.get('repository') for node in elm.findall('disable')]
2836 def show_pattern_metalist(apiurl, prj):
2837 url = makeurl(apiurl, ['source', prj, '_pattern'])
2841 except urllib2.HTTPError, e:
2842 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2844 r = [ node.get('name') for node in tree.getroot() ]
2849 def show_pattern_meta(apiurl, prj, pattern):
2850 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2853 return f.readlines()
2854 except urllib2.HTTPError, e:
2855 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2860 """metafile that can be manipulated and is stored back after manipulation."""
2861 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2865 self.change_is_required = change_is_required
2866 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2867 f = os.fdopen(fd, 'w')
2868 f.write(''.join(input))