1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.128git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore; use conf.exclude_glob instead.
39 # It needs to stay, however, to avoid breaking tools which use the osc lib.
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="Fedora_12">
81 <path project="Fedora:12" repository="standard" />
85 <repository name="SLE_11">
86 <path project="SUSE:SLE-11" repository="standard" />
95 new_package_templ = """\
96 <package name="%(name)s">
98 <title></title> <!-- Title of package -->
101 <!-- for long description -->
104 <person role="maintainer" userid="%(user)s"/>
105 <person role="bugowner" userid="%(user)s"/>
107 <url>PUT_UPSTREAM_URL_HERE</url>
111 use one of the examples below to disable building of this package
112 on a certain architecture, in a certain repository,
113 or a combination thereof:
115 <disable arch="x86_64"/>
116 <disable repository="SUSE_SLE-10"/>
117 <disable repository="SUSE_SLE-10" arch="x86_64"/>
119 Possible sections where you can use the tags above:
129 Please have a look at:
130 http://wiki.opensuse.org/Restricted_formats
131 Packages containing formats listed there are NOT allowed to
132 be packaged in the openSUSE Build Service and will be deleted!
139 new_attribute_templ = """\
141 <attribute namespace="" name="">
147 new_user_template = """\
149 <login>%(user)s</login>
150 <email>PUT_EMAIL_ADDRESS_HERE</email>
151 <realname>PUT_REAL_NAME_HERE</realname>
153 <project name="home:%(user)s"/>
169 new_pattern_template = """\
170 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
176 buildstatus_symbols = {'succeeded': '.',
178 'expansion error': 'U', # obsolete with OBS 2.0
191 # our own xml writer function to write xml nicely, but with correct syntax
192 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
193 from xml.dom import minidom
194 def fixed_writexml(self, writer, indent="", addindent="", newl=""):
195 # indent = current indentation
196 # addindent = indentation to add to higher levels
197 # newl = newline string
198 writer.write(indent+"<" + self.tagName)
200 attrs = self._get_attributes()
201 a_names = attrs.keys()
204 for a_name in a_names:
205 writer.write(" %s=\"" % a_name)
206 minidom._write_data(writer, attrs[a_name].value)
209 if len(self.childNodes) == 1 \
210 and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
212 self.childNodes[0].writexml(writer, "", "", "")
213 writer.write("</%s>%s" % (self.tagName, newl))
215 writer.write(">%s"%(newl))
216 for node in self.childNodes:
217 node.writexml(writer,indent+addindent,addindent,newl)
218 writer.write("%s</%s>%s" % (indent,self.tagName,newl))
220 writer.write("/>%s"%(newl))
221 # replace minidom's function with ours
222 minidom.Element.writexml = fixed_writexml
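# Illustrative effect (element name and text are made up): with this writer
# installed, elements that contain only a text node stay on one line, e.g.
#   <title>some text</title>
# instead of the usual toprettyxml() output that puts the text on its own
# indented line.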
225 # os.path.samefile is available only under Unix
226 def os_path_samefile(path1, path2):
228 return os.path.samefile(path1, path2)
230 return os.path.realpath(path1) == os.path.realpath(path2)
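# Illustrative example (paths are made up): on platforms without
# os.path.samefile() the realpath() fallback still treats two spellings of the
# same location as equal, e.g.
#   os_path_samefile('/srv/wc', '/srv/./wc')  ->  True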
233 """represent a file, including its metadata"""
234 def __init__(self, name, md5, size, mtime):
244 """Source service content
247 """creates an empty serviceinfo instance"""
250 def read(self, serviceinfo_node):
251 """read in the source services <services> element passed as
254 if serviceinfo_node is None:
257 services = serviceinfo_node.findall('service')
259 for service in services:
260 name = service.get('name')
262 for param in service.findall('param'):
263 option = param.get('name', None)
265 name += " --" + option + " '" + value + "'"
266 self.commands.append(name)
268 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
269 raise oscerr.APIError(msg)
271 def addVerifyFile(self, serviceinfo_node, filename):
274 f = open(filename, 'r')
275 digest = hashlib.sha256(f.read()).hexdigest()
279 s = ET.Element( "service", name="verify_file" )
280 ET.SubElement(s, "param", name="file").text = filename
281 ET.SubElement(s, "param", name="verifier").text = "sha256"
282 ET.SubElement(s, "param", name="checksum").text = digest
288 def addDownloadUrl(self, serviceinfo_node, url_string):
289 from urlparse import urlparse
290 url = urlparse( url_string )
291 protocol = url.scheme
296 s = ET.Element( "service", name="download_url" )
297 ET.SubElement(s, "param", name="protocol").text = protocol
298 ET.SubElement(s, "param", name="host").text = host
299 ET.SubElement(s, "param", name="path").text = path
305 def execute(self, dir):
308 for call in self.commands:
309 temp_dir = tempfile.mkdtemp()
310 name = call.split(None, 1)[0]
311 if not os.path.exists("/usr/lib/obs/service/"+name):
312 msg = "ERROR: service is not installed!\n"
313 msg += "Maybe try this: zypper in obs-service-" + name
314 raise oscerr.APIError(msg)
315 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
316 if conf.config['verbose'] > 1:
317 print "Run source service:", c
318 ret = subprocess.call(c, shell=True)
320 print "ERROR: service call failed: " + c
321 # FIXME: addDownloadUrlService calls si.execute after
322 # updating _services.
323 print " (your _services file may be corrupt now)"
325 for file in os.listdir(temp_dir):
326 shutil.move( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
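# A minimal sketch of the data flow (all values are made up): a _service file
# whose <services> element contains
#   <service name="download_url">
#     <param name="protocol">http</param>
#     <param name="host">example.org</param>
#     <param name="path">/foo.tar.gz</param>
#   </service>
# is turned by Serviceinfo.read() into the command string
#   "download_url --protocol 'http' --host 'example.org' --path '/foo.tar.gz'"
# and execute() runs it as
#   /usr/lib/obs/service/download_url --protocol 'http' --host 'example.org' --path '/foo.tar.gz' --outdir <tempdir>
# moving the generated files into the package dir as _service:download_url:<filename>.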
330 """linkinfo metadata (which is part of the xml representing a directory
333 """creates an empty linkinfo instance"""
343 def read(self, linkinfo_node):
344 """read in the linkinfo metadata from the <linkinfo> element passed as
346 If the passed element is None, the method does nothing.
348 if linkinfo_node is None:
350 self.project = linkinfo_node.get('project')
351 self.package = linkinfo_node.get('package')
352 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
353 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
354 self.srcmd5 = linkinfo_node.get('srcmd5')
355 self.error = linkinfo_node.get('error')
356 self.rev = linkinfo_node.get('rev')
357 self.baserev = linkinfo_node.get('baserev')
360 """returns True if the linkinfo is not empty, otherwise False"""
361 if self.xsrcmd5 or self.lsrcmd5:
365 def isexpanded(self):
366 """returns True if the package is an expanded link"""
367 if self.lsrcmd5 and not self.xsrcmd5:
372 """returns True if the link is in error state (could not be applied)"""
378 """return an informatory string representation"""
379 if self.islink() and not self.isexpanded():
380 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
381 % (self.project, self.package, self.xsrcmd5, self.rev)
382 elif self.islink() and self.isexpanded():
384 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
385 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
387 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
388 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
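# A minimal sketch (attribute values are made up): Linkinfo.read() consumes the
# <linkinfo> child of the directory listing, e.g.
#   <linkinfo project="openSUSE:Factory" package="foo" xsrcmd5="<md5>" rev="42"/>
# islink() is True as soon as xsrcmd5 or lsrcmd5 is set, isexpanded() is True
# when only lsrcmd5 is present, and haserror()/error describe a link that could
# not be applied.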
394 """represent a project directory, holding packages"""
395 def __init__(self, dir, getPackageList=True, progress_obj=None):
398 self.absdir = os.path.abspath(dir)
399 self.progress_obj = progress_obj
401 self.name = store_read_project(self.dir)
402 self.apiurl = store_read_apiurl(self.dir)
405 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
407 self.pacs_available = []
409 if conf.config['do_package_tracking']:
410 self.pac_root = self.read_packages().getroot()
411 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
412 self.pacs_excluded = [ i for i in os.listdir(self.dir)
413 for j in conf.config['exclude_glob']
414 if fnmatch.fnmatch(i, j) ]
415 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
416 # store all broken packages (e.g. packages which were removed by a non-osc command)
417 # in the self.pacs_broken list
418 self.pacs_broken = []
419 for p in self.pacs_have:
420 if not os.path.isdir(os.path.join(self.absdir, p)):
421 # all states will be replaced with the '!'-state
422 # (except if it is already marked as deleted ('D'-state))
423 self.pacs_broken.append(p)
425 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
427 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
429 def checkout_missing_pacs(self, expand_link=False):
430 for pac in self.pacs_missing:
432 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
433 # pac is not under version control but a local file/dir exists
434 msg = 'can\'t add package \'%s\': Object already exists' % pac
435 raise oscerr.PackageExists(self.name, pac, msg)
437 print 'checking out new package %s' % pac
438 checkout_package(self.apiurl, self.name, pac, \
439 pathname=getTransActPath(os.path.join(self.dir, pac)), \
440 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
442 def set_state(self, pac, state):
443 node = self.get_package_node(pac)
445 self.new_package_entry(pac, state)
447 node.attrib['state'] = state
449 def get_package_node(self, pac):
450 for node in self.pac_root.findall('package'):
451 if pac == node.get('name'):
455 def del_package_node(self, pac):
456 for node in self.pac_root.findall('package'):
457 if pac == node.get('name'):
458 self.pac_root.remove(node)
460 def get_state(self, pac):
461 node = self.get_package_node(pac)
463 return node.get('state')
467 def new_package_entry(self, name, state):
468 ET.SubElement(self.pac_root, 'package', name=name, state=state)
470 def read_packages(self):
471 packages_file = os.path.join(self.absdir, store, '_packages')
472 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
473 return ET.parse(packages_file)
475 # scan project for existing packages and migrate them
477 for data in os.listdir(self.dir):
478 pac_dir = os.path.join(self.absdir, data)
479 # we cannot use self.pacs_available because we cannot guarantee that the package list
480 # was fetched from the server
481 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
482 and Package(pac_dir).name == data:
483 cur_pacs.append(ET.Element('package', name=data, state=' '))
484 store_write_initial_packages(self.absdir, self.name, cur_pacs)
485 return ET.parse(os.path.join(self.absdir, store, '_packages'))
487 def write_packages(self):
488 # TODO: should we only modify the existing file instead of overwriting?
489 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
491 def addPackage(self, pac):
493 for i in conf.config['exclude_glob']:
494 if fnmatch.fnmatch(pac, i):
495 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
496 raise oscerr.OscIOError(None, msg)
497 state = self.get_state(pac)
498 if state is None or state == 'D':
499 self.new_package_entry(pac, 'A')
500 self.write_packages()
501 # sometimes the new pac doesn't exist in the list because
502 # it would take too much time to update all data structs regularly
503 if pac in self.pacs_unvers:
504 self.pacs_unvers.remove(pac)
506 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
508 def delPackage(self, pac, force = False):
509 state = self.get_state(pac.name)
511 if state == ' ' or state == 'D':
513 for file in pac.filenamelist + pac.filenamelist_unvers:
514 filestate = pac.status(file)
515 if filestate == 'M' or filestate == 'C' or \
516 filestate == 'A' or filestate == '?':
519 del_files.append(file)
520 if can_delete or force:
521 for file in del_files:
522 pac.delete_localfile(file)
523 if pac.status(file) != '?':
524 pac.delete_storefile(file)
525 # this is not really necessary
526 pac.put_on_deletelist(file)
527 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
528 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
529 pac.write_deletelist()
530 self.set_state(pac.name, 'D')
531 self.write_packages()
533 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
536 delete_dir(pac.absdir)
537 self.del_package_node(pac.name)
538 self.write_packages()
539 print statfrmt('D', pac.name)
541 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
543 print 'package is not under version control'
545 print 'unsupported state'
547 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
550 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
552 # we need to make sure that the _packages file will be written (even if an exception
555 # update complete project
556 # packages which no longer exist upstream
557 upstream_del = [ pac for pac in self.pacs_have if pac not in self.pacs_available and self.get_state(pac) != 'A']
559 for pac in upstream_del:
560 p = Package(os.path.join(self.dir, pac))
561 self.delPackage(p, force = True)
562 delete_storedir(p.storedir)
567 self.pac_root.remove(self.get_package_node(p.name))
568 self.pacs_have.remove(pac)
570 for pac in self.pacs_have:
571 state = self.get_state(pac)
572 if pac in self.pacs_broken:
573 if self.get_state(pac) != 'A':
574 checkout_package(self.apiurl, self.name, pac,
575 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
576 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
579 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
581 if expand_link and p.islink() and not p.isexpanded():
584 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
586 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
589 rev = p.linkinfo.xsrcmd5
590 print 'Expanding to rev', rev
591 elif unexpand_link and p.islink() and p.isexpanded():
592 rev = p.linkinfo.lsrcmd5
593 print 'Unexpanding to rev', rev
594 elif p.islink() and p.isexpanded():
596 print 'Updating %s' % p.name
597 p.update(rev, service_files)
601 # TODO: Package::update has to be fixed to behave like svn does
602 if pac in self.pacs_broken:
603 checkout_package(self.apiurl, self.name, pac,
604 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
605 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
607 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
608 elif state == 'A' and pac in self.pacs_available:
609 # file/dir called pac already exists and is under version control
610 msg = 'can\'t add package \'%s\': Object already exists' % pac
611 raise oscerr.PackageExists(self.name, pac, msg)
616 print 'unexpected state.. package \'%s\'' % pac
618 self.checkout_missing_pacs(expand_link=not unexpand_link)
620 self.write_packages()
622 def commit(self, pacs = (), msg = '', files = {}, validators = None, verbose_validation = None):
627 if files.has_key(pac):
629 state = self.get_state(pac)
631 self.commitNewPackage(pac, msg, todo, validators=validators, verbose_validation=verbose_validation)
633 self.commitDelPackage(pac)
635 # display the correct dir when sending the changes
636 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
639 p = Package(os.path.join(self.dir, pac))
641 p.commit(msg, validators=validators, verbose_validation=verbose_validation)
642 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
643 print 'osc: \'%s\' is not under version control' % pac
644 elif pac in self.pacs_broken:
645 print 'osc: \'%s\' package not found' % pac
647 self.commitExtPackage(pac, msg, todo)
649 self.write_packages()
651 # if we have packages marked as '!' we cannot commit
652 for pac in self.pacs_broken:
653 if self.get_state(pac) != 'D':
654 msg = 'commit failed: package \'%s\' is missing' % pac
655 raise oscerr.PackageMissing(self.name, pac, msg)
657 for pac in self.pacs_have:
658 state = self.get_state(pac)
661 Package(os.path.join(self.dir, pac)).commit(msg, validators=validators, verbose_validation=verbose_validation)
663 self.commitDelPackage(pac)
665 self.commitNewPackage(pac, msg, validators=validators, verbose_validation=verbose_validation)
667 self.write_packages()
669 def commitNewPackage(self, pac, msg = '', files = [], validators = None, verbose_validation = None):
670 """creates and commits a new package if it does not exist on the server"""
671 if pac in self.pacs_available:
672 print 'package \'%s\' already exists' % pac
674 user = conf.get_apiurl_usr(self.apiurl)
675 edit_meta(metatype='pkg',
676 path_args=(quote_plus(self.name), quote_plus(pac)),
681 # display the correct dir when sending the changes
683 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
687 p = Package(os.path.join(self.dir, pac))
689 print statfrmt('Sending', os.path.normpath(p.dir))
690 p.commit(msg=msg, validators=validators, verbose_validation=verbose_validation)
691 self.set_state(pac, ' ')
694 def commitDelPackage(self, pac):
695 """deletes a package on the server and in the working copy"""
697 # display the correct dir when sending the changes
698 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
701 pac_dir = os.path.join(self.dir, pac)
702 p = Package(os.path.join(self.dir, pac))
703 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
704 delete_storedir(p.storedir)
710 pac_dir = os.path.join(self.dir, pac)
711 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
712 print statfrmt('Deleting', getTransActPath(pac_dir))
713 delete_package(self.apiurl, self.name, pac)
714 self.del_package_node(pac)
716 def commitExtPackage(self, pac, msg, files = []):
717 """commits a package from an external project"""
718 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
721 pac_path = os.path.join(self.dir, pac)
723 project = store_read_project(pac_path)
724 package = store_read_package(pac_path)
725 apiurl = store_read_apiurl(pac_path)
726 if meta_exists(metatype='pkg',
727 path_args=(quote_plus(project), quote_plus(package)),
729 create_new=False, apiurl=apiurl):
730 p = Package(pac_path)
734 user = conf.get_apiurl_usr(self.apiurl)
735 edit_meta(metatype='pkg',
736 path_args=(quote_plus(project), quote_plus(package)),
741 p = Package(pac_path)
747 r.append('*****************************************************')
748 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
749 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
750 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
751 r.append('*****************************************************')
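# A minimal sketch (package names are made up): with 'do_package_tracking'
# enabled, the project's .osc/_packages file keeps one <package> node per
# tracked package, e.g.
#   <package name="foo" state=" "/>
#   <package name="bar" state="A"/>
# where ' ' means committed/unmodified, 'A' added locally and 'D' scheduled for
# deletion; packages whose directory vanished are collected in pacs_broken and
# reported with the '!' state. get_state()/set_state() and new_package_entry()
# operate on these nodes.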
757 """represent a package (its directory) and read/keep/write its metadata"""
758 def __init__(self, workingdir, progress_obj=None, limit_size=None):
759 self.dir = workingdir
760 self.absdir = os.path.abspath(self.dir)
761 self.storedir = os.path.join(self.absdir, store)
762 self.progress_obj = progress_obj
763 self.limit_size = limit_size
764 if limit_size == 0:
765 self.limit_size = None
767 check_store_version(self.dir)
769 self.prjname = store_read_project(self.dir)
770 self.name = store_read_package(self.dir)
771 self.apiurl = store_read_apiurl(self.dir)
773 self.update_datastructs()
777 self.todo_delete = []
780 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
781 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
784 def addfile(self, n):
785 st = os.stat(os.path.join(self.dir, n))
786 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
788 def delete_file(self, n, force=False):
789 """deletes a file if possible and marks the file as deleted"""
792 state = self.status(n)
796 if state in ['?', 'A', 'M'] and not force:
797 return (False, state)
798 self.delete_localfile(n)
800 self.put_on_deletelist(n)
801 self.write_deletelist()
803 self.delete_storefile(n)
806 def delete_storefile(self, n):
807 try: os.unlink(os.path.join(self.storedir, n))
810 def delete_localfile(self, n):
811 try: os.unlink(os.path.join(self.dir, n))
814 def put_on_deletelist(self, n):
815 if n not in self.to_be_deleted:
816 self.to_be_deleted.append(n)
818 def put_on_conflictlist(self, n):
819 if n not in self.in_conflict:
820 self.in_conflict.append(n)
822 def clear_from_conflictlist(self, n):
823 """delete an entry from the file, and remove the file if it would be empty"""
824 if n in self.in_conflict:
826 filename = os.path.join(self.dir, n)
827 storefilename = os.path.join(self.storedir, n)
828 myfilename = os.path.join(self.dir, n + '.mine')
829 if self.islinkrepair() or self.ispulled():
830 upfilename = os.path.join(self.dir, n + '.new')
832 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
835 os.unlink(myfilename)
836 # the working copy may be updated, so the .r* ending may be obsolete...
838 os.unlink(upfilename)
839 if self.islinkrepair() or self.ispulled():
840 os.unlink(os.path.join(self.dir, n + '.old'))
844 self.in_conflict.remove(n)
846 self.write_conflictlist()
848 # XXX: this isn't used at all
849 def write_meta_mode(self):
850 # XXX: the "elif" is somewhat contradictory: with both the current and the old
851 # implementation it's not possible to "leave" meta mode again (except by
852 # modifying pac.meta, which is really ugly :) )
854 store_write_string(self.absdir, '_meta_mode', '')
855 elif self.ismetamode():
856 os.unlink(os.path.join(self.storedir, '_meta_mode'))
858 def write_sizelimit(self):
859 if self.size_limit and self.size_limit <= 0:
861 os.unlink(os.path.join(self.storedir, '_size_limit'))
865 fname = os.path.join(self.storedir, '_size_limit')
867 f.write(str(self.size_limit))
870 def write_deletelist(self):
871 if len(self.to_be_deleted) == 0:
873 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
877 fname = os.path.join(self.storedir, '_to_be_deleted')
879 f.write('\n'.join(self.to_be_deleted))
883 def delete_source_file(self, n):
884 """delete local a source file"""
885 self.delete_localfile(n)
886 self.delete_storefile(n)
888 def delete_remote_source_file(self, n):
889 """delete a remote source file (e.g. from the server)"""
891 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
894 def put_source_file(self, n):
896 # escaping '+' in the URL path (note: not in the URL query string) is
897 # only a workaround for ruby on rails, which swallows it otherwise
899 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
900 http_PUT(u, file = os.path.join(self.dir, n))
902 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
904 def commit(self, msg='', validators=None, verbose_validation=None):
905 # commit only if the upstream revision is the same as the working copy's
906 upstream_rev = self.latest_rev()
907 if self.rev != upstream_rev:
908 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
911 self.todo = self.filenamelist_unvers + self.filenamelist
913 pathn = getTransActPath(self.dir)
915 if validators and not self.name.startswith('_'):
918 for validator in sorted(os.listdir(validators)):
919 if validator.startswith('.'):
921 fn = os.path.join(validators, validator)
922 mode = os.stat(fn).st_mode
923 if stat.S_ISREG(mode):
924 if verbose_validation:
925 print "osc runs source service:", fn
926 p = subprocess.Popen([fn, "--verbose"], close_fds=True)
928 p = subprocess.Popen([fn], close_fds=True)
930 raise oscerr.RuntimeError(p.stdout, validator )
932 have_conflicts = False
933 for filename in self.todo:
934 if not filename.startswith('_service:') and not filename.startswith('_service_'):
935 st = self.status(filename)
937 self.todo.remove(filename)
938 elif st == 'A' or st == 'M':
939 self.todo_send.append(filename)
940 print statfrmt('Sending', os.path.join(pathn, filename))
942 self.todo_delete.append(filename)
943 print statfrmt('Deleting', os.path.join(pathn, filename))
945 have_conflicts = True
948 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
951 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
952 print 'nothing to do for package %s' % self.name
955 if self.islink() and self.isexpanded():
956 # resolve the link into the upload revision
957 # XXX: do this always?
958 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
959 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
962 print 'Transmitting file data ',
964 for filename in self.todo_delete:
965 # do not touch local files on commit --
966 # delete remotely instead
967 self.delete_remote_source_file(filename)
968 self.to_be_deleted.remove(filename)
969 for filename in self.todo_send:
970 sys.stdout.write('.')
972 self.put_source_file(filename)
974 # all source files are committed - now comes the log
975 query = { 'cmd' : 'commit',
977 'user' : conf.get_apiurl_usr(self.apiurl),
979 if self.islink() and self.isexpanded():
980 query['keeplink'] = '1'
981 if conf.config['linkcontrol'] or self.isfrozen():
982 query['linkrev'] = self.linkinfo.srcmd5
984 query['repairlink'] = '1'
985 query['linkrev'] = self.get_pulled_srcmd5()
986 if self.islinkrepair():
987 query['repairlink'] = '1'
988 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
991 # delete upload revision
993 query = { 'cmd': 'deleteuploadrev' }
994 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
1000 root = ET.parse(f).getroot()
1001 self.rev = int(root.get('rev'))
1003 print 'Committed revision %s.' % self.rev
1006 os.unlink(os.path.join(self.storedir, '_pulled'))
1007 if self.islinkrepair():
1008 os.unlink(os.path.join(self.storedir, '_linkrepair'))
1009 self.linkrepair = False
1010 # XXX: mark package as invalid?
1011 print 'The source link has been repaired. This directory can now be removed.'
1012 if self.islink() and self.isexpanded():
1013 self.update_local_filesmeta(revision=self.latest_rev())
1015 self.update_local_filesmeta()
1016 self.write_deletelist()
1017 self.update_datastructs()
1019 if self.filenamelist.count('_service'):
1020 print 'The package contains a source service.'
1021 for filename in self.todo:
1022 if filename.startswith('_service:') and os.path.exists(filename):
1023 os.unlink(filename) # remove local files
1024 print_request_list(self.apiurl, self.prjname, self.name)
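# A rough sketch of the commit protocol above (project/package names are made
# up and parts of the flow are elided): for an expanded link the working
# revision is first copied server-side,
#   POST .../source/home:user/foo?cmd=copy&rev=upload&orev=<rev>
# changed files are sent with put_source_file()/http_PUT and deletions with
# delete_remote_source_file(), and the commit itself is a final
#   POST .../source/home:user/foo?cmd=commit&user=<login>
# optionally extended with keeplink=1, linkrev=<md5> or repairlink=1 for
# links, link control and link repair.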
1026 def write_conflictlist(self):
1027 if len(self.in_conflict) == 0:
1029 os.unlink(os.path.join(self.storedir, '_in_conflict'))
1033 fname = os.path.join(self.storedir, '_in_conflict')
1034 f = open(fname, 'w')
1035 f.write('\n'.join(self.in_conflict))
1039 def updatefile(self, n, revision):
1040 filename = os.path.join(self.dir, n)
1041 storefilename = os.path.join(self.storedir, n)
1042 mtime = self.findfilebyname(n).mtime
1044 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1045 revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1047 shutil.copyfile(filename, storefilename)
1049 def mergefile(self, n):
1050 filename = os.path.join(self.dir, n)
1051 storefilename = os.path.join(self.storedir, n)
1052 myfilename = os.path.join(self.dir, n + '.mine')
1053 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1054 os.rename(filename, myfilename)
1056 mtime = self.findfilebyname(n).mtime
1057 get_source_file(self.apiurl, self.prjname, self.name, n,
1058 revision=self.rev, targetfilename=upfilename,
1059 progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1061 if binary_file(myfilename) or binary_file(upfilename):
1063 shutil.copyfile(upfilename, filename)
1064 shutil.copyfile(upfilename, storefilename)
1065 self.in_conflict.append(n)
1066 self.write_conflictlist()
1070 # diff3 OPTIONS... MINE OLDER YOURS
1071 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1072 # NOTE: shell=True is required here because of the output redirection in merge_cmd
1073 ret = subprocess.call(merge_cmd, shell=True)
1075 # "An exit status of 0 means `diff3' was successful, 1 means some
1076 # conflicts were found, and 2 means trouble."
1078 # merge was successful... clean up
1079 shutil.copyfile(upfilename, storefilename)
1080 os.unlink(upfilename)
1081 os.unlink(myfilename)
1084 # unsuccessful merge
1085 shutil.copyfile(upfilename, storefilename)
1086 self.in_conflict.append(n)
1087 self.write_conflictlist()
1090 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1091 print >>sys.stderr, 'the command line was:'
1092 print >>sys.stderr, merge_cmd
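# A small sketch of the files mergefile() juggles (file name and revision are
# made up): for a conflicting "foo.spec" at working-copy rev 7 it produces
#   foo.spec       the diff3 merge result (with conflict markers when ret == 1)
#   foo.spec.mine  the locally modified version
#   foo.spec.r7    the freshly fetched upstream version
# On a clean merge the helper files are removed right away; on a conflict they
# stay until clear_from_conflictlist() is run (e.g. via 'osc resolved').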
1097 def update_local_filesmeta(self, revision=None):
1099 Update the local _files file in the store.
1100 It is replaced with the version pulled from upstream.
1102 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size, meta=self.meta))
1103 store_write_string(self.absdir, '_files', meta)
1105 def update_datastructs(self):
1107 Update the internal data structures if the local _files
1108 file has changed (e.g. update_local_filesmeta() has been
1112 files_tree = read_filemeta(self.dir)
1113 files_tree_root = files_tree.getroot()
1115 self.rev = files_tree_root.get('rev')
1116 self.srcmd5 = files_tree_root.get('srcmd5')
1118 self.linkinfo = Linkinfo()
1119 self.linkinfo.read(files_tree_root.find('linkinfo'))
1121 self.filenamelist = []
1124 for node in files_tree_root.findall('entry'):
1126 f = File(node.get('name'),
1128 int(node.get('size')),
1129 int(node.get('mtime')))
1130 if node.get('skipped'):
1131 self.skipped.append(f.name)
1133 # okay, a very old version of _files, which didn't contain any metadata yet...
1134 f = File(node.get('name'), '', 0, 0)
1135 self.filelist.append(f)
1136 self.filenamelist.append(f.name)
1138 self.to_be_deleted = read_tobedeleted(self.dir)
1139 self.in_conflict = read_inconflict(self.dir)
1140 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1141 self.size_limit = read_sizelimit(self.dir)
1142 self.meta = self.ismetamode()
1144 # gather unversioned files, but ignore some stuff
1145 self.excluded = [ i for i in os.listdir(self.dir)
1146 for j in conf.config['exclude_glob']
1147 if fnmatch.fnmatch(i, j) ]
1148 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1149 if i not in self.excluded
1150 if i not in self.filenamelist ]
1153 """tells us if the package is a link (has 'linkinfo').
1154 A package with linkinfo is a package which links to another package.
1155 Returns True if the package is a link, otherwise False."""
1156 return self.linkinfo.islink()
1158 def isexpanded(self):
1159 """tells us if the package is a link which is expanded.
1160 Returns True if the package is expanded, otherwise False."""
1161 return self.linkinfo.isexpanded()
1163 def islinkrepair(self):
1164 """tells us if we are repairing a broken source link."""
1165 return self.linkrepair
1168 """tells us if we have pulled a link."""
1169 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1172 """tells us if the link is frozen."""
1173 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1175 def ismetamode(self):
1176 """tells us if the package is in meta mode"""
1177 return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
1179 def get_pulled_srcmd5(self):
1181 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1182 pulledrev = line.strip()
1185 def haslinkerror(self):
1187 Returns True if the link is broken otherwise False.
1188 If the package is not a link it returns False.
1190 return self.linkinfo.haserror()
1192 def linkerror(self):
1194 Returns an error message if the link is broken otherwise None.
1195 If the package is not a link it returns None.
1197 return self.linkinfo.error
1199 def update_local_pacmeta(self):
1201 Update the local _meta file in the store.
1202 It is replaced with the version pulled from upstream.
1204 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1205 store_write_string(self.absdir, '_meta', meta)
1207 def findfilebyname(self, n):
1208 for i in self.filelist:
1212 def status(self, n):
1216  file   storefile  file present   STATUS
1217 exists   exists      in _files
1220   x         x            x        ' '  if digest differs: 'M'
1221                                        and if in conflicts file: 'C'
1223   x         -            x        'D'  and listed in _to_be_deleted
1225   -         x            -        'D'  (when file in working copy is already deleted)
1226   -         -            x        'F'  (new in repo, but not yet in working copy)
1231 known_by_meta = False
1233 exists_in_store = False
1234 if n in self.filenamelist:
1235 known_by_meta = True
1236 if os.path.exists(os.path.join(self.absdir, n)):
1238 if os.path.exists(os.path.join(self.storedir, n)):
1239 exists_in_store = True
1242 if n in self.skipped:
1244 elif exists and not exists_in_store and known_by_meta:
1246 elif n in self.to_be_deleted:
1248 elif n in self.in_conflict:
1250 elif exists and exists_in_store and known_by_meta:
1251 #print self.findfilebyname(n)
1252 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1256 elif exists and not exists_in_store and not known_by_meta:
1258 elif exists and exists_in_store and not known_by_meta:
1260 elif not exists and exists_in_store and known_by_meta:
1262 elif not exists and not exists_in_store and known_by_meta:
1264 elif not exists and exists_in_store and not known_by_meta:
1266 elif not exists and not exists_in_store and not known_by_meta:
1267 # this case shouldn't happen (except if there was a typo in the filename etc.)
1268 raise IOError('osc: \'%s\' is not under version control' % n)
1272 def comparePac(self, cmp_pac):
1274 This method compares the local filelist with
1275 the filelist of the passed package to see which files
1276 were added, removed and changed.
1283 for file in self.filenamelist+self.filenamelist_unvers:
1284 state = self.status(file)
1285 if file in self.skipped:
1287 if state == 'A' and (not file in cmp_pac.filenamelist):
1288 added_files.append(file)
1289 elif file in cmp_pac.filenamelist and state == 'D':
1290 removed_files.append(file)
1291 elif state == ' ' and not file in cmp_pac.filenamelist:
1292 added_files.append(file)
1293 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1294 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1295 changed_files.append(file)
1296 for file in cmp_pac.filenamelist:
1297 if not file in self.filenamelist:
1298 removed_files.append(file)
1299 removed_files = set(removed_files)
1301 return changed_files, added_files, removed_files
1303 def merge(self, otherpac):
1304 self.todo += otherpac.todo
1318 '\n '.join(self.filenamelist),
1326 def read_meta_from_spec(self, spec = None):
1331 # scan for spec files
1332 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1333 if len(speclist) == 1:
1334 specfile = speclist[0]
1335 elif len(speclist) > 1:
1336 print 'the following specfiles were found:'
1337 for file in speclist:
1339 print 'please specify one with --specfile'
1342 print 'no specfile was found - please specify one ' \
1346 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1347 self.summary = data.get('Summary', '')
1348 self.url = data.get('Url', '')
1349 self.descr = data.get('%description', '')
1352 def update_package_meta(self, force=False):
1354 for the updatepacmetafromspec subcommand
1355 the force argument suppresses the confirmation question
1358 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1360 root = ET.fromstring(m)
1361 root.find('title').text = self.summary
1362 root.find('description').text = ''.join(self.descr)
1363 url = root.find('url')
1365 url = ET.SubElement(root, 'url')
1368 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1369 mf = metafile(u, ET.tostring(root))
1372 print '*' * 36, 'old', '*' * 36
1374 print '*' * 36, 'new', '*' * 36
1375 print ET.tostring(root)
1377 repl = raw_input('Write? (y/N/e) ')
1388 def mark_frozen(self):
1389 store_write_string(self.absdir, '_frozenlink', '')
1391 print "The link in this package is currently broken. Checking"
1392 print "out the last working version instead; please use 'osc pull'"
1393 print "to repair the link."
1396 def unmark_frozen(self):
1397 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1398 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1400 def latest_rev(self):
1401 if self.islinkrepair():
1402 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta)
1403 elif self.islink() and self.isexpanded():
1404 if self.isfrozen() or self.ispulled():
1405 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1408 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta)
1411 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1413 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta)
1416 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta)
1419 def update(self, rev = None, service_files = False, limit_size = None):
1420 # save filelist and (modified) status before replacing the meta file
1421 saved_filenames = self.filenamelist
1422 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1426 self.limit_size = limit_size
1428 self.limit_size = read_sizelimit(self.dir)
1429 self.update_local_filesmeta(rev)
1430 self = Package(self.dir, progress_obj=self.progress_obj)
1432 # which files no longer exist upstream?
1433 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1435 pathn = getTransActPath(self.dir)
1437 for filename in saved_filenames:
1438 if filename in self.skipped:
1440 if not filename.startswith('_service:') and filename in disappeared:
1441 print statfrmt('D', os.path.join(pathn, filename))
1442 # keep file if it has local modifications
1443 if oldp.status(filename) == ' ':
1444 self.delete_localfile(filename)
1445 self.delete_storefile(filename)
1447 for filename in self.filenamelist:
1448 if filename in self.skipped:
1451 state = self.status(filename)
1452 if not service_files and filename.startswith('_service:'):
1454 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1455 # no merge necessary... local file is changed, but upstream isn't
1457 elif state == 'M' and filename in saved_modifiedfiles:
1458 status_after_merge = self.mergefile(filename)
1459 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1461 self.updatefile(filename, rev)
1462 print statfrmt('U', os.path.join(pathn, filename))
1464 self.updatefile(filename, rev)
1465 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1467 self.updatefile(filename, rev)
1468 print statfrmt('A', os.path.join(pathn, filename))
1469 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1470 self.updatefile(filename, rev)
1471 self.delete_storefile(filename)
1472 print statfrmt('U', os.path.join(pathn, filename))
1476 self.update_local_pacmeta()
1478 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1479 print 'At revision %s.' % self.rev
1481 def run_source_services(self):
1482 if self.filenamelist.count('_service'):
1483 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1486 si.execute(self.absdir)
1488 def prepare_filelist(self):
1489 """Prepare a list of files, which will be processed by process_filelist
1490 method. This allows easy modifications of a file list in commit
1494 self.todo = self.filenamelist + self.filenamelist_unvers
1498 for f in [f for f in self.todo if not os.path.isdir(f)]:
1500 status = self.status(f)
1505 ret += "%s %s %s\n" % (action, status, f)
1508 # Edit a filelist for package \'%s\'
1510 # l, leave = leave a file as is
1511 # r, remove = remove a file
1512 # a, add = add a file
1514 # If you remove a file from the list, it will be left unchanged
1515 # If you remove all files, the commit will be aborted""" % self.name
1519 def edit_filelist(self):
1520 """Opens a package list in editor for editing. This allows easy
1521 modifications of it just by simple text editing
1525 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1526 f = os.fdopen(fd, 'w')
1527 f.write(self.prepare_filelist())
1529 mtime_orig = os.stat(filename).st_mtime
1532 run_editor(filename)
1533 mtime = os.stat(filename).st_mtime
1534 if mtime_orig < mtime:
1535 filelist = open(filename).readlines()
1539 raise oscerr.UserAbort()
1541 return self.process_filelist(filelist)
1543 def process_filelist(self, filelist):
1544 """Process a filelist - it add/remove or leave files. This depends on
1545 user input. If no file is processed, it raises an ValueError
1549 for line in [l.strip() for l in filelist if (l[0] != "#" and l.strip() != '')]:
1551 foo = line.split(' ')
1553 action, state, name = (foo[0], ' ', foo[3])
1555 action, state, name = (foo[0], foo[1], foo[2])
1558 action = action.lower()
1561 if action in ('r', 'remove'):
1562 if self.status(name) == '?':
1564 if name in self.todo:
1565 self.todo.remove(name)
1567 self.delete_file(name, True)
1568 elif action in ('a', 'add'):
1569 if self.status(name) != '?':
1570 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1573 elif action in ('l', 'leave'):
1576 raise ValueError("Unknown action `%s'" % action)
1579 raise ValueError("Empty filelist")
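# A minimal sketch of the edit_filelist() round trip (file names are made up):
# prepare_filelist() writes one "<action> <state> <name>" line per file, e.g.
#   leave   foo.spec
#   leave M foo.changes
#   add ? new-patch.diff
# and after editing, process_filelist() re-reads those lines and removes
# ('r'/'remove'), adds ('a'/'add') or leaves ('l'/'leave') the named files.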
1582 """for objects to represent the review state in a request"""
1583 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1585 self.by_user = by_user
1586 self.by_group = by_group
1589 self.comment = comment
1592 """for objects to represent the "state" of a request"""
1593 def __init__(self, name=None, who=None, when=None, comment=None):
1597 self.comment = comment
1600 """represents an action"""
1601 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1603 self.src_project = src_project
1604 self.src_package = src_package
1605 self.src_rev = src_rev
1606 self.dst_project = dst_project
1607 self.dst_package = dst_package
1608 self.src_update = src_update
1611 """represent a request and holds its metadata
1612 it has methods to read in metadata from xml,
1613 different views, ..."""
1616 self.state = RequestState()
1619 self.last_author = None
1622 self.statehistory = []
1625 def read(self, root):
1626 self.reqid = int(root.get('id'))
1627 actions = root.findall('action')
1628 if len(actions) == 0:
1629 actions = [ root.find('submit') ] # for old style requests
1631 for action in actions:
1632 type = action.get('type', 'submit')
1634 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1635 if action.findall('source'):
1636 n = action.find('source')
1637 src_prj = n.get('project', None)
1638 src_pkg = n.get('package', None)
1639 src_rev = n.get('rev', None)
1640 if action.findall('target'):
1641 n = action.find('target')
1642 dst_prj = n.get('project', None)
1643 dst_pkg = n.get('package', None)
1644 if action.findall('options'):
1645 n = action.find('options')
1646 if n.findall('sourceupdate'):
1647 src_update = n.find('sourceupdate').text.strip()
1648 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1650 msg = 'invalid request format:\n%s' % ET.tostring(root)
1651 raise oscerr.APIError(msg)
1654 n = root.find('state')
1655 self.state.name, self.state.who, self.state.when \
1656 = n.get('name'), n.get('who'), n.get('when')
1658 self.state.comment = n.find('comment').text.strip()
1660 self.state.comment = None
1662 # read the review states
1663 for r in root.findall('review'):
1665 s.state = r.get('state')
1666 s.by_user = r.get('by_user')
1667 s.by_group = r.get('by_group')
1668 s.who = r.get('who')
1669 s.when = r.get('when')
1671 s.comment = r.find('comment').text.strip()
1674 self.reviews.append(s)
1676 # read the state history
1677 for h in root.findall('history'):
1679 s.name = h.get('name')
1680 s.who = h.get('who')
1681 s.when = h.get('when')
1683 s.comment = h.find('comment').text.strip()
1686 self.statehistory.append(s)
1687 self.statehistory.reverse()
1689 # read a description, if it exists
1691 n = root.find('description').text
1696 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1697 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1698 dst_prj, dst_pkg, src_update)
1701 def list_view(self):
1702 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1704 for a in self.actions:
1705 dst = "%s/%s" % (a.dst_project, a.dst_package)
1706 if a.src_package == a.dst_package:
1710 if a.type=="submit":
1711 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1712 if a.type=="change_devel":
1713 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1714 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1716 ret += '\n %s: %-50s %-20s ' % \
1717 (a.type, sr_source, dst)
1719 if self.statehistory and self.statehistory[0]:
1721 for h in self.statehistory:
1722 who.append("%s(%s)" % (h.who,h.name))
1724 ret += "\n From: %s" % (' -> '.join(who))
1726 txt = re.sub(r'[^\x20-\x7e\n]', '_', self.descr)  # re has no [:isprint:] class
1728 lines = txt.splitlines()
1729 wrapper = textwrap.TextWrapper( width = 80,
1730 initial_indent=' Descr: ',
1731 subsequent_indent=' ')
1732 ret += "\n" + wrapper.fill(lines[0])
1733 wrapper.initial_indent = ' '
1734 for line in lines[1:]:
1735 ret += "\n" + wrapper.fill(line)
1741 def __cmp__(self, other):
1742 return cmp(self.reqid, other.reqid)
1746 for action in self.actions:
1747 action_list=action_list+" %s: " % (action.type)
1748 if action.type=="submit":
1751 r="(r%s)" % (action.src_rev)
1753 if action.src_update:
1754 m="(%s)" % (action.src_update)
1755 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1756 if action.dst_package:
1757 action_list=action_list+"/%s" % ( action.dst_package )
1758 elif action.type=="delete":
1759 action_list=action_list+" %s" % ( action.dst_project )
1760 if action.dst_package:
1761 action_list=action_list+"/%s" % ( action.dst_package )
1762 elif action.type=="change_devel":
1763 action_list=action_list+" %s/%s developed in %s/%s" % \
1764 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1765 action_list=action_list+"\n"
1780 self.state.name, self.state.when, self.state.who,
1783 if len(self.reviews):
1784 reviewitems = [ '%-10s %s %s %s %s %s' \
1785 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1786 for i in self.reviews ]
1787 s += '\nReview: ' + '\n '.join(reviewitems)
1790 if len(self.statehistory):
1791 histitems = [ '%-10s %s %s' \
1792 % (i.name, i.when, i.who) \
1793 for i in self.statehistory ]
1794 s += '\nHistory: ' + '\n '.join(histitems)
1801 """format time as Apr 02 18:19
1803 depending on whether it is in the current year
1807 if time.localtime()[0] == time.localtime(t)[0]:
1809 return time.strftime('%b %d %H:%M',time.localtime(t))
1811 return time.strftime('%b %d %Y',time.localtime(t))
1814 def is_project_dir(d):
1815 return os.path.exists(os.path.join(d, store, '_project')) and not \
1816 os.path.exists(os.path.join(d, store, '_package'))
1819 def is_package_dir(d):
1820 return os.path.exists(os.path.join(d, store, '_project')) and \
1821 os.path.exists(os.path.join(d, store, '_package'))
1823 def parse_disturl(disturl):
1824 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1825 revision), else raises an oscerr.WrongArgs exception
1828 m = DISTURL_RE.match(disturl)
1830 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1832 apiurl = m.group('apiurl')
1833 if apiurl.split('.')[0] != 'api':
1834 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1835 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
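# Illustrative example (the checksum 'abc123' is made up): a disturl such as
#   obs://build.opensuse.org/openSUSE:Factory/standard/abc123-foo
# is parsed into
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'foo', 'standard', 'abc123')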
1837 def parse_buildlogurl(buildlogurl):
1838 """Parse a build log url, returns a tuple (apiurl, project, package,
1839 repository, arch), else raises oscerr.WrongArgs exception"""
1841 global BUILDLOGURL_RE
1843 m = BUILDLOGURL_RE.match(buildlogurl)
1845 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1847 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
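# Illustrative example (project and package are made up): a build log URL like
#   https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/foo/_log
# yields
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'foo', 'standard', 'x86_64')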
1850 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1851 This is handy to allow copy/paste a project/package combination in this form.
1853 Trailing slashes are removed before the split, because the split would
1854 otherwise give an additional empty string.
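# Illustrative example: the single argument 'openSUSE:Factory/foo' becomes the
# two list entries 'openSUSE:Factory' and 'foo'; a trailing slash is dropped
# before splitting.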
1862 def expand_proj_pack(args, idx=0, howmany=0):
1863 """looks for occurance of '.' at the position idx.
1864 If howmany is 2, both proj and pack are expanded together
1865 using the current directory, or none of them, if not possible.
1866 If howmany is 0, proj is expanded if possible, then, if there
1867 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1868 expanded, if possible.
1869 If howmany is 1, only proj is expanded if possible.
1871 If args[idx] does not exist, an implicit '.' is assumed.
1872 If not enough elements up to idx exist, an error is raised.
1874 See also parseargs(args), slash_split(args), findpacs(args)
1875 All these need unification, somehow.
1878 # print args,idx,howmany
1881 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1883 if len(args) == idx:
1885 if args[idx+0] == '.':
1886 if howmany == 0 and len(args) > idx+1:
1887 if args[idx+1] == '.':
1889 # remove one dot and make sure to expand both proj and pack
1894 # print args,idx,howmany
1896 args[idx+0] = store_read_project('.')
1899 package = store_read_package('.')
1900 args.insert(idx+1, package)
1904 package = store_read_package('.')
1905 args.insert(idx+1, package)
1909 def findpacs(files, progress_obj=None):
1910 """collect Package objects belonging to the given files
1911 and make sure each Package is returned only once"""
1914 p = filedir_to_pac(f, progress_obj)
1917 if i.name == p.name:
1927 def filedir_to_pac(f, progress_obj=None):
1928 """Takes a working copy path, or a path to a file inside a working copy,
1929 and returns a Package object instance
1931 If the argument was a filename, add it onto the "todo" list of the Package """
1933 if os.path.isdir(f):
1935 p = Package(wd, progress_obj=progress_obj)
1937 wd = os.path.dirname(f) or os.curdir
1938 p = Package(wd, progress_obj=progress_obj)
1939 p.todo = [ os.path.basename(f) ]
1943 def read_filemeta(dir):
1945 r = ET.parse(os.path.join(dir, store, '_files'))
1946 except SyntaxError, e:
1947 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1948 'When parsing .osc/_files, the following error was encountered:\n'
1953 def read_tobedeleted(dir):
1955 fname = os.path.join(dir, store, '_to_be_deleted')
1957 if os.path.exists(fname):
1958 r = [ line.strip() for line in open(fname) ]
1963 def read_sizelimit(dir):
1965 fname = os.path.join(dir, store, '_size_limit')
1967 if os.path.exists(fname):
1968 r = open(fname).readline()
1970 if r is None or not r.isdigit():
1974 def read_inconflict(dir):
1976 fname = os.path.join(dir, store, '_in_conflict')
1978 if os.path.exists(fname):
1979 r = [ line.strip() for line in open(fname) ]
1984 def parseargs(list_of_args):
1985 """Convenience method osc's commandline argument parsing.
1987 If called with an empty tuple (or list), return a list containing the current directory.
1988 Otherwise, return a list of the arguments."""
1990 return list(list_of_args)
1995 def statfrmt(statusletter, filename):
1996 return '%s %s' % (statusletter, filename)
1999 def pathjoin(a, *p):
2000 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2001 path = os.path.join(a, *p)
2002 if path.startswith('./'):
2007 def makeurl(baseurl, l, query=[]):
2008 """Given a list of path compoments, construct a complete URL.
2010 Optional parameters for a query string can be given as a list, as a
2011 dictionary, or as an already assembled string.
2012 In case of a dictionary, the parameters will be urlencoded by this
2013 function. In case of a list they will not be encoded -- this is for backwards compatibility.
2016 if conf.config['verbose'] > 1:
2017 print 'makeurl:', baseurl, l, query
2019 if isinstance(query, list):
2020 query = '&'.join(query)
2021 elif isinstance(query, dict):
2022 query = urlencode(query)
2024 scheme, netloc = urlsplit(baseurl)[0:2]
2025 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
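# Illustrative usage (project and package are made up):
#   makeurl('https://api.opensuse.org', ['source', 'home:user', 'foo'],
#           query={'rev': 'latest'})
#   -> 'https://api.opensuse.org/source/home:user/foo?rev=latest'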
2028 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2029 """wrapper around urllib2.urlopen for error handling,
2030 and to support additional (PUT, DELETE) methods"""
2034 if conf.config['http_debug']:
2037 print '--', method, url
2039 if method == 'POST' and not file and not data:
2040 # adding data to an urllib2 request transforms it into a POST
2043 req = urllib2.Request(url)
2044 api_host_options = {}
2046 api_host_options = conf.get_apiurl_api_host_options(url)
2047 for header, value in api_host_options['http_headers']:
2048 req.add_header(header, value)
2050 # "external" request (url is no apiurl)
2053 req.get_method = lambda: method
2055 # POST requests are application/x-www-form-urlencoded by default
2056 # since we change the request into PUT, we also need to adjust the content type header
2057 if method == 'PUT' or (method == 'POST' and data):
2058 req.add_header('Content-Type', 'application/octet-stream')
2060 if isinstance(headers, dict):
2061 for i in headers.keys():
2063 req.add_header(i, headers[i])
2065 if file and not data:
2066 size = os.path.getsize(file)
2068 data = open(file, 'rb').read()
2071 filefd = open(file, 'rb')
2073 if sys.platform[:3] != 'win':
2074 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2076 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2078 except EnvironmentError, e:
2080 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2081 '\non a filesystem which does not support this.' % (e, file))
2082 elif hasattr(e, 'winerror') and e.winerror == 5:
2083 # falling back to the default io
2084 data = open(file, 'rb').read()
2088 if conf.config['debug']: print method, url
2090 old_timeout = socket.getdefaulttimeout()
2091 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2092 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2093 socket.setdefaulttimeout(timeout)
2095 fd = urllib2.urlopen(req, data=data)
2097 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2098 socket.setdefaulttimeout(old_timeout)
2099 if hasattr(conf.cookiejar, 'save'):
2100 conf.cookiejar.save(ignore_discard=True)
2102 if filefd: filefd.close()
2107 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2108 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2109 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2110 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
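# Illustrative usage of the wrappers above (project and package are made up):
#   u = makeurl(apiurl, ['source', 'home:user', 'foo', '_meta'])
#   f = http_GET(u)       # file-like response object from urllib2.urlopen
#   data = f.read()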
2113 def init_project_dir(apiurl, dir, project):
2114 if not os.path.exists(dir):
2115 if conf.config['checkout_no_colon']:
2116 os.makedirs(dir) # helpful with checkout_no_colon
2119 if not os.path.exists(os.path.join(dir, store)):
2120 os.mkdir(os.path.join(dir, store))
2122 # print 'project=',project,' dir=',dir
2123 store_write_project(dir, project)
2124 store_write_apiurl(dir, apiurl)
2125 if conf.config['do_package_tracking']:
2126 store_write_initial_packages(dir, project, [])
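# Rough sketch (not part of the original module) of the working copy metadata
# written above, assuming the default store directory name '.osc':
#
#   <project dir>/.osc/_project    project name
#   <project dir>/.osc/_apiurl     API server this checkout belongs to
#   <project dir>/.osc/_packages   package tracking state (only with do_package_tracking)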
2128 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=False):
2129 if not os.path.isdir(dir):
2132 f = open('_project', 'w')
2133 f.write(project + '\n')
2135 f = open('_package', 'w')
2136 f.write(package + '\n')
2140 store_write_string(os.pardir, '_meta_mode', '')
2143 store_write_string(os.pardir, '_size_limit', str(limit_size))
2146 fmeta = ''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta))
2147 store_write_string(os.pardir, '_files', fmeta)
2150 ET.ElementTree(element=ET.Element('directory')).write('_files')
2152 store_write_string(os.pardir, '_osclib_version', __store_version__ + '\n')
2153 store_write_apiurl(os.path.pardir, apiurl)
2157 def check_store_version(dir):
2158 versionfile = os.path.join(dir, store, '_osclib_version')
2160 v = open(versionfile).read().strip()
2165 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2166 if os.path.exists(os.path.join(dir, '.svn')):
2167 msg = msg + '\nTry svn instead of osc.'
2168 raise oscerr.NoWorkingCopy(msg)
2170 if v != __store_version__:
2171 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2172 # these old store versions are compatible; just update the version stamp
2173 f = open(versionfile, 'w')
2174 f.write(__store_version__ + '\n')
2177 msg = 'The osc metadata of your working copy "%s"' % dir
2178 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2179 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2180 raise oscerr.WorkingCopyWrongVersion(msg)
2183 def meta_get_packagelist(apiurl, prj, deleted=None):
2187 query['deleted'] = 1
2189 u = makeurl(apiurl, ['source', prj], query)
2191 root = ET.parse(f).getroot()
2192 return [ node.get('name') for node in root.findall('entry') ]
2195 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2196 """return a list of file names,
2197 or a list File() instances if verbose=True"""
2203 query['rev'] = revision
2205 query['rev'] = 'latest'
2207 u = makeurl(apiurl, ['source', prj, package], query=query)
2209 root = ET.parse(f).getroot()
2212 return [ node.get('name') for node in root.findall('entry') ]
2216 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2217 rev = root.get('rev')
2218 for node in root.findall('entry'):
2219 f = File(node.get('name'),
2221 int(node.get('size')),
2222 int(node.get('mtime')))
2228 def meta_get_project_list(apiurl, deleted=None):
2231 query['deleted'] = 1
2233 u = makeurl(apiurl, ['source'], query)
2235 root = ET.parse(f).getroot()
2236 return sorted([ node.get('name') for node in root ])
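# Illustrative sketch (not part of the original module): listing projects,
# packages and files via the meta_get_* helpers. The apiurl and project/package
# names are hypothetical.
#
#   prjs  = meta_get_project_list(apiurl)                      # ['home:user', ...]
#   pkgs  = meta_get_packagelist(apiurl, 'home:user')          # ['pkg1', 'pkg2', ...]
#   files = meta_get_filelist(apiurl, 'home:user', 'pkg1')     # ['pkg1.spec', ...]
#   files = meta_get_filelist(apiurl, 'home:user', 'pkg1', verbose=True)  # File() objects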
2239 def show_project_meta(apiurl, prj):
2240 url = makeurl(apiurl, ['source', prj, '_meta'])
2242 return f.readlines()
2245 def show_project_conf(apiurl, prj):
2246 url = makeurl(apiurl, ['source', prj, '_config'])
2248 return f.readlines()
2251 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2252 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2256 except urllib2.HTTPError, e:
2257 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2261 def show_package_meta(apiurl, prj, pac, meta=False):
2266 # packages like _pattern and _project do not have a _meta file
2267 if pac.startswith('_pattern') or pac.startswith('_project'):
2270 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
2273 return f.readlines()
2274 except urllib2.HTTPError, e:
2275 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2279 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2281 path.append('source')
2287 path.append('_attribute')
2289 path.append(attribute)
2292 query.append("with_default=1")
2294 query.append("with_project=1")
2295 url = makeurl(apiurl, path, query)
2298 return f.readlines()
2299 except urllib2.HTTPError, e:
2300 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2304 def show_develproject(apiurl, prj, pac):
2305 m = show_package_meta(apiurl, prj, pac)
2307 return ET.fromstring(''.join(m)).find('devel').get('project')
2312 def show_package_disabled_repos(apiurl, prj, pac):
2313 m = show_package_meta(apiurl, prj, pac)
2315 root = ET.fromstring(''.join(m))
2316 elm = root.find('build')
2317 r = [ node.get('repository') for node in elm.findall('disable')]
2323 def show_pattern_metalist(apiurl, prj):
2324 url = makeurl(apiurl, ['source', prj, '_pattern'])
2328 except urllib2.HTTPError, e:
2329 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2331 r = [ node.get('name') for node in tree.getroot() ]
2336 def show_pattern_meta(apiurl, prj, pattern):
2337 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2340 return f.readlines()
2341 except urllib2.HTTPError, e:
2342 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2347 """metafile that can be manipulated and is stored back after manipulation."""
2348 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2352 self.change_is_required = change_is_required
2353 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2354 f = os.fdopen(fd, 'w')
2355 f.write(''.join(input))
2357 self.hash_orig = dgst(self.filename)
2360 hash = dgst(self.filename)
2361 if self.change_is_required and hash == self.hash_orig:
2362 print 'File unchanged. Not saving.'
2363 os.unlink(self.filename)
2366 print 'Sending meta data...'
2367 # don't do any exception handling... it's up to the caller what to do in case
2369 http_PUT(self.url, file=self.filename)
2370 os.unlink(self.filename)
2376 run_editor(self.filename)
2380 except urllib2.HTTPError, e:
2381 error_help = "%d" % e.code
2382 if e.headers.get('X-Opensuse-Errorcode'):
2383 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2385 print >>sys.stderr, 'BuildService API error:', error_help
2386 # examine the error - we can't raise an exception because we might want
2389 if '<summary>' in data:
2390 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2391 input = raw_input('Try again? ([y/N]): ')
2392 if input not in ['y', 'Y']:
2398 if os.path.exists(self.filename):
2399 print 'discarding %s' % self.filename
2400 os.unlink(self.filename)
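# Illustrative sketch (not part of the original module) of how the metafile
# helper is meant to be used; it assumes the sync()/edit() methods whose bodies
# appear above, and url/current_meta_lines are placeholders.
#
#   f = metafile(url, current_meta_lines, change_is_required=True)
#   f.edit()   # open the temp file in $EDITOR, retry on BuildService errors, upload
#   f.sync()   # non-interactive: upload (skipped if unchanged and change_is_required)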
2403 # different types of metadata
2404 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2405 'template': new_project_templ,
2408 'pkg': { 'path' : 'source/%s/%s/_meta',
2409 'template': new_package_templ,
2412 'attribute': { 'path' : 'source/%s/%s/_meta',
2413 'template': new_attribute_templ,
2416 'prjconf': { 'path': 'source/%s/_config',
2420 'user': { 'path': 'person/%s',
2421 'template': new_user_template,
2424 'pattern': { 'path': 'source/%s/_pattern/%s',
2425 'template': new_pattern_template,
2430 def meta_exists(metatype,
2437 apiurl = conf.config['apiurl']
2438 url = make_meta_url(metatype, path_args, apiurl)
2440 data = http_GET(url).readlines()
2441 except urllib2.HTTPError, e:
2442 if e.code == 404 and create_new:
2443 data = metatypes[metatype]['template']
2445 data = StringIO(data % template_args).readlines()
2450 def make_meta_url(metatype, path_args=None, apiurl=None):
2452 apiurl = conf.config['apiurl']
2453 if metatype not in metatypes.keys():
2454 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2455 path = metatypes[metatype]['path']
2458 path = path % path_args
2460 return makeurl(apiurl, [path])
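# Illustrative sketch (not part of the original module): the metatypes table
# maps a meta type to its API path and template, e.g.
#
#   make_meta_url('pkg', ('home:user', 'pkg1'), apiurl)
#       -> <apiurl>/source/home:user/pkg1/_meta
#   make_meta_url('prjconf', 'home:user', apiurl)
#       -> <apiurl>/source/home:user/_config
#
# meta_exists() and edit_meta() build on this to fetch, edit and store such files.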
2463 def edit_meta(metatype,
2468 change_is_required=False,
2472 apiurl = conf.config['apiurl']
2474 data = meta_exists(metatype,
2477 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2481 change_is_required = True
2483 url = make_meta_url(metatype, path_args, apiurl)
2484 f = metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2492 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=False):
2495 query['rev'] = revision
2497 query['rev'] = 'latest'
2499 query['linkrev'] = linkrev
2500 elif conf.config['linkcontrol']:
2501 query['linkrev'] = 'base'
2507 query['emptylink'] = 1
2508 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2509 # look for "too large" files according to size limit and mark them
2510 root = ET.fromstring(''.join(f.readlines()))
2511 for e in root.findall('entry'):
2512 size = e.get('size')
2513 if size and limit_size and int(size) > int(limit_size):
2514 e.set('skipped', 'true')
2515 return ET.tostring(root)
2518 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False):
2519 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
2520 return ET.fromstring(''.join(m)).get('srcmd5')
2523 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False):
2524 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta)
2526 # only source link packages have a <linkinfo> element.
2527 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2535 raise oscerr.LinkExpandError(prj, pac, li.error)
2539 def show_upstream_rev(apiurl, prj, pac, meta=False):
2540 m = show_files_meta(apiurl, prj, pac, meta=meta)
2541 return ET.fromstring(''.join(m)).get('rev')
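# Illustrative sketch (not part of the original module): the show_upstream_*
# helpers are thin wrappers around show_files_meta(); names below are hypothetical.
#
#   srcmd5 = show_upstream_srcmd5(apiurl, 'home:user', 'pkg1')               # unexpanded sources
#   srcmd5 = show_upstream_srcmd5(apiurl, 'home:user', 'pkg1', expand=True)  # expanded source link
#   rev    = show_upstream_rev(apiurl, 'home:user', 'pkg1')                  # latest revision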
2544 def read_meta_from_spec(specfile, *args):
2545 import codecs, locale, re
2547 Read tags and sections from spec file. To read out
2548 a tag the passed argument mustn't end with a colon. To
2549 read out a section the passed argument must start with
2551 This method returns a dictionary which contains the
2555 if not os.path.isfile(specfile):
2556 raise IOError('\'%s\' is not a regular file' % specfile)
2559 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2560 except UnicodeDecodeError:
2561 lines = open(specfile).readlines()
2568 if itm.startswith('%'):
2569 sections.append(itm)
2573 tag_pat = r'(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2575 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2576 if m and m.group('val'):
2577 spec_data[tag] = m.group('val').strip()
2579 section_pat = r'^%s\s*?$'
2580 for section in sections:
2581 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2583 start = lines.index(m.group()+'\n') + 1
2585 for line in lines[start:]:
2586 if line.startswith('%'):
2589 spec_data[section] = data
2593 def get_default_editor():
2595 system = platform.system()
2596 if system == 'Windows':
2598 if system == 'Linux':
2601 dist = platform.linux_distribution()[0]
2602 except AttributeError:
2603 dist = platform.dist()[0]
2604 if dist == 'debian':
2606 elif dist == 'fedora':
2611 def get_default_pager():
2613 system = platform.system()
2614 if system == 'Windows':
2616 if system == 'Linux':
2619 dist = platform.linux_distribution()[0]
2620 except AttributeError:
2621 dist = platform.dist()[0]
2622 if dist == 'debian':
2627 def run_pager(message):
2628 import tempfile, sys
2630 if not sys.stdout.isatty():
2633 tmpfile = tempfile.NamedTemporaryFile()
2634 tmpfile.write(message)
2636 pager = os.getenv('PAGER', default=get_default_pager())
2639 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
2641 raise oscerr.RuntimeError('cannot run pager \'%s\': %s' % (pager, e.strerror), pager)
2645 def run_editor(filename):
2646 editor = os.getenv('EDITOR', default=get_default_editor())
2647 cmd = editor.split(' ')
2648 cmd.append(filename)
2650 return subprocess.call(cmd)
2652 raise oscerr.RuntimeError('cannot run editor \'%s\': %s' % (editor, e.strerror), editor)
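# Illustrative sketch (not part of the original module): the editor/pager
# helpers honour the usual environment variables and fall back to a
# distribution-specific default; the override below is hypothetical.
#
#   os.environ['EDITOR'] = 'vim -u NONE'
#   run_editor('/tmp/osc-commitmsg.diff')   # splits EDITOR on spaces and appends the file name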
2654 def edit_message(footer='', template='', templatelen=30):
2655 delim = '--This line, and those below, will be ignored--\n'
2657 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2658 f = os.fdopen(fd, 'w')
2660 if templatelen is not None:
2661 lines = template.splitlines()
2662 template = '\n'.join(lines[:templatelen])
2663 if lines[templatelen:]:
2664 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2674 run_editor(filename)
2675 msg = open(filename).read().split(delim)[0].rstrip()
2680 input = raw_input('Log message not specified\n'
2681 'a)bort, c)ontinue, e)dit: ')
2683 raise oscerr.UserAbort()
2693 def create_delete_request(apiurl, project, package, message):
2698 package = """package="%s" """ % (package)
2704 <action type="delete">
2705 <target project="%s" %s/>
2708 <description>%s</description>
2710 """ % (project, package,
2711 cgi.escape(message or ''))
2713 u = makeurl(apiurl, ['request'], query='cmd=create')
2714 f = http_POST(u, data=xml)
2716 root = ET.parse(f).getroot()
2717 return root.get('id')
2720 def create_change_devel_request(apiurl,
2721 devel_project, devel_package,
2728 <action type="change_devel">
2729 <source project="%s" package="%s" />
2730 <target project="%s" package="%s" />
2733 <description>%s</description>
2735 """ % (devel_project,
2739 cgi.escape(message or ''))
2741 u = makeurl(apiurl, ['request'], query='cmd=create')
2742 f = http_POST(u, data=xml)
2744 root = ET.parse(f).getroot()
2745 return root.get('id')
2748 # This creates an old style submit request for server api 1.0
2749 def create_submit_request(apiurl,
2750 src_project, src_package,
2751 dst_project=None, dst_package=None,
2752 message=None, orev=None, src_update=None):
2757 options_block = """<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2759 # Yes, this kind of xml construction is horrible
2764 packagexml = """package="%s" """ % (dst_package)
2765 targetxml = """<target project="%s" %s /> """ % (dst_project, packagexml)
2766 # XXX: keep the old template for now in order to work with old obs instances
2768 <request type="submit">
2770 <source project="%s" package="%s" rev="%s"/>
2775 <description>%s</description>
2779 orev or show_upstream_rev(apiurl, src_project, src_package),
2782 cgi.escape(message or ""))
2784 u = makeurl(apiurl, ['request'], query='cmd=create')
2785 f = http_POST(u, data=xml)
2787 root = ET.parse(f).getroot()
2788 return root.get('id')
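# Illustrative sketch (not part of the original module): the create_*_request
# helpers POST a small request XML to /request?cmd=create and return the new
# request id. Project and package names are hypothetical.
#
#   reqid = create_submit_request(apiurl, 'home:user', 'pkg1',
#                                 'openSUSE:Factory', 'pkg1',
#                                 message='update to 1.2')
#   reqid = create_delete_request(apiurl, 'home:user:old', None,
#                                 message='project no longer needed')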
2791 def get_request(apiurl, reqid):
2792 u = makeurl(apiurl, ['request', reqid])
2794 root = ET.parse(f).getroot()
2801 def change_review_state(apiurl, reqid, newstate, by_user='', message='', supersed=''):
2804 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2805 f = http_POST(u, data=message)
2808 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2811 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2812 f = http_POST(u, data=message)
2816 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2818 if 'all' not in req_state:
2819 for state in req_state:
2820 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2822 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2824 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2828 todo['project'] = project
2830 todo['package'] = package
2831 for kind, val in todo.iteritems():
2832 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2833 'action/source/@%(kind)s=\'%(val)s\' or ' \
2834 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2835 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2837 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2838 for i in exclude_target_projects:
2839 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2840 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2842 if conf.config['verbose'] > 1:
2843 print '[ %s ]' % xpath
2844 res = search(apiurl, request=xpath)
2845 collection = res['request']
2847 for root in collection.findall('request'):
2853 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2854 """Return all new requests for all projects/packages where is user is involved"""
2856 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2857 for i in res['project_id'].findall('project'):
2858 projpkgs[i.get('name')] = []
2859 for i in res['package_id'].findall('package'):
2860 if i.get('project') not in projpkgs:
2861 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2863 for prj, pacs in projpkgs.iteritems():
2865 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2869 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2870 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2871 xpath = xpath_join(xpath, xp, inner=True)
2873 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2874 if 'all' not in req_state:
2876 for state in req_state:
2877 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2878 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2879 res = search(apiurl, request=xpath)
2881 for root in res['request'].findall('request'):
2887 def get_request_log(apiurl, reqid):
2888 r = get_request(apiurl, reqid)
2890 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2891 # the description of the request is used for the initial log entry
2892 # otherwise its comment attribute would contain None
2893 if len(r.statehistory) >= 1:
2894 r.statehistory[-1].comment = r.descr
2896 r.state.comment = r.descr
2897 for state in [ r.state ] + r.statehistory:
2898 s = frmt % (state.name, state.who, state.when, str(state.comment))
2903 def get_user_meta(apiurl, user):
2904 u = makeurl(apiurl, ['person', quote_plus(user)])
2907 return ''.join(f.readlines())
2908 except urllib2.HTTPError:
2909 print 'user \'%s\' not found' % user
2913 def get_user_data(apiurl, user, *tags):
2914 """get specified tags from the user meta"""
2915 meta = get_user_meta(apiurl, user)
2918 root = ET.fromstring(meta)
2921 if root.find(tag).text is not None:
2922 data.append(root.find(tag).text)
2926 except AttributeError:
2927 # this part is reached if the tags tuple contains an invalid tag
2928 print 'The xml file for user \'%s\' seems to be broken' % user
2933 def download(url, filename, progress_obj = None, mtime = None):
2934 import tempfile, shutil
2937 prefix = os.path.basename(filename)
2938 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
2939 os.chmod(tmpfile, 0644)
2941 o = os.fdopen(fd, 'wb')
2942 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2945 shutil.move(tmpfile, filename)
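# Illustrative sketch (not part of the original module): download() streams a
# URL into a temporary '.osc' file next to the target and moves it into place
# once the transfer completed; the URL and path below are placeholders.
#
#   download('https://api.example.org/source/home:user/pkg1/pkg1.spec',
#            '/tmp/pkg1.spec')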