1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
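# For illustration only (host and hash are made up): DISTURL_RE matches strings like
#   obs://build.opensuse.org/openSUSE:Factory/standard/2cd1f2bd...-osc
# and BUILDLOGURL_RE matches build log urls like
#   https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/osc/_log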
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breaking tools which use the osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="Fedora_12">
81 <path project="Fedora:12" repository="standard" />
85 <repository name="SLE_11">
86 <path project="SUSE:SLE-11" repository="standard" />
95 new_package_templ = """\
96 <package name="%(name)s">
98 <title></title> <!-- Title of package -->
101 <!-- for long description -->
104 <person role="maintainer" userid="%(user)s"/>
105 <person role="bugowner" userid="%(user)s"/>
107 <url>PUT_UPSTREAM_URL_HERE</url>
111 use one of the examples below to disable building of this package
112 on a certain architecture, in a certain repository,
113 or a combination thereof:
115 <disable arch="x86_64"/>
116 <disable repository="SUSE_SLE-10"/>
117 <disable repository="SUSE_SLE-10" arch="x86_64"/>
119 Possible sections where you can use the tags above:
129 Please have a look at:
130 http://en.opensuse.org/Restricted_Formats
131 Packages containing formats listed there are NOT allowed to
132 be packaged in the openSUSE Buildservice and will be deleted!
139 new_attribute_templ = """\
141 <attribute namespace="" name="">
147 new_user_template = """\
149 <login>%(user)s</login>
150 <email>PUT_EMAIL_ADDRESS_HERE</email>
151 <realname>PUT_REAL_NAME_HERE</realname>
153 <project name="home:%(user)s"/>
169 new_pattern_template = """\
170 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
176 buildstatus_symbols = {'succeeded': '.',
178 'expansion error': 'U', # obsolete with OBS 2.0
191 # our own xml writer function to write xml nicely, but with correct syntax
192 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
193 from xml.dom import minidom
194 def fixed_writexml(self, writer, indent="", addindent="", newl=""):
195 # indent = current indentation
196 # addindent = indentation to add to higher levels
197 # newl = newline string
198 writer.write(indent+"<" + self.tagName)
200 attrs = self._get_attributes()
201 a_names = attrs.keys()
204 for a_name in a_names:
205 writer.write(" %s=\"" % a_name)
206 minidom._write_data(writer, attrs[a_name].value)
209 if len(self.childNodes) == 1 \
210 and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
212 self.childNodes[0].writexml(writer, "", "", "")
213 writer.write("</%s>%s" % (self.tagName, newl))
215 writer.write(">%s"%(newl))
216 for node in self.childNodes:
217 node.writexml(writer,indent+addindent,addindent,newl)
218 writer.write("%s</%s>%s" % (indent,self.tagName,newl))
220 writer.write("/>%s"%(newl))
221 # replace minidom's function with ours
222 minidom.Element.writexml = fixed_writexml
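# Illustrative effect of the patch above (example values, not part of the original code):
# with the fixed writexml, e.g.
#   minidom.parseString('<a><b>text</b></a>').toprettyxml(indent='  ')
# keeps '<b>text</b>' on a single line instead of spreading the text node over
# three lines with extra whitespace.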
225 # os.path.samefile is available only under Unix
226 def os_path_samefile(path1, path2):
228 return os.path.samefile(path1, path2)
230 return os.path.realpath(path1) == os.path.realpath(path2)
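# Usage sketch (illustrative): where os.path.samefile is unavailable or fails,
# this falls back to comparing resolved paths, so e.g.
#   os_path_samefile('foo/../bar', 'bar')  ->  True   (assuming 'foo' is not a symlink)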
233 """represent a file, including its metadata"""
234 def __init__(self, name, md5, size, mtime):
244 """Source service content
247 """creates an empty serviceinfo instance"""
250 def read(self, serviceinfo_node):
251 """read in the source services <services> element passed as
254 if serviceinfo_node == None:
257 services = serviceinfo_node.findall('service')
259 for service in services:
260 name = service.get('name')
262 for param in service.findall('param'):
263 option = param.get('name', None)
265 name += " --" + option + " '" + value + "'"
266 self.commands.append(name)
268 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
269 raise oscerr.APIError(msg)
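# For reference (structure inferred from read() above; names and values are examples):
# a _services node parsed here looks roughly like
#   <services>
#     <service name="download_url">
#       <param name="host">example.org</param>
#       <param name="path">/pub/foo.tar.gz</param>
#     </service>
#   </services>
# which read() turns into the command string
#   "download_url --host 'example.org' --path '/pub/foo.tar.gz'"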
271 def addVerifyFile(self, serviceinfo_node, filename):
274 f = open(filename, 'r')
275 digest = hashlib.sha256(f.read()).hexdigest()
279 s = ET.Element( "service", name="verify_file" )
280 ET.SubElement(s, "param", name="file").text = filename
281 ET.SubElement(s, "param", name="verifier").text = "sha256"
282 ET.SubElement(s, "param", name="checksum").text = digest
288 def addDownloadUrl(self, serviceinfo_node, url_string):
289 from urlparse import urlparse
290 url = urlparse( url_string )
291 protocol = url.scheme
296 s = ET.Element( "service", name="download_url" )
297 ET.SubElement(s, "param", name="protocol").text = protocol
298 ET.SubElement(s, "param", name="host").text = host
299 ET.SubElement(s, "param", name="path").text = path
305 def execute(self, dir):
308 for call in self.commands:
309 temp_dir = tempfile.mkdtemp()
310 name = call.split(None, 1)[0]
311 if not os.path.exists("/usr/lib/obs/service/"+name):
312 msg = "ERROR: service is not installed!\n"
313 msg += "Maybe try this: zypper in obs-server-" + name
314 raise oscerr.APIError(msg)
315 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
316 if conf.config['verbose'] > 1:
317 print "Run source service:", c
318 ret = subprocess.call(c, shell=True)
320 print "ERROR: service call failed: " + c
321 # FIXME: addDownloadUrlService calls si.execute after
322 # updating _services.
323 print " (your _services file may be corrupt now)"
325 for file in os.listdir(temp_dir):
326 shutil.move( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
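# e.g. a file "foo.tar.gz" produced by the "download_url" service ends up
# as "_service:download_url:foo.tar.gz" in the package dir (names illustrative,
# inferred from the move above)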
330 """linkinfo metadata (which is part of the xml representing a directory
333 """creates an empty linkinfo instance"""
343 def read(self, linkinfo_node):
344 """read in the linkinfo metadata from the <linkinfo> element passed as
346 If the passed element is None, the method does nothing.
348 if linkinfo_node == None:
350 self.project = linkinfo_node.get('project')
351 self.package = linkinfo_node.get('package')
352 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
353 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
354 self.srcmd5 = linkinfo_node.get('srcmd5')
355 self.error = linkinfo_node.get('error')
356 self.rev = linkinfo_node.get('rev')
357 self.baserev = linkinfo_node.get('baserev')
360 """returns True if the linkinfo is not empty, otherwise False"""
361 if self.xsrcmd5 or self.lsrcmd5:
365 def isexpanded(self):
366 """returns True if the package is an expanded link"""
367 if self.lsrcmd5 and not self.xsrcmd5:
372 """returns True if the link is in error state (could not be applied)"""
378 """return an informatory string representation"""
379 if self.islink() and not self.isexpanded():
380 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
381 % (self.project, self.package, self.xsrcmd5, self.rev)
382 elif self.islink() and self.isexpanded():
384 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
385 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
387 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
388 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
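# For reference (attribute names taken from read() above; values are placeholders):
# the <linkinfo> element of a server directory listing looks roughly like
#   <linkinfo project="openSUSE:Factory" package="osc" srcmd5="..." xsrcmd5="..."
#             lsrcmd5="..." rev="..." baserev="..."/>
# with an additional 'error' attribute when the link could not be applied.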
394 """represent a project directory, holding packages"""
395 def __init__(self, dir, getPackageList=True, progress_obj=None):
398 self.absdir = os.path.abspath(dir)
399 self.progress_obj = progress_obj
401 self.name = store_read_project(self.dir)
402 self.apiurl = store_read_apiurl(self.dir)
405 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
407 self.pacs_available = []
409 if conf.config['do_package_tracking']:
410 self.pac_root = self.read_packages().getroot()
411 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
412 self.pacs_excluded = [ i for i in os.listdir(self.dir)
413 for j in conf.config['exclude_glob']
414 if fnmatch.fnmatch(i, j) ]
415 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
416 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
417 # in the self.pacs_broken list
418 self.pacs_broken = []
419 for p in self.pacs_have:
420 if not os.path.isdir(os.path.join(self.absdir, p)):
421 # all states will be replaced with the '!'-state
422 # (unless it is already marked as deleted ('D'-state))
423 self.pacs_broken.append(p)
425 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
427 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
429 def checkout_missing_pacs(self, expand_link=False):
430 for pac in self.pacs_missing:
432 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
433 # pac is not under version control but a local file/dir exists
434 msg = 'can\'t add package \'%s\': Object already exists' % pac
435 raise oscerr.PackageExists(self.name, pac, msg)
437 print 'checking out new package %s' % pac
438 checkout_package(self.apiurl, self.name, pac, \
439 pathname=getTransActPath(os.path.join(self.dir, pac)), \
440 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
442 def set_state(self, pac, state):
443 node = self.get_package_node(pac)
445 self.new_package_entry(pac, state)
447 node.attrib['state'] = state
449 def get_package_node(self, pac):
450 for node in self.pac_root.findall('package'):
451 if pac == node.get('name'):
455 def del_package_node(self, pac):
456 for node in self.pac_root.findall('package'):
457 if pac == node.get('name'):
458 self.pac_root.remove(node)
460 def get_state(self, pac):
461 node = self.get_package_node(pac)
463 return node.get('state')
467 def new_package_entry(self, name, state):
468 ET.SubElement(self.pac_root, 'package', name=name, state=state)
470 def read_packages(self):
471 packages_file = os.path.join(self.absdir, store, '_packages')
472 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
473 return ET.parse(packages_file)
475 # scan project for existing packages and migrate them
477 for data in os.listdir(self.dir):
478 pac_dir = os.path.join(self.absdir, data)
479 # we cannot use self.pacs_available because we cannot guarantee that the package list
480 # was fetched from the server
481 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
482 and Package(pac_dir).name == data:
483 cur_pacs.append(ET.Element('package', name=data, state=' '))
484 store_write_initial_packages(self.absdir, self.name, cur_pacs)
485 return ET.parse(os.path.join(self.absdir, store, '_packages'))
487 def write_packages(self):
488 # TODO: should we only modify the existing file instead of overwriting?
489 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
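# For reference (element and attribute names as used above; root element name and
# values are assumptions): the _packages file written here looks roughly like
#   <project name="home:user">
#     <package name="foo" state=" "/>
#     <package name="bar" state="A"/>
#   </project>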
491 def addPackage(self, pac):
493 for i in conf.config['exclude_glob']:
494 if fnmatch.fnmatch(pac, i):
495 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
496 raise oscerr.OscIOError(None, msg)
497 state = self.get_state(pac)
498 if state == None or state == 'D':
499 self.new_package_entry(pac, 'A')
500 self.write_packages()
501 # sometimes the new pac doesn't exist in the list because
502 # it would take too much time to update all data structs regularly
503 if pac in self.pacs_unvers:
504 self.pacs_unvers.remove(pac)
506 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
508 def delPackage(self, pac, force = False):
509 state = self.get_state(pac.name)
511 if state == ' ' or state == 'D':
513 for file in pac.filenamelist + pac.filenamelist_unvers:
514 filestate = pac.status(file)
515 if filestate == 'M' or filestate == 'C' or \
516 filestate == 'A' or filestate == '?':
519 del_files.append(file)
520 if can_delete or force:
521 for file in del_files:
522 pac.delete_localfile(file)
523 if pac.status(file) != '?':
524 pac.delete_storefile(file)
525 # this is not really necessary
526 pac.put_on_deletelist(file)
527 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
528 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
529 pac.write_deletelist()
530 self.set_state(pac.name, 'D')
531 self.write_packages()
533 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
536 delete_dir(pac.absdir)
537 self.del_package_node(pac.name)
538 self.write_packages()
539 print statfrmt('D', pac.name)
541 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
543 print 'package is not under version control'
545 print 'unsupported state'
547 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
550 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
552 # we need to make sure that the _packages file will be written (even if an exception
555 # update complete project
556 # packages which no longer exist upstream
557 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
559 for pac in upstream_del:
560 p = Package(os.path.join(self.dir, pac))
561 self.delPackage(p, force = True)
562 delete_storedir(p.storedir)
567 self.pac_root.remove(self.get_package_node(p.name))
568 self.pacs_have.remove(pac)
570 for pac in self.pacs_have:
571 state = self.get_state(pac)
572 if pac in self.pacs_broken:
573 if self.get_state(pac) != 'A':
574 checkout_package(self.apiurl, self.name, pac,
575 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
576 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
579 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
581 if expand_link and p.islink() and not p.isexpanded():
584 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
586 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
589 rev = p.linkinfo.xsrcmd5
590 print 'Expanding to rev', rev
591 elif unexpand_link and p.islink() and p.isexpanded():
592 rev = p.linkinfo.lsrcmd5
593 print 'Unexpanding to rev', rev
594 elif p.islink() and p.isexpanded():
596 print 'Updating %s' % p.name
597 p.update(rev, service_files)
601 # TODO: Package::update has to be fixed to behave like svn does
602 if pac in self.pacs_broken:
603 checkout_package(self.apiurl, self.name, pac,
604 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
605 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
607 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
608 elif state == 'A' and pac in self.pacs_available:
609 # file/dir called pac already exists and is under version control
610 msg = 'can\'t add package \'%s\': Object already exists' % pac
611 raise oscerr.PackageExists(self.name, pac, msg)
616 print 'unexpected state.. package \'%s\'' % pac
618 self.checkout_missing_pacs(expand_link=not unexpand_link)
620 self.write_packages()
622 def commit(self, pacs = (), msg = '', files = {}, validators = None, verbose_validation = None):
627 if files.has_key(pac):
629 state = self.get_state(pac)
631 self.commitNewPackage(pac, msg, todo, validators=validators, verbose_validation=verbose_validation)
633 self.commitDelPackage(pac)
635 # display the correct dir when sending the changes
636 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
639 p = Package(os.path.join(self.dir, pac))
641 p.commit(msg, validators=validators, verbose_validation=verbose_validation)
642 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
643 print 'osc: \'%s\' is not under version control' % pac
644 elif pac in self.pacs_broken:
645 print 'osc: \'%s\' package not found' % pac
647 self.commitExtPackage(pac, msg, todo)
649 self.write_packages()
651 # if we have packages marked as '!' we cannot commit
652 for pac in self.pacs_broken:
653 if self.get_state(pac) != 'D':
654 msg = 'commit failed: package \'%s\' is missing' % pac
655 raise oscerr.PackageMissing(self.name, pac, msg)
657 for pac in self.pacs_have:
658 state = self.get_state(pac)
661 Package(os.path.join(self.dir, pac)).commit(msg, validators=validators, verbose_validation=verbose_validation)
663 self.commitDelPackage(pac)
665 self.commitNewPackage(pac, msg, validators=validators, verbose_validation=verbose_validation)
667 self.write_packages()
669 def commitNewPackage(self, pac, msg = '', files = [], validators = None, verbose_validation = None):
670 """creates and commits a new package if it does not exist on the server"""
671 if pac in self.pacs_available:
672 print 'package \'%s\' already exists' % pac
674 user = conf.get_apiurl_usr(self.apiurl)
675 edit_meta(metatype='pkg',
676 path_args=(quote_plus(self.name), quote_plus(pac)),
681 # display the correct dir when sending the changes
683 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
687 p = Package(os.path.join(self.dir, pac))
689 print statfrmt('Sending', os.path.normpath(p.dir))
690 p.commit(msg=msg, validators=validators, verbose_validation=verbose_validation)
691 self.set_state(pac, ' ')
694 def commitDelPackage(self, pac):
695 """deletes a package on the server and in the working copy"""
697 # display the correct dir when sending the changes
698 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
701 pac_dir = os.path.join(self.dir, pac)
702 p = Package(os.path.join(self.dir, pac))
703 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
704 delete_storedir(p.storedir)
710 pac_dir = os.path.join(self.dir, pac)
711 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
712 print statfrmt('Deleting', getTransActPath(pac_dir))
713 delete_package(self.apiurl, self.name, pac)
714 self.del_package_node(pac)
716 def commitExtPackage(self, pac, msg, files = []):
717 """commits a package from an external project"""
718 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
721 pac_path = os.path.join(self.dir, pac)
723 project = store_read_project(pac_path)
724 package = store_read_package(pac_path)
725 apiurl = store_read_apiurl(pac_path)
726 if meta_exists(metatype='pkg',
727 path_args=(quote_plus(project), quote_plus(package)),
729 create_new=False, apiurl=apiurl):
730 p = Package(pac_path)
734 user = conf.get_apiurl_usr(self.apiurl)
735 edit_meta(metatype='pkg',
736 path_args=(quote_plus(project), quote_plus(package)),
741 p = Package(pac_path)
747 r.append('*****************************************************')
748 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
749 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
750 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
751 r.append('*****************************************************')
757 """represent a package (its directory) and read/keep/write its metadata"""
758 def __init__(self, workingdir, progress_obj=None, limit_size=None, meta=None):
759 self.dir = workingdir
760 self.absdir = os.path.abspath(self.dir)
761 self.storedir = os.path.join(self.absdir, store)
762 self.progress_obj = progress_obj
764 self.limit_size = limit_size
765 if limit_size == 0:
766 self.limit_size = None
768 check_store_version(self.dir)
770 self.prjname = store_read_project(self.dir)
771 self.name = store_read_package(self.dir)
772 self.apiurl = store_read_apiurl(self.dir)
774 self.update_datastructs()
778 self.todo_delete = []
781 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
782 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
785 def addfile(self, n):
786 st = os.stat(os.path.join(self.dir, n))
787 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
789 def delete_file(self, n, force=False):
790 """deletes a file if possible and marks the file as deleted"""
793 state = self.status(n)
797 if state in ['?', 'A', 'M'] and not force:
798 return (False, state)
799 self.delete_localfile(n)
801 self.put_on_deletelist(n)
802 self.write_deletelist()
804 self.delete_storefile(n)
807 def delete_storefile(self, n):
808 try: os.unlink(os.path.join(self.storedir, n))
811 def delete_localfile(self, n):
812 try: os.unlink(os.path.join(self.dir, n))
815 def put_on_deletelist(self, n):
816 if n not in self.to_be_deleted:
817 self.to_be_deleted.append(n)
819 def put_on_conflictlist(self, n):
820 if n not in self.in_conflict:
821 self.in_conflict.append(n)
823 def clear_from_conflictlist(self, n):
824 """delete an entry from the file, and remove the file if it would be empty"""
825 if n in self.in_conflict:
827 filename = os.path.join(self.dir, n)
828 storefilename = os.path.join(self.storedir, n)
829 myfilename = os.path.join(self.dir, n + '.mine')
830 if self.islinkrepair() or self.ispulled():
831 upfilename = os.path.join(self.dir, n + '.new')
833 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
836 os.unlink(myfilename)
837 # the working copy may be updated, so the .r* ending may be obsolete...
839 os.unlink(upfilename)
840 if self.islinkrepair() or self.ispulled():
841 os.unlink(os.path.join(self.dir, n + '.old'))
845 self.in_conflict.remove(n)
847 self.write_conflictlist()
849 def write_meta_mode(self):
851 fname = os.path.join(self.storedir, '_meta_mode')
857 os.unlink(os.path.join(self.storedir, '_meta_mode'))
861 def write_sizelimit(self):
862 if self.size_limit and self.size_limit <= 0:
864 os.unlink(os.path.join(self.storedir, '_size_limit'))
868 fname = os.path.join(self.storedir, '_size_limit')
870 f.write(str(self.size_limit))
873 def write_deletelist(self):
874 if len(self.to_be_deleted) == 0:
876 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
880 fname = os.path.join(self.storedir, '_to_be_deleted')
882 f.write('\n'.join(self.to_be_deleted))
886 def delete_source_file(self, n):
887 """delete local a source file"""
888 self.delete_localfile(n)
889 self.delete_storefile(n)
891 def delete_remote_source_file(self, n):
892 """delete a remote source file (e.g. from the server)"""
894 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
897 def put_source_file(self, n):
899 # escaping '+' in the URL path (note: not in the URL query string) is
900 # only a workaround for ruby on rails, which swallows it otherwise
902 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
903 http_PUT(u, file = os.path.join(self.dir, n))
905 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
907 def commit(self, msg='', validators=None, verbose_validation=None):
908 # commit only if the upstream revision is the same as the working copy's
909 upstream_rev = self.latest_rev()
910 if self.rev != upstream_rev:
911 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
914 self.todo = self.filenamelist_unvers + self.filenamelist
916 pathn = getTransActPath(self.dir)
921 for validator in sorted(os.listdir(validators)):
922 if validator.startswith('.'):
924 fn = os.path.join(validators, validator)
925 mode = os.stat(fn).st_mode
926 if stat.S_ISREG(mode):
927 if verbose_validation:
928 print "osc runs source service:", fn
929 p = subprocess.Popen([fn, "--verbose"], close_fds=True)
931 p = subprocess.Popen([fn], close_fds=True)
933 raise oscerr.RuntimeError(p.stdout, validator )
935 have_conflicts = False
936 for filename in self.todo:
937 if not filename.startswith('_service:') and not filename.startswith('_service_'):
938 st = self.status(filename)
940 self.todo.remove(filename)
941 elif st == 'A' or st == 'M':
942 self.todo_send.append(filename)
943 print statfrmt('Sending', os.path.join(pathn, filename))
945 self.todo_delete.append(filename)
946 print statfrmt('Deleting', os.path.join(pathn, filename))
948 have_conflicts = True
951 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
954 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
955 print 'nothing to do for package %s' % self.name
958 if self.islink() and self.isexpanded():
959 # resolve the link into the upload revision
960 # XXX: do this always?
961 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
962 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
965 print 'Transmitting file data ',
967 for filename in self.todo_delete:
968 # do not touch local files on commit --
969 # delete remotely instead
970 self.delete_remote_source_file(filename)
971 self.to_be_deleted.remove(filename)
972 for filename in self.todo_send:
973 sys.stdout.write('.')
975 self.put_source_file(filename)
977 # all source files are committed - now comes the log
978 query = { 'cmd' : 'commit',
980 'user' : conf.get_apiurl_usr(self.apiurl),
982 if self.islink() and self.isexpanded():
983 query['keeplink'] = '1'
984 if conf.config['linkcontrol'] or self.isfrozen():
985 query['linkrev'] = self.linkinfo.srcmd5
987 query['repairlink'] = '1'
988 query['linkrev'] = self.get_pulled_srcmd5()
989 if self.islinkrepair():
990 query['repairlink'] = '1'
991 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
994 # delete upload revision
996 query = { 'cmd': 'deleteuploadrev' }
997 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
1003 root = ET.parse(f).getroot()
1004 self.rev = int(root.get('rev'))
1006 print 'Committed revision %s.' % self.rev
1009 os.unlink(os.path.join(self.storedir, '_pulled'))
1010 if self.islinkrepair():
1011 os.unlink(os.path.join(self.storedir, '_linkrepair'))
1012 self.linkrepair = False
1013 # XXX: mark package as invalid?
1014 print 'The source link has been repaired. This directory can now be removed.'
1015 if self.islink() and self.isexpanded():
1016 self.update_local_filesmeta(revision=self.latest_rev())
1018 self.update_local_filesmeta()
1019 self.write_deletelist()
1020 self.update_datastructs()
1022 if self.filenamelist.count('_service'):
1023 print 'The package contains a source service.'
1024 for filename in self.todo:
1025 if filename.startswith('_service:') and os.path.exists(filename):
1026 os.unlink(filename) # remove local files
1027 print_request_list(self.apiurl, self.prjname, self.name)
1029 def write_conflictlist(self):
1030 if len(self.in_conflict) == 0:
1032 os.unlink(os.path.join(self.storedir, '_in_conflict'))
1036 fname = os.path.join(self.storedir, '_in_conflict')
1037 f = open(fname, 'w')
1038 f.write('\n'.join(self.in_conflict))
1042 def updatefile(self, n, revision):
1043 filename = os.path.join(self.dir, n)
1044 storefilename = os.path.join(self.storedir, n)
1045 mtime = self.findfilebyname(n).mtime
1047 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1048 revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1050 shutil.copyfile(filename, storefilename)
1052 def mergefile(self, n):
1053 filename = os.path.join(self.dir, n)
1054 storefilename = os.path.join(self.storedir, n)
1055 myfilename = os.path.join(self.dir, n + '.mine')
1056 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1057 os.rename(filename, myfilename)
1059 mtime = self.findfilebyname(n).mtime
1060 get_source_file(self.apiurl, self.prjname, self.name, n,
1061 revision=self.rev, targetfilename=upfilename,
1062 progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1064 if binary_file(myfilename) or binary_file(upfilename):
1066 shutil.copyfile(upfilename, filename)
1067 shutil.copyfile(upfilename, storefilename)
1068 self.in_conflict.append(n)
1069 self.write_conflictlist()
1073 # diff3 OPTIONS... MINE OLDER YOURS
1074 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1075 # we would rather use the subprocess module, but it is not available before 2.4
1076 ret = subprocess.call(merge_cmd, shell=True)
1078 # "An exit status of 0 means `diff3' was successful, 1 means some
1079 # conflicts were found, and 2 means trouble."
1081 # merge was successful... clean up
1082 shutil.copyfile(upfilename, storefilename)
1083 os.unlink(upfilename)
1084 os.unlink(myfilename)
1087 # unsuccessful merge
1088 shutil.copyfile(upfilename, storefilename)
1089 self.in_conflict.append(n)
1090 self.write_conflictlist()
1093 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1094 print >>sys.stderr, 'the command line was:'
1095 print >>sys.stderr, merge_cmd
1100 def update_local_filesmeta(self, revision=None):
1102 Update the local _files file in the store.
1103 It is replaced with the version pulled from upstream.
1105 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size, meta=self.meta))
1106 store_write_string(self.absdir, '_files', meta)
1108 def update_datastructs(self):
1110 Update the internal data structures if the local _files
1111 file has changed (e.g. update_local_filesmeta() has been
1115 files_tree = read_filemeta(self.dir)
1116 files_tree_root = files_tree.getroot()
1118 self.rev = files_tree_root.get('rev')
1119 self.srcmd5 = files_tree_root.get('srcmd5')
1121 self.linkinfo = Linkinfo()
1122 self.linkinfo.read(files_tree_root.find('linkinfo'))
1124 self.filenamelist = []
1127 for node in files_tree_root.findall('entry'):
1129 f = File(node.get('name'),
1131 int(node.get('size')),
1132 int(node.get('mtime')))
1133 if node.get('skipped'):
1134 self.skipped.append(f.name)
1136 # okay, a very old version of _files, which didn't contain any metadata yet...
1137 f = File(node.get('name'), '', 0, 0)
1138 self.filelist.append(f)
1139 self.filenamelist.append(f.name)
1141 self.to_be_deleted = read_tobedeleted(self.dir)
1142 self.in_conflict = read_inconflict(self.dir)
1143 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1144 self.size_limit = read_sizelimit(self.dir)
1145 self.meta = read_meta_mode(self.dir)
1147 # gather unversioned files, but ignore some stuff
1148 self.excluded = [ i for i in os.listdir(self.dir)
1149 for j in conf.config['exclude_glob']
1150 if fnmatch.fnmatch(i, j) ]
1151 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1152 if i not in self.excluded
1153 if i not in self.filenamelist ]
1156 """tells us if the package is a link (has 'linkinfo').
1157 A package with linkinfo is a package which links to another package.
1158 Returns True if the package is a link, otherwise False."""
1159 return self.linkinfo.islink()
1161 def isexpanded(self):
1162 """tells us if the package is a link which is expanded.
1163 Returns True if the package is expanded, otherwise False."""
1164 return self.linkinfo.isexpanded()
1166 def islinkrepair(self):
1167 """tells us if we are repairing a broken source link."""
1168 return self.linkrepair
1171 """tells us if we have pulled a link."""
1172 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1175 """tells us if the link is frozen."""
1176 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1178 def get_pulled_srcmd5(self):
1180 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1181 pulledrev = line.strip()
1184 def haslinkerror(self):
1186 Returns True if the link is broken otherwise False.
1187 If the package is not a link it returns False.
1189 return self.linkinfo.haserror()
1191 def linkerror(self):
1193 Returns an error message if the link is broken otherwise None.
1194 If the package is not a link it returns None.
1196 return self.linkinfo.error
1198 def update_local_pacmeta(self):
1200 Update the local _meta file in the store.
1201 It is replaced with the version pulled from upstream.
1203 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1204 store_write_string(self.absdir, '_meta', meta)
1206 def findfilebyname(self, n):
1207 for i in self.filelist:
1211 def status(self, n):
1215 file storefile file present STATUS
1216 exists exists in _files
1219 x x x ' ' if digest differs: 'M'
1220 and if in conflicts file: 'C'
1222 x - x 'D' and listed in _to_be_deleted
1224 - x - 'D' (when file in working copy is already deleted)
1225 - - x 'F' (new in repo, but not yet in working copy)
1230 known_by_meta = False
1232 exists_in_store = False
1233 if n in self.filenamelist:
1234 known_by_meta = True
1235 if os.path.exists(os.path.join(self.absdir, n)):
1237 if os.path.exists(os.path.join(self.storedir, n)):
1238 exists_in_store = True
1241 if n in self.skipped:
1243 elif exists and not exists_in_store and known_by_meta:
1245 elif n in self.to_be_deleted:
1247 elif n in self.in_conflict:
1249 elif exists and exists_in_store and known_by_meta:
1250 #print self.findfilebyname(n)
1251 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1255 elif exists and not exists_in_store and not known_by_meta:
1257 elif exists and exists_in_store and not known_by_meta:
1259 elif not exists and exists_in_store and known_by_meta:
1261 elif not exists and not exists_in_store and known_by_meta:
1263 elif not exists and exists_in_store and not known_by_meta:
1265 elif not exists and not exists_in_store and not known_by_meta:
1266 # this case shouldn't happen (unless there was a typo in the filename etc.)
1267 raise IOError('osc: \'%s\' is not under version control' % n)
1271 def comparePac(self, cmp_pac):
1273 This method compares the local filelist with
1274 the filelist of the passed package to see which files
1275 were added, removed and changed.
1282 for file in self.filenamelist+self.filenamelist_unvers:
1283 state = self.status(file)
1284 if file in self.skipped:
1286 if state == 'A' and (not file in cmp_pac.filenamelist):
1287 added_files.append(file)
1288 elif file in cmp_pac.filenamelist and state == 'D':
1289 removed_files.append(file)
1290 elif state == ' ' and not file in cmp_pac.filenamelist:
1291 added_files.append(file)
1292 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1293 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1294 changed_files.append(file)
1295 for file in cmp_pac.filenamelist:
1296 if not file in self.filenamelist:
1297 removed_files.append(file)
1298 removed_files = set(removed_files)
1300 return changed_files, added_files, removed_files
1302 def merge(self, otherpac):
1303 self.todo += otherpac.todo
1317 '\n '.join(self.filenamelist),
1325 def read_meta_from_spec(self, spec = None):
1330 # scan for spec files
1331 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1332 if len(speclist) == 1:
1333 specfile = speclist[0]
1334 elif len(speclist) > 1:
1335 print 'the following specfiles were found:'
1336 for file in speclist:
1338 print 'please specify one with --specfile'
1341 print 'no specfile was found - please specify one ' \
1345 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1346 self.summary = data['Summary']
1347 self.url = data['Url']
1348 self.descr = data['%description']
1351 def update_package_meta(self, force=False):
1353 for the updatepacmetafromspec subcommand
1354 the force argument suppresses the confirmation question
1357 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1359 root = ET.fromstring(m)
1360 root.find('title').text = self.summary
1361 root.find('description').text = ''.join(self.descr)
1362 url = root.find('url')
1364 url = ET.SubElement(root, 'url')
1367 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1368 mf = metafile(u, ET.tostring(root))
1371 print '*' * 36, 'old', '*' * 36
1373 print '*' * 36, 'new', '*' * 36
1374 print ET.tostring(root)
1376 repl = raw_input('Write? (y/N/e) ')
1387 def mark_frozen(self):
1388 store_write_string(self.absdir, '_frozenlink', '')
1390 print "The link in this package is currently broken. Checking"
1391 print "out the last working version instead; please use 'osc pull'"
1392 print "to repair the link."
1395 def unmark_frozen(self):
1396 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1397 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1399 def latest_rev(self):
1400 if self.islinkrepair():
1401 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1, meta=self.meta)
1402 elif self.islink() and self.isexpanded():
1403 if self.isfrozen() or self.ispulled():
1404 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1407 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, meta=self.meta)
1410 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5, meta=self.meta)
1412 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base", meta=self.meta)
1415 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name, meta=self.meta)
1418 def update(self, rev = None, service_files = False, limit_size = None):
1419 # save filelist and (modified) status before replacing the meta file
1420 saved_filenames = self.filenamelist
1421 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1425 self.limit_size = limit_size
1427 self.limit_size = read_sizelimit(self.dir)
1428 self.update_local_filesmeta(rev)
1429 self = Package(self.dir, progress_obj=self.progress_obj)
1431 # which files no longer exist upstream?
1432 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1434 pathn = getTransActPath(self.dir)
1436 for filename in saved_filenames:
1437 if filename in self.skipped:
1439 if not filename.startswith('_service:') and filename in disappeared:
1440 print statfrmt('D', os.path.join(pathn, filename))
1441 # keep file if it has local modifications
1442 if oldp.status(filename) == ' ':
1443 self.delete_localfile(filename)
1444 self.delete_storefile(filename)
1446 for filename in self.filenamelist:
1447 if filename in self.skipped:
1450 state = self.status(filename)
1451 if not service_files and filename.startswith('_service:'):
1453 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1454 # no merge necessary... local file is changed, but upstream isn't
1456 elif state == 'M' and filename in saved_modifiedfiles:
1457 status_after_merge = self.mergefile(filename)
1458 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1460 self.updatefile(filename, rev)
1461 print statfrmt('U', os.path.join(pathn, filename))
1463 self.updatefile(filename, rev)
1464 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1466 self.updatefile(filename, rev)
1467 print statfrmt('A', os.path.join(pathn, filename))
1468 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1469 self.updatefile(filename, rev)
1470 self.delete_storefile(filename)
1471 print statfrmt('U', os.path.join(pathn, filename))
1475 self.update_local_pacmeta()
1477 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1478 print 'At revision %s.' % self.rev
1480 def run_source_services(self):
1481 if self.filenamelist.count('_service'):
1482 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1485 si.execute(self.absdir)
1487 def prepare_filelist(self):
1488 """Prepare a list of files, which will be processed by process_filelist
1489 method. This allows easy modifications of a file list in commit
1493 self.todo = self.filenamelist + self.filenamelist_unvers
1497 for f in [f for f in self.todo if not os.path.isdir(f)]:
1499 status = self.status(f)
1504 ret += "%s %s %s\n" % (action, status, f)
1507 # Edit a filelist for package \'%s\'
1509 # l, leave = leave a file as is
1510 # r, remove = remove a file
1511 # a, add = add a file
1513 # If you remove a file from the list, it will be left unchanged
1514 # If you remove all files, the commit will be aborted""" % self.name
1518 def edit_filelist(self):
1519 """Opens a package list in editor for editing. This allows easy
1520 modifications of it just by simple text editing
1524 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1525 f = os.fdopen(fd, 'w')
1526 f.write(self.prepare_filelist())
1528 mtime_orig = os.stat(filename).st_mtime
1531 run_editor(filename)
1532 mtime = os.stat(filename).st_mtime
1533 if mtime_orig < mtime:
1534 filelist = open(filename).readlines()
1538 raise oscerr.UserAbort()
1540 return self.process_filelist(filelist)
1542 def process_filelist(self, filelist):
1543 """Process a filelist - it add/remove or leave files. This depends on
1544 user input. If no file is processed, it raises an ValueError
1548 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1550 foo = line.split(' ')
1552 action, state, name = (foo[0], ' ', foo[3])
1554 action, state, name = (foo[0], foo[1], foo[2])
1557 action = action.lower()
1560 if action in ('r', 'remove'):
1561 if self.status(name) == '?':
1563 if name in self.todo:
1564 self.todo.remove(name)
1566 self.delete_file(name, True)
1567 elif action in ('a', 'add'):
1568 if self.status(name) != '?':
1569 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1572 elif action in ('l', 'leave'):
1575 raise ValueError("Unknow action `%s'" % action)
1578 raise ValueError("Empty filelist")
1581 """for objects to represent the review state in a request"""
1582 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1584 self.by_user = by_user
1585 self.by_group = by_group
1588 self.comment = comment
1591 """for objects to represent the "state" of a request"""
1592 def __init__(self, name=None, who=None, when=None, comment=None):
1596 self.comment = comment
1599 """represents an action"""
1600 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1602 self.src_project = src_project
1603 self.src_package = src_package
1604 self.src_rev = src_rev
1605 self.dst_project = dst_project
1606 self.dst_package = dst_package
1607 self.src_update = src_update
1610 """represent a request and holds its metadata
1611 it has methods to read in metadata from xml,
1612 different views, ..."""
1615 self.state = RequestState()
1618 self.last_author = None
1621 self.statehistory = []
1624 def read(self, root):
1625 self.reqid = int(root.get('id'))
1626 actions = root.findall('action')
1627 if len(actions) == 0:
1628 actions = [ root.find('submit') ] # for old style requests
1630 for action in actions:
1631 type = action.get('type', 'submit')
1633 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1634 if action.findall('source'):
1635 n = action.find('source')
1636 src_prj = n.get('project', None)
1637 src_pkg = n.get('package', None)
1638 src_rev = n.get('rev', None)
1639 if action.findall('target'):
1640 n = action.find('target')
1641 dst_prj = n.get('project', None)
1642 dst_pkg = n.get('package', None)
1643 if action.findall('options'):
1644 n = action.find('options')
1645 if n.findall('sourceupdate'):
1646 src_update = n.find('sourceupdate').text.strip()
1647 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1649 msg = 'invalid request format:\n%s' % ET.tostring(root)
1650 raise oscerr.APIError(msg)
1653 n = root.find('state')
1654 self.state.name, self.state.who, self.state.when \
1655 = n.get('name'), n.get('who'), n.get('when')
1657 self.state.comment = n.find('comment').text.strip()
1659 self.state.comment = None
1661 # read the review states
1662 for r in root.findall('review'):
1664 s.state = r.get('state')
1665 s.by_user = r.get('by_user')
1666 s.by_group = r.get('by_group')
1667 s.who = r.get('who')
1668 s.when = r.get('when')
1670 s.comment = r.find('comment').text.strip()
1673 self.reviews.append(s)
1675 # read the state history
1676 for h in root.findall('history'):
1678 s.name = h.get('name')
1679 s.who = h.get('who')
1680 s.when = h.get('when')
1682 s.comment = h.find('comment').text.strip()
1685 self.statehistory.append(s)
1686 self.statehistory.reverse()
1688 # read a description, if it exists
1690 n = root.find('description').text
1695 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1696 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1697 dst_prj, dst_pkg, src_update)
1700 def list_view(self):
1701 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1703 for a in self.actions:
1704 dst = "%s/%s" % (a.dst_project, a.dst_package)
1705 if a.src_package == a.dst_package:
1709 if a.type=="submit":
1710 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1711 if a.type=="change_devel":
1712 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1713 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1715 ret += '\n %s: %-50s %-20s ' % \
1716 (a.type, sr_source, dst)
1718 if self.statehistory and self.statehistory[0]:
1720 for h in self.statehistory:
1721 who.append("%s(%s)" % (h.who,h.name))
1723 ret += "\n From: %s" % (' -> '.join(who))
1725 txt = re.sub(r'[^\x20-\x7e\n]', '_', self.descr) # strip non-printable chars; Python re has no [:isprint:]
1727 lines = txt.splitlines()
1728 wrapper = textwrap.TextWrapper( width = 80,
1729 initial_indent=' Descr: ',
1730 subsequent_indent=' ')
1731 ret += "\n" + wrapper.fill(lines[0])
1732 wrapper.initial_indent = ' '
1733 for line in lines[1:]:
1734 ret += "\n" + wrapper.fill(line)
1740 def __cmp__(self, other):
1741 return cmp(self.reqid, other.reqid)
1745 for action in self.actions:
1746 action_list=action_list+" %s: " % (action.type)
1747 if action.type=="submit":
1750 r="(r%s)" % (action.src_rev)
1752 if action.src_update:
1753 m="(%s)" % (action.src_update)
1754 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1755 if action.dst_package:
1756 action_list=action_list+"/%s" % ( action.dst_package )
1757 elif action.type=="delete":
1758 action_list=action_list+" %s" % ( action.dst_project )
1759 if action.dst_package:
1760 action_list=action_list+"/%s" % ( action.dst_package )
1761 elif action.type=="change_devel":
1762 action_list=action_list+" %s/%s developed in %s/%s" % \
1763 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1764 action_list=action_list+"\n"
1779 self.state.name, self.state.when, self.state.who,
1782 if len(self.reviews):
1783 reviewitems = [ '%-10s %s %s %s %s %s' \
1784 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1785 for i in self.reviews ]
1786 s += '\nReview: ' + '\n '.join(reviewitems)
1789 if len(self.statehistory):
1790 histitems = [ '%-10s %s %s' \
1791 % (i.name, i.when, i.who) \
1792 for i in self.statehistory ]
1793 s += '\nHistory: ' + '\n '.join(histitems)
1800 """format time as Apr 02 18:19
1802 depending on whether it is in the current year
1806 if time.localtime()[0] == time.localtime(t)[0]:
1808 return time.strftime('%b %d %H:%M',time.localtime(t))
1810 return time.strftime('%b %d %Y',time.localtime(t))
1813 def is_project_dir(d):
1814 return os.path.exists(os.path.join(d, store, '_project')) and not \
1815 os.path.exists(os.path.join(d, store, '_package'))
1818 def is_package_dir(d):
1819 return os.path.exists(os.path.join(d, store, '_project')) and \
1820 os.path.exists(os.path.join(d, store, '_package'))
1822 def parse_disturl(disturl):
1823 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1824 revision), else raises an oscerr.WrongArgs exception
1827 m = DISTURL_RE.match(disturl)
1829 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1831 apiurl = m.group('apiurl')
1832 if apiurl.split('.')[0] != 'api':
1833 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1834 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
1836 def parse_buildlogurl(buildlogurl):
1837 """Parse a build log url, returns a tuple (apiurl, project, package,
1838 repository, arch), else raises oscerr.WrongArgs exception"""
1840 global BUILDLOGURL_RE
1842 m = BUILDLOGURL_RE.match(buildlogurl)
1844 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1846 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1849 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1850 This is handy to allow copy/paste a project/package combination in this form.
1852 Trailing slashes are removed before the split, because the split would
1853 otherwise give an additional empty string.
1861 def expand_proj_pack(args, idx=0, howmany=0):
1862 """looks for occurance of '.' at the position idx.
1863 If howmany is 2, both proj and pack are expanded together
1864 using the current directory, or none of them, if not possible.
1865 If howmany is 0, proj is expanded if possible, then, if there
1866 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1867 expanded, if possible.
1868 If howmany is 1, only proj is expanded if possible.
1870 If args[idx] does not exist, an implicit '.' is assumed.
1871 If not enough elements up to idx exist, an error is raised.
1873 See also parseargs(args), slash_split(args), findpacs(args)
1874 All these need unification, somehow.
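    Example (illustrative): called inside a package working copy,
        expand_proj_pack(['.', '.'])
    expands both dots, yielding ['<project of cwd>', '<package of cwd>'].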
1877 # print args,idx,howmany
1880 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1882 if len(args) == idx:
1884 if args[idx+0] == '.':
1885 if howmany == 0 and len(args) > idx+1:
1886 if args[idx+1] == '.':
1888 # remove one dot and make sure to expand both proj and pack
1893 # print args,idx,howmany
1895 args[idx+0] = store_read_project('.')
1898 package = store_read_package('.')
1899 args.insert(idx+1, package)
1903 package = store_read_package('.')
1904 args.insert(idx+1, package)
1908 def findpacs(files, progress_obj=None):
1909 """collect Package objects belonging to the given files
1910 and make sure each Package is returned only once"""
1913 p = filedir_to_pac(f, progress_obj)
1916 if i.name == p.name:
1926 def filedir_to_pac(f, progress_obj=None):
1927 """Takes a working copy path, or a path to a file inside a working copy,
1928 and returns a Package object instance
1930 If the argument was a filename, add it onto the "todo" list of the Package """
1932 if os.path.isdir(f):
1934 p = Package(wd, progress_obj=progress_obj)
1936 wd = os.path.dirname(f) or os.curdir
1937 p = Package(wd, progress_obj=progress_obj)
1938 p.todo = [ os.path.basename(f) ]
1942 def read_filemeta(dir):
1944 r = ET.parse(os.path.join(dir, store, '_files'))
1945 except SyntaxError, e:
1946 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1947 'When parsing .osc/_files, the following error was encountered:\n'
1952 def read_tobedeleted(dir):
1954 fname = os.path.join(dir, store, '_to_be_deleted')
1956 if os.path.exists(fname):
1957 r = [ line.strip() for line in open(fname) ]
1962 def read_meta_mode(dir):
1964 fname = os.path.join(dir, store, '_meta_mode')
1966 if os.path.exists(fname):
1967 r = open(fname).readline()
1969 if r is None or not r == "true":
1973 def read_sizelimit(dir):
1975 fname = os.path.join(dir, store, '_size_limit')
1977 if os.path.exists(fname):
1978 r = open(fname).readline()
1980 if r is None or not r.isdigit():
1984 def read_inconflict(dir):
1986 fname = os.path.join(dir, store, '_in_conflict')
1988 if os.path.exists(fname):
1989 r = [ line.strip() for line in open(fname) ]
1994 def parseargs(list_of_args):
1995 """Convenience method osc's commandline argument parsing.
1997 If called with an empty tuple (or list), return a list containing the current directory.
1998 Otherwise, return a list of the arguments."""
2000 return list(list_of_args)
2005 def statfrmt(statusletter, filename):
2006 return '%s %s' % (statusletter, filename)
2009 def pathjoin(a, *p):
2010 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2011 path = os.path.join(a, *p)
2012 if path.startswith('./'):
2017 def makeurl(baseurl, l, query=[]):
2018 """Given a list of path compoments, construct a complete URL.
2020 Optional parameters for a query string can be given as a list, as a
2021 dictionary, or as an already assembled string.
2022 In case of a dictionary, the parameters will be urlencoded by this
2023 function. In case of a list not -- this is to be backwards compatible.
2026 if conf.config['verbose'] > 1:
2027 print 'makeurl:', baseurl, l, query
2029 if type(query) == type(list()):
2030 query = '&'.join(query)
2031 elif type(query) == type(dict()):
2032 query = urlencode(query)
2034 scheme, netloc = urlsplit(baseurl)[0:2]
2035 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
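# Example (illustrative values):
#   makeurl('https://api.opensuse.org', ['source', 'home:user', 'pkg'], query={'rev': 'latest'})
# returns 'https://api.opensuse.org/source/home:user/pkg?rev=latest'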
2038 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2039 """wrapper around urllib2.urlopen for error handling,
2040 and to support additional (PUT, DELETE) methods"""
2044 if conf.config['http_debug']:
2047 print '--', method, url
2049 if method == 'POST' and not file and not data:
2050 # adding data to an urllib2 request transforms it into a POST
2053 req = urllib2.Request(url)
2054 api_host_options = {}
2056 api_host_options = conf.get_apiurl_api_host_options(url)
2057 for header, value in api_host_options['http_headers']:
2058 req.add_header(header, value)
2060 # "external" request (url is no apiurl)
2063 req.get_method = lambda: method
2065 # POST requests are application/x-www-form-urlencoded by default
2066 # since we change the request into PUT, we also need to adjust the content type header
2067 if method == 'PUT' or (method == 'POST' and data):
2068 req.add_header('Content-Type', 'application/octet-stream')
2070 if type(headers) == type({}):
2071 for i in headers.keys():
2073 req.add_header(i, headers[i])
2075 if file and not data:
2076 size = os.path.getsize(file)
2078 data = open(file, 'rb').read()
2081 filefd = open(file, 'rb')
2083 if sys.platform[:3] != 'win':
2084 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2086 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2088 except EnvironmentError, e:
2090 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2091 '\non a filesystem which does not support this.' % (e, file))
2092 elif hasattr(e, 'winerror') and e.winerror == 5:
2093 # falling back to the default io
2094 data = open(file, 'rb').read()
2098 if conf.config['debug']: print method, url
2100 old_timeout = socket.getdefaulttimeout()
2101 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2102 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2103 socket.setdefaulttimeout(timeout)
2105 fd = urllib2.urlopen(req, data=data)
2107 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2108 socket.setdefaulttimeout(old_timeout)
2109 if hasattr(conf.cookiejar, 'save'):
2110 conf.cookiejar.save(ignore_discard=True)
2112 if filefd: filefd.close()
2117 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2118 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2119 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2120 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
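# Typical usage of these wrappers elsewhere in this module (sketch, values illustrative):
#   f = http_GET(makeurl(apiurl, ['source', prj]))
#   root = ET.parse(f).getroot()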
2123 def init_project_dir(apiurl, dir, project):
2124 if not os.path.exists(dir):
2125 if conf.config['checkout_no_colon']:
2126 os.makedirs(dir) # helpful with checkout_no_colon
2129 if not os.path.exists(os.path.join(dir, store)):
2130 os.mkdir(os.path.join(dir, store))
2132 # print 'project=',project,' dir=',dir
2133 store_write_project(dir, project)
2134 store_write_apiurl(dir, apiurl)
2135 if conf.config['do_package_tracking']:
2136 store_write_initial_packages(dir, project, [])
2138 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=None):
2139 if not os.path.isdir(store):
2142 f = open('_project', 'w')
2143 f.write(project + '\n')
2145 f = open('_package', 'w')
2146 f.write(package + '\n')
2150 f = open('_meta_mode', 'w')
2155 f = open('_size_limit', 'w')
2156 f.write(str(limit_size))
2160 f = open('_files', 'w')
2161 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta)))
2165 ET.ElementTree(element=ET.Element('directory')).write('_files')
2167 f = open('_osclib_version', 'w')
2168 f.write(__store_version__ + '\n')
2171 store_write_apiurl(os.path.pardir, apiurl)
2177 def check_store_version(dir):
2178 versionfile = os.path.join(dir, store, '_osclib_version')
2180 v = open(versionfile).read().strip()
2185 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2186 if os.path.exists(os.path.join(dir, '.svn')):
2187 msg = msg + '\nTry svn instead of osc.'
2188 raise oscerr.NoWorkingCopy(msg)
2190 if v != __store_version__:
2191 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2192 # old but compatible working copy format: no migration needed, just update the stored version
2193 f = open(versionfile, 'w')
2194 f.write(__store_version__ + '\n')
2197 msg = 'The osc metadata of your working copy "%s"' % dir
2198 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2199 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2200 raise oscerr.WorkingCopyWrongVersion, msg
2203 def meta_get_packagelist(apiurl, prj, deleted=None):
2207 query['deleted'] = 1
2209 u = makeurl(apiurl, ['source', prj], query)
2211 root = ET.parse(f).getroot()
2212 return [ node.get('name') for node in root.findall('entry') ]
2215 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2216 """return a list of file names,
2217 or a list of File() instances if verbose=True"""
2223 query['rev'] = revision
2225 query['rev'] = 'latest'
2227 u = makeurl(apiurl, ['source', prj, package], query=query)
2229 root = ET.parse(f).getroot()
2232 return [ node.get('name') for node in root.findall('entry') ]
2236 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2237 rev = root.get('rev')
2238 for node in root.findall('entry'):
2239 f = File(node.get('name'),
2241 int(node.get('size')),
2242 int(node.get('mtime')))
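# Illustrative meta_get_filelist() calls (project and package are only
# examples): without verbose a plain list of file names is returned; with
# verbose=True the File() helper objects built above are returned instead
# (assumed here to expose name, md5, size and mtime attributes).
#
#   meta_get_filelist(apiurl, 'openSUSE:Factory', 'osc')
#   # -> ['osc.spec', 'osc-0.126.tar.gz']
#
#   for fi in meta_get_filelist(apiurl, 'openSUSE:Factory', 'osc', verbose=True):
#       print fi.name, fi.size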
2248 def meta_get_project_list(apiurl, deleted=None):
2251 query['deleted'] = 1
2253 u = makeurl(apiurl, ['source'], query)
2255 root = ET.parse(f).getroot()
2256 return sorted([ node.get('name') for node in root ])
2259 def show_project_meta(apiurl, prj):
2260 url = makeurl(apiurl, ['source', prj, '_meta'])
2262 return f.readlines()
2265 def show_project_conf(apiurl, prj):
2266 url = makeurl(apiurl, ['source', prj, '_config'])
2268 return f.readlines()
2271 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2272 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2276 except urllib2.HTTPError, e:
2277 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2281 def show_package_meta(apiurl, prj, pac, meta=None):
2286 # packages like _project and _pattern do not have a _meta file
2287 if pac.startswith('_'):
2290 url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
2293 return f.readlines()
2294 except urllib2.HTTPError, e:
2295 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2299 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2301 path.append('source')
2307 path.append('_attribute')
2309 path.append(attribute)
2312 query.append("with_default=1")
2314 query.append("with_project=1")
2315 url = makeurl(apiurl, path, query)
2318 return f.readlines()
2319 except urllib2.HTTPError, e:
2320 e.osc_msg = 'Error getting attribute meta for project \'%s\' package \'%s\'' % (prj, pac)
2324 def show_develproject(apiurl, prj, pac):
2325 m = show_package_meta(apiurl, prj, pac)
2327 return ET.fromstring(''.join(m)).find('devel').get('project')
2332 def show_pattern_metalist(apiurl, prj):
2333 url = makeurl(apiurl, ['source', prj, '_pattern'])
2337 except urllib2.HTTPError, e:
2338 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2340 r = [ node.get('name') for node in tree.getroot() ]
2345 def show_pattern_meta(apiurl, prj, pattern):
2346 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2349 return f.readlines()
2350 except urllib2.HTTPError, e:
2351 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2356 """metafile that can be manipulated and is stored back after manipulation."""
2357 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2361 self.change_is_required = change_is_required
2362 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2363 f = os.fdopen(fd, 'w')
2364 f.write(''.join(input))
2366 self.hash_orig = dgst(self.filename)
2369 hash = dgst(self.filename)
2370 if self.change_is_required and hash == self.hash_orig:
2371 print 'File unchanged. Not saving.'
2372 os.unlink(self.filename)
2375 print 'Sending meta data...'
2376 # don't do any exception handling... it's up to the caller what to do in case of an error
2378 http_PUT(self.url, file=self.filename)
2379 os.unlink(self.filename)
2385 run_editor(self.filename)
2389 except urllib2.HTTPError, e:
2390 error_help = "%d" % e.code
2391 if e.headers.get('X-Opensuse-Errorcode'):
2392 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2394 print >>sys.stderr, 'BuildService API error:', error_help
2395 # examine the error - we can't raise an exception because we might want to retry
2398 if '<summary>' in data:
2399 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2400 input = raw_input('Try again? ([y/N]): ')
2401 if input not in ['y', 'Y']:
2407 if os.path.exists(self.filename):
2408 print 'discarding %s' % self.filename
2409 os.unlink(self.filename)
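# Rough usage sketch for the metafile helper above (project, package and the
# edit/discard method names are assumed from the fragments above; edit_meta()
# below is the real call site): the XML is written to a temporary file,
# optionally edited, and uploaded again with http_PUT.
#
#   url = make_meta_url('pkg', ('openSUSE:Tools', 'osc'))
#   m = metafile(url, show_package_meta(conf.config['apiurl'], 'openSUSE:Tools', 'osc'))
#   m.edit()      # open $EDITOR on the temp file and send the result back
#   # m.discard() removes the temp file without uploading anything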
2412 # different types of metadata
2413 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2414 'template': new_project_templ,
2417 'pkg': { 'path' : 'source/%s/%s/_meta',
2418 'template': new_package_templ,
2421 'attribute': { 'path' : 'source/%s/%s/_meta',
2422 'template': new_attribute_templ,
2425 'prjconf': { 'path': 'source/%s/_config',
2429 'user': { 'path': 'person/%s',
2430 'template': new_user_template,
2433 'pattern': { 'path': 'source/%s/_pattern/%s',
2434 'template': new_pattern_template,
2439 def meta_exists(metatype,
2446 apiurl = conf.config['apiurl']
2447 url = make_meta_url(metatype, path_args, apiurl)
2449 data = http_GET(url).readlines()
2450 except urllib2.HTTPError, e:
2451 if e.code == 404 and create_new:
2452 data = metatypes[metatype]['template']
2454 data = StringIO(data % template_args).readlines()
2459 def make_meta_url(metatype, path_args=None, apiurl=None):
2461 apiurl = conf.config['apiurl']
2462 if metatype not in metatypes:
2463 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2464 path = metatypes[metatype]['path']
2467 path = path % path_args
2469 return makeurl(apiurl, [path])
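# Examples of the URLs produced by make_meta_url() (the apiurl is assumed):
#
#   make_meta_url('prj', 'openSUSE:Tools', 'https://api.opensuse.org')
#   # -> 'https://api.opensuse.org/source/openSUSE:Tools/_meta'
#
#   make_meta_url('pkg', ('openSUSE:Tools', 'osc'), 'https://api.opensuse.org')
#   # -> 'https://api.opensuse.org/source/openSUSE:Tools/osc/_meta'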
2472 def edit_meta(metatype,
2477 change_is_required=False,
2481 apiurl = conf.config['apiurl']
2483 data = meta_exists(metatype,
2486 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2490 change_is_required = True
2492 url = make_meta_url(metatype, path_args, apiurl)
2493 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2501 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=None):
2504 query['rev'] = revision
2506 query['rev'] = 'latest'
2508 query['linkrev'] = linkrev
2509 elif conf.config['linkcontrol']:
2510 query['linkrev'] = 'base'
2516 query['emptylink'] = 1
2517 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2518 # look for "too large" files according to size limit and mark them
2519 root = ET.fromstring(''.join(f.readlines()))
2520 for e in root.findall('entry'):
2521 size = e.get('size')
2522 if size and limit_size and int(size) > int(limit_size):
2523 e.set('skipped', 'true')
2524 return ET.tostring(root)
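# Sketch of the limit_size handling above (entry name and numbers are made
# up; other attributes omitted): an <entry> whose size exceeds limit_size is
# returned with an additional skipped attribute, e.g.
#
#   <entry name="big-tarball.tar.bz2" size="104857600" skipped="true"/>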
2527 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=None):
2528 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
2529 return ET.fromstring(''.join(m)).get('srcmd5')
2532 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=None):
2533 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta)
2535 # only source link packages have a <linkinfo> element.
2536 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2544 raise oscerr.LinkExpandError(prj, pac, li.error)
2548 def show_upstream_rev(apiurl, prj, pac, meta=None):
2549 m = show_files_meta(apiurl, prj, pac, meta=meta)
2550 return ET.fromstring(''.join(m)).get('rev')
2553 def read_meta_from_spec(specfile, *args):
2554 import codecs, locale, re
2556 Read tags and sections from spec file. To read out
2557 a tag the passed argument mustn't end with a colon. To
2558 read out a section the passed argument must start with a '%'.
2560 This method returns a dictionary which contains the requested data.
2564 if not os.path.isfile(specfile):
2565 raise IOError('\'%s\' is not a regular file' % specfile)
2568 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2569 except UnicodeDecodeError:
2570 lines = open(specfile).readlines()
2577 if itm.startswith('%'):
2578 sections.append(itm)
2582 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2584 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2585 if m and m.group('val'):
2586 spec_data[tag] = m.group('val').strip()
2588 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2591 section_pat = '^%s\s*?$'
2592 for section in sections:
2593 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2595 start = lines.index(m.group()+'\n') + 1
2597 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2600 for line in lines[start:]:
2601 if line.startswith('%'):
2604 spec_data[section] = data
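# Illustrative read_meta_from_spec() call (file name and contents assumed):
# tags are requested without the trailing colon, sections with their leading
# '%', and both are used unchanged as keys of the returned dictionary.
#
#   data = read_meta_from_spec('osc.spec', 'Summary', 'Url', '%description')
#   print data['Summary']
#   print data['%description']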
2608 def run_pager(message):
2609 import tempfile, sys
2611 if not sys.stdout.isatty():
2614 tmpfile = tempfile.NamedTemporaryFile()
2615 tmpfile.write(message)
2617 pager = os.getenv('PAGER', default='less')
2618 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
2621 def run_editor(filename):
2622 if sys.platform[:3] != 'win':
2623 editor = os.getenv('EDITOR', default='vim')
2625 editor = os.getenv('EDITOR', default='notepad')
2627 return subprocess.call([ editor, filename ])
2629 def edit_message(footer='', template='', templatelen=30):
2630 delim = '--This line, and those below, will be ignored--\n'
2632 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2633 f = os.fdopen(fd, 'w')
2635 if templatelen is not None:
2636 lines = template.splitlines()
2637 template = '\n'.join(lines[:templatelen])
2638 if lines[templatelen:]:
2639 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2649 run_editor(filename)
2650 msg = open(filename).read().split(delim)[0].rstrip()
2655 input = raw_input('Log message not specified\n'
2656 'a)bort, c)ontinue, e)dit: ')
2658 raise oscerr.UserAbort()
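# Typical edit_message() call (footer and template strings are only
# examples): the user edits a temporary file, everything below the delimiter
# line is dropped from the returned message, and an empty message triggers
# the abort/continue/edit prompt above.
#
#   msg = edit_message(footer='package: foo', template='- update to 1.2.3')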
2668 def create_delete_request(apiurl, project, package, message):
2673 package = """package="%s" """ % (package)
2679 <action type="delete">
2680 <target project="%s" %s/>
2683 <description>%s</description>
2685 """ % (project, package,
2686 cgi.escape(message or ''))
2688 u = makeurl(apiurl, ['request'], query='cmd=create')
2689 f = http_POST(u, data=xml)
2691 root = ET.parse(f).getroot()
2692 return root.get('id')
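# For reference, the body posted by create_delete_request() looks roughly
# like this (project, package and message are made-up caller values):
#
#   <request>
#     <action type="delete">
#       <target project="home:user" package="foo" />
#     </action>
#     <description>no longer needed</description>
#   </request>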
2695 def create_change_devel_request(apiurl,
2696 devel_project, devel_package,
2703 <action type="change_devel">
2704 <source project="%s" package="%s" />
2705 <target project="%s" package="%s" />
2708 <description>%s</description>
2710 """ % (devel_project,
2714 cgi.escape(message or ''))
2716 u = makeurl(apiurl, ['request'], query='cmd=create')
2717 f = http_POST(u, data=xml)
2719 root = ET.parse(f).getroot()
2720 return root.get('id')
2723 # This creates an old style submit request for server api 1.0
2724 def create_submit_request(apiurl,
2725 src_project, src_package,
2726 dst_project=None, dst_package=None,
2727 message=None, orev=None, src_update=None):
2732 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2734 # Yes, this kind of xml construction is horrible
2739 packagexml = """package="%s" """ %( dst_package )
2740 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2741 # XXX: keep the old template for now in order to work with old obs instances
2743 <request type="submit">
2745 <source project="%s" package="%s" rev="%s"/>
2750 <description>%s</description>
2754 orev or show_upstream_rev(apiurl, src_project, src_package),
2757 cgi.escape(message or ""))
2759 u = makeurl(apiurl, ['request'], query='cmd=create')
2760 f = http_POST(u, data=xml)
2762 root = ET.parse(f).getroot()
2763 return root.get('id')
2766 def get_request(apiurl, reqid):
2767 u = makeurl(apiurl, ['request', reqid])
2769 root = ET.parse(f).getroot()
2776 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2779 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2780 f = http_POST(u, data=message)
2783 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2786 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2787 f = http_POST(u, data=message)
2791 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2793 if not 'all' in req_state:
2794 for state in req_state:
2795 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2797 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2799 # XXX: we cannot use the '|' in the xpath expression because it is not supported by the api
2803 todo['project'] = project
2805 todo['package'] = package
2806 for kind, val in todo.iteritems():
2807 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2808 'action/source/@%(kind)s=\'%(val)s\' or ' \
2809 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2810 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2812 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2813 for i in exclude_target_projects:
2814 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2815 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2817 if conf.config['verbose'] > 1:
2818 print '[ %s ]' % xpath
2819 res = search(apiurl, request=xpath)
2820 collection = res['request']
2822 for root in collection.findall('request'):
2828 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2829 """Return all new requests for all projects/packages where is user is involved"""
2831 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2832 for i in res['project_id'].findall('project'):
2833 projpkgs[i.get('name')] = []
2834 for i in res['package_id'].findall('package'):
2835 if not i.get('project') in projpkgs.keys():
2836 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2838 for prj, pacs in projpkgs.iteritems():
2840 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2844 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2845 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2846 xpath = xpath_join(xpath, xp, inner=True)
2848 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2849 if not 'all' in req_state:
2851 for state in req_state:
2852 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2853 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2854 res = search(apiurl, request=xpath)
2856 for root in res['request'].findall('request'):
2862 def get_request_log(apiurl, reqid):
2863 r = get_request(conf.config['apiurl'], reqid)
2865 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2866 # the description of the request is used for the initial log entry
2867 # otherwise its comment attribute would contain None
2868 if len(r.statehistory) >= 1:
2869 r.statehistory[-1].comment = r.descr
2871 r.state.comment = r.descr
2872 for state in [ r.state ] + r.statehistory:
2873 s = frmt % (state.name, state.who, state.when, str(state.comment))
2878 def get_user_meta(apiurl, user):
2879 u = makeurl(apiurl, ['person', quote_plus(user)])
2882 return ''.join(f.readlines())
2883 except urllib2.HTTPError:
2884 print 'user \'%s\' not found' % user
2888 def get_user_data(apiurl, user, *tags):
2889 """get specified tags from the user meta"""
2890 meta = get_user_meta(apiurl, user)
2893 root = ET.fromstring(meta)
2896 if root.find(tag).text != None:
2897 data.append(root.find(tag).text)
2901 except AttributeError:
2902 # this part is reached if the tags tuple contains an invalid tag
2903 print 'The xml file for user \'%s\' seems to be broken' % user
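# Illustrative get_user_data() call (user name and values are fictitious):
# assuming all requested tags exist in the person meta, their text values are
# collected and returned in the order they were requested.
#
#   get_user_data(apiurl, 'mmustermann', 'email', 'realname')
#   # -> ['mmustermann@example.com', 'Max Mustermann']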
2908 def download(url, filename, progress_obj = None, mtime = None):
2909 import tempfile, shutil
2912 prefix = os.path.basename(filename)
2913 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
2914 os.chmod(tmpfile, 0644)
2916 o = os.fdopen(fd, 'wb')
2917 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2920 shutil.move(tmpfile, filename)
2929 os.utime(filename, (-1, mtime))
2931 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None, meta=None):
2932 targetfilename = targetfilename or filename
2937 query['rev'] = revision
2938 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2939 download(u, targetfilename, progress_obj, mtime)
2941 def get_binary_file(apiurl, prj, repo, arch,
2944 target_filename = None,
2945 target_mtime = None,
2946 progress_meter = False):
2949 from meter import TextMeter