1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but it needs to stay to avoid breaking tools that use the osc library
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="Fedora_12">
81 <path project="Fedora:12" repository="standard" />
85 <repository name="SLE_11">
86 <path project="SUSE:SLE-11" repository="standard" />
95 new_package_templ = """\
96 <package name="%(name)s">
98 <title></title> <!-- Title of package -->
101 <!-- for long description -->
104 <person role="maintainer" userid="%(user)s"/>
105 <person role="bugowner" userid="%(user)s"/>
107 <url>PUT_UPSTREAM_URL_HERE</url>
111 use one of the examples below to disable building of this package
112 on a certain architecture, in a certain repository,
113 or a combination thereof:
115 <disable arch="x86_64"/>
116 <disable repository="SUSE_SLE-10"/>
117 <disable repository="SUSE_SLE-10" arch="x86_64"/>
119 Possible sections where you can use the tags above:
129 Please have a look at:
130 http://en.opensuse.org/Restricted_Formats
131 Packages containing formats listed there are NOT allowed to
132 be packaged in the openSUSE Buildservice and will be deleted!
139 new_attribute_templ = """\
141 <attribute namespace="" name="">
147 new_user_template = """\
149 <login>%(user)s</login>
150 <email>PUT_EMAIL_ADDRESS_HERE</email>
151 <realname>PUT_REAL_NAME_HERE</realname>
153 <project name="home:%(user)s"/>
169 new_pattern_template = """\
170 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
176 buildstatus_symbols = {'succeeded': '.',
178 'expansion error': 'U', # obsolete with OBS 2.0
191 # our own xml writer function to write xml nicely, but with correct syntax
192 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
193 from xml.dom import minidom
194 def fixed_writexml(self, writer, indent="", addindent="", newl=""):
195 # indent = current indentation
196 # addindent = indentation to add to higher levels
197 # newl = newline string
198 writer.write(indent+"<" + self.tagName)
200 attrs = self._get_attributes()
201 a_names = attrs.keys()
204 for a_name in a_names:
205 writer.write(" %s=\"" % a_name)
206 minidom._write_data(writer, attrs[a_name].value)
209 if len(self.childNodes) == 1 \
210 and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
212 self.childNodes[0].writexml(writer, "", "", "")
213 writer.write("</%s>%s" % (self.tagName, newl))
215 writer.write(">%s"%(newl))
216 for node in self.childNodes:
217 node.writexml(writer,indent+addindent,addindent,newl)
218 writer.write("%s</%s>%s" % (indent,self.tagName,newl))
220 writer.write("/>%s"%(newl))
221 # replace minidom's function with ours
222 minidom.Element.writexml = fixed_writexml
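# Illustrative sketch (not from the original sources): with the patched
# writexml, an element whose only child is a text node stays on one line, so
#
#     doc = minidom.parseString('<package><title>foo</title></package>')
#     print doc.documentElement.toprettyxml(indent='  ')
#
# yields '<package>\n  <title>foo</title>\n</package>' (plus a trailing
# newline) instead of spreading 'foo' over three lines as stock minidom would.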
225 # os.path.samefile is available only under Unix
226 def os_path_samefile(path1, path2):
228 return os.path.samefile(path1, path2)
230 return os.path.realpath(path1) == os.path.realpath(path2)
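# Hedged usage sketch (hypothetical paths): the fallback makes the comparison
# work on platforms without os.path.samefile(), e.g.
#
#     os_path_samefile('/tmp/wc/home:user/mypkg', '/tmp/wc/home:user/mypkg/.')
#     # -> True, via inode comparison on Unix, realpath comparison elsewhere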
233 """represent a file, including its metadata"""
234 def __init__(self, name, md5, size, mtime):
244 """Source service content
247 """creates an empty serviceinfo instance"""
250 def read(self, serviceinfo_node):
251 """read in the source services <services> element passed as
254 if serviceinfo_node == None:
257 services = serviceinfo_node.findall('service')
259 for service in services:
260 name = service.get('name')
262 for param in service.findall('param'):
263 option = param.get('name', None)
265 name += " --" + option + " '" + value + "'"
266 self.commands.append(name)
268 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
269 raise oscerr.APIError(msg)
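# Illustrative sketch (hypothetical _service content, not taken from the
# original sources): for an element like
#
#     <services>
#       <service name="download_url">
#         <param name="protocol">http</param>
#         <param name="host">example.org</param>
#       </service>
#     </services>
#
# read() collects roughly the command string
#     "download_url --protocol 'http' --host 'example.org'"
# in self.commands, which execute() later runs from /usr/lib/obs/service/.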
271 def addVerifyFile(self, serviceinfo_node, filename):
274 f = open(filename, 'r')
275 digest = hashlib.sha256(f.read()).hexdigest()
279 s = ET.Element( "service", name="verify_file" )
280 ET.SubElement(s, "param", name="file").text = filename
281 ET.SubElement(s, "param", name="verifier").text = "sha256"
282 ET.SubElement(s, "param", name="checksum").text = digest
288 def addDownloadUrl(self, serviceinfo_node, url_string):
289 from urlparse import urlparse
290 url = urlparse( url_string )
291 protocol = url.scheme
296 s = ET.Element( "service", name="download_url" )
297 ET.SubElement(s, "param", name="protocol").text = protocol
298 ET.SubElement(s, "param", name="host").text = host
299 ET.SubElement(s, "param", name="path").text = path
305 def execute(self, dir):
308 for call in self.commands:
309 temp_dir = tempfile.mkdtemp()
310 name = call.split(None, 1)[0]
311 if not os.path.exists("/usr/lib/obs/service/"+name):
312 msg = "ERROR: service is not installed!\n"
313 msg += "Maybe try this: zypper in obs-server-" + name
314 raise oscerr.APIError(msg)
315 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
316 ret = subprocess.call(c, shell=True)
318 print "ERROR: service call failed: " + c
320 for file in os.listdir(temp_dir):
321 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
325 """linkinfo metadata (which is part of the xml representing a directory
328 """creates an empty linkinfo instance"""
338 def read(self, linkinfo_node):
339 """read in the linkinfo metadata from the <linkinfo> element passed as
341 If the passed element is None, the method does nothing.
343 if linkinfo_node == None:
345 self.project = linkinfo_node.get('project')
346 self.package = linkinfo_node.get('package')
347 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
348 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
349 self.srcmd5 = linkinfo_node.get('srcmd5')
350 self.error = linkinfo_node.get('error')
351 self.rev = linkinfo_node.get('rev')
352 self.baserev = linkinfo_node.get('baserev')
355 """returns True if the linkinfo is not empty, otherwise False"""
356 if self.xsrcmd5 or self.lsrcmd5:
360 def isexpanded(self):
361 """returns True if the package is an expanded link"""
362 if self.lsrcmd5 and not self.xsrcmd5:
367 """returns True if the link is in error state (could not be applied)"""
373 """return an informatory string representation"""
374 if self.islink() and not self.isexpanded():
375 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
376 % (self.project, self.package, self.xsrcmd5, self.rev)
377 elif self.islink() and self.isexpanded():
379 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
380 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
382 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
383 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
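# Illustrative sketch (made-up md5 value): reading a <linkinfo> element as it
# appears in a package's file listing
#
#     node = ET.fromstring('<linkinfo project="openSUSE:Factory" package="foo" '
#                          'xsrcmd5="d41d8cd98f00b204e9800998ecf8427e"/>')
#     li = Linkinfo()
#     li.read(node)
#     li.islink()       # True  (xsrcmd5 is set)
#     li.isexpanded()   # False (would need lsrcmd5 without xsrcmd5)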
389 """represent a project directory, holding packages"""
390 def __init__(self, dir, getPackageList=True, progress_obj=None):
393 self.absdir = os.path.abspath(dir)
394 self.progress_obj = progress_obj
396 self.name = store_read_project(self.dir)
397 self.apiurl = store_read_apiurl(self.dir)
400 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
402 self.pacs_available = []
404 if conf.config['do_package_tracking']:
405 self.pac_root = self.read_packages().getroot()
406 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
407 self.pacs_excluded = [ i for i in os.listdir(self.dir)
408 for j in conf.config['exclude_glob']
409 if fnmatch.fnmatch(i, j) ]
410 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
411 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
412 # in the self.pacs_broken list
413 self.pacs_broken = []
414 for p in self.pacs_have:
415 if not os.path.isdir(os.path.join(self.absdir, p)):
416 # all states will be replaced with the '!'-state
417 # (except it is already marked as deleted ('D'-state))
418 self.pacs_broken.append(p)
420 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
422 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
424 def checkout_missing_pacs(self, expand_link=False):
425 for pac in self.pacs_missing:
427 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
428 # pac is not under version control but a local file/dir exists
429 msg = 'can\'t add package \'%s\': Object already exists' % pac
430 raise oscerr.PackageExists(self.name, pac, msg)
432 print 'checking out new package %s' % pac
433 checkout_package(self.apiurl, self.name, pac, \
434 pathname=getTransActPath(os.path.join(self.dir, pac)), \
435 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
437 def set_state(self, pac, state):
438 node = self.get_package_node(pac)
440 self.new_package_entry(pac, state)
442 node.attrib['state'] = state
444 def get_package_node(self, pac):
445 for node in self.pac_root.findall('package'):
446 if pac == node.get('name'):
450 def del_package_node(self, pac):
451 for node in self.pac_root.findall('package'):
452 if pac == node.get('name'):
453 self.pac_root.remove(node)
455 def get_state(self, pac):
456 node = self.get_package_node(pac)
458 return node.get('state')
462 def new_package_entry(self, name, state):
463 ET.SubElement(self.pac_root, 'package', name=name, state=state)
465 def read_packages(self):
466 packages_file = os.path.join(self.absdir, store, '_packages')
467 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
468 return ET.parse(packages_file)
470 # scan project for existing packages and migrate them
472 for data in os.listdir(self.dir):
473 pac_dir = os.path.join(self.absdir, data)
474 # we cannot use self.pacs_available because we cannot guarantee that the package list
475 # was fetched from the server
476 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
477 and Package(pac_dir).name == data:
478 cur_pacs.append(ET.Element('package', name=data, state=' '))
479 store_write_initial_packages(self.absdir, self.name, cur_pacs)
480 return ET.parse(os.path.join(self.absdir, store, '_packages'))
482 def write_packages(self):
483 # TODO: should we only modify the existing file instead of overwriting?
484 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
486 def addPackage(self, pac):
488 for i in conf.config['exclude_glob']:
489 if fnmatch.fnmatch(pac, i):
490 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
491 raise oscerr.OscIOError(None, msg)
492 state = self.get_state(pac)
493 if state == None or state == 'D':
494 self.new_package_entry(pac, 'A')
495 self.write_packages()
496 # sometimes the new pac doesn't exist in the list because
497 # it would take too much time to update all data structs regularly
498 if pac in self.pacs_unvers:
499 self.pacs_unvers.remove(pac)
501 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
503 def delPackage(self, pac, force = False):
504 state = self.get_state(pac.name)
506 if state == ' ' or state == 'D':
508 for file in pac.filenamelist + pac.filenamelist_unvers:
509 filestate = pac.status(file)
510 if filestate == 'M' or filestate == 'C' or \
511 filestate == 'A' or filestate == '?':
514 del_files.append(file)
515 if can_delete or force:
516 for file in del_files:
517 pac.delete_localfile(file)
518 if pac.status(file) != '?':
519 pac.delete_storefile(file)
520 # this is not really necessary
521 pac.put_on_deletelist(file)
522 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
523 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
524 pac.write_deletelist()
525 self.set_state(pac.name, 'D')
526 self.write_packages()
528 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
531 delete_dir(pac.absdir)
532 self.del_package_node(pac.name)
533 self.write_packages()
534 print statfrmt('D', pac.name)
536 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
538 print 'package is not under version control'
540 print 'unsupported state'
542 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
545 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
547 # we need to make sure that the _packages file will be written (even if an exception
550 # update complete project
551 # packages which no longer exist upstream
552 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
554 for pac in upstream_del:
555 p = Package(os.path.join(self.dir, pac))
556 self.delPackage(p, force = True)
557 delete_storedir(p.storedir)
562 self.pac_root.remove(self.get_package_node(p.name))
563 self.pacs_have.remove(pac)
565 for pac in self.pacs_have:
566 state = self.get_state(pac)
567 if pac in self.pacs_broken:
568 if self.get_state(pac) != 'A':
569 checkout_package(self.apiurl, self.name, pac,
570 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
571 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
574 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
576 if expand_link and p.islink() and not p.isexpanded():
579 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
581 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
584 rev = p.linkinfo.xsrcmd5
585 print 'Expanding to rev', rev
586 elif unexpand_link and p.islink() and p.isexpanded():
587 rev = p.linkinfo.lsrcmd5
588 print 'Unexpanding to rev', rev
589 elif p.islink() and p.isexpanded():
591 print 'Updating %s' % p.name
592 p.update(rev, service_files)
596 # TODO: Package::update has to be fixed to behave like svn does
597 if pac in self.pacs_broken:
598 checkout_package(self.apiurl, self.name, pac,
599 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
600 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
602 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
603 elif state == 'A' and pac in self.pacs_available:
604 # file/dir called pac already exists and is under version control
605 msg = 'can\'t add package \'%s\': Object already exists' % pac
606 raise oscerr.PackageExists(self.name, pac, msg)
611 print 'unexpected state.. package \'%s\'' % pac
613 self.checkout_missing_pacs(expand_link=not unexpand_link)
615 self.write_packages()
617 def commit(self, pacs = (), msg = '', files = {}, validators = None, verbose_validation = None):
622 if files.has_key(pac):
624 state = self.get_state(pac)
626 self.commitNewPackage(pac, msg, todo, validators=validators, verbose_validation=verbose_validation)
628 self.commitDelPackage(pac)
630 # display the correct dir when sending the changes
631 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
634 p = Package(os.path.join(self.dir, pac))
636 p.commit(msg, validators=validators, verbose_validation=verbose_validation)
637 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
638 print 'osc: \'%s\' is not under version control' % pac
639 elif pac in self.pacs_broken:
640 print 'osc: \'%s\' package not found' % pac
642 self.commitExtPackage(pac, msg, todo)
644 self.write_packages()
646 # if we have packages marked as '!' we cannot commit
647 for pac in self.pacs_broken:
648 if self.get_state(pac) != 'D':
649 msg = 'commit failed: package \'%s\' is missing' % pac
650 raise oscerr.PackageMissing(self.name, pac, msg)
652 for pac in self.pacs_have:
653 state = self.get_state(pac)
656 Package(os.path.join(self.dir, pac)).commit(msg, validators=validators, verbose_validation=verbose_validation)
658 self.commitDelPackage(pac)
660 self.commitNewPackage(pac, msg, validators=validators, verbose_validation=verbose_validation)
662 self.write_packages()
664 def commitNewPackage(self, pac, msg = '', files = [], validators = None, verbose_validation = None):
665 """creates and commits a new package if it does not exist on the server"""
666 if pac in self.pacs_available:
667 print 'package \'%s\' already exists' % pac
669 user = conf.get_apiurl_usr(self.apiurl)
670 edit_meta(metatype='pkg',
671 path_args=(quote_plus(self.name), quote_plus(pac)),
676 # display the correct dir when sending the changes
678 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
682 p = Package(os.path.join(self.dir, pac))
684 print statfrmt('Sending', os.path.normpath(p.dir))
685 p.commit(msg, validators=validators, verbose_validation=verbose_validation)
686 self.set_state(pac, ' ')
689 def commitDelPackage(self, pac):
690 """deletes a package on the server and in the working copy"""
692 # display the correct dir when sending the changes
693 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
696 pac_dir = os.path.join(self.dir, pac)
697 p = Package(os.path.join(self.dir, pac))
698 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
699 delete_storedir(p.storedir)
705 pac_dir = os.path.join(self.dir, pac)
706 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
707 print statfrmt('Deleting', getTransActPath(pac_dir))
708 delete_package(self.apiurl, self.name, pac)
709 self.del_package_node(pac)
711 def commitExtPackage(self, pac, msg, files = []):
712 """commits a package from an external project"""
713 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
716 pac_path = os.path.join(self.dir, pac)
718 project = store_read_project(pac_path)
719 package = store_read_package(pac_path)
720 apiurl = store_read_apiurl(pac_path)
721 if meta_exists(metatype='pkg',
722 path_args=(quote_plus(project), quote_plus(package)),
724 create_new=False, apiurl=apiurl):
725 p = Package(pac_path)
729 user = conf.get_apiurl_usr(self.apiurl)
730 edit_meta(metatype='pkg',
731 path_args=(quote_plus(project), quote_plus(package)),
736 p = Package(pac_path)
742 r.append('*****************************************************')
743 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
744 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
745 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
746 r.append('*****************************************************')
752 """represent a package (its directory) and read/keep/write its metadata"""
753 def __init__(self, workingdir, progress_obj=None, limit_size=None, meta=None):
754 self.dir = workingdir
755 self.absdir = os.path.abspath(self.dir)
756 self.storedir = os.path.join(self.absdir, store)
757 self.progress_obj = progress_obj
759 self.limit_size = limit_size
760 if limit_size == 0:
761 self.limit_size = None
763 check_store_version(self.dir)
765 self.prjname = store_read_project(self.dir)
766 self.name = store_read_package(self.dir)
767 self.apiurl = store_read_apiurl(self.dir)
769 self.update_datastructs()
773 self.todo_delete = []
776 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
777 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
780 def addfile(self, n):
781 st = os.stat(os.path.join(self.dir, n))
782 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
784 def delete_file(self, n, force=False):
785 """deletes a file if possible and marks the file as deleted"""
788 state = self.status(n)
792 if state in ['?', 'A', 'M'] and not force:
793 return (False, state)
794 self.delete_localfile(n)
796 self.put_on_deletelist(n)
797 self.write_deletelist()
799 self.delete_storefile(n)
802 def delete_storefile(self, n):
803 try: os.unlink(os.path.join(self.storedir, n))
806 def delete_localfile(self, n):
807 try: os.unlink(os.path.join(self.dir, n))
810 def put_on_deletelist(self, n):
811 if n not in self.to_be_deleted:
812 self.to_be_deleted.append(n)
814 def put_on_conflictlist(self, n):
815 if n not in self.in_conflict:
816 self.in_conflict.append(n)
818 def clear_from_conflictlist(self, n):
819 """delete an entry from the file, and remove the file if it would be empty"""
820 if n in self.in_conflict:
822 filename = os.path.join(self.dir, n)
823 storefilename = os.path.join(self.storedir, n)
824 myfilename = os.path.join(self.dir, n + '.mine')
825 if self.islinkrepair() or self.ispulled():
826 upfilename = os.path.join(self.dir, n + '.new')
828 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
831 os.unlink(myfilename)
832 # the working copy may be updated, so the .r* ending may be obsolete...
834 os.unlink(upfilename)
835 if self.islinkrepair() or self.ispulled():
836 os.unlink(os.path.join(self.dir, n + '.old'))
840 self.in_conflict.remove(n)
842 self.write_conflictlist()
844 def write_meta_mode(self):
846 fname = os.path.join(self.storedir, '_meta_mode')
852 os.unlink(os.path.join(self.storedir, '_meta_mode'))
856 def write_sizelimit(self):
857 if self.size_limit and self.size_limit <= 0:
859 os.unlink(os.path.join(self.storedir, '_size_limit'))
863 fname = os.path.join(self.storedir, '_size_limit')
865 f.write(str(self.size_limit))
868 def write_deletelist(self):
869 if len(self.to_be_deleted) == 0:
871 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
875 fname = os.path.join(self.storedir, '_to_be_deleted')
877 f.write('\n'.join(self.to_be_deleted))
881 def delete_source_file(self, n):
882 """delete local a source file"""
883 self.delete_localfile(n)
884 self.delete_storefile(n)
886 def delete_remote_source_file(self, n):
887 """delete a remote source file (e.g. from the server)"""
889 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
892 def put_source_file(self, n):
894 # escaping '+' in the URL path (note: not in the URL query string) is
895 # only a workaround for ruby on rails, which swallows it otherwise
897 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
898 http_PUT(u, file = os.path.join(self.dir, n))
900 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
902 def commit(self, msg='', validators=None, verbose_validation=None):
903 # commit only if the upstream revision is the same as the working copy's
904 upstream_rev = self.latest_rev()
905 if self.rev != upstream_rev:
906 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
909 self.todo = self.filenamelist_unvers + self.filenamelist
911 pathn = getTransActPath(self.dir)
916 for validator in sorted(os.listdir(validators)):
917 if validator.startswith('.'):
919 fn=validators+"/"+validator
921 if S_ISREG(mode[ST_MODE]):
922 if verbose_validation:
924 p = subprocess.Popen([fn, "--verbose"], close_fds=True)
926 p = subprocess.Popen([fn], close_fds=True)
928 raise oscerr.RuntimeError(p.stdout, validator )
930 have_conflicts = False
931 for filename in self.todo:
932 if not filename.startswith('_service:') and not filename.startswith('_service_'):
933 st = self.status(filename)
935 self.todo.remove(filename)
936 elif st == 'A' or st == 'M':
937 self.todo_send.append(filename)
938 print statfrmt('Sending', os.path.join(pathn, filename))
940 self.todo_delete.append(filename)
941 print statfrmt('Deleting', os.path.join(pathn, filename))
943 have_conflicts = True
946 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
949 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
950 print 'nothing to do for package %s' % self.name
953 if self.islink() and self.isexpanded():
954 # resolve the link into the upload revision
955 # XXX: do this always?
956 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
957 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
960 print 'Transmitting file data ',
962 for filename in self.todo_delete:
963 # do not touch local files on commit --
964 # delete remotely instead
965 self.delete_remote_source_file(filename)
966 self.to_be_deleted.remove(filename)
967 for filename in self.todo_send:
968 sys.stdout.write('.')
970 self.put_source_file(filename)
972 # all source files are committed - now comes the log
973 query = { 'cmd' : 'commit',
975 'user' : conf.get_apiurl_usr(self.apiurl),
977 if self.islink() and self.isexpanded():
978 query['keeplink'] = '1'
979 if conf.config['linkcontrol'] or self.isfrozen():
980 query['linkrev'] = self.linkinfo.srcmd5
982 query['repairlink'] = '1'
983 query['linkrev'] = self.get_pulled_srcmd5()
984 if self.islinkrepair():
985 query['repairlink'] = '1'
986 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
989 # delete upload revision
991 query = { 'cmd': 'deleteuploadrev' }
992 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
998 root = ET.parse(f).getroot()
999 self.rev = int(root.get('rev'))
1001 print 'Committed revision %s.' % self.rev
1004 os.unlink(os.path.join(self.storedir, '_pulled'))
1005 if self.islinkrepair():
1006 os.unlink(os.path.join(self.storedir, '_linkrepair'))
1007 self.linkrepair = False
1008 # XXX: mark package as invalid?
1009 print 'The source link has been repaired. This directory can now be removed.'
1010 if self.islink() and self.isexpanded():
1011 self.update_local_filesmeta(revision=self.latest_rev())
1013 self.update_local_filesmeta()
1014 self.write_deletelist()
1015 self.update_datastructs()
1017 if self.filenamelist.count('_service'):
1018 print 'The package contains a source service.'
1019 for filename in self.todo:
1020 if filename.startswith('_service:') and os.path.exists(filename):
1021 os.unlink(filename) # remove local files
1022 print_request_list(self.apiurl, self.prjname, self.name)
1024 def write_conflictlist(self):
1025 if len(self.in_conflict) == 0:
1027 os.unlink(os.path.join(self.storedir, '_in_conflict'))
1031 fname = os.path.join(self.storedir, '_in_conflict')
1032 f = open(fname, 'w')
1033 f.write('\n'.join(self.in_conflict))
1037 def updatefile(self, n, revision):
1038 filename = os.path.join(self.dir, n)
1039 storefilename = os.path.join(self.storedir, n)
1040 mtime = self.findfilebyname(n).mtime
1042 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1043 revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1045 shutil.copyfile(filename, storefilename)
1047 def mergefile(self, n):
1048 filename = os.path.join(self.dir, n)
1049 storefilename = os.path.join(self.storedir, n)
1050 myfilename = os.path.join(self.dir, n + '.mine')
1051 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1052 os.rename(filename, myfilename)
1054 mtime = self.findfilebyname(n).mtime
1055 get_source_file(self.apiurl, self.prjname, self.name, n,
1056 revision=self.rev, targetfilename=upfilename,
1057 progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
1059 if binary_file(myfilename) or binary_file(upfilename):
1061 shutil.copyfile(upfilename, filename)
1062 shutil.copyfile(upfilename, storefilename)
1063 self.in_conflict.append(n)
1064 self.write_conflictlist()
1068 # diff3 OPTIONS... MINE OLDER YOURS
1069 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1070 # we would rather use the subprocess module, but it is not available before 2.4
1071 ret = subprocess.call(merge_cmd, shell=True)
1073 # "An exit status of 0 means `diff3' was successful, 1 means some
1074 # conflicts were found, and 2 means trouble."
1076 # merge was successful... clean up
1077 shutil.copyfile(upfilename, storefilename)
1078 os.unlink(upfilename)
1079 os.unlink(myfilename)
1082 # unsuccessful merge
1083 shutil.copyfile(upfilename, storefilename)
1084 self.in_conflict.append(n)
1085 self.write_conflictlist()
1088 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1089 print >>sys.stderr, 'the command line was:'
1090 print >>sys.stderr, merge_cmd
1095 def update_local_filesmeta(self, revision=None):
1097 Update the local _files file in the store.
1098 It is replaced with the version pulled from upstream.
1100 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size, meta=self.meta))
1101 store_write_string(self.absdir, '_files', meta)
1103 def update_datastructs(self):
1105 Update the internal data structures if the local _files
1106 file has changed (e.g. update_local_filesmeta() has been
1110 files_tree = read_filemeta(self.dir)
1111 files_tree_root = files_tree.getroot()
1113 self.rev = files_tree_root.get('rev')
1114 self.srcmd5 = files_tree_root.get('srcmd5')
1116 self.linkinfo = Linkinfo()
1117 self.linkinfo.read(files_tree_root.find('linkinfo'))
1119 self.filenamelist = []
1122 for node in files_tree_root.findall('entry'):
1124 f = File(node.get('name'),
1126 int(node.get('size')),
1127 int(node.get('mtime')))
1128 if node.get('skipped'):
1129 self.skipped.append(f.name)
1131 # okay, a very old version of _files, which didn't contain any metadata yet...
1132 f = File(node.get('name'), '', 0, 0)
1133 self.filelist.append(f)
1134 self.filenamelist.append(f.name)
1136 self.to_be_deleted = read_tobedeleted(self.dir)
1137 self.in_conflict = read_inconflict(self.dir)
1138 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1139 self.size_limit = read_sizelimit(self.dir)
1140 self.meta = read_meta_mode(self.dir)
1142 # gather unversioned files, but ignore some stuff
1143 self.excluded = [ i for i in os.listdir(self.dir)
1144 for j in conf.config['exclude_glob']
1145 if fnmatch.fnmatch(i, j) ]
1146 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1147 if i not in self.excluded
1148 if i not in self.filenamelist ]
1151 """tells us if the package is a link (has 'linkinfo').
1152 A package with linkinfo is a package which links to another package.
1153 Returns True if the package is a link, otherwise False."""
1154 return self.linkinfo.islink()
1156 def isexpanded(self):
1157 """tells us if the package is a link which is expanded.
1158 Returns True if the package is expanded, otherwise False."""
1159 return self.linkinfo.isexpanded()
1161 def islinkrepair(self):
1162 """tells us if we are repairing a broken source link."""
1163 return self.linkrepair
1166 """tells us if we have pulled a link."""
1167 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1170 """tells us if the link is frozen."""
1171 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1173 def get_pulled_srcmd5(self):
1175 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1176 pulledrev = line.strip()
1179 def haslinkerror(self):
1181 Returns True if the link is broken otherwise False.
1182 If the package is not a link it returns False.
1184 return self.linkinfo.haserror()
1186 def linkerror(self):
1188 Returns an error message if the link is broken otherwise None.
1189 If the package is not a link it returns None.
1191 return self.linkinfo.error
1193 def update_local_pacmeta(self):
1195 Update the local _meta file in the store.
1196 It is replaced with the version pulled from upstream.
1198 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1199 store_write_string(self.absdir, '_meta', meta)
1201 def findfilebyname(self, n):
1202 for i in self.filelist:
1206 def status(self, n):
1210 file    storefile  file present   STATUS
1211 exists  exists     in _files
1214   x        x            x         ' ' if digest differs: 'M'
1215                                        and if in conflicts file: 'C'
1217   x        -            x         'D' and listed in _to_be_deleted
1219   -        x            -         'D' (when file in working copy is already deleted)
1220   -        -            x         'F' (new in repo, but not yet in working copy)
1225 known_by_meta = False
1227 exists_in_store = False
1228 if n in self.filenamelist:
1229 known_by_meta = True
1230 if os.path.exists(os.path.join(self.absdir, n)):
1232 if os.path.exists(os.path.join(self.storedir, n)):
1233 exists_in_store = True
1236 if n in self.skipped:
1238 elif exists and not exists_in_store and known_by_meta:
1240 elif n in self.to_be_deleted:
1242 elif n in self.in_conflict:
1244 elif exists and exists_in_store and known_by_meta:
1245 #print self.findfilebyname(n)
1246 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1250 elif exists and not exists_in_store and not known_by_meta:
1252 elif exists and exists_in_store and not known_by_meta:
1254 elif not exists and exists_in_store and known_by_meta:
1256 elif not exists and not exists_in_store and known_by_meta:
1258 elif not exists and exists_in_store and not known_by_meta:
1260 elif not exists and not exists_in_store and not known_by_meta:
1261 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1262 raise IOError('osc: \'%s\' is not under version control' % n)
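# Hedged examples (hypothetical file names) of the mapping described above:
#
#     p = Package('.')
#     p.status('foo.spec')    # ' ' unchanged, 'M' locally modified, 'C' conflict
#     p.status('newfile')     # '?' untracked, 'A' once scheduled for addition
#     p.status('gone.patch')  # 'D' if listed in _to_be_deleted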
1266 def comparePac(self, cmp_pac):
1268 This method compares the local filelist with
1269 the filelist of the passed package to see which files
1270 were added, removed and changed.
1277 for file in self.filenamelist+self.filenamelist_unvers:
1278 state = self.status(file)
1279 if file in self.skipped:
1281 if state == 'A' and (not file in cmp_pac.filenamelist):
1282 added_files.append(file)
1283 elif file in cmp_pac.filenamelist and state == 'D':
1284 removed_files.append(file)
1285 elif state == ' ' and not file in cmp_pac.filenamelist:
1286 added_files.append(file)
1287 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1288 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1289 changed_files.append(file)
1290 for file in cmp_pac.filenamelist:
1291 if not file in self.filenamelist:
1292 removed_files.append(file)
1293 removed_files = set(removed_files)
1295 return changed_files, added_files, removed_files
1297 def merge(self, otherpac):
1298 self.todo += otherpac.todo
1312 '\n '.join(self.filenamelist),
1320 def read_meta_from_spec(self, spec = None):
1325 # scan for spec files
1326 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1327 if len(speclist) == 1:
1328 specfile = speclist[0]
1329 elif len(speclist) > 1:
1330 print 'the following specfiles were found:'
1331 for file in speclist:
1333 print 'please specify one with --specfile'
1336 print 'no specfile was found - please specify one ' \
1340 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1341 self.summary = data['Summary']
1342 self.url = data['Url']
1343 self.descr = data['%description']
1346 def update_package_meta(self, force=False):
1348 for the updatepacmetafromspec subcommand
1349 the force argument suppresses the confirmation question
1352 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1354 root = ET.fromstring(m)
1355 root.find('title').text = self.summary
1356 root.find('description').text = ''.join(self.descr)
1357 url = root.find('url')
1359 url = ET.SubElement(root, 'url')
1362 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1363 mf = metafile(u, ET.tostring(root))
1366 print '*' * 36, 'old', '*' * 36
1368 print '*' * 36, 'new', '*' * 36
1369 print ET.tostring(root)
1371 repl = raw_input('Write? (y/N/e) ')
1382 def mark_frozen(self):
1383 store_write_string(self.absdir, '_frozenlink', '')
1385 print "The link in this package is currently broken. Checking"
1386 print "out the last working version instead; please use 'osc pull'"
1387 print "to repair the link."
1390 def unmark_frozen(self):
1391 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1392 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1394 def latest_rev(self):
1395 if self.islinkrepair():
1396 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1397 elif self.islink() and self.isexpanded():
1398 if self.isfrozen() or self.ispulled():
1399 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1402 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1405 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1407 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1410 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1413 def update(self, rev = None, service_files = False, limit_size = None):
1414 # save filelist and (modified) status before replacing the meta file
1415 saved_filenames = self.filenamelist
1416 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1420 self.limit_size = limit_size
1422 self.limit_size = read_sizelimit(self.dir)
1423 self.update_local_filesmeta(rev)
1424 self = Package(self.dir, progress_obj=self.progress_obj)
1426 # which files no longer exist upstream?
1427 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1429 pathn = getTransActPath(self.dir)
1431 for filename in saved_filenames:
1432 if filename in self.skipped:
1434 if not filename.startswith('_service:') and filename in disappeared:
1435 print statfrmt('D', os.path.join(pathn, filename))
1436 # keep file if it has local modifications
1437 if oldp.status(filename) == ' ':
1438 self.delete_localfile(filename)
1439 self.delete_storefile(filename)
1441 for filename in self.filenamelist:
1442 if filename in self.skipped:
1445 state = self.status(filename)
1446 if not service_files and filename.startswith('_service:'):
1448 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1449 # no merge necessary... local file is changed, but upstream isn't
1451 elif state == 'M' and filename in saved_modifiedfiles:
1452 status_after_merge = self.mergefile(filename)
1453 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1455 self.updatefile(filename, rev)
1456 print statfrmt('U', os.path.join(pathn, filename))
1458 self.updatefile(filename, rev)
1459 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1461 self.updatefile(filename, rev)
1462 print statfrmt('A', os.path.join(pathn, filename))
1463 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1464 self.updatefile(filename, rev)
1465 self.delete_storefile(filename)
1466 print statfrmt('U', os.path.join(pathn, filename))
1470 self.update_local_pacmeta()
1472 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1473 print 'At revision %s.' % self.rev
1475 if not service_files:
1476 self.run_source_services()
1478 def run_source_services(self):
1479 if self.filenamelist.count('_service'):
1480 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1483 si.execute(self.absdir)
1485 def prepare_filelist(self):
1486 """Prepare a list of files, which will be processed by process_filelist
1487 method. This allows easy modifications of a file list in commit
1491 self.todo = self.filenamelist + self.filenamelist_unvers
1495 for f in [f for f in self.todo if not os.path.isdir(f)]:
1497 status = self.status(f)
1502 ret += "%s %s %s\n" % (action, status, f)
1505 # Edit a filelist for package \'%s\'
1507 # l, leave = leave a file as is
1508 # r, remove = remove a file
1509 # a, add = add a file
1511 # If you remove a file from the list, it will be left unchanged
1512 # If you remove all files, the commit will be aborted""" % self.name
1516 def edit_filelist(self):
1517 """Opens a package list in editor for editing. This allows easy
1518 modifications of it just by simple text editing
1522 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1523 f = os.fdopen(fd, 'w')
1524 f.write(self.prepare_filelist())
1526 mtime_orig = os.stat(filename).st_mtime
1529 run_editor(filename)
1530 mtime = os.stat(filename).st_mtime
1531 if mtime_orig < mtime:
1532 filelist = open(filename).readlines()
1536 raise oscerr.UserAbort()
1538 return self.process_filelist(filelist)
1540 def process_filelist(self, filelist):
1541 """Process a filelist - it add/remove or leave files. This depends on
1542 user input. If no file is processed, it raises an ValueError
1546 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1548 foo = line.split(' ')
1550 action, state, name = (foo[0], ' ', foo[3])
1552 action, state, name = (foo[0], foo[1], foo[2])
1555 action = action.lower()
1558 if action in ('r', 'remove'):
1559 if self.status(name) == '?':
1561 if name in self.todo:
1562 self.todo.remove(name)
1564 self.delete_file(name, True)
1565 elif action in ('a', 'add'):
1566 if self.status(name) != '?':
1567 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1570 elif action in ('l', 'leave'):
1573 raise ValueError("Unknown action `%s'" % action)
1576 raise ValueError("Empty filelist")
1579 """for objects to represent the review state in a request"""
1580 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1582 self.by_user = by_user
1583 self.by_group = by_group
1586 self.comment = comment
1589 """for objects to represent the "state" of a request"""
1590 def __init__(self, name=None, who=None, when=None, comment=None):
1594 self.comment = comment
1597 """represents an action"""
1598 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1600 self.src_project = src_project
1601 self.src_package = src_package
1602 self.src_rev = src_rev
1603 self.dst_project = dst_project
1604 self.dst_package = dst_package
1605 self.src_update = src_update
1608 """represent a request and holds its metadata
1609 it has methods to read in metadata from xml,
1610 different views, ..."""
1613 self.state = RequestState()
1616 self.last_author = None
1619 self.statehistory = []
1622 def read(self, root):
1623 self.reqid = int(root.get('id'))
1624 actions = root.findall('action')
1625 if len(actions) == 0:
1626 actions = [ root.find('submit') ] # for old style requests
1628 for action in actions:
1629 type = action.get('type', 'submit')
1631 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1632 if action.findall('source'):
1633 n = action.find('source')
1634 src_prj = n.get('project', None)
1635 src_pkg = n.get('package', None)
1636 src_rev = n.get('rev', None)
1637 if action.findall('target'):
1638 n = action.find('target')
1639 dst_prj = n.get('project', None)
1640 dst_pkg = n.get('package', None)
1641 if action.findall('options'):
1642 n = action.find('options')
1643 if n.findall('sourceupdate'):
1644 src_update = n.find('sourceupdate').text.strip()
1645 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1647 msg = 'invalid request format:\n%s' % ET.tostring(root)
1648 raise oscerr.APIError(msg)
1651 n = root.find('state')
1652 self.state.name, self.state.who, self.state.when \
1653 = n.get('name'), n.get('who'), n.get('when')
1655 self.state.comment = n.find('comment').text.strip()
1657 self.state.comment = None
1659 # read the review states
1660 for r in root.findall('review'):
1662 s.state = r.get('state')
1663 s.by_user = r.get('by_user')
1664 s.by_group = r.get('by_group')
1665 s.who = r.get('who')
1666 s.when = r.get('when')
1668 s.comment = r.find('comment').text.strip()
1671 self.reviews.append(s)
1673 # read the state history
1674 for h in root.findall('history'):
1676 s.name = h.get('name')
1677 s.who = h.get('who')
1678 s.when = h.get('when')
1680 s.comment = h.find('comment').text.strip()
1683 self.statehistory.append(s)
1684 self.statehistory.reverse()
1686 # read a description, if it exists
1688 n = root.find('description').text
1693 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1694 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1695 dst_prj, dst_pkg, src_update)
1698 def list_view(self):
1699 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1701 for a in self.actions:
1702 dst = "%s/%s" % (a.dst_project, a.dst_package)
1703 if a.src_package == a.dst_package:
1707 if a.type=="submit":
1708 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1709 if a.type=="change_devel":
1710 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1711 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1713 ret += '\n %s: %-50s %-20s ' % \
1714 (a.type, sr_source, dst)
1716 if self.statehistory and self.statehistory[0]:
1718 for h in self.statehistory:
1719 who.append("%s(%s)" % (h.who,h.name))
1721 ret += "\n From: %s" % (' -> '.join(who))
1723 txt = re.sub(r'[^\x20-\x7e\n]', '_', self.descr) # map characters outside printable ASCII to '_'
1725 lines = txt.splitlines()
1726 wrapper = textwrap.TextWrapper( width = 80,
1727 initial_indent=' Descr: ',
1728 subsequent_indent=' ')
1729 ret += "\n" + wrapper.fill(lines[0])
1730 wrapper.initial_indent = ' '
1731 for line in lines[1:]:
1732 ret += "\n" + wrapper.fill(line)
1738 def __cmp__(self, other):
1739 return cmp(self.reqid, other.reqid)
1743 for action in self.actions:
1744 action_list=" %s: " % (action.type)
1745 if action.type=="submit":
1748 r="(r%s)" % (action.src_rev)
1750 if action.src_update:
1751 m="(%s)" % (action.src_update)
1752 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1753 if action.dst_package:
1754 action_list=action_list+"/%s" % ( action.dst_package )
1755 elif action.type=="delete":
1756 action_list=action_list+" %s" % ( action.dst_project )
1757 if action.dst_package:
1758 action_list=action_list+"/%s" % ( action.dst_package )
1759 elif action.type=="change_devel":
1760 action_list=action_list+" %s/%s developed in %s/%s" % \
1761 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1762 action_list=action_list+"\n"
1777 self.state.name, self.state.when, self.state.who,
1780 if len(self.reviews):
1781 reviewitems = [ '%-10s %s %s %s %s %s' \
1782 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1783 for i in self.reviews ]
1784 s += '\nReview: ' + '\n '.join(reviewitems)
1787 if len(self.statehistory):
1788 histitems = [ '%-10s %s %s' \
1789 % (i.name, i.when, i.who) \
1790 for i in self.statehistory ]
1791 s += '\nHistory: ' + '\n '.join(histitems)
1798 """format time as Apr 02 18:19
1800 depending on whether it is in the current year
1804 if time.localtime()[0] == time.localtime(t)[0]:
1806 return time.strftime('%b %d %H:%M',time.localtime(t))
1808 return time.strftime('%b %d %Y',time.localtime(t))
1811 def is_project_dir(d):
1812 return os.path.exists(os.path.join(d, store, '_project')) and not \
1813 os.path.exists(os.path.join(d, store, '_package'))
1816 def is_package_dir(d):
1817 return os.path.exists(os.path.join(d, store, '_project')) and \
1818 os.path.exists(os.path.join(d, store, '_package'))
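# Hedged example (hypothetical checkout layout): a project working copy has a
# store directory with '_project' but no '_package', while a package checkout
# below it has both, e.g.
#
#     home:user/          is_project_dir() -> True,  is_package_dir() -> False
#     home:user/mypkg/    is_project_dir() -> False, is_package_dir() -> True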
1820 def parse_disturl(disturl):
1821 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1822 revision), else raises an oscerr.WrongArgs exception
1825 m = DISTURL_RE.match(disturl)
1827 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1829 apiurl = m.group('apiurl')
1830 if apiurl.split('.')[0] != 'api':
1831 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1832 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
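# Example (made-up, truncated revision hash): a disturl as embedded in built
# packages,
#
#     obs://build.opensuse.org/openSUSE:Factory/standard/2cd1d5...-osc
#
# parses into
#
#     ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard',
#      '2cd1d5...')
#
# i.e. (apiurl, project, source, repository, revision), with non-'api.' hosts
# rewritten to 'https://api.<domain>'.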
1834 def parse_buildlogurl(buildlogurl):
1835 """Parse a build log url, returns a tuple (apiurl, project, package,
1836 repository, arch), else raises oscerr.WrongArgs exception"""
1838 global BUILDLOGURL_RE
1840 m = BUILDLOGURL_RE.match(buildlogurl)
1842 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1844 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1847 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1848 This is handy to allow copy/pasting a project/package combination in this form.
1850 Trailing slashes are removed before the split, because the split would
1851 otherwise give an additional empty string.
1859 def expand_proj_pack(args, idx=0, howmany=0):
1860 """looks for occurance of '.' at the position idx.
1861 If howmany is 2, both proj and pack are expanded together
1862 using the current directory, or none of them, if not possible.
1863 If howmany is 0, proj is expanded if possible, then, if there
1864 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1865 expanded, if possible.
1866 If howmany is 1, only proj is expanded if possible.
1868 If args[idx] does not exist, an implicit '.' is assumed.
1869 If not enough elements up to idx exist, an error is raised.
1871 See also parseargs(args), slash_split(args), findpacs(args)
1872 All these need unification, somehow.
1875 # print args,idx,howmany
1878 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1880 if len(args) == idx:
1882 if args[idx+0] == '.':
1883 if howmany == 0 and len(args) > idx+1:
1884 if args[idx+1] == '.':
1886 # remove one dot and make sure to expand both proj and pack
1891 # print args,idx,howmany
1893 args[idx+0] = store_read_project('.')
1896 package = store_read_package('.')
1897 args.insert(idx+1, package)
1901 package = store_read_package('.')
1902 args.insert(idx+1, package)
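# Hedged expansion examples (run inside a package checkout of, say,
# home:user/mypkg; names are hypothetical):
#
#     args = ['.'];          expand_proj_pack(args)             # args -> ['home:user', 'mypkg']
#     args = ['.', 'other']; expand_proj_pack(args)             # args -> ['home:user', 'other']
#     args = ['.'];          expand_proj_pack(args, howmany=1)  # args -> ['home:user']
#
# args is expanded in place using store_read_project()/store_read_package().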
1906 def findpacs(files, progress_obj=None):
1907 """collect Package objects belonging to the given files
1908 and make sure each Package is returned only once"""
1911 p = filedir_to_pac(f, progress_obj)
1914 if i.name == p.name:
1924 def filedir_to_pac(f, progress_obj=None):
1925 """Takes a working copy path, or a path to a file inside a working copy,
1926 and returns a Package object instance
1928 If the argument was a filename, add it onto the "todo" list of the Package """
1930 if os.path.isdir(f):
1932 p = Package(wd, progress_obj=progress_obj)
1934 wd = os.path.dirname(f) or os.curdir
1935 p = Package(wd, progress_obj=progress_obj)
1936 p.todo = [ os.path.basename(f) ]
1940 def read_filemeta(dir):
1942 r = ET.parse(os.path.join(dir, store, '_files'))
1943 except SyntaxError, e:
1944 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1945 'When parsing .osc/_files, the following error was encountered:\n'
1950 def read_tobedeleted(dir):
1952 fname = os.path.join(dir, store, '_to_be_deleted')
1954 if os.path.exists(fname):
1955 r = [ line.strip() for line in open(fname) ]
1960 def read_meta_mode(dir):
1962 fname = os.path.join(dir, store, '_meta_mode')
1964 if os.path.exists(fname):
1965 r = open(fname).readline()
1967 if r is None or not r == "true":
1971 def read_sizelimit(dir):
1973 fname = os.path.join(dir, store, '_size_limit')
1975 if os.path.exists(fname):
1976 r = open(fname).readline()
1978 if r is None or not r.isdigit():
1982 def read_inconflict(dir):
1984 fname = os.path.join(dir, store, '_in_conflict')
1986 if os.path.exists(fname):
1987 r = [ line.strip() for line in open(fname) ]
1992 def parseargs(list_of_args):
1993 """Convenience method osc's commandline argument parsing.
1995 If called with an empty tuple (or list), return a list containing the current directory.
1996 Otherwise, return a list of the arguments."""
1998 return list(list_of_args)
2003 def statfrmt(statusletter, filename):
2004 return '%s %s' % (statusletter, filename)
2007 def pathjoin(a, *p):
2008 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2009 path = os.path.join(a, *p)
2010 if path.startswith('./'):
2015 def makeurl(baseurl, l, query=[]):
2016 """Given a list of path compoments, construct a complete URL.
2018 Optional parameters for a query string can be given as a list, as a
2019 dictionary, or as an already assembled string.
2020 In case of a dictionary, the parameters will be urlencoded by this
2021 function. In case of a list they will not be -- this is for backwards compatibility.
2024 if conf.config['verbose'] > 1:
2025 print 'makeurl:', baseurl, l, query
2027 if type(query) == type(list()):
2028 query = '&'.join(query)
2029 elif type(query) == type(dict()):
2030 query = urlencode(query)
2032 scheme, netloc = urlsplit(baseurl)[0:2]
2033 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
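# Example (real openSUSE API host, hypothetical project/package names):
#
#     makeurl('https://api.opensuse.org', ['source', 'home:user', 'mypkg'],
#             query={'rev': 'latest'})
#     # -> 'https://api.opensuse.org/source/home:user/mypkg?rev=latest'
#
# A dict query is urlencoded; a list such as ['deleted=1'] is joined with '&'
# unchanged.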
2036 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2037 """wrapper around urllib2.urlopen for error handling,
2038 and to support additional (PUT, DELETE) methods"""
2042 if conf.config['http_debug']:
2045 print '--', method, url
2047 if method == 'POST' and not file and not data:
2048 # adding data to an urllib2 request transforms it into a POST
2051 req = urllib2.Request(url)
2052 api_host_options = {}
2054 api_host_options = conf.get_apiurl_api_host_options(url)
2055 for header, value in api_host_options['http_headers']:
2056 req.add_header(header, value)
2058 # "external" request (url is no apiurl)
2061 req.get_method = lambda: method
2063 # POST requests are application/x-www-form-urlencoded by default
2064 # since we change the request into PUT, we also need to adjust the content type header
2065 if method == 'PUT' or (method == 'POST' and data):
2066 req.add_header('Content-Type', 'application/octet-stream')
2068 if type(headers) == type({}):
2069 for i in headers.keys():
2071 req.add_header(i, headers[i])
2073 if file and not data:
2074 size = os.path.getsize(file)
2076 data = open(file, 'rb').read()
2079 filefd = open(file, 'rb')
2081 if sys.platform[:3] != 'win':
2082 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2084 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2086 except EnvironmentError, e:
2088 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2089 '\non a filesystem which does not support this.' % (e, file))
2090 elif hasattr(e, 'winerror') and e.winerror == 5:
2091 # falling back to the default io
2092 data = open(file, 'rb').read()
2096 if conf.config['debug']: print method, url
2098 old_timeout = socket.getdefaulttimeout()
2099 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2100 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2101 socket.setdefaulttimeout(timeout)
2103 fd = urllib2.urlopen(req, data=data)
2105 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2106 socket.setdefaulttimeout(old_timeout)
2107 if hasattr(conf.cookiejar, 'save'):
2108 conf.cookiejar.save(ignore_discard=True)
2110 if filefd: filefd.close()
2115 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2116 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2117 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2118 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
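# Hedged usage sketch (hypothetical project name): the wrappers above combine
# with makeurl() in the pattern used throughout this module, e.g.
#
#     u = makeurl(apiurl, ['source', 'home:user'])
#     f = http_GET(u)
#     root = ET.parse(f).getroot()
#
# which is essentially what meta_get_packagelist() below does.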
2121 def init_project_dir(apiurl, dir, project):
2122 if not os.path.exists(dir):
2123 if conf.config['checkout_no_colon']:
2124 os.makedirs(dir) # helpful with checkout_no_colon
2127 if not os.path.exists(os.path.join(dir, store)):
2128 os.mkdir(os.path.join(dir, store))
2130 # print 'project=',project,' dir=',dir
2131 store_write_project(dir, project)
2132 store_write_apiurl(dir, apiurl)
2133 if conf.config['do_package_tracking']:
2134 store_write_initial_packages(dir, project, [])
2136 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=None):
2137 if not os.path.isdir(store):
2140 f = open('_project', 'w')
2141 f.write(project + '\n')
2143 f = open('_package', 'w')
2144 f.write(package + '\n')
2148 f = open('_meta_mode', 'w')
2153 f = open('_size_limit', 'w')
2154 f.write(str(limit_size))
2158 f = open('_files', 'w')
2159 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta)))
2163 ET.ElementTree(element=ET.Element('directory')).write('_files')
2165 f = open('_osclib_version', 'w')
2166 f.write(__store_version__ + '\n')
2169 store_write_apiurl(os.path.pardir, apiurl)
2175 def check_store_version(dir):
2176 versionfile = os.path.join(dir, store, '_osclib_version')
2178 v = open(versionfile).read().strip()
2183 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2184 if os.path.exists(os.path.join(dir, '.svn')):
2185 msg = msg + '\nTry svn instead of osc.'
2186 raise oscerr.NoWorkingCopy(msg)
2188 if v != __store_version__:
2189 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2190 # old but compatible store format: no migration needed, just update the version file
2191 f = open(versionfile, 'w')
2192 f.write(__store_version__ + '\n')
2195 msg = 'The osc metadata of your working copy "%s"' % dir
2196 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2197 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2198 raise oscerr.WorkingCopyWrongVersion, msg
2201 def meta_get_packagelist(apiurl, prj, deleted=None):
2205 query['deleted'] = 1
2207 u = makeurl(apiurl, ['source', prj], query)
2209 root = ET.parse(f).getroot()
2210 return [ node.get('name') for node in root.findall('entry') ]
2213 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2214 """return a list of file names,
2215 or a list File() instances if verbose=True"""
2221 query['rev'] = revision
2223 query['rev'] = 'latest'
2225 u = makeurl(apiurl, ['source', prj, package], query=query)
2227 root = ET.parse(f).getroot()
2230 return [ node.get('name') for node in root.findall('entry') ]
2234 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2235 rev = root.get('rev')
2236 for node in root.findall('entry'):
2237 f = File(node.get('name'),
2239 int(node.get('size')),
2240 int(node.get('mtime')))
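# Illustrative usage sketch (not part of the original source): listing a
# package's files either as plain names or, with verbose=True, as File()
# objects that also carry size and mtime. Names are placeholders.
def _example_filelist(apiurl='https://api.opensuse.org'):
    names = meta_get_filelist(apiurl, 'some:project', 'somepkg')
    details = meta_get_filelist(apiurl, 'some:project', 'somepkg', verbose=True)
    return names, details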
2246 def meta_get_project_list(apiurl, deleted):
2249 query['deleted'] = 1
2251 u = makeurl(apiurl, ['source'], query)
2253 root = ET.parse(f).getroot()
2254 return sorted([ node.get('name') for node in root ])
2257 def show_project_meta(apiurl, prj):
2258 url = makeurl(apiurl, ['source', prj, '_meta'])
2260 return f.readlines()
2263 def show_project_conf(apiurl, prj):
2264 url = makeurl(apiurl, ['source', prj, '_config'])
2266 return f.readlines()
2269 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2270 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2274 except urllib2.HTTPError, e:
2275 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2279 def show_package_meta(apiurl, prj, pac):
2280 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2283 return f.readlines()
2284 except urllib2.HTTPError, e:
2285 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2289 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2291 path.append('source')
2297 path.append('_attribute')
2299 path.append(attribute)
2302 query.append("with_default=1")
2304 query.append("with_project=1")
2305 url = makeurl(apiurl, path, query)
2308 return f.readlines()
2309 except urllib2.HTTPError, e:
2310 e.osc_msg = 'Error getting attribute for project \'%s\' package \'%s\'' % (prj, pac)
2314 def show_develproject(apiurl, prj, pac):
2315 m = show_package_meta(apiurl, prj, pac)
2317 return ET.fromstring(''.join(m)).find('devel').get('project')
2322 def show_pattern_metalist(apiurl, prj):
2323 url = makeurl(apiurl, ['source', prj, '_pattern'])
2327 except urllib2.HTTPError, e:
2328 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2330 r = [ node.get('name') for node in tree.getroot() ]
2335 def show_pattern_meta(apiurl, prj, pattern):
2336 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2339 return f.readlines()
2340 except urllib2.HTTPError, e:
2341 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2346 """metafile that can be manipulated and is stored back after manipulation."""
2347 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2351 self.change_is_required = change_is_required
2352 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2353 f = os.fdopen(fd, 'w')
2354 f.write(''.join(input))
2356 self.hash_orig = dgst(self.filename)
2359 hash = dgst(self.filename)
2360 if self.change_is_required and hash == self.hash_orig:
2361 print 'File unchanged. Not saving.'
2362 os.unlink(self.filename)
2365 print 'Sending meta data...'
2366 # don't do any exception handling... it's up to the caller what to do in case
2368 http_PUT(self.url, file=self.filename)
2369 os.unlink(self.filename)
2375 run_editor(self.filename)
2379 except urllib2.HTTPError, e:
2380 error_help = "%d" % e.code
2381 if e.headers.get('X-Opensuse-Errorcode'):
2382 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2384 print >>sys.stderr, 'BuildService API error:', error_help
2385 # examine the error - we can't raise an exception because we might want
2388 if '<summary>' in data:
2389 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2390 input = raw_input('Try again? ([y/N]): ')
2391 if input not in ['y', 'Y']:
2397 if os.path.exists(self.filename):
2398 print 'discarding %s' % self.filename
2399 os.unlink(self.filename)
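# Illustrative usage sketch (not part of the original source): a metafile
# wraps existing meta XML in a temporary file so it can be changed locally and
# pushed back to the server with http_PUT (see the logic above). The URL is
# built from placeholder names.
def _example_metafile(apiurl='https://api.opensuse.org'):
    url = makeurl(apiurl, ['source', 'some:project', '_meta'])
    current = http_GET(url).readlines()
    # with change_is_required=True an unchanged file is not sent back
    return metafile(url, current, change_is_required=True)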
2402 # different types of metadata
2403 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2404 'template': new_project_templ,
2407 'pkg': { 'path' : 'source/%s/%s/_meta',
2408 'template': new_package_templ,
2411 'attribute': { 'path' : 'source/%s/%s/_meta',
2412 'template': new_attribute_templ,
2415 'prjconf': { 'path': 'source/%s/_config',
2419 'user': { 'path': 'person/%s',
2420 'template': new_user_template,
2423 'pattern': { 'path': 'source/%s/_pattern/%s',
2424 'template': new_pattern_template,
2429 def meta_exists(metatype,
2436 apiurl = conf.config['apiurl']
2437 url = make_meta_url(metatype, path_args, apiurl)
2439 data = http_GET(url).readlines()
2440 except urllib2.HTTPError, e:
2441 if e.code == 404 and create_new:
2442 data = metatypes[metatype]['template']
2444 data = StringIO(data % template_args).readlines()
2449 def make_meta_url(metatype, path_args=None, apiurl=None):
2451 apiurl = conf.config['apiurl']
2452 if metatype not in metatypes.keys():
2453 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2454 path = metatypes[metatype]['path']
2457 path = path % path_args
2459 return makeurl(apiurl, [path])
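# Illustrative usage sketch (not part of the original source): the metatype
# selects a path template from 'metatypes' which is then filled with
# path_args. Names are placeholders.
def _example_make_meta_url(apiurl='https://api.opensuse.org'):
    # 'pkg' uses 'source/%s/%s/_meta', so path_args must be (project, package):
    # -> https://api.opensuse.org/source/some:project/somepkg/_meta
    return make_meta_url('pkg', ('some:project', 'somepkg'), apiurl)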
2462 def edit_meta(metatype,
2467 change_is_required=False,
2471 apiurl = conf.config['apiurl']
2473 data = meta_exists(metatype,
2476 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2480 change_is_required = True
2482 url = make_meta_url(metatype, path_args, apiurl)
2483 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2491 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=None):
2494 query['rev'] = revision
2496 query['rev'] = 'latest'
2498 query['linkrev'] = linkrev
2499 elif conf.config['linkcontrol']:
2500 query['linkrev'] = 'base'
2506 query['emptylink'] = 1
2507 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2509 # look for "too large" files according to size limit and mark them
2510 root = ET.fromstring(''.join(f.readlines()))
2511 for e in root.findall('entry'):
2512 size = e.get('size')
2513 if size and limit_size and int(size) > int(limit_size):
2514 e.set('skipped', 'true')
2515 return ET.tostring(root)
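# Illustrative usage sketch (not part of the original source): with limit_size
# set, entries bigger than the limit come back marked with skipped="true" in
# the returned filelist XML. Names are placeholders.
def _example_files_meta(apiurl='https://api.opensuse.org'):
    xml = show_files_meta(apiurl, 'some:project', 'somepkg', limit_size=1024 * 1024)
    root = ET.fromstring(xml)
    # names of all entries that were not marked as too large
    return [e.get('name') for e in root.findall('entry') if e.get('skipped') != 'true']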
2518 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2519 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2520 return ET.fromstring(''.join(m)).get('srcmd5')
2523 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2524 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2526 # only source link packages have a <linkinfo> element.
2527 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2535 raise oscerr.LinkExpandError(prj, pac, li.error)
2539 def show_upstream_rev(apiurl, prj, pac):
2540 m = show_files_meta(apiurl, prj, pac)
2541 return ET.fromstring(''.join(m)).get('rev')
2544 def read_meta_from_spec(specfile, *args):
2545 import codecs, locale, re
2547 Read tags and sections from spec file. To read out
2548 a tag the passed argument mustn't end with a colon. To
2549 read out a section the passed argument must start with
2551 This method returns a dictionary which contains the
2555 if not os.path.isfile(specfile):
2556 raise IOError('\'%s\' is not a regular file' % specfile)
2559 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2560 except UnicodeDecodeError:
2561 lines = open(specfile).readlines()
2568 if itm.startswith('%'):
2569 sections.append(itm)
2573 tag_pat = r'(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2575 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2576 if m and m.group('val'):
2577 spec_data[tag] = m.group('val').strip()
2579 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2582 section_pat = r'^%s\s*?$'
2583 for section in sections:
2584 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2586 start = lines.index(m.group()+'\n') + 1
2588 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2591 for line in lines[start:]:
2592 if line.startswith('%'):
2595 spec_data[section] = data
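# Illustrative usage sketch (not part of the original source): tags are passed
# without the trailing colon and yield strings, sections are passed with their
# leading '%' and yield the list of lines up to the next section. The spec
# file path is a placeholder.
def _example_read_spec(specfile='/tmp/example.spec'):
    data = read_meta_from_spec(specfile, 'Name', 'Version', '%description')
    return data['Name'], data['Version'], data['%description']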
2599 def run_pager(message):
2600 import tempfile, sys
2602 if not sys.stdout.isatty():
2605 tmpfile = tempfile.NamedTemporaryFile()
2606 tmpfile.write(message)
2608 pager = os.getenv('PAGER', default='less')
2609 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
2612 def run_editor(filename):
2613 if sys.platform[:3] != 'win':
2614 editor = os.getenv('EDITOR', default='vim')
2616 editor = os.getenv('EDITOR', default='notepad')
2618 return subprocess.call([ editor, filename ])
2620 def edit_message(footer='', template='', templatelen=30):
2621 delim = '--This line, and those below, will be ignored--\n'
2623 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2624 f = os.fdopen(fd, 'w')
2626 if templatelen is not None:
2627 lines = template.splitlines()
2628 template = '\n'.join(lines[:templatelen])
2629 if lines[templatelen:]:
2630 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2640 run_editor(filename)
2641 msg = open(filename).read().split(delim)[0].rstrip()
2646 input = raw_input('Log message not specified\n'
2647 'a)bort, c)ontinue, e)dit: ')
2649 raise oscerr.UserAbort()
2659 def create_delete_request(apiurl, project, package, message):
2664 package = """package="%s" """ % (package)
2670 <action type="delete">
2671 <target project="%s" %s/>
2674 <description>%s</description>
2676 """ % (project, package,
2677 cgi.escape(message or ''))
2679 u = makeurl(apiurl, ['request'], query='cmd=create')
2680 f = http_POST(u, data=xml)
2682 root = ET.parse(f).getroot()
2683 return root.get('id')
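# Illustrative usage sketch (not part of the original source): file a delete
# request for a package; the server's answer carries the new request id.
# Names are placeholders.
def _example_delete_request(apiurl='https://api.opensuse.org'):
    return create_delete_request(apiurl, 'some:project', 'somepkg',
                                 'package is no longer needed')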
2686 def create_change_devel_request(apiurl,
2687 devel_project, devel_package,
2694 <action type="change_devel">
2695 <source project="%s" package="%s" />
2696 <target project="%s" package="%s" />
2699 <description>%s</description>
2701 """ % (devel_project,
2705 cgi.escape(message or ''))
2707 u = makeurl(apiurl, ['request'], query='cmd=create')
2708 f = http_POST(u, data=xml)
2710 root = ET.parse(f).getroot()
2711 return root.get('id')
2714 # This creates an old style submit request for server api 1.0
2715 def create_submit_request(apiurl,
2716 src_project, src_package,
2717 dst_project=None, dst_package=None,
2718 message=None, orev=None, src_update=None):
2723 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2725 # Yes, this kind of xml construction is horrible
2730 packagexml = """package="%s" """ %( dst_package )
2731 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2732 # XXX: keep the old template for now in order to work with old obs instances
2734 <request type="submit">
2736 <source project="%s" package="%s" rev="%s"/>
2741 <description>%s</description>
2745 orev or show_upstream_rev(apiurl, src_project, src_package),
2748 cgi.escape(message or ""))
2750 u = makeurl(apiurl, ['request'], query='cmd=create')
2751 f = http_POST(u, data=xml)
2753 root = ET.parse(f).getroot()
2754 return root.get('id')
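# Illustrative usage sketch (not part of the original source): submit a
# package from a branch back to its target project; the returned value is the
# new request id. Names are placeholders.
def _example_submit_request(apiurl='https://api.opensuse.org'):
    return create_submit_request(apiurl,
                                 'home:some_user:branches:some:project', 'somepkg',
                                 'some:project', 'somepkg',
                                 message='update to version 1.2.3')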
2757 def get_request(apiurl, reqid):
2758 u = makeurl(apiurl, ['request', reqid])
2760 root = ET.parse(f).getroot()
2767 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2770 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2771 f = http_POST(u, data=message)
2774 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2777 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2778 f = http_POST(u, data=message)
2782 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2784 if 'all' not in req_state:
2785 for state in req_state:
2786 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2788 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2790 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2794 todo['project'] = project
2796 todo['package'] = package
2797 for kind, val in todo.iteritems():
2798 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2799 'action/source/@%(kind)s=\'%(val)s\' or ' \
2800 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2801 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2803 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2804 for i in exclude_target_projects:
2805 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2806 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2808 if conf.config['verbose'] > 1:
2809 print '[ %s ]' % xpath
2810 res = search(apiurl, request=xpath)
2811 collection = res['request']
2813 for root in collection.findall('request'):
2819 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2820 """Return all new requests for all projects/packages where is user is involved"""
2822 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2823 for i in res['project_id'].findall('project'):
2824 projpkgs[i.get('name')] = []
2825 for i in res['package_id'].findall('package'):
2826 if not i.get('project') in projpkgs.keys():
2827 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2829 for prj, pacs in projpkgs.iteritems():
2831 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2835 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2836 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2837 xpath = xpath_join(xpath, xp, inner=True)
2839 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2840 if 'all' not in req_state:
2842 for state in req_state:
2843 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2844 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2845 res = search(apiurl, request=xpath)
2847 for root in res['request'].findall('request'):
2853 def get_request_log(apiurl, reqid):
2854 r = get_request(apiurl, reqid)
2856 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2857 # the description of the request is used for the initial log entry
2858 # otherwise its comment attribute would contain None
2859 if len(r.statehistory) >= 1:
2860 r.statehistory[-1].comment = r.descr
2862 r.state.comment = r.descr
2863 for state in [ r.state ] + r.statehistory:
2864 s = frmt % (state.name, state.who, state.when, str(state.comment))
2869 def get_user_meta(apiurl, user):
2870 u = makeurl(apiurl, ['person', quote_plus(user)])
2873 return ''.join(f.readlines())
2874 except urllib2.HTTPError:
2875 print 'user \'%s\' not found' % user
2879 def get_user_data(apiurl, user, *tags):
2880 """get specified tags from the user meta"""
2881 meta = get_user_meta(apiurl, user)
2884 root = ET.fromstring(meta)
2887 if root.find(tag).text != None:
2888 data.append(root.find(tag).text)
2892 except AttributeError:
2893 # this part is reached if the tags tuple contains an invalid tag
2894 print 'The xml file for user \'%s\' seems to be broken' % user
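# Illustrative usage sketch (not part of the original source): pick single
# tags out of a user's meta; the values come back in the order the tags were
# requested (assuming both exist). The login name is a placeholder.
def _example_user_data(apiurl='https://api.opensuse.org'):
    realname, email = get_user_data(apiurl, 'some_user', 'realname', 'email')
    return '%s <%s>' % (realname, email)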
2899 def download(url, filename, progress_obj = None, mtime = None):
2900 import tempfile, shutil
2903 prefix = os.path.basename(filename)
2904 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
2905 os.chmod(tmpfile, 0644)
2907 o = os.fdopen(fd, 'wb')
2908 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2911 shutil.move(tmpfile, filename)
2920 os.utime(filename, (-1, mtime))
2922 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None, meta=None):
2923 targetfilename = targetfilename or filename
2928 query['rev'] = revision
2929 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2930 download(u, targetfilename, progress_obj, mtime)
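# Illustrative usage sketch (not part of the original source): fetch one
# source file of a package into a local file. Names and paths are
# placeholders.
def _example_get_source_file(apiurl='https://api.opensuse.org'):
    get_source_file(apiurl, 'some:project', 'somepkg', 'somepkg.spec',
                    targetfilename='/tmp/somepkg.spec')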
2932 def get_binary_file(apiurl, prj, repo, arch,
2935 target_filename = None,
2936 target_mtime = None,
2937 progress_meter = False):
2940 from meter import TextMeter
2941 progress_obj = TextMeter()
2943 target_filename = target_filename or filename
2945 where = package or '_repository'
2946 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2947 download(u, target_filename, progress_obj, target_mtime)
2949 def dgst_from_string(str):
2950 # Python 2.5 deprecates the md5 module
2951 # Python 2.4 doesn't have hashlib yet