1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breakage of tools which use osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E', # obsolete with OBS 2.0
# our own xml writer function to write xml nice, but with correct syntax
# This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
from xml.dom import minidom
def fixed_writexml(self, writer, indent="", addindent="", newl=""):
    """Write this element to *writer* without corrupting text content.

    Unlike minidom's stock writexml()/toprettyxml(), an element whose only
    child is a text node is emitted on a single line, so pretty-printing
    does not inject whitespace into the text data.

    indent    -- current indentation
    addindent -- indentation to add to higher levels
    newl      -- newline string
    """
    writer.write(indent + "<" + self.tagName)

    attrs = self._get_attributes()
    # sorted() gives deterministic attribute order and, unlike
    # list.sort() on keys(), also works where keys() is a view
    for a_name in sorted(attrs.keys()):
        writer.write(" %s=\"" % a_name)
        minidom._write_data(writer, attrs[a_name].value)
        writer.write("\"")
    if self.childNodes:
        if len(self.childNodes) == 1 \
           and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
            # text-only element: keep the content on one line
            writer.write(">")
            self.childNodes[0].writexml(writer, "", "", "")
            writer.write("</%s>%s" % (self.tagName, newl))
            return
        writer.write(">%s" % (newl))
        for node in self.childNodes:
            node.writexml(writer, indent + addindent, addindent, newl)
        writer.write("%s</%s>%s" % (indent, self.tagName, newl))
        return
    writer.write("/>%s" % (newl))
# replace minidom's function with ours
minidom.Element.writexml = fixed_writexml
# os.path.samefile is available only under Unix
def os_path_samefile(path1, path2):
    """Return True if path1 and path2 refer to the same file.

    Falls back to comparing the resolved (realpath) names when
    os.path.samefile is unavailable on this platform or fails
    (e.g. one of the paths does not exist).
    """
    try:
        return os.path.samefile(path1, path2)
    except (AttributeError, OSError):
        # AttributeError: samefile missing (non-Unix); OSError: stat failed
        return os.path.realpath(path1) == os.path.realpath(path2)
243 """represent a file, including its metadata"""
244 def __init__(self, name, md5, size, mtime):
254 """Source service content
257 """creates an empty serviceinfo instance"""
def read(self, serviceinfo_node):
    """Read the source services from the <services> element passed as
    parameter, storing one shell-style command line per service in
    self.commands.  Does nothing if the element is None.

    Raises oscerr.APIError when the element is malformed.
    """
    if serviceinfo_node is None:
        return
    try:
        services = serviceinfo_node.findall('service')

        for service in services:
            name = service.get('name')
            for param in service.findall('param'):
                option = param.get('name', None)
                value = param.text
                # build "<service> --<option> '<value>'" command line
                name += " --" + option + " '" + value + "'"
            self.commands.append(name)
    except Exception:
        # wrap any parse problem into an API error with the raw XML attached
        msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
        raise oscerr.APIError(msg)
281 def addDownloadUrl(self, serviceinfo_node, url_string):
282 from urlparse import urlparse
283 url = urlparse( url_string )
284 protocol = url.scheme
289 s = ET.Element( "service", name="download_url" )
290 ET.SubElement(s, "param", name="protocol").text = protocol
291 ET.SubElement(s, "param", name="host").text = host
292 ET.SubElement(s, "param", name="path").text = path
299 def execute(self, dir):
302 for call in self.commands:
303 temp_dir = tempfile.mkdtemp()
304 name = call.split(None, 1)[0]
305 if not os.path.exists("/usr/lib/obs/service/"+name):
306 msg = "ERROR: service is not installed !"
307 msg += "Can maybe solved with: zypper in obs-server-" + name
308 raise oscerr.APIError(msg)
309 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
310 ret = subprocess.call(c, shell=True)
312 print "ERROR: service call failed: " + c
314 for file in os.listdir(temp_dir):
315 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
319 """linkinfo metadata (which is part of the xml representing a directory
322 """creates an empty linkinfo instance"""
def read(self, linkinfo_node):
    """Read the linkinfo metadata from the <linkinfo> element passed as
    parameter.
    If the passed element is None, the method does nothing.
    """
    if linkinfo_node is None:
        return
    self.project = linkinfo_node.get('project')
    self.package = linkinfo_node.get('package')
    self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
    self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
    self.srcmd5 = linkinfo_node.get('srcmd5')
    self.error = linkinfo_node.get('error')
    self.rev = linkinfo_node.get('rev')
    self.baserev = linkinfo_node.get('baserev')
349 """returns True if the linkinfo is not empty, otherwise False"""
350 if self.xsrcmd5 or self.lsrcmd5:
def isexpanded(self):
    """Return True if the package is an expanded link.

    An expanded link carries a link-source md5 (lsrcmd5) but no
    expanded-source md5 (xsrcmd5).
    """
    return bool(self.lsrcmd5 and not self.xsrcmd5)
361 """returns True if the link is in error state (could not be applied)"""
367 """return an informatory string representation"""
368 if self.islink() and not self.isexpanded():
369 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
370 % (self.project, self.package, self.xsrcmd5, self.rev)
371 elif self.islink() and self.isexpanded():
373 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
374 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
376 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
377 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
383 """represent a project directory, holding packages"""
384 def __init__(self, dir, getPackageList=True, progress_obj=None):
387 self.absdir = os.path.abspath(dir)
388 self.progress_obj = progress_obj
390 self.name = store_read_project(self.dir)
391 self.apiurl = store_read_apiurl(self.dir)
394 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
396 self.pacs_available = []
398 if conf.config['do_package_tracking']:
399 self.pac_root = self.read_packages().getroot()
400 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
401 self.pacs_excluded = [ i for i in os.listdir(self.dir)
402 for j in conf.config['exclude_glob']
403 if fnmatch.fnmatch(i, j) ]
404 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
405 # store all broken packages (e.g. packages which where removed by a non-osc cmd)
406 # in the self.pacs_broken list
407 self.pacs_broken = []
408 for p in self.pacs_have:
409 if not os.path.isdir(os.path.join(self.absdir, p)):
410 # all states will be replaced with the '!'-state
411 # (except it is already marked as deleted ('D'-state))
412 self.pacs_broken.append(p)
414 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
416 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
418 def checkout_missing_pacs(self, expand_link=False):
419 for pac in self.pacs_missing:
421 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
422 # pac is not under version control but a local file/dir exists
423 msg = 'can\'t add package \'%s\': Object already exists' % pac
424 raise oscerr.PackageExists(self.name, pac, msg)
426 print 'checking out new package %s' % pac
427 checkout_package(self.apiurl, self.name, pac, \
428 pathname=getTransActPath(os.path.join(self.dir, pac)), \
429 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
def set_state(self, pac, state):
    """Set the tracking state of package *pac* to *state*.

    Creates a new entry in the in-memory _packages tree if the package
    is not yet known there.
    """
    node = self.get_package_node(pac)
    if node is None:
        self.new_package_entry(pac, state)
    else:
        node.attrib['state'] = state
def get_package_node(self, pac):
    """Return the <package> element whose name attribute equals *pac*,
    or None if no such entry exists in the _packages tree."""
    matches = (node for node in self.pac_root.findall('package')
               if node.get('name') == pac)
    return next(matches, None)
def del_package_node(self, pac):
    """Drop every <package> entry named *pac* from the _packages tree."""
    victims = [node for node in self.pac_root.findall('package')
               if node.get('name') == pac]
    for victim in victims:
        self.pac_root.remove(victim)
def get_state(self, pac):
    """Return the tracking state of package *pac*, or None if the
    package has no entry in the _packages tree."""
    node = self.get_package_node(pac)
    if node is None:
        return None
    return node.get('state')
def new_package_entry(self, name, state):
    """Append a fresh <package name=... state=...> element to the
    in-memory _packages tree."""
    attributes = {'name': name, 'state': state}
    ET.SubElement(self.pac_root, 'package', attributes)
459 def read_packages(self):
460 packages_file = os.path.join(self.absdir, store, '_packages')
461 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
462 return ET.parse(packages_file)
464 # scan project for existing packages and migrate them
466 for data in os.listdir(self.dir):
467 pac_dir = os.path.join(self.absdir, data)
468 # we cannot use self.pacs_available because we cannot guarantee that the package list
469 # was fetched from the server
470 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
471 and Package(pac_dir).name == data:
472 cur_pacs.append(ET.Element('package', name=data, state=' '))
473 store_write_initial_packages(self.absdir, self.name, cur_pacs)
474 return ET.parse(os.path.join(self.absdir, store, '_packages'))
def write_packages(self):
    """Serialize the in-memory package-tracking tree back to the
    working copy's _packages store file."""
    # TODO: should we only modify the existing file instead of overwriting?
    packages_path = os.path.join(self.absdir, store, '_packages')
    ET.ElementTree(self.pac_root).write(packages_path)
480 def addPackage(self, pac):
482 for i in conf.config['exclude_glob']:
483 if fnmatch.fnmatch(pac, i):
484 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
485 raise oscerr.OscIOError(None, msg)
486 state = self.get_state(pac)
487 if state == None or state == 'D':
488 self.new_package_entry(pac, 'A')
489 self.write_packages()
490 # sometimes the new pac doesn't exist in the list because
491 # it would take too much time to update all data structs regularly
492 if pac in self.pacs_unvers:
493 self.pacs_unvers.remove(pac)
495 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
497 def delPackage(self, pac, force = False):
498 state = self.get_state(pac.name)
500 if state == ' ' or state == 'D':
502 for file in pac.filenamelist + pac.filenamelist_unvers:
503 filestate = pac.status(file)
504 if filestate == 'M' or filestate == 'C' or \
505 filestate == 'A' or filestate == '?':
508 del_files.append(file)
509 if can_delete or force:
510 for file in del_files:
511 pac.delete_localfile(file)
512 if pac.status(file) != '?':
513 pac.delete_storefile(file)
514 # this is not really necessary
515 pac.put_on_deletelist(file)
516 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
517 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
518 pac.write_deletelist()
519 self.set_state(pac.name, 'D')
520 self.write_packages()
522 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
525 delete_dir(pac.absdir)
526 self.del_package_node(pac.name)
527 self.write_packages()
528 print statfrmt('D', pac.name)
530 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
532 print 'package is not under version control'
534 print 'unsupported state'
536 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
539 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
541 # we need to make sure that the _packages file will be written (even if an exception
544 # update complete project
545 # packages which no longer exists upstream
546 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
548 for pac in upstream_del:
549 p = Package(os.path.join(self.dir, pac))
550 self.delPackage(p, force = True)
551 delete_storedir(p.storedir)
556 self.pac_root.remove(self.get_package_node(p.name))
557 self.pacs_have.remove(pac)
559 for pac in self.pacs_have:
560 state = self.get_state(pac)
561 if pac in self.pacs_broken:
562 if self.get_state(pac) != 'A':
563 checkout_package(self.apiurl, self.name, pac,
564 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
565 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
568 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
570 if expand_link and p.islink() and not p.isexpanded():
573 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
575 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
578 rev = p.linkinfo.xsrcmd5
579 print 'Expanding to rev', rev
580 elif unexpand_link and p.islink() and p.isexpanded():
581 rev = p.linkinfo.lsrcmd5
582 print 'Unexpanding to rev', rev
583 elif p.islink() and p.isexpanded():
585 print 'Updating %s' % p.name
586 p.update(rev, service_files)
590 # TODO: Package::update has to fixed to behave like svn does
591 if pac in self.pacs_broken:
592 checkout_package(self.apiurl, self.name, pac,
593 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
594 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
596 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
597 elif state == 'A' and pac in self.pacs_available:
598 # file/dir called pac already exists and is under version control
599 msg = 'can\'t add package \'%s\': Object already exists' % pac
600 raise oscerr.PackageExists(self.name, pac, msg)
605 print 'unexpected state.. package \'%s\'' % pac
607 self.checkout_missing_pacs(expand_link=not unexpand_link)
609 self.write_packages()
611 def commit(self, pacs = (), msg = '', files = {}):
616 if files.has_key(pac):
618 state = self.get_state(pac)
620 self.commitNewPackage(pac, msg, todo)
622 self.commitDelPackage(pac)
624 # display the correct dir when sending the changes
625 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
628 p = Package(os.path.join(self.dir, pac))
631 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
632 print 'osc: \'%s\' is not under version control' % pac
633 elif pac in self.pacs_broken:
634 print 'osc: \'%s\' package not found' % pac
636 self.commitExtPackage(pac, msg, todo)
638 self.write_packages()
640 # if we have packages marked as '!' we cannot commit
641 for pac in self.pacs_broken:
642 if self.get_state(pac) != 'D':
643 msg = 'commit failed: package \'%s\' is missing' % pac
644 raise oscerr.PackageMissing(self.name, pac, msg)
646 for pac in self.pacs_have:
647 state = self.get_state(pac)
650 Package(os.path.join(self.dir, pac)).commit(msg)
652 self.commitDelPackage(pac)
654 self.commitNewPackage(pac, msg)
656 self.write_packages()
658 def commitNewPackage(self, pac, msg = '', files = []):
659 """creates and commits a new package if it does not exist on the server"""
660 if pac in self.pacs_available:
661 print 'package \'%s\' already exists' % pac
663 user = conf.get_apiurl_usr(self.apiurl)
664 edit_meta(metatype='pkg',
665 path_args=(quote_plus(self.name), quote_plus(pac)),
670 # display the correct dir when sending the changes
672 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
676 p = Package(os.path.join(self.dir, pac))
678 print statfrmt('Sending', os.path.normpath(p.dir))
680 self.set_state(pac, ' ')
683 def commitDelPackage(self, pac):
684 """deletes a package on the server and in the working copy"""
686 # display the correct dir when sending the changes
687 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
690 pac_dir = os.path.join(self.dir, pac)
691 p = Package(os.path.join(self.dir, pac))
692 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
693 delete_storedir(p.storedir)
699 pac_dir = os.path.join(self.dir, pac)
700 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
701 print statfrmt('Deleting', getTransActPath(pac_dir))
702 delete_package(self.apiurl, self.name, pac)
703 self.del_package_node(pac)
705 def commitExtPackage(self, pac, msg, files = []):
706 """commits a package from an external project"""
707 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
710 pac_path = os.path.join(self.dir, pac)
712 project = store_read_project(pac_path)
713 package = store_read_package(pac_path)
714 apiurl = store_read_apiurl(pac_path)
715 if meta_exists(metatype='pkg',
716 path_args=(quote_plus(project), quote_plus(package)),
718 create_new=False, apiurl=apiurl):
719 p = Package(pac_path)
723 user = conf.get_apiurl_usr(self.apiurl)
724 edit_meta(metatype='pkg',
725 path_args=(quote_plus(project), quote_plus(package)),
730 p = Package(pac_path)
736 r.append('*****************************************************')
737 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
738 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
739 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
740 r.append('*****************************************************')
746 """represent a package (its directory) and read/keep/write its metadata"""
747 def __init__(self, workingdir, progress_obj=None, limit_size=None):
748 self.dir = workingdir
749 self.absdir = os.path.abspath(self.dir)
750 self.storedir = os.path.join(self.absdir, store)
751 self.progress_obj = progress_obj
752 self.limit_size = limit_size
753 if limit_size and limit_size == 0:
754 self.limit_size = None
756 check_store_version(self.dir)
758 self.prjname = store_read_project(self.dir)
759 self.name = store_read_package(self.dir)
760 self.apiurl = store_read_apiurl(self.dir)
762 self.update_datastructs()
766 self.todo_delete = []
769 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
770 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
def addfile(self, n):
    """Schedule file *n* for addition by copying it into the store.

    os.stat() is kept purely for its side effect: it raises OSError
    when the file does not exist in the working copy.
    """
    os.stat(os.path.join(self.dir, n))
    shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
777 def delete_file(self, n, force=False):
778 """deletes a file if possible and marks the file as deleted"""
781 state = self.status(n)
785 if state in ['?', 'A', 'M'] and not force:
786 return (False, state)
787 self.delete_localfile(n)
789 self.put_on_deletelist(n)
790 self.write_deletelist()
792 self.delete_storefile(n)
def delete_storefile(self, n):
    """Remove file *n* from the store; a missing file is ignored."""
    try:
        os.unlink(os.path.join(self.storedir, n))
    except OSError:
        # best effort: nothing to do if the store copy is already gone
        pass
def delete_localfile(self, n):
    """Remove file *n* from the working copy; a missing file is ignored."""
    try:
        os.unlink(os.path.join(self.dir, n))
    except OSError:
        # best effort: nothing to do if the working-copy file is already gone
        pass
def put_on_deletelist(self, n):
    """Record *n* in the list of files scheduled for deletion,
    avoiding duplicate entries."""
    if n in self.to_be_deleted:
        return
    self.to_be_deleted.append(n)
def put_on_conflictlist(self, n):
    """Record *n* in the list of conflicted files, avoiding duplicates."""
    if n in self.in_conflict:
        return
    self.in_conflict.append(n)
811 def clear_from_conflictlist(self, n):
812 """delete an entry from the file, and remove the file if it would be empty"""
813 if n in self.in_conflict:
815 filename = os.path.join(self.dir, n)
816 storefilename = os.path.join(self.storedir, n)
817 myfilename = os.path.join(self.dir, n + '.mine')
818 if self.islinkrepair() or self.ispulled():
819 upfilename = os.path.join(self.dir, n + '.new')
821 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
824 os.unlink(myfilename)
825 # the working copy may be updated, so the .r* ending may be obsolete...
827 os.unlink(upfilename)
828 if self.islinkrepair() or self.ispulled():
829 os.unlink(os.path.join(self.dir, n + '.old'))
833 self.in_conflict.remove(n)
835 self.write_conflictlist()
837 def write_sizelimit(self):
838 if self.size_limit and self.size_limit <= 0:
840 os.unlink(os.path.join(self.storedir, '_size_limit'))
844 fname = os.path.join(self.storedir, '_size_limit')
846 f.write(str(self.size_limit))
849 def write_deletelist(self):
850 if len(self.to_be_deleted) == 0:
852 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
856 fname = os.path.join(self.storedir, '_to_be_deleted')
858 f.write('\n'.join(self.to_be_deleted))
def delete_source_file(self, n):
    """Delete a local source file: remove it from the working copy
    first, then from the store."""
    for remover in (self.delete_localfile, self.delete_storefile):
        remover(n)
867 def delete_remote_source_file(self, n):
868 """delete a remote source file (e.g. from the server)"""
870 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
873 def put_source_file(self, n):
875 # escaping '+' in the URL path (note: not in the URL query string) is
876 # only a workaround for ruby on rails, which swallows it otherwise
878 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
879 http_PUT(u, file = os.path.join(self.dir, n))
881 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
883 def commit(self, msg=''):
884 # commit only if the upstream revision is the same as the working copy's
885 upstream_rev = self.latest_rev()
886 if self.rev != upstream_rev:
887 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
890 self.todo = self.filenamelist_unvers + self.filenamelist
892 pathn = getTransActPath(self.dir)
894 have_conflicts = False
895 for filename in self.todo:
896 if not filename.startswith('_service:') and not filename.startswith('_service_'):
897 st = self.status(filename)
899 self.todo.remove(filename)
900 elif st == 'A' or st == 'M':
901 self.todo_send.append(filename)
902 print statfrmt('Sending', os.path.join(pathn, filename))
904 self.todo_delete.append(filename)
905 print statfrmt('Deleting', os.path.join(pathn, filename))
907 have_conflicts = True
910 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
913 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
914 print 'nothing to do for package %s' % self.name
917 if self.islink() and self.isexpanded():
918 # resolve the link into the upload revision
919 # XXX: do this always?
920 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
921 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
924 print 'Transmitting file data ',
926 for filename in self.todo_delete:
927 # do not touch local files on commit --
928 # delete remotely instead
929 self.delete_remote_source_file(filename)
930 self.to_be_deleted.remove(filename)
931 for filename in self.todo_send:
932 sys.stdout.write('.')
934 self.put_source_file(filename)
936 # all source files are committed - now comes the log
937 query = { 'cmd' : 'commit',
939 'user' : conf.get_apiurl_usr(self.apiurl),
941 if self.islink() and self.isexpanded():
942 query['keeplink'] = '1'
943 if conf.config['linkcontrol'] or self.isfrozen():
944 query['linkrev'] = self.linkinfo.srcmd5
946 query['repairlink'] = '1'
947 query['linkrev'] = self.get_pulled_srcmd5()
948 if self.islinkrepair():
949 query['repairlink'] = '1'
950 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
953 # delete upload revision
955 query = { 'cmd': 'deleteuploadrev' }
956 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
962 root = ET.parse(f).getroot()
963 self.rev = int(root.get('rev'))
965 print 'Committed revision %s.' % self.rev
968 os.unlink(os.path.join(self.storedir, '_pulled'))
969 if self.islinkrepair():
970 os.unlink(os.path.join(self.storedir, '_linkrepair'))
971 self.linkrepair = False
972 # XXX: mark package as invalid?
973 print 'The source link has been repaired. This directory can now be removed.'
974 if self.islink() and self.isexpanded():
975 self.update_local_filesmeta(revision=self.latest_rev())
977 self.update_local_filesmeta()
978 self.write_deletelist()
979 self.update_datastructs()
981 if self.filenamelist.count('_service'):
982 print 'The package contains a source service.'
983 for filename in self.todo:
984 if filename.startswith('_service:') and os.path.exists(filename):
985 os.unlink(filename) # remove local files
986 print_request_list(self.apiurl, self.prjname, self.name)
988 def write_conflictlist(self):
989 if len(self.in_conflict) == 0:
991 os.unlink(os.path.join(self.storedir, '_in_conflict'))
995 fname = os.path.join(self.storedir, '_in_conflict')
997 f.write('\n'.join(self.in_conflict))
1001 def updatefile(self, n, revision):
1002 filename = os.path.join(self.dir, n)
1003 storefilename = os.path.join(self.storedir, n)
1004 mtime = self.findfilebyname(n).mtime
1006 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1007 revision=revision, progress_obj=self.progress_obj, mtime=mtime)
1009 shutil.copyfile(filename, storefilename)
1011 def mergefile(self, n):
1012 filename = os.path.join(self.dir, n)
1013 storefilename = os.path.join(self.storedir, n)
1014 myfilename = os.path.join(self.dir, n + '.mine')
1015 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1016 os.rename(filename, myfilename)
1018 mtime = self.findfilebyname(n).mtime
1019 get_source_file(self.apiurl, self.prjname, self.name, n,
1020 revision=self.rev, targetfilename=upfilename,
1021 progress_obj=self.progress_obj, mtime=mtime)
1023 if binary_file(myfilename) or binary_file(upfilename):
1025 shutil.copyfile(upfilename, filename)
1026 shutil.copyfile(upfilename, storefilename)
1027 self.in_conflict.append(n)
1028 self.write_conflictlist()
1032 # diff3 OPTIONS... MINE OLDER YOURS
1033 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1034 # we would rather use the subprocess module, but it is not availablebefore 2.4
1035 ret = subprocess.call(merge_cmd, shell=True)
1037 # "An exit status of 0 means `diff3' was successful, 1 means some
1038 # conflicts were found, and 2 means trouble."
1040 # merge was successful... clean up
1041 shutil.copyfile(upfilename, storefilename)
1042 os.unlink(upfilename)
1043 os.unlink(myfilename)
1046 # unsuccessful merge
1047 shutil.copyfile(upfilename, storefilename)
1048 self.in_conflict.append(n)
1049 self.write_conflictlist()
1052 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1053 print >>sys.stderr, 'the command line was:'
1054 print >>sys.stderr, merge_cmd
def update_local_filesmeta(self, revision=None):
    """
    Update the local _files file in the store.
    It is replaced with the version pulled from upstream.
    """
    chunks = show_files_meta(self.apiurl, self.prjname, self.name,
                             revision=revision, limit_size=self.limit_size)
    store_write_string(self.absdir, '_files', ''.join(chunks))
1067 def update_datastructs(self):
1069 Update the internal data structures if the local _files
1070 file has changed (e.g. update_local_filesmeta() has been
1074 files_tree = read_filemeta(self.dir)
1075 files_tree_root = files_tree.getroot()
1077 self.rev = files_tree_root.get('rev')
1078 self.srcmd5 = files_tree_root.get('srcmd5')
1080 self.linkinfo = Linkinfo()
1081 self.linkinfo.read(files_tree_root.find('linkinfo'))
1083 self.filenamelist = []
1086 for node in files_tree_root.findall('entry'):
1088 f = File(node.get('name'),
1090 int(node.get('size')),
1091 int(node.get('mtime')))
1092 if node.get('skipped'):
1093 self.skipped.append(f.name)
1095 # okay, a very old version of _files, which didn't contain any metadata yet...
1096 f = File(node.get('name'), '', 0, 0)
1097 self.filelist.append(f)
1098 self.filenamelist.append(f.name)
1100 self.to_be_deleted = read_tobedeleted(self.dir)
1101 self.in_conflict = read_inconflict(self.dir)
1102 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1103 self.size_limit = read_sizelimit(self.dir)
1105 # gather unversioned files, but ignore some stuff
1106 self.excluded = [ i for i in os.listdir(self.dir)
1107 for j in conf.config['exclude_glob']
1108 if fnmatch.fnmatch(i, j) ]
1109 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1110 if i not in self.excluded
1111 if i not in self.filenamelist ]
1114 """tells us if the package is a link (has 'linkinfo').
1115 A package with linkinfo is a package which links to another package.
1116 Returns True if the package is a link, otherwise False."""
1117 return self.linkinfo.islink()
def isexpanded(self):
    """Tell whether this package is a link that has been expanded.

    Delegates to the parsed linkinfo metadata; returns True for an
    expanded link, False otherwise.
    """
    return self.linkinfo.isexpanded()
def islinkrepair(self):
    """Tell whether this working copy is repairing a broken source link."""
    return self.linkrepair
1129 """tells us if we have pulled a link."""
1130 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1133 """tells us if the link is frozen."""
1134 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1136 def get_pulled_srcmd5(self):
1138 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1139 pulledrev = line.strip()
def haslinkerror(self):
    """
    Returns True if the link is broken otherwise False.
    If the package is not a link it returns False.
    """
    return self.linkinfo.haserror()
def linkerror(self):
    """
    Returns an error message if the link is broken otherwise None.
    If the package is not a link it returns None.
    """
    return self.linkinfo.error
def update_local_pacmeta(self):
    """
    Update the local _meta file in the store.
    It is replaced with the version pulled from upstream.
    """
    chunks = show_package_meta(self.apiurl, self.prjname, self.name)
    store_write_string(self.absdir, '_meta', ''.join(chunks))
def findfilebyname(self, n):
    """Return the tracked File object whose name equals *n*, or None
    if no file of that name is under version control."""
    return next((entry for entry in self.filelist if entry.name == n), None)
1169 def status(self, n):
1173 file storefile file present STATUS
1174 exists exists in _files
1177 x x x ' ' if digest differs: 'M'
1178 and if in conflicts file: 'C'
1180 x - x 'D' and listed in _to_be_deleted
1182 - x - 'D' (when file in working copy is already deleted)
1183 - - x 'F' (new in repo, but not yet in working copy)
1188 known_by_meta = False
1190 exists_in_store = False
1191 if n in self.filenamelist:
1192 known_by_meta = True
1193 if os.path.exists(os.path.join(self.absdir, n)):
1195 if os.path.exists(os.path.join(self.storedir, n)):
1196 exists_in_store = True
1199 if n in self.skipped:
1201 elif exists and not exists_in_store and known_by_meta:
1203 elif n in self.to_be_deleted:
1205 elif n in self.in_conflict:
1207 elif exists and exists_in_store and known_by_meta:
1208 #print self.findfilebyname(n)
1209 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1213 elif exists and not exists_in_store and not known_by_meta:
1215 elif exists and exists_in_store and not known_by_meta:
1217 elif not exists and exists_in_store and known_by_meta:
1219 elif not exists and not exists_in_store and known_by_meta:
1221 elif not exists and exists_in_store and not known_by_meta:
1223 elif not exists and not exists_in_store and not known_by_meta:
1224 # this case shouldn't happen (except there was a typo in the filename etc.)
1225 raise IOError('osc: \'%s\' is not under version control' % n)
    def comparePac(self, cmp_pac):
        # [opening '"""' of the docstring elided in this view]
        This method compares the local filelist with
        the filelist of the passed package to see which files
        were added, removed and changed.
        # [closing '"""' and the initialisation of changed_files /
        #  added_files / removed_files are elided in this view]
        for file in self.filenamelist+self.filenamelist_unvers:
            state = self.status(file)
            if file in self.skipped:
                # [continue elided in this view]
            if state == 'A' and (not file in cmp_pac.filenamelist):
                added_files.append(file)
            elif file in cmp_pac.filenamelist and state == 'D':
                removed_files.append(file)
            elif state == ' ' and not file in cmp_pac.filenamelist:
                added_files.append(file)
            elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
                # unchanged name but differing content counts as changed
                if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
                    changed_files.append(file)
        for file in cmp_pac.filenamelist:
            if not file in self.filenamelist:
                removed_files.append(file)
        # de-duplicate; a file can be collected twice above
        removed_files = set(removed_files)
        return changed_files, added_files, removed_files

    def merge(self, otherpac):
        # fold the other working copy's todo list into ours
        self.todo += otherpac.todo

    # [fragment of __str__/__repr__-style formatting; surrounding lines
    #  elided in this view]
                '\n               '.join(self.filenamelist),
    def read_meta_from_spec(self, spec = None):
        # Populate self.summary/self.url/self.descr from the package's spec
        # file; [the branch handling an explicit *spec* argument is elided]
        # scan for spec files
        speclist = glob.glob(os.path.join(self.dir, '*.spec'))
        if len(speclist) == 1:
            specfile = speclist[0]
        elif len(speclist) > 1:
            print 'the following specfiles were found:'
            for file in speclist:
                # [print of each candidate elided in this view]
            print 'please specify one with --specfile'
            # [sys.exit elided in this view]
            print 'no specfile was found - please specify one ' \
            # [remainder of the message and exit elided in this view]

        # delegate to the module-level read_meta_from_spec() helper
        data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
        self.summary = data['Summary']
        self.url = data['Url']
        self.descr = data['%description']

    def update_package_meta(self, force=False):
        # [opening '"""' elided] helper
        for the updatepacmetafromspec subcommand
        argument force supress the confirm question
        # [closing '"""' elided in this view]
        m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))

        root = ET.fromstring(m)
        root.find('title').text = self.summary
        root.find('description').text = ''.join(self.descr)
        url = root.find('url')
        # [None-check elided] create the <url> element when missing
            url = ET.SubElement(root, 'url')
        # [assignment of url.text elided in this view]
        u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
        mf = metafile(u, ET.tostring(root))

        # interactive confirmation: show old vs. new meta before writing
        print '*' * 36, 'old', '*' * 36
        # [print of the old meta elided in this view]
        print '*' * 36, 'new', '*' * 36
        print ET.tostring(root)

        repl = raw_input('Write? (y/N/e) ')
        # [handling of the answer (write / edit / abort) elided in this view]
    def mark_frozen(self):
        # Persist the frozen-link marker and tell the user what happened.
        store_write_string(self.absdir, '_frozenlink', '')
        # [a bare 'print' (blank line) is elided in this view]
        print "The link in this package is currently broken. Checking"
        print "out the last working version instead; please use 'osc pull'"
        print "to repair the link."
1353 def unmark_frozen(self):
1354 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1355 os.unlink(os.path.join(self.storedir, '_frozenlink'))
    def latest_rev(self):
        # Determine the upstream revision this working copy should update to.
        # The choice of xsrcmd5 variant depends on the link state; several
        # 'else:'/assignment lines are elided in this view.
        if self.islinkrepair():
            upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
        elif self.islink() and self.isexpanded():
            if self.isfrozen() or self.ispulled():
                # frozen/pulled links expand against the recorded link srcmd5
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
            # [else-branch header elided in this view]
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
            # [exception-fallback lines elided in this view]
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
            # [second fallback header elided in this view]
                upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
                # [mark_frozen() call elided in this view]
        # [else-branch header elided] non-link packages use the plain revision
            upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
        # [return upstream_rev elided in this view]
    def update(self, rev = None, service_files = False, limit_size = None):
        # Bring the working copy up to date with revision *rev*; several
        # lines (notably 'oldp = self' and status branches) are elided in
        # this view.
        # save filelist and (modified) status before replacing the meta file
        saved_filenames = self.filenamelist
        saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
        # [assignment of 'oldp' and the limit_size branch header elided]
            self.limit_size = limit_size
        # [else-branch header elided in this view]
            self.limit_size = read_sizelimit(self.dir)

        # refresh .osc/_files, then re-instantiate ourselves from disk so the
        # in-memory state matches the new meta
        self.update_local_filesmeta(rev)
        self = Package(self.dir, progress_obj=self.progress_obj)

        # which files do no longer exist upstream?
        disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]

        pathn = getTransActPath(self.dir)

        for filename in saved_filenames:
            if filename in self.skipped:
                # [continue elided in this view]
            if not filename.startswith('_service:') and filename in disappeared:
                print statfrmt('D', os.path.join(pathn, filename))
                # keep file if it has local modifications
                if oldp.status(filename) == ' ':
                    self.delete_localfile(filename)
                self.delete_storefile(filename)

        for filename in self.filenamelist:
            if filename in self.skipped:
                # [continue elided in this view]
            state = self.status(filename)
            if not service_files and filename.startswith('_service:'):
                # [pass/continue elided in this view]
            elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
                # no merge necessary... local file is changed, but upstream isn't
                # [pass elided in this view]
            elif state == 'M' and filename in saved_modifiedfiles:
                status_after_merge = self.mergefile(filename)
                print statfrmt(status_after_merge, os.path.join(pathn, filename))
            # [several elif branch headers (state 'M'/'!'/'F') elided below]
                self.updatefile(filename, rev)
                print statfrmt('U', os.path.join(pathn, filename))
                self.updatefile(filename, rev)
                print 'Restored \'%s\'' % os.path.join(pathn, filename)
                self.updatefile(filename, rev)
                print statfrmt('A', os.path.join(pathn, filename))
            elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
                self.updatefile(filename, rev)
                self.delete_storefile(filename)
                print statfrmt('U', os.path.join(pathn, filename))

        # [handling of remaining states elided in this view]
        self.update_local_pacmeta()

        #print ljust(p.name, 45), 'At revision %s.' % p.rev
        print 'At revision %s.' % self.rev

        if not service_files:
            self.run_source_services()
    def run_source_services(self):
        # Execute the source services declared in '_service', if present.
        if self.filenamelist.count('_service'):
            service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
            # [construction of the Serviceinfo object 'si' and its read()
            #  call are elided in this view]
            si.execute(self.absdir)
    def prepare_filelist(self):
        """Prepare a list of files, which will be processed by process_filelist
        method. This allows easy modifications of a file list in commit
        # [rest of the docstring and its closing '"""' elided in this view]
        # [the 'if not self.todo:' guard is elided in this view]
            self.todo = self.filenamelist + self.filenamelist_unvers
        # [initialisation of 'ret' elided in this view]

        for f in [f for f in self.todo if not os.path.isdir(f)]:
            # [default 'action' assignment elided in this view]
            status = self.status(f)
            # [mapping of status '!'/'?' to an action is elided in this view]
            ret += "%s    %s    %s\n" % (action, status, f)

        # trailing instructions shown to the user in the editor buffer
        # [opening of the help text literal elided in this view]
# Edit a filelist for package \'%s\'
# l, leave = leave a file as is
# r, remove = remove a file
# a, add   = add a file
#
# If you remove file from a list, it will be unchanged
# If you remove all, commit will be aborted""" % self.name
        # [return statement elided in this view]
    def edit_filelist(self):
        """Opens a package list in editor for editing. This allows easy
        modifications of it just by simple text editing
        # [closing '"""' elided in this view]

        # write the prepared list into a temp file and hand it to $EDITOR
        (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
        f = os.fdopen(fd, 'w')
        f.write(self.prepare_filelist())
        # [f.close() elided in this view]
        mtime_orig = os.stat(filename).st_mtime

        # [loop/try header elided in this view]
            run_editor(filename)
            mtime = os.stat(filename).st_mtime
            if mtime_orig < mtime:
                # file was modified -> re-read and process it
                filelist = open(filename).readlines()
                # [os.unlink(filename) elided in this view]
            # [else-branch header elided] unchanged buffer means user abort
                raise oscerr.UserAbort()

        return self.process_filelist(filelist)
    def process_filelist(self, filelist):
        """Process a filelist - it add/remove or leave files. This depends on
        user input. If no file is processed, it raises an ValueError
        # [closing '"""' and initialisation of 'loop' elided in this view]

        for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
            # [guard for empty lines elided in this view]
            foo = line.split(' ')
            # [branch header for 4-column lines elided in this view]
                action, state, name = (foo[0], ' ', foo[3])
            # [branch header for 3-column lines elided in this view]
                action, state, name = (foo[0], foo[1], foo[2])
            # [else/continue elided in this view]
            action = action.lower()
            # [loop-flag assignment elided in this view]

            if action in ('r', 'remove'):
                if self.status(name) == '?':
                    # untracked files are simply dropped from the todo list
                    # [os.unlink call elided in this view]
                    if name in self.todo:
                        self.todo.remove(name)
                # [else-branch header elided] tracked files get a real delete
                    self.delete_file(name, True)
            elif action in ('a', 'add'):
                if self.status(name) != '?':
                    print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
                # [else-branch (addfile call) elided in this view]
            elif action in ('l', 'leave'):
                # [pass elided in this view]
            # [else-branch header elided in this view]
                raise ValueError("Unknow action `%s'" % action)

        # [the 'if not loop:' guard is elided in this view]
            raise ValueError("Empty filelist")
# [class ReviewState: -- header line elided in this view]
    """for objects to represent the review state in a request"""
    def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
        # [self.state assignment elided in this view]
        self.by_user = by_user
        self.by_group = by_group
        # [self.who / self.when assignments elided in this view]
        self.comment = comment

# [class RequestState: -- header line elided in this view]
    """for objects to represent the "state" of a request"""
    def __init__(self, name=None, who=None, when=None, comment=None):
        # [self.name / self.who / self.when assignments elided in this view]
        self.comment = comment

# [class Action: -- header line elided in this view]
    """represents an action"""
    def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
        # [self.type assignment elided in this view]
        self.src_project = src_project
        self.src_package = src_package
        self.src_rev = src_rev
        self.dst_project = dst_project
        self.dst_package = dst_package
        self.src_update = src_update
# [class Request: -- header line elided in this view]
    """represent a request and holds its metadata
       it has methods to read in metadata from xml,
       different views, ..."""
    # [def __init__(self): header and several attribute initialisations
    #  elided in this view]
        self.state = RequestState()
        self.last_author = None
        self.statehistory = []

    def read(self, root):
        # Parse a <request> XML element into this object's attributes.
        self.reqid = int(root.get('id'))
        actions = root.findall('action')
        if len(actions) == 0:
            actions = [ root.find('submit') ] # for old style requests

        # [try-header elided in this view]
        for action in actions:
            type = action.get('type', 'submit')
            # [inner try-header elided in this view]
                src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
                if action.findall('source'):
                    n = action.find('source')
                    src_prj = n.get('project', None)
                    src_pkg = n.get('package', None)
                    src_rev = n.get('rev', None)
                if action.findall('target'):
                    n = action.find('target')
                    dst_prj = n.get('project', None)
                    dst_pkg = n.get('package', None)
                if action.findall('options'):
                    n = action.find('options')
                    if n.findall('sourceupdate'):
                        src_update = n.find('sourceupdate').text.strip()
                self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
            # [except-clause header elided in this view]
                msg = 'invalid request format:\n%s' % ET.tostring(root)
                raise oscerr.APIError(msg)

        # [comment lines elided] read the state
        n = root.find('state')
        self.state.name, self.state.who, self.state.when \
                = n.get('name'), n.get('who'), n.get('when')
        # [try-header elided] a <comment> child is optional
            self.state.comment = n.find('comment').text.strip()
        # [except-clause header elided in this view]
            self.state.comment = None

        # read the review states
        for r in root.findall('review'):
            # [construction of 's = ReviewState()' elided in this view]
            s.state = r.get('state')
            s.by_user = r.get('by_user')
            s.by_group = r.get('by_group')
            s.who = r.get('who')
            s.when = r.get('when')
            # [try-header elided in this view]
                s.comment = r.find('comment').text.strip()
            # [except fallback to None elided in this view]
            self.reviews.append(s)

        # read the state history
        for h in root.findall('history'):
            # [construction of 's = RequestState()' elided in this view]
            s.name = h.get('name')
            s.who = h.get('who')
            s.when = h.get('when')
            # [try-header elided in this view]
                s.comment = h.find('comment').text.strip()
            # [except fallback elided in this view]
            self.statehistory.append(s)
        self.statehistory.reverse()

        # read a description, if it exists
        # [try-header elided in this view]
            n = root.find('description').text
        # [assignment to self.descr and except-clause elided in this view]

    def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
        # Append a new Action built from the raw XML attribute values.
        self.actions.append(Action(type, src_prj, src_pkg, src_rev,
                                   dst_prj, dst_pkg, src_update)
        # [closing parenthesis line elided in this view]
    def list_view(self):
        # One-line-per-request summary used by 'osc request list'.
        ret = '%6d  State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)

        for a in self.actions:
            dst = "%s/%s" % (a.dst_project, a.dst_package)
            if a.src_package == a.dst_package:
                # [shortened-dst assignment elided in this view]

            # [sr_source default assignment elided in this view]
            if a.type=="submit":
                sr_source="%s/%s  -> " % (a.src_project, a.src_package)
            if a.type=="change_devel":
                dst = "developed in %s/%s" % (a.src_project, a.src_package)
                sr_source="%s/%s" % (a.dst_project, a.dst_package)

            ret += '\n        %s:       %-50s %-20s   ' % \
            (a.type, sr_source, dst)

        if self.statehistory and self.statehistory[0]:
            # [initialisation of 'who' list elided in this view]
            for h in self.statehistory:
                who.append("%s(%s)" % (h.who,h.name))
            # [reverse of 'who' elided in this view]
            ret += "\n        From: %s" % (' -> '.join(who))
        # [guard for self.descr elided in this view]
            # replace unprintable characters before wrapping
            txt = re.sub(r'[^[:isprint:]]', '_', self.descr)
            # [import of textwrap elided in this view]
            lines = txt.splitlines()
            wrapper = textwrap.TextWrapper( width = 80,
                    initial_indent='        Descr: ',
                    subsequent_indent='               ')
            ret += "\n" + wrapper.fill(lines[0])
            wrapper.initial_indent = '               '
            for line in lines[1:]:
                ret += "\n" + wrapper.fill(line)
        # [return statement elided in this view]
1701 def __cmp__(self, other):
1702 return cmp(self.reqid, other.reqid)
        # [fragment of the detailed request view; the method header and the
        #  initialisation of the output string are elided in this view]
        for action in self.actions:
            action_list="  %s:  " % (action.type)
            if action.type=="submit":
                # [default 'r'/'m' assignments and the src_rev guard elided]
                    r="(r%s)" % (action.src_rev)
                # [m default elided in this view]
                if action.src_update:
                    m="(%s)" % (action.src_update)
                action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
                if action.dst_package:
                    action_list=action_list+"/%s" % ( action.dst_package )
            elif action.type=="delete":
                action_list=action_list+" %s" % ( action.dst_project )
                if action.dst_package:
                    action_list=action_list+"/%s" % ( action.dst_package )
            elif action.type=="change_devel":
                action_list=action_list+" %s/%s developed in %s/%s" % \
                           ( action.dst_project, action.dst_package, action.src_project, action.src_package )
            action_list=action_list+"\n"

        # [the format template assembling the final string is elided; the
        #  surviving argument lines are kept verbatim below]
              self.state.name, self.state.when, self.state.who,
        # [further template arguments elided in this view]
        if len(self.reviews):
            reviewitems = [ '%-10s  %s %s %s %s   %s' \
                    % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
                    for i in self.reviews ]
            s += '\nReview:  ' + '\n         '.join(reviewitems)

        if len(self.statehistory):
            histitems = [ '%-10s   %s %s' \
                    % (i.name, i.when, i.who) \
                    for i in self.statehistory ]
            s += '\nHistory: ' + '\n         '.join(histitems)
        # [return statement elided in this view]
# [def shorttime(t): -- definition line elided in this view]
    """format time as Apr 02 18:19
    # [middle docstring line elided]
    depending on whether it is in the current year
    # [closing '"""' elided in this view]
    # within the current year show month/day + clock time, otherwise the year
    if time.localtime()[0] == time.localtime(t)[0]:
        # same year
        return time.strftime('%b %d %H:%M',time.localtime(t))
    # [else-branch header elided in this view]
        return time.strftime('%b %d  %Y',time.localtime(t))
def is_project_dir(d):
    """Return True when *d* is an osc project working copy.

    A project working copy carries a '_project' marker in its store
    directory but no '_package' marker (which would make it a package
    working copy instead).
    """
    has_project = os.path.exists(os.path.join(d, store, '_project'))
    has_package = os.path.exists(os.path.join(d, store, '_package'))
    return has_project and not has_package
def is_package_dir(d):
    """Return True when *d* is an osc package working copy.

    A package working copy carries both the '_project' and '_package'
    markers in its store directory.
    """
    has_project = os.path.exists(os.path.join(d, store, '_project'))
    has_package = os.path.exists(os.path.join(d, store, '_package'))
    return has_project and has_package
def parse_disturl(disturl):
    """Parse a disturl, returns tuple (apiurl, project, source, repository,
    revision), else raises an oscerr.WrongArgs exception
    # [closing '"""' elided in this view]

    m = DISTURL_RE.match(disturl)
    # [the 'if not m:' guard header is elided in this view]
        raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)

    apiurl = m.group('apiurl')
    # rewrite the obs-internal host name to the public api host
    if apiurl.split('.')[0] != 'api':
        apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
    return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))

def parse_buildlogurl(buildlogurl):
    """Parse a build log url, returns a tuple (apiurl, project, package,
    repository, arch), else raises oscerr.WrongArgs exception"""

    global BUILDLOGURL_RE

    m = BUILDLOGURL_RE.match(buildlogurl)
    # [the 'if not m:' guard header is elided in this view]
        raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)

    return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))

# [def slash_split(l): -- definition line elided in this view]
    """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
    This is handy to allow copy/paste a project/package combination in this form.
    # [blank docstring line elided]
    Trailing slashes are removed before the split, because the split would
    otherwise give an additional empty string.
    # [closing '"""', the split loop and the return are elided in this view]
def expand_proj_pack(args, idx=0, howmany=0):
    """looks for occurance of '.' at the position idx.
    If howmany is 2, both proj and pack are expanded together
    using the current directory, or none of them, if not possible.
    If howmany is 0, proj is expanded if possible, then, if there
    is no idx+1 element in args (or args[idx+1] == '.'), pack is also
    expanded, if possible.
    If howmany is 1, only proj is expanded if possible.
    # [blank docstring line elided]
    If args[idx] does not exists, an implicit '.' is assumed.
    if not enough elements up to idx exist, an error is raised.
    # [blank docstring line elided]
    See also parseargs(args), slash_split(args), findpacs(args)
    All these need unification, somehow.
    # [closing '"""' elided in this view]

    # print args,idx,howmany
    # [the 'if len(args) < idx:' guard header is elided in this view]
        raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)

    if len(args) == idx:
        # [appending the implicit '.' is elided in this view]
    if args[idx+0] == '.':
        if howmany == 0 and len(args) > idx+1:
            if args[idx+1] == '.':
                # remove one dot and make sure to expand both proj and pack
                # [del/howmany adjustments elided in this view]
        # [howmany==2 branch elided in this view]
        # print args,idx,howmany

        args[idx+0] = store_read_project('.')
        # [howmany decrement / branch headers elided in this view]
            package = store_read_package('.')
            args.insert(idx+1, package)
        # [second expansion branch header elided in this view]
            package = store_read_package('.')
            args.insert(idx+1, package)
    # [return args elided in this view]
def findpacs(files, progress_obj=None):
    """collect Package objects belonging to the given files
    and make sure each Package is returned only once"""
    # [initialisation of the result list and the loop header are elided]
        p = filedir_to_pac(f, progress_obj)
        # [duplicate-detection loop header elided in this view]
            if i.name == p.name:
                # [merge of todo lists / known flag elided in this view]
    # [return statement elided in this view]

def filedir_to_pac(f, progress_obj=None):
    """Takes a working copy path, or a path to a file inside a working copy,
    and returns a Package object instance
    # [blank docstring line elided]
    If the argument was a filename, add it onto the "todo" list of the Package """

    if os.path.isdir(f):
        # [assignment 'wd = f' elided in this view]
        p = Package(wd, progress_obj=progress_obj)
    # [else-branch header elided in this view]
        wd = os.path.dirname(f) or os.curdir
        p = Package(wd, progress_obj=progress_obj)
        p.todo = [ os.path.basename(f) ]
    # [return p elided in this view]

def read_filemeta(dir):
    # [try-header elided in this view]
        r = ET.parse(os.path.join(dir, store, '_files'))
    except SyntaxError, e:
        raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
                                   'When parsing .osc/_files, the following error was encountered:\n'
        # [format arguments and the return are elided in this view]
def read_tobedeleted(dir):
    # Return filenames scheduled for deletion in the working copy *dir*;
    # [initialisation of 'r' and the return are elided in this view].
    fname = os.path.join(dir, store, '_to_be_deleted')

    if os.path.exists(fname):
        r = [ line.strip() for line in open(fname) ]

def read_sizelimit(dir):
    # Read the per-checkout size limit, if one was stored; returns it as a
    # string. [initialisation of 'r' and the return are elided in this view]
    fname = os.path.join(dir, store, '_size_limit')

    if os.path.exists(fname):
        r = open(fname).readline()

    # non-numeric or missing content counts as "no limit"
    if r is None or not r.isdigit():
        # [fallback return elided in this view]

def read_inconflict(dir):
    # Return filenames currently in conflict state;
    # [initialisation of 'r' and the return are elided in this view].
    fname = os.path.join(dir, store, '_in_conflict')

    if os.path.exists(fname):
        r = [ line.strip() for line in open(fname) ]

def parseargs(list_of_args):
    """Convenience method osc's commandline argument parsing.
    # [blank docstring line elided]
    If called with an empty tuple (or list), return a list containing the current directory.
    Otherwise, return a list of the arguments."""
    # [the 'if list_of_args:' branch header is elided in this view]
        return list(list_of_args)
    # [else-branch returning [os.curdir] is elided in this view]
def statfrmt(statusletter, filename):
    # Render one status line for console output.
    # NOTE(review): the dump may have collapsed runs of spaces inside this
    # format string -- confirm the original spacing before relying on it.
    return '%s    %s' % (statusletter, filename)

def pathjoin(a, *p):
    """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
    path = os.path.join(a, *p)
    if path.startswith('./'):
        # [stripping of the './' prefix and the return are elided in this view]

def makeurl(baseurl, l, query=[]):
    """Given a list of path compoments, construct a complete URL.
    # [blank docstring line elided]
    Optional parameters for a query string can be given as a list, as a
    dictionary, or as an already assembled string.
    In case of a dictionary, the parameters will be urlencoded by this
    function. In case of a list not -- this is to be backwards compatible.
    # [closing '"""' elided in this view]

    if conf.config['verbose'] > 1:
        print 'makeurl:', baseurl, l, query

    # normalize the three accepted query representations into one string
    if type(query) == type(list()):
        query = '&'.join(query)
    elif type(query) == type(dict()):
        query = urlencode(query)

    scheme, netloc = urlsplit(baseurl)[0:2]
    return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
def http_request(method, url, headers={}, data=None, file=None, timeout=100):
    """wrapper around urllib2.urlopen for error handling,
    and to support additional (PUT, DELETE) methods"""
    # [initialisation of 'filefd' elided in this view]

    if conf.config['http_debug']:
        # [separator prints elided in this view]
        print '--', method, url

    if method == 'POST' and not file and not data:
        # adding data to an urllib2 request transforms it into a POST
        # [assignment of empty data elided in this view]

    req = urllib2.Request(url)
    api_host_options = {}
    # [the is_known_apiurl guard header is elided in this view]
        api_host_options = conf.get_apiurl_api_host_options(url)
        for header, value in api_host_options['http_headers']:
            req.add_header(header, value)
    # [else-branch header elided in this view]
        # "external" request (url is no apiurl)
        # [header setup for external urls elided in this view]

    req.get_method = lambda: method

    # POST requests are application/x-www-form-urlencoded per default
    # since we change the request into PUT, we also need to adjust the content type header
    if method == 'PUT' or (method == 'POST' and data):
        req.add_header('Content-Type', 'application/octet-stream')

    if type(headers) == type({}):
        for i in headers.keys():
            # [debug print elided in this view]
            req.add_header(i, headers[i])

    if file and not data:
        size = os.path.getsize(file)
        # [small-file branch header elided] small files are read whole
            data = open(file, 'rb').read()
        # [else-branch header elided] large files are memory mapped
            # [try-header and import of mmap elided in this view]
            filefd = open(file, 'rb')
            # [inner try-header elided in this view]
                if sys.platform[:3] != 'win':
                    data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
                # [else-branch header elided in this view]
                    data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
                # [wrapping data in buffer() elided in this view]
            except EnvironmentError, e:
                # [errno check header elided in this view]
                    sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
                             '\non a filesystem which does not support this.' % (e, file))
                elif hasattr(e, 'winerror') and e.winerror == 5:
                    # falling back to the default io
                    data = open(file, 'rb').read()
                # [re-raise for other errors elided in this view]

    if conf.config['debug']: print method, url

    old_timeout = socket.getdefaulttimeout()
    # XXX: dirty hack as timeout doesn't work with python-m2crypto
    if old_timeout != timeout and not api_host_options.get('sslcertck'):
        socket.setdefaulttimeout(timeout)
    # [try-header elided in this view]
        fd = urllib2.urlopen(req, data=data)
    # [finally-header elided] always restore the global socket timeout
        if old_timeout != timeout and not api_host_options.get('sslcertck'):
            socket.setdefaulttimeout(old_timeout)
        if hasattr(conf.cookiejar, 'save'):
            conf.cookiejar.save(ignore_discard=True)

    if filefd: filefd.close()
    # [return fd elided in this view]
def http_GET(*args, **kwargs):
    """Convenience wrapper: an HTTP GET via http_request()."""
    return http_request('GET', *args, **kwargs)


def http_POST(*args, **kwargs):
    """Convenience wrapper: an HTTP POST via http_request()."""
    return http_request('POST', *args, **kwargs)


def http_PUT(*args, **kwargs):
    """Convenience wrapper: an HTTP PUT via http_request()."""
    return http_request('PUT', *args, **kwargs)


def http_DELETE(*args, **kwargs):
    """Convenience wrapper: an HTTP DELETE via http_request()."""
    return http_request('DELETE', *args, **kwargs)
def init_project_dir(apiurl, dir, project):
    # Create the on-disk skeleton of a project working copy.
    if not os.path.exists(dir):
        if conf.config['checkout_no_colon']:
            os.makedirs(dir) # helpful with checkout_no_colon
        # [else-branch (plain os.mkdir) elided in this view]
    if not os.path.exists(os.path.join(dir, store)):
        os.mkdir(os.path.join(dir, store))

    # print 'project=',project,' dir=',dir
    store_write_project(dir, project)
    store_write_apiurl(dir, apiurl)
    if conf.config['do_package_tracking']:
        store_write_initial_packages(dir, project, [])

def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
    # Create the .osc store of a package working copy; note this function
    # chdir()s into *dir* [the mkdir/chdir lines are elided in this view].
    if not os.path.isdir(store):
        # [os.mkdir(store) and os.chdir(store) elided in this view]
    f = open('_project', 'w')
    f.write(project + '\n')
    # [f.close() elided in this view]
    f = open('_package', 'w')
    f.write(package + '\n')
    # [f.close() elided in this view]

    # [limit_size guard header elided in this view]
        f = open('_size_limit', 'w')
        f.write(str(limit_size))
        # [f.close() elided in this view]

    # [the 'if files:' branch header is elided in this view]
        f = open('_files', 'w')
        f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
        # [f.close() elided in this view]
    # [else-branch header elided] without files write an empty directory list
        ET.ElementTree(element=ET.Element('directory')).write('_files')

    f = open('_osclib_version', 'w')
    f.write(__store_version__ + '\n')
    # [f.close() elided in this view]

    store_write_apiurl(os.path.pardir, apiurl)
    # [final chdir back and return elided in this view]

def check_store_version(dir):
    # Verify (and, for known old formats, silently migrate) the on-disk
    # metadata version of working copy *dir*.
    versionfile = os.path.join(dir, store, '_osclib_version')
    # [try-header elided in this view]
        v = open(versionfile).read().strip()
    # [except fallback setting v = '' elided in this view]

    # [the 'if v == '':' guard header is elided in this view]
        msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
        if os.path.exists(os.path.join(dir, '.svn')):
            msg = msg + '\nTry svn instead of osc.'
        raise oscerr.NoWorkingCopy(msg)

    if v != __store_version__:
        if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
            # version is fine, no migration needed
            f = open(versionfile, 'w')
            f.write(__store_version__ + '\n')
            # [f.close() elided in this view]
        # [else-branch header elided] anything else is a hard error
            msg = 'The osc metadata of your working copy "%s"' % dir
            msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
            msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
            raise oscerr.WorkingCopyWrongVersion, msg
def meta_get_packagelist(apiurl, prj):
    # List package names of project *prj* via the source route.
    u = makeurl(apiurl, ['source', prj])
    # [the http_GET(u) call assigning 'f' is elided in this view]
    root = ET.parse(f).getroot()
    return [ node.get('name') for node in root.findall('entry') ]

def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
    """return a list of file names,
    or a list File() instances if verbose=True"""

    # [query-dict initialisation and the expand handling are elided]
        query['rev'] = revision
    # [else-branch header elided in this view]
        query['rev'] = 'latest'

    u = makeurl(apiurl, ['source', prj, package], query=query)
    # [the http_GET(u) call assigning 'f' is elided in this view]
    root = ET.parse(f).getroot()

    # [the 'if not verbose:' branch header is elided in this view]
        return [ node.get('name') for node in root.findall('entry') ]

    # [else-branch building File() objects; its header is elided]
        # rev = int(root.get('rev'))    # don't force int. also allow srcmd5 here.
        rev = root.get('rev')
        for node in root.findall('entry'):
            f = File(node.get('name'),
                     # [md5 argument elided in this view]
                     int(node.get('size')),
                     int(node.get('mtime')))
            # [appending f and the return are elided in this view]

def meta_get_project_list(apiurl):
    # All project names known to the API server, sorted alphabetically.
    u = makeurl(apiurl, ['source'])
    # [the http_GET(u) call assigning 'f' is elided in this view]
    root = ET.parse(f).getroot()
    return sorted([ node.get('name') for node in root ])

def show_project_meta(apiurl, prj):
    # Raw _meta XML of a project, as a list of lines.
    url = makeurl(apiurl, ['source', prj, '_meta'])
    # [the http_GET(url) call assigning 'f' is elided in this view]
    return f.readlines()

def show_project_conf(apiurl, prj):
    # Raw _config (prjconf) of a project, as a list of lines.
    url = makeurl(apiurl, ['source', prj, '_config'])
    # [the http_GET(url) call assigning 'f' is elided in this view]
    return f.readlines()
def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
    # Why the last build of prj/pac on repo/arch was triggered.
    url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
    # [try-header and the http_GET/read lines are elided in this view]
    except urllib2.HTTPError, e:
        e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
        # [re-raise elided in this view]

def show_package_meta(apiurl, prj, pac):
    # Raw _meta XML of a package, as a list of lines.
    url = makeurl(apiurl, ['source', prj, pac, '_meta'])
    # [try-header and the http_GET(url) call are elided in this view]
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
        # [re-raise elided in this view]

def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
    # Fetch _attribute data; the path is assembled piecewise so that the
    # optional pac/subpac/attribute components can be skipped.
    # [path-list initialisation and the optional appends' guards are elided]
    path.append('source')
    path.append('_attribute')
    path.append(attribute)
    # [query initialisation and guard headers elided in this view]
        query.append("with_default=1")
        query.append("with_project=1")
    url = makeurl(apiurl, path, query)
    # [try-header and the http_GET(url) call are elided in this view]
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
        # [re-raise elided in this view]

def show_develproject(apiurl, prj, pac):
    # Return the devel project of prj/pac, or [fallback elided] if none set.
    m = show_package_meta(apiurl, prj, pac)
    # [try-header elided in this view]
        return ET.fromstring(''.join(m)).find('devel').get('project')
    # [except fallback returning None elided in this view]

def show_pattern_metalist(apiurl, prj):
    # Names of all patterns of project *prj*, sorted.
    url = makeurl(apiurl, ['source', prj, '_pattern'])
    # [try-header and the ET.parse of the response are elided in this view]
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
        # [re-raise elided in this view]
    r = [ node.get('name') for node in tree.getroot() ]
    # [sort and return elided in this view]

def show_pattern_meta(apiurl, prj, pattern):
    # Raw XML of one pattern, as a list of lines.
    url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
    # [try-header and the http_GET(url) call are elided in this view]
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
        # [re-raise elided in this view]
# [class metafile: -- header line elided in this view]
    """metafile that can be manipulated and is stored back after manipulation."""
    def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
        # [self.url / self.file_ext assignments elided in this view]
        self.change_is_required = change_is_required
        # buffer the current content in a temp file so it can be edited
        (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
        f = os.fdopen(fd, 'w')
        f.write(''.join(input))
        # [f.close() elided in this view]
        self.hash_orig = dgst(self.filename)

    # [def sync(self): header elided in this view]
        hash = dgst(self.filename)
        if self.change_is_required and hash == self.hash_orig:
            print 'File unchanged. Not saving.'
            os.unlink(self.filename)
            # [return elided in this view]

        print 'Sending meta data...'
        # don't do any exception handling... it's up to the caller what to do in case
        # [rest of the comment elided in this view]
        http_PUT(self.url, file=self.filename)
        os.unlink(self.filename)
        # [success message print elided in this view]

    # [def edit(self): header and its retry loop header are elided]
            run_editor(self.filename)
            # [try-header and self.sync() call elided in this view]
        except urllib2.HTTPError, e:
            error_help = "%d" % e.code
            if e.headers.get('X-Opensuse-Errorcode'):
                error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)

            print >>sys.stderr, 'BuildService API error:', error_help
            # examine the error - we can't raise an exception because we might want
            # [rest of comment and the read of the error body elided]
            if '<summary>' in data:
                print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
            input = raw_input('Try again? ([y/N]): ')
            if input not in ['y', 'Y']:
                # [break elided in this view]

    # [def discard(self): header elided in this view]
        if os.path.exists(self.filename):
            print 'discarding %s' % self.filename
            os.unlink(self.filename)

# different types of metadata
# NOTE(review): per-type 'file_ext' entries and closing braces are elided
# in this view.
metatypes = { 'prj':     { 'path': 'source/%s/_meta',
                           'template': new_project_templ,
              'pkg':     { 'path' : 'source/%s/%s/_meta',
                           'template': new_package_templ,
              'attribute':     { 'path' : 'source/%s/%s/_meta',
                           'template': new_attribute_templ,
              'prjconf': { 'path': 'source/%s/_config',
              'user':    { 'path': 'person/%s',
                           'template': new_user_template,
              'pattern': { 'path': 'source/%s/_pattern/%s',
                           'template': new_pattern_template,
def meta_exists(metatype,
                # [remaining keyword parameters of the signature elided]
    # Fetch existing metadata of *metatype*; on 404 optionally instantiate
    # the type's template instead of failing.
    # [the apiurl default guard header is elided in this view]
        apiurl = conf.config['apiurl']
    url = make_meta_url(metatype, path_args, apiurl)
    # [try-header elided in this view]
        data = http_GET(url).readlines()
    except urllib2.HTTPError, e:
        if e.code == 404 and create_new:
            data = metatypes[metatype]['template']
            # [template_args guard header elided in this view]
                data = StringIO(data % template_args).readlines()
        # [else re-raise and the return are elided in this view]

def make_meta_url(metatype, path_args=None, apiurl=None):
    # Build the API URL for a metadata type; unknown types are an error.
    # [the apiurl default guard header is elided in this view]
        apiurl = conf.config['apiurl']
    if metatype not in metatypes.keys():
        raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
    path = metatypes[metatype]['path']
    # [the path_args guard header is elided in this view]
        path = path % path_args
    return makeurl(apiurl, [path])

def edit_meta(metatype,
              # [further parameters of the signature elided in this view]
              change_is_required=False,
              # [remaining parameters elided in this view]
    # Interactive edit cycle for a piece of metadata: fetch (or template),
    # open in the editor, then sync back.
    # [the apiurl default guard header is elided in this view]
        apiurl = conf.config['apiurl']
    # [the 'if not data:' guard header is elided in this view]
        data = meta_exists(metatype,
                           # [path_args/template_args arguments elided]
                           create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
                           # [apiurl argument elided in this view]

    # [the edit-flag guard header is elided in this view]
        change_is_required = True

    url = make_meta_url(metatype, path_args, apiurl)
    f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
    # [the edit/sync dispatch is elided in this view]

def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
    # Fetch the file list XML of prj/pac, honouring link handling options,
    # and mark entries exceeding *limit_size* as skipped.
    # [query initialisation and the revision guard header are elided]
        query['rev'] = revision
    # [else-branch header elided in this view]
        query['rev'] = 'latest'
    # [the linkrev guard header is elided in this view]
        query['linkrev'] = linkrev
    elif conf.config['linkcontrol']:
        query['linkrev'] = 'base'
    # [expand/linkrepair guard headers elided in this view]
        query['emptylink'] = 1
    f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))

    # look for "too large" files according to size limit and mark them
    root = ET.fromstring(''.join(f.readlines()))
    for e in root.findall('entry'):
        size = e.get('size')
        if size and limit_size and int(size) > int(limit_size):
            e.set('skipped', 'true')
    return ET.tostring(root)
def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
    """Return the 'srcmd5' attribute of the package's file list meta.

    Fetches the files meta of prj/pac (optionally expanded and/or at a
    specific revision) and extracts srcmd5 from its root element.
    """
    meta = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
    root = ET.fromstring(''.join(meta))
    return root.get('srcmd5')
def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
    # Return the expanded srcmd5 of a link package, or raise when the link
    # cannot be expanded. [several lines are elided in this view]
    m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
    # [try-header elided in this view]
        # only source link packages have a <linkinfo> element.
        li_node = ET.fromstring(''.join(m)).find('linkinfo')
        # [None-handling, Linkinfo construction and error check elided]
        raise oscerr.LinkExpandError(prj, pac, li.error)
    # [return of li.xsrcmd5 elided in this view]
def show_upstream_rev(apiurl, prj, pac):
    """Return the current upstream revision ('rev') of prj/pac.

    Reads the package's file list meta and extracts the rev attribute of
    its root element.
    """
    meta = show_files_meta(apiurl, prj, pac)
    root = ET.fromstring(''.join(meta))
    return root.get('rev')
def read_meta_from_spec(specfile, *args):
    import codecs, locale, re
    # [opening '"""' elided in this view]
    Read tags and sections from spec file. To read out
    a tag the passed argument mustn't end with a colon. To
    read out a section the passed argument must start with
    # [middle docstring line elided]
    This method returns a dictionary which contains the
    # [rest of the docstring and its closing '"""' elided in this view]

    if not os.path.isfile(specfile):
        raise IOError('\'%s\' is not a regular file' % specfile)

    # [try-header elided] read with the locale's encoding, fall back to bytes
        lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
    except UnicodeDecodeError:
        lines = open(specfile).readlines()

    # [initialisation of tags/sections/spec_data and the sorting loop header
    #  are elided in this view]
        if itm.startswith('%'):
            sections.append(itm)
        # [else-branch appending to tags elided in this view]

    tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
    # [loop header over tags elided in this view]
        m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
        if m and m.group('val'):
            spec_data[tag] = m.group('val').strip()
        # [else-branch header elided in this view]
            print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
            # [sys.exit elided in this view]

    section_pat = '^%s\s*?$'
    for section in sections:
        m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
        # [the 'if m:' guard header is elided in this view]
            start = lines.index(m.group()+'\n') + 1
        # [else-branch header elided in this view]
            print >>sys.stderr, 'error - section \'%s\' does not exist' % section
            # [sys.exit and 'data' initialisation elided in this view]
        for line in lines[start:]:
            if line.startswith('%'):
                # [break elided in this view]
            # [data.append(line) elided in this view]
        spec_data[section] = data

    # [return spec_data elided in this view]
# Show a message through the user's pager ($PAGER, default 'less');
# when stdout is not a terminal the elided branch presumably prints directly.
2536 def run_pager(message):
2537     import tempfile, sys
2539     if not sys.stdout.isatty():
# The message is staged in a NamedTemporaryFile so the pager can read it by
# name; the file is deleted automatically when the object is collected.
2542         tmpfile = tempfile.NamedTemporaryFile()
2543         tmpfile.write(message)
2545         pager = os.getenv('PAGER', default='less')
# NOTE(review): shell=True with an interpolated filename — tempfile names are
# safe, but PAGER is taken from the environment verbatim (intentional here).
2546         subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
# Open filename in the user's editor ($EDITOR), defaulting to 'vim' on
# POSIX and 'notepad' on Windows; returns the editor's exit status.
# (The 'else:' line between the two branches is elided in this listing.)
2549 def run_editor(filename):
2550     if sys.platform[:3] != 'win':
2551         editor = os.getenv('EDITOR', default='vim')
2553         editor = os.getenv('EDITOR', default='notepad')
2555     return subprocess.call([ editor, filename ])
# Interactively compose a log message: write template + delimiter + footer to
# a temp file, open it in the editor, and return everything above the
# delimiter. Re-prompts (abort/continue/edit) when the message is empty.
2557 def edit_message(footer='', template='', templatelen=30):
2558     delim = '--This line, and those below, will be ignored--\n'
2560     (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2561     f = os.fdopen(fd, 'w')
# Long templates are truncated to templatelen lines; the overflow is moved
# below the delimiter (into the footer) so it is still visible but ignored.
2563     if not templatelen is None:
2564         lines = template.splitlines()
2565         template = '\n'.join(lines[:templatelen])
2566         if lines[templatelen:]:
2567             footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2577         run_editor(filename)
2578         msg = open(filename).read().split(delim)[0].rstrip()
2583             input = raw_input('Log message not specified\n'
2584                               'a)bort, c)ontinue, e)dit: ')
2586                 raise oscerr.UserAbort()
# POST a delete request for project (and optionally package) to the API;
# returns the id of the newly created request.
2596 def create_delete_request(apiurl, project, package, message):
# When a package is given it is rendered as an extra XML attribute; the
# elided else-branch presumably sets it to an empty string.
2601         package = """package="%s" """ % (package)
2607   <action type="delete">
2608     <target project="%s" %s/>
2611   <description>%s</description>
# message is HTML-escaped so it is safe inside the XML body.
2613 """ % (project, package,
2614        cgi.escape(message or ''))
2616     u = makeurl(apiurl, ['request'], query='cmd=create')
2617     f = http_POST(u, data=xml)
2619     root = ET.parse(f).getroot()
2620     return root.get('id')
# POST a change_devel request (set devel project/package of a target) and
# return the id of the created request.
2623 def create_change_devel_request(apiurl,
2624                                 devel_project, devel_package,
2631   <action type="change_devel">
2632     <source project="%s" package="%s" />
2633     <target project="%s" package="%s" />
2636   <description>%s</description>
2638 """ % (devel_project,
# message is HTML-escaped before being embedded in the request XML.
2642        cgi.escape(message or ''))
2644     u = makeurl(apiurl, ['request'], query='cmd=create')
2645     f = http_POST(u, data=xml)
2647     root = ET.parse(f).getroot()
2648     return root.get('id')
2651 # This creates an old style submit request for server api 1.0
2652 def create_submit_request(apiurl,
2653                           src_project, src_package,
2654                           dst_project=None, dst_package=None,
2655                           message=None, orev=None, src_update=None):
# Optional <options> block controls what happens to the source after accept
# (e.g. cleanup); only emitted when src_update is given.
2660         options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2662     # Yes, this kind of xml construction is horrible
2667         packagexml = """package="%s" """ %( dst_package )
2668         targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2669     # XXX: keep the old template for now in order to work with old obs instances
2671 <request type="submit">
2673     <source project="%s" package="%s" rev="%s"/>
2678   <description>%s</description>
# If no explicit source revision is passed, pin the current upstream rev so
# the request content cannot change under the reviewer.
2682        orev or show_upstream_rev(apiurl, src_project, src_package),
2685        cgi.escape(message or ""))
2687     u = makeurl(apiurl, ['request'], query='cmd=create')
2688     f = http_POST(u, data=xml)
2690     root = ET.parse(f).getroot()
2691     return root.get('id')
# Fetch a single request by id and parse it (the http_GET call and the
# Request-object construction are elided in this listing).
2694 def get_request(apiurl, reqid):
2695     u = makeurl(apiurl, ['request', reqid])
2697     root = ET.parse(f).getroot()
# Change the state of a review on a request (cmd=changereviewstate); the
# review comment is sent as the POST body. The makeurl() line is elided.
2704 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2707                 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2708     f = http_POST(u, data=message)
# Change the state of a request itself (cmd=changestate); the comment is the
# POST body. The makeurl() line is elided in this listing.
2711 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2714                 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2715     f = http_POST(u, data=message)
# Search the server for requests matching project/package/requester/state/
# type, excluding given target projects; builds one big xpath expression.
# NOTE(review): exclude_target_projects=[] is a mutable default argument —
# harmless here only as long as the function never mutates it.
2719 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
# req_state may contain 'all' to disable state filtering entirely.
2721     if not 'all' in req_state:
2722         for state in req_state:
2723             xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2725         xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2727     # XXX: we cannot use the '|' in the xpath expression because it is not supported
2731         todo['project'] = project
2733         todo['package'] = package
# Match both new-style (action/...) and old-style (submit/...) request XML.
2734     for kind, val in todo.iteritems():
2735         xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2736                                   'action/source/@%(kind)s=\'%(val)s\' or ' \
2737                                   'submit/target/@%(kind)s=\'%(val)s\' or ' \
2738                                   'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2740         xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2741     for i in exclude_target_projects:
2742         xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2743                                   'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2745     if conf.config['verbose'] > 1:
2746         print '[ %s ]' % xpath
2747     res = search(apiurl, request=xpath)
2748     collection = res['request']
2750     for root in collection.findall('request'):
# NOTE(review): projpkgs={} and exclude_projects=[] are mutable default
# arguments; projpkgs IS mutated below, so results leak across calls when
# the caller relies on the default — should be projpkgs=None + init inside.
2756 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2757     """Return all new requests for all projects/packages where the user is involved"""
2759         res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
# Whole projects the user maintains: any package in them matches (empty list).
2760         for i in res['project_id'].findall('project'):
2761             projpkgs[i.get('name')] = []
# Individual packages, unless their whole project is already covered above.
2762         for i in res['package_id'].findall('package'):
2763             if not i.get('project') in projpkgs.keys():
2764                 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2766     for prj, pacs in projpkgs.iteritems():
2768             xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2772                 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2773             xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2774             xpath = xpath_join(xpath, xp, inner=True)
2776         xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2777     if not 'all' in req_state:
2779         for state in req_state:
2780             xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
# The state filter is AND-ed around the accumulated project/package filter.
2781         xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2782     res = search(apiurl, request=xpath)
2784     for root in res['request'].findall('request'):
# Render the state history of a request as a list of formatted log entries.
# NOTE(review): the apiurl parameter is ignored — get_request() is called
# with conf.config['apiurl'] instead; looks like a bug, confirm with callers.
2790 def get_request_log(apiurl, reqid):
2791     r = get_request(conf.config['apiurl'], reqid)
2793     frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2794     # the description of the request is used for the initial log entry
2795     # otherwise its comment attribute would contain None
2796     if len(r.statehistory) >= 1:
2797         r.statehistory[-1].comment = r.descr
2799         r.state.comment = r.descr
# Newest state first, then the recorded history.
2800     for state in [ r.state ] + r.statehistory:
2801         s = frmt % (state.name, state.who, state.when, str(state.comment))
# Fetch the XML meta of a user account; returns it as a string, or prints a
# not-found notice on HTTPError (the try/http_GET lines are elided here).
2806 def get_user_meta(apiurl, user):
2807     u = makeurl(apiurl, ['person', quote_plus(user)])
2810         return ''.join(f.readlines())
2811     except urllib2.HTTPError:
2812         print 'user \'%s\' not found' % user
2816 def get_user_data(apiurl, user, *tags):
2817     """get specified tags from the user meta"""
2818     meta = get_user_meta(apiurl, user)
2821             root = ET.fromstring(meta)
# Only collect tags that exist and have non-empty text content.
2824                     if root.find(tag).text != None:
2825                         data.append(root.find(tag).text)
# root.find(tag) returns None for an unknown tag, so .text raises
# AttributeError — used here as the "invalid tag" signal.
2829         except AttributeError:
2830             # this part is reached if the tags tuple contains an invalid tag
2831             print 'The xml file for user \'%s\' seems to be broken' % user
# Stream a URL into filename atomically: write to a sibling tempfile first,
# then move it into place; optionally set the file's mtime afterwards.
2836 def download(url, filename, progress_obj = None, mtime = None):
2837     import tempfile, shutil
2840         prefix = os.path.basename(filename)
2841         (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
# mkstemp creates 0600 files; relax to world-readable like a normal download.
2842         os.chmod(tmpfile, 0644)
2844             o = os.fdopen(fd, 'wb')
2845             for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
# Atomic-ish replace: the complete tempfile is moved over the target name.
2848             shutil.move(tmpfile, filename)
# -1 keeps atime unchanged; only the modification time is set.
2857         os.utime(filename, (-1, mtime))
# Download one source file of a package (optionally at a given revision)
# to targetfilename (defaults to the remote filename).
2859 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None):
2860     targetfilename = targetfilename or filename
2863         query = { 'rev': revision }
# filename is URL-encoded since package file names may contain odd chars.
2864     u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2865     download(u, targetfilename, progress_obj, mtime)
# Download a built binary from the build results of prj/repo/arch; when no
# package is given the repository-wide '_repository' area is used.
2867 def get_binary_file(apiurl, prj, repo, arch,
2870                     target_filename = None,
2871                     target_mtime = None,
2872                     progress_meter = False):
# Optional text progress meter (osc's meter module, imported lazily).
2875         from meter import TextMeter
2876         progress_obj = TextMeter()
2878     target_filename = target_filename or filename
2880     where = package or '_repository'
2881     u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2882     download(u, target_filename, progress_obj, target_mtime)
# Return the hex MD5 digest of a string, using hashlib when available and
# falling back to the old md5 module (the try/except import is elided).
# NOTE(review): the parameter name shadows the builtin 'str'.
2884 def dgst_from_string(str):
2885     # Python 2.5 deprecates the md5 module
2886     # Python 2.4 doesn't have hashlib yet
2889         md5_hash = hashlib.md5()
2892         md5_hash = md5.new()
2893     md5_hash.update(str)
2894     return md5_hash.hexdigest()
# Interior of dgst(file) — the def line and the hashlib/md5 selection are
# elided in this listing. Reads the file in BUFSIZE chunks (presumably in a
# loop whose header is elided) and returns the accumulated hex digest.
2898     #if not os.path.exists(file):
2908     f = open(file, 'rb')
2910         buf = f.read(BUFSIZE)
2913     return s.hexdigest()
2918 """return true if a string is binary data using diff's heuristic"""
2919 if s and '\0' in s[:4096]:
def binary_file(fn):
    """Return True if the file named fn looks like binary data.

    Reads the first 4096 bytes and delegates the heuristic to binary().
    The file handle is closed deterministically (the original left it to
    garbage collection, leaking the descriptor until then).
    """
    f = open(fn, 'rb')
    # try/finally instead of 'with' keeps Python 2.4 compatibility,
    # which this file still targets (see the hashlib/md5 fallback).
    try:
        head = f.read(4096)
    finally:
        f.close()
    return binary(head)
# Produce a unified diff between a stored file and its working-copy version.
# NOTE(review): numbering gaps elide the default handling, the file reads
# (s1/s2) and parts of the difflib call.
2929 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2931     This method diffs oldfilename against filename (so filename will
2932     be shown as the new file).
2933     The variable origfilename is used if filename and oldfilename differ
2934     in their names (for instance if a tempfile is used for filename etc.)
2940         oldfilename = filename
# Default old side: the pristine copy in the .osc store directory.
2943         olddir = os.path.join(dir, store)
2945     if not origfilename:
2946         origfilename = filename
2948     file1 = os.path.join(olddir, oldfilename)   # old/stored original
2949     file2 = os.path.join(dir, filename)         # working copy
2951     f1 = open(file1, 'rb')
2955     f2 = open(file2, 'rb')
# Binary content gets a one-line notice instead of a text diff.
2959     if binary(s1) or binary (s2):
2960         d = ['Binary file %s has changed\n' % origfilename]
2963         d = difflib.unified_diff(\
2966             fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2967             tofile = '%s\t(working copy)' % origfilename)
2969         # if file doesn't end with newline, we need to append one in the diff result
2971         for i, line in enumerate(d):
2972             if not line.endswith('\n'):
2973                 d[i] += '\n\\ No newline at end of file'
# Build a full diff for a working copy 'wc': classify files into changed/
# added/removed, optionally check out a comparison revision into a tempdir,
# and collect per-file unified diffs. Numbering gaps elide several branch
# headers; statement order here is significant — code left byte-identical.
2979 def make_diff(wc, revision):
2985     diff_hdr = 'Index: %s\n'
2986     diff_hdr += '===================================================================\n'
2988     olddir = os.getcwd()
# Branch 1 (no comparison revision): classify only the files in wc.todo.
2992             for file in wc.todo:
2993                 if file in wc.skipped:
2995                 if file in wc.filenamelist+wc.filenamelist_unvers:
2996                     state = wc.status(file)
2998                         added_files.append(file)
3000                         removed_files.append(file)
3001                     elif state == 'M' or state == 'C':
3002                         changed_files.append(file)
3004                     diff.append('osc: \'%s\' is not under version control' % file)
# Branch 2: no todo list — classify every known file in the working copy.
3006             for file in wc.filenamelist+wc.filenamelist_unvers:
3007                 if file in wc.skipped:
3009                 state = wc.status(file)
3010                 if state == 'M' or state == 'C':
3011                     changed_files.append(file)
3013                     added_files.append(file)
3015                     removed_files.append(file)
# Branch 3 (diff against a revision): check out that revision next to us.
3017         tmpdir  = tempfile.mkdtemp(str(revision), wc.name)
3019         init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
3020         cmp_pac = Package(tmpdir)
3022             for file in wc.todo:
3023                 if file in cmp_pac.skipped:
3025                 if file in cmp_pac.filenamelist:
3026                     if file in wc.filenamelist:
3027                         changed_files.append(file)
3029                         diff.append('osc: \'%s\' is not under version control' % file)
3031                     diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
# Without a todo list the Package comparison does the classification.
3033             changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
3035     for file in changed_files:
3036         diff.append(diff_hdr % file)
3038             diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
3040             cmp_pac.updatefile(file, revision)
3041             diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3042                                              cmp_pac.absdir, file))
# Added/removed files are diffed against an empty tempfile so the whole
# content shows up as +/- lines.
3043     (fd, tmpfile) = tempfile.mkstemp()
3044     for file in added_files:
3045         diff.append(diff_hdr % file)
3047             diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3048                                              os.path.dirname(tmpfile), file))
3050             diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3051                                              os.path.dirname(tmpfile), file))
3053     # FIXME: this is ugly but it cannot be avoided atm
3054     # if a file is deleted via "osc rm file" we should keep the storefile.
3056     if cmp_pac == None and removed_files:
3057         tmpdir  = tempfile.mkdtemp()
3059         init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3060         tmp_pac = Package(tmpdir)
3063     for file in removed_files:
3064         diff.append(diff_hdr % file)
3066             tmp_pac.updatefile(file, tmp_pac.rev)
3067             diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3068                                              wc.rev, file, tmp_pac.storedir, file))
3070             cmp_pac.updatefile(file, revision)
3071             diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3072                                              revision, file, cmp_pac.storedir, file))
# Clean up the temporary comparison checkouts.
3076         delete_dir(cmp_pac.absdir)
3078         delete_dir(tmp_pac.absdir)
# Ask the server to compute a diff between two package revisions
# (POST /source/<new_prj>/<new_pkg>?cmd=diff); optional parameters are only
# added to the query when set. The POST/return lines are elided here.
3082 def server_diff(apiurl,
3083                 old_project, old_package, old_revision,
3084                 new_project, new_package, new_revision, unified=False, missingok=False):
3085     query = {'cmd': 'diff', 'expand': '1'}
3087         query['oproject'] = old_project
3089         query['opackage'] = old_package
3091         query['orev'] = old_revision
3093         query['rev'] = new_revision
3095         query['unified'] = 1
3097         query['missingok'] = 1
3099     u = makeurl(apiurl, ['source', new_project, new_package], query=query)
3105 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3107     creates the plain directory structure for a package dir.
3108     The 'apiurl' parameter is needed for the project dir initialization.
3109     The 'project' and 'package' parameters specify the name of the
3110     project and the package. The optional 'pathname' parameter is used
3111     for printing out the message that a new dir was created (default: 'prj_dir/package').
3112     The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3114     prj_dir = prj_dir or project
3116     # FIXME: carefully test each patch component of prj_dir,
3117     # if we have a .osc/_files entry at that level.
3118     #        -> if so, we have a package/project clash,
3119     # and should rename this path component by appending '.proj'
3120     # and give user a warning message, to discourage such clashes
3122     pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
# Refuse to turn an existing package checkout into a project directory.
3123     if is_package_dir(prj_dir):
3124         # we want this to become a project directory,
3125         # but it already is a package directory.
3126         raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3128     if not is_project_dir(prj_dir):
3129         # this directory could exist as a parent direory for one of our earlier
3130         # checked out sub-projects. in this case, we still need to initialize it.
3131         print statfrmt('A', prj_dir)
3132         init_project_dir(apiurl, prj_dir, project)
# The reverse clash: the package path already holds a project checkout.
3134     if is_project_dir(os.path.join(prj_dir, package)):
3135         # the thing exists, but is a project directory and not a package directory
3136         # FIXME: this should be a warning message to discourage package/project clashes
3137         raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3139     if not os.path.exists(os.path.join(prj_dir, package)):
3140         print statfrmt('A', pathname)
3141         os.mkdir(os.path.join(prj_dir, package))
# Also create the package's .osc store subdirectory.
3142         os.mkdir(os.path.join(prj_dir, package, store))
3144     return(os.path.join(prj_dir, package))
# Check out a package into prj_dir/package: verify it exists, create the
# directory skeleton, initialize the store, and download all files.
# Numbering gaps elide several conditionals; code left byte-identical.
3147 def checkout_package(apiurl, project, package,
3148                      revision=None, pathname=None, prj_obj=None,
3149                      expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3151     # the project we're in might be deleted.
3152     # that'll throw an error then.
3153         olddir = os.getcwd()
3155         olddir = os.environ.get("PWD")
# Windows cannot have ':' in paths; project names use it as a separator.
3160     if sys.platform[:3] == 'win':
3161         prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3163         if conf.config['checkout_no_colon']:
3164             prj_dir = prj_dir.replace(':', '/')
3167         pathname = getTransActPath(os.path.join(prj_dir, package))
3169     # before we create directories and stuff, check if the package actually
# This raises if the package does not exist, aborting before any mkdir.
3171     show_package_meta(apiurl, project, package)
3175         # try to read from the linkinfo
3176         # if it is a link we use the xsrcmd5 as the revision to be
3179             x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3181             x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3186     os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3187     init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3189     p = Package(package, progress_obj=progress_obj)
3192     for filename in p.filenamelist:
3193         if filename in p.skipped:
# _service: files are only fetched when service_files was requested.
3195         if service_files or not filename.startswith('_service:'):
3196             p.updatefile(filename, revision)
3197             # print 'A   ', os.path.join(project, package, filename)
3198             print statfrmt('A', os.path.join(pathname, filename))
3199     if conf.config['do_package_tracking']:
3200         # check if we can re-use an existing project object
3202             prj_obj = Project(os.getcwd())
3203         prj_obj.set_state(p.name, ' ')
3204         prj_obj.write_packages()
3208 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3209                      dst_userid = None, keep_develproject = False):
3211     update pkgmeta with new new_name and new_prj and set calling user as the
3212     only maintainer (unless keep_maintainers is set). Additionally remove the
3213     develproject entry (<devel />) unless keep_develproject is true.
3215     root = ET.fromstring(''.join(pkgmeta))
3216     root.set('name', new_name)
3217     root.set('project', new_prj)
# Strip existing <person> maintainer entries (loop body elided in listing,
# presumably root.remove(person)).
3218     if not keep_maintainers:
3219         for person in root.findall('person'):
3221     if not keep_develproject:
3222         for dp in root.findall('devel'):
3224     return ET.tostring(root)
3226 def link_to_branch(apiurl, project, package):
3228     convert a package with a _link + project.diff to a branch
# Server-side conversion via cmd=linktobranch; only valid when the package
# actually contains a _link file, otherwise raise an OscIOError.
3231     if '_link' in meta_get_filelist(apiurl, project, package):
3232         u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3235         raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3237 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3239     create a linked package
3240      - "src" is the original package
3241      - "dst" is the "link" package that we are creating here
# Reuse existing destination meta when present; otherwise (elided except
# branch) clone the source meta with name/project rewritten.
3246         dst_meta = meta_exists(metatype='pkg',
3247                                path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3249                                create_new=False, apiurl=conf.config['apiurl'])
3250         root = ET.fromstring(''.join(dst_meta))
3251         print root.attrib['project']
3252         if root.attrib['project'] != dst_project:
3253             # The source comes from a different project via a project link, we need to create this instance
3259         src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3260         dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
# Optionally add <publish><disable/> so mirrors do not publish the link.
3264         root = ET.fromstring(''.join(dst_meta))
3265         elm = root.find('publish')
3267             elm = ET.SubElement(root, 'publish')
3269         ET.SubElement(elm, 'disable')
3270         dst_meta = ET.tostring(root)
3274               path_args=(dst_project, dst_package),
3276     # create the _link file
3277     # but first, make sure not to overwrite an existing one
3278     if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
# force overwrites an existing _link; otherwise abort with a message.
3280             print >>sys.stderr, 'forced overwrite of existing _link file'
3283             print >>sys.stderr, '_link file already exists...! Aborting'
3287         rev = 'rev="%s"' % rev
3292         cicount = 'cicount="%s"' % cicount
3296     print 'Creating _link...',
3297     link_template = """\
3298 <link project="%s" package="%s" %s %s>
3300 <!-- <apply name="patch" /> apply a patch on the source directory  -->
3301 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3302 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3303 <!-- <delete>filename</delete> delete a file -->
3306 """ % (src_project, src_package, rev, cicount)
3308     u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3309     http_PUT(u, data=link_template)
3312 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3315 - "src" is the original package
3316 - "dst" is the "aggregate" package that we are creating here
3317 - "map" is a dictionary SRC => TARGET repository mappings
3322 dst_meta = meta_exists(metatype='pkg',
3323 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3325 create_new=False, apiurl=conf.config['apiurl'])
3327 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3328 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3333 root = ET.fromstring(''.join(dst_meta))
3334 elm = root.find('publish')
3336 elm = ET.SubElement(root, 'publish')
3338 ET.SubElement(elm, 'disable')
3339 dst_meta = ET.tostring(root)
3342 path_args=(dst_project, dst_package),
3345 # create the _aggregate file
3346 # but first, make sure not to overwrite an existing one
3347 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3349 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3352 print 'Creating _aggregate...',
3353 aggregate_template = """\
3355 <aggregate project="%s">
3357 for tgt, src in repo_map.iteritems():
3358 aggregate_template += """\
3359 <repository target="%s" source="%s" />
3362 aggregate_template += """\
3363 <package>%s</package>
3366 """ % ( src_package)