1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but it needs to stay to avoid breaking tools which use the osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Build Service and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E', # obsolete with OBS 2.0
201 # our own XML writer function to write nicely indented XML with correct syntax
202 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
203 from xml.dom import minidom
204 def fixed_writexml(self, writer, indent="", addindent="", newl=""):
205 # indent = current indentation
206 # addindent = indentation to add to higher levels
207 # newl = newline string
208 writer.write(indent+"<" + self.tagName)
210 attrs = self._get_attributes()
211 a_names = attrs.keys()
214 for a_name in a_names:
215 writer.write(" %s=\"" % a_name)
216 minidom._write_data(writer, attrs[a_name].value)
219 if len(self.childNodes) == 1 \
220 and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
222 self.childNodes[0].writexml(writer, "", "", "")
223 writer.write("</%s>%s" % (self.tagName, newl))
225 writer.write(">%s"%(newl))
226 for node in self.childNodes:
227 node.writexml(writer,indent+addindent,addindent,newl)
228 writer.write("%s</%s>%s" % (indent,self.tagName,newl))
230 writer.write("/>%s"%(newl))
231 # replace minidom's function with ours
232 minidom.Element.writexml = fixed_writexml
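# Illustrative sketch (not part of the original code): with the patched
# writexml() above, minidom's pretty printer no longer pads text-only
# elements with extra whitespace, so small metadata documents round-trip
# with e.g. <title>foo</title> kept on a single line.
def _example_fixed_writexml():
    doc = minidom.parseString('<package><title>foo</title></package>')
    # the patched writexml is picked up by toprettyxml()
    print doc.documentElement.toprettyxml(indent='  ')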
235 # os.path.samefile is available only under Unix
236 def os_path_samefile(path1, path2):
238 return os.path.samefile(path1, path2)
240 return os.path.realpath(path1) == os.path.realpath(path2)
243 """represent a file, including its metadata"""
244 def __init__(self, name, md5, size, mtime):
254 """Source service content
257 """creates an empty serviceinfo instance"""
260 def read(self, serviceinfo_node):
261 """read in the source services <services> element passed as
264 if serviceinfo_node is None:
267 services = serviceinfo_node.findall('service')
269 for service in services:
270 name = service.get('name')
272 for param in service.findall('param'):
273 option = param.get('name', None)
275 name += " --" + option + " '" + value + "'"
276 self.commands.append(name)
278 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
279 raise oscerr.APIError(msg)
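# Illustrative sketch (assumed input, not taken from this file): a _service
# file along the lines of
#   <services>
#     <service name="download_url">
#       <param name="host">example.org</param>
#       <param name="path">/pub/hello.tar.gz</param>
#     </service>
#   </services>
# is turned by read() into the command string
#   "download_url --host 'example.org' --path '/pub/hello.tar.gz'"
# which execute() later runs from /usr/lib/obs/service/.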
281 def addVerifyFile(self, serviceinfo_node, filename):
284 f = open(filename, 'r')
285 digest = hashlib.sha256(f.read()).hexdigest()
289 s = ET.Element( "service", name="verify_file" )
290 ET.SubElement(s, "param", name="file").text = filename
291 ET.SubElement(s, "param", name="verifier").text = "sha256"
292 ET.SubElement(s, "param", name="checksum").text = digest
298 def addDownloadUrl(self, serviceinfo_node, url_string):
299 from urlparse import urlparse
300 url = urlparse( url_string )
301 protocol = url.scheme
306 s = ET.Element( "service", name="download_url" )
307 ET.SubElement(s, "param", name="protocol").text = protocol
308 ET.SubElement(s, "param", name="host").text = host
309 ET.SubElement(s, "param", name="path").text = path
315 def execute(self, dir):
318 for call in self.commands:
319 temp_dir = tempfile.mkdtemp()
320 name = call.split(None, 1)[0]
321 if not os.path.exists("/usr/lib/obs/service/"+name):
322 msg = "ERROR: service is not installed !"
323 msg += "Can maybe solved with: zypper in obs-server-" + name
324 raise oscerr.APIError(msg)
325 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
326 ret = subprocess.call(c, shell=True)
328 print "ERROR: service call failed: " + c
330 for file in os.listdir(temp_dir):
331 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
335 """linkinfo metadata (which is part of the xml representing a directory
338 """creates an empty linkinfo instance"""
348 def read(self, linkinfo_node):
349 """read in the linkinfo metadata from the <linkinfo> element passed as
351 If the passed element is None, the method does nothing.
353 if linkinfo_node is None:
355 self.project = linkinfo_node.get('project')
356 self.package = linkinfo_node.get('package')
357 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
358 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
359 self.srcmd5 = linkinfo_node.get('srcmd5')
360 self.error = linkinfo_node.get('error')
361 self.rev = linkinfo_node.get('rev')
362 self.baserev = linkinfo_node.get('baserev')
365 """returns True if the linkinfo is not empty, otherwise False"""
366 if self.xsrcmd5 or self.lsrcmd5:
370 def isexpanded(self):
371 """returns True if the package is an expanded link"""
372 if self.lsrcmd5 and not self.xsrcmd5:
377 """returns True if the link is in error state (could not be applied)"""
383 """return an informatory string representation"""
384 if self.islink() and not self.isexpanded():
385 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
386 % (self.project, self.package, self.xsrcmd5, self.rev)
387 elif self.islink() and self.isexpanded():
389 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
390 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
392 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
393 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
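# Quick reference (derived from the methods above): which linkinfo attributes
# are present for the different link states.
#   xsrcmd5 set, lsrcmd5 unset  -> raw (unexpanded) link
#   lsrcmd5 set, xsrcmd5 unset  -> expanded link
#   error set                   -> broken link (could not be applied)
#   neither md5 set             -> not a link at all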
399 """represent a project directory, holding packages"""
400 def __init__(self, dir, getPackageList=True, progress_obj=None):
403 self.absdir = os.path.abspath(dir)
404 self.progress_obj = progress_obj
406 self.name = store_read_project(self.dir)
407 self.apiurl = store_read_apiurl(self.dir)
410 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
412 self.pacs_available = []
414 if conf.config['do_package_tracking']:
415 self.pac_root = self.read_packages().getroot()
416 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
417 self.pacs_excluded = [ i for i in os.listdir(self.dir)
418 for j in conf.config['exclude_glob']
419 if fnmatch.fnmatch(i, j) ]
420 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
421 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
422 # in the self.pacs_broken list
423 self.pacs_broken = []
424 for p in self.pacs_have:
425 if not os.path.isdir(os.path.join(self.absdir, p)):
426 # all states will be replaced with the '!'-state
427 # (unless it is already marked as deleted ('D'-state))
428 self.pacs_broken.append(p)
430 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
432 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
434 def checkout_missing_pacs(self, expand_link=False):
435 for pac in self.pacs_missing:
437 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
438 # pac is not under version control but a local file/dir exists
439 msg = 'can\'t add package \'%s\': Object already exists' % pac
440 raise oscerr.PackageExists(self.name, pac, msg)
442 print 'checking out new package %s' % pac
443 checkout_package(self.apiurl, self.name, pac, \
444 pathname=getTransActPath(os.path.join(self.dir, pac)), \
445 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
447 def set_state(self, pac, state):
448 node = self.get_package_node(pac)
450 self.new_package_entry(pac, state)
452 node.attrib['state'] = state
454 def get_package_node(self, pac):
455 for node in self.pac_root.findall('package'):
456 if pac == node.get('name'):
460 def del_package_node(self, pac):
461 for node in self.pac_root.findall('package'):
462 if pac == node.get('name'):
463 self.pac_root.remove(node)
465 def get_state(self, pac):
466 node = self.get_package_node(pac)
468 return node.get('state')
472 def new_package_entry(self, name, state):
473 ET.SubElement(self.pac_root, 'package', name=name, state=state)
475 def read_packages(self):
476 packages_file = os.path.join(self.absdir, store, '_packages')
477 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
478 return ET.parse(packages_file)
480 # scan project for existing packages and migrate them
482 for data in os.listdir(self.dir):
483 pac_dir = os.path.join(self.absdir, data)
484 # we cannot use self.pacs_available because we cannot guarantee that the package list
485 # was fetched from the server
486 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
487 and Package(pac_dir).name == data:
488 cur_pacs.append(ET.Element('package', name=data, state=' '))
489 store_write_initial_packages(self.absdir, self.name, cur_pacs)
490 return ET.parse(os.path.join(self.absdir, store, '_packages'))
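# Illustrative sketch (assumption about the on-disk format): the .osc/_packages
# file read above is a small XML document along the lines of
#   <project name="home:user">
#     <package name="hello" state=" "/>
#     <package name="world" state="A"/>
#   </project>
# where state uses the same letters as 'osc status' ('A' added, 'D' deleted,
# ' ' unmodified).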
492 def write_packages(self):
493 # TODO: should we only modify the existing file instead of overwriting?
494 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
496 def addPackage(self, pac):
498 for i in conf.config['exclude_glob']:
499 if fnmatch.fnmatch(pac, i):
500 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
501 raise oscerr.OscIOError(None, msg)
502 state = self.get_state(pac)
503 if state is None or state == 'D':
504 self.new_package_entry(pac, 'A')
505 self.write_packages()
506 # sometimes the new pac doesn't exist in the list because
507 # it would take too much time to update all data structs regularly
508 if pac in self.pacs_unvers:
509 self.pacs_unvers.remove(pac)
511 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
513 def delPackage(self, pac, force = False):
514 state = self.get_state(pac.name)
516 if state == ' ' or state == 'D':
518 for file in pac.filenamelist + pac.filenamelist_unvers:
519 filestate = pac.status(file)
520 if filestate == 'M' or filestate == 'C' or \
521 filestate == 'A' or filestate == '?':
524 del_files.append(file)
525 if can_delete or force:
526 for file in del_files:
527 pac.delete_localfile(file)
528 if pac.status(file) != '?':
529 pac.delete_storefile(file)
530 # this is not really necessary
531 pac.put_on_deletelist(file)
532 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
533 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
534 pac.write_deletelist()
535 self.set_state(pac.name, 'D')
536 self.write_packages()
538 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
541 delete_dir(pac.absdir)
542 self.del_package_node(pac.name)
543 self.write_packages()
544 print statfrmt('D', pac.name)
546 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
548 print 'package is not under version control'
550 print 'unsupported state'
552 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
555 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
557 # we need to make sure that the _packages file will be written (even if an exception
560 # update complete project
561 # packages which no longer exist upstream
562 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
564 for pac in upstream_del:
565 p = Package(os.path.join(self.dir, pac))
566 self.delPackage(p, force = True)
567 delete_storedir(p.storedir)
572 self.pac_root.remove(self.get_package_node(p.name))
573 self.pacs_have.remove(pac)
575 for pac in self.pacs_have:
576 state = self.get_state(pac)
577 if pac in self.pacs_broken:
578 if self.get_state(pac) != 'A':
579 checkout_package(self.apiurl, self.name, pac,
580 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
581 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
584 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
586 if expand_link and p.islink() and not p.isexpanded():
589 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
591 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
594 rev = p.linkinfo.xsrcmd5
595 print 'Expanding to rev', rev
596 elif unexpand_link and p.islink() and p.isexpanded():
597 rev = p.linkinfo.lsrcmd5
598 print 'Unexpanding to rev', rev
599 elif p.islink() and p.isexpanded():
601 print 'Updating %s' % p.name
602 p.update(rev, service_files)
606 # TODO: Package::update has to be fixed to behave like svn does
607 if pac in self.pacs_broken:
608 checkout_package(self.apiurl, self.name, pac,
609 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
610 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
612 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
613 elif state == 'A' and pac in self.pacs_available:
614 # file/dir called pac already exists and is under version control
615 msg = 'can\'t add package \'%s\': Object already exists' % pac
616 raise oscerr.PackageExists(self.name, pac, msg)
621 print 'unexpected state.. package \'%s\'' % pac
623 self.checkout_missing_pacs(expand_link=not unexpand_link)
625 self.write_packages()
627 def commit(self, pacs = (), msg = '', files = {}, validators = None):
632 if files.has_key(pac):
634 state = self.get_state(pac)
636 self.commitNewPackage(pac, msg, todo)
638 self.commitDelPackage(pac)
640 # display the correct dir when sending the changes
641 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
644 p = Package(os.path.join(self.dir, pac))
646 p.commit(msg, validators=validators)
647 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
648 print 'osc: \'%s\' is not under version control' % pac
649 elif pac in self.pacs_broken:
650 print 'osc: \'%s\' package not found' % pac
652 self.commitExtPackage(pac, msg, todo)
654 self.write_packages()
656 # if we have packages marked as '!' we cannot commit
657 for pac in self.pacs_broken:
658 if self.get_state(pac) != 'D':
659 msg = 'commit failed: package \'%s\' is missing' % pac
660 raise oscerr.PackageMissing(self.name, pac, msg)
662 for pac in self.pacs_have:
663 state = self.get_state(pac)
666 Package(os.path.join(self.dir, pac)).commit(msg, validators=validators)
668 self.commitDelPackage(pac)
670 self.commitNewPackage(pac, msg)
672 self.write_packages()
674 def commitNewPackage(self, pac, msg = '', files = []):
675 """creates and commits a new package if it does not exist on the server"""
676 if pac in self.pacs_available:
677 print 'package \'%s\' already exists' % pac
679 user = conf.get_apiurl_usr(self.apiurl)
680 edit_meta(metatype='pkg',
681 path_args=(quote_plus(self.name), quote_plus(pac)),
686 # display the correct dir when sending the changes
688 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
692 p = Package(os.path.join(self.dir, pac))
694 print statfrmt('Sending', os.path.normpath(p.dir))
696 self.set_state(pac, ' ')
699 def commitDelPackage(self, pac):
700 """deletes a package on the server and in the working copy"""
702 # display the correct dir when sending the changes
703 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
706 pac_dir = os.path.join(self.dir, pac)
707 p = Package(os.path.join(self.dir, pac))
708 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
709 delete_storedir(p.storedir)
715 pac_dir = os.path.join(self.dir, pac)
716 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
717 print statfrmt('Deleting', getTransActPath(pac_dir))
718 delete_package(self.apiurl, self.name, pac)
719 self.del_package_node(pac)
721 def commitExtPackage(self, pac, msg, files = []):
722 """commits a package from an external project"""
723 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
726 pac_path = os.path.join(self.dir, pac)
728 project = store_read_project(pac_path)
729 package = store_read_package(pac_path)
730 apiurl = store_read_apiurl(pac_path)
731 if meta_exists(metatype='pkg',
732 path_args=(quote_plus(project), quote_plus(package)),
734 create_new=False, apiurl=apiurl):
735 p = Package(pac_path)
739 user = conf.get_apiurl_usr(self.apiurl)
740 edit_meta(metatype='pkg',
741 path_args=(quote_plus(project), quote_plus(package)),
746 p = Package(pac_path)
752 r.append('*****************************************************')
753 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
754 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
755 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
756 r.append('*****************************************************')
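# Illustrative sketch (hypothetical paths): typical use of the Project class
# from a checked-out project working copy.
#   prj = Project('.', getPackageList=False)
#   print prj.name                   # project name read from .osc/_project
#   print ', '.join(prj.pacs_have)   # packages present in the working copy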
762 """represent a package (its directory) and read/keep/write its metadata"""
763 def __init__(self, workingdir, progress_obj=None, limit_size=None):
764 self.dir = workingdir
765 self.absdir = os.path.abspath(self.dir)
766 self.storedir = os.path.join(self.absdir, store)
767 self.progress_obj = progress_obj
768 self.limit_size = limit_size
769 if limit_size == 0:
770 self.limit_size = None
772 check_store_version(self.dir)
774 self.prjname = store_read_project(self.dir)
775 self.name = store_read_package(self.dir)
776 self.apiurl = store_read_apiurl(self.dir)
778 self.update_datastructs()
782 self.todo_delete = []
785 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
786 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
789 def addfile(self, n):
790 st = os.stat(os.path.join(self.dir, n))
791 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
793 def delete_file(self, n, force=False):
794 """deletes a file if possible and marks the file as deleted"""
797 state = self.status(n)
801 if state in ['?', 'A', 'M'] and not force:
802 return (False, state)
803 self.delete_localfile(n)
805 self.put_on_deletelist(n)
806 self.write_deletelist()
808 self.delete_storefile(n)
811 def delete_storefile(self, n):
812 try: os.unlink(os.path.join(self.storedir, n))
815 def delete_localfile(self, n):
816 try: os.unlink(os.path.join(self.dir, n))
819 def put_on_deletelist(self, n):
820 if n not in self.to_be_deleted:
821 self.to_be_deleted.append(n)
823 def put_on_conflictlist(self, n):
824 if n not in self.in_conflict:
825 self.in_conflict.append(n)
827 def clear_from_conflictlist(self, n):
828 """delete an entry from the file, and remove the file if it would be empty"""
829 if n in self.in_conflict:
831 filename = os.path.join(self.dir, n)
832 storefilename = os.path.join(self.storedir, n)
833 myfilename = os.path.join(self.dir, n + '.mine')
834 if self.islinkrepair() or self.ispulled():
835 upfilename = os.path.join(self.dir, n + '.new')
837 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
840 os.unlink(myfilename)
841 # the working copy may be updated, so the .r* ending may be obsolete...
843 os.unlink(upfilename)
844 if self.islinkrepair() or self.ispulled():
845 os.unlink(os.path.join(self.dir, n + '.old'))
849 self.in_conflict.remove(n)
851 self.write_conflictlist()
853 def write_sizelimit(self):
854 if self.size_limit and self.size_limit <= 0:
856 os.unlink(os.path.join(self.storedir, '_size_limit'))
860 fname = os.path.join(self.storedir, '_size_limit')
862 f.write(str(self.size_limit))
865 def write_deletelist(self):
866 if len(self.to_be_deleted) == 0:
868 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
872 fname = os.path.join(self.storedir, '_to_be_deleted')
874 f.write('\n'.join(self.to_be_deleted))
878 def delete_source_file(self, n):
879 """delete local a source file"""
880 self.delete_localfile(n)
881 self.delete_storefile(n)
883 def delete_remote_source_file(self, n):
884 """delete a remote source file (e.g. from the server)"""
886 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
889 def put_source_file(self, n):
891 # escaping '+' in the URL path (note: not in the URL query string) is
892 # only a workaround for ruby on rails, which swallows it otherwise
894 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
895 http_PUT(u, file = os.path.join(self.dir, n))
897 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
899 def commit(self, msg='', validators=None):
900 # commit only if the upstream revision is the same as the working copy's
901 upstream_rev = self.latest_rev()
902 if self.rev != upstream_rev:
903 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
906 self.todo = self.filenamelist_unvers + self.filenamelist
908 pathn = getTransActPath(self.dir)
911 for validator in os.listdir(validators):
913 p = subprocess.Popen([validators+"/"+validator], close_fds=True)
915 raise oscerr.RuntimeError(p.stdout, validator )
917 have_conflicts = False
918 for filename in self.todo:
919 if not filename.startswith('_service:') and not filename.startswith('_service_'):
920 st = self.status(filename)
922 self.todo.remove(filename)
923 elif st == 'A' or st == 'M':
924 self.todo_send.append(filename)
925 print statfrmt('Sending', os.path.join(pathn, filename))
927 self.todo_delete.append(filename)
928 print statfrmt('Deleting', os.path.join(pathn, filename))
930 have_conflicts = True
933 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
936 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
937 print 'nothing to do for package %s' % self.name
940 if self.islink() and self.isexpanded():
941 # resolve the link into the upload revision
942 # XXX: do this always?
943 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
944 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
947 print 'Transmitting file data ',
949 for filename in self.todo_delete:
950 # do not touch local files on commit --
951 # delete remotely instead
952 self.delete_remote_source_file(filename)
953 self.to_be_deleted.remove(filename)
954 for filename in self.todo_send:
955 sys.stdout.write('.')
957 self.put_source_file(filename)
959 # all source files are committed - now comes the log
960 query = { 'cmd' : 'commit',
962 'user' : conf.get_apiurl_usr(self.apiurl),
964 if self.islink() and self.isexpanded():
965 query['keeplink'] = '1'
966 if conf.config['linkcontrol'] or self.isfrozen():
967 query['linkrev'] = self.linkinfo.srcmd5
969 query['repairlink'] = '1'
970 query['linkrev'] = self.get_pulled_srcmd5()
971 if self.islinkrepair():
972 query['repairlink'] = '1'
973 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
976 # delete upload revision
978 query = { 'cmd': 'deleteuploadrev' }
979 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
985 root = ET.parse(f).getroot()
986 self.rev = int(root.get('rev'))
988 print 'Committed revision %s.' % self.rev
991 os.unlink(os.path.join(self.storedir, '_pulled'))
992 if self.islinkrepair():
993 os.unlink(os.path.join(self.storedir, '_linkrepair'))
994 self.linkrepair = False
995 # XXX: mark package as invalid?
996 print 'The source link has been repaired. This directory can now be removed.'
997 if self.islink() and self.isexpanded():
998 self.update_local_filesmeta(revision=self.latest_rev())
1000 self.update_local_filesmeta()
1001 self.write_deletelist()
1002 self.update_datastructs()
1004 if self.filenamelist.count('_service'):
1005 print 'The package contains a source service.'
1006 for filename in self.todo:
1007 if filename.startswith('_service:') and os.path.exists(filename):
1008 os.unlink(filename) # remove local files
1009 print_request_list(self.apiurl, self.prjname, self.name)
1011 def write_conflictlist(self):
1012 if len(self.in_conflict) == 0:
1014 os.unlink(os.path.join(self.storedir, '_in_conflict'))
1018 fname = os.path.join(self.storedir, '_in_conflict')
1019 f = open(fname, 'w')
1020 f.write('\n'.join(self.in_conflict))
1024 def updatefile(self, n, revision):
1025 filename = os.path.join(self.dir, n)
1026 storefilename = os.path.join(self.storedir, n)
1027 mtime = self.findfilebyname(n).mtime
1029 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1030 revision=revision, progress_obj=self.progress_obj, mtime=mtime)
1032 shutil.copyfile(filename, storefilename)
1034 def mergefile(self, n):
1035 filename = os.path.join(self.dir, n)
1036 storefilename = os.path.join(self.storedir, n)
1037 myfilename = os.path.join(self.dir, n + '.mine')
1038 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1039 os.rename(filename, myfilename)
1041 mtime = self.findfilebyname(n).mtime
1042 get_source_file(self.apiurl, self.prjname, self.name, n,
1043 revision=self.rev, targetfilename=upfilename,
1044 progress_obj=self.progress_obj, mtime=mtime)
1046 if binary_file(myfilename) or binary_file(upfilename):
1048 shutil.copyfile(upfilename, filename)
1049 shutil.copyfile(upfilename, storefilename)
1050 self.in_conflict.append(n)
1051 self.write_conflictlist()
1055 # diff3 OPTIONS... MINE OLDER YOURS
1056 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1057 # we would rather use the subprocess module, but it is not available before 2.4
1058 ret = subprocess.call(merge_cmd, shell=True)
1060 # "An exit status of 0 means `diff3' was successful, 1 means some
1061 # conflicts were found, and 2 means trouble."
1063 # merge was successful... clean up
1064 shutil.copyfile(upfilename, storefilename)
1065 os.unlink(upfilename)
1066 os.unlink(myfilename)
1069 # unsuccessful merge
1070 shutil.copyfile(upfilename, storefilename)
1071 self.in_conflict.append(n)
1072 self.write_conflictlist()
1075 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1076 print >>sys.stderr, 'the command line was:'
1077 print >>sys.stderr, merge_cmd
1082 def update_local_filesmeta(self, revision=None):
1084 Update the local _files file in the store.
1085 It is replaced with the version pulled from upstream.
1087 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
1088 store_write_string(self.absdir, '_files', meta)
1090 def update_datastructs(self):
1092 Update the internal data structures if the local _files
1093 file has changed (e.g. update_local_filesmeta() has been
1097 files_tree = read_filemeta(self.dir)
1098 files_tree_root = files_tree.getroot()
1100 self.rev = files_tree_root.get('rev')
1101 self.srcmd5 = files_tree_root.get('srcmd5')
1103 self.linkinfo = Linkinfo()
1104 self.linkinfo.read(files_tree_root.find('linkinfo'))
1106 self.filenamelist = []
1109 for node in files_tree_root.findall('entry'):
1111 f = File(node.get('name'),
1113 int(node.get('size')),
1114 int(node.get('mtime')))
1115 if node.get('skipped'):
1116 self.skipped.append(f.name)
1118 # okay, a very old version of _files, which didn't contain any metadata yet...
1119 f = File(node.get('name'), '', 0, 0)
1120 self.filelist.append(f)
1121 self.filenamelist.append(f.name)
1123 self.to_be_deleted = read_tobedeleted(self.dir)
1124 self.in_conflict = read_inconflict(self.dir)
1125 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1126 self.size_limit = read_sizelimit(self.dir)
1128 # gather unversioned files, but ignore some stuff
1129 self.excluded = [ i for i in os.listdir(self.dir)
1130 for j in conf.config['exclude_glob']
1131 if fnmatch.fnmatch(i, j) ]
1132 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1133 if i not in self.excluded
1134 if i not in self.filenamelist ]
1137 """tells us if the package is a link (has 'linkinfo').
1138 A package with linkinfo is a package which links to another package.
1139 Returns True if the package is a link, otherwise False."""
1140 return self.linkinfo.islink()
1142 def isexpanded(self):
1143 """tells us if the package is a link which is expanded.
1144 Returns True if the package is expanded, otherwise False."""
1145 return self.linkinfo.isexpanded()
1147 def islinkrepair(self):
1148 """tells us if we are repairing a broken source link."""
1149 return self.linkrepair
1152 """tells us if we have pulled a link."""
1153 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1156 """tells us if the link is frozen."""
1157 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1159 def get_pulled_srcmd5(self):
1161 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1162 pulledrev = line.strip()
1165 def haslinkerror(self):
1167 Returns True if the link is broken otherwise False.
1168 If the package is not a link it returns False.
1170 return self.linkinfo.haserror()
1172 def linkerror(self):
1174 Returns an error message if the link is broken otherwise None.
1175 If the package is not a link it returns None.
1177 return self.linkinfo.error
1179 def update_local_pacmeta(self):
1181 Update the local _meta file in the store.
1182 It is replaced with the version pulled from upstream.
1184 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1185 store_write_string(self.absdir, '_meta', meta)
1187 def findfilebyname(self, n):
1188 for i in self.filelist:
1192 def status(self, n):
1196 file   storefile   file present  STATUS
1197 exists  exists      in _files
1200   x       x            x         ' '  if digest differs: 'M'
1201                                        and if in conflicts file: 'C'
1203   x       -            x         'D'  and listed in _to_be_deleted
1205   -       x            -         'D'  (when file in working copy is already deleted)
1206   -       -            x         'F'  (new in repo, but not yet in working copy)
1211 known_by_meta = False
1213 exists_in_store = False
1214 if n in self.filenamelist:
1215 known_by_meta = True
1216 if os.path.exists(os.path.join(self.absdir, n)):
1218 if os.path.exists(os.path.join(self.storedir, n)):
1219 exists_in_store = True
1222 if n in self.skipped:
1224 elif exists and not exists_in_store and known_by_meta:
1226 elif n in self.to_be_deleted:
1228 elif n in self.in_conflict:
1230 elif exists and exists_in_store and known_by_meta:
1231 #print self.findfilebyname(n)
1232 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1236 elif exists and not exists_in_store and not known_by_meta:
1238 elif exists and exists_in_store and not known_by_meta:
1240 elif not exists and exists_in_store and known_by_meta:
1242 elif not exists and not exists_in_store and known_by_meta:
1244 elif not exists and exists_in_store and not known_by_meta:
1246 elif not exists and not exists_in_store and not known_by_meta:
1247 # this case shouldn't happen (unless there was a typo in the filename etc.)
1248 raise IOError('osc: \'%s\' is not under version control' % n)
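# Illustrative sketch: callers usually feed status() straight into statfrmt()
# (defined further below) to get 'osc status'-like output.
#   p = Package('.')
#   for fname in p.filenamelist + p.filenamelist_unvers:
#       print statfrmt(p.status(fname), fname)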
1252 def comparePac(self, cmp_pac):
1254 This method compares the local filelist with
1255 the filelist of the passed package to see which files
1256 were added, removed and changed.
1263 for file in self.filenamelist+self.filenamelist_unvers:
1264 state = self.status(file)
1265 if file in self.skipped:
1267 if state == 'A' and (not file in cmp_pac.filenamelist):
1268 added_files.append(file)
1269 elif file in cmp_pac.filenamelist and state == 'D':
1270 removed_files.append(file)
1271 elif state == ' ' and not file in cmp_pac.filenamelist:
1272 added_files.append(file)
1273 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1274 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1275 changed_files.append(file)
1276 for file in cmp_pac.filenamelist:
1277 if not file in self.filenamelist:
1278 removed_files.append(file)
1279 removed_files = set(removed_files)
1281 return changed_files, added_files, removed_files
1283 def merge(self, otherpac):
1284 self.todo += otherpac.todo
1298 '\n '.join(self.filenamelist),
1306 def read_meta_from_spec(self, spec = None):
1311 # scan for spec files
1312 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1313 if len(speclist) == 1:
1314 specfile = speclist[0]
1315 elif len(speclist) > 1:
1316 print 'the following specfiles were found:'
1317 for file in speclist:
1319 print 'please specify one with --specfile'
1322 print 'no specfile was found - please specify one ' \
1326 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1327 self.summary = data['Summary']
1328 self.url = data['Url']
1329 self.descr = data['%description']
1332 def update_package_meta(self, force=False):
1334 for the updatepacmetafromspec subcommand
1335 the force argument suppresses the confirmation question
1338 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1340 root = ET.fromstring(m)
1341 root.find('title').text = self.summary
1342 root.find('description').text = ''.join(self.descr)
1343 url = root.find('url')
1345 url = ET.SubElement(root, 'url')
1348 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1349 mf = metafile(u, ET.tostring(root))
1352 print '*' * 36, 'old', '*' * 36
1354 print '*' * 36, 'new', '*' * 36
1355 print ET.tostring(root)
1357 repl = raw_input('Write? (y/N/e) ')
1368 def mark_frozen(self):
1369 store_write_string(self.absdir, '_frozenlink', '')
1371 print "The link in this package is currently broken. Checking"
1372 print "out the last working version instead; please use 'osc pull'"
1373 print "to repair the link."
1376 def unmark_frozen(self):
1377 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1378 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1380 def latest_rev(self):
1381 if self.islinkrepair():
1382 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1383 elif self.islink() and self.isexpanded():
1384 if self.isfrozen() or self.ispulled():
1385 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1388 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1391 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1393 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1396 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1399 def update(self, rev = None, service_files = False, limit_size = None):
1400 # save filelist and (modified) status before replacing the meta file
1401 saved_filenames = self.filenamelist
1402 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1406 self.limit_size = limit_size
1408 self.limit_size = read_sizelimit(self.dir)
1409 self.update_local_filesmeta(rev)
1410 self = Package(self.dir, progress_obj=self.progress_obj)
1412 # which files no longer exist upstream?
1413 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1415 pathn = getTransActPath(self.dir)
1417 for filename in saved_filenames:
1418 if filename in self.skipped:
1420 if not filename.startswith('_service:') and filename in disappeared:
1421 print statfrmt('D', os.path.join(pathn, filename))
1422 # keep file if it has local modifications
1423 if oldp.status(filename) == ' ':
1424 self.delete_localfile(filename)
1425 self.delete_storefile(filename)
1427 for filename in self.filenamelist:
1428 if filename in self.skipped:
1431 state = self.status(filename)
1432 if not service_files and filename.startswith('_service:'):
1434 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1435 # no merge necessary... local file is changed, but upstream isn't
1437 elif state == 'M' and filename in saved_modifiedfiles:
1438 status_after_merge = self.mergefile(filename)
1439 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1441 self.updatefile(filename, rev)
1442 print statfrmt('U', os.path.join(pathn, filename))
1444 self.updatefile(filename, rev)
1445 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1447 self.updatefile(filename, rev)
1448 print statfrmt('A', os.path.join(pathn, filename))
1449 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1450 self.updatefile(filename, rev)
1451 self.delete_storefile(filename)
1452 print statfrmt('U', os.path.join(pathn, filename))
1456 self.update_local_pacmeta()
1458 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1459 print 'At revision %s.' % self.rev
1461 if not service_files:
1462 self.run_source_services()
1464 def run_source_services(self):
1465 if self.filenamelist.count('_service'):
1466 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1469 si.execute(self.absdir)
1471 def prepare_filelist(self):
1472 """Prepare a list of files, which will be processed by process_filelist
1473 method. This allows easy modifications of a file list in commit
1477 self.todo = self.filenamelist + self.filenamelist_unvers
1481 for f in [f for f in self.todo if not os.path.isdir(f)]:
1483 status = self.status(f)
1488 ret += "%s %s %s\n" % (action, status, f)
1491 # Edit a filelist for package \'%s\'
1493 # l, leave = leave a file as is
1494 # r, remove = remove a file
1495 # a, add = add a file
1497 # If you remove a file from the list, it will be left unchanged
1498 # If you remove all files, the commit will be aborted""" % self.name
1502 def edit_filelist(self):
1503 """Opens a package list in editor for editing. This allows easy
1504 modifications of it just by simple text editing
1508 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1509 f = os.fdopen(fd, 'w')
1510 f.write(self.prepare_filelist())
1512 mtime_orig = os.stat(filename).st_mtime
1515 run_editor(filename)
1516 mtime = os.stat(filename).st_mtime
1517 if mtime_orig < mtime:
1518 filelist = open(filename).readlines()
1522 raise oscerr.UserAbort()
1524 return self.process_filelist(filelist)
1526 def process_filelist(self, filelist):
1527 """Process a filelist - it add/remove or leave files. This depends on
1528 user input. If no file is processed, it raises an ValueError
1532 for line in [l.strip() for l in filelist if (l[0] != "#" and l.strip() != '')]:
1534 foo = line.split(' ')
1536 action, state, name = (foo[0], ' ', foo[3])
1538 action, state, name = (foo[0], foo[1], foo[2])
1541 action = action.lower()
1544 if action in ('r', 'remove'):
1545 if self.status(name) == '?':
1547 if name in self.todo:
1548 self.todo.remove(name)
1550 self.delete_file(name, True)
1551 elif action in ('a', 'add'):
1552 if self.status(name) != '?':
1553 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1556 elif action in ('l', 'leave'):
1559 raise ValueError("Unknown action `%s'" % action)
1562 raise ValueError("Empty filelist")
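# Illustrative sketch (hypothetical file names): the edited filelist consumed
# by process_filelist() holds one "<action> <state> <name>" line per file, e.g.
#   leave M hello.spec
#   remove ? stale-tarball.tar.gz
#   add ? new-patch.diff
# Deleting a line from the list leaves that file untouched.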
1565 """for objects to represent the review state in a request"""
1566 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1568 self.by_user = by_user
1569 self.by_group = by_group
1572 self.comment = comment
1575 """for objects to represent the "state" of a request"""
1576 def __init__(self, name=None, who=None, when=None, comment=None):
1580 self.comment = comment
1583 """represents an action"""
1584 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1586 self.src_project = src_project
1587 self.src_package = src_package
1588 self.src_rev = src_rev
1589 self.dst_project = dst_project
1590 self.dst_package = dst_package
1591 self.src_update = src_update
1594 """represent a request and holds its metadata
1595 it has methods to read in metadata from xml,
1596 different views, ..."""
1599 self.state = RequestState()
1602 self.last_author = None
1605 self.statehistory = []
1608 def read(self, root):
1609 self.reqid = int(root.get('id'))
1610 actions = root.findall('action')
1611 if len(actions) == 0:
1612 actions = [ root.find('submit') ] # for old style requests
1614 for action in actions:
1615 type = action.get('type', 'submit')
1617 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1618 if action.findall('source'):
1619 n = action.find('source')
1620 src_prj = n.get('project', None)
1621 src_pkg = n.get('package', None)
1622 src_rev = n.get('rev', None)
1623 if action.findall('target'):
1624 n = action.find('target')
1625 dst_prj = n.get('project', None)
1626 dst_pkg = n.get('package', None)
1627 if action.findall('options'):
1628 n = action.find('options')
1629 if n.findall('sourceupdate'):
1630 src_update = n.find('sourceupdate').text.strip()
1631 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1633 msg = 'invalid request format:\n%s' % ET.tostring(root)
1634 raise oscerr.APIError(msg)
1637 n = root.find('state')
1638 self.state.name, self.state.who, self.state.when \
1639 = n.get('name'), n.get('who'), n.get('when')
1641 self.state.comment = n.find('comment').text.strip()
1643 self.state.comment = None
1645 # read the review states
1646 for r in root.findall('review'):
1648 s.state = r.get('state')
1649 s.by_user = r.get('by_user')
1650 s.by_group = r.get('by_group')
1651 s.who = r.get('who')
1652 s.when = r.get('when')
1654 s.comment = r.find('comment').text.strip()
1657 self.reviews.append(s)
1659 # read the state history
1660 for h in root.findall('history'):
1662 s.name = h.get('name')
1663 s.who = h.get('who')
1664 s.when = h.get('when')
1666 s.comment = h.find('comment').text.strip()
1669 self.statehistory.append(s)
1670 self.statehistory.reverse()
1672 # read a description, if it exists
1674 n = root.find('description').text
1679 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1680 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1681 dst_prj, dst_pkg, src_update)
1684 def list_view(self):
1685 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1687 for a in self.actions:
1688 dst = "%s/%s" % (a.dst_project, a.dst_package)
1689 if a.src_package == a.dst_package:
1693 if a.type=="submit":
1694 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1695 if a.type=="change_devel":
1696 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1697 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1699 ret += '\n %s: %-50s %-20s ' % \
1700 (a.type, sr_source, dst)
1702 if self.statehistory and self.statehistory[0]:
1704 for h in self.statehistory:
1705 who.append("%s(%s)" % (h.who,h.name))
1707 ret += "\n From: %s" % (' -> '.join(who))
1709 txt = re.sub(r'[^\x20-\x7e\t\n]', '_', self.descr) # mask non-printable characters
1711 lines = txt.splitlines()
1712 wrapper = textwrap.TextWrapper( width = 80,
1713 initial_indent=' Descr: ',
1714 subsequent_indent=' ')
1715 ret += "\n" + wrapper.fill(lines[0])
1716 wrapper.initial_indent = ' '
1717 for line in lines[1:]:
1718 ret += "\n" + wrapper.fill(line)
1724 def __cmp__(self, other):
1725 return cmp(self.reqid, other.reqid)
1729 for action in self.actions:
1730 action_list=" %s: " % (action.type)
1731 if action.type=="submit":
1734 r="(r%s)" % (action.src_rev)
1736 if action.src_update:
1737 m="(%s)" % (action.src_update)
1738 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1739 if action.dst_package:
1740 action_list=action_list+"/%s" % ( action.dst_package )
1741 elif action.type=="delete":
1742 action_list=action_list+" %s" % ( action.dst_project )
1743 if action.dst_package:
1744 action_list=action_list+"/%s" % ( action.dst_package )
1745 elif action.type=="change_devel":
1746 action_list=action_list+" %s/%s developed in %s/%s" % \
1747 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1748 action_list=action_list+"\n"
1763 self.state.name, self.state.when, self.state.who,
1766 if len(self.reviews):
1767 reviewitems = [ '%-10s %s %s %s %s %s' \
1768 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1769 for i in self.reviews ]
1770 s += '\nReview: ' + '\n '.join(reviewitems)
1773 if len(self.statehistory):
1774 histitems = [ '%-10s %s %s' \
1775 % (i.name, i.when, i.who) \
1776 for i in self.statehistory ]
1777 s += '\nHistory: ' + '\n '.join(histitems)
1784 """format time as Apr 02 18:19
1786 depending on whether it is in the current year
1790 if time.localtime()[0] == time.localtime(t)[0]:
1792 return time.strftime('%b %d %H:%M',time.localtime(t))
1794 return time.strftime('%b %d %Y',time.localtime(t))
1797 def is_project_dir(d):
1798 return os.path.exists(os.path.join(d, store, '_project')) and not \
1799 os.path.exists(os.path.join(d, store, '_package'))
1802 def is_package_dir(d):
1803 return os.path.exists(os.path.join(d, store, '_project')) and \
1804 os.path.exists(os.path.join(d, store, '_package'))
1806 def parse_disturl(disturl):
1807 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1808 revision), else raises an oscerr.WrongArgs exception
1811 m = DISTURL_RE.match(disturl)
1813 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1815 apiurl = m.group('apiurl')
1816 if apiurl.split('.')[0] != 'api':
1817 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1818 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
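# Illustrative sketch (hypothetical disturl): what parse_disturl() yields for a
# typical DISTURL as found in built rpms.
def _example_parse_disturl():
    apiurl, project, source, repository, revision = parse_disturl(
        'obs://build.opensuse.org/openSUSE:Factory/standard/0123456789abcdef-hello')
    print apiurl      # 'https://api.opensuse.org' (host rewritten to api.*)
    print project     # 'openSUSE:Factory'
    print repository  # 'standard'
    print source      # 'hello'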
1820 def parse_buildlogurl(buildlogurl):
1821 """Parse a build log url, returns a tuple (apiurl, project, package,
1822 repository, arch), else raises oscerr.WrongArgs exception"""
1824 global BUILDLOGURL_RE
1826 m = BUILDLOGURL_RE.match(buildlogurl)
1828 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1830 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
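# Illustrative sketch (hypothetical URL): parse_buildlogurl() splits a build log
# URL such as
#   https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/hello/_log
# into ('https://api.opensuse.org', 'openSUSE:Factory', 'hello', 'standard', 'x86_64').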
1833 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1834 This is handy to allow copy/paste a project/package combination in this form.
1836 Trailing slashes are removed before the split, because the split would
1837 otherwise give an additional empty string.
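# Illustrative sketch (hypothetical arguments): slash_split(['home:user/hello', 'foo'])
# would yield ['home:user', 'hello', 'foo'], so "osc co home:user/hello" behaves
# like "osc co home:user hello".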
1845 def expand_proj_pack(args, idx=0, howmany=0):
1846 """looks for occurance of '.' at the position idx.
1847 If howmany is 2, both proj and pack are expanded together
1848 using the current directory, or none of them, if not possible.
1849 If howmany is 0, proj is expanded if possible, then, if there
1850 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1851 expanded, if possible.
1852 If howmany is 1, only proj is expanded if possible.
1854 If args[idx] does not exist, an implicit '.' is assumed.
1855 If not enough elements up to idx exist, an error is raised.
1857 See also parseargs(args), slash_split(args), findpacs(args)
1858 All these need unification, somehow.
1861 # print args,idx,howmany
1864 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1866 if len(args) == idx:
1868 if args[idx+0] == '.':
1869 if howmany == 0 and len(args) > idx+1:
1870 if args[idx+1] == '.':
1872 # remove one dot and make sure to expand both proj and pack
1877 # print args,idx,howmany
1879 args[idx+0] = store_read_project('.')
1882 package = store_read_package('.')
1883 args.insert(idx+1, package)
1887 package = store_read_package('.')
1888 args.insert(idx+1, package)
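# Illustrative sketch (run inside a package working copy, hypothetical names):
# expand_proj_pack(['.']) replaces the '.' with the project read from .osc and
# appends the package name, e.g. ['.'] -> ['home:user', 'hello'].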
1892 def findpacs(files, progress_obj=None):
1893 """collect Package objects belonging to the given files
1894 and make sure each Package is returned only once"""
1897 p = filedir_to_pac(f, progress_obj)
1900 if i.name == p.name:
1910 def filedir_to_pac(f, progress_obj=None):
1911 """Takes a working copy path, or a path to a file inside a working copy,
1912 and returns a Package object instance.
1914 If the argument was a filename, it is added onto the "todo" list of the Package """
1916 if os.path.isdir(f):
1918 p = Package(wd, progress_obj=progress_obj)
1920 wd = os.path.dirname(f) or os.curdir
1921 p = Package(wd, progress_obj=progress_obj)
1922 p.todo = [ os.path.basename(f) ]
1926 def read_filemeta(dir):
1928 r = ET.parse(os.path.join(dir, store, '_files'))
1929 except SyntaxError, e:
1930 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1931 'When parsing .osc/_files, the following error was encountered:\n'
1936 def read_tobedeleted(dir):
1938 fname = os.path.join(dir, store, '_to_be_deleted')
1940 if os.path.exists(fname):
1941 r = [ line.strip() for line in open(fname) ]
1946 def read_sizelimit(dir):
1948 fname = os.path.join(dir, store, '_size_limit')
1950 if os.path.exists(fname):
1951 r = open(fname).readline()
1953 if r is None or not r.isdigit():
1957 def read_inconflict(dir):
1959 fname = os.path.join(dir, store, '_in_conflict')
1961 if os.path.exists(fname):
1962 r = [ line.strip() for line in open(fname) ]
1967 def parseargs(list_of_args):
1968 """Convenience method osc's commandline argument parsing.
1970 If called with an empty tuple (or list), return a list containing the current directory.
1971 Otherwise, return a list of the arguments."""
1973 return list(list_of_args)
1978 def statfrmt(statusletter, filename):
1979 return '%s %s' % (statusletter, filename)
1982 def pathjoin(a, *p):
1983 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1984 path = os.path.join(a, *p)
1985 if path.startswith('./'):
1990 def makeurl(baseurl, l, query=[]):
1991 """Given a list of path compoments, construct a complete URL.
1993 Optional parameters for a query string can be given as a list, as a
1994 dictionary, or as an already assembled string.
1995 In case of a dictionary, the parameters will be urlencoded by this
1996 function. In case of a list they will not be -- this is for backwards compatibility.
1999 if conf.config['verbose'] > 1:
2000 print 'makeurl:', baseurl, l, query
2002 if isinstance(query, list):
2003 query = '&'.join(query)
2004 elif isinstance(query, dict):
2005 query = urlencode(query)
2007 scheme, netloc = urlsplit(baseurl)[0:2]
2008 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
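# Illustrative sketch (hypothetical apiurl): how makeurl() combines path
# components with a query dictionary.
def _example_makeurl():
    u = makeurl('https://api.opensuse.org', ['source', 'home:user', 'hello'],
                query={'rev': 'latest'})
    print u   # https://api.opensuse.org/source/home:user/hello?rev=latest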
2011 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2012 """wrapper around urllib2.urlopen for error handling,
2013 and to support additional (PUT, DELETE) methods"""
2017 if conf.config['http_debug']:
2020 print '--', method, url
2022 if method == 'POST' and not file and not data:
2023 # adding data to an urllib2 request transforms it into a POST
2026 req = urllib2.Request(url)
2027 api_host_options = {}
2029 api_host_options = conf.get_apiurl_api_host_options(url)
2030 for header, value in api_host_options['http_headers']:
2031 req.add_header(header, value)
2033 # "external" request (url is no apiurl)
2036 req.get_method = lambda: method
2038 # POST requests are application/x-www-form-urlencoded by default
2039 # since we change the request into PUT, we also need to adjust the content type header
2040 if method == 'PUT' or (method == 'POST' and data):
2041 req.add_header('Content-Type', 'application/octet-stream')
2043 if isinstance(headers, dict):
2044 for i in headers.keys():
2046 req.add_header(i, headers[i])
2048 if file and not data:
2049 size = os.path.getsize(file)
2051 data = open(file, 'rb').read()
2054 filefd = open(file, 'rb')
2056 if sys.platform[:3] != 'win':
2057 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2059 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2061 except EnvironmentError, e:
2063 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2064 '\non a filesystem which does not support this.' % (e, file))
2065 elif hasattr(e, 'winerror') and e.winerror == 5:
2066 # falling back to the default io
2067 data = open(file, 'rb').read()
2071 if conf.config['debug']: print method, url
2073 old_timeout = socket.getdefaulttimeout()
2074 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2075 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2076 socket.setdefaulttimeout(timeout)
2078 fd = urllib2.urlopen(req, data=data)
2080 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2081 socket.setdefaulttimeout(old_timeout)
2082 if hasattr(conf.cookiejar, 'save'):
2083 conf.cookiejar.save(ignore_discard=True)
2085 if filefd: filefd.close()
2090 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2091 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2092 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2093 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
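# Illustrative sketch: the http_* helpers raise urllib2.HTTPError on server
# errors (urllib2 and sys are imported at the top of this module, as used
# elsewhere in this file), so callers typically wrap them like this.
def _example_http_get(url):
    try:
        return http_GET(url).read()
    except urllib2.HTTPError, e:
        print >>sys.stderr, 'request failed with code', e.code
        raise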
2096 def init_project_dir(apiurl, dir, project):
2097 if not os.path.exists(dir):
2098 if conf.config['checkout_no_colon']:
2099 os.makedirs(dir) # helpful with checkout_no_colon
2102 if not os.path.exists(os.path.join(dir, store)):
2103 os.mkdir(os.path.join(dir, store))
2105 # print 'project=',project,' dir=',dir
2106 store_write_project(dir, project)
2107 store_write_apiurl(dir, apiurl)
2108 if conf.config['do_package_tracking']:
2109 store_write_initial_packages(dir, project, [])
2111 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
2112 if not os.path.isdir(store):
2115 f = open('_project', 'w')
2116 f.write(project + '\n')
2118 f = open('_package', 'w')
2119 f.write(package + '\n')
2123 f = open('_size_limit', 'w')
2124 f.write(str(limit_size))
2128 f = open('_files', 'w')
2129 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
2133 ET.ElementTree(element=ET.Element('directory')).write('_files')
2135 f = open('_osclib_version', 'w')
2136 f.write(__store_version__ + '\n')
2139 store_write_apiurl(os.path.pardir, apiurl)
2145 def check_store_version(dir):
2146 versionfile = os.path.join(dir, store, '_osclib_version')
2148 v = open(versionfile).read().strip()
2153 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2154 if os.path.exists(os.path.join(dir, '.svn')):
2155 msg = msg + '\nTry svn instead of osc.'
2156 raise oscerr.NoWorkingCopy(msg)
2158 if v != __store_version__:
2159 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2160 # these versions are fine, no real migration needed; just update the version file
2161 f = open(versionfile, 'w')
2162 f.write(__store_version__ + '\n')
2165 msg = 'The osc metadata of your working copy "%s"' % dir
2166 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2167 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2168 raise oscerr.WorkingCopyWrongVersion, msg
2171 def meta_get_packagelist(apiurl, prj):
2173 u = makeurl(apiurl, ['source', prj])
2175 root = ET.parse(f).getroot()
2176 return [ node.get('name') for node in root.findall('entry') ]
2179 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2180 """return a list of file names,
2181 or a list of File() instances if verbose=True"""
2187 query['rev'] = revision
2189 query['rev'] = 'latest'
2191 u = makeurl(apiurl, ['source', prj, package], query=query)
2193 root = ET.parse(f).getroot()
2196 return [ node.get('name') for node in root.findall('entry') ]
2200 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2201 rev = root.get('rev')
2202 for node in root.findall('entry'):
2203 f = File(node.get('name'),
2205 int(node.get('size')),
2206 int(node.get('mtime')))
2212 def meta_get_project_list(apiurl):
2213 u = makeurl(apiurl, ['source'])
2215 root = ET.parse(f).getroot()
2216 return sorted([ node.get('name') for node in root ])
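# Usage sketch (illustrative; 'home:user'/'mypackage' are placeholders): the
# meta_get_* helpers return plain lists of names parsed from the server's
# directory listings.
#   >>> prjs = meta_get_project_list(conf.config['apiurl'])
#   >>> pacs = meta_get_packagelist(conf.config['apiurl'], 'home:user')
#   >>> files = meta_get_filelist(conf.config['apiurl'], 'home:user', 'mypackage', verbose=True)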
2219 def show_project_meta(apiurl, prj):
2220 url = makeurl(apiurl, ['source', prj, '_meta'])
2222 return f.readlines()
2225 def show_project_conf(apiurl, prj):
2226 url = makeurl(apiurl, ['source', prj, '_config'])
2228 return f.readlines()
2231 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2232 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2236 except urllib2.HTTPError, e:
2237 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2241 def show_package_meta(apiurl, prj, pac):
2242 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2245 return f.readlines()
2246 except urllib2.HTTPError, e:
2247 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2251 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2253 path.append('source')
2259 path.append('_attribute')
2261 path.append(attribute)
2264 query.append("with_default=1")
2266 query.append("with_project=1")
2267 url = makeurl(apiurl, path, query)
2270 return f.readlines()
2271 except urllib2.HTTPError, e:
2272 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2276 def show_develproject(apiurl, prj, pac):
2277 m = show_package_meta(apiurl, prj, pac)
2279 return ET.fromstring(''.join(m)).find('devel').get('project')
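# Usage sketch (illustrative; project/package names are placeholders):
# show_package_meta() returns the raw _meta lines, show_develproject() extracts
# the <devel> project from them.
#   >>> meta = ''.join(show_package_meta(conf.config['apiurl'], 'openSUSE:Factory', 'osc'))
#   >>> devprj = show_develproject(conf.config['apiurl'], 'openSUSE:Factory', 'osc')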
2284 def show_pattern_metalist(apiurl, prj):
2285 url = makeurl(apiurl, ['source', prj, '_pattern'])
2289 except urllib2.HTTPError, e:
2290 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2292 r = [ node.get('name') for node in tree.getroot() ]
2297 def show_pattern_meta(apiurl, prj, pattern):
2298 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2301 return f.readlines()
2302 except urllib2.HTTPError, e:
2303 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2308 """metafile that can be manipulated and is stored back after manipulation."""
2309 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2313 self.change_is_required = change_is_required
2314 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2315 f = os.fdopen(fd, 'w')
2316 f.write(''.join(input))
2318 self.hash_orig = dgst(self.filename)
2321 hash = dgst(self.filename)
2322 if self.change_is_required and hash == self.hash_orig:
2323 print 'File unchanged. Not saving.'
2324 os.unlink(self.filename)
2327 print 'Sending meta data...'
2328 # don't do any exception handling... it's up to the caller what to do in case
2330 http_PUT(self.url, file=self.filename)
2331 os.unlink(self.filename)
2337 run_editor(self.filename)
2341 except urllib2.HTTPError, e:
2342 error_help = "%d" % e.code
2343 if e.headers.get('X-Opensuse-Errorcode'):
2344 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2346 print >>sys.stderr, 'BuildService API error:', error_help
2347 # examine the error - we can't raise an exception because we might want
2350 if '<summary>' in data:
2351 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2352 input = raw_input('Try again? ([y/N]): ')
2353 if input not in ['y', 'Y']:
2359 if os.path.exists(self.filename):
2360 print 'discarding %s' % self.filename
2361 os.unlink(self.filename)
2364 # different types of metadata
2365 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2366 'template': new_project_templ,
2369 'pkg': { 'path' : 'source/%s/%s/_meta',
2370 'template': new_package_templ,
2373 'attribute': { 'path' : 'source/%s/%s/_meta',
2374 'template': new_attribute_templ,
2377 'prjconf': { 'path': 'source/%s/_config',
2381 'user': { 'path': 'person/%s',
2382 'template': new_user_template,
2385 'pattern': { 'path': 'source/%s/_pattern/%s',
2386 'template': new_pattern_template,
2391 def meta_exists(metatype,
2398 apiurl = conf.config['apiurl']
2399 url = make_meta_url(metatype, path_args, apiurl)
2401 data = http_GET(url).readlines()
2402 except urllib2.HTTPError, e:
2403 if e.code == 404 and create_new:
2404 data = metatypes[metatype]['template']
2406 data = StringIO(data % template_args).readlines()
2411 def make_meta_url(metatype, path_args=None, apiurl=None):
2413 apiurl = conf.config['apiurl']
2414 if metatype not in metatypes.keys():
2415 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2416 path = metatypes[metatype]['path']
2419 path = path % path_args
2421 return makeurl(apiurl, [path])
2424 def edit_meta(metatype,
2429 change_is_required=False,
2433 apiurl = conf.config['apiurl']
2435 data = meta_exists(metatype,
2438 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2442 change_is_required = True
2444 url = make_meta_url(metatype, path_args, apiurl)
2445 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
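# Usage sketch for edit_meta() (hedged: only the metatype/path_args/template_args
# keywords that appear elsewhere in this module are shown; all values are placeholders):
#   >>> edit_meta(metatype='pkg',
#   ...           path_args=(quote_plus('home:user'), quote_plus('mypackage')),
#   ...           template_args=({'name': 'mypackage', 'user': 'someuser'}))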
2453 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
2456 query['rev'] = revision
2458 query['rev'] = 'latest'
2460 query['linkrev'] = linkrev
2461 elif conf.config['linkcontrol']:
2462 query['linkrev'] = 'base'
2466 query['emptylink'] = 1
2467 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2469 # look for "too large" files according to size limit and mark them
2470 root = ET.fromstring(''.join(f.readlines()))
2471 for e in root.findall('entry'):
2472 size = e.get('size')
2473 if size and limit_size and int(size) > int(limit_size):
2474 e.set('skipped', 'true')
2475 return ET.tostring(root)
2478 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2479 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2480 return ET.fromstring(''.join(m)).get('srcmd5')
2483 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2484 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2486 # only source link packages have a <linkinfo> element.
2487 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2495 raise oscerr.LinkExpandError(prj, pac, li.error)
2499 def show_upstream_rev(apiurl, prj, pac):
2500 m = show_files_meta(apiurl, prj, pac)
2501 return ET.fromstring(''.join(m)).get('rev')
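# Usage sketch (illustrative placeholders): the show_upstream_* helpers pull
# single attributes out of the package file listing.
#   >>> rev    = show_upstream_rev(conf.config['apiurl'], 'home:user', 'mypackage')
#   >>> srcmd5 = show_upstream_srcmd5(conf.config['apiurl'], 'home:user', 'mypackage', expand=True)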
2504 def read_meta_from_spec(specfile, *args):
2505 import codecs, locale, re
2507 Read tags and sections from spec file. To read out
2508 a tag the passed argument mustn't end with a colon. To
2509 read out a section the passed argument must start with
2511 This method returns a dictionary which contains the
2515 if not os.path.isfile(specfile):
2516 raise IOError('\'%s\' is not a regular file' % specfile)
2519 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2520 except UnicodeDecodeError:
2521 lines = open(specfile).readlines()
2528 if itm.startswith('%'):
2529 sections.append(itm)
2533 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2535 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2536 if m and m.group('val'):
2537 spec_data[tag] = m.group('val').strip()
2539 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2542 section_pat = '^%s\s*?$'
2543 for section in sections:
2544 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2546 start = lines.index(m.group()+'\n') + 1
2548 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2551 for line in lines[start:]:
2552 if line.startswith('%'):
2555 spec_data[section] = data
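# Usage sketch (the spec file name is a placeholder): tags are passed without
# the trailing colon, sections with their leading '%'; both become keys in the
# returned dictionary.
#   >>> data = read_meta_from_spec('mypackage.spec', 'Name', 'Version', '%description')
#   >>> data['Name'], data['Version'], data['%description']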
2559 def run_pager(message):
2560 import tempfile, sys
2562 if not sys.stdout.isatty():
2565 tmpfile = tempfile.NamedTemporaryFile()
2566 tmpfile.write(message)
2568 pager = os.getenv('PAGER', default='less')
2569 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
2572 def run_editor(filename):
2573 if sys.platform[:3] != 'win':
2574 editor = os.getenv('EDITOR', default='vim')
2576 editor = os.getenv('EDITOR', default='notepad')
2578 return subprocess.call([ editor, filename ])
2580 def edit_message(footer='', template='', templatelen=30):
2581 delim = '--This line, and those below, will be ignored--\n'
2583 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2584 f = os.fdopen(fd, 'w')
2586 if not templatelen is None:
2587 lines = template.splitlines()
2588 template = '\n'.join(lines[:templatelen])
2589 if lines[templatelen:]:
2590 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2600 run_editor(filename)
2601 msg = open(filename).read().split(delim)[0].rstrip()
2606 input = raw_input('Log message not specified\n'
2607 'a)bort, c)ontinue, e)dit: ')
2609 raise oscerr.UserAbort()
2619 def create_delete_request(apiurl, project, package, message):
2624 package = """package="%s" """ % (package)
2630 <action type="delete">
2631 <target project="%s" %s/>
2634 <description>%s</description>
2636 """ % (project, package,
2637 cgi.escape(message or ''))
2639 u = makeurl(apiurl, ['request'], query='cmd=create')
2640 f = http_POST(u, data=xml)
2642 root = ET.parse(f).getroot()
2643 return root.get('id')
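# Usage sketch (project/package/message are placeholders); the return value is
# the request id assigned by the server.
#   >>> reqid = create_delete_request(conf.config['apiurl'], 'home:user', 'mypackage',
#   ...                               'package is no longer needed')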
2646 def create_change_devel_request(apiurl,
2647 devel_project, devel_package,
2654 <action type="change_devel">
2655 <source project="%s" package="%s" />
2656 <target project="%s" package="%s" />
2659 <description>%s</description>
2661 """ % (devel_project,
2665 cgi.escape(message or ''))
2667 u = makeurl(apiurl, ['request'], query='cmd=create')
2668 f = http_POST(u, data=xml)
2670 root = ET.parse(f).getroot()
2671 return root.get('id')
2674 # This creates an old style submit request for server api 1.0
2675 def create_submit_request(apiurl,
2676 src_project, src_package,
2677 dst_project=None, dst_package=None,
2678 message=None, orev=None, src_update=None):
2683 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2685 # Yes, this kind of xml construction is horrible
2690 packagexml = """package="%s" """ %( dst_package )
2691 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2692 # XXX: keep the old template for now in order to work with old obs instances
2694 <request type="submit">
2696 <source project="%s" package="%s" rev="%s"/>
2701 <description>%s</description>
2705 orev or show_upstream_rev(apiurl, src_project, src_package),
2708 cgi.escape(message or ""))
2710 u = makeurl(apiurl, ['request'], query='cmd=create')
2711 f = http_POST(u, data=xml)
2713 root = ET.parse(f).getroot()
2714 return root.get('id')
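# Usage sketch (all project/package names and the message are placeholders;
# 'cleanup' is one assumed <sourceupdate> value):
#   >>> reqid = create_submit_request(conf.config['apiurl'],
#   ...                               'home:user:branches:openSUSE:Factory', 'osc',
#   ...                               'openSUSE:Factory', 'osc',
#   ...                               message='update to 0.126', src_update='cleanup')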
2717 def get_request(apiurl, reqid):
2718 u = makeurl(apiurl, ['request', reqid])
2720 root = ET.parse(f).getroot()
2727 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2730 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2731 f = http_POST(u, data=message)
2734 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2737 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2738 f = http_POST(u, data=message)
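# Usage sketch (request id and messages are placeholders): both helpers POST a
# state-change command and send the message as the request body.
#   >>> change_request_state(conf.config['apiurl'], '12345', 'accepted', message='looks good')
#   >>> change_review_state(conf.config['apiurl'], '12345', 'accepted', by_user='reviewer', message='ok')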
2742 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2744 if not 'all' in req_state:
2745 for state in req_state:
2746 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2748 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2750 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2754 todo['project'] = project
2756 todo['package'] = package
2757 for kind, val in todo.iteritems():
2758 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2759 'action/source/@%(kind)s=\'%(val)s\' or ' \
2760 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2761 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2763 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2764 for i in exclude_target_projects:
2765 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2766 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2768 if conf.config['verbose'] > 1:
2769 print '[ %s ]' % xpath
2770 res = search(apiurl, request=xpath)
2771 collection = res['request']
2773 for root in collection.findall('request'):
2779 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2780 """Return all new requests for all projects/packages where is user is involved"""
2782 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2783 for i in res['project_id'].findall('project'):
2784 projpkgs[i.get('name')] = []
2785 for i in res['package_id'].findall('package'):
2786 if not i.get('project') in projpkgs.keys():
2787 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2789 for prj, pacs in projpkgs.iteritems():
2791 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2795 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2796 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2797 xpath = xpath_join(xpath, xp, inner=True)
2799 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2800 if not 'all' in req_state:
2802 for state in req_state:
2803 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2804 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2805 res = search(apiurl, request=xpath)
2807 for root in res['request'].findall('request'):
2813 def get_request_log(apiurl, reqid):
2814 r = get_request(conf.config['apiurl'], reqid)
2816 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2817 # the description of the request is used for the initial log entry
2818 # otherwise its comment attribute would contain None
2819 if len(r.statehistory) >= 1:
2820 r.statehistory[-1].comment = r.descr
2822 r.state.comment = r.descr
2823 for state in [ r.state ] + r.statehistory:
2824 s = frmt % (state.name, state.who, state.when, str(state.comment))
2829 def get_user_meta(apiurl, user):
2830 u = makeurl(apiurl, ['person', quote_plus(user)])
2833 return ''.join(f.readlines())
2834 except urllib2.HTTPError:
2835 print 'user \'%s\' not found' % user
2839 def get_user_data(apiurl, user, *tags):
2840 """get specified tags from the user meta"""
2841 meta = get_user_meta(apiurl, user)
2844 root = ET.fromstring(meta)
2847 if root.find(tag).text != None:
2848 data.append(root.find(tag).text)
2852 except AttributeError:
2853 # this part is reached if the tags tuple contains an invalid tag
2854 print 'The xml file for user \'%s\' seems to be broken' % user
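# Usage sketch (the user name is a placeholder; the tag names are element names
# from the person meta, e.g. login/realname/email):
#   >>> login, realname, email = get_user_data(conf.config['apiurl'], 'someuser',
#   ...                                        'login', 'realname', 'email')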
2859 def download(url, filename, progress_obj = None, mtime = None):
2860 import tempfile, shutil
2863 prefix = os.path.basename(filename)
2864 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
2865 os.chmod(tmpfile, 0644)
2867 o = os.fdopen(fd, 'wb')
2868 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2871 shutil.move(tmpfile, filename)
2880 os.utime(filename, (-1, mtime))
2882 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None):
2883 targetfilename = targetfilename or filename
2886 query = { 'rev': revision }
2887 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2888 download(u, targetfilename, progress_obj, mtime)
2890 def get_binary_file(apiurl, prj, repo, arch,
2893 target_filename = None,
2894 target_mtime = None,
2895 progress_meter = False):
2898 from meter import TextMeter
2899 progress_obj = TextMeter()
2901 target_filename = target_filename or filename
2903 where = package or '_repository'
2904 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2905 download(u, target_filename, progress_obj, target_mtime)
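# Usage sketch (all arguments are placeholders; this assumes the filename and
# package parameters elided from the signature above): fetch one built rpm from
# the build results; package falls back to '_repository' when not given.
#   >>> get_binary_file(conf.config['apiurl'], 'home:user', 'openSUSE_Factory', 'x86_64',
#   ...                 'mypackage-1.0-1.x86_64.rpm', package='mypackage',
#   ...                 target_filename='mypackage-1.0-1.x86_64.rpm')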
2907 def dgst_from_string(str):
2908 # Python 2.5 deprecates the md5 module
2909 # Python 2.4 doesn't have hashlib yet
2912 md5_hash = hashlib.md5()
2915 md5_hash = md5.new()
2916 md5_hash.update(str)
2917 return md5_hash.hexdigest()
2921 #if not os.path.exists(file):
2931 f = open(file, 'rb')
2933 buf = f.read(BUFSIZE)
2936 return s.hexdigest()
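# Usage sketch: both digest helpers return a hex digest (md5 for strings, as
# shown above; dgst() streams the file in BUFSIZE chunks, its setup is elided here).
#   >>> dgst_from_string('some data')
#   >>> dgst('local.spec')   # the file name is a placeholder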
2941 """return true if a string is binary data using diff's heuristic"""
2942 if s and '\0' in s[:4096]:
2947 def binary_file(fn):
2948 """read 4096 bytes from a file named fn, and call binary() on the data"""
2949 return binary(open(fn, 'rb').read(4096))
2952 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2954 This method diffs oldfilename against filename (so filename will
2955 be shown as the new file).
2956 The variable origfilename is used if filename and oldfilename differ
2957 in their names (for instance if a tempfile is used for filename etc.)
2963 oldfilename = filename
2966 olddir = os.path.join(dir, store)
2968 if not origfilename:
2969 origfilename = filename
2971 file1 = os.path.join(olddir, oldfilename) # old/stored original
2972 file2 = os.path.join(dir, filename) # working copy
2974 f1 = open(file1, 'rb')
2978 f2 = open(file2, 'rb')
2982 if binary(s1) or binary(s2):
2983 d = ['Binary file %s has changed\n' % origfilename]
2986 d = difflib.unified_diff(\
2989 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2990 tofile = '%s\t(working copy)' % origfilename)
2992 # if a file doesn't end with a newline, append the '\ No newline at end of file' marker in the diff result
2994 for i, line in enumerate(d):
2995 if not line.endswith('\n'):
2996 d[i] += '\n\\ No newline at end of file'
3002 def make_diff(wc, revision):
3008 diff_hdr = 'Index: %s\n'
3009 diff_hdr += '===================================================================\n'
3011 olddir = os.getcwd()
3015 for file in wc.todo:
3016 if file in wc.skipped:
3018 if file in wc.filenamelist+wc.filenamelist_unvers:
3019 state = wc.status(file)
3021 added_files.append(file)
3023 removed_files.append(file)
3024 elif state == 'M' or state == 'C':
3025 changed_files.append(file)
3027 diff.append('osc: \'%s\' is not under version control' % file)
3029 for file in wc.filenamelist+wc.filenamelist_unvers:
3030 if file in wc.skipped:
3032 state = wc.status(file)
3033 if state == 'M' or state == 'C':
3034 changed_files.append(file)
3036 added_files.append(file)
3038 removed_files.append(file)
3040 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
3042 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
3043 cmp_pac = Package(tmpdir)
3045 for file in wc.todo:
3046 if file in cmp_pac.skipped:
3048 if file in cmp_pac.filenamelist:
3049 if file in wc.filenamelist:
3050 changed_files.append(file)
3052 diff.append('osc: \'%s\' is not under version control' % file)
3054 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
3056 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
3058 for file in changed_files:
3059 diff.append(diff_hdr % file)
3061 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
3063 cmp_pac.updatefile(file, revision)
3064 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3065 cmp_pac.absdir, file))
3066 (fd, tmpfile) = tempfile.mkstemp()
3067 for file in added_files:
3068 diff.append(diff_hdr % file)
3070 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3071 os.path.dirname(tmpfile), file))
3073 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3074 os.path.dirname(tmpfile), file))
3076 # FIXME: this is ugly but it cannot be avoided atm
3077 # if a file is deleted via "osc rm file" we should keep the storefile.
3079 if cmp_pac == None and removed_files:
3080 tmpdir = tempfile.mkdtemp()
3082 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3083 tmp_pac = Package(tmpdir)
3086 for file in removed_files:
3087 diff.append(diff_hdr % file)
3089 tmp_pac.updatefile(file, tmp_pac.rev)
3090 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3091 wc.rev, file, tmp_pac.storedir, file))
3093 cmp_pac.updatefile(file, revision)
3094 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3095 revision, file, cmp_pac.storedir, file))
3099 delete_dir(cmp_pac.absdir)
3101 delete_dir(tmp_pac.absdir)
3105 def server_diff(apiurl,
3106 old_project, old_package, old_revision,
3107 new_project, new_package, new_revision, unified=False, missingok=False):
3108 query = {'cmd': 'diff', 'expand': '1'}
3110 query['oproject'] = old_project
3112 query['opackage'] = old_package
3114 query['orev'] = old_revision
3116 query['rev'] = new_revision
3118 query['unified'] = 1
3120 query['missingok'] = 1
3122 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
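# Usage sketch (all names/revisions are placeholders): ask the server for a
# diff between two package revisions; None can be passed for values that should
# be omitted from the query. The response handling is elided above.
#   >>> server_diff(conf.config['apiurl'],
#   ...             'openSUSE:Factory', 'osc', None,
#   ...             'home:user:branches:openSUSE:Factory', 'osc', None,
#   ...             unified=True)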
3128 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3130 creates the plain directory structure for a package dir.
3131 The 'apiurl' parameter is needed for the project dir initialization.
3132 The 'project' and 'package' parameters specify the name of the
3133 project and the package. The optional 'pathname' parameter is used
3134 for printing out the message that a new dir was created (default: 'prj_dir/package').
3135 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3137 prj_dir = prj_dir or project
3139 # FIXME: carefully test each path component of prj_dir,
3140 # if we have a .osc/_files entry at that level.
3141 # -> if so, we have a package/project clash,
3142 # and should rename this path component by appending '.proj'
3143 # and give user a warning message, to discourage such clashes
3145 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3146 if is_package_dir(prj_dir):
3147 # we want this to become a project directory,
3148 # but it already is a package directory.
3149 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3151 if not is_project_dir(prj_dir):
3152 # this directory could exist as a parent directory for one of our earlier
3153 # checked out sub-projects. In this case, we still need to initialize it.
3154 print statfrmt('A', prj_dir)
3155 init_project_dir(apiurl, prj_dir, project)
3157 if is_project_dir(os.path.join(prj_dir, package)):
3158 # the thing exists, but is a project directory and not a package directory
3159 # FIXME: this should be a warning message to discourage package/project clashes
3160 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3162 if not os.path.exists(os.path.join(prj_dir, package)):
3163 print statfrmt('A', pathname)
3164 os.mkdir(os.path.join(prj_dir, package))
3165 os.mkdir(os.path.join(prj_dir, package, store))
3167 return(os.path.join(prj_dir, package))
3170 def checkout_package(apiurl, project, package,
3171 revision=None, pathname=None, prj_obj=None,
3172 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3174 # the project we're in might have been deleted;
3175 # os.getcwd() would throw an error then, so fall back to $PWD.
3176 olddir = os.getcwd()
3178 olddir = os.environ.get("PWD")
3183 if sys.platform[:3] == 'win':
3184 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3186 if conf.config['checkout_no_colon']:
3187 prj_dir = prj_dir.replace(':', '/')
3190 pathname = getTransActPath(os.path.join(prj_dir, package))
3192 # before we create directories and stuff, check if the package actually
3194 show_package_meta(apiurl, project, package)
3198 # try to read from the linkinfo
3199 # if it is a link we use the xsrcmd5 as the revision to be
3202 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3204 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3209 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3210 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3212 p = Package(package, progress_obj=progress_obj)
3215 for filename in p.filenamelist:
3216 if filename in p.skipped:
3218 if service_files or not filename.startswith('_service:'):
3219 p.updatefile(filename, revision)
3220 # print 'A ', os.path.join(project, package, filename)
3221 print statfrmt('A', os.path.join(pathname, filename))
3222 if conf.config['do_package_tracking']:
3223 # check if we can re-use an existing project object
3225 prj_obj = Project(os.getcwd())
3226 prj_obj.set_state(p.name, ' ')
3227 prj_obj.write_packages()
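# Usage sketch (project/package names are placeholders): check out one package
# into ./<prj_dir>/<package>, expanding a source link if requested.
#   >>> checkout_package(conf.config['apiurl'], 'openSUSE:Factory', 'osc',
#   ...                  expand_link=True, prj_dir='openSUSE:Factory')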
3231 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3232 dst_userid = None, keep_develproject = False):
3234 update pkgmeta with new_name and new_prj and set the calling user as the
3235 only maintainer (unless keep_maintainers is set). Additionally remove the
3236 develproject entry (<devel />) unless keep_develproject is true.
3238 root = ET.fromstring(''.join(pkgmeta))
3239 root.set('name', new_name)
3240 root.set('project', new_prj)
3241 if not keep_maintainers:
3242 for person in root.findall('person'):
3244 if not keep_develproject:
3245 for dp in root.findall('devel'):
3247 return ET.tostring(root)
3249 def link_to_branch(apiurl, project, package):
3251 convert a package with a _link + project.diff to a branch
3254 if '_link' in meta_get_filelist(apiurl, project, package):
3255 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3258 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3260 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3262 create a linked package
3263 - "src" is the original package
3264 - "dst" is the "link" package that we are creating here
3269 dst_meta = meta_exists(metatype='pkg',
3270 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3272 create_new=False, apiurl=conf.config['apiurl'])
3273 root = ET.fromstring(''.join(dst_meta))
3274 print root.attrib['project']
3275 if root.attrib['project'] != dst_project:
3276 # The source comes from a different project via a project link, so we need to create this package instance first
3282 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3283 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3287 root = ET.fromstring(''.join(dst_meta))
3288 elm = root.find('publish')
3290 elm = ET.SubElement(root, 'publish')
3292 ET.SubElement(elm, 'disable')
3293 dst_meta = ET.tostring(root)
3297 path_args=(dst_project, dst_package),
3299 # create the _link file
3300 # but first, make sure not to overwrite an existing one
3301 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3303 print >>sys.stderr, 'forced overwrite of existing _link file'
3306 print >>sys.stderr, '_link file already exists...! Aborting'
3310 rev = 'rev="%s"' % rev
3315 cicount = 'cicount="%s"' % cicount
3319 print 'Creating _link...',
3320 link_template = """\
3321 <link project="%s" package="%s" %s %s>
3323 <!-- <apply name="patch" /> apply a patch on the source directory -->
3324 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3325 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3326 <!-- <delete>filename</delete> delete a file -->
3329 """ % (src_project, src_package, rev, cicount)
3331 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3332 http_PUT(u, data=link_template)
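# Usage sketch (project/package names and revision are placeholders): create
# dst as a _link to src, refusing to overwrite an existing _link unless force is set.
#   >>> link_pac('openSUSE:Factory', 'osc',
#   ...          'home:user:branches:openSUSE:Factory', 'osc',
#   ...          force=False, rev='42')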
3335 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3338 - "src" is the original package
3339 - "dst" is the "aggregate" package that we are creating here
3340 - "map" is a dictionary SRC => TARGET repository mappings
3345 dst_meta = meta_exists(metatype='pkg',
3346 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3348 create_new=False, apiurl=conf.config['apiurl'])
3350 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3351 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3356 root = ET.fromstring(''.join(dst_meta))
3357 elm = root.find('publish')
3359 elm = ET.SubElement(root, 'publish')
3361 ET.SubElement(elm, 'disable')
3362 dst_meta = ET.tostring(root)
3365 path_args=(dst_project, dst_package),