1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breakage of tools which use osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Build Service and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E', # obsolete with OBS 2.0
201 # our own XML writer function to write XML nicely, but with correct syntax
202 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
203 from xml.dom import minidom
204 def fixed_writexml(self, writer, indent="", addindent="", newl=""):
205 # indent = current indentation
206 # addindent = indentation to add to higher levels
207 # newl = newline string
208 writer.write(indent+"<" + self.tagName)
210 attrs = self._get_attributes()
211 a_names = attrs.keys()
214 for a_name in a_names:
215 writer.write(" %s=\"" % a_name)
216 minidom._write_data(writer, attrs[a_name].value)
219 if len(self.childNodes) == 1 \
220 and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
222 self.childNodes[0].writexml(writer, "", "", "")
223 writer.write("</%s>%s" % (self.tagName, newl))
225 writer.write(">%s"%(newl))
226 for node in self.childNodes:
227 node.writexml(writer,indent+addindent,addindent,newl)
228 writer.write("%s</%s>%s" % (indent,self.tagName,newl))
230 writer.write("/>%s"%(newl))
231 # replace minidom's function with ours
232 minidom.Element.writexml = fixed_writexml
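# Illustrative effect of the patch (not part of osc itself): with fixed_writexml in
# place, an element whose only child is a text node is serialized on a single line,
# e.g. minidom.parseString('<x><t>a</t></x>').documentElement.toprettyxml(indent='  ')
# yields roughly '<x>\n  <t>a</t>\n</x>\n' instead of pushing the text onto its own
# indented line, which would add stray whitespace to the text content.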
235 # os.path.samefile is available only under Unix
236 def os_path_samefile(path1, path2):
238 return os.path.samefile(path1, path2)
240 return os.path.realpath(path1) == os.path.realpath(path2)
243 """represent a file, including its metadata"""
244 def __init__(self, name, md5, size, mtime):
254 """Source service content
257 """creates an empty serviceinfo instance"""
260 def read(self, serviceinfo_node):
261 """read in the source services <services> element passed as
264 if serviceinfo_node == None:
267 services = serviceinfo_node.findall('service')
269 for service in services:
270 name = service.get('name')
272 for param in service.findall('param'):
273 option = param.get('name', None)
275 name += " --" + option + " '" + value + "'"
276 self.commands.append(name)
278 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
279 raise oscerr.APIError(msg)
281 def addVerifyFile(self, serviceinfo_node, filename):
284 f = open(filename, 'r')
285 digest = hashlib.sha256(f.read()).hexdigest()
289 s = ET.Element( "service", name="verify_file" )
290 ET.SubElement(s, "param", name="file").text = filename
291 ET.SubElement(s, "param", name="verifier").text = "sha256"
292 ET.SubElement(s, "param", name="checksum").text = digest
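# For illustration (hypothetical file name): the <service> element assembled above
# serializes roughly as
#   <service name="verify_file">
#     <param name="file">foo.tar.gz</param>
#     <param name="verifier">sha256</param>
#     <param name="checksum">...hex digest of foo.tar.gz...</param>
#   </service>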
298 def addDownloadUrl(self, serviceinfo_node, url_string):
299 from urlparse import urlparse
300 url = urlparse( url_string )
301 protocol = url.scheme
306 s = ET.Element( "service", name="download_url" )
307 ET.SubElement(s, "param", name="protocol").text = protocol
308 ET.SubElement(s, "param", name="host").text = host
309 ET.SubElement(s, "param", name="path").text = path
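# For illustration (hypothetical URL): addDownloadUrl with
# 'http://example.org/pub/foo.tar.gz' would roughly produce
#   <service name="download_url">
#     <param name="protocol">http</param>
#     <param name="host">example.org</param>
#     <param name="path">/pub/foo.tar.gz</param>
#   </service>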
315 def execute(self, dir):
318 for call in self.commands:
319 temp_dir = tempfile.mkdtemp()
320 name = call.split(None, 1)[0]
321 if not os.path.exists("/usr/lib/obs/service/"+name):
322 msg = "ERROR: service is not installed!\n"
323 msg += "Maybe try this: zypper in obs-server-" + name
324 raise oscerr.APIError(msg)
325 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
326 ret = subprocess.call(c, shell=True)
328 print "ERROR: service call failed: " + c
330 for file in os.listdir(temp_dir):
331 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
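# For example (illustrative): the output of a "download_url" service that fetched
# foo.tar.gz ends up in the working copy as "_service:download_url:foo.tar.gz",
# keeping generated files clearly separated from regular source files.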
335 """linkinfo metadata (which is part of the xml representing a directory
338 """creates an empty linkinfo instance"""
348 def read(self, linkinfo_node):
349 """read in the linkinfo metadata from the <linkinfo> element passed as
351 If the passed element is None, the method does nothing.
353 if linkinfo_node == None:
355 self.project = linkinfo_node.get('project')
356 self.package = linkinfo_node.get('package')
357 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
358 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
359 self.srcmd5 = linkinfo_node.get('srcmd5')
360 self.error = linkinfo_node.get('error')
361 self.rev = linkinfo_node.get('rev')
362 self.baserev = linkinfo_node.get('baserev')
365 """returns True if the linkinfo is not empty, otherwise False"""
366 if self.xsrcmd5 or self.lsrcmd5:
370 def isexpanded(self):
371 """returns True if the package is an expanded link"""
372 if self.lsrcmd5 and not self.xsrcmd5:
377 """returns True if the link is in error state (could not be applied)"""
383 """return an informatory string representation"""
384 if self.islink() and not self.isexpanded():
385 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
386 % (self.project, self.package, self.xsrcmd5, self.rev)
387 elif self.islink() and self.isexpanded():
389 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
390 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
392 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
393 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
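# Rough summary of the states encoded above (informal, derived from the accessors):
#   unexpanded link: xsrcmd5 set                  -> islink() and not isexpanded()
#   expanded link:   lsrcmd5 set, xsrcmd5 missing -> islink() and isexpanded()
#   broken link:     error attribute set          -> haserror()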
399 """represent a project directory, holding packages"""
400 def __init__(self, dir, getPackageList=True, progress_obj=None):
403 self.absdir = os.path.abspath(dir)
404 self.progress_obj = progress_obj
406 self.name = store_read_project(self.dir)
407 self.apiurl = store_read_apiurl(self.dir)
410 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
412 self.pacs_available = []
414 if conf.config['do_package_tracking']:
415 self.pac_root = self.read_packages().getroot()
416 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
417 self.pacs_excluded = [ i for i in os.listdir(self.dir)
418 for j in conf.config['exclude_glob']
419 if fnmatch.fnmatch(i, j) ]
420 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
421 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
422 # in the self.pacs_broken list
423 self.pacs_broken = []
424 for p in self.pacs_have:
425 if not os.path.isdir(os.path.join(self.absdir, p)):
426 # all states will be replaced with the '!'-state
427 # (except when it is already marked as deleted ('D'-state))
428 self.pacs_broken.append(p)
430 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
432 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
434 def checkout_missing_pacs(self, expand_link=False):
435 for pac in self.pacs_missing:
437 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
438 # pac is not under version control but a local file/dir exists
439 msg = 'can\'t add package \'%s\': Object already exists' % pac
440 raise oscerr.PackageExists(self.name, pac, msg)
442 print 'checking out new package %s' % pac
443 checkout_package(self.apiurl, self.name, pac, \
444 pathname=getTransActPath(os.path.join(self.dir, pac)), \
445 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
447 def set_state(self, pac, state):
448 node = self.get_package_node(pac)
450 self.new_package_entry(pac, state)
452 node.attrib['state'] = state
454 def get_package_node(self, pac):
455 for node in self.pac_root.findall('package'):
456 if pac == node.get('name'):
460 def del_package_node(self, pac):
461 for node in self.pac_root.findall('package'):
462 if pac == node.get('name'):
463 self.pac_root.remove(node)
465 def get_state(self, pac):
466 node = self.get_package_node(pac)
468 return node.get('state')
472 def new_package_entry(self, name, state):
473 ET.SubElement(self.pac_root, 'package', name=name, state=state)
475 def read_packages(self):
476 packages_file = os.path.join(self.absdir, store, '_packages')
477 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
478 return ET.parse(packages_file)
480 # scan project for existing packages and migrate them
482 for data in os.listdir(self.dir):
483 pac_dir = os.path.join(self.absdir, data)
484 # we cannot use self.pacs_available because we cannot guarantee that the package list
485 # was fetched from the server
486 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
487 and Package(pac_dir).name == data:
488 cur_pacs.append(ET.Element('package', name=data, state=' '))
489 store_write_initial_packages(self.absdir, self.name, cur_pacs)
490 return ET.parse(os.path.join(self.absdir, store, '_packages'))
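# The _packages file read and written here has roughly this shape (illustrative names):
#   <project name="home:user">
#     <package name="foo" state=" "/>
#     <package name="bar" state="A"/>
#   </project>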
492 def write_packages(self):
493 # TODO: should we only modify the existing file instead of overwriting?
494 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
496 def addPackage(self, pac):
498 for i in conf.config['exclude_glob']:
499 if fnmatch.fnmatch(pac, i):
500 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
501 raise oscerr.OscIOError(None, msg)
502 state = self.get_state(pac)
503 if state == None or state == 'D':
504 self.new_package_entry(pac, 'A')
505 self.write_packages()
506 # sometimes the new pac doesn't exist in the list because
507 # it would take too much time to update all data structs regularly
508 if pac in self.pacs_unvers:
509 self.pacs_unvers.remove(pac)
511 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
513 def delPackage(self, pac, force = False):
514 state = self.get_state(pac.name)
516 if state == ' ' or state == 'D':
518 for file in pac.filenamelist + pac.filenamelist_unvers:
519 filestate = pac.status(file)
520 if filestate == 'M' or filestate == 'C' or \
521 filestate == 'A' or filestate == '?':
524 del_files.append(file)
525 if can_delete or force:
526 for file in del_files:
527 pac.delete_localfile(file)
528 if pac.status(file) != '?':
529 pac.delete_storefile(file)
530 # this is not really necessary
531 pac.put_on_deletelist(file)
532 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
533 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
534 pac.write_deletelist()
535 self.set_state(pac.name, 'D')
536 self.write_packages()
538 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
541 delete_dir(pac.absdir)
542 self.del_package_node(pac.name)
543 self.write_packages()
544 print statfrmt('D', pac.name)
546 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
548 print 'package is not under version control'
550 print 'unsupported state'
552 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
555 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
557 # we need to make sure that the _packages file will be written (even if an exception
560 # update complete project
561 # packages which no longer exist upstream
562 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
564 for pac in upstream_del:
565 p = Package(os.path.join(self.dir, pac))
566 self.delPackage(p, force = True)
567 delete_storedir(p.storedir)
572 self.pac_root.remove(self.get_package_node(p.name))
573 self.pacs_have.remove(pac)
575 for pac in self.pacs_have:
576 state = self.get_state(pac)
577 if pac in self.pacs_broken:
578 if self.get_state(pac) != 'A':
579 checkout_package(self.apiurl, self.name, pac,
580 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
581 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
584 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
586 if expand_link and p.islink() and not p.isexpanded():
589 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
591 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
594 rev = p.linkinfo.xsrcmd5
595 print 'Expanding to rev', rev
596 elif unexpand_link and p.islink() and p.isexpanded():
597 rev = p.linkinfo.lsrcmd5
598 print 'Unexpanding to rev', rev
599 elif p.islink() and p.isexpanded():
601 print 'Updating %s' % p.name
602 p.update(rev, service_files)
606 # TODO: Package::update has to be fixed to behave like svn does
607 if pac in self.pacs_broken:
608 checkout_package(self.apiurl, self.name, pac,
609 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
610 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
612 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
613 elif state == 'A' and pac in self.pacs_available:
614 # file/dir called pac already exists and is under version control
615 msg = 'can\'t add package \'%s\': Object already exists' % pac
616 raise oscerr.PackageExists(self.name, pac, msg)
621 print 'unexpected state.. package \'%s\'' % pac
623 self.checkout_missing_pacs(expand_link=not unexpand_link)
625 self.write_packages()
627 def commit(self, pacs = (), msg = '', files = {}, validators = None):
632 if files.has_key(pac):
634 state = self.get_state(pac)
636 self.commitNewPackage(pac, msg, todo)
638 self.commitDelPackage(pac)
640 # display the correct dir when sending the changes
641 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
644 p = Package(os.path.join(self.dir, pac))
646 p.commit(msg, validators=validators)
647 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
648 print 'osc: \'%s\' is not under version control' % pac
649 elif pac in self.pacs_broken:
650 print 'osc: \'%s\' package not found' % pac
652 self.commitExtPackage(pac, msg, todo)
654 self.write_packages()
656 # if we have packages marked as '!' we cannot commit
657 for pac in self.pacs_broken:
658 if self.get_state(pac) != 'D':
659 msg = 'commit failed: package \'%s\' is missing' % pac
660 raise oscerr.PackageMissing(self.name, pac, msg)
662 for pac in self.pacs_have:
663 state = self.get_state(pac)
666 Package(os.path.join(self.dir, pac)).commit(msg, validators=validators)
668 self.commitDelPackage(pac)
670 self.commitNewPackage(pac, msg)
672 self.write_packages()
674 def commitNewPackage(self, pac, msg = '', files = []):
675 """creates and commits a new package if it does not exist on the server"""
676 if pac in self.pacs_available:
677 print 'package \'%s\' already exists' % pac
679 user = conf.get_apiurl_usr(self.apiurl)
680 edit_meta(metatype='pkg',
681 path_args=(quote_plus(self.name), quote_plus(pac)),
686 # display the correct dir when sending the changes
688 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
692 p = Package(os.path.join(self.dir, pac))
694 print statfrmt('Sending', os.path.normpath(p.dir))
696 self.set_state(pac, ' ')
699 def commitDelPackage(self, pac):
700 """deletes a package on the server and in the working copy"""
702 # display the correct dir when sending the changes
703 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
706 pac_dir = os.path.join(self.dir, pac)
707 p = Package(os.path.join(self.dir, pac))
708 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
709 delete_storedir(p.storedir)
715 pac_dir = os.path.join(self.dir, pac)
716 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
717 print statfrmt('Deleting', getTransActPath(pac_dir))
718 delete_package(self.apiurl, self.name, pac)
719 self.del_package_node(pac)
721 def commitExtPackage(self, pac, msg, files = []):
722 """commits a package from an external project"""
723 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
726 pac_path = os.path.join(self.dir, pac)
728 project = store_read_project(pac_path)
729 package = store_read_package(pac_path)
730 apiurl = store_read_apiurl(pac_path)
731 if meta_exists(metatype='pkg',
732 path_args=(quote_plus(project), quote_plus(package)),
734 create_new=False, apiurl=apiurl):
735 p = Package(pac_path)
739 user = conf.get_apiurl_usr(self.apiurl)
740 edit_meta(metatype='pkg',
741 path_args=(quote_plus(project), quote_plus(package)),
746 p = Package(pac_path)
752 r.append('*****************************************************')
753 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
754 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
755 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
756 r.append('*****************************************************')
762 """represent a package (its directory) and read/keep/write its metadata"""
763 def __init__(self, workingdir, progress_obj=None, limit_size=None):
764 self.dir = workingdir
765 self.absdir = os.path.abspath(self.dir)
766 self.storedir = os.path.join(self.absdir, store)
767 self.progress_obj = progress_obj
768 self.limit_size = limit_size
769 if limit_size == 0: # an explicit limit of 0 means 'no limit'
770 self.limit_size = None
772 check_store_version(self.dir)
774 self.prjname = store_read_project(self.dir)
775 self.name = store_read_package(self.dir)
776 self.apiurl = store_read_apiurl(self.dir)
778 self.update_datastructs()
782 self.todo_delete = []
785 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
786 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
789 def addfile(self, n):
790 st = os.stat(os.path.join(self.dir, n))
791 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
793 def delete_file(self, n, force=False):
794 """deletes a file if possible and marks the file as deleted"""
797 state = self.status(n)
801 if state in ['?', 'A', 'M'] and not force:
802 return (False, state)
803 self.delete_localfile(n)
805 self.put_on_deletelist(n)
806 self.write_deletelist()
808 self.delete_storefile(n)
811 def delete_storefile(self, n):
812 try: os.unlink(os.path.join(self.storedir, n))
815 def delete_localfile(self, n):
816 try: os.unlink(os.path.join(self.dir, n))
819 def put_on_deletelist(self, n):
820 if n not in self.to_be_deleted:
821 self.to_be_deleted.append(n)
823 def put_on_conflictlist(self, n):
824 if n not in self.in_conflict:
825 self.in_conflict.append(n)
827 def clear_from_conflictlist(self, n):
828 """delete an entry from the file, and remove the file if it would be empty"""
829 if n in self.in_conflict:
831 filename = os.path.join(self.dir, n)
832 storefilename = os.path.join(self.storedir, n)
833 myfilename = os.path.join(self.dir, n + '.mine')
834 if self.islinkrepair() or self.ispulled():
835 upfilename = os.path.join(self.dir, n + '.new')
837 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
840 os.unlink(myfilename)
841 # the working copy may be updated, so the .r* ending may be obsolete...
843 os.unlink(upfilename)
844 if self.islinkrepair() or self.ispulled():
845 os.unlink(os.path.join(self.dir, n + '.old'))
849 self.in_conflict.remove(n)
851 self.write_conflictlist()
853 def write_sizelimit(self):
854 if self.size_limit and self.size_limit <= 0:
856 os.unlink(os.path.join(self.storedir, '_size_limit'))
860 fname = os.path.join(self.storedir, '_size_limit')
862 f.write(str(self.size_limit))
865 def write_deletelist(self):
866 if len(self.to_be_deleted) == 0:
868 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
872 fname = os.path.join(self.storedir, '_to_be_deleted')
874 f.write('\n'.join(self.to_be_deleted))
878 def delete_source_file(self, n):
879 """delete local a source file"""
880 self.delete_localfile(n)
881 self.delete_storefile(n)
883 def delete_remote_source_file(self, n):
884 """delete a remote source file (e.g. from the server)"""
886 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
889 def put_source_file(self, n):
891 # escaping '+' in the URL path (note: not in the URL query string) is
892 # only a workaround for ruby on rails, which swallows it otherwise
894 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
895 http_PUT(u, file = os.path.join(self.dir, n))
897 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
899 def commit(self, msg='', validators=None):
900 # commit only if the upstream revision is the same as the working copy's
901 upstream_rev = self.latest_rev()
902 if self.rev != upstream_rev:
903 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
906 self.todo = self.filenamelist_unvers + self.filenamelist
908 pathn = getTransActPath(self.dir)
913 for validator in os.listdir(validators):
914 fn=validators+"/"+validator
916 if S_ISREG(mode[ST_MODE]):
917 p = subprocess.Popen([fn], close_fds=True)
919 raise oscerr.RuntimeError(p.stdout, validator )
921 have_conflicts = False
922 for filename in self.todo:
923 if not filename.startswith('_service:') and not filename.startswith('_service_'):
924 st = self.status(filename)
926 self.todo.remove(filename)
927 elif st == 'A' or st == 'M':
928 self.todo_send.append(filename)
929 print statfrmt('Sending', os.path.join(pathn, filename))
931 self.todo_delete.append(filename)
932 print statfrmt('Deleting', os.path.join(pathn, filename))
934 have_conflicts = True
937 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
940 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
941 print 'nothing to do for package %s' % self.name
944 if self.islink() and self.isexpanded():
945 # resolve the link into the upload revision
946 # XXX: do this always?
947 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
948 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
951 print 'Transmitting file data ',
953 for filename in self.todo_delete:
954 # do not touch local files on commit --
955 # delete remotely instead
956 self.delete_remote_source_file(filename)
957 self.to_be_deleted.remove(filename)
958 for filename in self.todo_send:
959 sys.stdout.write('.')
961 self.put_source_file(filename)
963 # all source files are committed - now comes the log
964 query = { 'cmd' : 'commit',
966 'user' : conf.get_apiurl_usr(self.apiurl),
968 if self.islink() and self.isexpanded():
969 query['keeplink'] = '1'
970 if conf.config['linkcontrol'] or self.isfrozen():
971 query['linkrev'] = self.linkinfo.srcmd5
973 query['repairlink'] = '1'
974 query['linkrev'] = self.get_pulled_srcmd5()
975 if self.islinkrepair():
976 query['repairlink'] = '1'
977 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
980 # delete upload revision
982 query = { 'cmd': 'deleteuploadrev' }
983 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
989 root = ET.parse(f).getroot()
990 self.rev = int(root.get('rev'))
992 print 'Committed revision %s.' % self.rev
995 os.unlink(os.path.join(self.storedir, '_pulled'))
996 if self.islinkrepair():
997 os.unlink(os.path.join(self.storedir, '_linkrepair'))
998 self.linkrepair = False
999 # XXX: mark package as invalid?
1000 print 'The source link has been repaired. This directory can now be removed.'
1001 if self.islink() and self.isexpanded():
1002 self.update_local_filesmeta(revision=self.latest_rev())
1004 self.update_local_filesmeta()
1005 self.write_deletelist()
1006 self.update_datastructs()
1008 if self.filenamelist.count('_service'):
1009 print 'The package contains a source service.'
1010 for filename in self.todo:
1011 if filename.startswith('_service:') and os.path.exists(filename):
1012 os.unlink(filename) # remove local files
1013 print_request_list(self.apiurl, self.prjname, self.name)
1015 def write_conflictlist(self):
1016 if len(self.in_conflict) == 0:
1018 os.unlink(os.path.join(self.storedir, '_in_conflict'))
1022 fname = os.path.join(self.storedir, '_in_conflict')
1023 f = open(fname, 'w')
1024 f.write('\n'.join(self.in_conflict))
1028 def updatefile(self, n, revision):
1029 filename = os.path.join(self.dir, n)
1030 storefilename = os.path.join(self.storedir, n)
1031 mtime = self.findfilebyname(n).mtime
1033 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1034 revision=revision, progress_obj=self.progress_obj, mtime=mtime)
1036 shutil.copyfile(filename, storefilename)
1038 def mergefile(self, n):
1039 filename = os.path.join(self.dir, n)
1040 storefilename = os.path.join(self.storedir, n)
1041 myfilename = os.path.join(self.dir, n + '.mine')
1042 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1043 os.rename(filename, myfilename)
1045 mtime = self.findfilebyname(n).mtime
1046 get_source_file(self.apiurl, self.prjname, self.name, n,
1047 revision=self.rev, targetfilename=upfilename,
1048 progress_obj=self.progress_obj, mtime=mtime)
1050 if binary_file(myfilename) or binary_file(upfilename):
1052 shutil.copyfile(upfilename, filename)
1053 shutil.copyfile(upfilename, storefilename)
1054 self.in_conflict.append(n)
1055 self.write_conflictlist()
1059 # diff3 OPTIONS... MINE OLDER YOURS
1060 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1061 # we would rather use the subprocess module, but it is not available before 2.4
1062 ret = subprocess.call(merge_cmd, shell=True)
1064 # "An exit status of 0 means `diff3' was successful, 1 means some
1065 # conflicts were found, and 2 means trouble."
1067 # merge was successful... clean up
1068 shutil.copyfile(upfilename, storefilename)
1069 os.unlink(upfilename)
1070 os.unlink(myfilename)
1073 # unsuccessful merge
1074 shutil.copyfile(upfilename, storefilename)
1075 self.in_conflict.append(n)
1076 self.write_conflictlist()
1079 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1080 print >>sys.stderr, 'the command line was:'
1081 print >>sys.stderr, merge_cmd
1086 def update_local_filesmeta(self, revision=None):
1088 Update the local _files file in the store.
1089 It is replaced with the version pulled from upstream.
1091 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
1092 store_write_string(self.absdir, '_files', meta)
1094 def update_datastructs(self):
1096 Update the internal data structures if the local _files
1097 file has changed (e.g. update_local_filesmeta() has been
1101 files_tree = read_filemeta(self.dir)
1102 files_tree_root = files_tree.getroot()
1104 self.rev = files_tree_root.get('rev')
1105 self.srcmd5 = files_tree_root.get('srcmd5')
1107 self.linkinfo = Linkinfo()
1108 self.linkinfo.read(files_tree_root.find('linkinfo'))
1110 self.filenamelist = []
1113 for node in files_tree_root.findall('entry'):
1115 f = File(node.get('name'),
1117 int(node.get('size')),
1118 int(node.get('mtime')))
1119 if node.get('skipped'):
1120 self.skipped.append(f.name)
1122 # okay, a very old version of _files, which didn't contain any metadata yet...
1123 f = File(node.get('name'), '', 0, 0)
1124 self.filelist.append(f)
1125 self.filenamelist.append(f.name)
1127 self.to_be_deleted = read_tobedeleted(self.dir)
1128 self.in_conflict = read_inconflict(self.dir)
1129 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1130 self.size_limit = read_sizelimit(self.dir)
1132 # gather unversioned files, but ignore some stuff
1133 self.excluded = [ i for i in os.listdir(self.dir)
1134 for j in conf.config['exclude_glob']
1135 if fnmatch.fnmatch(i, j) ]
1136 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1137 if i not in self.excluded
1138 if i not in self.filenamelist ]
1141 """tells us if the package is a link (has 'linkinfo').
1142 A package with linkinfo is a package which links to another package.
1143 Returns True if the package is a link, otherwise False."""
1144 return self.linkinfo.islink()
1146 def isexpanded(self):
1147 """tells us if the package is a link which is expanded.
1148 Returns True if the package is expanded, otherwise False."""
1149 return self.linkinfo.isexpanded()
1151 def islinkrepair(self):
1152 """tells us if we are repairing a broken source link."""
1153 return self.linkrepair
1156 """tells us if we have pulled a link."""
1157 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1160 """tells us if the link is frozen."""
1161 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1163 def get_pulled_srcmd5(self):
1165 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1166 pulledrev = line.strip()
1169 def haslinkerror(self):
1171 Returns True if the link is broken otherwise False.
1172 If the package is not a link it returns False.
1174 return self.linkinfo.haserror()
1176 def linkerror(self):
1178 Returns an error message if the link is broken otherwise None.
1179 If the package is not a link it returns None.
1181 return self.linkinfo.error
1183 def update_local_pacmeta(self):
1185 Update the local _meta file in the store.
1186 It is replaced with the version pulled from upstream.
1188 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1189 store_write_string(self.absdir, '_meta', meta)
1191 def findfilebyname(self, n):
1192 for i in self.filelist:
1196 def status(self, n):
1200  file    storefile   file present   STATUS
1201  exists  exists      in _files
1204    x        x            x           ' '   if digest differs: 'M'
1205                                            and if in conflicts file: 'C'
1207    x        -            x           'D'   and listed in _to_be_deleted
1209    -        x            -           'D'   (when file in working copy is already deleted)
1210    -        -            x           'F'   (new in repo, but not yet in working copy)
1215 known_by_meta = False
1217 exists_in_store = False
1218 if n in self.filenamelist:
1219 known_by_meta = True
1220 if os.path.exists(os.path.join(self.absdir, n)):
1222 if os.path.exists(os.path.join(self.storedir, n)):
1223 exists_in_store = True
1226 if n in self.skipped:
1228 elif exists and not exists_in_store and known_by_meta:
1230 elif n in self.to_be_deleted:
1232 elif n in self.in_conflict:
1234 elif exists and exists_in_store and known_by_meta:
1235 #print self.findfilebyname(n)
1236 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1240 elif exists and not exists_in_store and not known_by_meta:
1242 elif exists and exists_in_store and not known_by_meta:
1244 elif not exists and exists_in_store and known_by_meta:
1246 elif not exists and not exists_in_store and known_by_meta:
1248 elif not exists and exists_in_store and not known_by_meta:
1250 elif not exists and not exists_in_store and not known_by_meta:
1251 # this case shouldn't happen (except when there was a typo in the filename, etc.)
1252 raise IOError('osc: \'%s\' is not under version control' % n)
1256 def comparePac(self, cmp_pac):
1258 This method compares the local filelist with
1259 the filelist of the passed package to see which files
1260 were added, removed and changed.
1267 for file in self.filenamelist+self.filenamelist_unvers:
1268 state = self.status(file)
1269 if file in self.skipped:
1271 if state == 'A' and (not file in cmp_pac.filenamelist):
1272 added_files.append(file)
1273 elif file in cmp_pac.filenamelist and state == 'D':
1274 removed_files.append(file)
1275 elif state == ' ' and not file in cmp_pac.filenamelist:
1276 added_files.append(file)
1277 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1278 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1279 changed_files.append(file)
1280 for file in cmp_pac.filenamelist:
1281 if not file in self.filenamelist:
1282 removed_files.append(file)
1283 removed_files = set(removed_files)
1285 return changed_files, added_files, removed_files
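# Illustrative use (hypothetical objects): comparing a working copy against a freshly
# fetched Package of the same sources,
#   changed, added, removed = wc_pac.comparePac(remote_pac)
# yields the file names to diff, to treat as new, and to treat as deleted.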
1287 def merge(self, otherpac):
1288 self.todo += otherpac.todo
1302 '\n '.join(self.filenamelist),
1310 def read_meta_from_spec(self, spec = None):
1315 # scan for spec files
1316 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1317 if len(speclist) == 1:
1318 specfile = speclist[0]
1319 elif len(speclist) > 1:
1320 print 'the following specfiles were found:'
1321 for file in speclist:
1323 print 'please specify one with --specfile'
1326 print 'no specfile was found - please specify one ' \
1330 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1331 self.summary = data['Summary']
1332 self.url = data['Url']
1333 self.descr = data['%description']
1336 def update_package_meta(self, force=False):
1338 for the updatepacmetafromspec subcommand
1339 the force argument suppresses the confirmation question
1342 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1344 root = ET.fromstring(m)
1345 root.find('title').text = self.summary
1346 root.find('description').text = ''.join(self.descr)
1347 url = root.find('url')
1349 url = ET.SubElement(root, 'url')
1352 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1353 mf = metafile(u, ET.tostring(root))
1356 print '*' * 36, 'old', '*' * 36
1358 print '*' * 36, 'new', '*' * 36
1359 print ET.tostring(root)
1361 repl = raw_input('Write? (y/N/e) ')
1372 def mark_frozen(self):
1373 store_write_string(self.absdir, '_frozenlink', '')
1375 print "The link in this package is currently broken. Checking"
1376 print "out the last working version instead; please use 'osc pull'"
1377 print "to repair the link."
1380 def unmark_frozen(self):
1381 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1382 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1384 def latest_rev(self):
1385 if self.islinkrepair():
1386 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1387 elif self.islink() and self.isexpanded():
1388 if self.isfrozen() or self.ispulled():
1389 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1392 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1395 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1397 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1400 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1403 def update(self, rev = None, service_files = False, limit_size = None):
1404 # save filelist and (modified) status before replacing the meta file
1405 saved_filenames = self.filenamelist
1406 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1410 self.limit_size = limit_size
1412 self.limit_size = read_sizelimit(self.dir)
1413 self.update_local_filesmeta(rev)
1414 self = Package(self.dir, progress_obj=self.progress_obj)
1416 # which files no longer exist upstream?
1417 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1419 pathn = getTransActPath(self.dir)
1421 for filename in saved_filenames:
1422 if filename in self.skipped:
1424 if not filename.startswith('_service:') and filename in disappeared:
1425 print statfrmt('D', os.path.join(pathn, filename))
1426 # keep file if it has local modifications
1427 if oldp.status(filename) == ' ':
1428 self.delete_localfile(filename)
1429 self.delete_storefile(filename)
1431 for filename in self.filenamelist:
1432 if filename in self.skipped:
1435 state = self.status(filename)
1436 if not service_files and filename.startswith('_service:'):
1438 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1439 # no merge necessary... local file is changed, but upstream isn't
1441 elif state == 'M' and filename in saved_modifiedfiles:
1442 status_after_merge = self.mergefile(filename)
1443 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1445 self.updatefile(filename, rev)
1446 print statfrmt('U', os.path.join(pathn, filename))
1448 self.updatefile(filename, rev)
1449 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1451 self.updatefile(filename, rev)
1452 print statfrmt('A', os.path.join(pathn, filename))
1453 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1454 self.updatefile(filename, rev)
1455 self.delete_storefile(filename)
1456 print statfrmt('U', os.path.join(pathn, filename))
1460 self.update_local_pacmeta()
1462 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1463 print 'At revision %s.' % self.rev
1465 if not service_files:
1466 self.run_source_services()
1468 def run_source_services(self):
1469 if self.filenamelist.count('_service'):
1470 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1473 si.execute(self.absdir)
1475 def prepare_filelist(self):
1476 """Prepare a list of files, which will be processed by process_filelist
1477 method. This allows easy modifications of a file list in commit
1481 self.todo = self.filenamelist + self.filenamelist_unvers
1485 for f in [f for f in self.todo if not os.path.isdir(f)]:
1487 status = self.status(f)
1492 ret += "%s %s %s\n" % (action, status, f)
1495 # Edit a filelist for package \'%s\'
1497 # l, leave = leave a file as is
1498 # r, remove = remove a file
1499 # a, add = add a file
1501 # If you remove a file from the list, it will be left unchanged
1502 # If you remove all files, the commit will be aborted""" % self.name
1506 def edit_filelist(self):
1507 """Opens a package list in editor for editing. This allows easy
1508 modifications of it just by simple text editing
1512 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1513 f = os.fdopen(fd, 'w')
1514 f.write(self.prepare_filelist())
1516 mtime_orig = os.stat(filename).st_mtime
1519 run_editor(filename)
1520 mtime = os.stat(filename).st_mtime
1521 if mtime_orig < mtime:
1522 filelist = open(filename).readlines()
1526 raise oscerr.UserAbort()
1528 return self.process_filelist(filelist)
1530 def process_filelist(self, filelist):
1531 """Process a filelist - it add/remove or leave files. This depends on
1532 user input. If no file is processed, it raises an ValueError
1536 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1538 foo = line.split(' ')
1540 action, state, name = (foo[0], ' ', foo[3])
1542 action, state, name = (foo[0], foo[1], foo[2])
1545 action = action.lower()
1548 if action in ('r', 'remove'):
1549 if self.status(name) == '?':
1551 if name in self.todo:
1552 self.todo.remove(name)
1554 self.delete_file(name, True)
1555 elif action in ('a', 'add'):
1556 if self.status(name) != '?':
1557 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1560 elif action in ('l', 'leave'):
1563 raise ValueError("Unknown action `%s'" % action)
1566 raise ValueError("Empty filelist")
1569 """for objects to represent the review state in a request"""
1570 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1572 self.by_user = by_user
1573 self.by_group = by_group
1576 self.comment = comment
1579 """for objects to represent the "state" of a request"""
1580 def __init__(self, name=None, who=None, when=None, comment=None):
1584 self.comment = comment
1587 """represents an action"""
1588 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1590 self.src_project = src_project
1591 self.src_package = src_package
1592 self.src_rev = src_rev
1593 self.dst_project = dst_project
1594 self.dst_package = dst_package
1595 self.src_update = src_update
1598 """represent a request and holds its metadata
1599 it has methods to read in metadata from xml,
1600 different views, ..."""
1603 self.state = RequestState()
1606 self.last_author = None
1609 self.statehistory = []
1612 def read(self, root):
1613 self.reqid = int(root.get('id'))
1614 actions = root.findall('action')
1615 if len(actions) == 0:
1616 actions = [ root.find('submit') ] # for old style requests
1618 for action in actions:
1619 type = action.get('type', 'submit')
1621 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1622 if action.findall('source'):
1623 n = action.find('source')
1624 src_prj = n.get('project', None)
1625 src_pkg = n.get('package', None)
1626 src_rev = n.get('rev', None)
1627 if action.findall('target'):
1628 n = action.find('target')
1629 dst_prj = n.get('project', None)
1630 dst_pkg = n.get('package', None)
1631 if action.findall('options'):
1632 n = action.find('options')
1633 if n.findall('sourceupdate'):
1634 src_update = n.find('sourceupdate').text.strip()
1635 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1637 msg = 'invalid request format:\n%s' % ET.tostring(root)
1638 raise oscerr.APIError(msg)
1641 n = root.find('state')
1642 self.state.name, self.state.who, self.state.when \
1643 = n.get('name'), n.get('who'), n.get('when')
1645 self.state.comment = n.find('comment').text.strip()
1647 self.state.comment = None
1649 # read the review states
1650 for r in root.findall('review'):
1652 s.state = r.get('state')
1653 s.by_user = r.get('by_user')
1654 s.by_group = r.get('by_group')
1655 s.who = r.get('who')
1656 s.when = r.get('when')
1658 s.comment = r.find('comment').text.strip()
1661 self.reviews.append(s)
1663 # read the state history
1664 for h in root.findall('history'):
1666 s.name = h.get('name')
1667 s.who = h.get('who')
1668 s.when = h.get('when')
1670 s.comment = h.find('comment').text.strip()
1673 self.statehistory.append(s)
1674 self.statehistory.reverse()
1676 # read a description, if it exists
1678 n = root.find('description').text
1683 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1684 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1685 dst_prj, dst_pkg, src_update)
1688 def list_view(self):
1689 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1691 for a in self.actions:
1692 dst = "%s/%s" % (a.dst_project, a.dst_package)
1693 if a.src_package == a.dst_package:
1697 if a.type=="submit":
1698 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1699 if a.type=="change_devel":
1700 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1701 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1703 ret += '\n %s: %-50s %-20s ' % \
1704 (a.type, sr_source, dst)
1706 if self.statehistory and self.statehistory[0]:
1708 for h in self.statehistory:
1709 who.append("%s(%s)" % (h.who,h.name))
1711 ret += "\n From: %s" % (' -> '.join(who))
1713 txt = re.sub(r'[^\n\t\x20-\x7e]', '_', self.descr) # Python's re has no POSIX [:isprint:] class; strip non-printable chars
1715 lines = txt.splitlines()
1716 wrapper = textwrap.TextWrapper( width = 80,
1717 initial_indent=' Descr: ',
1718 subsequent_indent=' ')
1719 ret += "\n" + wrapper.fill(lines[0])
1720 wrapper.initial_indent = ' '
1721 for line in lines[1:]:
1722 ret += "\n" + wrapper.fill(line)
1728 def __cmp__(self, other):
1729 return cmp(self.reqid, other.reqid)
1733 for action in self.actions:
1734 action_list=" %s: " % (action.type)
1735 if action.type=="submit":
1738 r="(r%s)" % (action.src_rev)
1740 if action.src_update:
1741 m="(%s)" % (action.src_update)
1742 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1743 if action.dst_package:
1744 action_list=action_list+"/%s" % ( action.dst_package )
1745 elif action.type=="delete":
1746 action_list=action_list+" %s" % ( action.dst_project )
1747 if action.dst_package:
1748 action_list=action_list+"/%s" % ( action.dst_package )
1749 elif action.type=="change_devel":
1750 action_list=action_list+" %s/%s developed in %s/%s" % \
1751 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1752 action_list=action_list+"\n"
1767 self.state.name, self.state.when, self.state.who,
1770 if len(self.reviews):
1771 reviewitems = [ '%-10s %s %s %s %s %s' \
1772 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1773 for i in self.reviews ]
1774 s += '\nReview: ' + '\n '.join(reviewitems)
1777 if len(self.statehistory):
1778 histitems = [ '%-10s %s %s' \
1779 % (i.name, i.when, i.who) \
1780 for i in self.statehistory ]
1781 s += '\nHistory: ' + '\n '.join(histitems)
1788 """format time as Apr 02 18:19
1790 or as Apr 02 2005, depending on whether the timestamp is in the current year
1794 if time.localtime()[0] == time.localtime(t)[0]:
1796 return time.strftime('%b %d %H:%M',time.localtime(t))
1798 return time.strftime('%b %d %Y',time.localtime(t))
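# e.g. (illustrative): for a timestamp in the current year shorttime() returns
# something like 'Apr 02 18:19', while older timestamps render as 'Apr 02 2005'.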
1801 def is_project_dir(d):
1802 return os.path.exists(os.path.join(d, store, '_project')) and not \
1803 os.path.exists(os.path.join(d, store, '_package'))
1806 def is_package_dir(d):
1807 return os.path.exists(os.path.join(d, store, '_project')) and \
1808 os.path.exists(os.path.join(d, store, '_package'))
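# Both helpers only inspect the store metadata (typically '.osc'): a project checkout
# carries .osc/_project but no .osc/_package, while a package checkout carries both.
# e.g. (illustrative paths) is_project_dir('home:user') vs. is_package_dir('home:user/foo').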
1810 def parse_disturl(disturl):
1811 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1812 revision), else raises an oscerr.WrongArgs exception
1815 m = DISTURL_RE.match(disturl)
1817 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1819 apiurl = m.group('apiurl')
1820 if apiurl.split('.')[0] != 'api':
1821 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1822 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
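# Illustrative example (hypothetical values):
#   parse_disturl('obs://build.opensuse.org/openSUSE:Factory/standard/abc123-hello')
# would return ('https://api.opensuse.org', 'openSUSE:Factory', 'hello', 'standard',
# 'abc123'), since a leading host label other than 'api' is rewritten to 'api.<domain>'.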
1824 def parse_buildlogurl(buildlogurl):
1825 """Parse a build log url, returns a tuple (apiurl, project, package,
1826 repository, arch), otherwise raises an oscerr.WrongArgs exception"""
1828 global BUILDLOGURL_RE
1830 m = BUILDLOGURL_RE.match(buildlogurl)
1832 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1834 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
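# Illustrative example (hypothetical values):
#   parse_buildlogurl('https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/hello/_log')
# would return ('https://api.opensuse.org', 'openSUSE:Factory', 'hello', 'standard', 'x86_64').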
1837 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1838 This is handy to allow copy/pasting a project/package combination in this form.
1840 Trailing slashes are removed before the split, because the split would
1841 otherwise give an additional empty string.
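# e.g. (illustrative): slash_split(['openSUSE:Factory/hello', 'other']) is expected to
# yield ['openSUSE:Factory', 'hello', 'other'].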
1849 def expand_proj_pack(args, idx=0, howmany=0):
1850 """looks for occurance of '.' at the position idx.
1851 If howmany is 2, both proj and pack are expanded together
1852 using the current directory, or none of them, if not possible.
1853 If howmany is 0, proj is expanded if possible, then, if there
1854 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1855 expanded, if possible.
1856 If howmany is 1, only proj is expanded if possible.
1858 If args[idx] does not exist, an implicit '.' is assumed.
1859 If not enough elements up to idx exist, an error is raised.
1861 See also parseargs(args), slash_split(args), findpacs(args)
1862 All these need unification, somehow.
1865 # print args,idx,howmany
1868 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1870 if len(args) == idx:
1872 if args[idx+0] == '.':
1873 if howmany == 0 and len(args) > idx+1:
1874 if args[idx+1] == '.':
1876 # remove one dot and make sure to expand both proj and pack
1881 # print args,idx,howmany
1883 args[idx+0] = store_read_project('.')
1886 package = store_read_package('.')
1887 args.insert(idx+1, package)
1891 package = store_read_package('.')
1892 args.insert(idx+1, package)
1896 def findpacs(files, progress_obj=None):
1897 """collect Package objects belonging to the given files
1898 and make sure each Package is returned only once"""
1901 p = filedir_to_pac(f, progress_obj)
1904 if i.name == p.name:
1914 def filedir_to_pac(f, progress_obj=None):
1915 """Takes a working copy path, or a path to a file inside a working copy,
1916 and returns a Package object instance
1918 If the argument was a filename, add it onto the "todo" list of the Package """
1920 if os.path.isdir(f):
1922 p = Package(wd, progress_obj=progress_obj)
1924 wd = os.path.dirname(f) or os.curdir
1925 p = Package(wd, progress_obj=progress_obj)
1926 p.todo = [ os.path.basename(f) ]
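# e.g. (illustrative paths): filedir_to_pac('hello/hello.spec') returns a Package for
# the 'hello' working copy with p.todo == ['hello.spec'], whereas filedir_to_pac('hello')
# returns the Package with an empty todo list.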
1930 def read_filemeta(dir):
1932 r = ET.parse(os.path.join(dir, store, '_files'))
1933 except SyntaxError, e:
1934 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1935 'When parsing .osc/_files, the following error was encountered:\n'
1940 def read_tobedeleted(dir):
1942 fname = os.path.join(dir, store, '_to_be_deleted')
1944 if os.path.exists(fname):
1945 r = [ line.strip() for line in open(fname) ]
1950 def read_sizelimit(dir):
1952 fname = os.path.join(dir, store, '_size_limit')
1954 if os.path.exists(fname):
1955 r = open(fname).readline()
1957 if r is None or not r.isdigit():
1961 def read_inconflict(dir):
1963 fname = os.path.join(dir, store, '_in_conflict')
1965 if os.path.exists(fname):
1966 r = [ line.strip() for line in open(fname) ]
1971 def parseargs(list_of_args):
1972 """Convenience method osc's commandline argument parsing.
1974 If called with an empty tuple (or list), return a list containing the current directory.
1975 Otherwise, return a list of the arguments."""
1977 return list(list_of_args)
1982 def statfrmt(statusletter, filename):
1983 return '%s %s' % (statusletter, filename)
1986 def pathjoin(a, *p):
1987 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1988 path = os.path.join(a, *p)
1989 if path.startswith('./'):
1994 def makeurl(baseurl, l, query=[]):
1995 """Given a list of path compoments, construct a complete URL.
1997 Optional parameters for a query string can be given as a list, as a
1998 dictionary, or as an already assembled string.
1999 In case of a dictionary, the parameters will be urlencoded by this
2000 function. In case of a list they will not -- this is for backwards compatibility.
2003 if conf.config['verbose'] > 1:
2004 print 'makeurl:', baseurl, l, query
2006 if type(query) == type(list()):
2007 query = '&'.join(query)
2008 elif type(query) == type(dict()):
2009 query = urlencode(query)
2011 scheme, netloc = urlsplit(baseurl)[0:2]
2012 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
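# Illustrative example (hypothetical values):
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', 'hello'],
#           query={'rev': 'latest'})
# evaluates to 'https://api.opensuse.org/source/openSUSE:Factory/hello?rev=latest'.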
2015 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2016 """wrapper around urllib2.urlopen for error handling,
2017 and to support additional (PUT, DELETE) methods"""
2021 if conf.config['http_debug']:
2024 print '--', method, url
2026 if method == 'POST' and not file and not data:
2027 # adding data to an urllib2 request transforms it into a POST
2030 req = urllib2.Request(url)
2031 api_host_options = {}
2033 api_host_options = conf.get_apiurl_api_host_options(url)
2034 for header, value in api_host_options['http_headers']:
2035 req.add_header(header, value)
2037 # "external" request (url is no apiurl)
2040 req.get_method = lambda: method
2042 # POST requests are application/x-www-form-urlencoded by default
2043 # since we change the request into PUT, we also need to adjust the content type header
2044 if method == 'PUT' or (method == 'POST' and data):
2045 req.add_header('Content-Type', 'application/octet-stream')
2047 if type(headers) == type({}):
2048 for i in headers.keys():
2050 req.add_header(i, headers[i])
2052 if file and not data:
2053 size = os.path.getsize(file)
2055 data = open(file, 'rb').read()
2058 filefd = open(file, 'rb')
2060 if sys.platform[:3] != 'win':
2061 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2063 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2065 except EnvironmentError, e:
2067 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2068 '\non a filesystem which does not support this.' % (e, file))
2069 elif hasattr(e, 'winerror') and e.winerror == 5:
2070 # falling back to the default io
2071 data = open(file, 'rb').read()
2075 if conf.config['debug']: print method, url
2077 old_timeout = socket.getdefaulttimeout()
2078 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2079 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2080 socket.setdefaulttimeout(timeout)
2082 fd = urllib2.urlopen(req, data=data)
2084 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2085 socket.setdefaulttimeout(old_timeout)
2086 if hasattr(conf.cookiejar, 'save'):
2087 conf.cookiejar.save(ignore_discard=True)
2089 if filefd: filefd.close()
2094 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2095 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2096 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2097 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
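# Typical use within this module (illustrative): the returned file-like object can be
# fed straight to the XML parser, e.g.
#   u = makeurl(apiurl, ['source', 'openSUSE:Factory'])
#   root = ET.parse(http_GET(u)).getroot()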
2100 def init_project_dir(apiurl, dir, project):
2101 if not os.path.exists(dir):
2102 if conf.config['checkout_no_colon']:
2103 os.makedirs(dir) # helpful with checkout_no_colon
2106 if not os.path.exists(os.path.join(dir, store)):
2107 os.mkdir(os.path.join(dir, store))
2109 # print 'project=',project,' dir=',dir
2110 store_write_project(dir, project)
2111 store_write_apiurl(dir, apiurl)
2112 if conf.config['do_package_tracking']:
2113 store_write_initial_packages(dir, project, [])
2115 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
2116 if not os.path.isdir(store):
2119 f = open('_project', 'w')
2120 f.write(project + '\n')
2122 f = open('_package', 'w')
2123 f.write(package + '\n')
2127 f = open('_size_limit', 'w')
2128 f.write(str(limit_size))
2132 f = open('_files', 'w')
2133 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
2137 ET.ElementTree(element=ET.Element('directory')).write('_files')
2139 f = open('_osclib_version', 'w')
2140 f.write(__store_version__ + '\n')
2143 store_write_apiurl(os.path.pardir, apiurl)
2149 def check_store_version(dir):
2150 versionfile = os.path.join(dir, store, '_osclib_version')
2152 v = open(versionfile).read().strip()
2157 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2158 if os.path.exists(os.path.join(dir, '.svn')):
2159 msg = msg + '\nTry svn instead of osc.'
2160 raise oscerr.NoWorkingCopy(msg)
2162 if v != __store_version__:
2163 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2164 # version is fine, no migration needed
2165 f = open(versionfile, 'w')
2166 f.write(__store_version__ + '\n')
2169 msg = 'The osc metadata of your working copy "%s"' % dir
2170 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2171 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2172 raise oscerr.WorkingCopyWrongVersion(msg)
2175 def meta_get_packagelist(apiurl, prj):
2177 u = makeurl(apiurl, ['source', prj])
2179 root = ET.parse(f).getroot()
2180 return [ node.get('name') for node in root.findall('entry') ]
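# Example call (hypothetical project name): list all packages of a project:
#   for name in meta_get_packagelist(apiurl, 'home:user'):
#       print name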
2183 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2184 """return a list of file names,
2185 or a list File() instances if verbose=True"""
2191 query['rev'] = revision
2193 query['rev'] = 'latest'
2195 u = makeurl(apiurl, ['source', prj, package], query=query)
2197 root = ET.parse(f).getroot()
2200 return [ node.get('name') for node in root.findall('entry') ]
2204 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2205 rev = root.get('rev')
2206 for node in root.findall('entry'):
2207 f = File(node.get('name'),
2209 int(node.get('size')),
2210 int(node.get('mtime')))
2216 def meta_get_project_list(apiurl):
2217 u = makeurl(apiurl, ['source'])
2219 root = ET.parse(f).getroot()
2220 return sorted([ node.get('name') for node in root ])
2223 def show_project_meta(apiurl, prj):
2224 url = makeurl(apiurl, ['source', prj, '_meta'])
2226 return f.readlines()
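# Example call (hypothetical project name): print a project's _meta document:
#   print ''.join(show_project_meta(apiurl, 'home:user'))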
2229 def show_project_conf(apiurl, prj):
2230 url = makeurl(apiurl, ['source', prj, '_config'])
2232 return f.readlines()
2235 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2236 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2240 except urllib2.HTTPError, e:
2241 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2245 def show_package_meta(apiurl, prj, pac):
2246 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2249 return f.readlines()
2250 except urllib2.HTTPError, e:
2251 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2255 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2257 path.append('source')
2263 path.append('_attribute')
2265 path.append(attribute)
2268 query.append("with_default=1")
2270 query.append("with_project=1")
2271 url = makeurl(apiurl, path, query)
2274 return f.readlines()
2275 except urllib2.HTTPError, e:
2276 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2280 def show_develproject(apiurl, prj, pac):
2281 m = show_package_meta(apiurl, prj, pac)
2283 return ET.fromstring(''.join(m)).find('devel').get('project')
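# Example call (hypothetical names): look up where development of a package happens;
# the package meta needs a <devel> element for this to return a project name:
#   devel_prj = show_develproject(apiurl, 'openSUSE:Factory', 'osc')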
2288 def show_pattern_metalist(apiurl, prj):
2289 url = makeurl(apiurl, ['source', prj, '_pattern'])
2293 except urllib2.HTTPError, e:
2294 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2296 r = [ node.get('name') for node in tree.getroot() ]
2301 def show_pattern_meta(apiurl, prj, pattern):
2302 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2305 return f.readlines()
2306 except urllib2.HTTPError, e:
2307 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2312 """metafile that can be manipulated and is stored back after manipulation."""
2313 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2317 self.change_is_required = change_is_required
2318 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2319 f = os.fdopen(fd, 'w')
2320 f.write(''.join(input))
2322 self.hash_orig = dgst(self.filename)
2325 hash = dgst(self.filename)
2326 if self.change_is_required and hash == self.hash_orig:
2327 print 'File unchanged. Not saving.'
2328 os.unlink(self.filename)
2331 print 'Sending meta data...'
2332 # don't do any exception handling... it's up to the caller what to do in case of an exception
2334 http_PUT(self.url, file=self.filename)
2335 os.unlink(self.filename)
2341 run_editor(self.filename)
2345 except urllib2.HTTPError, e:
2346 error_help = "%d" % e.code
2347 if e.headers.get('X-Opensuse-Errorcode'):
2348 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2350 print >>sys.stderr, 'BuildService API error:', error_help
2351 # examine the error - we can't raise an exception because we might want to try again
2354 if '<summary>' in data:
2355 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2356 input = raw_input('Try again? ([y/N]): ')
2357 if input not in ['y', 'Y']:
2363 if os.path.exists(self.filename):
2364 print 'discarding %s' % self.filename
2365 os.unlink(self.filename)
2368 # different types of metadata
2369 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2370 'template': new_project_templ,
2373 'pkg': { 'path' : 'source/%s/%s/_meta',
2374 'template': new_package_templ,
2377 'attribute': { 'path' : 'source/%s/%s/_meta',
2378 'template': new_attribute_templ,
2381 'prjconf': { 'path': 'source/%s/_config',
2385 'user': { 'path': 'person/%s',
2386 'template': new_user_template,
2389 'pattern': { 'path': 'source/%s/_pattern/%s',
2390 'template': new_pattern_template,
2395 def meta_exists(metatype,
2402 apiurl = conf.config['apiurl']
2403 url = make_meta_url(metatype, path_args, apiurl)
2405 data = http_GET(url).readlines()
2406 except urllib2.HTTPError, e:
2407 if e.code == 404 and create_new:
2408 data = metatypes[metatype]['template']
2410 data = StringIO(data % template_args).readlines()
2415 def make_meta_url(metatype, path_args=None, apiurl=None):
2417 apiurl = conf.config['apiurl']
2418 if metatype not in metatypes.keys():
2419 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2420 path = metatypes[metatype]['path']
2423 path = path % path_args
2425 return makeurl(apiurl, [path])
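# Example call (hypothetical names), using the 'pkg' metatype defined above:
#   url = make_meta_url('pkg', (quote_plus('home:user'), quote_plus('mypackage')), apiurl)
#   # yields <apiurl>/source/home:user/mypackage/_meta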
2428 def edit_meta(metatype,
2433 change_is_required=False,
2437 apiurl = conf.config['apiurl']
2439 data = meta_exists(metatype,
2442 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2446 change_is_required = True
2448 url = make_meta_url(metatype, path_args, apiurl)
2449 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2457 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
2460 query['rev'] = revision
2462 query['rev'] = 'latest'
2464 query['linkrev'] = linkrev
2465 elif conf.config['linkcontrol']:
2466 query['linkrev'] = 'base'
2470 query['emptylink'] = 1
2471 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2473 # look for "too large" files according to size limit and mark them
2474 root = ET.fromstring(''.join(f.readlines()))
2475 for e in root.findall('entry'):
2476 size = e.get('size')
2477 if size and limit_size and int(size) > int(limit_size):
2478 e.set('skipped', 'true')
2479 return ET.tostring(root)
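# Example call (hypothetical names): fetch the expanded file list of a package and
# parse it; entries larger than limit_size carry a skipped="true" attribute:
#   root = ET.fromstring(show_files_meta(apiurl, 'home:user', 'mypackage', expand=True))
#   names = [e.get('name') for e in root.findall('entry')]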
2482 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2483 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2484 return ET.fromstring(''.join(m)).get('srcmd5')
2487 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2488 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2490 # only source link packages have a <linkinfo> element.
2491 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2499 raise oscerr.LinkExpandError(prj, pac, li.error)
2503 def show_upstream_rev(apiurl, prj, pac):
2504 m = show_files_meta(apiurl, prj, pac)
2505 return ET.fromstring(''.join(m)).get('rev')
2508 def read_meta_from_spec(specfile, *args):
2509 import codecs, locale, re
2511 Read tags and sections from spec file. To read out
2512 a tag the passed argument mustn't end with a colon. To
2513 read out a section the passed argument must start with a '%'.
2515 This method returns a dictionary which contains the requested data.
2519 if not os.path.isfile(specfile):
2520 raise IOError('\'%s\' is not a regular file' % specfile)
2523 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2524 except UnicodeDecodeError:
2525 lines = open(specfile).readlines()
2532 if itm.startswith('%'):
2533 sections.append(itm)
2537 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2539 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2540 if m and m.group('val'):
2541 spec_data[tag] = m.group('val').strip()
2543 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2546 section_pat = '^%s\s*?$'
2547 for section in sections:
2548 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2550 start = lines.index(m.group()+'\n') + 1
2552 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2555 for line in lines[start:]:
2556 if line.startswith('%'):
2559 spec_data[section] = data
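# Example call (hypothetical spec file): tags are passed without a trailing colon,
# sections with their leading '%':
#   data = read_meta_from_spec('mypackage.spec', 'Name', 'Summary', '%description')
#   print data['Name'], data['%description']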
2563 def run_pager(message):
2564 import tempfile, sys
2566 if not sys.stdout.isatty():
2569 tmpfile = tempfile.NamedTemporaryFile()
2570 tmpfile.write(message)
2572 pager = os.getenv('PAGER', default='less')
2573 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
2576 def run_editor(filename):
2577 if sys.platform[:3] != 'win':
2578 editor = os.getenv('EDITOR', default='vim')
2580 editor = os.getenv('EDITOR', default='notepad')
2582 return subprocess.call([ editor, filename ])
2584 def edit_message(footer='', template='', templatelen=30):
2585 delim = '--This line, and those below, will be ignored--\n'
2587 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2588 f = os.fdopen(fd, 'w')
2590 if templatelen is not None:
2591 lines = template.splitlines()
2592 template = '\n'.join(lines[:templatelen])
2593 if lines[templatelen:]:
2594 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2604 run_editor(filename)
2605 msg = open(filename).read().split(delim)[0].rstrip()
2610 input = raw_input('Log message not specified\n'
2611 'a)bort, c)ontinue, e)dit: ')
2613 raise oscerr.UserAbort()
2623 def create_delete_request(apiurl, project, package, message):
2628 package = """package="%s" """ % (package)
2634 <action type="delete">
2635 <target project="%s" %s/>
2638 <description>%s</description>
2640 """ % (project, package,
2641 cgi.escape(message or ''))
2643 u = makeurl(apiurl, ['request'], query='cmd=create')
2644 f = http_POST(u, data=xml)
2646 root = ET.parse(f).getroot()
2647 return root.get('id')
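# Example call (hypothetical names): file a delete request and print the new request id:
#   reqid = create_delete_request(apiurl, 'home:user', 'obsolete-package', 'no longer needed')
#   print reqid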
2650 def create_change_devel_request(apiurl,
2651 devel_project, devel_package,
2658 <action type="change_devel">
2659 <source project="%s" package="%s" />
2660 <target project="%s" package="%s" />
2663 <description>%s</description>
2665 """ % (devel_project,
2669 cgi.escape(message or ''))
2671 u = makeurl(apiurl, ['request'], query='cmd=create')
2672 f = http_POST(u, data=xml)
2674 root = ET.parse(f).getroot()
2675 return root.get('id')
2678 # This creates an old style submit request for server api 1.0
2679 def create_submit_request(apiurl,
2680 src_project, src_package,
2681 dst_project=None, dst_package=None,
2682 message=None, orev=None, src_update=None):
2687 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2689 # Yes, this kind of xml construction is horrible
2694 packagexml = """package="%s" """ %( dst_package )
2695 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2696 # XXX: keep the old template for now in order to work with old obs instances
2698 <request type="submit">
2700 <source project="%s" package="%s" rev="%s"/>
2705 <description>%s</description>
2709 orev or show_upstream_rev(apiurl, src_project, src_package),
2712 cgi.escape(message or ""))
2714 u = makeurl(apiurl, ['request'], query='cmd=create')
2715 f = http_POST(u, data=xml)
2717 root = ET.parse(f).getroot()
2718 return root.get('id')
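# Example call (hypothetical names): submit a branched package back to its origin:
#   reqid = create_submit_request(apiurl, 'home:user:branches:openSUSE:Factory', 'osc',
#                                 'openSUSE:Factory', 'osc', message='update to a new version')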
2721 def get_request(apiurl, reqid):
2722 u = makeurl(apiurl, ['request', reqid])
2724 root = ET.parse(f).getroot()
2731 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2734 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2735 f = http_POST(u, data=message)
2738 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2741 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2742 f = http_POST(u, data=message)
2746 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2748 if not 'all' in req_state:
2749 for state in req_state:
2750 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2752 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2754 # XXX: we cannot use the '|' in the xpath expression because it is not supported by the api
2758 todo['project'] = project
2760 todo['package'] = package
2761 for kind, val in todo.iteritems():
2762 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2763 'action/source/@%(kind)s=\'%(val)s\' or ' \
2764 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2765 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2767 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2768 for i in exclude_target_projects:
2769 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2770 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2772 if conf.config['verbose'] > 1:
2773 print '[ %s ]' % xpath
2774 res = search(apiurl, request=xpath)
2775 collection = res['request']
2777 for root in collection.findall('request'):
2783 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2784 """Return all new requests for all projects/packages where is user is involved"""
2786 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2787 for i in res['project_id'].findall('project'):
2788 projpkgs[i.get('name')] = []
2789 for i in res['package_id'].findall('package'):
2790 if not i.get('project') in projpkgs.keys():
2791 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2793 for prj, pacs in projpkgs.iteritems():
2795 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2799 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2800 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2801 xpath = xpath_join(xpath, xp, inner=True)
2803 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2804 if not 'all' in req_state:
2806 for state in req_state:
2807 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2808 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2809 res = search(apiurl, request=xpath)
2811 for root in res['request'].findall('request'):
2817 def get_request_log(apiurl, reqid):
2818 r = get_request(apiurl, reqid)
2820 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2821 # the description of the request is used for the initial log entry
2822 # otherwise its comment attribute would contain None
2823 if len(r.statehistory) >= 1:
2824 r.statehistory[-1].comment = r.descr
2826 r.state.comment = r.descr
2827 for state in [ r.state ] + r.statehistory:
2828 s = frmt % (state.name, state.who, state.when, str(state.comment))
2833 def get_user_meta(apiurl, user):
2834 u = makeurl(apiurl, ['person', quote_plus(user)])
2837 return ''.join(f.readlines())
2838 except urllib2.HTTPError:
2839 print 'user \'%s\' not found' % user
2843 def get_user_data(apiurl, user, *tags):
2844 """get specified tags from the user meta"""
2845 meta = get_user_meta(apiurl, user)
2848 root = ET.fromstring(meta)
2851 if root.find(tag).text != None:
2852 data.append(root.find(tag).text)
2856 except AttributeError:
2857 # this part is reached if the tags tuple contains an invalid tag
2858 print 'The xml file for user \'%s\' seems to be broken' % user
2863 def download(url, filename, progress_obj = None, mtime = None):
2864 import tempfile, shutil
2867 prefix = os.path.basename(filename)
2868 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
2869 os.chmod(tmpfile, 0644)
2871 o = os.fdopen(fd, 'wb')
2872 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2875 shutil.move(tmpfile, filename)
2884 os.utime(filename, (-1, mtime))
2886 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None):
2887 targetfilename = targetfilename or filename
2890 query = { 'rev': revision }
2891 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2892 download(u, targetfilename, progress_obj, mtime)
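# Example call (hypothetical names): fetch a single file from a package's sources:
#   get_source_file(apiurl, 'home:user', 'mypackage', 'mypackage.spec',
#                   targetfilename='/tmp/mypackage.spec')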
2894 def get_binary_file(apiurl, prj, repo, arch,
2897 target_filename = None,
2898 target_mtime = None,
2899 progress_meter = False):
2902 from meter import TextMeter
2903 progress_obj = TextMeter()
2905 target_filename = target_filename or filename
2907 where = package or '_repository'
2908 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2909 download(u, target_filename, progress_obj, target_mtime)
2911 def dgst_from_string(str):
2912 # Python 2.5 deprecates the md5 module
2913 # Python 2.4 doesn't have hashlib yet
2916 md5_hash = hashlib.md5()
2919 md5_hash = md5.new()
2920 md5_hash.update(str)
2921 return md5_hash.hexdigest()
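# Example: the md5 digest of 'hello' is '5d41402abc4b2a76b9719d911017c592', so
#   dgst_from_string('hello') == '5d41402abc4b2a76b9719d911017c592'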
2925 #if not os.path.exists(file):
2935 f = open(file, 'rb')
2937 buf = f.read(BUFSIZE)
2940 return s.hexdigest()
2945 """return true if a string is binary data using diff's heuristic"""
2946 if s and '\0' in s[:4096]:
2951 def binary_file(fn):
2952 """read 4096 bytes from a file named fn, and call binary() on the data"""
2953 return binary(open(fn, 'rb').read(4096))
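# Example: per the heuristic above, a NUL byte in the first 4096 bytes marks data as binary,
# so binary('GIF89a\0\1') is expected to return True and binary('plain text') False.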
2956 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2958 This method diffs oldfilename against filename (so filename will
2959 be shown as the new file).
2960 The variable origfilename is used if filename and oldfilename differ
2961 in their names (for instance if a tempfile is used for filename etc.)
2967 oldfilename = filename
2970 olddir = os.path.join(dir, store)
2972 if not origfilename:
2973 origfilename = filename
2975 file1 = os.path.join(olddir, oldfilename) # old/stored original
2976 file2 = os.path.join(dir, filename) # working copy
2978 f1 = open(file1, 'rb')
2982 f2 = open(file2, 'rb')
2986 if binary(s1) or binary(s2):
2987 d = ['Binary file %s has changed\n' % origfilename]
2990 d = difflib.unified_diff(\
2993 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2994 tofile = '%s\t(working copy)' % origfilename)
2996 # if file doesn't end with newline, we need to append one in the diff result
2998 for i, line in enumerate(d):
2999 if not line.endswith('\n'):
3000 d[i] += '\n\\ No newline at end of file'
3006 def make_diff(wc, revision):
3012 diff_hdr = 'Index: %s\n'
3013 diff_hdr += '===================================================================\n'
3015 olddir = os.getcwd()
3019 for file in wc.todo:
3020 if file in wc.skipped:
3022 if file in wc.filenamelist+wc.filenamelist_unvers:
3023 state = wc.status(file)
3025 added_files.append(file)
3027 removed_files.append(file)
3028 elif state == 'M' or state == 'C':
3029 changed_files.append(file)
3031 diff.append('osc: \'%s\' is not under version control' % file)
3033 for file in wc.filenamelist+wc.filenamelist_unvers:
3034 if file in wc.skipped:
3036 state = wc.status(file)
3037 if state == 'M' or state == 'C':
3038 changed_files.append(file)
3040 added_files.append(file)
3042 removed_files.append(file)
3044 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
3046 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
3047 cmp_pac = Package(tmpdir)
3049 for file in wc.todo:
3050 if file in cmp_pac.skipped:
3052 if file in cmp_pac.filenamelist:
3053 if file in wc.filenamelist:
3054 changed_files.append(file)
3056 diff.append('osc: \'%s\' is not under version control' % file)
3058 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
3060 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
3062 for file in changed_files:
3063 diff.append(diff_hdr % file)
3065 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
3067 cmp_pac.updatefile(file, revision)
3068 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3069 cmp_pac.absdir, file))
3070 (fd, tmpfile) = tempfile.mkstemp()
3071 for file in added_files:
3072 diff.append(diff_hdr % file)
3074 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3075 os.path.dirname(tmpfile), file))
3077 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3078 os.path.dirname(tmpfile), file))
3080 # FIXME: this is ugly but it cannot be avoided atm
3081 # if a file is deleted via "osc rm file" we should keep the storefile.
3083 if cmp_pac == None and removed_files:
3084 tmpdir = tempfile.mkdtemp()
3086 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3087 tmp_pac = Package(tmpdir)
3090 for file in removed_files:
3091 diff.append(diff_hdr % file)
3093 tmp_pac.updatefile(file, tmp_pac.rev)
3094 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3095 wc.rev, file, tmp_pac.storedir, file))
3097 cmp_pac.updatefile(file, revision)
3098 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3099 revision, file, cmp_pac.storedir, file))
3103 delete_dir(cmp_pac.absdir)
3105 delete_dir(tmp_pac.absdir)
3109 def server_diff(apiurl,
3110 old_project, old_package, old_revision,
3111 new_project, new_package, new_revision, unified=False, missingok=False):
3112 query = {'cmd': 'diff', 'expand': '1'}
3114 query['oproject'] = old_project
3116 query['opackage'] = old_package
3118 query['orev'] = old_revision
3120 query['rev'] = new_revision
3122 query['unified'] = 1
3124 query['missingok'] = 1
3126 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
3132 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3134 creates the plain directory structure for a package dir.
3135 The 'apiurl' parameter is needed for the project dir initialization.
3136 The 'project' and 'package' parameters specify the name of the
3137 project and the package. The optional 'pathname' parameter is used
3138 for printing out the message that a new dir was created (default: 'prj_dir/package').
3139 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3141 prj_dir = prj_dir or project
3143 # FIXME: carefully test each path component of prj_dir,
3144 # if we have a .osc/_files entry at that level.
3145 # -> if so, we have a package/project clash,
3146 # and should rename this path component by appending '.proj'
3147 # and give user a warning message, to discourage such clashes
3149 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3150 if is_package_dir(prj_dir):
3151 # we want this to become a project directory,
3152 # but it already is a package directory.
3153 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3155 if not is_project_dir(prj_dir):
3156 # this directory could exist as a parent directory for one of our earlier
3157 # checked out sub-projects. in this case, we still need to initialize it.
3158 print statfrmt('A', prj_dir)
3159 init_project_dir(apiurl, prj_dir, project)
3161 if is_project_dir(os.path.join(prj_dir, package)):
3162 # the thing exists, but is a project directory and not a package directory
3163 # FIXME: this should be a warning message to discourage package/project clashes
3164 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3166 if not os.path.exists(os.path.join(prj_dir, package)):
3167 print statfrmt('A', pathname)
3168 os.mkdir(os.path.join(prj_dir, package))
3169 os.mkdir(os.path.join(prj_dir, package, store))
3171 return os.path.join(prj_dir, package)
3174 def checkout_package(apiurl, project, package,
3175 revision=None, pathname=None, prj_obj=None,
3176 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3178 # the project we're in might be deleted.
3179 # that'll throw an error then.
3180 olddir = os.getcwd()
3182 olddir = os.environ.get("PWD")
3187 if sys.platform[:3] == 'win':
3188 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3190 if conf.config['checkout_no_colon']:
3191 prj_dir = prj_dir.replace(':', '/')
3194 pathname = getTransActPath(os.path.join(prj_dir, package))
3196 # before we create directories and stuff, check if the package actually exists
3198 show_package_meta(apiurl, project, package)
3202 # try to read from the linkinfo
3203 # if it is a link we use the xsrcmd5 as the revision to be checked out
3206 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3208 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3213 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3214 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3216 p = Package(package, progress_obj=progress_obj)
3219 for filename in p.filenamelist:
3220 if filename in p.skipped:
3222 if service_files or not filename.startswith('_service:'):
3223 p.updatefile(filename, revision)
3224 # print 'A ', os.path.join(project, package, filename)
3225 print statfrmt('A', os.path.join(pathname, filename))
3226 if conf.config['do_package_tracking']:
3227 # check if we can re-use an existing project object
3229 prj_obj = Project(os.getcwd())
3230 prj_obj.set_state(p.name, ' ')
3231 prj_obj.write_packages()
3235 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3236 dst_userid = None, keep_develproject = False):
3238 update pkgmeta with the new name (new_name) and project (new_prj) and set the calling user as the
3239 only maintainer (unless keep_maintainers is set). Additionally remove the
3240 develproject entry (<devel />) unless keep_develproject is true.
3242 root = ET.fromstring(''.join(pkgmeta))
3243 root.set('name', new_name)
3244 root.set('project', new_prj)
3245 if not keep_maintainers:
3246 for person in root.findall('person'):
3248 if not keep_develproject:
3249 for dp in root.findall('devel'):
3251 return ET.tostring(root)
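# Example call (hypothetical names): retarget a package meta before copying/branching it:
#   src_meta = show_package_meta(apiurl, 'openSUSE:Factory', 'osc')
#   new_meta = replace_pkg_meta(src_meta, 'osc', 'home:user:branches:openSUSE:Factory')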
3253 def link_to_branch(apiurl, project, package):
3255 convert a package with a _link + project.diff to a branch
3258 if '_link' in meta_get_filelist(apiurl, project, package):
3259 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3262 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3264 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3266 create a linked package
3267 - "src" is the original package
3268 - "dst" is the "link" package that we are creating here
3273 dst_meta = meta_exists(metatype='pkg',
3274 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3276 create_new=False, apiurl=conf.config['apiurl'])
3277 root = ET.fromstring(''.join(dst_meta))
3278 print root.attrib['project']
3279 if root.attrib['project'] != dst_project:
3280 # The source comes from a different project via a project link, we need to create this instance
3286 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3287 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3291 root = ET.fromstring(''.join(dst_meta))
3292 elm = root.find('publish')
3294 elm = ET.SubElement(root, 'publish')
3296 ET.SubElement(elm, 'disable')
3297 dst_meta = ET.tostring(root)
3301 path_args=(dst_project, dst_package),
3303 # create the _link file
3304 # but first, make sure not to overwrite an existing one
3305 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3307 print >>sys.stderr, 'forced overwrite of existing _link file'
3310 print >>sys.stderr, '_link file already exists...! Aborting'
3314 rev = 'rev="%s"' % rev
3319 cicount = 'cicount="%s"' % cicount
3323 print 'Creating _link...',
3324 link_template = """\
3325 <link project="%s" package="%s" %s %s>
3327 <!-- <apply name="patch" /> apply a patch on the source directory -->
3328 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3329 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3330 <!-- <delete>filename</delete> delete a file -->
3333 """ % (src_project, src_package, rev, cicount)
3335 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3336 http_PUT(u, data=link_template)
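# Example call (hypothetical names): link a package into another project without publishing:
#   link_pac('openSUSE:Factory', 'osc', 'home:user', 'osc', force=False,
#            disable_publish=True)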
3339 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3342 - "src" is the original package
3343 - "dst" is the "aggregate" package that we are creating here
3344 - "map" is a dictionary SRC => TARGET repository mappings
3349 dst_meta = meta_exists(metatype='pkg',
3350 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3352 create_new=False, apiurl=conf.config['apiurl'])
3354 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3355 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3360 root = ET.fromstring(''.join(dst_meta))
3361 elm = root.find('publish')
3363 elm = ET.SubElement(root, 'publish')
3365 ET.SubElement(elm, 'disable')