1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breakage of tools which use osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E', # obsolete with OBS 2.0
201 # our own xml writer function to write xml nice, but with correct syntax
202 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
203 from xml.dom import minidom
def fixed_writexml(self, writer, indent="", addindent="", newl=""):
    """Replacement for minidom's Element.writexml that pretty-prints
    without wrapping text-only elements in whitespace text nodes.

    indent    -- current indentation
    addindent -- indentation to add to higher levels
    newl      -- newline string
    """
    writer.write(indent+"<" + self.tagName)
    attrs = self._get_attributes()
    a_names = attrs.keys()
    # write each attribute as name="value"
    # NOTE(review): this file is an elided excerpt -- the line writing the
    # closing attribute quote, and the branch structure around the child
    # handling below, are not visible here
    for a_name in a_names:
        writer.write(" %s=\"" % a_name)
        minidom._write_data(writer, attrs[a_name].value)
    if len(self.childNodes) == 1 \
      and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
        # a single text child is written inline: <tag>text</tag>
        self.childNodes[0].writexml(writer, "", "", "")
        writer.write("</%s>%s" % (self.tagName, newl))
    writer.write(">%s"%(newl))
    for node in self.childNodes:
        # recurse one indentation level deeper
        node.writexml(writer,indent+addindent,addindent,newl)
    writer.write("%s</%s>%s" % (indent,self.tagName,newl))
    # empty element case (no children): self-closing tag
    writer.write("/>%s"%(newl))
# replace minidom's function with ours
minidom.Element.writexml = fixed_writexml
235 # os.path.samefile is available only under Unix
def os_path_samefile(path1, path2):
    """Return True if path1 and path2 refer to the same file.

    os.path.samefile is available only under Unix (see the note above);
    on platforms that lack it, fall back to comparing the canonicalized
    (realpath) forms of the two paths.

    Fix: as written, the realpath comparison sat directly after an
    unconditional return and was unreachable -- it belongs in an
    exception handler for the missing-samefile case.
    """
    try:
        return os.path.samefile(path1, path2)
    except AttributeError:
        # os.path.samefile does not exist on this platform
        return os.path.realpath(path1) == os.path.realpath(path2)
243 """represent a file, including its metadata"""
244 def __init__(self, name, md5, size, mtime):
254 """Source service content
257 """creates an empty serviceinfo instance"""
    def read(self, serviceinfo_node):
        """read in the source services <services> element passed as
        argument; builds the service command lines in self.commands.
        """
        # NOTE(review): elided excerpt -- the body of the None guard (an
        # early return) and the try/except producing the APIError below
        # are not visible here
        if serviceinfo_node == None:
        services = serviceinfo_node.findall('service')
        for service in services:
            name = service.get('name')
            # append each parameter as --<name> '<value>' to the command
            for param in service.findall('param'):
                option = param.get('name', None)
                # NOTE(review): 'value' is assigned on lines missing from
                # this excerpt (presumably param.text) -- confirm against
                # the full source
                name += " --" + option + " '" + value + "'"
            self.commands.append(name)
        msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
        raise oscerr.APIError(msg)
281 def addVerifyFile(self, serviceinfo_node, filename):
284 f = open(filename, 'r')
285 digest = hashlib.sha256(f.read()).hexdigest()
289 s = ET.Element( "service", name="verify_file" )
290 ET.SubElement(s, "param", name="file").text = filename
291 ET.SubElement(s, "param", name="verifier").text = "sha256"
292 ET.SubElement(s, "param", name="checksum").text = digest
298 def addDownloadUrl(self, serviceinfo_node, url_string):
299 from urlparse import urlparse
300 url = urlparse( url_string )
301 protocol = url.scheme
306 s = ET.Element( "service", name="download_url" )
307 ET.SubElement(s, "param", name="protocol").text = protocol
308 ET.SubElement(s, "param", name="host").text = host
309 ET.SubElement(s, "param", name="path").text = path
    def execute(self, dir):
        """Run each stored source-service command and move the generated
        files into 'dir' as '_service:<name>:<file>' entries."""
        for call in self.commands:
            # each service call writes into its own scratch directory
            temp_dir = tempfile.mkdtemp()
            # first whitespace-separated token is the service name
            name = call.split(None, 1)[0]
            if not os.path.exists("/usr/lib/obs/service/"+name):
                msg = "ERROR: service is not installed !"
                # NOTE(review): missing separator between the sentences and
                # "Can maybe solved" reads broken -- candidate message fix
                msg += "Can maybe solved with: zypper in obs-server-" + name
                raise oscerr.APIError(msg)
            c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
            # NOTE(review): shell=True with an interpolated command string;
            # service parameters are not shell-escaped here -- verify the
            # _service file content is trusted before relying on this
            ret = subprocess.call(c, shell=True)
            # NOTE(review): elided excerpt -- the check of 'ret' that guards
            # this error print is not visible here
            print "ERROR: service call failed: " + c
            # collect the generated files under the _service: prefix
            for file in os.listdir(temp_dir):
                os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
335 """linkinfo metadata (which is part of the xml representing a directory
338 """creates an empty linkinfo instance"""
    def read(self, linkinfo_node):
        """read in the linkinfo metadata from the <linkinfo> element passed as
        argument.
        If the passed element is None, the method does nothing.
        """
        # NOTE(review): elided excerpt -- the body of this None guard (an
        # early return) is not visible here
        if linkinfo_node == None:
        # copy the element's attributes into plain members; attributes
        # missing from the element become None
        self.project = linkinfo_node.get('project')
        self.package = linkinfo_node.get('package')
        self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
        self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
        self.srcmd5 = linkinfo_node.get('srcmd5')
        self.error = linkinfo_node.get('error')
        self.rev = linkinfo_node.get('rev')
        self.baserev = linkinfo_node.get('baserev')
365 """returns True if the linkinfo is not empty, otherwise False"""
366 if self.xsrcmd5 or self.lsrcmd5:
370 def isexpanded(self):
371 """returns True if the package is an expanded link"""
372 if self.lsrcmd5 and not self.xsrcmd5:
377 """returns True if the link is in error state (could not be applied)"""
383 """return an informatory string representation"""
384 if self.islink() and not self.isexpanded():
385 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
386 % (self.project, self.package, self.xsrcmd5, self.rev)
387 elif self.islink() and self.isexpanded():
389 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
390 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
392 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
393 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
399 """represent a project directory, holding packages"""
400 def __init__(self, dir, getPackageList=True, progress_obj=None):
403 self.absdir = os.path.abspath(dir)
404 self.progress_obj = progress_obj
406 self.name = store_read_project(self.dir)
407 self.apiurl = store_read_apiurl(self.dir)
410 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
412 self.pacs_available = []
414 if conf.config['do_package_tracking']:
415 self.pac_root = self.read_packages().getroot()
416 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
417 self.pacs_excluded = [ i for i in os.listdir(self.dir)
418 for j in conf.config['exclude_glob']
419 if fnmatch.fnmatch(i, j) ]
420 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
421 # store all broken packages (e.g. packages which where removed by a non-osc cmd)
422 # in the self.pacs_broken list
423 self.pacs_broken = []
424 for p in self.pacs_have:
425 if not os.path.isdir(os.path.join(self.absdir, p)):
426 # all states will be replaced with the '!'-state
427 # (except it is already marked as deleted ('D'-state))
428 self.pacs_broken.append(p)
430 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
432 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
434 def checkout_missing_pacs(self, expand_link=False):
435 for pac in self.pacs_missing:
437 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
438 # pac is not under version control but a local file/dir exists
439 msg = 'can\'t add package \'%s\': Object already exists' % pac
440 raise oscerr.PackageExists(self.name, pac, msg)
442 print 'checking out new package %s' % pac
443 checkout_package(self.apiurl, self.name, pac, \
444 pathname=getTransActPath(os.path.join(self.dir, pac)), \
445 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
    def set_state(self, pac, state):
        """Set the tracking state of package 'pac' in the _packages tree."""
        node = self.get_package_node(pac)
        # NOTE(review): elided excerpt -- the branch deciding between
        # creating a new entry (no node found) and updating the existing
        # node's state is not visible here
        self.new_package_entry(pac, state)
        node.attrib['state'] = state
454 def get_package_node(self, pac):
455 for node in self.pac_root.findall('package'):
456 if pac == node.get('name'):
460 def del_package_node(self, pac):
461 for node in self.pac_root.findall('package'):
462 if pac == node.get('name'):
463 self.pac_root.remove(node)
465 def get_state(self, pac):
466 node = self.get_package_node(pac)
468 return node.get('state')
472 def new_package_entry(self, name, state):
473 ET.SubElement(self.pac_root, 'package', name=name, state=state)
475 def read_packages(self):
476 packages_file = os.path.join(self.absdir, store, '_packages')
477 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
478 return ET.parse(packages_file)
480 # scan project for existing packages and migrate them
482 for data in os.listdir(self.dir):
483 pac_dir = os.path.join(self.absdir, data)
484 # we cannot use self.pacs_available because we cannot guarantee that the package list
485 # was fetched from the server
486 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
487 and Package(pac_dir).name == data:
488 cur_pacs.append(ET.Element('package', name=data, state=' '))
489 store_write_initial_packages(self.absdir, self.name, cur_pacs)
490 return ET.parse(os.path.join(self.absdir, store, '_packages'))
492 def write_packages(self):
493 # TODO: should we only modify the existing file instead of overwriting?
494 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
496 def addPackage(self, pac):
498 for i in conf.config['exclude_glob']:
499 if fnmatch.fnmatch(pac, i):
500 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
501 raise oscerr.OscIOError(None, msg)
502 state = self.get_state(pac)
503 if state == None or state == 'D':
504 self.new_package_entry(pac, 'A')
505 self.write_packages()
506 # sometimes the new pac doesn't exist in the list because
507 # it would take too much time to update all data structs regularly
508 if pac in self.pacs_unvers:
509 self.pacs_unvers.remove(pac)
511 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
513 def delPackage(self, pac, force = False):
514 state = self.get_state(pac.name)
516 if state == ' ' or state == 'D':
518 for file in pac.filenamelist + pac.filenamelist_unvers:
519 filestate = pac.status(file)
520 if filestate == 'M' or filestate == 'C' or \
521 filestate == 'A' or filestate == '?':
524 del_files.append(file)
525 if can_delete or force:
526 for file in del_files:
527 pac.delete_localfile(file)
528 if pac.status(file) != '?':
529 pac.delete_storefile(file)
530 # this is not really necessary
531 pac.put_on_deletelist(file)
532 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
533 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
534 pac.write_deletelist()
535 self.set_state(pac.name, 'D')
536 self.write_packages()
538 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
541 delete_dir(pac.absdir)
542 self.del_package_node(pac.name)
543 self.write_packages()
544 print statfrmt('D', pac.name)
546 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
548 print 'package is not under version control'
550 print 'unsupported state'
552 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
555 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
557 # we need to make sure that the _packages file will be written (even if an exception
560 # update complete project
561 # packages which no longer exists upstream
562 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
564 for pac in upstream_del:
565 p = Package(os.path.join(self.dir, pac))
566 self.delPackage(p, force = True)
567 delete_storedir(p.storedir)
572 self.pac_root.remove(self.get_package_node(p.name))
573 self.pacs_have.remove(pac)
575 for pac in self.pacs_have:
576 state = self.get_state(pac)
577 if pac in self.pacs_broken:
578 if self.get_state(pac) != 'A':
579 checkout_package(self.apiurl, self.name, pac,
580 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
581 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
584 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
586 if expand_link and p.islink() and not p.isexpanded():
589 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
591 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
594 rev = p.linkinfo.xsrcmd5
595 print 'Expanding to rev', rev
596 elif unexpand_link and p.islink() and p.isexpanded():
597 rev = p.linkinfo.lsrcmd5
598 print 'Unexpanding to rev', rev
599 elif p.islink() and p.isexpanded():
601 print 'Updating %s' % p.name
602 p.update(rev, service_files)
606 # TODO: Package::update has to fixed to behave like svn does
607 if pac in self.pacs_broken:
608 checkout_package(self.apiurl, self.name, pac,
609 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
610 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
612 Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
613 elif state == 'A' and pac in self.pacs_available:
614 # file/dir called pac already exists and is under version control
615 msg = 'can\'t add package \'%s\': Object already exists' % pac
616 raise oscerr.PackageExists(self.name, pac, msg)
621 print 'unexpected state.. package \'%s\'' % pac
623 self.checkout_missing_pacs(expand_link=not unexpand_link)
625 self.write_packages()
627 def commit(self, pacs = (), msg = '', files = {}):
632 if files.has_key(pac):
634 state = self.get_state(pac)
636 self.commitNewPackage(pac, msg, todo)
638 self.commitDelPackage(pac)
640 # display the correct dir when sending the changes
641 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
644 p = Package(os.path.join(self.dir, pac))
647 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
648 print 'osc: \'%s\' is not under version control' % pac
649 elif pac in self.pacs_broken:
650 print 'osc: \'%s\' package not found' % pac
652 self.commitExtPackage(pac, msg, todo)
654 self.write_packages()
656 # if we have packages marked as '!' we cannot commit
657 for pac in self.pacs_broken:
658 if self.get_state(pac) != 'D':
659 msg = 'commit failed: package \'%s\' is missing' % pac
660 raise oscerr.PackageMissing(self.name, pac, msg)
662 for pac in self.pacs_have:
663 state = self.get_state(pac)
666 Package(os.path.join(self.dir, pac)).commit(msg)
668 self.commitDelPackage(pac)
670 self.commitNewPackage(pac, msg)
672 self.write_packages()
674 def commitNewPackage(self, pac, msg = '', files = []):
675 """creates and commits a new package if it does not exist on the server"""
676 if pac in self.pacs_available:
677 print 'package \'%s\' already exists' % pac
679 user = conf.get_apiurl_usr(self.apiurl)
680 edit_meta(metatype='pkg',
681 path_args=(quote_plus(self.name), quote_plus(pac)),
686 # display the correct dir when sending the changes
688 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
692 p = Package(os.path.join(self.dir, pac))
694 print statfrmt('Sending', os.path.normpath(p.dir))
696 self.set_state(pac, ' ')
699 def commitDelPackage(self, pac):
700 """deletes a package on the server and in the working copy"""
702 # display the correct dir when sending the changes
703 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
706 pac_dir = os.path.join(self.dir, pac)
707 p = Package(os.path.join(self.dir, pac))
708 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
709 delete_storedir(p.storedir)
715 pac_dir = os.path.join(self.dir, pac)
716 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
717 print statfrmt('Deleting', getTransActPath(pac_dir))
718 delete_package(self.apiurl, self.name, pac)
719 self.del_package_node(pac)
721 def commitExtPackage(self, pac, msg, files = []):
722 """commits a package from an external project"""
723 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
726 pac_path = os.path.join(self.dir, pac)
728 project = store_read_project(pac_path)
729 package = store_read_package(pac_path)
730 apiurl = store_read_apiurl(pac_path)
731 if meta_exists(metatype='pkg',
732 path_args=(quote_plus(project), quote_plus(package)),
734 create_new=False, apiurl=apiurl):
735 p = Package(pac_path)
739 user = conf.get_apiurl_usr(self.apiurl)
740 edit_meta(metatype='pkg',
741 path_args=(quote_plus(project), quote_plus(package)),
746 p = Package(pac_path)
752 r.append('*****************************************************')
753 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
754 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
755 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
756 r.append('*****************************************************')
762 """represent a package (its directory) and read/keep/write its metadata"""
763 def __init__(self, workingdir, progress_obj=None, limit_size=None):
764 self.dir = workingdir
765 self.absdir = os.path.abspath(self.dir)
766 self.storedir = os.path.join(self.absdir, store)
767 self.progress_obj = progress_obj
768 self.limit_size = limit_size
769 if limit_size and limit_size == 0:
770 self.limit_size = None
772 check_store_version(self.dir)
774 self.prjname = store_read_project(self.dir)
775 self.name = store_read_package(self.dir)
776 self.apiurl = store_read_apiurl(self.dir)
778 self.update_datastructs()
782 self.todo_delete = []
785 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
786 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
789 def addfile(self, n):
790 st = os.stat(os.path.join(self.dir, n))
791 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
793 def delete_file(self, n, force=False):
794 """deletes a file if possible and marks the file as deleted"""
797 state = self.status(n)
801 if state in ['?', 'A', 'M'] and not force:
802 return (False, state)
803 self.delete_localfile(n)
805 self.put_on_deletelist(n)
806 self.write_deletelist()
808 self.delete_storefile(n)
811 def delete_storefile(self, n):
812 try: os.unlink(os.path.join(self.storedir, n))
815 def delete_localfile(self, n):
816 try: os.unlink(os.path.join(self.dir, n))
819 def put_on_deletelist(self, n):
820 if n not in self.to_be_deleted:
821 self.to_be_deleted.append(n)
823 def put_on_conflictlist(self, n):
824 if n not in self.in_conflict:
825 self.in_conflict.append(n)
    def clear_from_conflictlist(self, n):
        """delete an entry from the file, and remove the file if it would be empty"""
        if n in self.in_conflict:
            filename = os.path.join(self.dir, n)
            storefilename = os.path.join(self.storedir, n)
            # the user's side of the conflict
            myfilename = os.path.join(self.dir, n + '.mine')
            if self.islinkrepair() or self.ispulled():
                upfilename = os.path.join(self.dir, n + '.new')
                # NOTE(review): elided excerpt -- the 'else:' separating
                # this .r<rev> assignment from the .new case above, and the
                # try/except around the unlinks below, are not visible here
                upfilename = os.path.join(self.dir, n + '.r' + self.rev)
            # remove the conflict helper files
            os.unlink(myfilename)
            # the working copy may be updated, so the .r* ending may be obsolete...
            os.unlink(upfilename)
            if self.islinkrepair() or self.ispulled():
                os.unlink(os.path.join(self.dir, n + '.old'))
            self.in_conflict.remove(n)
            # persist the shrunken conflict list
            self.write_conflictlist()
853 def write_sizelimit(self):
854 if self.size_limit and self.size_limit <= 0:
856 os.unlink(os.path.join(self.storedir, '_size_limit'))
860 fname = os.path.join(self.storedir, '_size_limit')
862 f.write(str(self.size_limit))
865 def write_deletelist(self):
866 if len(self.to_be_deleted) == 0:
868 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
872 fname = os.path.join(self.storedir, '_to_be_deleted')
874 f.write('\n'.join(self.to_be_deleted))
878 def delete_source_file(self, n):
879 """delete local a source file"""
880 self.delete_localfile(n)
881 self.delete_storefile(n)
883 def delete_remote_source_file(self, n):
884 """delete a remote source file (e.g. from the server)"""
886 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
889 def put_source_file(self, n):
891 # escaping '+' in the URL path (note: not in the URL query string) is
892 # only a workaround for ruby on rails, which swallows it otherwise
894 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
895 http_PUT(u, file = os.path.join(self.dir, n))
897 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
899 def commit(self, msg=''):
900 # commit only if the upstream revision is the same as the working copy's
901 upstream_rev = self.latest_rev()
902 if self.rev != upstream_rev:
903 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
906 self.todo = self.filenamelist_unvers + self.filenamelist
908 pathn = getTransActPath(self.dir)
910 have_conflicts = False
911 for filename in self.todo:
912 if not filename.startswith('_service:') and not filename.startswith('_service_'):
913 st = self.status(filename)
915 self.todo.remove(filename)
916 elif st == 'A' or st == 'M':
917 self.todo_send.append(filename)
918 print statfrmt('Sending', os.path.join(pathn, filename))
920 self.todo_delete.append(filename)
921 print statfrmt('Deleting', os.path.join(pathn, filename))
923 have_conflicts = True
926 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
929 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
930 print 'nothing to do for package %s' % self.name
933 if self.islink() and self.isexpanded():
934 # resolve the link into the upload revision
935 # XXX: do this always?
936 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
937 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
940 print 'Transmitting file data ',
942 for filename in self.todo_delete:
943 # do not touch local files on commit --
944 # delete remotely instead
945 self.delete_remote_source_file(filename)
946 self.to_be_deleted.remove(filename)
947 for filename in self.todo_send:
948 sys.stdout.write('.')
950 self.put_source_file(filename)
952 # all source files are committed - now comes the log
953 query = { 'cmd' : 'commit',
955 'user' : conf.get_apiurl_usr(self.apiurl),
957 if self.islink() and self.isexpanded():
958 query['keeplink'] = '1'
959 if conf.config['linkcontrol'] or self.isfrozen():
960 query['linkrev'] = self.linkinfo.srcmd5
962 query['repairlink'] = '1'
963 query['linkrev'] = self.get_pulled_srcmd5()
964 if self.islinkrepair():
965 query['repairlink'] = '1'
966 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
969 # delete upload revision
971 query = { 'cmd': 'deleteuploadrev' }
972 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
978 root = ET.parse(f).getroot()
979 self.rev = int(root.get('rev'))
981 print 'Committed revision %s.' % self.rev
984 os.unlink(os.path.join(self.storedir, '_pulled'))
985 if self.islinkrepair():
986 os.unlink(os.path.join(self.storedir, '_linkrepair'))
987 self.linkrepair = False
988 # XXX: mark package as invalid?
989 print 'The source link has been repaired. This directory can now be removed.'
990 if self.islink() and self.isexpanded():
991 self.update_local_filesmeta(revision=self.latest_rev())
993 self.update_local_filesmeta()
994 self.write_deletelist()
995 self.update_datastructs()
997 if self.filenamelist.count('_service'):
998 print 'The package contains a source service.'
999 for filename in self.todo:
1000 if filename.startswith('_service:') and os.path.exists(filename):
1001 os.unlink(filename) # remove local files
1002 print_request_list(self.apiurl, self.prjname, self.name)
1004 def write_conflictlist(self):
1005 if len(self.in_conflict) == 0:
1007 os.unlink(os.path.join(self.storedir, '_in_conflict'))
1011 fname = os.path.join(self.storedir, '_in_conflict')
1012 f = open(fname, 'w')
1013 f.write('\n'.join(self.in_conflict))
1017 def updatefile(self, n, revision):
1018 filename = os.path.join(self.dir, n)
1019 storefilename = os.path.join(self.storedir, n)
1020 mtime = self.findfilebyname(n).mtime
1022 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
1023 revision=revision, progress_obj=self.progress_obj, mtime=mtime)
1025 shutil.copyfile(filename, storefilename)
1027 def mergefile(self, n):
1028 filename = os.path.join(self.dir, n)
1029 storefilename = os.path.join(self.storedir, n)
1030 myfilename = os.path.join(self.dir, n + '.mine')
1031 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
1032 os.rename(filename, myfilename)
1034 mtime = self.findfilebyname(n).mtime
1035 get_source_file(self.apiurl, self.prjname, self.name, n,
1036 revision=self.rev, targetfilename=upfilename,
1037 progress_obj=self.progress_obj, mtime=mtime)
1039 if binary_file(myfilename) or binary_file(upfilename):
1041 shutil.copyfile(upfilename, filename)
1042 shutil.copyfile(upfilename, storefilename)
1043 self.in_conflict.append(n)
1044 self.write_conflictlist()
1048 # diff3 OPTIONS... MINE OLDER YOURS
1049 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
1050 # we would rather use the subprocess module, but it is not availablebefore 2.4
1051 ret = subprocess.call(merge_cmd, shell=True)
1053 # "An exit status of 0 means `diff3' was successful, 1 means some
1054 # conflicts were found, and 2 means trouble."
1056 # merge was successful... clean up
1057 shutil.copyfile(upfilename, storefilename)
1058 os.unlink(upfilename)
1059 os.unlink(myfilename)
1062 # unsuccessful merge
1063 shutil.copyfile(upfilename, storefilename)
1064 self.in_conflict.append(n)
1065 self.write_conflictlist()
1068 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1069 print >>sys.stderr, 'the command line was:'
1070 print >>sys.stderr, merge_cmd
    def update_local_filesmeta(self, revision=None):
        """
        Update the local _files file in the store.
        It is replaced with the version pulled from upstream.

        revision -- optional upstream revision to fetch the file list for.
        """
        meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
        store_write_string(self.absdir, '_files', meta)
1083 def update_datastructs(self):
1085 Update the internal data structures if the local _files
1086 file has changed (e.g. update_local_filesmeta() has been
1090 files_tree = read_filemeta(self.dir)
1091 files_tree_root = files_tree.getroot()
1093 self.rev = files_tree_root.get('rev')
1094 self.srcmd5 = files_tree_root.get('srcmd5')
1096 self.linkinfo = Linkinfo()
1097 self.linkinfo.read(files_tree_root.find('linkinfo'))
1099 self.filenamelist = []
1102 for node in files_tree_root.findall('entry'):
1104 f = File(node.get('name'),
1106 int(node.get('size')),
1107 int(node.get('mtime')))
1108 if node.get('skipped'):
1109 self.skipped.append(f.name)
1111 # okay, a very old version of _files, which didn't contain any metadata yet...
1112 f = File(node.get('name'), '', 0, 0)
1113 self.filelist.append(f)
1114 self.filenamelist.append(f.name)
1116 self.to_be_deleted = read_tobedeleted(self.dir)
1117 self.in_conflict = read_inconflict(self.dir)
1118 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1119 self.size_limit = read_sizelimit(self.dir)
1121 # gather unversioned files, but ignore some stuff
1122 self.excluded = [ i for i in os.listdir(self.dir)
1123 for j in conf.config['exclude_glob']
1124 if fnmatch.fnmatch(i, j) ]
1125 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1126 if i not in self.excluded
1127 if i not in self.filenamelist ]
1130 """tells us if the package is a link (has 'linkinfo').
1131 A package with linkinfo is a package which links to another package.
1132 Returns True if the package is a link, otherwise False."""
1133 return self.linkinfo.islink()
    def isexpanded(self):
        """tells us if the package is a link which is expanded.
        Returns True if the package is expanded, otherwise False."""
        # delegates to the linkinfo metadata parsed from _files
        return self.linkinfo.isexpanded()
    def islinkrepair(self):
        """tells us if we are repairing a broken source link."""
        # self.linkrepair is derived from the presence of the store's
        # _linkrepair file in update_datastructs()
        return self.linkrepair
1145 """tells us if we have pulled a link."""
1146 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1149 """tells us if the link is frozen."""
1150 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1152 def get_pulled_srcmd5(self):
1154 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1155 pulledrev = line.strip()
    def haslinkerror(self):
        """
        Returns True if the link is broken otherwise False.
        If the package is not a link it returns False.
        """
        return self.linkinfo.haserror()
    def linkerror(self):
        """
        Returns an error message if the link is broken otherwise None.
        If the package is not a link it returns None.
        """
        return self.linkinfo.error
    def update_local_pacmeta(self):
        """
        Update the local _meta file in the store.
        It is replaced with the version pulled from upstream.
        """
        meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
        store_write_string(self.absdir, '_meta', meta)
1180 def findfilebyname(self, n):
1181 for i in self.filelist:
1185 def status(self, n):
1189 file storefile file present STATUS
1190 exists exists in _files
1193 x x x ' ' if digest differs: 'M'
1194 and if in conflicts file: 'C'
1196 x - x 'D' and listed in _to_be_deleted
1198 - x - 'D' (when file in working copy is already deleted)
1199 - - x 'F' (new in repo, but not yet in working copy)
1204 known_by_meta = False
1206 exists_in_store = False
1207 if n in self.filenamelist:
1208 known_by_meta = True
1209 if os.path.exists(os.path.join(self.absdir, n)):
1211 if os.path.exists(os.path.join(self.storedir, n)):
1212 exists_in_store = True
1215 if n in self.skipped:
1217 elif exists and not exists_in_store and known_by_meta:
1219 elif n in self.to_be_deleted:
1221 elif n in self.in_conflict:
1223 elif exists and exists_in_store and known_by_meta:
1224 #print self.findfilebyname(n)
1225 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1229 elif exists and not exists_in_store and not known_by_meta:
1231 elif exists and exists_in_store and not known_by_meta:
1233 elif not exists and exists_in_store and known_by_meta:
1235 elif not exists and not exists_in_store and known_by_meta:
1237 elif not exists and exists_in_store and not known_by_meta:
1239 elif not exists and not exists_in_store and not known_by_meta:
1240 # this case shouldn't happen (except there was a typo in the filename etc.)
1241 raise IOError('osc: \'%s\' is not under version control' % n)
1245 def comparePac(self, cmp_pac):
1247 This method compares the local filelist with
1248 the filelist of the passed package to see which files
1249 were added, removed and changed.
1256 for file in self.filenamelist+self.filenamelist_unvers:
1257 state = self.status(file)
1258 if file in self.skipped:
1260 if state == 'A' and (not file in cmp_pac.filenamelist):
1261 added_files.append(file)
1262 elif file in cmp_pac.filenamelist and state == 'D':
1263 removed_files.append(file)
1264 elif state == ' ' and not file in cmp_pac.filenamelist:
1265 added_files.append(file)
1266 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1267 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1268 changed_files.append(file)
1269 for file in cmp_pac.filenamelist:
1270 if not file in self.filenamelist:
1271 removed_files.append(file)
1272 removed_files = set(removed_files)
1274 return changed_files, added_files, removed_files
1276 def merge(self, otherpac):
1277 self.todo += otherpac.todo
1291 '\n '.join(self.filenamelist),
1299 def read_meta_from_spec(self, spec = None):
1304 # scan for spec files
1305 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1306 if len(speclist) == 1:
1307 specfile = speclist[0]
1308 elif len(speclist) > 1:
1309 print 'the following specfiles were found:'
1310 for file in speclist:
1312 print 'please specify one with --specfile'
1315 print 'no specfile was found - please specify one ' \
1319 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1320 self.summary = data['Summary']
1321 self.url = data['Url']
1322 self.descr = data['%description']
1325 def update_package_meta(self, force=False):
1327 for the updatepacmetafromspec subcommand
1328 argument force supress the confirm question
1331 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1333 root = ET.fromstring(m)
1334 root.find('title').text = self.summary
1335 root.find('description').text = ''.join(self.descr)
1336 url = root.find('url')
1338 url = ET.SubElement(root, 'url')
1341 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1342 mf = metafile(u, ET.tostring(root))
1345 print '*' * 36, 'old', '*' * 36
1347 print '*' * 36, 'new', '*' * 36
1348 print ET.tostring(root)
1350 repl = raw_input('Write? (y/N/e) ')
1361 def mark_frozen(self):
1362 store_write_string(self.absdir, '_frozenlink', '')
1364 print "The link in this package is currently broken. Checking"
1365 print "out the last working version instead; please use 'osc pull'"
1366 print "to repair the link."
1369 def unmark_frozen(self):
1370 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1371 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1373 def latest_rev(self):
1374 if self.islinkrepair():
1375 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1376 elif self.islink() and self.isexpanded():
1377 if self.isfrozen() or self.ispulled():
1378 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1381 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1384 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1386 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1389 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1392 def update(self, rev = None, service_files = False, limit_size = None):
1393 # save filelist and (modified) status before replacing the meta file
1394 saved_filenames = self.filenamelist
1395 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1399 self.limit_size = limit_size
1401 self.limit_size = read_sizelimit(self.dir)
1402 self.update_local_filesmeta(rev)
1403 self = Package(self.dir, progress_obj=self.progress_obj)
1405 # which files do no longer exist upstream?
1406 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1408 pathn = getTransActPath(self.dir)
1410 for filename in saved_filenames:
1411 if filename in self.skipped:
1413 if not filename.startswith('_service:') and filename in disappeared:
1414 print statfrmt('D', os.path.join(pathn, filename))
1415 # keep file if it has local modifications
1416 if oldp.status(filename) == ' ':
1417 self.delete_localfile(filename)
1418 self.delete_storefile(filename)
1420 for filename in self.filenamelist:
1421 if filename in self.skipped:
1424 state = self.status(filename)
1425 if not service_files and filename.startswith('_service:'):
1427 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1428 # no merge necessary... local file is changed, but upstream isn't
1430 elif state == 'M' and filename in saved_modifiedfiles:
1431 status_after_merge = self.mergefile(filename)
1432 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1434 self.updatefile(filename, rev)
1435 print statfrmt('U', os.path.join(pathn, filename))
1437 self.updatefile(filename, rev)
1438 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1440 self.updatefile(filename, rev)
1441 print statfrmt('A', os.path.join(pathn, filename))
1442 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1443 self.updatefile(filename, rev)
1444 self.delete_storefile(filename)
1445 print statfrmt('U', os.path.join(pathn, filename))
1449 self.update_local_pacmeta()
1451 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1452 print 'At revision %s.' % self.rev
1454 if not service_files:
1455 self.run_source_services()
1457 def run_source_services(self):
1458 if self.filenamelist.count('_service'):
1459 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1462 si.execute(self.absdir)
1464 def prepare_filelist(self):
1465 """Prepare a list of files, which will be processed by process_filelist
1466 method. This allows easy modifications of a file list in commit
1470 self.todo = self.filenamelist + self.filenamelist_unvers
1474 for f in [f for f in self.todo if not os.path.isdir(f)]:
1476 status = self.status(f)
1481 ret += "%s %s %s\n" % (action, status, f)
1484 # Edit a filelist for package \'%s\'
1486 # l, leave = leave a file as is
1487 # r, remove = remove a file
1488 # a, add = add a file
1490 # If you remove file from a list, it will be unchanged
1491 # If you remove all, commit will be aborted""" % self.name
1495 def edit_filelist(self):
1496 """Opens a package list in editor for editing. This allows easy
1497 modifications of it just by simple text editing
1501 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1502 f = os.fdopen(fd, 'w')
1503 f.write(self.prepare_filelist())
1505 mtime_orig = os.stat(filename).st_mtime
1508 run_editor(filename)
1509 mtime = os.stat(filename).st_mtime
1510 if mtime_orig < mtime:
1511 filelist = open(filename).readlines()
1515 raise oscerr.UserAbort()
1517 return self.process_filelist(filelist)
1519 def process_filelist(self, filelist):
1520 """Process a filelist - it add/remove or leave files. This depends on
1521 user input. If no file is processed, it raises an ValueError
1525 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1527 foo = line.split(' ')
1529 action, state, name = (foo[0], ' ', foo[3])
1531 action, state, name = (foo[0], foo[1], foo[2])
1534 action = action.lower()
1537 if action in ('r', 'remove'):
1538 if self.status(name) == '?':
1540 if name in self.todo:
1541 self.todo.remove(name)
1543 self.delete_file(name, True)
1544 elif action in ('a', 'add'):
1545 if self.status(name) != '?':
1546 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1549 elif action in ('l', 'leave'):
1552 raise ValueError("Unknow action `%s'" % action)
1555 raise ValueError("Empty filelist")
1558 """for objects to represent the review state in a request"""
1559 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1561 self.by_user = by_user
1562 self.by_group = by_group
1565 self.comment = comment
1568 """for objects to represent the "state" of a request"""
1569 def __init__(self, name=None, who=None, when=None, comment=None):
1573 self.comment = comment
1576 """represents an action"""
1577 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1579 self.src_project = src_project
1580 self.src_package = src_package
1581 self.src_rev = src_rev
1582 self.dst_project = dst_project
1583 self.dst_package = dst_package
1584 self.src_update = src_update
1587 """represent a request and holds its metadata
1588 it has methods to read in metadata from xml,
1589 different views, ..."""
1592 self.state = RequestState()
1595 self.last_author = None
1598 self.statehistory = []
1601 def read(self, root):
1602 self.reqid = int(root.get('id'))
1603 actions = root.findall('action')
1604 if len(actions) == 0:
1605 actions = [ root.find('submit') ] # for old style requests
1607 for action in actions:
1608 type = action.get('type', 'submit')
1610 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1611 if action.findall('source'):
1612 n = action.find('source')
1613 src_prj = n.get('project', None)
1614 src_pkg = n.get('package', None)
1615 src_rev = n.get('rev', None)
1616 if action.findall('target'):
1617 n = action.find('target')
1618 dst_prj = n.get('project', None)
1619 dst_pkg = n.get('package', None)
1620 if action.findall('options'):
1621 n = action.find('options')
1622 if n.findall('sourceupdate'):
1623 src_update = n.find('sourceupdate').text.strip()
1624 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1626 msg = 'invalid request format:\n%s' % ET.tostring(root)
1627 raise oscerr.APIError(msg)
1630 n = root.find('state')
1631 self.state.name, self.state.who, self.state.when \
1632 = n.get('name'), n.get('who'), n.get('when')
1634 self.state.comment = n.find('comment').text.strip()
1636 self.state.comment = None
1638 # read the review states
1639 for r in root.findall('review'):
1641 s.state = r.get('state')
1642 s.by_user = r.get('by_user')
1643 s.by_group = r.get('by_group')
1644 s.who = r.get('who')
1645 s.when = r.get('when')
1647 s.comment = r.find('comment').text.strip()
1650 self.reviews.append(s)
1652 # read the state history
1653 for h in root.findall('history'):
1655 s.name = h.get('name')
1656 s.who = h.get('who')
1657 s.when = h.get('when')
1659 s.comment = h.find('comment').text.strip()
1662 self.statehistory.append(s)
1663 self.statehistory.reverse()
1665 # read a description, if it exists
1667 n = root.find('description').text
1672 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1673 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1674 dst_prj, dst_pkg, src_update)
1677 def list_view(self):
1678 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1680 for a in self.actions:
1681 dst = "%s/%s" % (a.dst_project, a.dst_package)
1682 if a.src_package == a.dst_package:
1686 if a.type=="submit":
1687 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1688 if a.type=="change_devel":
1689 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1690 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1692 ret += '\n %s: %-50s %-20s ' % \
1693 (a.type, sr_source, dst)
1695 if self.statehistory and self.statehistory[0]:
1697 for h in self.statehistory:
1698 who.append("%s(%s)" % (h.who,h.name))
1700 ret += "\n From: %s" % (' -> '.join(who))
1702 txt = re.sub(r'[^[:isprint:]]', '_', self.descr)
1704 lines = txt.splitlines()
1705 wrapper = textwrap.TextWrapper( width = 80,
1706 initial_indent=' Descr: ',
1707 subsequent_indent=' ')
1708 ret += "\n" + wrapper.fill(lines[0])
1709 wrapper.initial_indent = ' '
1710 for line in lines[1:]:
1711 ret += "\n" + wrapper.fill(line)
1717 def __cmp__(self, other):
1718 return cmp(self.reqid, other.reqid)
1722 for action in self.actions:
1723 action_list=" %s: " % (action.type)
1724 if action.type=="submit":
1727 r="(r%s)" % (action.src_rev)
1729 if action.src_update:
1730 m="(%s)" % (action.src_update)
1731 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1732 if action.dst_package:
1733 action_list=action_list+"/%s" % ( action.dst_package )
1734 elif action.type=="delete":
1735 action_list=action_list+" %s" % ( action.dst_project )
1736 if action.dst_package:
1737 action_list=action_list+"/%s" % ( action.dst_package )
1738 elif action.type=="change_devel":
1739 action_list=action_list+" %s/%s developed in %s/%s" % \
1740 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1741 action_list=action_list+"\n"
1756 self.state.name, self.state.when, self.state.who,
1759 if len(self.reviews):
1760 reviewitems = [ '%-10s %s %s %s %s %s' \
1761 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1762 for i in self.reviews ]
1763 s += '\nReview: ' + '\n '.join(reviewitems)
1766 if len(self.statehistory):
1767 histitems = [ '%-10s %s %s' \
1768 % (i.name, i.when, i.who) \
1769 for i in self.statehistory ]
1770 s += '\nHistory: ' + '\n '.join(histitems)
1777 """format time as Apr 02 18:19
1779 depending on whether it is in the current year
1783 if time.localtime()[0] == time.localtime(t)[0]:
1785 return time.strftime('%b %d %H:%M',time.localtime(t))
1787 return time.strftime('%b %d %Y',time.localtime(t))
def is_project_dir(d):
    """Return True when *d* is an osc project working copy: its store
    directory contains a '_project' file but no '_package' file."""
    osc_store = os.path.join(d, store)
    if not os.path.exists(os.path.join(osc_store, '_project')):
        return False
    return not os.path.exists(os.path.join(osc_store, '_package'))
def is_package_dir(d):
    """Return True when *d* is an osc package working copy: its store
    directory contains both a '_project' and a '_package' file."""
    osc_store = os.path.join(d, store)
    if not os.path.exists(os.path.join(osc_store, '_project')):
        return False
    return os.path.exists(os.path.join(osc_store, '_package'))
1799 def parse_disturl(disturl):
1800 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1801 revision), else raises an oscerr.WrongArgs exception
1804 m = DISTURL_RE.match(disturl)
1806 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1808 apiurl = m.group('apiurl')
1809 if apiurl.split('.')[0] != 'api':
1810 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1811 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
1813 def parse_buildlogurl(buildlogurl):
1814 """Parse a build log url, returns a tuple (apiurl, project, package,
1815 repository, arch), else raises oscerr.WrongArgs exception"""
1817 global BUILDLOGURL_RE
1819 m = BUILDLOGURL_RE.match(buildlogurl)
1821 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1823 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1826 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1827 This is handy to allow copy/paste a project/package combination in this form.
1829 Trailing slashes are removed before the split, because the split would
1830 otherwise give an additional empty string.
1838 def expand_proj_pack(args, idx=0, howmany=0):
1839 """looks for occurance of '.' at the position idx.
1840 If howmany is 2, both proj and pack are expanded together
1841 using the current directory, or none of them, if not possible.
1842 If howmany is 0, proj is expanded if possible, then, if there
1843 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1844 expanded, if possible.
1845 If howmany is 1, only proj is expanded if possible.
1847 If args[idx] does not exists, an implicit '.' is assumed.
1848 if not enough elements up to idx exist, an error is raised.
1850 See also parseargs(args), slash_split(args), findpacs(args)
1851 All these need unification, somehow.
1854 # print args,idx,howmany
1857 raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)
1859 if len(args) == idx:
1861 if args[idx+0] == '.':
1862 if howmany == 0 and len(args) > idx+1:
1863 if args[idx+1] == '.':
1865 # remove one dot and make sure to expand both proj and pack
1870 # print args,idx,howmany
1872 args[idx+0] = store_read_project('.')
1875 package = store_read_package('.')
1876 args.insert(idx+1, package)
1880 package = store_read_package('.')
1881 args.insert(idx+1, package)
1885 def findpacs(files, progress_obj=None):
1886 """collect Package objects belonging to the given files
1887 and make sure each Package is returned only once"""
1890 p = filedir_to_pac(f, progress_obj)
1893 if i.name == p.name:
1903 def filedir_to_pac(f, progress_obj=None):
1904 """Takes a working copy path, or a path to a file inside a working copy,
1905 and returns a Package object instance
1907 If the argument was a filename, add it onto the "todo" list of the Package """
1909 if os.path.isdir(f):
1911 p = Package(wd, progress_obj=progress_obj)
1913 wd = os.path.dirname(f) or os.curdir
1914 p = Package(wd, progress_obj=progress_obj)
1915 p.todo = [ os.path.basename(f) ]
1919 def read_filemeta(dir):
1921 r = ET.parse(os.path.join(dir, store, '_files'))
1922 except SyntaxError, e:
1923 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1924 'When parsing .osc/_files, the following error was encountered:\n'
def read_tobedeleted(dir):
    # Collect the filenames scheduled for deletion from the store's
    # '_to_be_deleted' file, one stripped name per line.
    fname = os.path.join(dir, store, '_to_be_deleted')
    if os.path.exists(fname):
        r = [ line.strip() for line in open(fname) ]
    # NOTE(review): no return statement is visible in this view; presumably
    # the list r is returned -- confirm against the complete source file.
1939 def read_sizelimit(dir):
1941 fname = os.path.join(dir, store, '_size_limit')
1943 if os.path.exists(fname):
1944 r = open(fname).readline()
1946 if r is None or not r.isdigit():
def read_inconflict(dir):
    # Collect the filenames marked as conflicting from the store's
    # '_in_conflict' file, one stripped name per line.
    fname = os.path.join(dir, store, '_in_conflict')
    if os.path.exists(fname):
        r = [ line.strip() for line in open(fname) ]
    # NOTE(review): no return statement is visible in this view; presumably
    # the list r is returned -- confirm against the complete source file.
def parseargs(list_of_args):
    """Convenience method osc's commandline argument parsing.

    If called with an empty tuple (or list), return a list containing the current directory.
    Otherwise, return a list of the arguments."""
    if list_of_args:
        return list(list_of_args)
    # no arguments given: fall back to the current directory, as the
    # docstring promises (the unconditional return dropped the empty case)
    return [os.curdir]
def statfrmt(statusletter, filename):
    """Format a single status-display line: status letter, then filename."""
    fmt = '%s %s'
    return fmt % (statusletter, filename)
1975 def pathjoin(a, *p):
1976 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1977 path = os.path.join(a, *p)
1978 if path.startswith('./'):
def makeurl(baseurl, l, query=[]):
    """Given a list of path compoments, construct a complete URL.

    Optional parameters for a query string can be given as a list, as a
    dictionary, or as an already assembled string.
    In case of a dictionary, the parameters will be urlencoded by this
    function. In case of a list not -- this is to be backwards compatible.
    """
    # NOTE(review): the mutable default query=[] is harmless here because
    # query is only ever rebound below, never mutated in place.
    if conf.config['verbose'] > 1:
        print 'makeurl:', baseurl, l, query
    if type(query) == type(list()):
        query = '&'.join(query)
    elif type(query) == type(dict()):
        query = urlencode(query)
    # keep scheme and host from the base URL; replace path and query string
    scheme, netloc = urlsplit(baseurl)[0:2]
    return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
2004 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2005 """wrapper around urllib2.urlopen for error handling,
2006 and to support additional (PUT, DELETE) methods"""
2010 if conf.config['http_debug']:
2013 print '--', method, url
2015 if method == 'POST' and not file and not data:
2016 # adding data to an urllib2 request transforms it into a POST
2019 req = urllib2.Request(url)
2020 api_host_options = {}
2022 api_host_options = conf.get_apiurl_api_host_options(url)
2023 for header, value in api_host_options['http_headers']:
2024 req.add_header(header, value)
2026 # "external" request (url is no apiurl)
2029 req.get_method = lambda: method
2031 # POST requests are application/x-www-form-urlencoded per default
2032 # since we change the request into PUT, we also need to adjust the content type header
2033 if method == 'PUT' or (method == 'POST' and data):
2034 req.add_header('Content-Type', 'application/octet-stream')
2036 if type(headers) == type({}):
2037 for i in headers.keys():
2039 req.add_header(i, headers[i])
2041 if file and not data:
2042 size = os.path.getsize(file)
2044 data = open(file, 'rb').read()
2047 filefd = open(file, 'rb')
2049 if sys.platform[:3] != 'win':
2050 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2052 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2054 except EnvironmentError, e:
2056 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2057 '\non a filesystem which does not support this.' % (e, file))
2058 elif hasattr(e, 'winerror') and e.winerror == 5:
2059 # falling back to the default io
2060 data = open(file, 'rb').read()
2064 if conf.config['debug']: print method, url
2066 old_timeout = socket.getdefaulttimeout()
2067 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2068 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2069 socket.setdefaulttimeout(timeout)
2071 fd = urllib2.urlopen(req, data=data)
2073 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2074 socket.setdefaulttimeout(old_timeout)
2075 if hasattr(conf.cookiejar, 'save'):
2076 conf.cookiejar.save(ignore_discard=True)
2078 if filefd: filefd.close()
# Convenience wrappers: one helper per HTTP verb, each delegating to
# http_request(), which wraps urllib2.urlopen and adds PUT/DELETE support.
def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
2089 def init_project_dir(apiurl, dir, project):
2090 if not os.path.exists(dir):
2091 if conf.config['checkout_no_colon']:
2092 os.makedirs(dir) # helpful with checkout_no_colon
2095 if not os.path.exists(os.path.join(dir, store)):
2096 os.mkdir(os.path.join(dir, store))
2098 # print 'project=',project,' dir=',dir
2099 store_write_project(dir, project)
2100 store_write_apiurl(dir, apiurl)
2101 if conf.config['do_package_tracking']:
2102 store_write_initial_packages(dir, project, [])
2104 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
2105 if not os.path.isdir(store):
2108 f = open('_project', 'w')
2109 f.write(project + '\n')
2111 f = open('_package', 'w')
2112 f.write(package + '\n')
2116 f = open('_size_limit', 'w')
2117 f.write(str(limit_size))
2121 f = open('_files', 'w')
2122 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
2126 ET.ElementTree(element=ET.Element('directory')).write('_files')
2128 f = open('_osclib_version', 'w')
2129 f.write(__store_version__ + '\n')
2132 store_write_apiurl(os.path.pardir, apiurl)
2138 def check_store_version(dir):
2139 versionfile = os.path.join(dir, store, '_osclib_version')
2141 v = open(versionfile).read().strip()
2146 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2147 if os.path.exists(os.path.join(dir, '.svn')):
2148 msg = msg + '\nTry svn instead of osc.'
2149 raise oscerr.NoWorkingCopy(msg)
2151 if v != __store_version__:
2152 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2153 # version is fine, no migration needed
2154 f = open(versionfile, 'w')
2155 f.write(__store_version__ + '\n')
2158 msg = 'The osc metadata of your working copy "%s"' % dir
2159 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2160 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2161 raise oscerr.WorkingCopyWrongVersion, msg
2164 def meta_get_packagelist(apiurl, prj):
2166 u = makeurl(apiurl, ['source', prj])
2168 root = ET.parse(f).getroot()
2169 return [ node.get('name') for node in root.findall('entry') ]
2172 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2173 """return a list of file names,
2174 or a list File() instances if verbose=True"""
2180 query['rev'] = revision
2182 query['rev'] = 'latest'
2184 u = makeurl(apiurl, ['source', prj, package], query=query)
2186 root = ET.parse(f).getroot()
2189 return [ node.get('name') for node in root.findall('entry') ]
2193 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2194 rev = root.get('rev')
2195 for node in root.findall('entry'):
2196 f = File(node.get('name'),
2198 int(node.get('size')),
2199 int(node.get('mtime')))
2205 def meta_get_project_list(apiurl):
2206 u = makeurl(apiurl, ['source'])
2208 root = ET.parse(f).getroot()
2209 return sorted([ node.get('name') for node in root ])
2212 def show_project_meta(apiurl, prj):
2213 url = makeurl(apiurl, ['source', prj, '_meta'])
2215 return f.readlines()
2218 def show_project_conf(apiurl, prj):
2219 url = makeurl(apiurl, ['source', prj, '_config'])
2221 return f.readlines()
2224 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2225 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2229 except urllib2.HTTPError, e:
2230 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2234 def show_package_meta(apiurl, prj, pac):
2235 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2238 return f.readlines()
2239 except urllib2.HTTPError, e:
2240 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2244 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2246 path.append('source')
2252 path.append('_attribute')
2254 path.append(attribute)
2257 query.append("with_default=1")
2259 query.append("with_project=1")
2260 url = makeurl(apiurl, path, query)
2263 return f.readlines()
2264 except urllib2.HTTPError, e:
2265 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
def show_develproject(apiurl, prj, pac):
    # Fetch the package meta and return the project recorded in its
    # <devel> element.
    # NOTE(review): if the meta carries no <devel> element, find() yields
    # None and the .get() below raises AttributeError; the full source may
    # guard this case -- confirm before relying on it.
    m = show_package_meta(apiurl, prj, pac)
    return ET.fromstring(''.join(m)).find('devel').get('project')
2277 def show_pattern_metalist(apiurl, prj):
2278 url = makeurl(apiurl, ['source', prj, '_pattern'])
2282 except urllib2.HTTPError, e:
2283 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2285 r = [ node.get('name') for node in tree.getroot() ]
2290 def show_pattern_meta(apiurl, prj, pattern):
2291 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2294 return f.readlines()
2295 except urllib2.HTTPError, e:
2296 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2301 """metafile that can be manipulated and is stored back after manipulation."""
2302 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2306 self.change_is_required = change_is_required
2307 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2308 f = os.fdopen(fd, 'w')
2309 f.write(''.join(input))
2311 self.hash_orig = dgst(self.filename)
2314 hash = dgst(self.filename)
2315 if self.change_is_required and hash == self.hash_orig:
2316 print 'File unchanged. Not saving.'
2317 os.unlink(self.filename)
2320 print 'Sending meta data...'
2321 # don't do any exception handling... it's up to the caller what to do in case
2323 http_PUT(self.url, file=self.filename)
2324 os.unlink(self.filename)
2330 run_editor(self.filename)
2334 except urllib2.HTTPError, e:
2335 error_help = "%d" % e.code
2336 if e.headers.get('X-Opensuse-Errorcode'):
2337 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2339 print >>sys.stderr, 'BuildService API error:', error_help
2340 # examine the error - we can't raise an exception because we might want
2343 if '<summary>' in data:
2344 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2345 input = raw_input('Try again? ([y/N]): ')
2346 if input not in ['y', 'Y']:
2352 if os.path.exists(self.filename):
2353 print 'discarding %s' % self.filename
2354 os.unlink(self.filename)
2357 # different types of metadata
2358 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2359 'template': new_project_templ,
2362 'pkg': { 'path' : 'source/%s/%s/_meta',
2363 'template': new_package_templ,
2366 'attribute': { 'path' : 'source/%s/%s/_meta',
2367 'template': new_attribute_templ,
2370 'prjconf': { 'path': 'source/%s/_config',
2374 'user': { 'path': 'person/%s',
2375 'template': new_user_template,
2378 'pattern': { 'path': 'source/%s/_pattern/%s',
2379 'template': new_pattern_template,
2384 def meta_exists(metatype,
2391 apiurl = conf.config['apiurl']
2392 url = make_meta_url(metatype, path_args, apiurl)
2394 data = http_GET(url).readlines()
2395 except urllib2.HTTPError, e:
2396 if e.code == 404 and create_new:
2397 data = metatypes[metatype]['template']
2399 data = StringIO(data % template_args).readlines()
2404 def make_meta_url(metatype, path_args=None, apiurl=None):
2406 apiurl = conf.config['apiurl']
2407 if metatype not in metatypes.keys():
2408 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2409 path = metatypes[metatype]['path']
2412 path = path % path_args
2414 return makeurl(apiurl, [path])
2417 def edit_meta(metatype,
2422 change_is_required=False,
2426 apiurl = conf.config['apiurl']
2428 data = meta_exists(metatype,
2431 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2435 change_is_required = True
2437 url = make_meta_url(metatype, path_args, apiurl)
2438 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2446 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
2449 query['rev'] = revision
2451 query['rev'] = 'latest'
2453 query['linkrev'] = linkrev
2454 elif conf.config['linkcontrol']:
2455 query['linkrev'] = 'base'
2459 query['emptylink'] = 1
2460 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2462 # look for "too large" files according to size limit and mark them
2463 root = ET.fromstring(''.join(f.readlines()))
2464 for e in root.findall('entry'):
2465 size = e.get('size')
2466 if size and limit_size and int(size) > int(limit_size):
2467 e.set('skipped', 'true')
2468 return ET.tostring(root)
def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
    """Fetch the files meta of the package from the server and return the
    'srcmd5' attribute of its root element."""
    meta = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
    root = ET.fromstring(''.join(meta))
    return root.get('srcmd5')
# Return the expanded srcmd5 (xsrcmd5) of a source-link package; raises
# oscerr.LinkExpandError when the server could not expand the link.
2476 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2477 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2479 # only source link packages have a <linkinfo> element.
2480 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2488 raise oscerr.LinkExpandError(prj, pac, li.error)
def show_upstream_rev(apiurl, prj, pac):
    """Return the current 'rev' attribute of the package's server-side file list."""
    directory_xml = ''.join(show_files_meta(apiurl, prj, pac))
    return ET.fromstring(directory_xml).get('rev')
# Parse tags (e.g. 'Version') and sections (e.g. '%description') out of
# an RPM spec file into a dictionary.  Tags are passed without a colon,
# sections with their leading '%'.
2497 def read_meta_from_spec(specfile, *args):
2498 import codecs, locale, re
2500 Read tags and sections from spec file. To read out
2501 a tag the passed argument mustn't end with a colon. To
2502 read out a section the passed argument must start with
2504 This method returns a dictionary which contains the
# bail out early on anything that is not a regular file
2508 if not os.path.isfile(specfile):
2509 raise IOError('\'%s\' is not a regular file' % specfile)
# prefer the locale's encoding; fall back to a plain read on decode errors
2512 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2513 except UnicodeDecodeError:
2514 lines = open(specfile).readlines()
# arguments starting with '%' are sections, the rest are tags
2521 if itm.startswith('%'):
2522 sections.append(itm)
# tags: one regex search over the whole joined file per tag
2526 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2528 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2529 if m and m.group('val'):
2530 spec_data[tag] = m.group('val').strip()
2532 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
# sections: locate the heading line, then collect until the next '%' line
2535 section_pat = '^%s\s*?$'
2536 for section in sections:
2537 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2539 start = lines.index(m.group()+'\n') + 1
2541 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2544 for line in lines[start:]:
2545 if line.startswith('%'):
2548 spec_data[section] = data
# Page *message* through $PAGER (default 'less') using a temp file.
# When stdout is not a tty, paging makes no sense and is skipped.
2552 def run_pager(message):
2553 import tempfile, sys
2555 if not sys.stdout.isatty():
2558 tmpfile = tempfile.NamedTemporaryFile()
2559 tmpfile.write(message)
# shell=True -- presumably so PAGER may carry arguments (e.g. "less -R");
# NOTE(review): message content never reaches the shell, only the temp path.
2561 pager = os.getenv('PAGER', default='less')
2562 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
def run_editor(filename):
    """Open *filename* in the user's editor and return the editor's exit code.

    $EDITOR wins when set; otherwise a per-platform default is used
    ('vim' everywhere except Windows, where it is 'notepad').
    """
    fallback = 'vim' if sys.platform[:3] != 'win' else 'notepad'
    editor = os.getenv('EDITOR', default=fallback)
    return subprocess.call([editor, filename])
# Let the user write a (commit) message in an editor.  *template* is
# truncated to *templatelen* lines; overflow lines are moved into
# *footer* below the ignore-delimiter.  Returns the text above the
# delimiter; an empty message triggers an abort/continue/edit prompt.
2573 def edit_message(footer='', template='', templatelen=30):
2574 delim = '--This line, and those below, will be ignored--\n'
2576 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2577 f = os.fdopen(fd, 'w')
2579 if not templatelen is None:
2580 lines = template.splitlines()
2581 template = '\n'.join(lines[:templatelen])
2582 if lines[templatelen:]:
2583 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2593 run_editor(filename)
# everything below the delimiter is discarded from the message
2594 msg = open(filename).read().split(delim)[0].rstrip()
# empty message: the user may abort, continue anyway, or re-edit
2599 input = raw_input('Log message not specified\n'
2600 'a)bort, c)ontinue, e)dit: ')
2602 raise oscerr.UserAbort()
# Create a delete request for project[/package] on the server and return
# the new request id.  The package attribute of <target> is optional;
# the message is XML-escaped before being embedded.
2612 def create_delete_request(apiurl, project, package, message):
2617 package = """package="%s" """ % (package)
2623 <action type="delete">
2624 <target project="%s" %s/>
2627 <description>%s</description>
2629 """ % (project, package,
2630 cgi.escape(message or ''))
2632 u = makeurl(apiurl, ['request'], query='cmd=create')
2633 f = http_POST(u, data=xml)
# the server's response carries the id of the freshly created request
2635 root = ET.parse(f).getroot()
2636 return root.get('id')
# Create a 'change_devel' request (move the devel location of a package)
# and return the new request id reported by the server.
2639 def create_change_devel_request(apiurl,
2640 devel_project, devel_package,
2647 <action type="change_devel">
2648 <source project="%s" package="%s" />
2649 <target project="%s" package="%s" />
2652 <description>%s</description>
2654 """ % (devel_project,
2658 cgi.escape(message or ''))
2660 u = makeurl(apiurl, ['request'], query='cmd=create')
2661 f = http_POST(u, data=xml)
# the response carries the id of the freshly created request
2663 root = ET.parse(f).getroot()
2664 return root.get('id')
2667 # This creates an old style submit request for server api 1.0
2668 def create_submit_request(apiurl,
2669 src_project, src_package,
2670 dst_project=None, dst_package=None,
2671 message=None, orev=None, src_update=None):
# optional <options> block controls the server-side sourceupdate mode
2676 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2678 # Yes, this kind of xml construction is horrible
# target project/package are optional; build their XML only when given
2683 packagexml = """package="%s" """ %( dst_package )
2684 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2685 # XXX: keep the old template for now in order to work with old obs instances
2687 <request type="submit">
2689 <source project="%s" package="%s" rev="%s"/>
2694 <description>%s</description>
2698 orev or show_upstream_rev(apiurl, src_project, src_package),
2701 cgi.escape(message or ""))
2703 u = makeurl(apiurl, ['request'], query='cmd=create')
2704 f = http_POST(u, data=xml)
# the response carries the id of the freshly created request
2706 root = ET.parse(f).getroot()
2707 return root.get('id')
# Fetch a single request by id from the server and parse its XML.
2710 def get_request(apiurl, reqid):
2711 u = makeurl(apiurl, ['request', reqid])
2713 root = ET.parse(f).getroot()
# Change the state of a review on a request; *message* becomes the POST
# body (the review comment).
# NOTE(review): the 'by_group' parameter is accepted but does not appear
# in the visible query dict -- verify whether it should be passed along.
2720 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2723 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2724 f = http_POST(u, data=message)
# Change the state of a request (accept/decline/revoke/...); *message*
# becomes the POST body (the state-change comment).
2727 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2730 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2731 f = http_POST(u, data=message)
# Search for requests matching the given filters by assembling an xpath
# expression and running a server-side search.
# NOTE(review): 'exclude_target_projects=[]' is a mutable default
# argument -- shared across calls if ever mutated.
2735 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
# 'all' disables state filtering; otherwise OR the wanted states together
2737 if not 'all' in req_state:
2738 for state in req_state:
2739 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2741 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2743 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2747 todo['project'] = project
2749 todo['package'] = package
# match both the new (action/...) and the old (submit/...) request schema
2750 for kind, val in todo.iteritems():
2751 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2752 'action/source/@%(kind)s=\'%(val)s\' or ' \
2753 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2754 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2756 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2757 for i in exclude_target_projects:
2758 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2759 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2761 if conf.config['verbose'] > 1:
2762 print '[ %s ]' % xpath
2763 res = search(apiurl, request=xpath)
2764 collection = res['request']
2766 for root in collection.findall('request'):
# NOTE(review): 'exclude_projects=[]' and 'projpkgs={}' are mutable
# default arguments -- projpkgs accumulates entries across calls.
2772 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2773 """Return all new requests for all projects/packages where the user is involved"""
# collect the projects and packages the user is involved in
2775 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2776 for i in res['project_id'].findall('project'):
2777 projpkgs[i.get('name')] = []
2778 for i in res['package_id'].findall('package'):
2779 if not i.get('project') in projpkgs.keys():
2780 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
# build an xpath that ORs a clause per project (all packages) or per
# explicit project+package pair
2782 for prj, pacs in projpkgs.iteritems():
2784 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2788 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2789 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2790 xpath = xpath_join(xpath, xp, inner=True)
2792 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
# AND the state filter onto the whole target expression
2793 if not 'all' in req_state:
2795 for state in req_state:
2796 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2797 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2798 res = search(apiurl, request=xpath)
2800 for root in res['request'].findall('request'):
# Render the state history of a request as formatted text entries.
# NOTE(review): the 'apiurl' parameter is ignored -- the call below uses
# conf.config['apiurl'] instead; this looks like a bug, verify callers.
2806 def get_request_log(apiurl, reqid):
2807 r = get_request(conf.config['apiurl'], reqid)
2809 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2810 # the description of the request is used for the initial log entry
2811 # otherwise its comment attribute would contain None
2812 if len(r.statehistory) >= 1:
2813 r.statehistory[-1].comment = r.descr
2815 r.state.comment = r.descr
# one formatted entry per state, newest (current) first
2816 for state in [ r.state ] + r.statehistory:
2817 s = frmt % (state.name, state.who, state.when, str(state.comment))
# Fetch the meta XML of a user as one string; on HTTPError (user not
# found) a message is printed instead of raising to the caller.
2822 def get_user_meta(apiurl, user):
2823 u = makeurl(apiurl, ['person', quote_plus(user)])
2826 return ''.join(f.readlines())
2827 except urllib2.HTTPError:
2828 print 'user \'%s\' not found' % user
2832 def get_user_data(apiurl, user, *tags):
2833 """get specified tags from the user meta"""
2834 meta = get_user_meta(apiurl, user)
2837 root = ET.fromstring(meta)
# collect the text content of each requested tag
2840 if root.find(tag).text != None:
2841 data.append(root.find(tag).text)
# root.find() returns None for a missing tag, so .text raises AttributeError
2845 except AttributeError:
2846 # this part is reached if the tags tuple contains an invalid tag
2847 print 'The xml file for user \'%s\' seems to be broken' % user
# Stream *url* into *filename*: download into a temp file first, then
# move it into place; optionally restore the given mtime.
2852 def download(url, filename, progress_obj = None, mtime = None):
2853 import tempfile, shutil
2856 prefix = os.path.basename(filename)
2857 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
# mkstemp creates mode 0600; relax to the usual 0644
2858 os.chmod(tmpfile, 0644)
2860 o = os.fdopen(fd, 'wb')
2861 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2864 shutil.move(tmpfile, filename)
# -1 leaves atime untouched; only mtime is set
2873 os.utime(filename, (-1, mtime))
# Download a single source file of prj/package to *targetfilename*
# (defaults to the remote file name), optionally at a fixed revision.
2875 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None):
2876 targetfilename = targetfilename or filename
2879 query = { 'rev': revision }
2880 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2881 download(u, targetfilename, progress_obj, mtime)
# Download a binary build-result file; with progress_meter a TextMeter
# shows download progress.
2883 def get_binary_file(apiurl, prj, repo, arch,
2886 target_filename = None,
2887 target_mtime = None,
2888 progress_meter = False):
2891 from meter import TextMeter
2892 progress_obj = TextMeter()
2894 target_filename = target_filename or filename
# '_repository' addresses repository-level binaries when no package is given
2896 where = package or '_repository'
2897 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2898 download(u, target_filename, progress_obj, target_mtime)
# Return the hex md5 digest of the given string.
# NOTE(review): the parameter name 'str' shadows the builtin.
2900 def dgst_from_string(str):
2901 # Python 2.5 depracates the md5 modules
2902 # Python 2.4 doesn't have hashlib yet
2905 md5_hash = hashlib.md5()
# fallback path for interpreters without hashlib (the old md5 module)
2908 md5_hash = md5.new()
2909 md5_hash.update(str)
2910 return md5_hash.hexdigest()
# Hex digest of a file's contents, read in BUFSIZE chunks (the def line
# and the hash-update loop body are not visible in this capture).
2914 #if not os.path.exists(file):
2924 f = open(file, 'rb')
2926 buf = f.read(BUFSIZE)
2929 return s.hexdigest()
# diff(1)-style heuristic: data containing a NUL byte within the first
# 4096 bytes is treated as binary (def line elided in this capture).
2934 """return true if a string is binary data using diff's heuristic"""
2935 if s and '\0' in s[:4096]:
def binary_file(fn):
    """Return whether the file *fn* looks binary, judged by its first 4096 bytes."""
    f = open(fn, 'rb')
    try:
        head = f.read(4096)
    finally:
        f.close()
    return binary(head)
2945 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2947 This methods diffs oldfilename against filename (so filename will
2948 be shown as the new file).
2949 The variable origfilename is used if filename and oldfilename differ
2950 in their names (for instance if a tempfile is used for filename etc.)
2956 oldfilename = filename
# default to the stored copy inside the working-copy store dir
2959 olddir = os.path.join(dir, store)
2961 if not origfilename:
2962 origfilename = filename
2964 file1 = os.path.join(olddir, oldfilename) # old/stored original
2965 file2 = os.path.join(dir, filename) # working copy
2967 f1 = open(file1, 'rb')
2971 f2 = open(file2, 'rb')
# binary content cannot be diffed textually -- emit a marker line instead
2975 if binary(s1) or binary (s2):
2976 d = ['Binary file %s has changed\n' % origfilename]
2979 d = difflib.unified_diff(\
2982 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2983 tofile = '%s\t(working copy)' % origfilename)
2985 # if file doesn't end with newline, we need to append one in the diff result
2987 for i, line in enumerate(d):
2988 if not line.endswith('\n'):
2989 d[i] += '\n\\ No newline at end of file'
# Build a diff of the working copy *wc*: against its store when no
# revision is given, otherwise against a fresh temp checkout of
# *revision*.  Added/removed files are diffed against an empty temp file.
# NOTE(review): the loop variable 'file' shadows the builtin throughout.
2995 def make_diff(wc, revision):
3001 diff_hdr = 'Index: %s\n'
3002 diff_hdr += '===================================================================\n'
3004 olddir = os.getcwd()
# classify the explicitly requested files (wc.todo) ...
3008 for file in wc.todo:
3009 if file in wc.skipped:
3011 if file in wc.filenamelist+wc.filenamelist_unvers:
3012 state = wc.status(file)
3014 added_files.append(file)
3016 removed_files.append(file)
3017 elif state == 'M' or state == 'C':
3018 changed_files.append(file)
3020 diff.append('osc: \'%s\' is not under version control' % file)
# ... or every file of the working copy when no todo list is given
3022 for file in wc.filenamelist+wc.filenamelist_unvers:
3023 if file in wc.skipped:
3025 state = wc.status(file)
3026 if state == 'M' or state == 'C':
3027 changed_files.append(file)
3029 added_files.append(file)
3031 removed_files.append(file)
# diffing against a specific revision: check it out into a temp dir
3033 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
3035 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
3036 cmp_pac = Package(tmpdir)
3038 for file in wc.todo:
3039 if file in cmp_pac.skipped:
3041 if file in cmp_pac.filenamelist:
3042 if file in wc.filenamelist:
3043 changed_files.append(file)
3045 diff.append('osc: \'%s\' is not under version control' % file)
3047 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
3049 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
3051 for file in changed_files:
3052 diff.append(diff_hdr % file)
3054 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
3056 cmp_pac.updatefile(file, revision)
3057 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3058 cmp_pac.absdir, file))
# added files are diffed against an empty temp file
3059 (fd, tmpfile) = tempfile.mkstemp()
3060 for file in added_files:
3061 diff.append(diff_hdr % file)
3063 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3064 os.path.dirname(tmpfile), file))
3066 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3067 os.path.dirname(tmpfile), file))
3069 # FIXME: this is ugly but it cannot be avoided atm
3070 # if a file is deleted via "osc rm file" we should keep the storefile.
3072 if cmp_pac == None and removed_files:
3073 tmpdir = tempfile.mkdtemp()
3075 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3076 tmp_pac = Package(tmpdir)
# removed files: diff the stored/remote copy against the empty temp file
3079 for file in removed_files:
3080 diff.append(diff_hdr % file)
3082 tmp_pac.updatefile(file, tmp_pac.rev)
3083 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3084 wc.rev, file, tmp_pac.storedir, file))
3086 cmp_pac.updatefile(file, revision)
3087 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3088 revision, file, cmp_pac.storedir, file))
# clean up the temporary checkouts
3092 delete_dir(cmp_pac.absdir)
3094 delete_dir(tmp_pac.absdir)
# Ask the server for a diff between two source revisions/packages via
# the 'diff' source command; 'unified' and 'missingok' are optional.
3098 def server_diff(apiurl,
3099 old_project, old_package, old_revision,
3100 new_project, new_package, new_revision, unified=False, missingok=False):
3101 query = {'cmd': 'diff', 'expand': '1'}
# the o* parameters describe the "old" side; each is optional
3103 query['oproject'] = old_project
3105 query['opackage'] = old_package
3107 query['orev'] = old_revision
3109 query['rev'] = new_revision
3111 query['unified'] = 1
3113 query['missingok'] = 1
3115 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
3121 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3123 creates the plain directory structure for a package dir.
3124 The 'apiurl' parameter is needed for the project dir initialization.
3125 The 'project' and 'package' parameters specify the name of the
3126 project and the package. The optional 'pathname' parameter is used
3127 for printing out the message that a new dir was created (default: 'prj_dir/package').
3128 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3130 prj_dir = prj_dir or project
3132 # FIXME: carefully test each path component of prj_dir,
3133 # if we have a .osc/_files entry at that level.
3134 # -> if so, we have a package/project clash,
3135 # and should rename this path component by appending '.proj'
3136 # and give user a warning message, to discourage such clashes
3138 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
# a package dir cannot double as a project dir
3139 if is_package_dir(prj_dir):
3140 # we want this to become a project directory,
3141 # but it already is a package directory.
3142 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3144 if not is_project_dir(prj_dir):
3145 # this directory could exist as a parent directory for one of our earlier
3146 # checked out sub-projects. in this case, we still need to initialize it.
3147 print statfrmt('A', prj_dir)
3148 init_project_dir(apiurl, prj_dir, project)
3150 if is_project_dir(os.path.join(prj_dir, package)):
3151 # the thing exists, but is a project directory and not a package directory
3152 # FIXME: this should be a warning message to discourage package/project clashes
3153 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
# finally create the package dir plus its store subdir
3155 if not os.path.exists(os.path.join(prj_dir, package)):
3156 print statfrmt('A', pathname)
3157 os.mkdir(os.path.join(prj_dir, package))
3158 os.mkdir(os.path.join(prj_dir, package, store))
3160 return(os.path.join(prj_dir, package))
# Check out a package into prj_dir/package: create the directories,
# initialize the store, fetch all files (honouring skipped files, the
# size limit and _service: files) and register the package for project
# tracking when that option is enabled.
3163 def checkout_package(apiurl, project, package,
3164 revision=None, pathname=None, prj_obj=None,
3165 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3167 # the project we're in might be deleted.
3168 # that'll throw an error then.
3169 olddir = os.getcwd()
3171 olddir = os.environ.get("PWD")
# ':' is not usable in Windows paths; optionally map it to '/' as well
3176 if sys.platform[:3] == 'win':
3177 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3179 if conf.config['checkout_no_colon']:
3180 prj_dir = prj_dir.replace(':', '/')
3183 pathname = getTransActPath(os.path.join(prj_dir, package))
3185 # before we create directories and stuff, check if the package actually
3187 show_package_meta(apiurl, project, package)
3191 # try to read from the linkinfo
3192 # if it is a link we use the xsrcmd5 as the revision to be
3195 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
# fall back to the link's base revision when expansion failed
3197 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3202 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3203 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3205 p = Package(package, progress_obj=progress_obj)
# fetch every file except skipped ones and (unless requested) _service: files
3208 for filename in p.filenamelist:
3209 if filename in p.skipped:
3211 if service_files or not filename.startswith('_service:'):
3212 p.updatefile(filename, revision)
3213 # print 'A ', os.path.join(project, package, filename)
3214 print statfrmt('A', os.path.join(pathname, filename))
3215 if conf.config['do_package_tracking']:
3216 # check if we can re-use an existing project object
3218 prj_obj = Project(os.getcwd())
3219 prj_obj.set_state(p.name, ' ')
3220 prj_obj.write_packages()
3224 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3225 dst_userid = None, keep_develproject = False):
3227 update pkgmeta with new new_name and new_prj and set calling user as the
3228 only maintainer (unless keep_maintainers is set). Additionally remove the
3229 develproject entry (<devel />) unless keep_develproject is true.
3231 root = ET.fromstring(''.join(pkgmeta))
3232 root.set('name', new_name)
3233 root.set('project', new_prj)
# drop the existing maintainer entries unless asked to keep them
3234 if not keep_maintainers:
3235 for person in root.findall('person'):
# likewise drop <devel /> unless it should be preserved
3237 if not keep_develproject:
3238 for dp in root.findall('devel'):
3240 return ET.tostring(root)
# Convert a package that contains a _link (+ project diff) into a real
# branch via the server-side 'linktobranch' source command; raises
# OscIOError when the package has no _link file.
3242 def link_to_branch(apiurl, project, package):
3244 convert a package with a _link + project.diff to a branch
3247 if '_link' in meta_get_filelist(apiurl, project, package):
3248 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3251 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
# Create a link package: make sure the destination package meta exists
# (creating it from the source package's meta when needed, optionally
# with publishing disabled), then upload a _link file pointing at src.
3253 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3255 create a linked package
3256 - "src" is the original package
3257 - "dst" is the "link" package that we are creating here
3262 dst_meta = meta_exists(metatype='pkg',
3263 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3265 create_new=False, apiurl=conf.config['apiurl'])
3266 root = ET.fromstring(''.join(dst_meta))
3267 print root.attrib['project']
3268 if root.attrib['project'] != dst_project:
3269 # The source comes from a different project via a project link, we need to create this instance
3275 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3276 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
# inject <publish><disable/></publish> when publishing should be off
3280 root = ET.fromstring(''.join(dst_meta))
3281 elm = root.find('publish')
3283 elm = ET.SubElement(root, 'publish')
3285 ET.SubElement(elm, 'disable')
3286 dst_meta = ET.tostring(root)
3290 path_args=(dst_project, dst_package),
3292 # create the _link file
3293 # but first, make sure not to overwrite an existing one
3294 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3296 print >>sys.stderr, 'forced overwrite of existing _link file'
3299 print >>sys.stderr, '_link file already exists...! Aborting'
# rev / cicount become optional attributes of the <link> element
3303 rev = 'rev="%s"' % rev
3308 cicount = 'cicount="%s"' % cicount
3312 print 'Creating _link...',
3313 link_template = """\
3314 <link project="%s" package="%s" %s %s>
3316 <!-- <apply name="patch" /> apply a patch on the source directory -->
3317 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3318 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3319 <!-- <delete>filename</delete> delete a file -->
3322 """ % (src_project, src_package, rev, cicount)
3324 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3325 http_PUT(u, data=link_template)
3328 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3331 - "src" is the original package
3332 - "dst" is the "aggregate" package that we are creating here
3333 - "map" is a dictionary SRC => TARGET repository mappings
3338 dst_meta = meta_exists(metatype='pkg',
3339 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3341 create_new=False, apiurl=conf.config['apiurl'])
3343 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3344 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3349 root = ET.fromstring(''.join(dst_meta))
3350 elm = root.find('publish')
3352 elm = ET.SubElement(root, 'publish')
3354 ET.SubElement(elm, 'disable')
3355 dst_meta = ET.tostring(root)
3358 path_args=(dst_project, dst_package),
3361 # create the _aggregate file
3362 # but first, make sure not to overwrite an existing one
3363 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3365 print >>sys.stderr, '_aggregate file already exists...! Aborting'