1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breakage of tools which use osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="Fedora_12">
81 <path project="Fedora:12" repository="standard" />
85 <repository name="SLE_11">
86 <path project="SUSE:SLE-11" repository="standard" />
95 new_package_templ = """\
96 <package name="%(name)s">
98 <title></title> <!-- Title of package -->
101 <!-- for long description -->
104 <person role="maintainer" userid="%(user)s"/>
105 <person role="bugowner" userid="%(user)s"/>
107 <url>PUT_UPSTREAM_URL_HERE</url>
111 use one of the examples below to disable building of this package
112 on a certain architecture, in a certain repository,
113 or a combination thereof:
115 <disable arch="x86_64"/>
116 <disable repository="SUSE_SLE-10"/>
117 <disable repository="SUSE_SLE-10" arch="x86_64"/>
119 Possible sections where you can use the tags above:
129 Please have a look at:
130 http://en.opensuse.org/Restricted_Formats
131 Packages containing formats listed there are NOT allowed to
132 be packaged in the openSUSE Buildservice and will be deleted!
139 new_attribute_templ = """\
141 <attribute namespace="" name="">
147 new_user_template = """\
149 <login>%(user)s</login>
150 <email>PUT_EMAIL_ADDRESS_HERE</email>
151 <realname>PUT_REAL_NAME_HERE</realname>
153 <project name="home:%(user)s"/>
169 new_pattern_template = """\
170 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
176 buildstatus_symbols = {'succeeded': '.',
178 'expansion error': 'U', # obsolete with OBS 2.0
191 # our own xml writer function to write xml nice, but with correct syntax
192 # This function is from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
193 from xml.dom import minidom
def fixed_writexml(self, writer, indent="", addindent="", newl=""):
    """Serialize this Element to *writer* without the bogus whitespace
    minidom's toprettyxml inserts around text-only children.

    indent    -- current indentation
    addindent -- indentation to add to higher levels
    newl      -- newline string
    """
    # NOTE(review): sampled excerpt -- several original lines (attribute
    # name sorting, closing attribute quotes, else-branches for the
    # childNodes cases) are elided between the lines below.
    writer.write(indent+"<" + self.tagName)
    attrs = self._get_attributes()
    a_names = attrs.keys()
    for a_name in a_names:
        writer.write(" %s=\"" % a_name)
        minidom._write_data(writer, attrs[a_name].value)
    # a single text child is written inline: <tag>text</tag>
    if len(self.childNodes) == 1 \
      and self.childNodes[0].nodeType == minidom.Node.TEXT_NODE:
        self.childNodes[0].writexml(writer, "", "", "")
        writer.write("</%s>%s" % (self.tagName, newl))
        writer.write(">%s"%(newl))
        for node in self.childNodes:
            node.writexml(writer,indent+addindent,addindent,newl)
        writer.write("%s</%s>%s" % (indent,self.tagName,newl))
        writer.write("/>%s"%(newl))
# replace minidom's function with ours
minidom.Element.writexml = fixed_writexml
# os.path.samefile is available only under Unix
def os_path_samefile(path1, path2):
    """Return True if *path1* and *path2* refer to the same file.

    Uses os.path.samefile where available; on platforms lacking it,
    falls back to comparing the canonicalized (realpath) paths.
    As written in the excerpt the fallback return was unreachable
    (two consecutive returns); restore the guarded fallback.
    """
    try:
        return os.path.samefile(path1, path2)
    except AttributeError:
        # os.path.samefile is missing on this platform --
        # realpath comparison is a close substitute
        return os.path.realpath(path1) == os.path.realpath(path2)
233 """represent a file, including its metadata"""
234 def __init__(self, name, md5, size, mtime):
244 """Source service content
247 """creates an empty serviceinfo instance"""
def read(self, serviceinfo_node):
    """read in the source services <services> element passed as
    argument; a None argument is ignored."""
    # NOTE(review): sampled excerpt -- the early-return body, the
    # param value lookup and the try/except scaffolding around the
    # loop are elided.
    if serviceinfo_node == None:
        # early-return body elided in excerpt
    services = serviceinfo_node.findall('service')
    for service in services:
        name = service.get('name')
        for param in service.findall('param'):
            option = param.get('name', None)
            # 'value' is assigned by an elided line (param value lookup)
            name += " --" + option + " '" + value + "'"
        self.commands.append(name)
    # malformed input: report the offending XML verbatim
    msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
    raise oscerr.APIError(msg)
def addVerifyFile(self, serviceinfo_node, filename):
    """Add a "verify_file" source service entry for *filename*,
    recording its sha256 checksum."""
    # NOTE(review): excerpt -- file close, appending the new element to
    # serviceinfo_node and the return are elided.
    f = open(filename, 'r')
    digest = hashlib.sha256(f.read()).hexdigest()
    s = ET.Element( "service", name="verify_file" )
    ET.SubElement(s, "param", name="file").text = filename
    ET.SubElement(s, "param", name="verifier").text = "sha256"
    ET.SubElement(s, "param", name="checksum").text = digest
def addDownloadUrl(self, serviceinfo_node, url_string):
    """Add a "download_url" source service entry built from the
    protocol/host/path parts of *url_string*."""
    from urlparse import urlparse
    url = urlparse( url_string )
    protocol = url.scheme
    # NOTE(review): excerpt -- host/path extraction and appending the
    # element to serviceinfo_node are elided.
    s = ET.Element( "service", name="download_url" )
    ET.SubElement(s, "param", name="protocol").text = protocol
    ET.SubElement(s, "param", name="host").text = host
    ET.SubElement(s, "param", name="path").text = path
def execute(self, dir):
    """Run each stored service command, moving its output into *dir*
    as "_service:<name>:<file>" entries."""
    # NOTE(review): sampled excerpt -- some lines are elided (e.g. the
    # return-code guard before the error print).
    for call in self.commands:
        temp_dir = tempfile.mkdtemp()
        name = call.split(None, 1)[0]
        if not os.path.exists("/usr/lib/obs/service/"+name):
            msg = "ERROR: service is not installed!\n"
            msg += "Maybe try this: zypper in obs-server-" + name
            raise oscerr.APIError(msg)
        c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
        ret = subprocess.call(c, shell=True)
        # presumably guarded by a nonzero-ret check -- elided in excerpt
        print "ERROR: service call failed: " + c
        for file in os.listdir(temp_dir):
            os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
325 """linkinfo metadata (which is part of the xml representing a directory
328 """creates an empty linkinfo instance"""
def read(self, linkinfo_node):
    """read in the linkinfo metadata from the <linkinfo> element passed as
    argument.
    If the passed element is None, the method does nothing.
    """
    if linkinfo_node == None:
        # early-return body elided in excerpt
    # copy all link attributes straight off the XML element
    self.project = linkinfo_node.get('project')
    self.package = linkinfo_node.get('package')
    self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
    self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
    self.srcmd5 = linkinfo_node.get('srcmd5')
    self.error = linkinfo_node.get('error')
    self.rev = linkinfo_node.get('rev')
    self.baserev = linkinfo_node.get('baserev')
355 """returns True if the linkinfo is not empty, otherwise False"""
356 if self.xsrcmd5 or self.lsrcmd5:
def isexpanded(self):
    """returns True if the package is an expanded link"""
    # an expanded link carries lsrcmd5 but no xsrcmd5
    if self.lsrcmd5 and not self.xsrcmd5:
        # return statements elided in this excerpt
367 """returns True if the link is in error state (could not be applied)"""
373 """return an informatory string representation"""
374 if self.islink() and not self.isexpanded():
375 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
376 % (self.project, self.package, self.xsrcmd5, self.rev)
377 elif self.islink() and self.isexpanded():
379 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
380 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
382 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
383 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
389 """represent a project directory, holding packages"""
def __init__(self, dir, getPackageList=True, progress_obj=None):
    """Initialize from the checked-out project directory *dir*.

    getPackageList -- when True, fetch the server-side package list
    progress_obj   -- optional progress reporter handed down to Package
    """
    # NOTE(review): sampled excerpt -- some lines are elided (e.g. the
    # self.dir assignment and the if/else around the two
    # pacs_available assignments below).
    self.absdir = os.path.abspath(dir)
    self.progress_obj = progress_obj
    self.name = store_read_project(self.dir)
    self.apiurl = store_read_apiurl(self.dir)
    self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
    self.pacs_available = []
    if conf.config['do_package_tracking']:
        self.pac_root = self.read_packages().getroot()
        self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
        self.pacs_excluded = [ i for i in os.listdir(self.dir)
                               for j in conf.config['exclude_glob']
                               if fnmatch.fnmatch(i, j) ]
        self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
        # store all broken packages (e.g. packages which where removed by a non-osc cmd)
        # in the self.pacs_broken list
        self.pacs_broken = []
        for p in self.pacs_have:
            if not os.path.isdir(os.path.join(self.absdir, p)):
                # all states will be replaced with the '!'-state
                # (except it is already marked as deleted ('D'-state))
                self.pacs_broken.append(p)
    self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
    self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
def checkout_missing_pacs(self, expand_link=False):
    """Check out every package that exists on the server but not in
    the local working copy."""
    for pac in self.pacs_missing:
        if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
            # pac is not under version control but a local file/dir exists
            msg = 'can\'t add package \'%s\': Object already exists' % pac
            raise oscerr.PackageExists(self.name, pac, msg)
        print 'checking out new package %s' % pac
        checkout_package(self.apiurl, self.name, pac, \
                         pathname=getTransActPath(os.path.join(self.dir, pac)), \
                         prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
def set_state(self, pac, state):
    """Set *pac*'s tracking state, creating its entry when missing."""
    node = self.get_package_node(pac)
    # a no-node guard (elided in excerpt) separates these two paths:
    self.new_package_entry(pac, state)
    node.attrib['state'] = state
def get_package_node(self, pac):
    """Return the <package> tracking node for *pac*."""
    for node in self.pac_root.findall('package'):
        if pac == node.get('name'):
            # return statements elided in this excerpt
def del_package_node(self, pac):
    """Remove *pac*'s <package> node from the tracking tree."""
    for node in self.pac_root.findall('package'):
        if pac == node.get('name'):
            self.pac_root.remove(node)
def get_state(self, pac):
    """Return the tracked state character of *pac*."""
    node = self.get_package_node(pac)
    # a None-node guard (elided in excerpt) precedes this:
    return node.get('state')
def new_package_entry(self, name, state):
    """Append a new <package name=... state=...> tracking entry."""
    ET.SubElement(self.pac_root, 'package', name=name, state=state)
def read_packages(self):
    """Return the parsed _packages tracking file; when it is absent or
    empty, rebuild it by scanning existing checkouts."""
    packages_file = os.path.join(self.absdir, store, '_packages')
    if os.path.isfile(packages_file) and os.path.getsize(packages_file):
        return ET.parse(packages_file)
    # scan project for existing packages and migrate them
    # NOTE(review): cur_pacs initialisation is elided in this excerpt.
    for data in os.listdir(self.dir):
        pac_dir = os.path.join(self.absdir, data)
        # we cannot use self.pacs_available because we cannot guarantee that the package list
        # was fetched from the server
        if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
           and Package(pac_dir).name == data:
            cur_pacs.append(ET.Element('package', name=data, state=' '))
    store_write_initial_packages(self.absdir, self.name, cur_pacs)
    return ET.parse(os.path.join(self.absdir, store, '_packages'))
def write_packages(self):
    """Persist the in-memory tracking tree to the _packages file."""
    # TODO: should we only modify the existing file instead of overwriting?
    ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
def addPackage(self, pac):
    """Put *pac* under version control ('A' state).

    Raises OscIOError for names matching exclude_glob and
    PackageExists when the package is already tracked."""
    for i in conf.config['exclude_glob']:
        if fnmatch.fnmatch(pac, i):
            msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
            raise oscerr.OscIOError(None, msg)
    state = self.get_state(pac)
    if state == None or state == 'D':
        self.new_package_entry(pac, 'A')
        self.write_packages()
        # sometimes the new pac doesn't exist in the list because
        # it would take too much time to update all data structs regularly
        if pac in self.pacs_unvers:
            self.pacs_unvers.remove(pac)
    # else-branch (guard elided in excerpt): already under version control
    raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
def delPackage(self, pac, force = False):
    """Mark package object *pac* as deleted (or remove it outright,
    depending on its tracked state); *force* discards local changes."""
    # NOTE(review): sampled excerpt -- several guards/else-branches
    # (can_delete bookkeeping, the 'A'/untracked state branches) are
    # elided, so the indentation below is approximate.
    state = self.get_state(pac.name)
    if state == ' ' or state == 'D':
        # del_files initialisation elided
        for file in pac.filenamelist + pac.filenamelist_unvers:
            filestate = pac.status(file)
            if filestate == 'M' or filestate == 'C' or \
               filestate == 'A' or filestate == '?':
                # can_delete handling elided
            del_files.append(file)
        if can_delete or force:
            for file in del_files:
                pac.delete_localfile(file)
                if pac.status(file) != '?':
                    pac.delete_storefile(file)
                    # this is not really necessary
                    pac.put_on_deletelist(file)
                    print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
            print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
            pac.write_deletelist()
            self.set_state(pac.name, 'D')
            self.write_packages()
        print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
        delete_dir(pac.absdir)
        self.del_package_node(pac.name)
        self.write_packages()
        print statfrmt('D', pac.name)
    print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
    print 'package is not under version control'
    print 'unsupported state'
def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
    """Update the named packages, or the whole project when *pacs* is
    empty; link (un)expansion is controlled by expand_link /
    unexpand_link."""
    # NOTE(review): sampled excerpt -- many lines (the explicit-pacs
    # loop, try/finally scaffolding, rev-selection branches) are
    # elided; indentation below is approximate.
    Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
    # we need to make sure that the _packages file will be written (even if an exception
    # occurs)
    # update complete project
    # packages which no longer exists upstream
    upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
    for pac in upstream_del:
        p = Package(os.path.join(self.dir, pac))
        self.delPackage(p, force = True)
        delete_storedir(p.storedir)
        self.pac_root.remove(self.get_package_node(p.name))
        self.pacs_have.remove(pac)
    for pac in self.pacs_have:
        state = self.get_state(pac)
        if pac in self.pacs_broken:
            if self.get_state(pac) != 'A':
                checkout_package(self.apiurl, self.name, pac,
                                 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
                                 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
        p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
        if expand_link and p.islink() and not p.isexpanded():
            # rev selection: the three assignments below belong to
            # alternative branches whose guards are elided
            rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
            rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
            rev = p.linkinfo.xsrcmd5
            print 'Expanding to rev', rev
        elif unexpand_link and p.islink() and p.isexpanded():
            rev = p.linkinfo.lsrcmd5
            print 'Unexpanding to rev', rev
        elif p.islink() and p.isexpanded():
            # rev selection elided
        print 'Updating %s' % p.name
        p.update(rev, service_files)
        # TODO: Package::update has to fixed to behave like svn does
        if pac in self.pacs_broken:
            checkout_package(self.apiurl, self.name, pac,
                             pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
                             prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
            Package(os.path.join(self.dir, pac, progress_obj=self.progress_obj)).update()
        elif state == 'A' and pac in self.pacs_available:
            # file/dir called pac already exists and is under version control
            msg = 'can\'t add package \'%s\': Object already exists' % pac
            raise oscerr.PackageExists(self.name, pac, msg)
        print 'unexpected state.. package \'%s\'' % pac
    self.checkout_missing_pacs(expand_link=not unexpand_link)
    self.write_packages()
def commit(self, pacs = (), msg = '', files = {}, validators = None):
    """Commit the given packages (or every tracked package when *pacs*
    is empty) to the server."""
    # NOTE(review): sampled excerpt -- the per-package loop and state
    # dispatch guards of the first half are elided; indentation is
    # approximate. Also note the mutable default argument files={}
    # (shared across calls).
    if files.has_key(pac):
        # per-package todo assignment elided
    state = self.get_state(pac)
    # state 'A' path:
    self.commitNewPackage(pac, msg, todo)
    # state 'D' path:
    self.commitDelPackage(pac)
    # display the correct dir when sending the changes
    if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
        # Package constructed from os.curdir (elided)
    p = Package(os.path.join(self.dir, pac))
    p.commit(msg, validators=validators)
    elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
    print 'osc: \'%s\' is not under version control' % pac
    elif pac in self.pacs_broken:
    print 'osc: \'%s\' package not found' % pac
    self.commitExtPackage(pac, msg, todo)
    self.write_packages()
    # ---- whole-project commit path ----
    # if we have packages marked as '!' we cannot commit
    for pac in self.pacs_broken:
        if self.get_state(pac) != 'D':
            msg = 'commit failed: package \'%s\' is missing' % pac
            raise oscerr.PackageMissing(self.name, pac, msg)
    for pac in self.pacs_have:
        state = self.get_state(pac)
        # state dispatch guards (' ', 'D', 'A') elided:
        Package(os.path.join(self.dir, pac)).commit(msg, validators=validators)
        self.commitDelPackage(pac)
        self.commitNewPackage(pac, msg)
    self.write_packages()
def commitNewPackage(self, pac, msg = '', files = []):
    """creates and commits a new package if it does not exist on the server"""
    # NOTE(review): excerpt -- the else-branch scaffolding and the rest
    # of the edit_meta() keyword arguments are elided. Also note the
    # mutable default argument files=[] (shared across calls).
    if pac in self.pacs_available:
        print 'package \'%s\' already exists' % pac
    user = conf.get_apiurl_usr(self.apiurl)
    edit_meta(metatype='pkg',
              path_args=(quote_plus(self.name), quote_plus(pac)),
    # display the correct dir when sending the changes
    if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
        # Package constructed from os.curdir (elided)
    p = Package(os.path.join(self.dir, pac))
    print statfrmt('Sending', os.path.normpath(p.dir))
    self.set_state(pac, ' ')
def commitDelPackage(self, pac):
    """deletes a package on the server and in the working copy"""
    # NOTE(review): excerpt -- try/except scaffolding and else-branches
    # are elided; indentation is approximate.
    # display the correct dir when sending the changes
    if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
        # os.pardir variant of pac_dir elided
    pac_dir = os.path.join(self.dir, pac)
    p = Package(os.path.join(self.dir, pac))
    #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
    delete_storedir(p.storedir)
    pac_dir = os.path.join(self.dir, pac)
    #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
    print statfrmt('Deleting', getTransActPath(pac_dir))
    delete_package(self.apiurl, self.name, pac)
    self.del_package_node(pac)
def commitExtPackage(self, pac, msg, files = []):
    """commits a package from an external project"""
    # NOTE(review): excerpt -- several branches and the remaining
    # edit_meta() keyword arguments are elided. Also note the mutable
    # default argument files=[] (shared across calls).
    if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
        # pac_path = '.' variant elided
    pac_path = os.path.join(self.dir, pac)
    # read the package's own store -- it may point at another project/apiurl
    project = store_read_project(pac_path)
    package = store_read_package(pac_path)
    apiurl = store_read_apiurl(pac_path)
    if meta_exists(metatype='pkg',
                   path_args=(quote_plus(project), quote_plus(package)),
                   create_new=False, apiurl=apiurl):
        p = Package(pac_path)
    # package meta does not exist yet -- create it first (guard elided)
    user = conf.get_apiurl_usr(self.apiurl)
    edit_meta(metatype='pkg',
              path_args=(quote_plus(project), quote_plus(package)),
    p = Package(pac_path)
742 r.append('*****************************************************')
743 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
744 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
745 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
746 r.append('*****************************************************')
752 """represent a package (its directory) and read/keep/write its metadata"""
def __init__(self, workingdir, progress_obj=None, limit_size=None, meta=None):
    """Initialize from the checked-out package directory *workingdir*."""
    self.dir = workingdir
    self.absdir = os.path.abspath(self.dir)
    self.storedir = os.path.join(self.absdir, store)
    self.progress_obj = progress_obj
    self.limit_size = limit_size
    if limit_size and limit_size == 0:
        # NOTE(review): as written this condition can never hold --
        # 0 is falsy, so `limit_size and limit_size == 0` is always
        # False. TODO confirm intent against the full source.
        self.limit_size = None
    check_store_version(self.dir)
    self.prjname = store_read_project(self.dir)
    self.name = store_read_package(self.dir)
    self.apiurl = store_read_apiurl(self.dir)
    self.update_datastructs()
    self.todo_delete = []
776 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
777 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
def addfile(self, n):
    """Schedule file *n* for addition; mirrors it into the store.
    (Further bookkeeping lines are elided in this excerpt.)"""
    st = os.stat(os.path.join(self.dir, n))
    shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
def delete_file(self, n, force=False):
    """deletes a file if possible and marks the file as deleted"""
    # NOTE(review): excerpt -- some guards and the success-path return
    # value are elided.
    state = self.status(n)
    # refuse to delete locally modified/new files unless forced
    if state in ['?', 'A', 'M'] and not force:
        return (False, state)
    self.delete_localfile(n)
    self.put_on_deletelist(n)
    self.write_deletelist()
    self.delete_storefile(n)
def delete_storefile(self, n):
    """Best-effort removal of *n*'s store copy (except-clause elided
    in this excerpt)."""
    try: os.unlink(os.path.join(self.storedir, n))
def delete_localfile(self, n):
    """Best-effort removal of *n* from the working copy (except-clause
    elided in this excerpt)."""
    try: os.unlink(os.path.join(self.dir, n))
def put_on_deletelist(self, n):
    """Record *n* in the in-memory to-be-deleted list (idempotent)."""
    if n not in self.to_be_deleted:
        self.to_be_deleted.append(n)
def put_on_conflictlist(self, n):
    """Record *n* in the in-memory conflict list (idempotent)."""
    if n not in self.in_conflict:
        self.in_conflict.append(n)
def clear_from_conflictlist(self, n):
    """delete an entry from the file, and remove the file if it would be empty"""
    # NOTE(review): excerpt -- try/except scaffolding around the
    # unlink calls is elided.
    if n in self.in_conflict:
        filename = os.path.join(self.dir, n)
        storefilename = os.path.join(self.storedir, n)
        myfilename = os.path.join(self.dir, n + '.mine')
        if self.islinkrepair() or self.ispulled():
            upfilename = os.path.join(self.dir, n + '.new')
        # else-branch (guard elided): regular revision suffix
        upfilename = os.path.join(self.dir, n + '.r' + self.rev)
        os.unlink(myfilename)
        # the working copy may be updated, so the .r* ending may be obsolete...
        os.unlink(upfilename)
        if self.islinkrepair() or self.ispulled():
            os.unlink(os.path.join(self.dir, n + '.old'))
        self.in_conflict.remove(n)
        self.write_conflictlist()
def write_meta_mode(self):
    """Persist (or clear) the _meta_mode marker in the store.
    (Branch guards and file writing are elided in this excerpt.)"""
    fname = os.path.join(self.storedir, '_meta_mode')
    os.unlink(os.path.join(self.storedir, '_meta_mode'))
def write_sizelimit(self):
    """Persist the size limit to the store's _size_limit file, or
    remove the file when no meaningful limit is set."""
    # NOTE(review): since 0 is falsy, this condition only holds for
    # negative limits; the file open and try/except are elided.
    if self.size_limit and self.size_limit <= 0:
        os.unlink(os.path.join(self.storedir, '_size_limit'))
    fname = os.path.join(self.storedir, '_size_limit')
    f.write(str(self.size_limit))
def write_deletelist(self):
    """Persist the to-be-deleted list to the store, removing the file
    when the list is empty."""
    # NOTE(review): the file open and try/except are elided.
    if len(self.to_be_deleted) == 0:
        os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
    fname = os.path.join(self.storedir, '_to_be_deleted')
    f.write('\n'.join(self.to_be_deleted))
def delete_source_file(self, n):
    """delete a local source file (working copy and store copy)"""
    self.delete_localfile(n)
    self.delete_storefile(n)
def delete_remote_source_file(self, n):
    """delete a remote source file (e.g. from the server)"""
    # NOTE(review): the query setup and the DELETE request are elided
    # in this excerpt.
    u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
def put_source_file(self, n):
    """Upload file *n* to the server and refresh its store copy."""
    # escaping '+' in the URL path (note: not in the URL query string) is
    # only a workaround for ruby on rails, which swallows it otherwise
    u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
    http_PUT(u, file = os.path.join(self.dir, n))
    shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
def commit(self, msg='', validators=None):
    """Commit local modifications of this package to the server.

    msg        -- commit log message
    validators -- directory of executable pre-commit validators
    """
    # commit only if the upstream revision is the same as the working copy's
    upstream_rev = self.latest_rev()
    if self.rev != upstream_rev:
        raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
    # NOTE(review): sampled excerpt -- numerous lines (guards,
    # else-branches, the actual HTTP commit request and its response
    # handling) are elided; indentation below is approximate.
    self.todo = self.filenamelist_unvers + self.filenamelist
    pathn = getTransActPath(self.dir)
    # run executable validators; a failing validator aborts the commit
    for validator in sorted(os.listdir(validators)):
        fn=validators+"/"+validator
        if S_ISREG(mode[ST_MODE]):
            p = subprocess.Popen([fn], close_fds=True)
            raise oscerr.RuntimeError(p.stdout, validator )
    have_conflicts = False
    for filename in self.todo:
        if not filename.startswith('_service:') and not filename.startswith('_service_'):
            st = self.status(filename)
            self.todo.remove(filename)
            elif st == 'A' or st == 'M':
            self.todo_send.append(filename)
            print statfrmt('Sending', os.path.join(pathn, filename))
            self.todo_delete.append(filename)
            print statfrmt('Deleting', os.path.join(pathn, filename))
            have_conflicts = True
    print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
    if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
        print 'nothing to do for package %s' % self.name
    if self.islink() and self.isexpanded():
        # resolve the link into the upload revision
        # XXX: do this always?
        query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
        u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
    print 'Transmitting file data ',
    for filename in self.todo_delete:
        # do not touch local files on commit --
        # delete remotely instead
        self.delete_remote_source_file(filename)
        self.to_be_deleted.remove(filename)
    for filename in self.todo_send:
        sys.stdout.write('.')
        self.put_source_file(filename)
    # all source files are committed - now comes the log
    query = { 'cmd' : 'commit',
              'user' : conf.get_apiurl_usr(self.apiurl),
    if self.islink() and self.isexpanded():
        query['keeplink'] = '1'
        if conf.config['linkcontrol'] or self.isfrozen():
            query['linkrev'] = self.linkinfo.srcmd5
        # pulled-link branch (guard elided):
        query['repairlink'] = '1'
        query['linkrev'] = self.get_pulled_srcmd5()
    if self.islinkrepair():
        query['repairlink'] = '1'
    u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
    # delete upload revision
    query = { 'cmd': 'deleteuploadrev' }
    u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
    # parse the commit response to learn the new revision
    root = ET.parse(f).getroot()
    self.rev = int(root.get('rev'))
    print 'Committed revision %s.' % self.rev
    # clean up the pulled / linkrepair markers
    os.unlink(os.path.join(self.storedir, '_pulled'))
    if self.islinkrepair():
        os.unlink(os.path.join(self.storedir, '_linkrepair'))
        self.linkrepair = False
        # XXX: mark package as invalid?
        print 'The source link has been repaired. This directory can now be removed.'
    if self.islink() and self.isexpanded():
        self.update_local_filesmeta(revision=self.latest_rev())
    self.update_local_filesmeta()
    self.write_deletelist()
    self.update_datastructs()
    if self.filenamelist.count('_service'):
        print 'The package contains a source service.'
        for filename in self.todo:
            if filename.startswith('_service:') and os.path.exists(filename):
                os.unlink(filename) # remove local files
    print_request_list(self.apiurl, self.prjname, self.name)
def write_conflictlist(self):
    """Persist the conflict list to the store's _in_conflict file,
    removing the file when the list is empty (try/except elided)."""
    if len(self.in_conflict) == 0:
        os.unlink(os.path.join(self.storedir, '_in_conflict'))
    fname = os.path.join(self.storedir, '_in_conflict')
    f = open(fname, 'w')
    f.write('\n'.join(self.in_conflict))
def updatefile(self, n, revision):
    """Fetch file *n* at *revision* into the working copy and mirror
    it into the store."""
    filename = os.path.join(self.dir, n)
    storefilename = os.path.join(self.storedir, n)
    mtime = self.findfilebyname(n).mtime
    get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename,
                    revision=revision, progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
    shutil.copyfile(filename, storefilename)
def mergefile(self, n):
    """Three-way merge of file *n* against its incoming revision;
    binary files and failed merges are put into conflict state."""
    # NOTE(review): excerpt -- return statements and the diff3
    # exit-code guards are elided; indentation is approximate.
    filename = os.path.join(self.dir, n)
    storefilename = os.path.join(self.storedir, n)
    myfilename = os.path.join(self.dir, n + '.mine')
    upfilename = os.path.join(self.dir, n + '.r' + self.rev)
    os.rename(filename, myfilename)
    mtime = self.findfilebyname(n).mtime
    get_source_file(self.apiurl, self.prjname, self.name, n,
                    revision=self.rev, targetfilename=upfilename,
                    progress_obj=self.progress_obj, mtime=mtime, meta=self.meta)
    if binary_file(myfilename) or binary_file(upfilename):
        # don't merge binaries: take upstream and flag a conflict
        shutil.copyfile(upfilename, filename)
        shutil.copyfile(upfilename, storefilename)
        self.in_conflict.append(n)
        self.write_conflictlist()
    # diff3 OPTIONS... MINE OLDER YOURS
    merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
    # we would rather use the subprocess module, but it is not available before 2.4
    ret = subprocess.call(merge_cmd, shell=True)
    # "An exit status of 0 means `diff3' was successful, 1 means some
    # conflicts were found, and 2 means trouble."
    # ret == 0 branch (guard elided):
    # merge was successful... clean up
    shutil.copyfile(upfilename, storefilename)
    os.unlink(upfilename)
    os.unlink(myfilename)
    # ret == 1 branch (guard elided):
    # unsuccessful merge
    shutil.copyfile(upfilename, storefilename)
    self.in_conflict.append(n)
    self.write_conflictlist()
    # ret >= 2 branch (guard elided):
    print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
    print >>sys.stderr, 'the command line was:'
    print >>sys.stderr, merge_cmd
def update_local_filesmeta(self, revision=None):
    """
    Update the local _files file in the store.
    It is replaced with the version pulled from upstream.
    """
    meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size, meta=self.meta))
    store_write_string(self.absdir, '_files', meta)
def update_datastructs(self):
    """
    Update the internal data structures if the local _files
    file has changed (e.g. update_local_filesmeta() has been
    called).
    """
    # NOTE(review): excerpt -- some initialisations (filelist, skipped)
    # and the try/except separating the two File(...) constructions
    # below are elided.
    files_tree = read_filemeta(self.dir)
    files_tree_root = files_tree.getroot()
    self.rev = files_tree_root.get('rev')
    self.srcmd5 = files_tree_root.get('srcmd5')
    self.linkinfo = Linkinfo()
    self.linkinfo.read(files_tree_root.find('linkinfo'))
    self.filenamelist = []
    for node in files_tree_root.findall('entry'):
        # the md5 argument line is elided here
        f = File(node.get('name'),
                 int(node.get('size')),
                 int(node.get('mtime')))
        if node.get('skipped'):
            self.skipped.append(f.name)
        # okay, a very old version of _files, which didn't contain any metadata yet...
        f = File(node.get('name'), '', 0, 0)
        self.filelist.append(f)
        self.filenamelist.append(f.name)
    self.to_be_deleted = read_tobedeleted(self.dir)
    self.in_conflict = read_inconflict(self.dir)
    self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
    self.size_limit = read_sizelimit(self.dir)
    self.meta = read_meta_mode(self.dir)
    # gather unversioned files, but ignore some stuff
    self.excluded = [ i for i in os.listdir(self.dir)
                      for j in conf.config['exclude_glob']
                      if fnmatch.fnmatch(i, j) ]
    self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
                                 if i not in self.excluded
                                 if i not in self.filenamelist ]
1146 """tells us if the package is a link (has 'linkinfo').
1147 A package with linkinfo is a package which links to another package.
1148 Returns True if the package is a link, otherwise False."""
1149 return self.linkinfo.islink()
def isexpanded(self):
    """tells us if the package is a link which is expanded.
    Returns True if the package is expanded, otherwise False."""
    return self.linkinfo.isexpanded()
def islinkrepair(self):
    """tells us if we are repairing a broken source link."""
    return self.linkrepair
1161 """tells us if we have pulled a link."""
1162 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1165 """tells us if the link is frozen."""
1166 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
def get_pulled_srcmd5(self):
    """Return the srcmd5 recorded in the store's _pulled file.
    (The return statement is elided in this excerpt.)"""
    for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
        pulledrev = line.strip()
def haslinkerror(self):
    """
    Returns True if the link is broken otherwise False.
    If the package is not a link it returns False.
    """
    return self.linkinfo.haserror()
def linkerror(self):
    """
    Returns an error message if the link is broken otherwise None.
    If the package is not a link it returns None.
    """
    return self.linkinfo.error
1188 def update_local_pacmeta(self):
1190 Update the local _meta file in the store.
1191 It is replaced with the version pulled from upstream.
1193 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1194 store_write_string(self.absdir, '_meta', meta)
1196 def findfilebyname(self, n):
1197 for i in self.filelist:
1201 def status(self, n):
1205 file storefile file present STATUS
1206 exists exists in _files
1209 x x x ' ' if digest differs: 'M'
1210 and if in conflicts file: 'C'
1212 x - x 'D' and listed in _to_be_deleted
1214 - x - 'D' (when file in working copy is already deleted)
1215 - - x 'F' (new in repo, but not yet in working copy)
1220 known_by_meta = False
1222 exists_in_store = False
1223 if n in self.filenamelist:
1224 known_by_meta = True
1225 if os.path.exists(os.path.join(self.absdir, n)):
1227 if os.path.exists(os.path.join(self.storedir, n)):
1228 exists_in_store = True
1231 if n in self.skipped:
1233 elif exists and not exists_in_store and known_by_meta:
1235 elif n in self.to_be_deleted:
1237 elif n in self.in_conflict:
1239 elif exists and exists_in_store and known_by_meta:
1240 #print self.findfilebyname(n)
1241 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1245 elif exists and not exists_in_store and not known_by_meta:
1247 elif exists and exists_in_store and not known_by_meta:
1249 elif not exists and exists_in_store and known_by_meta:
1251 elif not exists and not exists_in_store and known_by_meta:
1253 elif not exists and exists_in_store and not known_by_meta:
1255 elif not exists and not exists_in_store and not known_by_meta:
1256 # this case shouldn't happen (except there was a typo in the filename etc.)
1257 raise IOError('osc: \'%s\' is not under version control' % n)
1261 def comparePac(self, cmp_pac):
1263 This method compares the local filelist with
1264 the filelist of the passed package to see which files
1265 were added, removed and changed.
1272 for file in self.filenamelist+self.filenamelist_unvers:
1273 state = self.status(file)
1274 if file in self.skipped:
1276 if state == 'A' and (not file in cmp_pac.filenamelist):
1277 added_files.append(file)
1278 elif file in cmp_pac.filenamelist and state == 'D':
1279 removed_files.append(file)
1280 elif state == ' ' and not file in cmp_pac.filenamelist:
1281 added_files.append(file)
1282 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1283 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1284 changed_files.append(file)
1285 for file in cmp_pac.filenamelist:
1286 if not file in self.filenamelist:
1287 removed_files.append(file)
1288 removed_files = set(removed_files)
1290 return changed_files, added_files, removed_files
1292 def merge(self, otherpac):
1293 self.todo += otherpac.todo
1307 '\n '.join(self.filenamelist),
1315 def read_meta_from_spec(self, spec = None):
1320 # scan for spec files
1321 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1322 if len(speclist) == 1:
1323 specfile = speclist[0]
1324 elif len(speclist) > 1:
1325 print 'the following specfiles were found:'
1326 for file in speclist:
1328 print 'please specify one with --specfile'
1331 print 'no specfile was found - please specify one ' \
1335 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1336 self.summary = data['Summary']
1337 self.url = data['Url']
1338 self.descr = data['%description']
1341 def update_package_meta(self, force=False):
1343 for the updatepacmetafromspec subcommand
1344 argument force supress the confirm question
1347 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1349 root = ET.fromstring(m)
1350 root.find('title').text = self.summary
1351 root.find('description').text = ''.join(self.descr)
1352 url = root.find('url')
1354 url = ET.SubElement(root, 'url')
1357 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1358 mf = metafile(u, ET.tostring(root))
1361 print '*' * 36, 'old', '*' * 36
1363 print '*' * 36, 'new', '*' * 36
1364 print ET.tostring(root)
1366 repl = raw_input('Write? (y/N/e) ')
1377 def mark_frozen(self):
1378 store_write_string(self.absdir, '_frozenlink', '')
1380 print "The link in this package is currently broken. Checking"
1381 print "out the last working version instead; please use 'osc pull'"
1382 print "to repair the link."
1385 def unmark_frozen(self):
1386 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1387 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1389 def latest_rev(self):
1390 if self.islinkrepair():
1391 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1392 elif self.islink() and self.isexpanded():
1393 if self.isfrozen() or self.ispulled():
1394 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1397 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1400 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1402 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1405 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1408 def update(self, rev = None, service_files = False, limit_size = None):
1409 # save filelist and (modified) status before replacing the meta file
1410 saved_filenames = self.filenamelist
1411 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1415 self.limit_size = limit_size
1417 self.limit_size = read_sizelimit(self.dir)
1418 self.update_local_filesmeta(rev)
1419 self = Package(self.dir, progress_obj=self.progress_obj)
1421 # which files do no longer exist upstream?
1422 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1424 pathn = getTransActPath(self.dir)
1426 for filename in saved_filenames:
1427 if filename in self.skipped:
1429 if not filename.startswith('_service:') and filename in disappeared:
1430 print statfrmt('D', os.path.join(pathn, filename))
1431 # keep file if it has local modifications
1432 if oldp.status(filename) == ' ':
1433 self.delete_localfile(filename)
1434 self.delete_storefile(filename)
1436 for filename in self.filenamelist:
1437 if filename in self.skipped:
1440 state = self.status(filename)
1441 if not service_files and filename.startswith('_service:'):
1443 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1444 # no merge necessary... local file is changed, but upstream isn't
1446 elif state == 'M' and filename in saved_modifiedfiles:
1447 status_after_merge = self.mergefile(filename)
1448 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1450 self.updatefile(filename, rev)
1451 print statfrmt('U', os.path.join(pathn, filename))
1453 self.updatefile(filename, rev)
1454 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1456 self.updatefile(filename, rev)
1457 print statfrmt('A', os.path.join(pathn, filename))
1458 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1459 self.updatefile(filename, rev)
1460 self.delete_storefile(filename)
1461 print statfrmt('U', os.path.join(pathn, filename))
1465 self.update_local_pacmeta()
1467 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1468 print 'At revision %s.' % self.rev
1470 if not service_files:
1471 self.run_source_services()
1473 def run_source_services(self):
1474 if self.filenamelist.count('_service'):
1475 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1478 si.execute(self.absdir)
1480 def prepare_filelist(self):
1481 """Prepare a list of files, which will be processed by process_filelist
1482 method. This allows easy modifications of a file list in commit
1486 self.todo = self.filenamelist + self.filenamelist_unvers
1490 for f in [f for f in self.todo if not os.path.isdir(f)]:
1492 status = self.status(f)
1497 ret += "%s %s %s\n" % (action, status, f)
1500 # Edit a filelist for package \'%s\'
1502 # l, leave = leave a file as is
1503 # r, remove = remove a file
1504 # a, add = add a file
1506 # If you remove file from a list, it will be unchanged
1507 # If you remove all, commit will be aborted""" % self.name
1511 def edit_filelist(self):
1512 """Opens a package list in editor for editing. This allows easy
1513 modifications of it just by simple text editing
1517 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1518 f = os.fdopen(fd, 'w')
1519 f.write(self.prepare_filelist())
1521 mtime_orig = os.stat(filename).st_mtime
1524 run_editor(filename)
1525 mtime = os.stat(filename).st_mtime
1526 if mtime_orig < mtime:
1527 filelist = open(filename).readlines()
1531 raise oscerr.UserAbort()
1533 return self.process_filelist(filelist)
1535 def process_filelist(self, filelist):
1536 """Process a filelist - it add/remove or leave files. This depends on
1537 user input. If no file is processed, it raises an ValueError
1541 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1543 foo = line.split(' ')
1545 action, state, name = (foo[0], ' ', foo[3])
1547 action, state, name = (foo[0], foo[1], foo[2])
1550 action = action.lower()
1553 if action in ('r', 'remove'):
1554 if self.status(name) == '?':
1556 if name in self.todo:
1557 self.todo.remove(name)
1559 self.delete_file(name, True)
1560 elif action in ('a', 'add'):
1561 if self.status(name) != '?':
1562 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1565 elif action in ('l', 'leave'):
1568 raise ValueError("Unknow action `%s'" % action)
1571 raise ValueError("Empty filelist")
1574 """for objects to represent the review state in a request"""
1575 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1577 self.by_user = by_user
1578 self.by_group = by_group
1581 self.comment = comment
1584 """for objects to represent the "state" of a request"""
1585 def __init__(self, name=None, who=None, when=None, comment=None):
1589 self.comment = comment
1592 """represents an action"""
1593 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1595 self.src_project = src_project
1596 self.src_package = src_package
1597 self.src_rev = src_rev
1598 self.dst_project = dst_project
1599 self.dst_package = dst_package
1600 self.src_update = src_update
1603 """represent a request and holds its metadata
1604 it has methods to read in metadata from xml,
1605 different views, ..."""
1608 self.state = RequestState()
1611 self.last_author = None
1614 self.statehistory = []
1617 def read(self, root):
1618 self.reqid = int(root.get('id'))
1619 actions = root.findall('action')
1620 if len(actions) == 0:
1621 actions = [ root.find('submit') ] # for old style requests
1623 for action in actions:
1624 type = action.get('type', 'submit')
1626 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1627 if action.findall('source'):
1628 n = action.find('source')
1629 src_prj = n.get('project', None)
1630 src_pkg = n.get('package', None)
1631 src_rev = n.get('rev', None)
1632 if action.findall('target'):
1633 n = action.find('target')
1634 dst_prj = n.get('project', None)
1635 dst_pkg = n.get('package', None)
1636 if action.findall('options'):
1637 n = action.find('options')
1638 if n.findall('sourceupdate'):
1639 src_update = n.find('sourceupdate').text.strip()
1640 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1642 msg = 'invalid request format:\n%s' % ET.tostring(root)
1643 raise oscerr.APIError(msg)
1646 n = root.find('state')
1647 self.state.name, self.state.who, self.state.when \
1648 = n.get('name'), n.get('who'), n.get('when')
1650 self.state.comment = n.find('comment').text.strip()
1652 self.state.comment = None
1654 # read the review states
1655 for r in root.findall('review'):
1657 s.state = r.get('state')
1658 s.by_user = r.get('by_user')
1659 s.by_group = r.get('by_group')
1660 s.who = r.get('who')
1661 s.when = r.get('when')
1663 s.comment = r.find('comment').text.strip()
1666 self.reviews.append(s)
1668 # read the state history
1669 for h in root.findall('history'):
1671 s.name = h.get('name')
1672 s.who = h.get('who')
1673 s.when = h.get('when')
1675 s.comment = h.find('comment').text.strip()
1678 self.statehistory.append(s)
1679 self.statehistory.reverse()
1681 # read a description, if it exists
1683 n = root.find('description').text
1688 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1689 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1690 dst_prj, dst_pkg, src_update)
1693 def list_view(self):
1694 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1696 for a in self.actions:
1697 dst = "%s/%s" % (a.dst_project, a.dst_package)
1698 if a.src_package == a.dst_package:
1702 if a.type=="submit":
1703 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1704 if a.type=="change_devel":
1705 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1706 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1708 ret += '\n %s: %-50s %-20s ' % \
1709 (a.type, sr_source, dst)
1711 if self.statehistory and self.statehistory[0]:
1713 for h in self.statehistory:
1714 who.append("%s(%s)" % (h.who,h.name))
1716 ret += "\n From: %s" % (' -> '.join(who))
1718 txt = re.sub(r'[^[:isprint:]]', '_', self.descr)
1720 lines = txt.splitlines()
1721 wrapper = textwrap.TextWrapper( width = 80,
1722 initial_indent=' Descr: ',
1723 subsequent_indent=' ')
1724 ret += "\n" + wrapper.fill(lines[0])
1725 wrapper.initial_indent = ' '
1726 for line in lines[1:]:
1727 ret += "\n" + wrapper.fill(line)
    def __cmp__(self, other):
        # Order requests by their numeric id so lists of requests sort
        # chronologically (Python 2 comparison hook).
        return cmp(self.reqid, other.reqid)
1738 for action in self.actions:
1739 action_list=" %s: " % (action.type)
1740 if action.type=="submit":
1743 r="(r%s)" % (action.src_rev)
1745 if action.src_update:
1746 m="(%s)" % (action.src_update)
1747 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1748 if action.dst_package:
1749 action_list=action_list+"/%s" % ( action.dst_package )
1750 elif action.type=="delete":
1751 action_list=action_list+" %s" % ( action.dst_project )
1752 if action.dst_package:
1753 action_list=action_list+"/%s" % ( action.dst_package )
1754 elif action.type=="change_devel":
1755 action_list=action_list+" %s/%s developed in %s/%s" % \
1756 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1757 action_list=action_list+"\n"
1772 self.state.name, self.state.when, self.state.who,
1775 if len(self.reviews):
1776 reviewitems = [ '%-10s %s %s %s %s %s' \
1777 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1778 for i in self.reviews ]
1779 s += '\nReview: ' + '\n '.join(reviewitems)
1782 if len(self.statehistory):
1783 histitems = [ '%-10s %s %s' \
1784 % (i.name, i.when, i.who) \
1785 for i in self.statehistory ]
1786 s += '\nHistory: ' + '\n '.join(histitems)
1793 """format time as Apr 02 18:19
1795 depending on whether it is in the current year
1799 if time.localtime()[0] == time.localtime(t)[0]:
1801 return time.strftime('%b %d %H:%M',time.localtime(t))
1803 return time.strftime('%b %d %Y',time.localtime(t))
def is_project_dir(d):
    """Return True if directory *d* is an osc *project* working copy:
    its store contains a '_project' marker but no '_package' marker."""
    project_marker = os.path.join(d, store, '_project')
    package_marker = os.path.join(d, store, '_package')
    return os.path.exists(project_marker) and not os.path.exists(package_marker)
def is_package_dir(d):
    """Return True if directory *d* is an osc *package* working copy:
    its store contains both a '_project' and a '_package' marker."""
    project_marker = os.path.join(d, store, '_project')
    package_marker = os.path.join(d, store, '_package')
    return os.path.exists(project_marker) and os.path.exists(package_marker)
1815 def parse_disturl(disturl):
1816 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1817 revision), else raises an oscerr.WrongArgs exception
1820 m = DISTURL_RE.match(disturl)
1822 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1824 apiurl = m.group('apiurl')
1825 if apiurl.split('.')[0] != 'api':
1826 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1827 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
def parse_buildlogurl(buildlogurl):
    """Parse a build log url, returns a tuple (apiurl, project, package,
    repository, arch), else raises oscerr.WrongArgs exception"""
    # NOTE: the former 'global BUILDLOGURL_RE' declaration was removed; the
    # regex is only read here, never reassigned, so 'global' was a no-op.
    m = BUILDLOGURL_RE.match(buildlogurl)
    if not m:
        raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
    return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1842 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1843 This is handy to allow copy/paste a project/package combination in this form.
1845 Trailing slashes are removed before the split, because the split would
1846 otherwise give an additional empty string.
1854 def expand_proj_pack(args, idx=0, howmany=0):
1855 """looks for occurance of '.' at the position idx.
1856 If howmany is 2, both proj and pack are expanded together
1857 using the current directory, or none of them, if not possible.
1858 If howmany is 0, proj is expanded if possible, then, if there
1859 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1860 expanded, if possible.
1861 If howmany is 1, only proj is expanded if possible.
1863 If args[idx] does not exists, an implicit '.' is assumed.
1864 if not enough elements up to idx exist, an error is raised.
1866 See also parseargs(args), slash_split(args), findpacs(args)
1867 All these need unification, somehow.
1870 # print args,idx,howmany
1873 raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)
1875 if len(args) == idx:
1877 if args[idx+0] == '.':
1878 if howmany == 0 and len(args) > idx+1:
1879 if args[idx+1] == '.':
1881 # remove one dot and make sure to expand both proj and pack
1886 # print args,idx,howmany
1888 args[idx+0] = store_read_project('.')
1891 package = store_read_package('.')
1892 args.insert(idx+1, package)
1896 package = store_read_package('.')
1897 args.insert(idx+1, package)
1901 def findpacs(files, progress_obj=None):
1902 """collect Package objects belonging to the given files
1903 and make sure each Package is returned only once"""
1906 p = filedir_to_pac(f, progress_obj)
1909 if i.name == p.name:
1919 def filedir_to_pac(f, progress_obj=None):
1920 """Takes a working copy path, or a path to a file inside a working copy,
1921 and returns a Package object instance
1923 If the argument was a filename, add it onto the "todo" list of the Package """
1925 if os.path.isdir(f):
1927 p = Package(wd, progress_obj=progress_obj)
1929 wd = os.path.dirname(f) or os.curdir
1930 p = Package(wd, progress_obj=progress_obj)
1931 p.todo = [ os.path.basename(f) ]
1935 def read_filemeta(dir):
1937 r = ET.parse(os.path.join(dir, store, '_files'))
1938 except SyntaxError, e:
1939 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1940 'When parsing .osc/_files, the following error was encountered:\n'
1945 def read_tobedeleted(dir):
1947 fname = os.path.join(dir, store, '_to_be_deleted')
1949 if os.path.exists(fname):
1950 r = [ line.strip() for line in open(fname) ]
1955 def read_meta_mode(dir):
1957 fname = os.path.join(dir, store, '_meta_mode')
1959 if os.path.exists(fname):
1960 r = open(fname).readline()
1962 if r is None or not r == "true":
1966 def read_sizelimit(dir):
1968 fname = os.path.join(dir, store, '_size_limit')
1970 if os.path.exists(fname):
1971 r = open(fname).readline()
1973 if r is None or not r.isdigit():
1977 def read_inconflict(dir):
1979 fname = os.path.join(dir, store, '_in_conflict')
1981 if os.path.exists(fname):
1982 r = [ line.strip() for line in open(fname) ]
1987 def parseargs(list_of_args):
1988 """Convenience method osc's commandline argument parsing.
1990 If called with an empty tuple (or list), return a list containing the current directory.
1991 Otherwise, return a list of the arguments."""
1993 return list(list_of_args)
def statfrmt(statusletter, filename):
    """Return a one-line status report of the form '<letter> <filename>'."""
    line = '%s %s' % (statusletter, filename)
    return line
2002 def pathjoin(a, *p):
2003 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
2004 path = os.path.join(a, *p)
2005 if path.startswith('./'):
2010 def makeurl(baseurl, l, query=[]):
2011 """Given a list of path compoments, construct a complete URL.
2013 Optional parameters for a query string can be given as a list, as a
2014 dictionary, or as an already assembled string.
2015 In case of a dictionary, the parameters will be urlencoded by this
2016 function. In case of a list not -- this is to be backwards compatible.
2019 if conf.config['verbose'] > 1:
2020 print 'makeurl:', baseurl, l, query
2022 if type(query) == type(list()):
2023 query = '&'.join(query)
2024 elif type(query) == type(dict()):
2025 query = urlencode(query)
2027 scheme, netloc = urlsplit(baseurl)[0:2]
2028 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
2031 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
2032 """wrapper around urllib2.urlopen for error handling,
2033 and to support additional (PUT, DELETE) methods"""
2037 if conf.config['http_debug']:
2040 print '--', method, url
2042 if method == 'POST' and not file and not data:
2043 # adding data to an urllib2 request transforms it into a POST
2046 req = urllib2.Request(url)
2047 api_host_options = {}
2049 api_host_options = conf.get_apiurl_api_host_options(url)
2050 for header, value in api_host_options['http_headers']:
2051 req.add_header(header, value)
2053 # "external" request (url is no apiurl)
2056 req.get_method = lambda: method
2058 # POST requests are application/x-www-form-urlencoded per default
2059 # since we change the request into PUT, we also need to adjust the content type header
2060 if method == 'PUT' or (method == 'POST' and data):
2061 req.add_header('Content-Type', 'application/octet-stream')
2063 if type(headers) == type({}):
2064 for i in headers.keys():
2066 req.add_header(i, headers[i])
2068 if file and not data:
2069 size = os.path.getsize(file)
2071 data = open(file, 'rb').read()
2074 filefd = open(file, 'rb')
2076 if sys.platform[:3] != 'win':
2077 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
2079 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
2081 except EnvironmentError, e:
2083 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
2084 '\non a filesystem which does not support this.' % (e, file))
2085 elif hasattr(e, 'winerror') and e.winerror == 5:
2086 # falling back to the default io
2087 data = open(file, 'rb').read()
2091 if conf.config['debug']: print method, url
2093 old_timeout = socket.getdefaulttimeout()
2094 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2095 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2096 socket.setdefaulttimeout(timeout)
2098 fd = urllib2.urlopen(req, data=data)
2100 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2101 socket.setdefaulttimeout(old_timeout)
2102 if hasattr(conf.cookiejar, 'save'):
2103 conf.cookiejar.save(ignore_discard=True)
2105 if filefd: filefd.close()
# Convenience wrappers: one helper per HTTP verb, all delegating to
# http_request() which handles authentication and error reporting.

def http_GET(*args, **kwargs):
    """Perform an HTTP GET request via http_request()."""
    return http_request('GET', *args, **kwargs)

def http_POST(*args, **kwargs):
    """Perform an HTTP POST request via http_request()."""
    return http_request('POST', *args, **kwargs)

def http_PUT(*args, **kwargs):
    """Perform an HTTP PUT request via http_request()."""
    return http_request('PUT', *args, **kwargs)

def http_DELETE(*args, **kwargs):
    """Perform an HTTP DELETE request via http_request()."""
    return http_request('DELETE', *args, **kwargs)
2116 def init_project_dir(apiurl, dir, project):
2117 if not os.path.exists(dir):
2118 if conf.config['checkout_no_colon']:
2119 os.makedirs(dir) # helpful with checkout_no_colon
2122 if not os.path.exists(os.path.join(dir, store)):
2123 os.mkdir(os.path.join(dir, store))
2125 # print 'project=',project,' dir=',dir
2126 store_write_project(dir, project)
2127 store_write_apiurl(dir, apiurl)
2128 if conf.config['do_package_tracking']:
2129 store_write_initial_packages(dir, project, [])
2131 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=None):
2132 if not os.path.isdir(store):
2135 f = open('_project', 'w')
2136 f.write(project + '\n')
2138 f = open('_package', 'w')
2139 f.write(package + '\n')
2143 f = open('_meta_mode', 'w')
2148 f = open('_size_limit', 'w')
2149 f.write(str(limit_size))
2153 f = open('_files', 'w')
2154 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta)))
2158 ET.ElementTree(element=ET.Element('directory')).write('_files')
2160 f = open('_osclib_version', 'w')
2161 f.write(__store_version__ + '\n')
2164 store_write_apiurl(os.path.pardir, apiurl)
2170 def check_store_version(dir):
2171 versionfile = os.path.join(dir, store, '_osclib_version')
2173 v = open(versionfile).read().strip()
2178 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2179 if os.path.exists(os.path.join(dir, '.svn')):
2180 msg = msg + '\nTry svn instead of osc.'
2181 raise oscerr.NoWorkingCopy(msg)
2183 if v != __store_version__:
2184 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2185 # version is fine, no migration needed
2186 f = open(versionfile, 'w')
2187 f.write(__store_version__ + '\n')
2190 msg = 'The osc metadata of your working copy "%s"' % dir
2191 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2192 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2193 raise oscerr.WorkingCopyWrongVersion, msg
2196 def meta_get_packagelist(apiurl, prj, deleted=None):
2200 query['deleted'] = 1
2202 u = makeurl(apiurl, ['source', prj], query)
2204 root = ET.parse(f).getroot()
2205 return [ node.get('name') for node in root.findall('entry') ]
2208 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2209 """return a list of file names,
2210 or a list File() instances if verbose=True"""
2216 query['rev'] = revision
2218 query['rev'] = 'latest'
2220 u = makeurl(apiurl, ['source', prj, package], query=query)
2222 root = ET.parse(f).getroot()
2225 return [ node.get('name') for node in root.findall('entry') ]
2229 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2230 rev = root.get('rev')
2231 for node in root.findall('entry'):
2232 f = File(node.get('name'),
2234 int(node.get('size')),
2235 int(node.get('mtime')))
2241 def meta_get_project_list(apiurl, deleted):
2244 query['deleted'] = 1
2246 u = makeurl(apiurl, ['source'], query)
2248 root = ET.parse(f).getroot()
2249 return sorted([ node.get('name') for node in root ])
2252 def show_project_meta(apiurl, prj):
2253 url = makeurl(apiurl, ['source', prj, '_meta'])
2255 return f.readlines()
2258 def show_project_conf(apiurl, prj):
2259 url = makeurl(apiurl, ['source', prj, '_config'])
2261 return f.readlines()
2264 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2265 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2269 except urllib2.HTTPError, e:
2270 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2274 def show_package_meta(apiurl, prj, pac):
2275 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2278 return f.readlines()
2279 except urllib2.HTTPError, e:
2280 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2284 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2286 path.append('source')
2292 path.append('_attribute')
2294 path.append(attribute)
2297 query.append("with_default=1")
2299 query.append("with_project=1")
2300 url = makeurl(apiurl, path, query)
2303 return f.readlines()
2304 except urllib2.HTTPError, e:
2305 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2309 def show_develproject(apiurl, prj, pac):
2310 m = show_package_meta(apiurl, prj, pac)
2312 return ET.fromstring(''.join(m)).find('devel').get('project')
2317 def show_pattern_metalist(apiurl, prj):
2318 url = makeurl(apiurl, ['source', prj, '_pattern'])
2322 except urllib2.HTTPError, e:
2323 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2325 r = [ node.get('name') for node in tree.getroot() ]
2330 def show_pattern_meta(apiurl, prj, pattern):
2331 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2334 return f.readlines()
2335 except urllib2.HTTPError, e:
2336 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2341 """metafile that can be manipulated and is stored back after manipulation."""
2342 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2346 self.change_is_required = change_is_required
2347 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2348 f = os.fdopen(fd, 'w')
2349 f.write(''.join(input))
2351 self.hash_orig = dgst(self.filename)
2354 hash = dgst(self.filename)
2355 if self.change_is_required and hash == self.hash_orig:
2356 print 'File unchanged. Not saving.'
2357 os.unlink(self.filename)
2360 print 'Sending meta data...'
2361 # don't do any exception handling... it's up to the caller what to do in case
2363 http_PUT(self.url, file=self.filename)
2364 os.unlink(self.filename)
2370 run_editor(self.filename)
2374 except urllib2.HTTPError, e:
2375 error_help = "%d" % e.code
2376 if e.headers.get('X-Opensuse-Errorcode'):
2377 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2379 print >>sys.stderr, 'BuildService API error:', error_help
2380 # examine the error - we can't raise an exception because we might want
2383 if '<summary>' in data:
2384 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2385 input = raw_input('Try again? ([y/N]): ')
2386 if input not in ['y', 'Y']:
2392 if os.path.exists(self.filename):
2393 print 'discarding %s' % self.filename
2394 os.unlink(self.filename)
2397 # different types of metadata
2398 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2399 'template': new_project_templ,
2402 'pkg': { 'path' : 'source/%s/%s/_meta',
2403 'template': new_package_templ,
2406 'attribute': { 'path' : 'source/%s/%s/_meta',
2407 'template': new_attribute_templ,
2410 'prjconf': { 'path': 'source/%s/_config',
2414 'user': { 'path': 'person/%s',
2415 'template': new_user_template,
2418 'pattern': { 'path': 'source/%s/_pattern/%s',
2419 'template': new_pattern_template,
2424 def meta_exists(metatype,
2431 apiurl = conf.config['apiurl']
2432 url = make_meta_url(metatype, path_args, apiurl)
2434 data = http_GET(url).readlines()
2435 except urllib2.HTTPError, e:
2436 if e.code == 404 and create_new:
2437 data = metatypes[metatype]['template']
2439 data = StringIO(data % template_args).readlines()
2444 def make_meta_url(metatype, path_args=None, apiurl=None):
2446 apiurl = conf.config['apiurl']
2447 if metatype not in metatypes.keys():
2448 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2449 path = metatypes[metatype]['path']
2452 path = path % path_args
2454 return makeurl(apiurl, [path])
2457 def edit_meta(metatype,
2462 change_is_required=False,
2466 apiurl = conf.config['apiurl']
2468 data = meta_exists(metatype,
2471 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2475 change_is_required = True
2477 url = make_meta_url(metatype, path_args, apiurl)
2478 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
# Return the package's file-list XML (the "directory" listing) as a string.
# Query parameters control revision, link expansion and link revision; files
# larger than limit_size get a skipped="true" attribute so checkout can omit
# them. NOTE(review): parts of the query construction are elided from view.
2486 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=None):
2489 query['rev'] = revision
2491 query['rev'] = 'latest'
2493 query['linkrev'] = linkrev
2494 elif conf.config['linkcontrol']:
2495 query['linkrev'] = 'base'
2501 query['emptylink'] = 1
2502 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2504 # look for "too large" files according to size limit and mark them
2505 root = ET.fromstring(''.join(f.readlines()))
2506 for e in root.findall('entry'):
2507 size = e.get('size')
2508 if size and limit_size and int(size) > int(limit_size):
2509 e.set('skipped', 'true')
2510 return ET.tostring(root)
def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
    """Return the 'srcmd5' attribute of the package's file list on the server."""
    files_xml = ''.join(show_files_meta(apiurl, prj, pac, expand=expand, revision=revision))
    directory = ET.fromstring(files_xml)
    return directory.get('srcmd5')
# Return the expanded source md5 (xsrcmd5) of a linked package; only source
# link packages carry a <linkinfo> element in their file list. On a broken
# link, raises LinkExpandError. NOTE(review): the branches between the
# find() and the raise are elided from view.
2518 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2519 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2521 # only source link packages have a <linkinfo> element.
2522 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2530 raise oscerr.LinkExpandError(prj, pac, li.error)
def show_upstream_rev(apiurl, prj, pac):
    """Return the 'rev' attribute of the package's current file list."""
    meta = show_files_meta(apiurl, prj, pac)
    directory = ET.fromstring(''.join(meta))
    return directory.get('rev')
# Parse tags (e.g. 'Name', 'Version') and %-sections (e.g. '%description')
# out of an RPM spec file and return them in a dict. Tags are matched with a
# case-insensitive multi-line regex; sections are collected line-by-line
# until the next '%' line. NOTE(review): several lines (docstring close,
# args partitioning, try:, section end handling) are elided from view.
2539 def read_meta_from_spec(specfile, *args):
2540 import codecs, locale, re
2542 Read tags and sections from spec file. To read out
2543 a tag the passed argument mustn't end with a colon. To
2544 read out a section the passed argument must start with
2546 This method returns a dictionary which contains the
2550 if not os.path.isfile(specfile):
2551 raise IOError('\'%s\' is not a regular file' % specfile)
# first attempt the locale's preferred encoding; fall back to a raw read
# if the spec file is not valid in that encoding
2554 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2555 except UnicodeDecodeError:
2556 lines = open(specfile).readlines()
2563 if itm.startswith('%'):
2564 sections.append(itm)
2568 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2570 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2571 if m and m.group('val'):
2572 spec_data[tag] = m.group('val').strip()
2574 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2577 section_pat = '^%s\s*?$'
2578 for section in sections:
2579 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2581 start = lines.index(m.group()+'\n') + 1
2583 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2586 for line in lines[start:]:
2587 if line.startswith('%'):
2590 spec_data[section] = data
# Display `message` through the user's pager ($PAGER, default 'less') via a
# temporary file; when stdout is not a tty the pager is skipped (branch body
# elided from view — presumably the message is printed directly; confirm).
2594 def run_pager(message):
2595 import tempfile, sys
2597 if not sys.stdout.isatty():
2600 tmpfile = tempfile.NamedTemporaryFile()
2601 tmpfile.write(message)
# shell=True so a PAGER value containing options (e.g. 'less -R') works
2603 pager = os.getenv('PAGER', default='less')
2604 subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
# Open `filename` in the user's editor ($EDITOR) and return the editor's
# exit code. Default editor is 'vim' on POSIX, 'notepad' on Windows
# (the 'else:' between the two assignments is elided from this listing).
2607 def run_editor(filename):
2608 if sys.platform[:3] != 'win':
2609 editor = os.getenv('EDITOR', default='vim')
2611 editor = os.getenv('EDITOR', default='notepad')
2613 return subprocess.call([ editor, filename ])
# Prompt the user for a (commit) log message by opening a temp file in the
# editor. Text below the delimiter line is ignored; `template` is truncated
# to `templatelen` lines with the remainder moved into the footer. Loops
# until a non-empty message is entered or the user aborts (UserAbort).
# NOTE(review): file writing, loop structure and cleanup are elided.
2615 def edit_message(footer='', template='', templatelen=30):
2616 delim = '--This line, and those below, will be ignored--\n'
2618 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2619 f = os.fdopen(fd, 'w')
2621 if not templatelen is None:
2622 lines = template.splitlines()
2623 template = '\n'.join(lines[:templatelen])
2624 if lines[templatelen:]:
2625 footer = '%s\n\n%s' % ('\n'.join(lines[templatelen:]), footer)
2635 run_editor(filename)
# everything above the delimiter, stripped of trailing whitespace
2636 msg = open(filename).read().split(delim)[0].rstrip()
2641 input = raw_input('Log message not specified\n'
2642 'a)bort, c)ontinue, e)dit: ')
2644 raise oscerr.UserAbort()
# Create a delete request for a project (or a single package when `package`
# is given) by POSTing a request XML to /request?cmd=create. Returns the id
# of the created request. NOTE(review): the XML template open/close lines
# are elided from this listing.
2654 def create_delete_request(apiurl, project, package, message):
2659 package = """package="%s" """ % (package)
2665 <action type="delete">
2666 <target project="%s" %s/>
2669 <description>%s</description>
2671 """ % (project, package,
# escape the user-supplied message so it cannot break the XML
2672 cgi.escape(message or ''))
2674 u = makeurl(apiurl, ['request'], query='cmd=create')
2675 f = http_POST(u, data=xml)
2677 root = ET.parse(f).getroot()
2678 return root.get('id')
# Create a change_devel request (pointing a package's devel location at
# devel_project/devel_package) via /request?cmd=create and return its id.
# NOTE(review): signature continuation and XML template open/close elided.
2681 def create_change_devel_request(apiurl,
2682 devel_project, devel_package,
2689 <action type="change_devel">
2690 <source project="%s" package="%s" />
2691 <target project="%s" package="%s" />
2694 <description>%s</description>
2696 """ % (devel_project,
2700 cgi.escape(message or ''))
2702 u = makeurl(apiurl, ['request'], query='cmd=create')
2703 f = http_POST(u, data=xml)
2705 root = ET.parse(f).getroot()
2706 return root.get('id')
2709 # This creates an old style submit request for server api 1.0
# Build and POST a submit-request XML (old <request type="submit"> schema so
# older OBS servers still accept it) and return the created request id.
# If no source revision is given, the current upstream rev is used.
# NOTE(review): several template/assembly lines are elided from this listing.
2710 def create_submit_request(apiurl,
2711 src_project, src_package,
2712 dst_project=None, dst_package=None,
2713 message=None, orev=None, src_update=None):
2718 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2720 # Yes, this kind of xml construction is horrible
2725 packagexml = """package="%s" """ %( dst_package )
2726 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2727 # XXX: keep the old template for now in order to work with old obs instances
2729 <request type="submit">
2731 <source project="%s" package="%s" rev="%s"/>
2736 <description>%s</description>
2740 orev or show_upstream_rev(apiurl, src_project, src_package),
2743 cgi.escape(message or ""))
2745 u = makeurl(apiurl, ['request'], query='cmd=create')
2746 f = http_POST(u, data=xml)
2748 root = ET.parse(f).getroot()
2749 return root.get('id')
# Fetch a single request by id from /request/<reqid> and parse the response
# XML. NOTE(review): the http_GET call and the construction of the returned
# Request object are elided from this listing.
2752 def get_request(apiurl, reqid):
2753 u = makeurl(apiurl, ['request', reqid])
2755 root = ET.parse(f).getroot()
# Change the state of a review on a request (cmd=changereviewstate); the
# message becomes the POST body. NOTE(review): the makeurl() call head and
# the return are elided; by_group appears unused in the visible lines —
# confirm against the full source.
2762 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2765 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2766 f = http_POST(u, data=message)
# Change a request's state (cmd=changestate), e.g. accept/decline/revoke;
# the message becomes the POST body. NOTE(review): makeurl() call head and
# the return are elided from this listing.
2769 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2772 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2773 f = http_POST(u, data=message)
# Search the server for requests matching the given filters by assembling an
# xpath expression (states, requester, project/package as target or source,
# action type, excluded target projects) and running it through search().
# Returns a list of parsed requests. NOTE(review): xpath init, sorting and
# the return are elided; note the mutable-default args ([], a known trap).
2777 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2779 if not 'all' in req_state:
2780 for state in req_state:
2781 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2783 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2785 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2789 todo['project'] = project
2791 todo['package'] = package
# match both the new (action/...) and old (submit/...) request schemas
2792 for kind, val in todo.iteritems():
2793 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2794 'action/source/@%(kind)s=\'%(val)s\' or ' \
2795 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2796 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2798 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2799 for i in exclude_target_projects:
2800 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2801 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2803 if conf.config['verbose'] > 1:
2804 print '[ %s ]' % xpath
2805 res = search(apiurl, request=xpath)
2806 collection = res['request']
2808 for root in collection.findall('request'):
# Collect all requests targeting any project/package the user is involved
# in: first gather the user's projects and packages, then build one combined
# xpath (per-project OR per-package clauses, ANDed with type/state filters)
# and search. NOTE(review): xpath init lines and the return are elided;
# projpkgs={} is a mutable default argument (known trap) — confirm intent.
2814 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2815 """Return all new requests for all projects/packages where is user is involved"""
2817 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2818 for i in res['project_id'].findall('project'):
2819 projpkgs[i.get('name')] = []
2820 for i in res['package_id'].findall('package'):
# only record the package when its whole project isn't already included
2821 if not i.get('project') in projpkgs.keys():
2822 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2824 for prj, pacs in projpkgs.iteritems():
2826 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2830 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2831 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2832 xpath = xpath_join(xpath, xp, inner=True)
2834 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2835 if not 'all' in req_state:
2837 for state in req_state:
2838 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2839 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2840 res = search(apiurl, request=xpath)
2842 for root in res['request'].findall('request'):
# Build a human-readable history log for a request: one formatted entry per
# state (current state plus state history). The request description is
# copied into the oldest entry's comment so it isn't shown as None.
# NOTE(review): the apiurl parameter is ignored in the visible code — the
# call hardcodes conf.config['apiurl']; likely a latent bug, confirm.
2848 def get_request_log(apiurl, reqid):
2849 r = get_request(conf.config['apiurl'], reqid)
2851 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2852 # the description of the request is used for the initial log entry
2853 # otherwise its comment attribute would contain None
2854 if len(r.statehistory) >= 1:
2855 r.statehistory[-1].comment = r.descr
2857 r.state.comment = r.descr
2858 for state in [ r.state ] + r.statehistory:
2859 s = frmt % (state.name, state.who, state.when, str(state.comment))
# Fetch the raw user metadata XML from /person/<user>. On an HTTP error a
# not-found message is printed (the try: line and the error-path return are
# elided from this listing).
2864 def get_user_meta(apiurl, user):
2865 u = makeurl(apiurl, ['person', quote_plus(user)])
2868 return ''.join(f.readlines())
2869 except urllib2.HTTPError:
2870 print 'user \'%s\' not found' % user
2874 def get_user_data(apiurl, user, *tags):
2875 """get specified tags from the user meta"""
# Parse the user meta XML and collect the text of each requested tag.
# find() returns None for a missing tag, so .text raises AttributeError —
# caught below and reported as a broken XML file.
2876 meta = get_user_meta(apiurl, user)
2879 root = ET.fromstring(meta)
2882 if root.find(tag).text != None:
2883 data.append(root.find(tag).text)
2887 except AttributeError:
2888 # this part is reached if the tags tuple contains an invalid tag
2889 print 'The xml file for user \'%s\' seems to be broken' % user
# Download `url` into `filename`, streaming through a temp file in chunks
# (atomic-ish: the final file only appears via shutil.move). If mtime is
# given, the file's modification time is set to it.
# NOTE(review): try/finally structure and temp-file cleanup are elided.
2894 def download(url, filename, progress_obj = None, mtime = None):
2895 import tempfile, shutil
2898 prefix = os.path.basename(filename)
2899 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
# mkstemp creates 0600; widen to the usual 0644 for checked-out files
2900 os.chmod(tmpfile, 0644)
2902 o = os.fdopen(fd, 'wb')
2903 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2906 shutil.move(tmpfile, filename)
# -1 keeps atime unchanged while setting mtime
2915 os.utime(filename, (-1, mtime))
# Download one source file of a package (optionally at a given revision)
# into targetfilename (defaults to filename). Query construction lines are
# elided from this listing.
2917 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None, meta=None):
2918 targetfilename = targetfilename or filename
2923 query['rev'] = revision
2924 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2925 download(u, targetfilename, progress_obj, mtime)
# Download a built binary from /build/<prj>/<repo>/<arch>/<package|_repository>.
# With progress_meter set, a TextMeter progress display is used.
# NOTE(review): signature continuation and surrounding conditionals elided.
2927 def get_binary_file(apiurl, prj, repo, arch,
2930 target_filename = None,
2931 target_mtime = None,
2932 progress_meter = False):
2935 from meter import TextMeter
2936 progress_obj = TextMeter()
2938 target_filename = target_filename or filename
# '_repository' addresses repository-level binaries not tied to one package
2940 where = package or '_repository'
2941 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2942 download(u, target_filename, progress_obj, target_mtime)
# Return the hex md5 digest of a string. Uses hashlib where available and
# falls back to the deprecated md5 module for Python 2.4 (the try/except
# ImportError around the two imports is elided from this listing).
# NOTE: parameter name 'str' shadows the builtin.
2944 def dgst_from_string(str):
2945 # Python 2.5 depracates the md5 modules
2946 # Python 2.4 doesn't have hashlib yet
2949 md5_hash = hashlib.md5()
2952 md5_hash = md5.new()
2953 md5_hash.update(str)
2954 return md5_hash.hexdigest()
# Fragment of the file-digest helper (its def line is elided from this
# listing): reads the file in BUFSIZE chunks, feeding a hash object `s`
# (created on an elided line), and returns the hex digest.
2958 #if not os.path.exists(file):
2968 f = open(file, 'rb')
2970 buf = f.read(BUFSIZE)
2973 return s.hexdigest()
# Fragment of binary(s) (def line elided): diff's heuristic — data is
# considered binary if a NUL byte occurs within the first 4096 bytes.
2978 """return true if a string is binary data using diff's heuristic"""
2979 if s and '\0' in s[:4096]:
def binary_file(fn):
    """Return the result of binary() applied to the first 4096 bytes of
    the file named fn, i.e. True if the file looks like binary data.

    Fix: the original left the file object unreferenced after read(),
    relying on CPython refcounting to close it; close it explicitly so
    the descriptor is released deterministically on any interpreter.
    """
    f = open(fn, 'rb')
    try:
        head = f.read(4096)
    finally:
        f.close()
    return binary(head)
# Produce a unified diff between the stored copy of a file (old) and the
# working copy (new). Binary content is reported with a one-line notice
# instead of a diff; missing trailing newlines get the usual
# '\ No newline at end of file' marker. NOTE(review): docstring close,
# read/close of f1/f2 and the return are elided from this listing.
2989 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2991 This methods diffs oldfilename against filename (so filename will
2992 be shown as the new file).
2993 The variable origfilename is used if filename and oldfilename differ
2994 in their names (for instance if a tempfile is used for filename etc.)
3000 oldfilename = filename
3003 olddir = os.path.join(dir, store)
3005 if not origfilename:
3006 origfilename = filename
3008 file1 = os.path.join(olddir, oldfilename) # old/stored original
3009 file2 = os.path.join(dir, filename) # working copy
3011 f1 = open(file1, 'rb')
3015 f2 = open(file2, 'rb')
3019 if binary(s1) or binary (s2):
3020 d = ['Binary file %s has changed\n' % origfilename]
3023 d = difflib.unified_diff(\
3026 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
3027 tofile = '%s\t(working copy)' % origfilename)
3029 # if file doesn't end with newline, we need to append one in the diff result
3031 for i, line in enumerate(d):
3032 if not line.endswith('\n'):
3033 d[i] += '\n\\ No newline at end of file'
# Build a full diff for a working copy `wc`. With revision None, the diff is
# against the stored copies; otherwise the requested revision is checked out
# into a temp dir (cmp_pac) and compared. Files are bucketed into changed/
# added/removed, then diffed per bucket; added and removed files are diffed
# against an empty tempfile so they show as pure additions/deletions.
# NOTE(review): heavily elided — loop scaffolding, try/finally cleanup and
# the return are not visible; exact statement order must not be assumed.
3039 def make_diff(wc, revision):
3045 diff_hdr = 'Index: %s\n'
3046 diff_hdr += '===================================================================\n'
3048 olddir = os.getcwd()
# restrict to wc.todo when the user named specific files
3052 for file in wc.todo:
3053 if file in wc.skipped:
3055 if file in wc.filenamelist+wc.filenamelist_unvers:
3056 state = wc.status(file)
3058 added_files.append(file)
3060 removed_files.append(file)
3061 elif state == 'M' or state == 'C':
3062 changed_files.append(file)
3064 diff.append('osc: \'%s\' is not under version control' % file)
3066 for file in wc.filenamelist+wc.filenamelist_unvers:
3067 if file in wc.skipped:
3069 state = wc.status(file)
3070 if state == 'M' or state == 'C':
3071 changed_files.append(file)
3073 added_files.append(file)
3075 removed_files.append(file)
# diff against a server revision: check it out into a scratch package dir
3077 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
3079 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
3080 cmp_pac = Package(tmpdir)
3082 for file in wc.todo:
3083 if file in cmp_pac.skipped:
3085 if file in cmp_pac.filenamelist:
3086 if file in wc.filenamelist:
3087 changed_files.append(file)
3089 diff.append('osc: \'%s\' is not under version control' % file)
3091 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
3093 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
3095 for file in changed_files:
3096 diff.append(diff_hdr % file)
3098 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
3100 cmp_pac.updatefile(file, revision)
3101 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3102 cmp_pac.absdir, file))
# empty tempfile stands in for the non-existent counterpart of added files
3103 (fd, tmpfile) = tempfile.mkstemp()
3104 for file in added_files:
3105 diff.append(diff_hdr % file)
3107 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3108 os.path.dirname(tmpfile), file))
3110 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3111 os.path.dirname(tmpfile), file))
3113 # FIXME: this is ugly but it cannot be avoided atm
3114 # if a file is deleted via "osc rm file" we should keep the storefile.
3116 if cmp_pac == None and removed_files:
3117 tmpdir = tempfile.mkdtemp()
3119 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3120 tmp_pac = Package(tmpdir)
3123 for file in removed_files:
3124 diff.append(diff_hdr % file)
3126 tmp_pac.updatefile(file, tmp_pac.rev)
3127 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3128 wc.rev, file, tmp_pac.storedir, file))
3130 cmp_pac.updatefile(file, revision)
3131 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3132 revision, file, cmp_pac.storedir, file))
# clean up the scratch checkouts used for comparison
3136 delete_dir(cmp_pac.absdir)
3138 delete_dir(tmp_pac.absdir)
# Ask the server to compute a diff between two project/package/revision
# triples via the 'diff' source command; optional flags request unified
# output and tolerance of a missing old package. NOTE(review): the guards
# around each query assignment and the POST/return tail are elided.
3142 def server_diff(apiurl,
3143 old_project, old_package, old_revision,
3144 new_project, new_package, new_revision, unified=False, missingok=False, meta=None):
3145 query = {'cmd': 'diff', 'expand': '1'}
3147 query['oproject'] = old_project
3149 query['opackage'] = old_package
3151 query['orev'] = old_revision
3153 query['rev'] = new_revision
3155 query['unified'] = 1
3157 query['missingok'] = 1
3161 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
# Create the on-disk directory layout for a package checkout (project dir,
# package dir, and its .osc store), initializing the project dir when
# needed, and return the package directory path. Raises OscIOError on a
# package/project name clash. NOTE(review): docstring delimiters and some
# blank lines are elided from this listing.
3167 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3169 creates the plain directory structure for a package dir.
3170 The 'apiurl' parameter is needed for the project dir initialization.
3171 The 'project' and 'package' parameters specify the name of the
3172 project and the package. The optional 'pathname' parameter is used
3173 for printing out the message that a new dir was created (default: 'prj_dir/package').
3174 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3176 prj_dir = prj_dir or project
3178 # FIXME: carefully test each patch component of prj_dir,
3179 # if we have a .osc/_files entry at that level.
3180 # -> if so, we have a package/project clash,
3181 # and should rename this path component by appending '.proj'
3182 # and give user a warning message, to discourage such clashes
3184 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3185 if is_package_dir(prj_dir):
3186 # we want this to become a project directory,
3187 # but it already is a package directory.
3188 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3190 if not is_project_dir(prj_dir):
3191 # this directory could exist as a parent direory for one of our earlier
3192 # checked out sub-projects. in this case, we still need to initialize it.
3193 print statfrmt('A', prj_dir)
3194 init_project_dir(apiurl, prj_dir, project)
3196 if is_project_dir(os.path.join(prj_dir, package)):
3197 # the thing exists, but is a project directory and not a package directory
3198 # FIXME: this should be a warning message to discourage package/project clashes
3199 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3201 if not os.path.exists(os.path.join(prj_dir, package)):
3202 print statfrmt('A', pathname)
3203 os.mkdir(os.path.join(prj_dir, package))
# the package's metadata store lives in <pkgdir>/<store> (.osc)
3204 os.mkdir(os.path.join(prj_dir, package, store))
3206 return(os.path.join(prj_dir, package))
# Check a package out of the server into a local working copy: verify the
# package exists, resolve the revision (expanding source links when asked),
# create the directory layout via make_dir(), then fetch every non-skipped
# file. With package tracking enabled, the enclosing Project is updated.
# NOTE(review): heavily elided — try/except scaffolding, prj_dir defaulting
# and the restore of the original cwd are not visible.
3209 def checkout_package(apiurl, project, package,
3210 revision=None, pathname=None, prj_obj=None,
3211 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None, meta=None):
3213 # the project we're in might be deleted.
3214 # that'll throw an error then.
3215 olddir = os.getcwd()
3217 olddir = os.environ.get("PWD")
# ':' is not allowed in Windows paths; checkout_no_colon maps it everywhere
3222 if sys.platform[:3] == 'win':
3223 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3225 if conf.config['checkout_no_colon']:
3226 prj_dir = prj_dir.replace(':', '/')
3229 pathname = getTransActPath(os.path.join(prj_dir, package))
3231 # before we create directories and stuff, check if the package actually
3233 show_package_meta(apiurl, project, package)
3237 # try to read from the linkinfo
3238 # if it is a link we use the xsrcmd5 as the revision to be
3241 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3243 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3248 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3249 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size, meta=meta)
3251 p = Package(package, progress_obj=progress_obj)
3254 for filename in p.filenamelist:
3255 if filename in p.skipped:
# _service: files are generated server-side; fetch only on request
3257 if service_files or not filename.startswith('_service:'):
3258 p.updatefile(filename, revision)
3259 # print 'A ', os.path.join(project, package, filename)
3260 print statfrmt('A', os.path.join(pathname, filename))
3261 if conf.config['do_package_tracking']:
3262 # check if we can re-use an existing project object
3264 prj_obj = Project(os.getcwd())
3265 prj_obj.set_state(p.name, ' ')
3266 prj_obj.write_packages()
# Rewrite a package meta XML for use under a new name/project: set the name
# and project attributes and, unless told otherwise, strip maintainers and
# the <devel /> entry. Returns the serialized XML. NOTE(review): the
# root.remove() calls inside both loops are elided from this listing.
3270 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3271 dst_userid = None, keep_develproject = False):
3273 update pkgmeta with new new_name and new_prj and set calling user as the
3274 only maintainer (unless keep_maintainers is set). Additionally remove the
3275 develproject entry (<devel />) unless keep_develproject is true.
3277 root = ET.fromstring(''.join(pkgmeta))
3278 root.set('name', new_name)
3279 root.set('project', new_prj)
3280 if not keep_maintainers:
3281 for person in root.findall('person'):
3283 if not keep_develproject:
3284 for dp in root.findall('devel'):
3286 return ET.tostring(root)
# Convert a package that carries a _link (+ project.diff) into a real
# branch via the server's 'linktobranch' command; raises OscIOError when
# the package has no _link file. (The http_POST call on the built URL is
# elided from this listing.)
3288 def link_to_branch(apiurl, project, package):
3290 convert a package with a _link + project.diff to a branch
3293 if '_link' in meta_get_filelist(apiurl, project, package):
3294 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3297 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3299 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3301 create a linked package
3302 - "src" is the original package
3303 - "dst" is the "link" package that we are creating here
3308 dst_meta = meta_exists(metatype='pkg',
3309 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3311 create_new=False, apiurl=conf.config['apiurl'])
3312 root = ET.fromstring(''.join(dst_meta))
3313 print root.attrib['project']
3314 if root.attrib['project'] != dst_project:
3315 # The source comes from a different project via a project link, we need to create this instance
3321 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3322 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3326 root = ET.fromstring(''.join(dst_meta))
3327 elm = root.find('publish')
3329 elm = ET.SubElement(root, 'publish')
3331 ET.SubElement(elm, 'disable')
3332 dst_meta = ET.tostring(root)
3336 path_args=(dst_project, dst_package),
3338 # create the _link file
3339 # but first, make sure not to overwrite an existing one
3340 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3342 print >>sys.stderr, 'forced overwrite of existing _link file'
3345 print >>sys.stderr, '_link file already exists...! Aborting'
3349 rev = 'rev="%s"' % rev
3354 cicount = 'cicount="%s"' % cicount
3358 print 'Creating _link...',
3359 link_template = """\
3360 <link project="%s" package="%s" %s %s>
3362 <!-- <apply name="patch" /> apply a patch on the source directory -->
3363 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3364 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3365 <!-- <delete>filename</delete> delete a file -->