1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breaking tools which use the osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E',
199 # os.path.samefile is available only under Unix
200 def os_path_samefile(path1, path2):
202 return os.path.samefile(path1, path2)
204 return os.path.realpath(path1) == os.path.realpath(path2)
207 """represent a file, including its metadata"""
208 def __init__(self, name, md5, size, mtime):
218 """Source service content
221 """creates an empty serviceinfo instance"""
224 def read(self, serviceinfo_node):
225 """read in the source services <services> element passed as
228 if serviceinfo_node is None:
231 services = serviceinfo_node.findall('service')
233 for service in services:
234 name = service.get('name')
236 for param in service.findall('param'):
237 option = param.get('name', None)
239 name += " --" + option + " '" + value + "'"
240 self.commands.append(name)
242 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
243 raise oscerr.APIError(msg)
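# Illustrative example (not from the original source; the service name and
# parameter values are made up): a _service file such as
#   <services>
#     <service name="download_url">
#       <param name="host">example.org</param>
#       <param name="path">/pub/foo.tar.gz</param>
#     </service>
#   </services>
# is read into self.commands as
#   ["download_url --host 'example.org' --path '/pub/foo.tar.gz'"]
# i.e. every <param> is appended to the service name as a long option.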
245 def execute(self, dir):
248 for call in self.commands:
249 temp_dir = tempfile.mkdtemp()
250 name = call.split(None, 1)[0]
251 if not os.path.exists("/usr/lib/obs/service/"+name):
252 msg = "ERROR: service is not installed !"
253 msg += "Can maybe solved with: zypper in obs-server-" + name
254 raise oscerr.APIError(msg)
255 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
256 ret = subprocess.call(c, shell=True)
258 print "ERROR: service call failed: " + c
260 for file in os.listdir(temp_dir):
261 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
265 """linkinfo metadata (which is part of the xml representing a directory
268 """creates an empty linkinfo instance"""
278 def read(self, linkinfo_node):
279 """read in the linkinfo metadata from the <linkinfo> element passed as
281 If the passed element is None, the method does nothing.
283 if linkinfo_node is None:
285 self.project = linkinfo_node.get('project')
286 self.package = linkinfo_node.get('package')
287 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
288 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
289 self.srcmd5 = linkinfo_node.get('srcmd5')
290 self.error = linkinfo_node.get('error')
291 self.rev = linkinfo_node.get('rev')
292 self.baserev = linkinfo_node.get('baserev')
295 """returns True if the linkinfo is not empty, otherwise False"""
296 if self.xsrcmd5 or self.lsrcmd5:
300 def isexpanded(self):
301 """returns True if the package is an expanded link"""
302 if self.lsrcmd5 and not self.xsrcmd5:
307 """returns True if the link is in error state (could not be applied)"""
313 """return an informatory string representation"""
314 if self.islink() and not self.isexpanded():
315 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
316 % (self.project, self.package, self.xsrcmd5, self.rev)
317 elif self.islink() and self.isexpanded():
319 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
320 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
322 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
323 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
329 """represent a project directory, holding packages"""
330 def __init__(self, dir, getPackageList=True, progress_obj=None):
333 self.absdir = os.path.abspath(dir)
334 self.progress_obj = progress_obj
336 self.name = store_read_project(self.dir)
337 self.apiurl = store_read_apiurl(self.dir)
340 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
342 self.pacs_available = []
344 if conf.config['do_package_tracking']:
345 self.pac_root = self.read_packages().getroot()
346 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
347 self.pacs_excluded = [ i for i in os.listdir(self.dir)
348 for j in conf.config['exclude_glob']
349 if fnmatch.fnmatch(i, j) ]
350 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
351 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
352 # in the self.pacs_broken list
353 self.pacs_broken = []
354 for p in self.pacs_have:
355 if not os.path.isdir(os.path.join(self.absdir, p)):
356 # all states will be replaced with the '!'-state
357 # (unless it is already marked as deleted ('D'-state))
358 self.pacs_broken.append(p)
360 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
362 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
364 def checkout_missing_pacs(self, expand_link=False):
365 for pac in self.pacs_missing:
367 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
368 # pac is not under version control but a local file/dir exists
369 msg = 'can\'t add package \'%s\': Object already exists' % pac
370 raise oscerr.PackageExists(self.name, pac, msg)
372 print 'checking out new package %s' % pac
373 checkout_package(self.apiurl, self.name, pac, \
374 pathname=getTransActPath(os.path.join(self.dir, pac)), \
375 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
377 def set_state(self, pac, state):
378 node = self.get_package_node(pac)
380 self.new_package_entry(pac, state)
382 node.attrib['state'] = state
384 def get_package_node(self, pac):
385 for node in self.pac_root.findall('package'):
386 if pac == node.get('name'):
390 def del_package_node(self, pac):
391 for node in self.pac_root.findall('package'):
392 if pac == node.get('name'):
393 self.pac_root.remove(node)
395 def get_state(self, pac):
396 node = self.get_package_node(pac)
398 return node.get('state')
402 def new_package_entry(self, name, state):
403 ET.SubElement(self.pac_root, 'package', name=name, state=state)
405 def read_packages(self):
406 packages_file = os.path.join(self.absdir, store, '_packages')
407 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
408 return ET.parse(packages_file)
410 # scan project for existing packages and migrate them
412 for data in os.listdir(self.dir):
413 pac_dir = os.path.join(self.absdir, data)
414 # we cannot use self.pacs_available because we cannot guarantee that the package list
415 # was fetched from the server
416 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
417 and Package(pac_dir).name == data:
418 cur_pacs.append(ET.Element('package', name=data, state=' '))
419 store_write_initial_packages(self.absdir, self.name, cur_pacs)
420 return ET.parse(os.path.join(self.absdir, store, '_packages'))
422 def write_packages(self):
423 # TODO: should we only modify the existing file instead of overwriting?
424 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
426 def addPackage(self, pac):
428 for i in conf.config['exclude_glob']:
429 if fnmatch.fnmatch(pac, i):
430 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
431 raise oscerr.OscIOError(None, msg)
432 state = self.get_state(pac)
433 if state is None or state == 'D':
434 self.new_package_entry(pac, 'A')
435 self.write_packages()
436 # sometimes the new pac doesn't exist in the list because
437 # it would take too much time to update all data structs regularly
438 if pac in self.pacs_unvers:
439 self.pacs_unvers.remove(pac)
441 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
443 def delPackage(self, pac, force = False):
444 state = self.get_state(pac.name)
446 if state == ' ' or state == 'D':
448 for file in pac.filenamelist + pac.filenamelist_unvers:
449 filestate = pac.status(file)
450 if filestate == 'M' or filestate == 'C' or \
451 filestate == 'A' or filestate == '?':
454 del_files.append(file)
455 if can_delete or force:
456 for file in del_files:
457 pac.delete_localfile(file)
458 if pac.status(file) != '?':
459 pac.delete_storefile(file)
460 # this is not really necessary
461 pac.put_on_deletelist(file)
462 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
464 pac.write_deletelist()
465 self.set_state(pac.name, 'D')
466 self.write_packages()
468 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
471 delete_dir(pac.absdir)
472 self.del_package_node(pac.name)
473 self.write_packages()
474 print statfrmt('D', pac.name)
476 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
478 print 'package is not under version control'
480 print 'unsupported state'
482 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
485 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
487 # we need to make sure that the _packages file will be written (even if an exception
490 # update complete project
491 # packages which no longer exist upstream
492 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
494 for pac in upstream_del:
495 p = Package(os.path.join(self.dir, pac))
496 self.delPackage(p, force = True)
497 delete_storedir(p.storedir)
502 self.pac_root.remove(self.get_package_node(p.name))
503 self.pacs_have.remove(pac)
505 for pac in self.pacs_have:
506 state = self.get_state(pac)
507 if pac in self.pacs_broken:
508 if self.get_state(pac) != 'A':
509 checkout_package(self.apiurl, self.name, pac,
510 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
511 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
514 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
516 if expand_link and p.islink() and not p.isexpanded():
519 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
524 rev = p.linkinfo.xsrcmd5
525 print 'Expanding to rev', rev
526 elif unexpand_link and p.islink() and p.isexpanded():
527 rev = p.linkinfo.lsrcmd5
528 print 'Unexpanding to rev', rev
529 elif p.islink() and p.isexpanded():
531 print 'Updating %s' % p.name
532 p.update(rev, service_files)
536 # TODO: Package::update has to be fixed to behave like svn does
537 if pac in self.pacs_broken:
538 checkout_package(self.apiurl, self.name, pac,
539 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
540 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
542 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
543 elif state == 'A' and pac in self.pacs_available:
544 # file/dir called pac already exists and is under version control
545 msg = 'can\'t add package \'%s\': Object already exists' % pac
546 raise oscerr.PackageExists(self.name, pac, msg)
551 print 'unexpected state.. package \'%s\'' % pac
553 self.checkout_missing_pacs(expand_link=not unexpand_link)
555 self.write_packages()
557 def commit(self, pacs = (), msg = '', files = {}):
562 if pac in files:
564 state = self.get_state(pac)
566 self.commitNewPackage(pac, msg, todo)
568 self.commitDelPackage(pac)
570 # display the correct dir when sending the changes
571 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
574 p = Package(os.path.join(self.dir, pac))
577 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
578 print 'osc: \'%s\' is not under version control' % pac
579 elif pac in self.pacs_broken:
580 print 'osc: \'%s\' package not found' % pac
582 self.commitExtPackage(pac, msg, todo)
584 self.write_packages()
586 # if we have packages marked as '!' we cannot commit
587 for pac in self.pacs_broken:
588 if self.get_state(pac) != 'D':
589 msg = 'commit failed: package \'%s\' is missing' % pac
590 raise oscerr.PackageMissing(self.name, pac, msg)
592 for pac in self.pacs_have:
593 state = self.get_state(pac)
596 Package(os.path.join(self.dir, pac)).commit(msg)
598 self.commitDelPackage(pac)
600 self.commitNewPackage(pac, msg)
602 self.write_packages()
604 def commitNewPackage(self, pac, msg = '', files = []):
605 """creates and commits a new package if it does not exist on the server"""
606 if pac in self.pacs_available:
607 print 'package \'%s\' already exists' % pac
609 user = conf.get_apiurl_usr(self.apiurl)
610 edit_meta(metatype='pkg',
611 path_args=(quote_plus(self.name), quote_plus(pac)),
616 # display the correct dir when sending the changes
618 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
622 p = Package(os.path.join(self.dir, pac))
624 print statfrmt('Sending', os.path.normpath(p.dir))
626 self.set_state(pac, ' ')
629 def commitDelPackage(self, pac):
630 """deletes a package on the server and in the working copy"""
632 # display the correct dir when sending the changes
633 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
636 pac_dir = os.path.join(self.dir, pac)
637 p = Package(os.path.join(self.dir, pac))
638 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
639 delete_storedir(p.storedir)
645 pac_dir = os.path.join(self.dir, pac)
646 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
647 print statfrmt('Deleting', getTransActPath(pac_dir))
648 delete_package(self.apiurl, self.name, pac)
649 self.del_package_node(pac)
651 def commitExtPackage(self, pac, msg, files = []):
652 """commits a package from an external project"""
653 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
656 pac_path = os.path.join(self.dir, pac)
658 project = store_read_project(pac_path)
659 package = store_read_package(pac_path)
660 apiurl = store_read_apiurl(pac_path)
661 if meta_exists(metatype='pkg',
662 path_args=(quote_plus(project), quote_plus(package)),
664 create_new=False, apiurl=apiurl):
665 p = Package(pac_path)
669 user = conf.get_apiurl_usr(self.apiurl)
670 edit_meta(metatype='pkg',
671 path_args=(quote_plus(project), quote_plus(package)),
676 p = Package(pac_path)
682 r.append('*****************************************************')
683 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
684 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
685 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
686 r.append('*****************************************************')
692 """represent a package (its directory) and read/keep/write its metadata"""
693 def __init__(self, workingdir, progress_obj=None):
694 self.dir = workingdir
695 self.absdir = os.path.abspath(self.dir)
696 self.storedir = os.path.join(self.absdir, store)
697 self.progress_obj = progress_obj
699 check_store_version(self.dir)
701 self.prjname = store_read_project(self.dir)
702 self.name = store_read_package(self.dir)
703 self.apiurl = store_read_apiurl(self.dir)
705 self.update_datastructs()
709 self.todo_delete = []
712 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
713 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
716 def addfile(self, n):
717 st = os.stat(os.path.join(self.dir, n))
718 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
720 def delete_file(self, n, force=False):
721 """deletes a file if possible and marks the file as deleted"""
724 state = self.status(n)
728 if state in ['?', 'A', 'M'] and not force:
729 return (False, state)
730 self.delete_localfile(n)
732 self.put_on_deletelist(n)
733 self.write_deletelist()
735 self.delete_storefile(n)
738 def delete_storefile(self, n):
739 try: os.unlink(os.path.join(self.storedir, n))
742 def delete_localfile(self, n):
743 try: os.unlink(os.path.join(self.dir, n))
746 def put_on_deletelist(self, n):
747 if n not in self.to_be_deleted:
748 self.to_be_deleted.append(n)
750 def put_on_conflictlist(self, n):
751 if n not in self.in_conflict:
752 self.in_conflict.append(n)
754 def clear_from_conflictlist(self, n):
755 """delete an entry from the file, and remove the file if it would be empty"""
756 if n in self.in_conflict:
758 filename = os.path.join(self.dir, n)
759 storefilename = os.path.join(self.storedir, n)
760 myfilename = os.path.join(self.dir, n + '.mine')
761 if self.islinkrepair() or self.ispulled():
762 upfilename = os.path.join(self.dir, n + '.new')
764 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
767 os.unlink(myfilename)
768 # the working copy may be updated, so the .r* ending may be obsolete...
770 os.unlink(upfilename)
771 if self.islinkrepair() or self.ispulled():
772 os.unlink(os.path.join(self.dir, n + '.old'))
776 self.in_conflict.remove(n)
778 self.write_conflictlist()
780 def write_deletelist(self):
781 if len(self.to_be_deleted) == 0:
783 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
787 fname = os.path.join(self.storedir, '_to_be_deleted')
789 f.write('\n'.join(self.to_be_deleted))
793 def delete_source_file(self, n):
794 """delete local a source file"""
795 self.delete_localfile(n)
796 self.delete_storefile(n)
798 def delete_remote_source_file(self, n):
799 """delete a remote source file (e.g. from the server)"""
801 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
804 def put_source_file(self, n):
806 # escaping '+' in the URL path (note: not in the URL query string) is
807 # only a workaround for ruby on rails, which swallows it otherwise
809 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
810 http_PUT(u, file = os.path.join(self.dir, n))
812 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
814 def commit(self, msg=''):
815 # commit only if the upstream revision is the same as the working copy's
816 upstream_rev = self.latest_rev()
817 if self.rev != upstream_rev:
818 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
821 self.todo = self.filenamelist_unvers + self.filenamelist
823 pathn = getTransActPath(self.dir)
825 have_conflicts = False
826 for filename in self.todo:
827 if not filename.startswith('_service:') and not filename.startswith('_service_'):
828 st = self.status(filename)
829 if st == 'A' or st == 'M':
830 self.todo_send.append(filename)
831 print statfrmt('Sending', os.path.join(pathn, filename))
833 self.todo_delete.append(filename)
834 print statfrmt('Deleting', os.path.join(pathn, filename))
836 have_conflicts = True
839 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
842 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
843 print 'nothing to do for package %s' % self.name
846 if self.islink() and self.isexpanded():
847 # resolve the link into the upload revision
848 # XXX: do this always?
849 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
850 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
853 print 'Transmitting file data ',
855 for filename in self.todo_delete:
856 # do not touch local files on commit --
857 # delete remotely instead
858 self.delete_remote_source_file(filename)
859 self.to_be_deleted.remove(filename)
860 for filename in self.todo_send:
861 sys.stdout.write('.')
863 self.put_source_file(filename)
865 # all source files are committed - now comes the log
866 query = { 'cmd' : 'commit',
868 'user' : conf.get_apiurl_usr(self.apiurl),
870 if self.islink() and self.isexpanded():
871 query['keeplink'] = '1'
872 if conf.config['linkcontrol'] or self.isfrozen():
873 query['linkrev'] = self.linkinfo.srcmd5
875 query['repairlink'] = '1'
876 query['linkrev'] = self.get_pulled_srcmd5()
877 if self.islinkrepair():
878 query['repairlink'] = '1'
879 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
882 # delete upload revision
884 query = { 'cmd': 'deleteuploadrev' }
885 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
891 root = ET.parse(f).getroot()
892 self.rev = int(root.get('rev'))
894 print 'Committed revision %s.' % self.rev
897 os.unlink(os.path.join(self.storedir, '_pulled'))
898 if self.islinkrepair():
899 os.unlink(os.path.join(self.storedir, '_linkrepair'))
900 self.linkrepair = False
901 # XXX: mark package as invalid?
902 print 'The source link has been repaired. This directory can now be removed.'
903 if self.islink() and self.isexpanded():
904 self.update_local_filesmeta(revision=self.latest_rev())
906 self.update_local_filesmeta()
907 self.write_deletelist()
908 self.update_datastructs()
910 if self.filenamelist.count('_service'):
911 print 'The package contains a source service.'
912 for filename in self.todo:
913 if filename.startswith('_service:') and os.path.exists(filename):
914 os.unlink(filename) # remove local files
915 print_request_list(self.apiurl, self.prjname, self.name)
917 def write_conflictlist(self):
918 if len(self.in_conflict) == 0:
920 os.unlink(os.path.join(self.storedir, '_in_conflict'))
924 fname = os.path.join(self.storedir, '_in_conflict')
926 f.write('\n'.join(self.in_conflict))
930 def updatefile(self, n, revision):
931 filename = os.path.join(self.dir, n)
932 storefilename = os.path.join(self.storedir, n)
933 mtime = self.findfilebyname(n).mtime
935 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
936 os.utime(filename, (-1, mtime))
938 shutil.copyfile(filename, storefilename)
940 def mergefile(self, n):
941 filename = os.path.join(self.dir, n)
942 storefilename = os.path.join(self.storedir, n)
943 myfilename = os.path.join(self.dir, n + '.mine')
944 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
945 os.rename(filename, myfilename)
947 mtime = self.findfilebyname(n).mtime
948 get_source_file(self.apiurl, self.prjname, self.name, n,
949 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
950 os.utime(upfilename, (-1, mtime))
952 if binary_file(myfilename) or binary_file(upfilename):
954 shutil.copyfile(upfilename, filename)
955 shutil.copyfile(upfilename, storefilename)
956 self.in_conflict.append(n)
957 self.write_conflictlist()
961 # diff3 OPTIONS... MINE OLDER YOURS
962 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
963 # we would rather use the subprocess module, but it is not available before 2.4
964 ret = subprocess.call(merge_cmd, shell=True)
966 # "An exit status of 0 means `diff3' was successful, 1 means some
967 # conflicts were found, and 2 means trouble."
969 # merge was successful... clean up
970 shutil.copyfile(upfilename, storefilename)
971 os.unlink(upfilename)
972 os.unlink(myfilename)
976 shutil.copyfile(upfilename, storefilename)
977 self.in_conflict.append(n)
978 self.write_conflictlist()
981 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
982 print >>sys.stderr, 'the command line was:'
983 print >>sys.stderr, merge_cmd
988 def update_local_filesmeta(self, revision=None):
990 Update the local _files file in the store.
991 It is replaced with the version pulled from upstream.
993 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
994 store_write_string(self.absdir, '_files', meta)
996 def update_datastructs(self):
998 Update the internal data structures if the local _files
999 file has changed (e.g. update_local_filesmeta() has been
1003 files_tree = read_filemeta(self.dir)
1004 files_tree_root = files_tree.getroot()
1006 self.rev = files_tree_root.get('rev')
1007 self.srcmd5 = files_tree_root.get('srcmd5')
1009 self.linkinfo = Linkinfo()
1010 self.linkinfo.read(files_tree_root.find('linkinfo'))
1012 self.filenamelist = []
1014 for node in files_tree_root.findall('entry'):
1016 f = File(node.get('name'),
1018 int(node.get('size')),
1019 int(node.get('mtime')))
1021 # okay, a very old version of _files, which didn't contain any metadata yet...
1022 f = File(node.get('name'), '', 0, 0)
1023 self.filelist.append(f)
1024 self.filenamelist.append(f.name)
1026 self.to_be_deleted = read_tobedeleted(self.dir)
1027 self.in_conflict = read_inconflict(self.dir)
1028 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1030 # gather unversioned files, but ignore some stuff
1031 self.excluded = [ i for i in os.listdir(self.dir)
1032 for j in conf.config['exclude_glob']
1033 if fnmatch.fnmatch(i, j) ]
1034 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1035 if i not in self.excluded
1036 if i not in self.filenamelist ]
1039 """tells us if the package is a link (has 'linkinfo').
1040 A package with linkinfo is a package which links to another package.
1041 Returns True if the package is a link, otherwise False."""
1042 return self.linkinfo.islink()
1044 def isexpanded(self):
1045 """tells us if the package is a link which is expanded.
1046 Returns True if the package is expanded, otherwise False."""
1047 return self.linkinfo.isexpanded()
1049 def islinkrepair(self):
1050 """tells us if we are repairing a broken source link."""
1051 return self.linkrepair
1054 """tells us if we have pulled a link."""
1055 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1058 """tells us if the link is frozen."""
1059 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1061 def get_pulled_srcmd5(self):
1063 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1064 pulledrev = line.strip()
1067 def haslinkerror(self):
1069 Returns True if the link is broken otherwise False.
1070 If the package is not a link it returns False.
1072 return self.linkinfo.haserror()
1074 def linkerror(self):
1076 Returns an error message if the link is broken otherwise None.
1077 If the package is not a link it returns None.
1079 return self.linkinfo.error
1081 def update_local_pacmeta(self):
1083 Update the local _meta file in the store.
1084 It is replaced with the version pulled from upstream.
1086 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1087 store_write_string(self.absdir, '_meta', meta)
1089 def findfilebyname(self, n):
1090 for i in self.filelist:
1094 def status(self, n):
1098 file      storefile  file present  STATUS
1099 exists    exists     in _files
1102   x          x           x         ' ' if digest differs: 'M'
1103                                         and if in conflicts file: 'C'
1105   x          -           x         'D' and listed in _to_be_deleted
1107   -          x           -         'D' (when file in working copy is already deleted)
1108   -          -           x         'F' (new in repo, but not yet in working copy)
1113 known_by_meta = False
1115 exists_in_store = False
1116 if n in self.filenamelist:
1117 known_by_meta = True
1118 if os.path.exists(os.path.join(self.absdir, n)):
1120 if os.path.exists(os.path.join(self.storedir, n)):
1121 exists_in_store = True
1124 if exists and not exists_in_store and known_by_meta:
1126 elif n in self.to_be_deleted:
1128 elif n in self.in_conflict:
1130 elif exists and exists_in_store and known_by_meta:
1131 #print self.findfilebyname(n)
1132 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1136 elif exists and not exists_in_store and not known_by_meta:
1138 elif exists and exists_in_store and not known_by_meta:
1140 elif not exists and exists_in_store and known_by_meta:
1142 elif not exists and not exists_in_store and known_by_meta:
1144 elif not exists and exists_in_store and not known_by_meta:
1146 elif not exists and not exists_in_store and not known_by_meta:
1147 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1148 raise IOError('osc: \'%s\' is not under version control' % n)
1152 def comparePac(self, cmp_pac):
1154 This method compares the local filelist with
1155 the filelist of the passed package to see which files
1156 were added, removed and changed.
1163 for file in self.filenamelist+self.filenamelist_unvers:
1164 state = self.status(file)
1165 if state == 'A' and (not file in cmp_pac.filenamelist):
1166 added_files.append(file)
1167 elif file in cmp_pac.filenamelist and state == 'D':
1168 removed_files.append(file)
1169 elif state == ' ' and not file in cmp_pac.filenamelist:
1170 added_files.append(file)
1171 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1172 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1173 changed_files.append(file)
1174 for file in cmp_pac.filenamelist:
1175 if not file in self.filenamelist:
1176 removed_files.append(file)
1177 removed_files = set(removed_files)
1179 return changed_files, added_files, removed_files
1181 def merge(self, otherpac):
1182 self.todo += otherpac.todo
1196 '\n '.join(self.filenamelist),
1204 def read_meta_from_spec(self, spec = None):
1209 # scan for spec files
1210 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1211 if len(speclist) == 1:
1212 specfile = speclist[0]
1213 elif len(speclist) > 1:
1214 print 'the following specfiles were found:'
1215 for file in speclist:
1217 print 'please specify one with --specfile'
1220 print 'no specfile was found - please specify one ' \
1224 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1225 self.summary = data['Summary']
1226 self.url = data['Url']
1227 self.descr = data['%description']
1230 def update_package_meta(self, force=False):
1232 for the updatepacmetafromspec subcommand
1233 the force argument suppresses the confirmation question
1236 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1238 root = ET.fromstring(m)
1239 root.find('title').text = self.summary
1240 root.find('description').text = ''.join(self.descr)
1241 url = root.find('url')
1243 url = ET.SubElement(root, 'url')
1246 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1247 mf = metafile(u, ET.tostring(root))
1250 print '*' * 36, 'old', '*' * 36
1252 print '*' * 36, 'new', '*' * 36
1253 print ET.tostring(root)
1255 repl = raw_input('Write? (y/N/e) ')
1266 def mark_frozen(self):
1267 store_write_string(self.absdir, '_frozenlink', '')
1269 print "The link in this package is currently broken. Checking"
1270 print "out the last working version instead; please use 'osc pull'"
1271 print "to repair the link."
1274 def unmark_frozen(self):
1275 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1276 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1278 def latest_rev(self):
1279 if self.islinkrepair():
1280 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1281 elif self.islink() and self.isexpanded():
1282 if self.isfrozen() or self.ispulled():
1283 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1286 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1289 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1291 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1294 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1297 def update(self, rev = None, service_files = False):
1298 # save filelist and (modified) status before replacing the meta file
1299 saved_filenames = self.filenamelist
1300 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1303 self.update_local_filesmeta(rev)
1304 self = Package(self.dir, progress_obj=self.progress_obj)
1306 # which files no longer exist upstream?
1307 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1309 pathn = getTransActPath(self.dir)
1311 for filename in saved_filenames:
1312 if not filename.startswith('_service:') and filename in disappeared:
1313 print statfrmt('D', os.path.join(pathn, filename))
1314 # keep file if it has local modifications
1315 if oldp.status(filename) == ' ':
1316 self.delete_localfile(filename)
1317 self.delete_storefile(filename)
1319 for filename in self.filenamelist:
1321 state = self.status(filename)
1322 if not service_files and filename.startswith('_service:'):
1324 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1325 # no merge necessary... local file is changed, but upstream isn't
1327 elif state == 'M' and filename in saved_modifiedfiles:
1328 status_after_merge = self.mergefile(filename)
1329 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1331 self.updatefile(filename, rev)
1332 print statfrmt('U', os.path.join(pathn, filename))
1334 self.updatefile(filename, rev)
1335 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1337 self.updatefile(filename, rev)
1338 print statfrmt('A', os.path.join(pathn, filename))
1339 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1340 self.updatefile(filename, rev)
1341 self.delete_storefile(filename)
1342 print statfrmt('U', os.path.join(pathn, filename))
1346 self.update_local_pacmeta()
1348 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1349 print 'At revision %s.' % self.rev
1351 if not service_files:
1352 self.run_source_services()
1354 def run_source_services(self):
1355 if self.filenamelist.count('_service'):
1356 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1359 si.execute(self.absdir)
1361 def prepare_filelist(self):
1362 """Prepare a list of files, which will be processed by process_filelist
1363 method. This allows easy modifications of a file list in commit
1367 self.todo = self.filenamelist + self.filenamelist_unvers
1371 for f in [f for f in self.todo if not os.path.isdir(f)]:
1373 status = self.status(f)
1376 ret += "%s %s %s\n" % (action, status, f)
1379 # Edit a filelist for package \'%s\'
1381 # l, leave = leave a file as is
1382 # r, remove = remove a file
1383 # a, add = add a file
1385 # If you remove a file from the list, it will be left unchanged
1386 # If you remove all, commit will be aborted""" % self.name
1390 def edit_filelist(self):
1391 """Opens a package list in editor for eediting. This allows easy
1392 modifications of it just by simple text editing
1396 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1397 f = os.fdopen(fd, 'w')
1398 f.write(self.prepare_filelist())
1400 mtime_orig = os.stat(filename).st_mtime
1402 if sys.platform[:3] != 'win':
1403 editor = os.getenv('EDITOR', default='vim')
1405 editor = os.getenv('EDITOR', default='notepad')
1407 subprocess.call('%s %s' % (editor, filename), shell=True)
1408 mtime = os.stat(filename).st_mtime
1409 if mtime_orig < mtime:
1410 filelist = open(filename).readlines()
1414 raise oscerr.UserAbort()
1416 return self.process_filelist(filelist)
1418 def process_filelist(self, filelist):
1419 """Process a filelist - it add/remove or leave files. This depends on
1420 user input. If no file is processed, it raises an ValueError
1424 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1426 foo = line.split(' ')
1428 action, state, name = (foo[0], ' ', foo[3])
1430 action, state, name = (foo[0], foo[1], foo[2])
1433 action = action.lower()
1436 if action in ('r', 'remove'):
1437 if self.status(name) == '?':
1439 if name in self.todo:
1440 self.todo.remove(name)
1442 self.delete_file(name, True)
1443 elif action in ('a', 'add'):
1444 if self.status(name) != '?':
1445 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1448 elif action in ('l', 'leave'):
1451 raise ValueError("Unknow action `%s'" % action)
1454 raise ValueError("Empty filelist")
1457 """for objects to represent the review state in a request"""
1458 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1460 self.by_user = by_user
1461 self.by_group = by_group
1464 self.comment = comment
1467 """for objects to represent the "state" of a request"""
1468 def __init__(self, name=None, who=None, when=None, comment=None):
1472 self.comment = comment
1475 """represents an action"""
1476 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1478 self.src_project = src_project
1479 self.src_package = src_package
1480 self.src_rev = src_rev
1481 self.dst_project = dst_project
1482 self.dst_package = dst_package
1483 self.src_update = src_update
1486 """represent a request and holds its metadata
1487 it has methods to read in metadata from xml,
1488 different views, ..."""
1491 self.state = RequestState()
1494 self.last_author = None
1497 self.statehistory = []
1500 def read(self, root):
1501 self.reqid = int(root.get('id'))
1502 actions = root.findall('action')
1503 if len(actions) == 0:
1504 actions = [ root.find('submit') ] # for old style requests
1506 for action in actions:
1507 type = action.get('type', 'submit')
1509 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1510 if action.findall('source'):
1511 n = action.find('source')
1512 src_prj = n.get('project', None)
1513 src_pkg = n.get('package', None)
1514 src_rev = n.get('rev', None)
1515 if action.findall('target'):
1516 n = action.find('target')
1517 dst_prj = n.get('project', None)
1518 dst_pkg = n.get('package', None)
1519 if action.findall('options'):
1520 n = action.find('options')
1521 if n.findall('sourceupdate'):
1522 src_update = n.find('sourceupdate').text.strip()
1523 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1525 msg = 'invalid request format:\n%s' % ET.tostring(root)
1526 raise oscerr.APIError(msg)
1529 n = root.find('state')
1530 self.state.name, self.state.who, self.state.when \
1531 = n.get('name'), n.get('who'), n.get('when')
1533 self.state.comment = n.find('comment').text.strip()
1535 self.state.comment = None
1537 # read the review states
1538 for r in root.findall('review'):
1540 s.state = r.get('state')
1541 s.by_user = r.get('by_user')
1542 s.by_group = r.get('by_group')
1543 s.who = r.get('who')
1544 s.when = r.get('when')
1546 s.comment = r.find('comment').text.strip()
1549 self.reviews.append(s)
1551 # read the state history
1552 for h in root.findall('history'):
1554 s.name = h.get('name')
1555 s.who = h.get('who')
1556 s.when = h.get('when')
1558 s.comment = h.find('comment').text.strip()
1561 self.statehistory.append(s)
1562 self.statehistory.reverse()
1564 # read a description, if it exists
1566 n = root.find('description').text
1571 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1572 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1573 dst_prj, dst_pkg, src_update)
1576 def list_view(self):
1577 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1579 for a in self.actions:
1580 dst = "%s/%s" % (a.dst_project, a.dst_package)
1581 if a.src_package == a.dst_package:
1585 if a.type=="submit":
1586 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1587 if a.type=="change_devel":
1588 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1589 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1591 ret += '\n %s: %-50s %-20s ' % \
1592 (a.type, sr_source, dst)
1594 if self.statehistory and self.statehistory[0]:
1596 for h in self.statehistory:
1597 who.append("%s(%s)" % (h.who,h.name))
1599 ret += "\n From: %s" % (' -> '.join(who))
1601 txt = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]', '_', self.descr)  # Python re lacks POSIX [:isprint:]; strip control chars instead
1603 lines = txt.splitlines()
1604 wrapper = textwrap.TextWrapper( width = 80,
1605 initial_indent=' Descr: ',
1606 subsequent_indent=' ')
1607 ret += "\n" + wrapper.fill(lines[0])
1608 wrapper.initial_indent = ' '
1609 for line in lines[1:]:
1610 ret += "\n" + wrapper.fill(line)
1616 def __cmp__(self, other):
1617 return cmp(self.reqid, other.reqid)
1621 for action in self.actions:
1622 action_list=" %s: " % (action.type)
1623 if action.type=="submit":
1626 r="(r%s)" % (action.src_rev)
1628 if action.src_update:
1629 m="(%s)" % (action.src_update)
1630 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1631 if action.dst_package:
1632 action_list=action_list+"/%s" % ( action.dst_package )
1633 elif action.type=="delete":
1634 action_list=action_list+" %s" % ( action.dst_project )
1635 if action.dst_package:
1636 action_list=action_list+"/%s" % ( action.dst_package )
1637 elif action.type=="change_devel":
1638 action_list=action_list+" %s/%s developed in %s/%s" % \
1639 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1640 action_list=action_list+"\n"
1655 self.state.name, self.state.when, self.state.who,
1658 if len(self.reviews):
1659 reviewitems = [ '%-10s %s %s %s %s %s' \
1660 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1661 for i in self.reviews ]
1662 s += '\nReview: ' + '\n '.join(reviewitems)
1665 if len(self.statehistory):
1666 histitems = [ '%-10s %s %s' \
1667 % (i.name, i.when, i.who) \
1668 for i in self.statehistory ]
1669 s += '\nHistory: ' + '\n '.join(histitems)
1676 """format time as Apr 02 18:19
1678 depending on whether it is in the current year
1682 if time.localtime()[0] == time.localtime(t)[0]:
1684 return time.strftime('%b %d %H:%M',time.localtime(t))
1686 return time.strftime('%b %d %Y',time.localtime(t))
1689 def is_project_dir(d):
1690 return os.path.exists(os.path.join(d, store, '_project')) and not \
1691 os.path.exists(os.path.join(d, store, '_package'))
1694 def is_package_dir(d):
1695 return os.path.exists(os.path.join(d, store, '_project')) and \
1696 os.path.exists(os.path.join(d, store, '_package'))
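# In other words: a project working copy carries only .osc/_project, while a
# package working copy carries both .osc/_project and .osc/_package.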
1698 def parse_disturl(disturl):
1699 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1700 revision), else raises an oscerr.WrongArgs exception
1703 m = DISTURL_RE.match(disturl)
1705 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1707 apiurl = m.group('apiurl')
1708 if apiurl.split('.')[0] != 'api':
1709 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1710 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
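# Illustrative example (host, project and md5 are made up): a disturl such as
#   obs://build.opensuse.org/openSUSE:Tools/openSUSE_11.1/0123456789abcdef0123456789abcdef-osc
# would be returned as
#   ('https://api.opensuse.org', 'openSUSE:Tools', 'osc', 'openSUSE_11.1',
#    '0123456789abcdef0123456789abcdef')
# note how the build host is rewritten to the corresponding api host.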
1712 def parse_buildlogurl(buildlogurl):
1713 """Parse a build log url, returns a tuple (apiurl, project, package,
1714 repository, arch), else raises oscerr.WrongArgs exception"""
1716 global BUILDLOGURL_RE
1718 m = BUILDLOGURL_RE.match(buildlogurl)
1720 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1722 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
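# Illustrative example (values made up): a build log url such as
#   https://api.opensuse.org/build/openSUSE:Tools/openSUSE_11.1/x86_64/osc/_log
# would be returned as
#   ('https://api.opensuse.org', 'openSUSE:Tools', 'osc', 'openSUSE_11.1', 'x86_64')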
1725 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1726 This is handy for copy/pasting a project/package combination in this form.
1728 Trailing slashes are removed before the split, because the split would
1729 otherwise give an additional empty string.
1737 def expand_proj_pack(args, idx=0, howmany=0):
1738 """looks for occurance of '.' at the position idx.
1739 If howmany is 2, both proj and pack are expanded together
1740 using the current directory, or none of them, if not possible.
1741 If howmany is 0, proj is expanded if possible, then, if there
1742 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1743 expanded, if possible.
1744 If howmany is 1, only proj is expanded if possible.
1746 If args[idx] does not exist, an implicit '.' is assumed.
1747 If not enough elements up to idx exist, an error is raised.
1749 See also parseargs(args), slash_split(args), findpacs(args)
1750 All these need unification, somehow.
1753 # print args,idx,howmany
1756 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1758 if len(args) == idx:
1760 if args[idx+0] == '.':
1761 if howmany == 0 and len(args) > idx+1:
1762 if args[idx+1] == '.':
1764 # remove one dot and make sure to expand both proj and pack
1769 # print args,idx,howmany
1771 args[idx+0] = store_read_project('.')
1774 package = store_read_package('.')
1775 args.insert(idx+1, package)
1779 package = store_read_package('.')
1780 args.insert(idx+1, package)
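# Illustrative behaviour per the docstring above (assuming the current directory
# is a checkout of project 'prj', package 'pkg'; names are made up):
#   expand_proj_pack(['.'])            -> ['prj', 'pkg']
#   expand_proj_pack(['.', 'other'])   -> ['prj', 'other']
#   expand_proj_pack(['.'], howmany=1) -> ['prj']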
1784 def findpacs(files, progress_obj=None):
1785 """collect Package objects belonging to the given files
1786 and make sure each Package is returned only once"""
1789 p = filedir_to_pac(f, progress_obj)
1792 if i.name == p.name:
1802 def filedir_to_pac(f, progress_obj=None):
1803 """Takes a working copy path, or a path to a file inside a working copy,
1804 and returns a Package object instance
1806 If the argument was a filename, add it onto the "todo" list of the Package """
1808 if os.path.isdir(f):
1810 p = Package(wd, progress_obj=progress_obj)
1812 wd = os.path.dirname(f) or os.curdir
1813 p = Package(wd, progress_obj=progress_obj)
1814 p.todo = [ os.path.basename(f) ]
1818 def read_filemeta(dir):
1820 r = ET.parse(os.path.join(dir, store, '_files'))
1821 except SyntaxError, e:
1822 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1823 'When parsing .osc/_files, the following error was encountered:\n'
1828 def read_tobedeleted(dir):
1830 fname = os.path.join(dir, store, '_to_be_deleted')
1832 if os.path.exists(fname):
1833 r = [ line.strip() for line in open(fname) ]
1838 def read_inconflict(dir):
1840 fname = os.path.join(dir, store, '_in_conflict')
1842 if os.path.exists(fname):
1843 r = [ line.strip() for line in open(fname) ]
1848 def parseargs(list_of_args):
1849 """Convenience method osc's commandline argument parsing.
1851 If called with an empty tuple (or list), return a list containing the current directory.
1852 Otherwise, return a list of the arguments."""
1854 return list(list_of_args)
1859 def statfrmt(statusletter, filename):
1860 return '%s %s' % (statusletter, filename)
1863 def pathjoin(a, *p):
1864 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1865 path = os.path.join(a, *p)
1866 if path.startswith('./'):
1871 def makeurl(baseurl, l, query=[]):
1872 """Given a list of path compoments, construct a complete URL.
1874 Optional parameters for a query string can be given as a list, as a
1875 dictionary, or as an already assembled string.
1876 In case of a dictionary, the parameters will be urlencoded by this
1877 function. In case of a list they will not be -- this is for backwards compatibility.
1880 if conf.config['verbose'] > 1:
1881 print 'makeurl:', baseurl, l, query
1883 if type(query) == type(list()):
1884 query = '&'.join(query)
1885 elif type(query) == type(dict()):
1886 query = urlencode(query)
1888 scheme, netloc = urlsplit(baseurl)[0:2]
1889 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
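# Illustrative example (host, project and package are made up):
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Tools', 'osc'],
#           query={'rev': 'latest'})
# returns 'https://api.opensuse.org/source/openSUSE:Tools/osc?rev=latest'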
1892 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1893 """wrapper around urllib2.urlopen for error handling,
1894 and to support additional (PUT, DELETE) methods"""
1898 if conf.config['http_debug']:
1901 print '--', method, url
1903 if method == 'POST' and not file and not data:
1904 # adding data to an urllib2 request transforms it into a POST
1907 req = urllib2.Request(url)
1908 api_host_options = {}
1910 api_host_options = conf.get_apiurl_api_host_options(url)
1911 for header, value in api_host_options['http_headers']:
1912 req.add_header(header, value)
1914 # "external" request (url is no apiurl)
1917 req.get_method = lambda: method
1919 # POST requests are application/x-www-form-urlencoded by default
1920 # since we change the request into PUT, we also need to adjust the content type header
1921 if method == 'PUT' or (method == 'POST' and data):
1922 req.add_header('Content-Type', 'application/octet-stream')
1924 if type(headers) == type({}):
1925 for i in headers.keys():
1927 req.add_header(i, headers[i])
1929 if file and not data:
1930 size = os.path.getsize(file)
1932 data = open(file, 'rb').read()
1935 filefd = open(file, 'rb')
1937 if sys.platform[:3] != 'win':
1938 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1940 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1942 except EnvironmentError, e:
1944 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1945 '\non a filesystem which does not support this.' % (e, file))
1946 elif hasattr(e, 'winerror') and e.winerror == 5:
1947 # falling back to the default io
1948 data = open(file, 'rb').read()
1952 if conf.config['debug']: print method, url
1954 old_timeout = socket.getdefaulttimeout()
1955 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1956 if old_timeout != timeout and not api_host_options.get('sslcertck'):
1957 socket.setdefaulttimeout(timeout)
1959 fd = urllib2.urlopen(req, data=data)
1961 if old_timeout != timeout and not api_host_options.get('sslcertck'):
1962 socket.setdefaulttimeout(old_timeout)
1963 if hasattr(conf.cookiejar, 'save'):
1964 conf.cookiejar.save(ignore_discard=True)
1966 if filefd: filefd.close()
1971 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1972 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1973 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1974 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
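# Typical usage of these wrappers (illustrative; mirrors meta_get_packagelist()
# below):
#   u = makeurl(apiurl, ['source', prj])
#   f = http_GET(u)
#   root = ET.parse(f).getroot()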
1977 def init_project_dir(apiurl, dir, project):
1978 if not os.path.exists(dir):
1979 if conf.config['checkout_no_colon']:
1980 os.makedirs(dir) # helpful with checkout_no_colon
1983 if not os.path.exists(os.path.join(dir, store)):
1984 os.mkdir(os.path.join(dir, store))
1986 # print 'project=',project,' dir=',dir
1987 store_write_project(dir, project)
1988 store_write_apiurl(dir, apiurl)
1989 if conf.config['do_package_tracking']:
1990 store_write_initial_packages(dir, project, [])
1992 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
1993 if not os.path.isdir(store):
1996 f = open('_project', 'w')
1997 f.write(project + '\n')
1999 f = open('_package', 'w')
2000 f.write(package + '\n')
2004 f = open('_files', 'w')
2005 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2009 ET.ElementTree(element=ET.Element('directory')).write('_files')
2011 f = open('_osclib_version', 'w')
2012 f.write(__store_version__ + '\n')
2015 store_write_apiurl(os.path.pardir, apiurl)
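# After init_package_dir() the store directory (.osc) of a fresh checkout
# contains, among others, _project, _package, _files, _osclib_version and the
# stored apiurl.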
2021 def check_store_version(dir):
2022 versionfile = os.path.join(dir, store, '_osclib_version')
2024 v = open(versionfile).read().strip()
2029 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2030 if os.path.exists(os.path.join(dir, '.svn')):
2031 msg = msg + '\nTry svn instead of osc.'
2032 raise oscerr.NoWorkingCopy(msg)
2034 if v != __store_version__:
2035 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2036 # version is fine, no migration needed
2037 f = open(versionfile, 'w')
2038 f.write(__store_version__ + '\n')
2041 msg = 'The osc metadata of your working copy "%s"' % dir
2042 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2043 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2044 raise oscerr.WorkingCopyWrongVersion, msg
2047 def meta_get_packagelist(apiurl, prj):
2049 u = makeurl(apiurl, ['source', prj])
2051 root = ET.parse(f).getroot()
2052 return [ node.get('name') for node in root.findall('entry') ]
2055 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2056 """return a list of file names,
2057 or a list of File() instances if verbose=True"""
2063 query['rev'] = revision
2065 query['rev'] = 'latest'
2067 u = makeurl(apiurl, ['source', prj, package], query=query)
2069 root = ET.parse(f).getroot()
2072 return [ node.get('name') for node in root.findall('entry') ]
2076 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2077 rev = root.get('rev')
2078 for node in root.findall('entry'):
2079 f = File(node.get('name'),
2081 int(node.get('size')),
2082 int(node.get('mtime')))
2088 def meta_get_project_list(apiurl):
2089 u = makeurl(apiurl, ['source'])
2091 root = ET.parse(f).getroot()
2092 return sorted([ node.get('name') for node in root ])
2095 def show_project_meta(apiurl, prj):
2096 url = makeurl(apiurl, ['source', prj, '_meta'])
2098 return f.readlines()
2101 def show_project_conf(apiurl, prj):
2102 url = makeurl(apiurl, ['source', prj, '_config'])
2104 return f.readlines()
2107 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2108 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2112 except urllib2.HTTPError, e:
2113 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2117 def show_package_meta(apiurl, prj, pac):
2118 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2121 return f.readlines()
2122 except urllib2.HTTPError, e:
2123 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2127 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2129 path.append('source')
2135 path.append('_attribute')
2137 path.append(attribute)
2140 query.append("with_default=1")
2142 query.append("with_project=1")
2143 url = makeurl(apiurl, path, query)
2146 return f.readlines()
2147 except urllib2.HTTPError, e:
2148 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
def show_develproject(apiurl, prj, pac):
    m = show_package_meta(apiurl, prj, pac)
        return ET.fromstring(''.join(m)).find('devel').get('project')

def show_pattern_metalist(apiurl, prj):
    url = makeurl(apiurl, ['source', prj, '_pattern'])
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
    r = [ node.get('name') for node in tree.getroot() ]

def show_pattern_meta(apiurl, prj, pattern):
    url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
    """metafile that can be manipulated and is stored back after manipulation."""
    def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
        self.change_is_required = change_is_required
        (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
        f = os.fdopen(fd, 'w')
        f.write(''.join(input))
        self.hash_orig = dgst(self.filename)

        hash = dgst(self.filename)
        if self.change_is_required and hash == self.hash_orig:
            print 'File unchanged. Not saving.'
            os.unlink(self.filename)

        print 'Sending meta data...'
        # don't do any exception handling... it's up to the caller what to do in case
        http_PUT(self.url, file=self.filename)
        os.unlink(self.filename)

        if sys.platform[:3] != 'win':
            editor = os.getenv('EDITOR', default='vim')
            editor = os.getenv('EDITOR', default='notepad')
                subprocess.call('%s %s' % (editor, self.filename), shell=True)
                except urllib2.HTTPError, e:
                    error_help = "%d" % e.code
                    if e.headers.get('X-Opensuse-Errorcode'):
                        error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)

                    print >>sys.stderr, 'BuildService API error:', error_help
                    # examine the error - we can't raise an exception because we might want
                    if '<summary>' in data:
                        print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
                    input = raw_input('Try again? ([y/N]): ')
                    if input not in ['y', 'Y']:

        if os.path.exists(self.filename):
            print 'discarding %s' % self.filename
            os.unlink(self.filename)
# different types of metadata
metatypes = { 'prj': { 'path': 'source/%s/_meta',
                       'template': new_project_templ,
              'pkg': { 'path' : 'source/%s/%s/_meta',
                       'template': new_package_templ,
              'attribute': { 'path' : 'source/%s/%s/_meta',
                       'template': new_attribute_templ,
              'prjconf': { 'path': 'source/%s/_config',
              'user': { 'path': 'person/%s',
                       'template': new_user_template,
              'pattern': { 'path': 'source/%s/_pattern/%s',
                       'template': new_pattern_template,

def meta_exists(metatype,
        apiurl = conf.config['apiurl']
    url = make_meta_url(metatype, path_args, apiurl)
        data = http_GET(url).readlines()
    except urllib2.HTTPError, e:
        if e.code == 404 and create_new:
            data = metatypes[metatype]['template']
                data = StringIO(data % template_args).readlines()

def make_meta_url(metatype, path_args=None, apiurl=None):
        apiurl = conf.config['apiurl']
    if metatype not in metatypes.keys():
        raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
    path = metatypes[metatype]['path']
        path = path % path_args

    return makeurl(apiurl, [path])
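
# Illustrative usage sketch, not part of the original module: resolving the
# metadata URL of a package with make_meta_url() and fetching the current meta
# with meta_exists(). The keyword arguments mirror the calls made further down
# in this module; the project and package names are placeholders.
def _example_fetch_pkg_meta(prj='openSUSE:Tools', pac='osc'):
    apiurl = conf.config['apiurl']
    url = make_meta_url('pkg', (quote_plus(prj), quote_plus(pac)), apiurl)
    print url
    data = meta_exists(metatype='pkg',
                       path_args=(quote_plus(prj), quote_plus(pac)),
                       create_new=False, apiurl=apiurl)
    print ''.join(data)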
def edit_meta(metatype,
              change_is_required=False,
        apiurl = conf.config['apiurl']
        data = meta_exists(metatype,
                           create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
        change_is_required = True

    url = make_meta_url(metatype, path_args, apiurl)
    f = metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
        query['rev'] = revision
        query['rev'] = 'latest'
        query['linkrev'] = linkrev
    elif conf.config['linkcontrol']:
        query['linkrev'] = 'base'
        query['emptylink'] = 1
    f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
    return f.readlines()

def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
    m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
    return ET.fromstring(''.join(m)).get('srcmd5')

def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
    m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
        # only source link packages have a <linkinfo> element.
        li_node = ET.fromstring(''.join(m)).find('linkinfo')

        raise oscerr.LinkExpandError(prj, pac, li.error)

def show_upstream_rev(apiurl, prj, pac):
    m = show_files_meta(apiurl, prj, pac)
    return ET.fromstring(''.join(m)).get('rev')
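
# Illustrative usage sketch, not part of the original module: querying the
# latest revision and the srcmd5 of a package through the file-list helpers
# above. Project and package names are placeholders.
def _example_show_revisions(apiurl='https://api.opensuse.org', prj='openSUSE:Tools', pac='osc'):
    print 'rev:    %s' % show_upstream_rev(apiurl, prj, pac)
    print 'srcmd5: %s' % show_upstream_srcmd5(apiurl, prj, pac, expand=True)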
def read_meta_from_spec(specfile, *args):
    import codecs, locale, re
    """
    Read tags and sections from spec file. To read out
    a tag the passed argument mustn't end with a colon. To
    read out a section the passed argument must start with
    a '%'.
    This method returns a dictionary which contains the
    requested data.
    """

    if not os.path.isfile(specfile):
        raise IOError('\'%s\' is not a regular file' % specfile)

        lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
    except UnicodeDecodeError:
        lines = open(specfile).readlines()

        if itm.startswith('%'):
            sections.append(itm)

    tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
        m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
        if m and m.group('val'):
            spec_data[tag] = m.group('val').strip()
            print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag

    section_pat = '^%s\s*?$'
    for section in sections:
        m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
            start = lines.index(m.group()+'\n') + 1
            print >>sys.stderr, 'error - section \'%s\' does not exist' % section
        for line in lines[start:]:
            if line.startswith('%'):
        spec_data[section] = data
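
# Illustrative usage sketch, not part of the original module: reading the Name
# and Version tags plus the %description section from a spec file, following
# the convention described in the docstring above (tags without the trailing
# colon, sections with their leading '%'). The path is a placeholder.
def _example_read_spec(specfile='/tmp/example.spec'):
    data = read_meta_from_spec(specfile, 'Name', 'Version', '%description')
    print data['Name'], data['Version']
    # a section is returned as a list of lines
    print ''.join(data['%description'])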
def edit_message(footer='', template=''):
    delim = '--This line, and those below, will be ignored--\n'
    (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
    f = os.fdopen(fd, 'w')

    if sys.platform[:3] != 'win':
        editor = os.getenv('EDITOR', default='vim')
        editor = os.getenv('EDITOR', default='notepad')
            subprocess.call('%s %s' % (editor, filename), shell=True)
            msg = open(filename).read().split(delim)[0].rstrip()
                input = raw_input('Log message not specified\n'
                                  'a)bort, c)ontinue, e)dit: ')
                    raise oscerr.UserAbort()

def create_delete_request(apiurl, project, package, message):
        package = """package="%s" """ % (package)
    <action type="delete">
        <target project="%s" %s/>
    <description>%s</description>
""" % (project, package,
       cgi.escape(message or ''))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')
def create_change_devel_request(apiurl,
                                devel_project, devel_package,
    <action type="change_devel">
        <source project="%s" package="%s" />
        <target project="%s" package="%s" />
    <description>%s</description>
""" % (devel_project,
       cgi.escape(message or ''))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')

# This creates an old style submit request for server api 1.0
def create_submit_request(apiurl,
                          src_project, src_package,
                          dst_project=None, dst_package=None,
                          message=None, orev=None, src_update=None):
        options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)

    # Yes, this kind of xml construction is horrible
            packagexml = """package="%s" """ %( dst_package )
        targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
    # XXX: keep the old template for now in order to work with old obs instances
<request type="submit">
        <source project="%s" package="%s" rev="%s"/>
    <description>%s</description>
       orev or show_upstream_rev(apiurl, src_project, src_package),
       cgi.escape(message or ""))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')
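
# Illustrative usage sketch, not part of the original module: filing a submit
# request from a home branch back to the original project with
# create_submit_request(). All project and package names are placeholders; the
# function returns the id of the newly created request.
def _example_submit_request(apiurl='https://api.opensuse.org'):
    reqid = create_submit_request(apiurl,
                                  'home:someuser:branches:openSUSE:Tools', 'osc',
                                  'openSUSE:Tools', 'osc',
                                  message='update to latest git snapshot')
    print 'created request %s' % reqid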
def get_request(apiurl, reqid):
    u = makeurl(apiurl, ['request', reqid])
    root = ET.parse(f).getroot()

def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
                query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
    f = http_POST(u, data=message)

def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
                query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
    f = http_POST(u, data=message)

def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
    if not 'all' in req_state:
        for state in req_state:
            xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
        xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')

    # XXX: we cannot use the '|' in the xpath expression because it is not supported
        todo['project'] = project
        todo['package'] = package
    for kind, val in todo.iteritems():
        xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
                                  'action/source/@%(kind)s=\'%(val)s\' or ' \
                                  'submit/target/@%(kind)s=\'%(val)s\' or ' \
                                  'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
        xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
    for i in exclude_target_projects:
        xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
                                  'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')

    if conf.config['verbose'] > 1:
        print '[ %s ]' % xpath
    res = search(apiurl, request=xpath)
    collection = res['request']
    for root in collection.findall('request'):
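
# Illustrative usage sketch, not part of the original module: listing open
# ('new') requests that target a given project, assuming get_request_list()
# returns the Request objects built in the (elided) loop body above. The
# project name is a placeholder.
def _example_list_new_requests(apiurl='https://api.opensuse.org', prj='openSUSE:Tools'):
    for req in get_request_list(apiurl, project=prj, req_state=('new',)):
        print req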
def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
    """Return all new requests for all projects/packages where the user is involved"""
        res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
        for i in res['project_id'].findall('project'):
            projpkgs[i.get('name')] = []
        for i in res['package_id'].findall('package'):
            if not i.get('project') in projpkgs.keys():
                projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
    for prj, pacs in projpkgs.iteritems():
            xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
                xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
            xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
            xpath = xpath_join(xpath, xp, inner=True)
        xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
    if not 'all' in req_state:
        for state in req_state:
            xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
        xpath = xpath_join(xp, '(%s)' % xpath, op='and')
    res = search(apiurl, request=xpath)
    for root in res['request'].findall('request'):
def get_request_log(apiurl, reqid):
    r = get_request(apiurl, reqid)
    frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
    # the description of the request is used for the initial log entry
    # otherwise its comment attribute would contain None
    if len(r.statehistory) >= 1:
        r.statehistory[-1].comment = r.descr
        r.state.comment = r.descr
    for state in [ r.state ] + r.statehistory:
        s = frmt % (state.name, state.who, state.when, str(state.comment))

def get_user_meta(apiurl, user):
    u = makeurl(apiurl, ['person', quote_plus(user)])
        return ''.join(f.readlines())
    except urllib2.HTTPError:
        print 'user \'%s\' not found' % user

def get_user_data(apiurl, user, *tags):
    """get specified tags from the user meta"""
    meta = get_user_meta(apiurl, user)
        root = ET.fromstring(meta)
                if root.find(tag).text is not None:
                    data.append(root.find(tag).text)
            except AttributeError:
                # this part is reached if the tags tuple contains an invalid tag
                print 'The xml file for user \'%s\' seems to be broken' % user
def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
    import tempfile, shutil
        query = { 'rev': revision }
    (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
    o = os.fdopen(fd, 'wb')
    u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
    for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
    shutil.move(tmpfile, targetfilename or filename)
    os.chmod(targetfilename or filename, 0644)
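
# Illustrative usage sketch, not part of the original module: downloading a
# single source file of a package into the current working directory. The
# project, package and file names are placeholders.
def _example_download_source_file(apiurl='https://api.opensuse.org'):
    get_source_file(apiurl, 'openSUSE:Tools', 'osc', 'osc.spec',
                    targetfilename='osc.spec')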
def get_binary_file(apiurl, prj, repo, arch,
                    target_filename = None,
                    target_mtime = None,
                    progress_meter = False):
    target_filename = target_filename or filename

    where = package or '_repository'
    u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])

        sys.stdout.write("Downloading %s [ 0%%]" % filename)

    binsize = int(f.headers['content-length'])

    (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
    os.chmod(tmpfilename, 0644)

        o = os.fdopen(fd, 'wb')
            #buf = f.read(BUFSIZE)
            downloaded += len(buf)
                completion = str(int((float(downloaded)/binsize)*100))
                sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
            sys.stdout.write('\n')

        shutil.move(tmpfilename, target_filename)
            os.utime(target_filename, (-1, target_mtime))

    # make sure that the temp file is cleaned up when we are interrupted
        try: os.unlink(tmpfilename)
def dgst_from_string(str):
    # Python 2.5 deprecates the md5 module
    # Python 2.4 doesn't have hashlib yet
        md5_hash = hashlib.md5()
        md5_hash = md5.new()
    md5_hash.update(str)
    return md5_hash.hexdigest()
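
# Illustrative usage sketch, not part of the original module: computing the md5
# digest of an in-memory string with dgst_from_string(), which falls back from
# hashlib (Python >= 2.5) to the old md5 module (Python 2.4).
def _example_digest_string():
    print dgst_from_string('hello world')   # 5eb63bbbe01eeed093cb22bb8f5acdc3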
    #if not os.path.exists(file):

    f = open(file, 'rb')
        buf = f.read(BUFSIZE)
    return s.hexdigest()
    """return true if a string is binary data using diff's heuristic"""
    if s and '\0' in s[:4096]:

def binary_file(fn):
    """read 4096 bytes from a file named fn, and call binary() on the data"""
    return binary(open(fn, 'rb').read(4096))
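
# Illustrative usage sketch, not part of the original module: the binary()
# heuristic treats data as binary as soon as a NUL byte occurs in the first
# 4096 bytes, mirroring diff's behaviour.
def _example_binary_check():
    print binary('plain text\n')           # False
    print binary('PK\x03\x04\x00\x00')     # True (contains a NUL byte)
    print binary_file('/bin/sh')           # True on a typical system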
def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
    """
    This method diffs oldfilename against filename (so filename will
    be shown as the new file).
    The variable origfilename is used if filename and oldfilename differ
    in their names (for instance if a tempfile is used for filename etc.)
    """
        oldfilename = filename

        olddir = os.path.join(dir, store)

    if not origfilename:
        origfilename = filename

    file1 = os.path.join(olddir, oldfilename)   # old/stored original
    file2 = os.path.join(dir, filename)         # working copy

    f1 = open(file1, 'rb')

    f2 = open(file2, 'rb')

    if binary(s1) or binary(s2):
        d = ['Binary file %s has changed\n' % origfilename]

        d = difflib.unified_diff(\
            fromfile = '%s\t(revision %s)' % (origfilename, rev), \
            tofile = '%s\t(working copy)' % origfilename)

        # if file doesn't end with newline, we need to append one in the diff result
        for i, line in enumerate(d):
            if not line.endswith('\n'):
                d[i] += '\n\\ No newline at end of file'
def make_diff(wc, revision):
    diff_hdr = 'Index: %s\n'
    diff_hdr += '===================================================================\n'
    olddir = os.getcwd()
            for file in wc.todo:
                if file in wc.filenamelist+wc.filenamelist_unvers:
                    state = wc.status(file)
                        added_files.append(file)
                        removed_files.append(file)
                    elif state == 'M' or state == 'C':
                        changed_files.append(file)
                    diff.append('osc: \'%s\' is not under version control' % file)
            for file in wc.filenamelist+wc.filenamelist_unvers:
                state = wc.status(file)
                if state == 'M' or state == 'C':
                    changed_files.append(file)
                    added_files.append(file)
                    removed_files.append(file)
        tmpdir = tempfile.mkdtemp(str(revision), wc.name)
        init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
        cmp_pac = Package(tmpdir)
            for file in wc.todo:
                if file in cmp_pac.filenamelist:
                    if file in wc.filenamelist:
                        changed_files.append(file)
                        diff.append('osc: \'%s\' is not under version control' % file)
                    diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
            changed_files, added_files, removed_files = wc.comparePac(cmp_pac)

    for file in changed_files:
        diff.append(diff_hdr % file)
            diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
            cmp_pac.updatefile(file, revision)
            diff.append(get_source_file_diff(wc.absdir, file, revision, file,
                                             cmp_pac.absdir, file))
    (fd, tmpfile) = tempfile.mkstemp()
    for file in added_files:
        diff.append(diff_hdr % file)
            diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
                                             os.path.dirname(tmpfile), file))
            diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
                                             os.path.dirname(tmpfile), file))

    # FIXME: this is ugly but it cannot be avoided atm
    #        if a file is deleted via "osc rm file" we should keep the storefile.
    if cmp_pac == None and removed_files:
        tmpdir = tempfile.mkdtemp()
        init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
        tmp_pac = Package(tmpdir)

    for file in removed_files:
        diff.append(diff_hdr % file)
            tmp_pac.updatefile(file, tmp_pac.rev)
            diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                             wc.rev, file, tmp_pac.storedir, file))
            cmp_pac.updatefile(file, revision)
            diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                             revision, file, cmp_pac.storedir, file))

        delete_dir(cmp_pac.absdir)
        delete_dir(tmp_pac.absdir)
def server_diff(apiurl,
                old_project, old_package, old_revision,
                new_project, new_package, new_revision, unified=False, missingok=False):
    query = {'cmd': 'diff', 'expand': '1'}
        query['oproject'] = old_project
        query['opackage'] = old_package
        query['orev'] = old_revision
        query['rev'] = new_revision
        query['unified'] = 1
        query['missingok'] = 1

    u = makeurl(apiurl, ['source', new_project, new_package], query=query)
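
# Illustrative usage sketch, not part of the original module: requesting a
# server-side unified diff between two revisions of a package, assuming the
# function returns the server's response as plain text. Names and revision
# numbers are placeholders.
def _example_server_diff(apiurl='https://api.opensuse.org'):
    print server_diff(apiurl,
                      'openSUSE:Tools', 'osc', '1',
                      'openSUSE:Tools', 'osc', '2', unified=True)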
def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
    """
    creates the plain directory structure for a package dir.
    The 'apiurl' parameter is needed for the project dir initialization.
    The 'project' and 'package' parameters specify the name of the
    project and the package. The optional 'pathname' parameter is used
    for printing out the message that a new dir was created (default: 'prj_dir/package').
    The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
    """
    prj_dir = prj_dir or project

    # FIXME: carefully test each path component of prj_dir,
    # if we have a .osc/_files entry at that level.
    # -> if so, we have a package/project clash,
    # and should rename this path component by appending '.proj'
    # and give user a warning message, to discourage such clashes

    pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
    if is_package_dir(prj_dir):
        # we want this to become a project directory,
        # but it already is a package directory.
        raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')

    if not is_project_dir(prj_dir):
        # this directory could exist as a parent directory for one of our earlier
        # checked out sub-projects. in this case, we still need to initialize it.
        print statfrmt('A', prj_dir)
        init_project_dir(apiurl, prj_dir, project)

    if is_project_dir(os.path.join(prj_dir, package)):
        # the thing exists, but is a project directory and not a package directory
        # FIXME: this should be a warning message to discourage package/project clashes
        raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')

    if not os.path.exists(os.path.join(prj_dir, package)):
        print statfrmt('A', pathname)
        os.mkdir(os.path.join(prj_dir, package))
        os.mkdir(os.path.join(prj_dir, package, store))

    return os.path.join(prj_dir, package)
def checkout_package(apiurl, project, package,
                     revision=None, pathname=None, prj_obj=None,
                     expand_link=False, prj_dir=None, service_files=None, progress_obj=None):
        # the project we're in might be deleted.
        # that'll throw an error then.
        olddir = os.getcwd()
        olddir = os.environ.get("PWD")

        if sys.platform[:3] == 'win':
            prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
            if conf.config['checkout_no_colon']:
                prj_dir = prj_dir.replace(':', '/')

        pathname = getTransActPath(os.path.join(prj_dir, package))

    # before we create directories and stuff, check if the package actually
    show_package_meta(apiurl, project, package)

        # try to read from the linkinfo
        # if it is a link we use the xsrcmd5 as the revision to be
            x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
            x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')

    os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
    init_package_dir(apiurl, project, package, store, revision)
    p = Package(package, progress_obj=progress_obj)

    for filename in p.filenamelist:
        if service_files or not filename.startswith('_service:'):
            p.updatefile(filename, revision)
            # print 'A ', os.path.join(project, package, filename)
            print statfrmt('A', os.path.join(pathname, filename))
    if conf.config['do_package_tracking']:
        # check if we can re-use an existing project object
            prj_obj = Project(os.getcwd())
        prj_obj.set_state(p.name, ' ')
        prj_obj.write_packages()
def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
                     dst_userid = None, keep_develproject = False):
    """
    update pkgmeta with new_name and new_prj and set the calling user as the
    only maintainer (unless keep_maintainers is set). Additionally remove the
    develproject entry (<devel />) unless keep_develproject is true.
    """
    root = ET.fromstring(''.join(pkgmeta))
    root.set('name', new_name)
    root.set('project', new_prj)
    if not keep_maintainers:
        for person in root.findall('person'):
    if not keep_develproject:
        for dp in root.findall('devel'):
    return ET.tostring(root)
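
# Illustrative usage sketch, not part of the original module: rewriting a
# package's meta for a copy under a different name and project with
# replace_pkg_meta(). Names are placeholders; the result is an XML string
# suitable for a _meta PUT.
def _example_rewrite_meta(apiurl='https://api.opensuse.org'):
    src_meta = show_package_meta(apiurl, 'openSUSE:Tools', 'osc')
    new_meta = replace_pkg_meta(src_meta, 'osc-copy', 'home:someuser')
    print new_meta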
def link_to_branch(apiurl, project, package):
    """
    convert a package with a _link + project.diff to a branch
    """
    if '_link' in meta_get_filelist(apiurl, project, package):
        u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
        raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
    """
    create a linked package
     - "src" is the original package
     - "dst" is the "link" package that we are creating here
    """
        dst_meta = meta_exists(metatype='pkg',
                               path_args=(quote_plus(dst_project), quote_plus(dst_package)),
                               create_new=False, apiurl=conf.config['apiurl'])
        src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
        dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

        root = ET.fromstring(''.join(dst_meta))
        elm = root.find('publish')
            elm = ET.SubElement(root, 'publish')
        ET.SubElement(elm, 'disable')
        dst_meta = ET.tostring(root)
                  path_args=(dst_project, dst_package),
    # create the _link file
    # but first, make sure not to overwrite an existing one
    if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
            print >>sys.stderr, 'forced overwrite of existing _link file'
            print >>sys.stderr, '_link file already exists...! Aborting'

        rev = 'rev="%s"' % rev

        cicount = 'cicount="%s"' % cicount

    print 'Creating _link...',
    link_template = """\
<link project="%s" package="%s" %s %s>
  <!-- <apply name="patch" /> apply a patch on the source directory -->
  <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
  <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
  <!-- <delete>filename</delete> delete a file -->
""" % (src_project, src_package, rev, cicount)

    u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
    http_PUT(u, data=link_template)
def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
    """
     - "src" is the original package
     - "dst" is the "aggregate" package that we are creating here
     - "map" is a dictionary SRC => TARGET repository mappings
    """
        dst_meta = meta_exists(metatype='pkg',
                               path_args=(quote_plus(dst_project), quote_plus(dst_package)),
                               create_new=False, apiurl=conf.config['apiurl'])
        src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
        dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

        root = ET.fromstring(''.join(dst_meta))
        elm = root.find('publish')
            elm = ET.SubElement(root, 'publish')
        ET.SubElement(elm, 'disable')
        dst_meta = ET.tostring(root)
                  path_args=(dst_project, dst_package),

    # create the _aggregate file
    # but first, make sure not to overwrite an existing one
    if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
        print >>sys.stderr, '_aggregate file already exists...! Aborting'

    print 'Creating _aggregate...',
    aggregate_template = """\
  <aggregate project="%s">
    for tgt, src in repo_map.iteritems():
        aggregate_template += """\
    <repository target="%s" source="%s" />
    aggregate_template += """\
    <package>%s</package>
""" % (src_package)

    u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
    http_PUT(u, data=aggregate_template)
def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
    """
    Branch packages defined via attributes (via API call)
    """
    query = { 'cmd': 'branch' }
    query['attribute'] = attribute
        query['target_project'] = targetproject
        query['package'] = package
    if maintained_update_project_attribute:
        query['update_project_attribute'] = maintained_update_project_attribute

    u = makeurl(apiurl, ['source'], query=query)
    except urllib2.HTTPError, e:
        msg = ''.join(e.readlines())
        msg = msg.split('<summary>')[1]
        msg = msg.split('</summary>')[0]
        raise oscerr.APIError(msg)

    r = r.split('targetproject">')[1]
    r = r.split('</data>')[0]
def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False, msg=''):
    """
    Branch a package (via API call)
    """
    query = { 'cmd': 'branch' }
        query['ignoredevel'] = '1'
        query['target_project'] = target_project
        query['target_package'] = target_package
        query['comment'] = msg
    u = makeurl(apiurl, ['source', src_project, src_package], query=query)
    except urllib2.HTTPError, e:
        if not return_existing:
        msg = ''.join(e.readlines())
        msg = msg.split('<summary>')[1]
        msg = msg.split('</summary>')[0]
        m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
        return (True, m.group(1), m.group(2), None, None)

    for i in ET.fromstring(f.read()).findall('data'):
        data[i.get('name')] = i.text
    return (False, data.get('targetproject', None), data.get('targetpackage', None),
            data.get('sourceproject', None), data.get('sourcepackage', None))
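
# Illustrative usage sketch, not part of the original module: branching a
# package into the calling user's home project with branch_pkg(). The tuple
# layout matches the returns above: (already existed, target project, target
# package, source project, source package). Names are placeholders.
def _example_branch_package(apiurl='https://api.opensuse.org'):
    existed, tprj, tpac, sprj, spac = branch_pkg(apiurl, 'openSUSE:Tools', 'osc',
                                                 return_existing=True)
    if existed:
        print 'branch already existed at %s/%s' % (tprj, tpac)
    else:
        print 'branched to %s/%s' % (tprj, tpac)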
def copy_pac(src_apiurl, src_project, src_package,
             dst_apiurl, dst_project, dst_package,
             client_side_copy = False,
             keep_maintainers = False,
             keep_develproject = False,
    """
    Create a copy of a package.

    Copying can be done by downloading the files from one package and committing
    them into the other by uploading them (client-side copy) --
    or by the server, in a single API call.
    """

    src_meta = show_package_meta(src_apiurl, src_project, src_package)
    dst_userid = conf.get_apiurl_usr(dst_apiurl)
    src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
                                dst_userid, keep_develproject)

    print 'Sending meta data...'
    u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
    http_PUT(u, data=src_meta)

    print 'Copying files...'
    if not client_side_copy:
        query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
            query['expand'] = '1'
            query['orev'] = revision
            query['comment'] = comment
        u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)

        # copy one file after the other
        tmpdir = tempfile.mkdtemp(prefix='osc_copypac')
        query = {'rev': 'upload'}
        for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand):
            get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n, revision=r