1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.125git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but it needs to stay to avoid breaking tools that use the osc library
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Build Service and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E',
199 # os.path.samefile is available only under Unix
200 def os_path_samefile(path1, path2):
202 return os.path.samefile(path1, path2)
204 return os.path.realpath(path1) == os.path.realpath(path2)
207 """represent a file, including its metadata"""
208 def __init__(self, name, md5, size, mtime):
218 """Source service content
221 """creates an empty serviceinfo instance"""
224 def read(self, serviceinfo_node):
225 """read in the source services <services> element passed as
228 if serviceinfo_node is None:
231 services = serviceinfo_node.findall('service')
233 for service in services:
234 name = service.get('name')
236 for param in service.findall('param'):
237 option = param.get('name', None)
239 name += " --" + option + " '" + value + "'"
240 self.commands.append(name)
242 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
243 raise oscerr.APIError(msg)
245 def execute(self, dir):
248 for call in self.commands:
249 temp_dir = tempfile.mkdtemp()
250 name = call.split(None, 1)[0]
251 if not os.path.exists("/usr/lib/obs/service/"+name):
252 msg = "ERROR: service is not installed!\n"
253 msg += "It can maybe be solved with: zypper in obs-server-" + name
254 raise oscerr.APIError(msg)
255 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
256 ret = subprocess.call(c, shell=True)
258 print "ERROR: service call failed: " + c
260 for file in os.listdir(temp_dir):
261 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
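# Illustrative sketch (not part of the library): a typical _service file that
# Serviceinfo.read() consumes could look like
#
#   <services>
#     <service name="download_url">
#       <param name="host">example.org</param>
#       <param name="path">/pub/foo.tar.gz</param>
#     </service>
#   </services>
#
# read() turns each <service> into a command line such as
#   download_url --host 'example.org' --path '/pub/foo.tar.gz'
# and execute() runs it from /usr/lib/obs/service/ with --outdir pointing to a
# temporary directory, then renames the results to _service:<name>:<file>.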
265 """linkinfo metadata (which is part of the xml representing a directory
268 """creates an empty linkinfo instance"""
278 def read(self, linkinfo_node):
279 """read in the linkinfo metadata from the <linkinfo> element passed as
281 If the passed element is None, the method does nothing.
283 if linkinfo_node is None:
285 self.project = linkinfo_node.get('project')
286 self.package = linkinfo_node.get('package')
287 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
288 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
289 self.srcmd5 = linkinfo_node.get('srcmd5')
290 self.error = linkinfo_node.get('error')
291 self.rev = linkinfo_node.get('rev')
292 self.baserev = linkinfo_node.get('baserev')
295 """returns True if the linkinfo is not empty, otherwise False"""
296 if self.xsrcmd5 or self.lsrcmd5:
300 def isexpanded(self):
301 """returns True if the package is an expanded link"""
302 if self.lsrcmd5 and not self.xsrcmd5:
307 """returns True if the link is in error state (could not be applied)"""
313 """return an informative string representation"""
314 if self.islink() and not self.isexpanded():
315 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
316 % (self.project, self.package, self.xsrcmd5, self.rev)
317 elif self.islink() and self.isexpanded():
319 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
320 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
322 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
323 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
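# Illustrative note (assumed shape, abridged): the <linkinfo> element that
# read() consumes is part of a linked package's directory listing, e.g.
#   <linkinfo project="openSUSE:Factory" package="foo"
#             srcmd5="..." xsrcmd5="..." rev="2"/>
# read() copies these attributes onto the instance; islink() and isexpanded()
# then only inspect xsrcmd5/lsrcmd5.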
329 """represent a project directory, holding packages"""
330 def __init__(self, dir, getPackageList=True, progress_obj=None):
333 self.absdir = os.path.abspath(dir)
334 self.progress_obj = progress_obj
336 self.name = store_read_project(self.dir)
337 self.apiurl = store_read_apiurl(self.dir)
340 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
342 self.pacs_available = []
344 if conf.config['do_package_tracking']:
345 self.pac_root = self.read_packages().getroot()
346 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
347 self.pacs_excluded = [ i for i in os.listdir(self.dir)
348 for j in conf.config['exclude_glob']
349 if fnmatch.fnmatch(i, j) ]
350 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
351 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
352 # in the self.pacs_broken list
353 self.pacs_broken = []
354 for p in self.pacs_have:
355 if not os.path.isdir(os.path.join(self.absdir, p)):
356 # all states will be replaced with the '!'-state
357 # (unless it is already marked as deleted ('D'-state))
358 self.pacs_broken.append(p)
360 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
362 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
364 def checkout_missing_pacs(self, expand_link=False):
365 for pac in self.pacs_missing:
367 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
368 # pac is not under version control but a local file/dir exists
369 msg = 'can\'t add package \'%s\': Object already exists' % pac
370 raise oscerr.PackageExists(self.name, pac, msg)
372 print 'checking out new package %s' % pac
373 checkout_package(self.apiurl, self.name, pac, \
374 pathname=getTransActPath(os.path.join(self.dir, pac)), \
375 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
377 def set_state(self, pac, state):
378 node = self.get_package_node(pac)
380 self.new_package_entry(pac, state)
382 node.attrib['state'] = state
384 def get_package_node(self, pac):
385 for node in self.pac_root.findall('package'):
386 if pac == node.get('name'):
390 def del_package_node(self, pac):
391 for node in self.pac_root.findall('package'):
392 if pac == node.get('name'):
393 self.pac_root.remove(node)
395 def get_state(self, pac):
396 node = self.get_package_node(pac)
398 return node.get('state')
402 def new_package_entry(self, name, state):
403 ET.SubElement(self.pac_root, 'package', name=name, state=state)
405 def read_packages(self):
406 packages_file = os.path.join(self.absdir, store, '_packages')
407 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
408 return ET.parse(packages_file)
410 # scan project for existing packages and migrate them
412 for data in os.listdir(self.dir):
413 pac_dir = os.path.join(self.absdir, data)
414 # we cannot use self.pacs_available because we cannot guarantee that the package list
415 # was fetched from the server
416 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
417 and Package(pac_dir).name == data:
418 cur_pacs.append(ET.Element('package', name=data, state=' '))
419 store_write_initial_packages(self.absdir, self.name, cur_pacs)
420 return ET.parse(os.path.join(self.absdir, store, '_packages'))
422 def write_packages(self):
423 # TODO: should we only modify the existing file instead of overwriting?
424 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
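# For orientation (illustrative, abridged): the .osc/_packages file maintained
# by read_packages()/write_packages() is a small XML document along the lines of
#   <project name="home:user">
#     <package name="foo" state=" "/>
#     <package name="bar" state="A"/>
#   </project>
# where state is the package-tracking state used by get_state()/set_state().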
426 def addPackage(self, pac):
428 for i in conf.config['exclude_glob']:
429 if fnmatch.fnmatch(pac, i):
430 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
431 raise oscerr.OscIOError(None, msg)
432 state = self.get_state(pac)
433 if state == None or state == 'D':
434 self.new_package_entry(pac, 'A')
435 self.write_packages()
436 # sometimes the new pac doesn't exist in the list because
437 # it would take too much time to update all data structs regularly
438 if pac in self.pacs_unvers:
439 self.pacs_unvers.remove(pac)
441 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
443 def delPackage(self, pac, force = False):
444 state = self.get_state(pac.name)
446 if state == ' ' or state == 'D':
448 for file in pac.filenamelist + pac.filenamelist_unvers:
449 filestate = pac.status(file)
450 if filestate == 'M' or filestate == 'C' or \
451 filestate == 'A' or filestate == '?':
454 del_files.append(file)
455 if can_delete or force:
456 for file in del_files:
457 pac.delete_localfile(file)
458 if pac.status(file) != '?':
459 pac.delete_storefile(file)
460 # this is not really necessary
461 pac.put_on_deletelist(file)
462 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
464 pac.write_deletelist()
465 self.set_state(pac.name, 'D')
466 self.write_packages()
468 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
471 delete_dir(pac.absdir)
472 self.del_package_node(pac.name)
473 self.write_packages()
474 print statfrmt('D', pac.name)
476 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
478 print 'package is not under version control'
480 print 'unsupported state'
482 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
485 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
487 # we need to make sure that the _packages file will be written (even if an exception
490 # update complete project
491 # packages which no longer exist upstream
492 upstream_del = [ pac for pac in self.pacs_have if pac not in self.pacs_available and self.get_state(pac) != 'A']
494 for pac in upstream_del:
495 p = Package(os.path.join(self.dir, pac))
496 self.delPackage(p, force = True)
497 delete_storedir(p.storedir)
502 self.pac_root.remove(self.get_package_node(p.name))
503 self.pacs_have.remove(pac)
505 for pac in self.pacs_have:
506 state = self.get_state(pac)
507 if pac in self.pacs_broken:
508 if self.get_state(pac) != 'A':
509 checkout_package(self.apiurl, self.name, pac,
510 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
511 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
514 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
516 if expand_link and p.islink() and not p.isexpanded():
519 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
524 rev = p.linkinfo.xsrcmd5
525 print 'Expanding to rev', rev
526 elif unexpand_link and p.islink() and p.isexpanded():
527 rev = p.linkinfo.lsrcmd5
528 print 'Unexpanding to rev', rev
529 elif p.islink() and p.isexpanded():
531 print 'Updating %s' % p.name
532 p.update(rev, service_files)
536 # TODO: Package::update has to be fixed to behave like svn does
537 if pac in self.pacs_broken:
538 checkout_package(self.apiurl, self.name, pac,
539 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
540 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
542 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
543 elif state == 'A' and pac in self.pacs_available:
544 # file/dir called pac already exists and is under version control
545 msg = 'can\'t add package \'%s\': Object already exists' % pac
546 raise oscerr.PackageExists(self.name, pac, msg)
551 print 'unexpected state for package \'%s\'' % pac
553 self.checkout_missing_pacs(expand_link=not unexpand_link)
555 self.write_packages()
557 def commit(self, pacs = (), msg = '', files = {}):
562 if files.has_key(pac):
564 state = self.get_state(pac)
566 self.commitNewPackage(pac, msg, todo)
568 self.commitDelPackage(pac)
570 # display the correct dir when sending the changes
571 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
574 p = Package(os.path.join(self.dir, pac))
577 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
578 print 'osc: \'%s\' is not under version control' % pac
579 elif pac in self.pacs_broken:
580 print 'osc: \'%s\' package not found' % pac
582 self.commitExtPackage(pac, msg, todo)
584 self.write_packages()
586 # if we have packages marked as '!' we cannot commit
587 for pac in self.pacs_broken:
588 if self.get_state(pac) != 'D':
589 msg = 'commit failed: package \'%s\' is missing' % pac
590 raise oscerr.PackageMissing(self.name, pac, msg)
592 for pac in self.pacs_have:
593 state = self.get_state(pac)
596 Package(os.path.join(self.dir, pac)).commit(msg)
598 self.commitDelPackage(pac)
600 self.commitNewPackage(pac, msg)
602 self.write_packages()
604 def commitNewPackage(self, pac, msg = '', files = []):
605 """creates and commits a new package if it does not exist on the server"""
606 if pac in self.pacs_available:
607 print 'package \'%s\' already exists' % pac
609 user = conf.get_apiurl_usr(self.apiurl)
610 edit_meta(metatype='pkg',
611 path_args=(quote_plus(self.name), quote_plus(pac)),
616 # display the correct dir when sending the changes
618 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
622 p = Package(os.path.join(self.dir, pac))
624 print statfrmt('Sending', os.path.normpath(p.dir))
626 self.set_state(pac, ' ')
629 def commitDelPackage(self, pac):
630 """deletes a package on the server and in the working copy"""
632 # display the correct dir when sending the changes
633 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
636 pac_dir = os.path.join(self.dir, pac)
637 p = Package(os.path.join(self.dir, pac))
638 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
639 delete_storedir(p.storedir)
645 pac_dir = os.path.join(self.dir, pac)
646 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
647 print statfrmt('Deleting', getTransActPath(pac_dir))
648 delete_package(self.apiurl, self.name, pac)
649 self.del_package_node(pac)
651 def commitExtPackage(self, pac, msg, files = []):
652 """commits a package from an external project"""
653 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
656 pac_path = os.path.join(self.dir, pac)
658 project = store_read_project(pac_path)
659 package = store_read_package(pac_path)
660 apiurl = store_read_apiurl(pac_path)
661 if meta_exists(metatype='pkg',
662 path_args=(quote_plus(project), quote_plus(package)),
664 create_new=False, apiurl=apiurl):
665 p = Package(pac_path)
669 user = conf.get_apiurl_usr(self.apiurl)
670 edit_meta(metatype='pkg',
671 path_args=(quote_plus(project), quote_plus(package)),
676 p = Package(pac_path)
682 r.append('*****************************************************')
683 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
684 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
685 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
686 r.append('*****************************************************')
692 """represent a package (its directory) and read/keep/write its metadata"""
693 def __init__(self, workingdir, progress_obj=None):
694 self.dir = workingdir
695 self.absdir = os.path.abspath(self.dir)
696 self.storedir = os.path.join(self.absdir, store)
697 self.progress_obj = progress_obj
699 check_store_version(self.dir)
701 self.prjname = store_read_project(self.dir)
702 self.name = store_read_package(self.dir)
703 self.apiurl = store_read_apiurl(self.dir)
705 self.update_datastructs()
709 self.todo_delete = []
712 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
713 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
716 def addfile(self, n):
717 st = os.stat(os.path.join(self.dir, n))
718 f = File(n, None, st.st_size, st.st_mtime)
719 self.filelist.append(f)
720 self.filenamelist.append(n)
721 self.filenamelist_unvers.remove(n)
722 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
724 def delete_file(self, n, force=False):
725 """deletes a file if possible and marks the file as deleted"""
728 state = self.status(n)
732 if state in ['?', 'A', 'M'] and not force:
733 return (False, state)
734 self.delete_localfile(n)
736 self.put_on_deletelist(n)
737 self.write_deletelist()
739 self.delete_storefile(n)
742 def delete_storefile(self, n):
743 try: os.unlink(os.path.join(self.storedir, n))
746 def delete_localfile(self, n):
747 try: os.unlink(os.path.join(self.dir, n))
750 def put_on_deletelist(self, n):
751 if n not in self.to_be_deleted:
752 self.to_be_deleted.append(n)
754 def put_on_conflictlist(self, n):
755 if n not in self.in_conflict:
756 self.in_conflict.append(n)
758 def clear_from_conflictlist(self, n):
759 """delete an entry from the file, and remove the file if it would be empty"""
760 if n in self.in_conflict:
762 filename = os.path.join(self.dir, n)
763 storefilename = os.path.join(self.storedir, n)
764 myfilename = os.path.join(self.dir, n + '.mine')
765 if self.islinkrepair() or self.ispulled():
766 upfilename = os.path.join(self.dir, n + '.new')
768 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
771 os.unlink(myfilename)
772 # the working copy may be updated, so the .r* ending may be obsolete...
774 os.unlink(upfilename)
775 if self.islinkrepair() or self.ispulled():
776 os.unlink(os.path.join(self.dir, n + '.old'))
780 self.in_conflict.remove(n)
782 self.write_conflictlist()
784 def write_deletelist(self):
785 if len(self.to_be_deleted) == 0:
787 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
791 fname = os.path.join(self.storedir, '_to_be_deleted')
793 f.write('\n'.join(self.to_be_deleted))
797 def delete_source_file(self, n):
798 """delete a local source file"""
799 self.delete_localfile(n)
800 self.delete_storefile(n)
802 def delete_remote_source_file(self, n):
803 """delete a remote source file (i.e. on the server)"""
805 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
808 def put_source_file(self, n):
810 # escaping '+' in the URL path (note: not in the URL query string) is
811 # only a workaround for ruby on rails, which swallows it otherwise
813 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
814 http_PUT(u, file = os.path.join(self.dir, n))
816 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
818 def commit(self, msg=''):
819 # commit only if the upstream revision is the same as the working copy's
820 upstream_rev = self.latest_rev()
821 if self.rev != upstream_rev:
822 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
825 self.todo = self.filenamelist_unvers + self.filenamelist
827 pathn = getTransActPath(self.dir)
829 have_conflicts = False
830 for filename in self.todo:
831 if not filename.startswith('_service:') and not filename.startswith('_service_'):
832 st = self.status(filename)
833 if st == 'A' or st == 'M':
834 self.todo_send.append(filename)
835 print statfrmt('Sending', os.path.join(pathn, filename))
837 self.todo_delete.append(filename)
838 print statfrmt('Deleting', os.path.join(pathn, filename))
840 have_conflicts = True
843 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
846 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
847 print 'nothing to do for package %s' % self.name
850 if self.islink() and self.isexpanded():
851 # resolve the link into the upload revision
852 # XXX: do this always?
853 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
854 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
857 print 'Transmitting file data ',
859 for filename in self.todo_delete:
860 # do not touch local files on commit --
861 # delete remotely instead
862 self.delete_remote_source_file(filename)
863 self.to_be_deleted.remove(filename)
864 for filename in self.todo_send:
865 sys.stdout.write('.')
867 self.put_source_file(filename)
869 # all source files are committed - now comes the log
870 query = { 'cmd' : 'commit',
872 'user' : conf.get_apiurl_usr(self.apiurl),
874 if self.islink() and self.isexpanded():
875 query['keeplink'] = '1'
876 if conf.config['linkcontrol'] or self.isfrozen():
877 query['linkrev'] = self.linkinfo.srcmd5
879 query['repairlink'] = '1'
880 query['linkrev'] = self.get_pulled_srcmd5()
881 if self.islinkrepair():
882 query['repairlink'] = '1'
883 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
886 # delete upload revision
888 query = { 'cmd': 'deleteuploadrev' }
889 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
895 root = ET.parse(f).getroot()
896 self.rev = int(root.get('rev'))
898 print 'Committed revision %s.' % self.rev
901 os.unlink(os.path.join(self.storedir, '_pulled'))
902 if self.islinkrepair():
903 os.unlink(os.path.join(self.storedir, '_linkrepair'))
904 self.linkrepair = False
905 # XXX: mark package as invalid?
906 print 'The source link has been repaired. This directory can now be removed.'
907 if self.islink() and self.isexpanded():
908 self.update_local_filesmeta(revision=self.latest_rev())
910 self.update_local_filesmeta()
911 self.write_deletelist()
912 self.update_datastructs()
914 if self.filenamelist.count('_service'):
915 print 'The package contains a source service.'
916 for filename in self.todo:
917 if filename.startswith('_service:') and os.path.exists(filename):
918 os.unlink(filename) # remove local files
919 print_request_list(self.apiurl, self.prjname, self.name)
921 def write_conflictlist(self):
922 if len(self.in_conflict) == 0:
924 os.unlink(os.path.join(self.storedir, '_in_conflict'))
928 fname = os.path.join(self.storedir, '_in_conflict')
930 f.write('\n'.join(self.in_conflict))
934 def updatefile(self, n, revision):
935 filename = os.path.join(self.dir, n)
936 storefilename = os.path.join(self.storedir, n)
937 mtime = self.findfilebyname(n).mtime
939 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
940 os.utime(filename, (-1, mtime))
942 shutil.copyfile(filename, storefilename)
944 def mergefile(self, n):
945 filename = os.path.join(self.dir, n)
946 storefilename = os.path.join(self.storedir, n)
947 myfilename = os.path.join(self.dir, n + '.mine')
948 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
949 os.rename(filename, myfilename)
951 mtime = self.findfilebyname(n).mtime
952 get_source_file(self.apiurl, self.prjname, self.name, n,
953 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
954 os.utime(upfilename, (-1, mtime))
956 if binary_file(myfilename) or binary_file(upfilename):
958 shutil.copyfile(upfilename, filename)
959 shutil.copyfile(upfilename, storefilename)
960 self.in_conflict.append(n)
961 self.write_conflictlist()
965 # diff3 OPTIONS... MINE OLDER YOURS
966 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
967 # we would rather use the subprocess module, but it is not available before 2.4
968 ret = subprocess.call(merge_cmd, shell=True)
970 # "An exit status of 0 means `diff3' was successful, 1 means some
971 # conflicts were found, and 2 means trouble."
973 # merge was successful... clean up
974 shutil.copyfile(upfilename, storefilename)
975 os.unlink(upfilename)
976 os.unlink(myfilename)
980 shutil.copyfile(upfilename, storefilename)
981 self.in_conflict.append(n)
982 self.write_conflictlist()
985 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
986 print >>sys.stderr, 'the command line was:'
987 print >>sys.stderr, merge_cmd
992 def update_local_filesmeta(self, revision=None):
994 Update the local _files file in the store.
995 It is replaced with the version pulled from upstream.
997 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
998 store_write_string(self.absdir, '_files', meta)
1000 def update_datastructs(self):
1002 Update the internal data structures if the local _files
1003 file has changed (e.g. update_local_filesmeta() has been
1007 files_tree = read_filemeta(self.dir)
1008 files_tree_root = files_tree.getroot()
1010 self.rev = files_tree_root.get('rev')
1011 self.srcmd5 = files_tree_root.get('srcmd5')
1013 self.linkinfo = Linkinfo()
1014 self.linkinfo.read(files_tree_root.find('linkinfo'))
1016 self.filenamelist = []
1018 for node in files_tree_root.findall('entry'):
1020 f = File(node.get('name'),
1022 int(node.get('size')),
1023 int(node.get('mtime')))
1025 # okay, a very old version of _files, which didn't contain any metadata yet...
1026 f = File(node.get('name'), '', 0, 0)
1027 self.filelist.append(f)
1028 self.filenamelist.append(f.name)
1030 self.to_be_deleted = read_tobedeleted(self.dir)
1031 self.in_conflict = read_inconflict(self.dir)
1032 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1034 # gather unversioned files, but ignore some stuff
1035 self.excluded = [ i for i in os.listdir(self.dir)
1036 for j in conf.config['exclude_glob']
1037 if fnmatch.fnmatch(i, j) ]
1038 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1039 if i not in self.excluded
1040 if i not in self.filenamelist ]
1043 """tells us if the package is a link (has 'linkinfo').
1044 A package with linkinfo is a package which links to another package.
1045 Returns True if the package is a link, otherwise False."""
1046 return self.linkinfo.islink()
1048 def isexpanded(self):
1049 """tells us if the package is a link which is expanded.
1050 Returns True if the package is expanded, otherwise False."""
1051 return self.linkinfo.isexpanded()
1053 def islinkrepair(self):
1054 """tells us if we are repairing a broken source link."""
1055 return self.linkrepair
1058 """tells us if we have pulled a link."""
1059 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1062 """tells us if the link is frozen."""
1063 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1065 def get_pulled_srcmd5(self):
1067 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1068 pulledrev = line.strip()
1071 def haslinkerror(self):
1073 Returns True if the link is broken, otherwise False.
1074 If the package is not a link it returns False.
1076 return self.linkinfo.haserror()
1078 def linkerror(self):
1080 Returns an error message if the link is broken, otherwise None.
1081 If the package is not a link it returns None.
1083 return self.linkinfo.error
1085 def update_local_pacmeta(self):
1087 Update the local _meta file in the store.
1088 It is replaced with the version pulled from upstream.
1090 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1091 store_write_string(self.absdir, '_meta', meta)
1093 def findfilebyname(self, n):
1094 for i in self.filelist:
1098 def status(self, n):
1102          file  storefile  file present  STATUS
1103         exists   exists     in _files
1106            x        x           x        ' ' if digest differs: 'M'
1107                                               and if in conflicts file: 'C'
1109            x        -           x        'D' and listed in _to_be_deleted
1111            -        x           -        'D' (when file in working copy is already deleted)
1112            -        -           x        'F' (new in repo, but not yet in working copy)
1117 known_by_meta = False
1119 exists_in_store = False
1120 if n in self.filenamelist:
1121 known_by_meta = True
1122 if os.path.exists(os.path.join(self.absdir, n)):
1124 if os.path.exists(os.path.join(self.storedir, n)):
1125 exists_in_store = True
1128 if exists and not exists_in_store and known_by_meta:
1130 elif n in self.to_be_deleted:
1132 elif n in self.in_conflict:
1134 elif exists and exists_in_store and known_by_meta:
1135 #print self.findfilebyname(n)
1136 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1140 elif exists and not exists_in_store and not known_by_meta:
1142 elif exists and exists_in_store and not known_by_meta:
1144 elif not exists and exists_in_store and known_by_meta:
1146 elif not exists and not exists_in_store and known_by_meta:
1148 elif not exists and exists_in_store and not known_by_meta:
1150 elif not exists and not exists_in_store and not known_by_meta:
1151 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1152 raise IOError('osc: \'%s\' is not under version control' % n)
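# Usage sketch (illustrative): the table above describes the per-file status
# letters; printing them for a whole working copy is roughly
#   p = Package('.')
#   for fname in p.filenamelist + p.filenamelist_unvers:
#       print statfrmt(p.status(fname), fname)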
1156 def comparePac(self, cmp_pac):
1158 This method compares the local filelist with
1159 the filelist of the passed package to see which files
1160 were added, removed and changed.
1167 for file in self.filenamelist+self.filenamelist_unvers:
1168 state = self.status(file)
1169 if state == 'A' and (not file in cmp_pac.filenamelist):
1170 added_files.append(file)
1171 elif file in cmp_pac.filenamelist and state == 'D':
1172 removed_files.append(file)
1173 elif state == ' ' and not file in cmp_pac.filenamelist:
1174 added_files.append(file)
1175 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1176 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1177 changed_files.append(file)
1178 for file in cmp_pac.filenamelist:
1179 if not file in self.filenamelist:
1180 removed_files.append(file)
1181 removed_files = set(removed_files)
1183 return changed_files, added_files, removed_files
1185 def merge(self, otherpac):
1186 self.todo += otherpac.todo
1200 '\n '.join(self.filenamelist),
1208 def read_meta_from_spec(self, spec = None):
1213 # scan for spec files
1214 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1215 if len(speclist) == 1:
1216 specfile = speclist[0]
1217 elif len(speclist) > 1:
1218 print 'the following specfiles were found:'
1219 for file in speclist:
1221 print 'please specify one with --specfile'
1224 print 'no specfile was found - please specify one ' \
1228 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1229 self.summary = data['Summary']
1230 self.url = data['Url']
1231 self.descr = data['%description']
1234 def update_package_meta(self, force=False):
1236 for the updatepacmetafromspec subcommand;
1237 the argument force suppresses the confirmation question
1240 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1242 root = ET.fromstring(m)
1243 root.find('title').text = self.summary
1244 root.find('description').text = ''.join(self.descr)
1245 url = root.find('url')
1247 url = ET.SubElement(root, 'url')
1250 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1251 mf = metafile(u, ET.tostring(root))
1254 print '*' * 36, 'old', '*' * 36
1256 print '*' * 36, 'new', '*' * 36
1257 print ET.tostring(root)
1259 repl = raw_input('Write? (y/N/e) ')
1270 def mark_frozen(self):
1271 store_write_string(self.absdir, '_frozenlink', '')
1273 print "The link in this package is currently broken. Checking"
1274 print "out the last working version instead; please use 'osc pull'"
1275 print "to repair the link."
1278 def unmark_frozen(self):
1279 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1280 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1282 def latest_rev(self):
1283 if self.islinkrepair():
1284 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1285 elif self.islink() and self.isexpanded():
1286 if self.isfrozen() or self.ispulled():
1287 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1290 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1293 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1295 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1298 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1301 def update(self, rev = None, service_files = False):
1302 # save filelist and (modified) status before replacing the meta file
1303 saved_filenames = self.filenamelist
1304 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1307 self.update_local_filesmeta(rev)
1308 self = Package(self.dir, progress_obj=self.progress_obj)
1310 # which files no longer exist upstream?
1311 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1313 pathn = getTransActPath(self.dir)
1315 for filename in saved_filenames:
1316 if not filename.startswith('_service:') and filename in disappeared:
1317 print statfrmt('D', os.path.join(pathn, filename))
1318 # keep file if it has local modifications
1319 if oldp.status(filename) == ' ':
1320 self.delete_localfile(filename)
1321 self.delete_storefile(filename)
1323 for filename in self.filenamelist:
1325 state = self.status(filename)
1326 if not service_files and filename.startswith('_service:'):
1328 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1329 # no merge necessary... local file is changed, but upstream isn't
1331 elif state == 'M' and filename in saved_modifiedfiles:
1332 status_after_merge = self.mergefile(filename)
1333 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1335 self.updatefile(filename, rev)
1336 print statfrmt('U', os.path.join(pathn, filename))
1338 self.updatefile(filename, rev)
1339 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1341 self.updatefile(filename, rev)
1342 print statfrmt('A', os.path.join(pathn, filename))
1343 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1344 self.updatefile(filename, rev)
1345 self.delete_storefile(filename)
1346 print statfrmt('U', os.path.join(pathn, filename))
1350 self.update_local_pacmeta()
1352 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1353 print 'At revision %s.' % self.rev
1355 if not service_files:
1356 self.run_source_services()
1358 def run_source_services(self):
1359 if self.filenamelist.count('_service'):
1360 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1363 si.execute(self.absdir)
1365 def prepare_filelist(self):
1366 """Prepare a list of files, which will be processed by process_filelist
1367 method. This allows easy modifications of a file list in commit
1371 self.todo = self.filenamelist + self.filenamelist_unvers
1375 for f in [f for f in self.todo if not os.path.isdir(f)]:
1377 status = self.status(f)
1380 ret += "%s %s %s\n" % (action, status, f)
1383 # Edit a filelist for package %s
1385 # l, leave = leave a file as is
1386 # r, remove = remove a file
1387 # a, add = add a file
1389 # If you remove a file from the list, it will be left unchanged
1390 # If you remove all files, the commit will be aborted"""
1394 def edit_filelist(self):
1395 """Opens the package's file list in an editor for editing. This allows easy
1396 modifications of it just by simple text editing
1400 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1401 f = os.fdopen(fd, 'w')
1402 f.write(self.prepare_filelist())
1404 mtime_orig = os.stat(filename).st_mtime
1406 if sys.platform[:3] != 'win':
1407 editor = os.getenv('EDITOR', default='vim')
1409 editor = os.getenv('EDITOR', default='notepad')
1411 subprocess.call('%s %s' % (editor, filename), shell=True)
1412 mtime = os.stat(filename).st_mtime
1413 if mtime_orig < mtime:
1414 filelist = open(filename).readlines()
1418 raise oscerr.UserAbort()
1420 return self.process_filelist(filelist)
1422 def process_filelist(self, filelist):
1423 """Process a filelist - it adds, removes, or leaves files depending on
1424 user input. If no file is processed, it raises a ValueError
1428 for line in [l.strip() for l in filelist if (l.strip() != '' and l[0] != "#")]:
1430 foo = line.split(' ')
1432 action, state, name = (foo[0], ' ', foo[3])
1434 action, state, name = (foo[0], foo[1], foo[2])
1437 action = action.lower()
1440 if action in ('r', 'remove'):
1441 if self.status(name) == '?':
1443 if name in self.todo:
1444 self.todo.remove(name)
1446 self.delete_file(name, True)
1447 elif action in ('a', 'add'):
1448 if self.status(name) != '?':
1449 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1452 elif action in ('l', 'leave'):
1455 raise ValueError("Unknown action `%s'" % action)
1458 raise ValueError("Empty filelist")
1461 """for objects to represent the review state in a request"""
1462 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1464 self.by_user = by_user
1465 self.by_group = by_group
1468 self.comment = comment
1471 """for objects to represent the "state" of a request"""
1472 def __init__(self, name=None, who=None, when=None, comment=None):
1476 self.comment = comment
1479 """represents an action"""
1480 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1482 self.src_project = src_project
1483 self.src_package = src_package
1484 self.src_rev = src_rev
1485 self.dst_project = dst_project
1486 self.dst_package = dst_package
1487 self.src_update = src_update
1490 """represent a request and hold its metadata;
1491 it has methods to read in metadata from xml,
1492 different views, ..."""
1495 self.state = RequestState()
1498 self.last_author = None
1501 self.statehistory = []
1504 def read(self, root):
1505 self.reqid = int(root.get('id'))
1506 actions = root.findall('action')
1507 if len(actions) == 0:
1508 actions = [ root.find('submit') ] # for old style requests
1510 for action in actions:
1511 type = action.get('type', 'submit')
1513 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1514 if action.findall('source'):
1515 n = action.find('source')
1516 src_prj = n.get('project', None)
1517 src_pkg = n.get('package', None)
1518 src_rev = n.get('rev', None)
1519 if action.findall('target'):
1520 n = action.find('target')
1521 dst_prj = n.get('project', None)
1522 dst_pkg = n.get('package', None)
1523 if action.findall('options'):
1524 n = action.find('options')
1525 if n.findall('sourceupdate'):
1526 src_update = n.find('sourceupdate').text.strip()
1527 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1529 msg = 'invalid request format:\n%s' % ET.tostring(root)
1530 raise oscerr.APIError(msg)
1533 n = root.find('state')
1534 self.state.name, self.state.who, self.state.when \
1535 = n.get('name'), n.get('who'), n.get('when')
1537 self.state.comment = n.find('comment').text.strip()
1539 self.state.comment = None
1541 # read the review states
1542 for r in root.findall('review'):
1544 s.state = r.get('state')
1545 s.by_user = r.get('by_user')
1546 s.by_group = r.get('by_group')
1547 s.who = r.get('who')
1548 s.when = r.get('when')
1550 s.comment = r.find('comment').text.strip()
1553 self.reviews.append(s)
1555 # read the state history
1556 for h in root.findall('history'):
1558 s.name = h.get('name')
1559 s.who = h.get('who')
1560 s.when = h.get('when')
1562 s.comment = h.find('comment').text.strip()
1565 self.statehistory.append(s)
1566 self.statehistory.reverse()
1568 # read a description, if it exists
1570 n = root.find('description').text
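# Illustrative example (abridged) of the request XML that read() consumes:
#   <request id="42">
#     <action type="submit">
#       <source project="home:user" package="foo" rev="3"/>
#       <target project="openSUSE:Factory" package="foo"/>
#     </action>
#     <state name="new" who="user" when="2009-01-01T12:00:00">
#       <comment>please review</comment>
#     </state>
#     <description>update to 1.2</description>
#   </request>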
1575 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1576 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1577 dst_prj, dst_pkg, src_update)
1580 def list_view(self):
1581 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1583 for a in self.actions:
1584 dst = "%s/%s" % (a.dst_project, a.dst_package)
1585 if a.src_package == a.dst_package:
1589 if a.type=="submit":
1590 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1591 if a.type=="change_devel":
1592 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1593 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1595 ret += '\n %s: %-50s %-20s ' % \
1596 (a.type, sr_source, dst)
1598 if self.statehistory and self.statehistory[0]:
1600 for h in self.statehistory:
1601 who.append("%s(%s)" % (h.who,h.name))
1603 ret += "\n From: %s" % (' -> '.join(who))
1605 txt = re.sub(r'[^\x20-\x7e\t\n]', '_', self.descr) # replace non-printable characters
1607 lines = txt.splitlines()
1608 wrapper = textwrap.TextWrapper( width = 80,
1609 initial_indent=' Descr: ',
1610 subsequent_indent=' ')
1611 ret += "\n" + wrapper.fill(lines[0])
1612 wrapper.initial_indent = ' '
1613 for line in lines[1:]:
1614 ret += "\n" + wrapper.fill(line)
1620 def __cmp__(self, other):
1621 return cmp(self.reqid, other.reqid)
1625 for action in self.actions:
1626 action_list=" %s: " % (action.type)
1627 if action.type=="submit":
1630 r="(r%s)" % (action.src_rev)
1632 if action.src_update:
1633 m="(%s)" % (action.src_update)
1634 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1635 if action.dst_package:
1636 action_list=action_list+"/%s" % ( action.dst_package )
1637 elif action.type=="delete":
1638 action_list=action_list+" %s" % ( action.dst_project )
1639 if action.dst_package:
1640 action_list=action_list+"/%s" % ( action.dst_package )
1641 elif action.type=="change_devel":
1642 action_list=action_list+" %s/%s developed in %s/%s" % \
1643 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1644 action_list=action_list+"\n"
1659 self.state.name, self.state.when, self.state.who,
1662 if len(self.reviews):
1663 reviewitems = [ '%-10s %s %s %s %s %s' \
1664 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1665 for i in self.reviews ]
1666 s += '\nReview: ' + '\n '.join(reviewitems)
1669 if len(self.statehistory):
1670 histitems = [ '%-10s %s %s' \
1671 % (i.name, i.when, i.who) \
1672 for i in self.statehistory ]
1673 s += '\nHistory: ' + '\n '.join(histitems)
1680 """format time as Apr 02 18:19
1682 depending on whether it is in the current year
1686 if time.localtime()[0] == time.localtime(t)[0]:
1688 return time.strftime('%b %d %H:%M',time.localtime(t))
1690 return time.strftime('%b %d %Y',time.localtime(t))
1693 def is_project_dir(d):
1694 return os.path.exists(os.path.join(d, store, '_project')) and not \
1695 os.path.exists(os.path.join(d, store, '_package'))
1698 def is_package_dir(d):
1699 return os.path.exists(os.path.join(d, store, '_project')) and \
1700 os.path.exists(os.path.join(d, store, '_package'))
1702 def parse_disturl(disturl):
1703 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1704 revision), else raises an oscerr.WrongArgs exception
1707 m = DISTURL_RE.match(disturl)
1709 raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
1711 apiurl = m.group('apiurl')
1712 if apiurl.split('.')[0] != 'api':
1713 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1714 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
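# Illustrative example: a disturl such as
#   obs://build.opensuse.org/openSUSE:Factory/standard/<srcmd5>-osc
# would be parsed into
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard', '<srcmd5>')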
1716 def parse_buildlogurl(buildlogurl):
1717 """Parse a build log url, returns a tuple (apiurl, project, package,
1718 repository, arch), else raises oscerr.WrongArgs exception"""
1720 global BUILDLOGURL_RE
1722 m = BUILDLOGURL_RE.match(buildlogurl)
1724 raise oscerr.WrongArgs('\'%s\' does not look like a build log URL' % buildlogurl)
1726 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
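# Illustrative example: a build log URL such as
#   https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/osc/_log
# would be parsed into
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard', 'x86_64')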
1729 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1730 This is handy to allow copy/pasting a project/package combination in this form.
1732 Trailing slashes are removed before the split, because the split would
1733 otherwise give an additional empty string.
1741 def expand_proj_pack(args, idx=0, howmany=0):
1742 """looks for an occurrence of '.' at the position idx.
1743 If howmany is 2, both proj and pack are expanded together
1744 using the current directory, or none of them, if not possible.
1745 If howmany is 0, proj is expanded if possible, then, if there
1746 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1747 expanded, if possible.
1748 If howmany is 1, only proj is expanded if possible.
1750 If args[idx] does not exist, an implicit '.' is assumed.
1751 If not enough elements up to idx exist, an error is raised.
1753 See also parseargs(args), slash_split(args), findpacs(args)
1754 All these need unification, somehow.
1757 # print args,idx,howmany
1760 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1762 if len(args) == idx:
1764 if args[idx+0] == '.':
1765 if howmany == 0 and len(args) > idx+1:
1766 if args[idx+1] == '.':
1768 # remove one dot and make sure to expand both proj and pack
1773 # print args,idx,howmany
1775 args[idx+0] = store_read_project('.')
1778 package = store_read_package('.')
1779 args.insert(idx+1, package)
1783 package = store_read_package('.')
1784 args.insert(idx+1, package)
1788 def findpacs(files, progress_obj=None):
1789 """collect Package objects belonging to the given files
1790 and make sure each Package is returned only once"""
1793 p = filedir_to_pac(f, progress_obj)
1796 if i.name == p.name:
1806 def read_filemeta(dir):
1808 r = ET.parse(os.path.join(dir, store, '_files'))
1809 except SyntaxError, e:
1810 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1811 'When parsing .osc/_files, the following error was encountered:\n'
1816 def read_tobedeleted(dir):
1818 fname = os.path.join(dir, store, '_to_be_deleted')
1820 if os.path.exists(fname):
1821 r = [ line.strip() for line in open(fname) ]
1826 def read_inconflict(dir):
1828 fname = os.path.join(dir, store, '_in_conflict')
1830 if os.path.exists(fname):
1831 r = [ line.strip() for line in open(fname) ]
1836 def parseargs(list_of_args):
1837 """Convenience method for osc's command line argument parsing.
1839 If called with an empty tuple (or list), return a list containing the current directory.
1840 Otherwise, return a list of the arguments."""
1842 return list(list_of_args)
1847 def filedir_to_pac(f, progress_obj=None):
1848 """Takes a working copy path, or a path to a file inside a working copy,
1849 and returns a Package object instance
1851 If the argument was a filename, add it onto the "todo" list of the Package """
1853 if os.path.isdir(f):
1855 p = Package(wd, progress_obj=progress_obj)
1858 wd = os.path.dirname(f)
1861 p = Package(wd, progress_obj=progress_obj)
1862 p.todo = [ os.path.basename(f) ]
1867 def statfrmt(statusletter, filename):
1868 return '%s %s' % (statusletter, filename)
1871 def pathjoin(a, *p):
1872 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1873 path = os.path.join(a, *p)
1874 if path.startswith('./'):
1879 def makeurl(baseurl, l, query=[]):
1880 """Given a list of path components, construct a complete URL.
1882 Optional parameters for a query string can be given as a list, as a
1883 dictionary, or as an already assembled string.
1884 In case of a dictionary, the parameters will be urlencoded by this
1885 function. In case of a list it will not be -- this is for backwards compatibility.
1888 if conf.config['verbose'] > 1:
1889 print 'makeurl:', baseurl, l, query
1891 if type(query) == type(list()):
1892 query = '&'.join(query)
1893 elif type(query) == type(dict()):
1894 query = urlencode(query)
1896 scheme, netloc = urlsplit(baseurl)[0:2]
1897 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
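# Illustrative example:
#   makeurl('https://api.opensuse.org', ['source', 'home:user', 'foo'],
#           query={'rev': 'latest'})
# returns 'https://api.opensuse.org/source/home:user/foo?rev=latest'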
1900 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1901 """wrapper around urllib2.urlopen for error handling,
1902 and to support additional (PUT, DELETE) methods"""
1906 if conf.config['http_debug']:
1909 print '--', method, url
1911 if method == 'POST' and not file and not data:
1912 # adding data to an urllib2 request transforms it into a POST
1915 req = urllib2.Request(url)
1917 api_host_options=conf.get_apiurl_api_host_options(url)
1919 for header, value in api_host_options['http_headers']:
1920 req.add_header(header, value)
1922 req.get_method = lambda: method
1924 # POST requests are application/x-www-form-urlencoded by default
1925 # since we change the request into PUT, we also need to adjust the content type header
1926 if method == 'PUT' or (method == 'POST' and data):
1927 req.add_header('Content-Type', 'application/octet-stream')
1929 if type(headers) == type({}):
1930 for i in headers.keys():
1932 req.add_header(i, headers[i])
1934 if file and not data:
1935 size = os.path.getsize(file)
1937 data = open(file, 'rb').read()
1940 filefd = open(file, 'rb')
1942 if sys.platform[:3] != 'win':
1943 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1945 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1947 except EnvironmentError, e:
1949 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1950 '\non a filesystem which does not support this.' % (e, file))
1951 elif hasattr(e, 'winerror') and e.winerror == 5:
1952 # falling back to the default io
1953 data = open(file, 'rb').read()
1957 if conf.config['debug']: print method, url
1959 old_timeout = socket.getdefaulttimeout()
1960 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1961 if old_timeout != timeout and not api_host_options['sslcertck']:
1962 socket.setdefaulttimeout(timeout)
1964 fd = urllib2.urlopen(req, data=data)
1966 if old_timeout != timeout and not api_host_options['sslcertck']:
1967 socket.setdefaulttimeout(old_timeout)
1968 if hasattr(conf.cookiejar, 'save'):
1969 conf.cookiejar.save(ignore_discard=True)
1971 if filefd: filefd.close()
1976 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1977 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1978 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1979 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
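# Typical use of these wrappers (sketch): fetch a directory listing and parse it,
#   f = http_GET(makeurl(apiurl, ['source', 'openSUSE:Factory']))
#   root = ET.parse(f).getroot()
# which is essentially the pattern used by meta_get_packagelist() below.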
1982 def init_project_dir(apiurl, dir, project):
1983 if not os.path.exists(dir):
1984 if conf.config['checkout_no_colon']:
1985 os.makedirs(dir) # helpful with checkout_no_colon
1988 if not os.path.exists(os.path.join(dir, store)):
1989 os.mkdir(os.path.join(dir, store))
1991 # print 'project=',project,' dir=',dir
1992 store_write_project(dir, project)
1993 store_write_apiurl(dir, apiurl)
1994 if conf.config['do_package_tracking']:
1995 store_write_initial_packages(dir, project, [])
1997 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
1998 if not os.path.isdir(store):
2001 f = open('_project', 'w')
2002 f.write(project + '\n')
2004 f = open('_package', 'w')
2005 f.write(package + '\n')
2009 f = open('_files', 'w')
2010 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2014 ET.ElementTree(element=ET.Element('directory')).write('_files')
2016 f = open('_osclib_version', 'w')
2017 f.write(__store_version__ + '\n')
2020 store_write_apiurl(os.path.pardir, apiurl)
2026 def check_store_version(dir):
2027 versionfile = os.path.join(dir, store, '_osclib_version')
2029 v = open(versionfile).read().strip()
2034 msg = 'Error: "%s" is not an osc working copy.' % os.path.abspath(dir)
2035 if os.path.exists(os.path.join(dir, '.svn')):
2036 msg = msg + '\nTry svn instead of osc.'
2037 raise oscerr.NoWorkingCopy(msg)
2039 if v != __store_version__:
2040 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2041 # compatible old version, no migration needed -- just update the version file
2042 f = open(versionfile, 'w')
2043 f.write(__store_version__ + '\n')
2046 msg = 'The osc metadata of your working copy "%s"' % dir
2047 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2048 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2049 raise oscerr.WorkingCopyWrongVersion, msg
2052 def meta_get_packagelist(apiurl, prj):
2054 u = makeurl(apiurl, ['source', prj])
2056 root = ET.parse(f).getroot()
2057 return [ node.get('name') for node in root.findall('entry') ]
2060 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2061 """return a list of file names,
2062 or a list of File() instances if verbose=True"""
2068 query['rev'] = revision
2070 query['rev'] = 'latest'
2072 u = makeurl(apiurl, ['source', prj, package], query=query)
2074 root = ET.parse(f).getroot()
2077 return [ node.get('name') for node in root.findall('entry') ]
2081 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2082 rev = root.get('rev')
2083 for node in root.findall('entry'):
2084 f = File(node.get('name'),
2086 int(node.get('size')),
2087 int(node.get('mtime')))
2093 def meta_get_project_list(apiurl):
2094 u = makeurl(apiurl, ['source'])
2096 root = ET.parse(f).getroot()
2097 return sorted([ node.get('name') for node in root ])
2100 def show_project_meta(apiurl, prj):
2101 url = makeurl(apiurl, ['source', prj, '_meta'])
2103 return f.readlines()
2106 def show_project_conf(apiurl, prj):
2107 url = makeurl(apiurl, ['source', prj, '_config'])
2109 return f.readlines()
2112 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2113 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2117 except urllib2.HTTPError, e:
2118 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2122 def show_package_meta(apiurl, prj, pac):
2123 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2126 return f.readlines()
2127 except urllib2.HTTPError, e:
2128 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2132 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2134 path.append('source')
2140 path.append('_attribute')
2142 path.append(attribute)
2145 query.append("with_default=1")
2147 query.append("with_project=1")
2148 url = makeurl(apiurl, path, query)
2151 return f.readlines()
2152 except urllib2.HTTPError, e:
2153 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2157 def show_develproject(apiurl, prj, pac):
2158 m = show_package_meta(apiurl, prj, pac)
2160 return ET.fromstring(''.join(m)).find('devel').get('project')
2165 def show_pattern_metalist(apiurl, prj):
2166 url = makeurl(apiurl, ['source', prj, '_pattern'])
2170 except urllib2.HTTPError, e:
2171 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2173 r = [ node.get('name') for node in tree.getroot() ]
2178 def show_pattern_meta(apiurl, prj, pattern):
2179 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2182 return f.readlines()
2183 except urllib2.HTTPError, e:
2184 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2189 """metafile that can be manipulated and is stored back after manipulation."""
2190 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2194 self.change_is_required = change_is_required
2195 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2196 f = os.fdopen(fd, 'w')
2197 f.write(''.join(input))
2199 self.hash_orig = dgst(self.filename)
2202 hash = dgst(self.filename)
2203 if self.change_is_required == True and hash == self.hash_orig:
2204 print 'File unchanged. Not saving.'
2205 os.unlink(self.filename)
2208 print 'Sending meta data...'
2209 # don't do any exception handling... it's up to the caller what to do in case of an exception
2211 http_PUT(self.url, file=self.filename)
2212 os.unlink(self.filename)
2216 if sys.platform[:3] != 'win':
2217 editor = os.getenv('EDITOR', default='vim')
2219 editor = os.getenv('EDITOR', default='notepad')
2222 subprocess.call('%s %s' % (editor, self.filename), shell=True)
2226 except urllib2.HTTPError, e:
2227 error_help = "%d" % e.code
2228 if e.headers.get('X-Opensuse-Errorcode'):
2229 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2231 print >>sys.stderr, 'BuildService API error:', error_help
2232 # examine the error - we can't raise an exception because we might want to try again
2235 if '<summary>' in data:
2236 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2237 input = raw_input('Try again? ([y/N]): ')
2238 if input not in ['y', 'Y']:
2244 if os.path.exists(self.filename):
2245 print 'discarding %s' % self.filename
2246 os.unlink(self.filename)
2249 # different types of metadata
2250 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2251 'template': new_project_templ,
2254 'pkg': { 'path' : 'source/%s/%s/_meta',
2255 'template': new_package_templ,
2258 'attribute': { 'path' : 'source/%s/%s/_meta',
2259 'template': new_attribute_templ,
2262 'prjconf': { 'path': 'source/%s/_config',
2266 'user': { 'path': 'person/%s',
2267 'template': new_user_template,
2270 'pattern': { 'path': 'source/%s/_pattern/%s',
2271 'template': new_pattern_template,
2276 def meta_exists(metatype,
2283 apiurl = conf.config['apiurl']
2284 url = make_meta_url(metatype, path_args, apiurl)
2286 data = http_GET(url).readlines()
2287 except urllib2.HTTPError, e:
2288 if e.code == 404 and create_new:
2289 data = metatypes[metatype]['template']
2291 data = StringIO(data % template_args).readlines()
2296 def make_meta_url(metatype, path_args=None, apiurl=None):
2298 apiurl = conf.config['apiurl']
2299 if metatype not in metatypes.keys():
2300 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2301 path = metatypes[metatype]['path']
2304 path = path % path_args
2306 return makeurl(apiurl, [path])
2309 def edit_meta(metatype,
2314 change_is_required=False,
2318 apiurl = conf.config['apiurl']
2320 data = meta_exists(metatype,
2323 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2327 change_is_required = True
2329 url = make_meta_url(metatype, path_args, apiurl)
2330 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
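# Usage sketch: interactively edit a project's _meta. The project name is
# hypothetical; path_args is the argument passed on to make_meta_url() above,
# and template_args is assumed to be forwarded to meta_exists():
#
#   edit_meta('prj',
#             path_args=quote_plus('home:user'),
#             template_args={'name': 'home:user', 'user': 'some_user'})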
2338 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
2341 query['rev'] = revision
2343 query['rev'] = 'latest'
2345 query['linkrev'] = linkrev
2346 elif conf.config['linkcontrol']:
2347 query['linkrev'] = 'base'
2351 query['emptylink'] = 1
2352 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2353 return f.readlines()
2356 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2357 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2358 return ET.fromstring(''.join(m)).get('srcmd5')
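# Usage sketch (hypothetical project/package): fetch the current source
# checksum, optionally for the expanded (link-resolved) sources:
#
#   srcmd5 = show_upstream_srcmd5(apiurl, 'home:user', 'foo', expand=True)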
2361 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2362 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2364 # only source link packages have a <linkinfo> element.
2365 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2373 raise oscerr.LinkExpandError(prj, pac, li.error)
2377 def show_upstream_rev(apiurl, prj, pac):
2378 m = show_files_meta(apiurl, prj, pac)
2379 return ET.fromstring(''.join(m)).get('rev')
2382 def read_meta_from_spec(specfile, *args):
2383 import codecs, locale, re
2385 Read tags and sections from spec file. To read out
2386 a tag the passed argument mustn't end with a colon. To
2387 read out a section the passed argument must start with a '%'.
2389 This method returns a dictionary which contains the requested data.
2393 if not os.path.isfile(specfile):
2394 raise IOError('\'%s\' is not a regular file' % specfile)
2397 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2398 except UnicodeDecodeError:
2399 lines = open(specfile).readlines()
2406 if itm.startswith('%'):
2407 sections.append(itm)
2411 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2413 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2414 if m and m.group('val'):
2415 spec_data[tag] = m.group('val').strip()
2417 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2420 section_pat = '^%s\s*?$'
2421 for section in sections:
2422 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2424 start = lines.index(m.group()+'\n') + 1
2426 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2429 for line in lines[start:]:
2430 if line.startswith('%'):
2433 spec_data[section] = data
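# Usage sketch (hypothetical spec file path): read one tag and one section;
# tags are passed without a trailing colon, sections with their leading '%':
#
#   data = read_meta_from_spec('foo.spec', 'Version', '%description')
#   version, description = data['Version'], data['%description']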
2438 def edit_message(footer='', template=''):
2439 delim = '--This line, and those below, will be ignored--\n'
2441 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2442 f = os.fdopen(fd, 'w')
2451 if sys.platform[:3] != 'win':
2452 editor = os.getenv('EDITOR', default='vim')
2454 editor = os.getenv('EDITOR', default='notepad')
2457 subprocess.call('%s %s' % (editor, filename), shell=True)
2458 msg = open(filename).read().split(delim)[0].rstrip()
2463 input = raw_input('Log message not specified\n'
2464 'a)bort, c)ontinue, e)dit: ')
2466 raise oscerr.UserAbort()
2476 def create_delete_request(apiurl, project, package, message):
2481 package = """package="%s" """ % (package)
2487 <action type="delete">
2488 <target project="%s" %s/>
2491 <description>%s</description>
2493 """ % (project, package,
2494 cgi.escape(message or ''))
2496 u = makeurl(apiurl, ['request'], query='cmd=create')
2497 f = http_POST(u, data=xml)
2499 root = ET.parse(f).getroot()
2500 return root.get('id')
2503 def create_change_devel_request(apiurl,
2504 devel_project, devel_package,
2511 <action type="change_devel">
2512 <source project="%s" package="%s" />
2513 <target project="%s" package="%s" />
2516 <description>%s</description>
2518 """ % (devel_project,
2522 cgi.escape(message or ''))
2524 u = makeurl(apiurl, ['request'], query='cmd=create')
2525 f = http_POST(u, data=xml)
2527 root = ET.parse(f).getroot()
2528 return root.get('id')
2531 # This creates an old style submit request for server api 1.0
2532 def create_submit_request(apiurl,
2533 src_project, src_package,
2534 dst_project=None, dst_package=None,
2535 message=None, orev=None, src_update=None):
2540 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2542 # Yes, this kind of xml construction is horrible
2547 packagexml = """package="%s" """ %( dst_package )
2548 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2549 # XXX: keep the old template for now in order to work with old obs instances
2551 <request type="submit">
2553 <source project="%s" package="%s" rev="%s"/>
2558 <description>%s</description>
2562 orev or show_upstream_rev(apiurl, src_project, src_package),
2565 cgi.escape(message or ""))
2567 u = makeurl(apiurl, ['request'], query='cmd=create')
2568 f = http_POST(u, data=xml)
2570 root = ET.parse(f).getroot()
2571 return root.get('id')
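# Usage sketch (hypothetical projects/packages): create a submit request and
# keep the request id returned by the server:
#
#   reqid = create_submit_request(apiurl, 'home:user', 'foo',
#                                 'openSUSE:Factory', 'foo',
#                                 message='update to version 1.2')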
2574 def get_request(apiurl, reqid):
2575 u = makeurl(apiurl, ['request', reqid])
2577 root = ET.parse(f).getroot()
2584 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2587 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2588 f = http_POST(u, data=message)
2591 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2594 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2595 f = http_POST(u, data=message)
2599 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2601 if not 'all' in req_state:
2602 for state in req_state:
2603 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2605 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2607 # XXX: we cannot use the '|' in the xpath expression because it is not supported by the api
2611 todo['project'] = project
2613 todo['package'] = package
2614 for kind, val in todo.iteritems():
2615 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2616 'action/source/@%(kind)s=\'%(val)s\' or ' \
2617 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2618 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2620 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2621 for i in exclude_target_projects:
2622 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2623 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2625 if conf.config['verbose'] > 1:
2626 print '[ %s ]' % xpath
2627 res = search(apiurl, request=xpath)
2628 collection = res['request']
2630 for root in collection.findall('request'):
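# Usage sketch (hypothetical project name): list open submit requests that
# target one project; the xpath built above is what actually gets searched:
#
#   reqs = get_request_list(apiurl, project='openSUSE:Factory',
#                           req_state=('new',), req_type='submit')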
2636 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2637 """Return all new requests for all projects/packages where is user is involved"""
2639 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2640 for i in res['project_id'].findall('project'):
2641 projpkgs[i.get('name')] = []
2642 for i in res['package_id'].findall('package'):
2643 if not i.get('project') in projpkgs.keys():
2644 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2646 for prj, pacs in projpkgs.iteritems():
2648 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2652 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2653 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2654 xpath = xpath_join(xpath, xp, inner=True)
2656 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2657 if not 'all' in req_state:
2659 for state in req_state:
2660 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2661 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2662 res = search(apiurl, request=xpath)
2664 for root in res['request'].findall('request'):
2670 def get_request_log(apiurl, reqid):
2671 r = get_request(conf.config['apiurl'], reqid)
2673 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2674 # the description of the request is used for the initial log entry
2675 # otherwise its comment attribute would contain None
2676 if len(r.statehistory) >= 1:
2677 r.statehistory[-1].comment = r.descr
2679 r.state.comment = r.descr
2680 for state in [ r.state ] + r.statehistory:
2681 s = frmt % (state.name, state.who, state.when, str(state.comment))
2686 def get_user_meta(apiurl, user):
2687 u = makeurl(apiurl, ['person', quote_plus(user)])
2690 return ''.join(f.readlines())
2691 except urllib2.HTTPError:
2692 print 'user \'%s\' not found' % user
2696 def get_user_data(apiurl, user, *tags):
2697 """get specified tags from the user meta"""
2698 meta = get_user_meta(apiurl, user)
2701 root = ET.fromstring(meta)
2704 if root.find(tag).text != None:
2705 data.append(root.find(tag).text)
2709 except AttributeError:
2710 # this part is reached if the tags tuple contains an invalid tag
2711 print 'The xml file for user \'%s\' seems to be broken' % user
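# Usage sketch (hypothetical login): the values are assumed to come back in
# the same order as the requested tags, as built by the loop above:
#
#   email, realname = get_user_data(apiurl, 'some_user', 'email', 'realname')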
2716 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
2717 import tempfile, shutil
2720 query = { 'rev': revision }
2724 (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
2725 o = os.fdopen(fd, 'wb')
2726 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2727 for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
2730 shutil.move(tmpfile, targetfilename or filename)
2731 os.chmod(targetfilename or filename, 0644)
2739 def get_binary_file(apiurl, prj, repo, arch,
2742 target_filename = None,
2743 target_mtime = None,
2744 progress_meter = False):
2746 target_filename = target_filename or filename
2748 where = package or '_repository'
2749 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2752 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2756 binsize = int(f.headers['content-length'])
2759 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2760 os.chmod(tmpfilename, 0644)
2763 o = os.fdopen(fd, 'wb')
2767 #buf = f.read(BUFSIZE)
2771 downloaded += len(buf)
2773 completion = str(int((float(downloaded)/binsize)*100))
2774 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2779 sys.stdout.write('\n')
2781 shutil.move(tmpfilename, target_filename)
2783 os.utime(target_filename, (-1, target_mtime))
2785 # make sure that the temp file is cleaned up when we are interrupted
2787 try: os.unlink(tmpfilename)
2790 def dgst_from_string(str):
2791 # Python 2.5 deprecates the md5 module
2792 # Python 2.4 doesn't have hashlib yet
2795 md5_hash = hashlib.md5()
2798 md5_hash = md5.new()
2799 md5_hash.update(str)
2800 return md5_hash.hexdigest()
2804 #if not os.path.exists(file):
2814 f = open(file, 'rb')
2816 buf = f.read(BUFSIZE)
2819 return s.hexdigest()
2824 """return true if a string is binary data using diff's heuristic"""
2825 if s and '\0' in s[:4096]:
2830 def binary_file(fn):
2831 """read 4096 bytes from a file named fn, and call binary() on the data"""
2832 return binary(open(fn, 'rb').read(4096))
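# Usage sketch (hypothetical path): skip text diffs for files that look binary:
#
#   if binary_file('/tmp/some.archive'):
#       print 'not diffing binary file'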
2835 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2837 This method diffs oldfilename against filename (so filename will
2838 be shown as the new file).
2839 The variable origfilename is used if filename and oldfilename differ
2840 in their names (for instance if a tempfile is used for filename etc.)
2846 oldfilename = filename
2849 olddir = os.path.join(dir, store)
2851 if not origfilename:
2852 origfilename = filename
2854 file1 = os.path.join(olddir, oldfilename) # old/stored original
2855 file2 = os.path.join(dir, filename) # working copy
2857 f1 = open(file1, 'rb')
2861 f2 = open(file2, 'rb')
2865 if binary(s1) or binary(s2):
2866 d = ['Binary file %s has changed\n' % origfilename]
2869 d = difflib.unified_diff(\
2872 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2873 tofile = '%s\t(working copy)' % origfilename)
2875 # if file doesn't end with newline, we need to append one in the diff result
2877 for i, line in enumerate(d):
2878 if not line.endswith('\n'):
2879 d[i] += '\n\\ No newline at end of file'
2885 def make_diff(wc, revision):
2891 diff_hdr = 'Index: %s\n'
2892 diff_hdr += '===================================================================\n'
2894 olddir = os.getcwd()
2898 for file in wc.todo:
2899 if file in wc.filenamelist+wc.filenamelist_unvers:
2900 state = wc.status(file)
2902 added_files.append(file)
2904 removed_files.append(file)
2905 elif state == 'M' or state == 'C':
2906 changed_files.append(file)
2908 diff.append('osc: \'%s\' is not under version control' % file)
2910 for file in wc.filenamelist+wc.filenamelist_unvers:
2911 state = wc.status(file)
2912 if state == 'M' or state == 'C':
2913 changed_files.append(file)
2915 added_files.append(file)
2917 removed_files.append(file)
2919 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2921 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2922 cmp_pac = Package(tmpdir)
2924 for file in wc.todo:
2925 if file in cmp_pac.filenamelist:
2926 if file in wc.filenamelist:
2927 changed_files.append(file)
2929 diff.append('osc: \'%s\' is not under version control' % file)
2931 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2933 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2935 for file in changed_files:
2936 diff.append(diff_hdr % file)
2938 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2940 cmp_pac.updatefile(file, revision)
2941 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
2942 cmp_pac.absdir, file))
2943 (fd, tmpfile) = tempfile.mkstemp()
2944 for file in added_files:
2945 diff.append(diff_hdr % file)
2947 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
2948 os.path.dirname(tmpfile), file))
2950 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
2951 os.path.dirname(tmpfile), file))
2953 # FIXME: this is ugly but it cannot be avoided atm
2954 # if a file is deleted via "osc rm file" we should keep the storefile.
2956 if cmp_pac == None and removed_files:
2957 tmpdir = tempfile.mkdtemp()
2959 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
2960 tmp_pac = Package(tmpdir)
2963 for file in removed_files:
2964 diff.append(diff_hdr % file)
2966 tmp_pac.updatefile(file, tmp_pac.rev)
2967 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2968 wc.rev, file, tmp_pac.storedir, file))
2970 cmp_pac.updatefile(file, revision)
2971 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2972 revision, file, cmp_pac.storedir, file))
2976 delete_dir(cmp_pac.absdir)
2978 delete_dir(tmp_pac.absdir)
2982 def server_diff(apiurl,
2983 old_project, old_package, old_revision,
2984 new_project, new_package, new_revision, unified=False):
2986 query = {'cmd': 'diff', 'expand': '1'}
2988 query['oproject'] = old_project
2990 query['opackage'] = old_package
2992 query['orev'] = old_revision
2994 query['rev'] = new_revision
2996 query['unified'] = 1
2998 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
3004 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3006 creates the plain directory structure for a package dir.
3007 The 'apiurl' parameter is needed for the project dir initialization.
3008 The 'project' and 'package' parameters specify the name of the
3009 project and the package. The optional 'pathname' parameter is used
3010 for printing out the message that a new dir was created (default: 'prj_dir/package').
3011 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3013 prj_dir = prj_dir or project
3015 # FIXME: carefully test each path component of prj_dir,
3016 # if we have a .osc/_files entry at that level.
3017 # -> if so, we have a package/project clash,
3018 # and should rename this path component by appending '.proj'
3019 # and give user a warning message, to discourage such clashes
3021 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3022 if is_package_dir(prj_dir):
3023 # we want this to become a project directory,
3024 # but it already is a package directory.
3025 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3027 if not is_project_dir(prj_dir):
3028 # this directory could exist as a parent directory for one of our earlier
3029 # checked out sub-projects. In this case, we still need to initialize it.
3030 print statfrmt('A', prj_dir)
3031 init_project_dir(apiurl, prj_dir, project)
3033 if is_project_dir(os.path.join(prj_dir, package)):
3034 # the thing exists, but is a project directory and not a package directory
3035 # FIXME: this should be a warning message to discourage package/project clashes
3036 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3038 if not os.path.exists(os.path.join(prj_dir, package)):
3039 print statfrmt('A', pathname)
3040 os.mkdir(os.path.join(prj_dir, package))
3041 os.mkdir(os.path.join(prj_dir, package, store))
3043 return(os.path.join(prj_dir, package))
3046 def checkout_package(apiurl, project, package,
3047 revision=None, pathname=None, prj_obj=None,
3048 expand_link=False, prj_dir=None, service_files=None, progress_obj=None):
3050 # the project we're in might be deleted.
3051 # that'll throw an error then.
3052 olddir = os.getcwd()
3054 olddir = os.environ.get("PWD")
3059 if sys.platform[:3] == 'win':
3060 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3062 if conf.config['checkout_no_colon']:
3063 prj_dir = prj_dir.replace(':', '/')
3066 pathname = getTransActPath(os.path.join(prj_dir, package))
3068 # before we create directories and stuff, check if the package actually exists
3070 show_package_meta(apiurl, project, package)
3074 # try to read from the linkinfo
3075 # if it is a link we use the xsrcmd5 as the revision to be checked out
3078 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3080 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3085 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3086 init_package_dir(apiurl, project, package, store, revision)
3088 p = Package(package, progress_obj=progress_obj)
3091 for filename in p.filenamelist:
3092 if service_files or not filename.startswith('_service:'):
3093 p.updatefile(filename, revision)
3094 # print 'A ', os.path.join(project, package, filename)
3095 print statfrmt('A', os.path.join(pathname, filename))
3096 if conf.config['do_package_tracking']:
3097 # check if we can re-use an existing project object
3099 prj_obj = Project(os.getcwd())
3100 prj_obj.set_state(p.name, ' ')
3101 prj_obj.write_packages()
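# Usage sketch (hypothetical project/package): check out a package into the
# current directory, following a possible source link:
#
#   checkout_package(apiurl, 'home:user', 'foo', expand_link=True)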
3105 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3106 dst_userid = None, keep_develproject = False):
3108 update pkgmeta with the new new_name and new_prj values and set the calling user as the
3109 only maintainer (unless keep_maintainers is set). Additionally remove the
3110 develproject entry (<devel />) unless keep_develproject is true.
3112 root = ET.fromstring(''.join(pkgmeta))
3113 root.set('name', new_name)
3114 root.set('project', new_prj)
3115 if not keep_maintainers:
3116 for person in root.findall('person'):
3118 if not keep_develproject:
3119 for dp in root.findall('devel'):
3121 return ET.tostring(root)
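# Usage sketch (hypothetical names): retarget existing package meta to a new
# package/project, dropping maintainers and the <devel/> element:
#
#   meta = show_package_meta(apiurl, 'home:user', 'foo')
#   new_meta = replace_pkg_meta(meta, 'foo', 'home:user:branches')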
3123 def link_to_branch(apiurl, project, package):
3125 convert a package with a _link + project.diff to a branch
3128 if '_link' in meta_get_filelist(apiurl, project, package):
3129 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3132 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3134 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3136 create a linked package
3137 - "src" is the original package
3138 - "dst" is the "link" package that we are creating here
3143 dst_meta = meta_exists(metatype='pkg',
3144 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3146 create_new=False, apiurl=conf.config['apiurl'])
3148 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3149 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3154 root = ET.fromstring(''.join(dst_meta))
3155 elm = root.find('publish')
3157 elm = ET.SubElement(root, 'publish')
3159 ET.SubElement(elm, 'disable')
3160 dst_meta = ET.tostring(root)
3163 path_args=(dst_project, dst_package),
3165 # create the _link file
3166 # but first, make sure not to overwrite an existing one
3167 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3169 print >>sys.stderr, 'forced overwrite of existing _link file'
3172 print >>sys.stderr, '_link file already exists...! Aborting'
3176 rev = 'rev="%s"' % rev
3181 cicount = 'cicount="%s"' % cicount
3185 print 'Creating _link...',
3186 link_template = """\
3187 <link project="%s" package="%s" %s %s>
3189 <!-- <apply name="patch" /> apply a patch on the source directory -->
3190 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3191 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3192 <!-- <delete>filename</delete> delete a file -->
3195 """ % (src_project, src_package, rev, cicount)
3197 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3198 http_PUT(u, data=link_template)
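# Usage sketch (hypothetical projects/packages): create a _link in the
# destination package without publishing the build results:
#
#   link_pac('openSUSE:Factory', 'foo', 'home:user', 'foo',
#            force=False, disable_publish=True)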
3201 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3204 - "src" is the original package
3205 - "dst" is the "aggregate" package that we are creating here
3206 - "map" is a dictionary SRC => TARGET repository mappings
3211 dst_meta = meta_exists(metatype='pkg',
3212 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3214 create_new=False, apiurl=conf.config['apiurl'])
3216 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3217 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3222 root = ET.fromstring(''.join(dst_meta))
3223 elm = root.find('publish')
3225 elm = ET.SubElement(root, 'publish')
3227 ET.SubElement(elm, 'disable')
3228 dst_meta = ET.tostring(root)
3231 path_args=(dst_project, dst_package),
3234 # create the _aggregate file
3235 # but first, make sure not to overwrite an existing one
3236 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3238 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3241 print 'Creating _aggregate...',
3242 aggregate_template = """\
3244 <aggregate project="%s">
3246 for tgt, src in repo_map.iteritems():
3247 aggregate_template += """\
3248 <repository target="%s" source="%s" />
3251 aggregate_template += """\
3252 <package>%s</package>
3255 """ % ( src_package)
3257 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3258 http_PUT(u, data=aggregate_template)
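# Usage sketch (hypothetical projects/packages): aggregate a package's
# binaries into another project without rebuilding them:
#
#   aggregate_pac('devel:languages:python', 'python-foo',
#                 'home:user', 'python-foo')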
3262 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3264 Branch packages defined via attributes (via API call)
3266 query = { 'cmd': 'branch' }
3267 query['attribute'] = attribute
3269 query['target_project'] = targetproject
3271 query['package'] = package
3272 if maintained_update_project_attribute:
3273 query['update_project_attribute'] = maintained_update_project_attribute
3275 u = makeurl(apiurl, ['source'], query=query)
3279 except urllib2.HTTPError, e:
3280 msg = ''.join(e.readlines())
3281 msg = msg.split('<summary>')[1]
3282 msg = msg.split('</summary>')[0]
3283 m = re.match(r"attribute branch call failed: (\S+)/", msg)
3287 r = r.split('targetproject">')[1]
3288 r = r.split('</data>')[0]
3292 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False, msg=''):
3294 Branch a package (via API call)
3296 query = { 'cmd': 'branch' }
3298 query['ignoredevel'] = '1'
3302 query['target_project'] = target_project
3304 query['target_package'] = target_package
3306 query['comment'] = msg
3307 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
3310 except urllib2.HTTPError, e:
3311 if not return_existing:
3313 msg = ''.join(e.readlines())
3314 msg = msg.split('<summary>')[1]
3315 msg = msg.split('</summary>')[0]
3316 m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
3320 return (True, m.group(1), m.group(2), None, None)
3323 for i in ET.fromstring(f.read()).findall('data'):
3324 data[i.get('name')] = i.text
3325 return (False, data.get('targetproject', None), data.get('targetpackage', None),
3326 data.get('sourceproject', None), data.get('sourcepackage', None))
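# Usage sketch (hypothetical project/package): branch a package and unpack the
# five-element tuple returned above (exists flag, target and source locations):
#
#   exists, tprj, tpac, sprj, spac = branch_pkg(apiurl, 'openSUSE:Factory',
#                                               'osc', return_existing=True)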
3329 def copy_pac(src_apiurl, src_project, src_package,
3330 dst_apiurl, dst_project, dst_package,
3331 client_side_copy = False,
3332 keep_maintainers = False,
3333 keep_develproject = False,
3338 Create a copy of a package.
3340 Copying can be done by downloading the files from one package and committing
3341 them into the other by uploading them (client-side copy) --
3342 or by the server, in a single api call.
3345 src_meta = show_package_meta(src_apiurl, src_project, src_package)
3346 dst_userid = conf.get_apiurl_usr(dst_apiurl)
3347 src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
3348 dst_userid, keep_develproject)
3350 print 'Sending meta data...'
3351 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
3352 http_PUT(u, data=src_meta)
3354 print 'Copying files...'
3355 if not client_side_copy:
3356 query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
3358 query['expand'] = '1'
3360 query['orev'] = revision
3362 query['comment'] = comment
3363 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
3368 # copy one file after the other
3370 tmpdir = tempfile.mkdtemp(prefix='osc_copypac')