1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breaking tools which use the osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E',
199 # os.path.samefile is available only under Unix
200 def os_path_samefile(path1, path2):
202 return os.path.samefile(path1, path2)
204 return os.path.realpath(path1) == os.path.realpath(path2)
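# Hedged usage sketch (paths are hypothetical, not from the original code):
#   os_path_samefile('/tmp/foo', '/tmp/../tmp/foo')   # True - same file after resolving '..'
#   os_path_samefile('/tmp/foo', '/tmp/bar')          # False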
207 """represent a file, including its metadata"""
208 def __init__(self, name, md5, size, mtime):
218 """Source service content
221 """creates an empty serviceinfo instance"""
224 def read(self, serviceinfo_node):
225 """read in the source services <services> element passed as
228 if serviceinfo_node == None:
231 services = serviceinfo_node.findall('service')
233 for service in services:
234 name = service.get('name')
236 for param in service.findall('param'):
237 option = param.get('name', None)
239 name += " --" + option + " '" + value + "'"
240 self.commands.append(name)
242 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
243 raise oscerr.APIError(msg)
245 def execute(self, dir):
248 for call in self.commands:
249 temp_dir = tempfile.mkdtemp()
250 name = call.split(None, 1)[0]
251 if not os.path.exists("/usr/lib/obs/service/"+name):
252 msg = "ERROR: service is not installed !"
253 msg += "Can maybe solved with: zypper in obs-server-" + name
254 raise oscerr.APIError(msg)
255 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
256 ret = subprocess.call(c, shell=True)
258 print "ERROR: service call failed: " + c
260 for file in os.listdir(temp_dir):
261 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
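# Illustrative note (an assumption, not from the original sources): for a
# <service name="download_files"> entry with one <param name="url">, the
# command list holds something like "download_files --url 'http://example.com/foo.tar'",
# so the shell call built above ends up roughly as
#   /usr/lib/obs/service/download_files --url 'http://example.com/foo.tar' --outdir /tmp/tmpXXXXXX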
265 """linkinfo metadata (which is part of the xml representing a directory
268 """creates an empty linkinfo instance"""
278 def read(self, linkinfo_node):
279 """read in the linkinfo metadata from the <linkinfo> element passed as
281 If the passed element is None, the method does nothing.
283 if linkinfo_node == None:
285 self.project = linkinfo_node.get('project')
286 self.package = linkinfo_node.get('package')
287 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
288 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
289 self.srcmd5 = linkinfo_node.get('srcmd5')
290 self.error = linkinfo_node.get('error')
291 self.rev = linkinfo_node.get('rev')
292 self.baserev = linkinfo_node.get('baserev')
295 """returns True if the linkinfo is not empty, otherwise False"""
296 if self.xsrcmd5 or self.lsrcmd5:
300 def isexpanded(self):
301 """returns True if the package is an expanded link"""
302 if self.lsrcmd5 and not self.xsrcmd5:
307 """returns True if the link is in error state (could not be applied)"""
313 """return an informatory string representation"""
314 if self.islink() and not self.isexpanded():
315 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
316 % (self.project, self.package, self.xsrcmd5, self.rev)
317 elif self.islink() and self.isexpanded():
319 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
320 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
322 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
323 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
329 """represent a project directory, holding packages"""
330 def __init__(self, dir, getPackageList=True, progress_obj=None):
333 self.absdir = os.path.abspath(dir)
334 self.progress_obj = progress_obj
336 self.name = store_read_project(self.dir)
337 self.apiurl = store_read_apiurl(self.dir)
340 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
342 self.pacs_available = []
344 if conf.config['do_package_tracking']:
345 self.pac_root = self.read_packages().getroot()
346 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
347 self.pacs_excluded = [ i for i in os.listdir(self.dir)
348 for j in conf.config['exclude_glob']
349 if fnmatch.fnmatch(i, j) ]
350 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
351 # store all broken packages (e.g. packages which were removed by a non-osc command)
352 # in the self.pacs_broken list
353 self.pacs_broken = []
354 for p in self.pacs_have:
355 if not os.path.isdir(os.path.join(self.absdir, p)):
356 # all states will be replaced with the '!'-state
357 # (unless it is already marked as deleted ('D'-state))
358 self.pacs_broken.append(p)
360 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
362 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
364 def checkout_missing_pacs(self, expand_link=False):
365 for pac in self.pacs_missing:
367 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
368 # pac is not under version control but a local file/dir exists
369 msg = 'can\'t add package \'%s\': Object already exists' % pac
370 raise oscerr.PackageExists(self.name, pac, msg)
372 print 'checking out new package %s' % pac
373 checkout_package(self.apiurl, self.name, pac, \
374 pathname=getTransActPath(os.path.join(self.dir, pac)), \
375 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
377 def set_state(self, pac, state):
378 node = self.get_package_node(pac)
380 self.new_package_entry(pac, state)
382 node.attrib['state'] = state
384 def get_package_node(self, pac):
385 for node in self.pac_root.findall('package'):
386 if pac == node.get('name'):
390 def del_package_node(self, pac):
391 for node in self.pac_root.findall('package'):
392 if pac == node.get('name'):
393 self.pac_root.remove(node)
395 def get_state(self, pac):
396 node = self.get_package_node(pac)
398 return node.get('state')
402 def new_package_entry(self, name, state):
403 ET.SubElement(self.pac_root, 'package', name=name, state=state)
405 def read_packages(self):
406 packages_file = os.path.join(self.absdir, store, '_packages')
407 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
408 return ET.parse(packages_file)
410 # scan project for existing packages and migrate them
412 for data in os.listdir(self.dir):
413 pac_dir = os.path.join(self.absdir, data)
414 # we cannot use self.pacs_available because we cannot guarantee that the package list
415 # was fetched from the server
416 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
417 and Package(pac_dir).name == data:
418 cur_pacs.append(ET.Element('package', name=data, state=' '))
419 store_write_initial_packages(self.absdir, self.name, cur_pacs)
420 return ET.parse(os.path.join(self.absdir, store, '_packages'))
422 def write_packages(self):
423 # TODO: should we only modify the existing file instead of overwriting?
424 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
426 def addPackage(self, pac):
428 for i in conf.config['exclude_glob']:
429 if fnmatch.fnmatch(pac, i):
430 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
431 raise oscerr.OscIOError(None, msg)
432 state = self.get_state(pac)
433 if state == None or state == 'D':
434 self.new_package_entry(pac, 'A')
435 self.write_packages()
436 # sometimes the new pac doesn't exist in the list because
437 # it would take too much time to update all data structs regularly
438 if pac in self.pacs_unvers:
439 self.pacs_unvers.remove(pac)
441 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
443 def delPackage(self, pac, force = False):
444 state = self.get_state(pac.name)
446 if state == ' ' or state == 'D':
448 for file in pac.filenamelist + pac.filenamelist_unvers:
449 filestate = pac.status(file)
450 if filestate == 'M' or filestate == 'C' or \
451 filestate == 'A' or filestate == '?':
454 del_files.append(file)
455 if can_delete or force:
456 for file in del_files:
457 pac.delete_localfile(file)
458 if pac.status(file) != '?':
459 pac.delete_storefile(file)
460 # this is not really necessary
461 pac.put_on_deletelist(file)
462 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
464 pac.write_deletelist()
465 self.set_state(pac.name, 'D')
466 self.write_packages()
468 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
471 delete_dir(pac.absdir)
472 self.del_package_node(pac.name)
473 self.write_packages()
474 print statfrmt('D', pac.name)
476 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
478 print 'package is not under version control'
480 print 'unsupported state'
482 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
485 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
487 # we need to make sure that the _packages file will be written (even if an exception
490 # update complete project
491 # packages which no longer exist upstream
492 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
494 for pac in upstream_del:
495 p = Package(os.path.join(self.dir, pac))
496 self.delPackage(p, force = True)
497 delete_storedir(p.storedir)
502 self.pac_root.remove(self.get_package_node(p.name))
503 self.pacs_have.remove(pac)
505 for pac in self.pacs_have:
506 state = self.get_state(pac)
507 if pac in self.pacs_broken:
508 if self.get_state(pac) != 'A':
509 checkout_package(self.apiurl, self.name, pac,
510 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
511 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
514 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
516 if expand_link and p.islink() and not p.isexpanded():
519 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
524 rev = p.linkinfo.xsrcmd5
525 print 'Expanding to rev', rev
526 elif unexpand_link and p.islink() and p.isexpanded():
527 rev = p.linkinfo.lsrcmd5
528 print 'Unexpanding to rev', rev
529 elif p.islink() and p.isexpanded():
531 print 'Updating %s' % p.name
532 p.update(rev, service_files)
536 # TODO: Package::update has to be fixed to behave like svn does
537 if pac in self.pacs_broken:
538 checkout_package(self.apiurl, self.name, pac,
539 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
540 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
542 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
543 elif state == 'A' and pac in self.pacs_available:
544 # file/dir called pac already exists and is under version control
545 msg = 'can\'t add package \'%s\': Object already exists' % pac
546 raise oscerr.PackageExists(self.name, pac, msg)
551 print 'unexpected state.. package \'%s\'' % pac
553 self.checkout_missing_pacs(expand_link=not unexpand_link)
555 self.write_packages()
557 def commit(self, pacs = (), msg = '', files = {}):
562 if files.has_key(pac):
564 state = self.get_state(pac)
566 self.commitNewPackage(pac, msg, todo)
568 self.commitDelPackage(pac)
570 # display the correct dir when sending the changes
571 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
574 p = Package(os.path.join(self.dir, pac))
577 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
578 print 'osc: \'%s\' is not under version control' % pac
579 elif pac in self.pacs_broken:
580 print 'osc: \'%s\' package not found' % pac
582 self.commitExtPackage(pac, msg, todo)
584 self.write_packages()
586 # if we have packages marked as '!' we cannot commit
587 for pac in self.pacs_broken:
588 if self.get_state(pac) != 'D':
589 msg = 'commit failed: package \'%s\' is missing' % pac
590 raise oscerr.PackageMissing(self.name, pac, msg)
592 for pac in self.pacs_have:
593 state = self.get_state(pac)
596 Package(os.path.join(self.dir, pac)).commit(msg)
598 self.commitDelPackage(pac)
600 self.commitNewPackage(pac, msg)
602 self.write_packages()
604 def commitNewPackage(self, pac, msg = '', files = []):
605 """creates and commits a new package if it does not exist on the server"""
606 if pac in self.pacs_available:
607 print 'package \'%s\' already exists' % pac
609 user = conf.get_apiurl_usr(self.apiurl)
610 edit_meta(metatype='pkg',
611 path_args=(quote_plus(self.name), quote_plus(pac)),
616 # display the correct dir when sending the changes
618 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
622 p = Package(os.path.join(self.dir, pac))
624 print statfrmt('Sending', os.path.normpath(p.dir))
626 self.set_state(pac, ' ')
629 def commitDelPackage(self, pac):
630 """deletes a package on the server and in the working copy"""
632 # display the correct dir when sending the changes
633 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
636 pac_dir = os.path.join(self.dir, pac)
637 p = Package(os.path.join(self.dir, pac))
638 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
639 delete_storedir(p.storedir)
645 pac_dir = os.path.join(self.dir, pac)
646 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
647 print statfrmt('Deleting', getTransActPath(pac_dir))
648 delete_package(self.apiurl, self.name, pac)
649 self.del_package_node(pac)
651 def commitExtPackage(self, pac, msg, files = []):
652 """commits a package from an external project"""
653 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
656 pac_path = os.path.join(self.dir, pac)
658 project = store_read_project(pac_path)
659 package = store_read_package(pac_path)
660 apiurl = store_read_apiurl(pac_path)
661 if meta_exists(metatype='pkg',
662 path_args=(quote_plus(project), quote_plus(package)),
664 create_new=False, apiurl=apiurl):
665 p = Package(pac_path)
669 user = conf.get_apiurl_usr(self.apiurl)
670 edit_meta(metatype='pkg',
671 path_args=(quote_plus(project), quote_plus(package)),
676 p = Package(pac_path)
682 r.append('*****************************************************')
683 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
684 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
685 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
686 r.append('*****************************************************')
692 """represent a package (its directory) and read/keep/write its metadata"""
693 def __init__(self, workingdir, progress_obj=None, limit_size=None):
694 self.dir = workingdir
695 self.absdir = os.path.abspath(self.dir)
696 self.storedir = os.path.join(self.absdir, store)
697 self.progress_obj = progress_obj
698 self.limit_size = limit_size
699 if limit_size == 0:  # a limit_size of 0 means no limit
700 self.limit_size = None
702 check_store_version(self.dir)
704 self.prjname = store_read_project(self.dir)
705 self.name = store_read_package(self.dir)
706 self.apiurl = store_read_apiurl(self.dir)
708 self.update_datastructs()
712 self.todo_delete = []
715 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
716 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
719 def addfile(self, n):
720 st = os.stat(os.path.join(self.dir, n))
721 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
723 def delete_file(self, n, force=False):
724 """deletes a file if possible and marks the file as deleted"""
727 state = self.status(n)
731 if state in ['?', 'A', 'M'] and not force:
732 return (False, state)
733 self.delete_localfile(n)
735 self.put_on_deletelist(n)
736 self.write_deletelist()
738 self.delete_storefile(n)
741 def delete_storefile(self, n):
742 try: os.unlink(os.path.join(self.storedir, n))
745 def delete_localfile(self, n):
746 try: os.unlink(os.path.join(self.dir, n))
749 def put_on_deletelist(self, n):
750 if n not in self.to_be_deleted:
751 self.to_be_deleted.append(n)
753 def put_on_conflictlist(self, n):
754 if n not in self.in_conflict:
755 self.in_conflict.append(n)
757 def clear_from_conflictlist(self, n):
758 """delete an entry from the file, and remove the file if it would be empty"""
759 if n in self.in_conflict:
761 filename = os.path.join(self.dir, n)
762 storefilename = os.path.join(self.storedir, n)
763 myfilename = os.path.join(self.dir, n + '.mine')
764 if self.islinkrepair() or self.ispulled():
765 upfilename = os.path.join(self.dir, n + '.new')
767 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
770 os.unlink(myfilename)
771 # the working copy may be updated, so the .r* ending may be obsolete...
773 os.unlink(upfilename)
774 if self.islinkrepair() or self.ispulled():
775 os.unlink(os.path.join(self.dir, n + '.old'))
779 self.in_conflict.remove(n)
781 self.write_conflictlist()
783 def write_sizelimit(self):
784 if self.size_limit and self.size_limit <= 0:
786 os.unlink(os.path.join(self.storedir, '_size_limit'))
790 fname = os.path.join(self.storedir, '_size_limit')
792 f.write(str(self.size_limit))
795 def write_deletelist(self):
796 if len(self.to_be_deleted) == 0:
798 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
802 fname = os.path.join(self.storedir, '_to_be_deleted')
804 f.write('\n'.join(self.to_be_deleted))
808 def delete_source_file(self, n):
809 """delete local a source file"""
810 self.delete_localfile(n)
811 self.delete_storefile(n)
813 def delete_remote_source_file(self, n):
814 """delete a remote source file (e.g. from the server)"""
816 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
819 def put_source_file(self, n):
821 # escaping '+' in the URL path (note: not in the URL query string) is
822 # only a workaround for ruby on rails, which swallows it otherwise
824 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
825 http_PUT(u, file = os.path.join(self.dir, n))
827 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
829 def commit(self, msg=''):
830 # commit only if the upstream revision is the same as the working copy's
831 upstream_rev = self.latest_rev()
832 if self.rev != upstream_rev:
833 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
836 self.todo = self.filenamelist_unvers + self.filenamelist
838 pathn = getTransActPath(self.dir)
840 have_conflicts = False
841 for filename in self.todo:
842 if not filename.startswith('_service:') and not filename.startswith('_service_'):
843 st = self.status(filename)
845 self.todo.remove(filename)
846 elif st == 'A' or st == 'M':
847 self.todo_send.append(filename)
848 print statfrmt('Sending', os.path.join(pathn, filename))
850 self.todo_delete.append(filename)
851 print statfrmt('Deleting', os.path.join(pathn, filename))
853 have_conflicts = True
856 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
859 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
860 print 'nothing to do for package %s' % self.name
863 if self.islink() and self.isexpanded():
864 # resolve the link into the upload revision
865 # XXX: do this always?
866 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
867 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
870 print 'Transmitting file data ',
872 for filename in self.todo_delete:
873 # do not touch local files on commit --
874 # delete remotely instead
875 self.delete_remote_source_file(filename)
876 self.to_be_deleted.remove(filename)
877 for filename in self.todo_send:
878 sys.stdout.write('.')
880 self.put_source_file(filename)
882 # all source files are committed - now comes the log
883 query = { 'cmd' : 'commit',
885 'user' : conf.get_apiurl_usr(self.apiurl),
887 if self.islink() and self.isexpanded():
888 query['keeplink'] = '1'
889 if conf.config['linkcontrol'] or self.isfrozen():
890 query['linkrev'] = self.linkinfo.srcmd5
892 query['repairlink'] = '1'
893 query['linkrev'] = self.get_pulled_srcmd5()
894 if self.islinkrepair():
895 query['repairlink'] = '1'
896 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
899 # delete upload revision
901 query = { 'cmd': 'deleteuploadrev' }
902 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
908 root = ET.parse(f).getroot()
909 self.rev = int(root.get('rev'))
911 print 'Committed revision %s.' % self.rev
914 os.unlink(os.path.join(self.storedir, '_pulled'))
915 if self.islinkrepair():
916 os.unlink(os.path.join(self.storedir, '_linkrepair'))
917 self.linkrepair = False
918 # XXX: mark package as invalid?
919 print 'The source link has been repaired. This directory can now be removed.'
920 if self.islink() and self.isexpanded():
921 self.update_local_filesmeta(revision=self.latest_rev())
923 self.update_local_filesmeta()
924 self.write_deletelist()
925 self.update_datastructs()
927 if self.filenamelist.count('_service'):
928 print 'The package contains a source service.'
929 for filename in self.todo:
930 if filename.startswith('_service:') and os.path.exists(filename):
931 os.unlink(filename) # remove local files
932 print_request_list(self.apiurl, self.prjname, self.name)
934 def write_conflictlist(self):
935 if len(self.in_conflict) == 0:
937 os.unlink(os.path.join(self.storedir, '_in_conflict'))
941 fname = os.path.join(self.storedir, '_in_conflict')
943 f.write('\n'.join(self.in_conflict))
947 def updatefile(self, n, revision):
948 filename = os.path.join(self.dir, n)
949 storefilename = os.path.join(self.storedir, n)
950 mtime = self.findfilebyname(n).mtime
952 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
953 os.utime(filename, (-1, mtime))
955 shutil.copyfile(filename, storefilename)
957 def mergefile(self, n):
958 filename = os.path.join(self.dir, n)
959 storefilename = os.path.join(self.storedir, n)
960 myfilename = os.path.join(self.dir, n + '.mine')
961 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
962 os.rename(filename, myfilename)
964 mtime = self.findfilebyname(n).mtime
965 get_source_file(self.apiurl, self.prjname, self.name, n,
966 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
967 os.utime(upfilename, (-1, mtime))
969 if binary_file(myfilename) or binary_file(upfilename):
971 shutil.copyfile(upfilename, filename)
972 shutil.copyfile(upfilename, storefilename)
973 self.in_conflict.append(n)
974 self.write_conflictlist()
978 # diff3 OPTIONS... MINE OLDER YOURS
979 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
980 # we would rather use the subprocess module, but it is not available before 2.4
981 ret = subprocess.call(merge_cmd, shell=True)
983 # "An exit status of 0 means `diff3' was successful, 1 means some
984 # conflicts were found, and 2 means trouble."
986 # merge was successful... clean up
987 shutil.copyfile(upfilename, storefilename)
988 os.unlink(upfilename)
989 os.unlink(myfilename)
993 shutil.copyfile(upfilename, storefilename)
994 self.in_conflict.append(n)
995 self.write_conflictlist()
998 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
999 print >>sys.stderr, 'the command line was:'
1000 print >>sys.stderr, merge_cmd
1005 def update_local_filesmeta(self, revision=None):
1007 Update the local _files file in the store.
1008 It is replaced with the version pulled from upstream.
1010 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
1011 store_write_string(self.absdir, '_files', meta)
1013 def update_datastructs(self):
1015 Update the internal data structures if the local _files
1016 file has changed (e.g. update_local_filesmeta() has been
1020 files_tree = read_filemeta(self.dir)
1021 files_tree_root = files_tree.getroot()
1023 self.rev = files_tree_root.get('rev')
1024 self.srcmd5 = files_tree_root.get('srcmd5')
1026 self.linkinfo = Linkinfo()
1027 self.linkinfo.read(files_tree_root.find('linkinfo'))
1029 self.filenamelist = []
1032 for node in files_tree_root.findall('entry'):
1034 f = File(node.get('name'),
1036 int(node.get('size')),
1037 int(node.get('mtime')))
1038 if node.get('skipped'):
1039 self.skipped.append(f.name)
1041 # okay, a very old version of _files, which didn't contain any metadata yet...
1042 f = File(node.get('name'), '', 0, 0)
1043 self.filelist.append(f)
1044 self.filenamelist.append(f.name)
1046 self.to_be_deleted = read_tobedeleted(self.dir)
1047 self.in_conflict = read_inconflict(self.dir)
1048 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1049 self.size_limit = read_sizelimit(self.dir)
1051 # gather unversioned files, but ignore some stuff
1052 self.excluded = [ i for i in os.listdir(self.dir)
1053 for j in conf.config['exclude_glob']
1054 if fnmatch.fnmatch(i, j) ]
1055 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1056 if i not in self.excluded
1057 if i not in self.filenamelist ]
1060 """tells us if the package is a link (has 'linkinfo').
1061 A package with linkinfo is a package which links to another package.
1062 Returns True if the package is a link, otherwise False."""
1063 return self.linkinfo.islink()
1065 def isexpanded(self):
1066 """tells us if the package is a link which is expanded.
1067 Returns True if the package is expanded, otherwise False."""
1068 return self.linkinfo.isexpanded()
1070 def islinkrepair(self):
1071 """tells us if we are repairing a broken source link."""
1072 return self.linkrepair
1075 """tells us if we have pulled a link."""
1076 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1079 """tells us if the link is frozen."""
1080 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1082 def get_pulled_srcmd5(self):
1084 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1085 pulledrev = line.strip()
1088 def haslinkerror(self):
1090 Returns True if the link is broken otherwise False.
1091 If the package is not a link it returns False.
1093 return self.linkinfo.haserror()
1095 def linkerror(self):
1097 Returns an error message if the link is broken otherwise None.
1098 If the package is not a link it returns None.
1100 return self.linkinfo.error
1102 def update_local_pacmeta(self):
1104 Update the local _meta file in the store.
1105 It is replaced with the version pulled from upstream.
1107 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1108 store_write_string(self.absdir, '_meta', meta)
1110 def findfilebyname(self, n):
1111 for i in self.filelist:
1115 def status(self, n):
1119 file    storefile   file present   STATUS
1120 exists  exists      in _files
1123   x        x            x          ' '   if digest differs: 'M'
1124                                          and if in conflicts file: 'C'
1126   x        -            x          'D'   and listed in _to_be_deleted
1128   -        x            -          'D'   (when file in working copy is already deleted)
1129   -        -            x          'F'   (new in repo, but not yet in working copy)
1134 known_by_meta = False
1136 exists_in_store = False
1137 if n in self.filenamelist:
1138 known_by_meta = True
1139 if os.path.exists(os.path.join(self.absdir, n)):
1141 if os.path.exists(os.path.join(self.storedir, n)):
1142 exists_in_store = True
1145 if n in self.skipped:
1147 elif exists and not exists_in_store and known_by_meta:
1149 elif n in self.to_be_deleted:
1151 elif n in self.in_conflict:
1153 elif exists and exists_in_store and known_by_meta:
1154 #print self.findfilebyname(n)
1155 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1159 elif exists and not exists_in_store and not known_by_meta:
1161 elif exists and exists_in_store and not known_by_meta:
1163 elif not exists and exists_in_store and known_by_meta:
1165 elif not exists and not exists_in_store and known_by_meta:
1167 elif not exists and exists_in_store and not known_by_meta:
1169 elif not exists and not exists_in_store and not known_by_meta:
1170 # this case shouldn't happen (unless there was a typo in the filename etc.)
1171 raise IOError('osc: \'%s\' is not under version control' % n)
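# Hedged usage sketch (file names are hypothetical, not from the original code):
# inside a checked-out package working copy, the table above maps to calls like
#   p = Package('.')
#   p.status('existing.spec')    # ' ' if unchanged, 'M' if locally modified
#   p.status('untracked.patch')  # '?'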
1175 def comparePac(self, cmp_pac):
1177 This method compares the local filelist with
1178 the filelist of the passed package to see which files
1179 were added, removed and changed.
1186 for file in self.filenamelist+self.filenamelist_unvers:
1187 state = self.status(file)
1188 if file in self.skipped:
1190 if state == 'A' and (not file in cmp_pac.filenamelist):
1191 added_files.append(file)
1192 elif file in cmp_pac.filenamelist and state == 'D':
1193 removed_files.append(file)
1194 elif state == ' ' and not file in cmp_pac.filenamelist:
1195 added_files.append(file)
1196 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1197 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1198 changed_files.append(file)
1199 for file in cmp_pac.filenamelist:
1200 if not file in self.filenamelist:
1201 removed_files.append(file)
1202 removed_files = set(removed_files)
1204 return changed_files, added_files, removed_files
1206 def merge(self, otherpac):
1207 self.todo += otherpac.todo
1221 '\n '.join(self.filenamelist),
1229 def read_meta_from_spec(self, spec = None):
1234 # scan for spec files
1235 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1236 if len(speclist) == 1:
1237 specfile = speclist[0]
1238 elif len(speclist) > 1:
1239 print 'the following specfiles were found:'
1240 for file in speclist:
1242 print 'please specify one with --specfile'
1245 print 'no specfile was found - please specify one ' \
1249 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1250 self.summary = data['Summary']
1251 self.url = data['Url']
1252 self.descr = data['%description']
1255 def update_package_meta(self, force=False):
1257 for the updatepacmetafromspec subcommand
1258 the argument force suppresses the confirmation question
1261 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1263 root = ET.fromstring(m)
1264 root.find('title').text = self.summary
1265 root.find('description').text = ''.join(self.descr)
1266 url = root.find('url')
1268 url = ET.SubElement(root, 'url')
1271 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1272 mf = metafile(u, ET.tostring(root))
1275 print '*' * 36, 'old', '*' * 36
1277 print '*' * 36, 'new', '*' * 36
1278 print ET.tostring(root)
1280 repl = raw_input('Write? (y/N/e) ')
1291 def mark_frozen(self):
1292 store_write_string(self.absdir, '_frozenlink', '')
1294 print "The link in this package is currently broken. Checking"
1295 print "out the last working version instead; please use 'osc pull'"
1296 print "to repair the link."
1299 def unmark_frozen(self):
1300 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1301 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1303 def latest_rev(self):
1304 if self.islinkrepair():
1305 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1306 elif self.islink() and self.isexpanded():
1307 if self.isfrozen() or self.ispulled():
1308 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1311 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1314 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1316 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1319 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1322 def update(self, rev = None, service_files = False, limit_size = None):
1323 # save filelist and (modified) status before replacing the meta file
1324 saved_filenames = self.filenamelist
1325 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1329 self.limit_size = limit_size
1331 self.limit_size = read_sizelimit(self.dir)
1332 self.update_local_filesmeta(rev)
1333 self = Package(self.dir, progress_obj=self.progress_obj)
1335 # which files no longer exist upstream?
1336 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1338 pathn = getTransActPath(self.dir)
1340 for filename in saved_filenames:
1341 if filename in self.skipped:
1343 if not filename.startswith('_service:') and filename in disappeared:
1344 print statfrmt('D', os.path.join(pathn, filename))
1345 # keep file if it has local modifications
1346 if oldp.status(filename) == ' ':
1347 self.delete_localfile(filename)
1348 self.delete_storefile(filename)
1350 for filename in self.filenamelist:
1351 if filename in self.skipped:
1354 state = self.status(filename)
1355 if not service_files and filename.startswith('_service:'):
1357 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1358 # no merge necessary... local file is changed, but upstream isn't
1360 elif state == 'M' and filename in saved_modifiedfiles:
1361 status_after_merge = self.mergefile(filename)
1362 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1364 self.updatefile(filename, rev)
1365 print statfrmt('U', os.path.join(pathn, filename))
1367 self.updatefile(filename, rev)
1368 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1370 self.updatefile(filename, rev)
1371 print statfrmt('A', os.path.join(pathn, filename))
1372 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1373 self.updatefile(filename, rev)
1374 self.delete_storefile(filename)
1375 print statfrmt('U', os.path.join(pathn, filename))
1379 self.update_local_pacmeta()
1381 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1382 print 'At revision %s.' % self.rev
1384 if not service_files:
1385 self.run_source_services()
1387 def run_source_services(self):
1388 if self.filenamelist.count('_service'):
1389 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1392 si.execute(self.absdir)
1394 def prepare_filelist(self):
1395 """Prepare a list of files, which will be processed by process_filelist
1396 method. This allows easy modifications of a file list in commit
1400 self.todo = self.filenamelist + self.filenamelist_unvers
1404 for f in [f for f in self.todo if not os.path.isdir(f)]:
1406 status = self.status(f)
1411 ret += "%s %s %s\n" % (action, status, f)
1414 # Edit a filelist for package \'%s\'
1416 # l, leave = leave a file as is
1417 # r, remove = remove a file
1418 # a, add = add a file
1420 # If you remove a file from the list, it will be left unchanged
1421 # If you remove all files, the commit will be aborted""" % self.name
1425 def edit_filelist(self):
1426 """Opens a package list in editor for editing. This allows easy
1427 modifications of it just by simple text editing
1431 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1432 f = os.fdopen(fd, 'w')
1433 f.write(self.prepare_filelist())
1435 mtime_orig = os.stat(filename).st_mtime
1437 if sys.platform[:3] != 'win':
1438 editor = os.getenv('EDITOR', default='vim')
1440 editor = os.getenv('EDITOR', default='notepad')
1442 subprocess.call('%s %s' % (editor, filename), shell=True)
1443 mtime = os.stat(filename).st_mtime
1444 if mtime_orig < mtime:
1445 filelist = open(filename).readlines()
1449 raise oscerr.UserAbort()
1451 return self.process_filelist(filelist)
1453 def process_filelist(self, filelist):
1454 """Process a filelist - it add/remove or leave files. This depends on
1455 user input. If no file is processed, it raises an ValueError
1459 for line in [l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')]:
1461 foo = line.split(' ')
1463 action, state, name = (foo[0], ' ', foo[3])
1465 action, state, name = (foo[0], foo[1], foo[2])
1468 action = action.lower()
1471 if action in ('r', 'remove'):
1472 if self.status(name) == '?':
1474 if name in self.todo:
1475 self.todo.remove(name)
1477 self.delete_file(name, True)
1478 elif action in ('a', 'add'):
1479 if self.status(name) != '?':
1480 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1483 elif action in ('l', 'leave'):
1486 raise ValueError("Unknow action `%s'" % action)
1489 raise ValueError("Empty filelist")
1492 """for objects to represent the review state in a request"""
1493 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1495 self.by_user = by_user
1496 self.by_group = by_group
1499 self.comment = comment
1502 """for objects to represent the "state" of a request"""
1503 def __init__(self, name=None, who=None, when=None, comment=None):
1507 self.comment = comment
1510 """represents an action"""
1511 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1513 self.src_project = src_project
1514 self.src_package = src_package
1515 self.src_rev = src_rev
1516 self.dst_project = dst_project
1517 self.dst_package = dst_package
1518 self.src_update = src_update
1521 """represent a request and holds its metadata
1522 it has methods to read in metadata from xml,
1523 different views, ..."""
1526 self.state = RequestState()
1529 self.last_author = None
1532 self.statehistory = []
1535 def read(self, root):
1536 self.reqid = int(root.get('id'))
1537 actions = root.findall('action')
1538 if len(actions) == 0:
1539 actions = [ root.find('submit') ] # for old style requests
1541 for action in actions:
1542 type = action.get('type', 'submit')
1544 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1545 if action.findall('source'):
1546 n = action.find('source')
1547 src_prj = n.get('project', None)
1548 src_pkg = n.get('package', None)
1549 src_rev = n.get('rev', None)
1550 if action.findall('target'):
1551 n = action.find('target')
1552 dst_prj = n.get('project', None)
1553 dst_pkg = n.get('package', None)
1554 if action.findall('options'):
1555 n = action.find('options')
1556 if n.findall('sourceupdate'):
1557 src_update = n.find('sourceupdate').text.strip()
1558 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1560 msg = 'invalid request format:\n%s' % ET.tostring(root)
1561 raise oscerr.APIError(msg)
1564 n = root.find('state')
1565 self.state.name, self.state.who, self.state.when \
1566 = n.get('name'), n.get('who'), n.get('when')
1568 self.state.comment = n.find('comment').text.strip()
1570 self.state.comment = None
1572 # read the review states
1573 for r in root.findall('review'):
1575 s.state = r.get('state')
1576 s.by_user = r.get('by_user')
1577 s.by_group = r.get('by_group')
1578 s.who = r.get('who')
1579 s.when = r.get('when')
1581 s.comment = r.find('comment').text.strip()
1584 self.reviews.append(s)
1586 # read the state history
1587 for h in root.findall('history'):
1589 s.name = h.get('name')
1590 s.who = h.get('who')
1591 s.when = h.get('when')
1593 s.comment = h.find('comment').text.strip()
1596 self.statehistory.append(s)
1597 self.statehistory.reverse()
1599 # read a description, if it exists
1601 n = root.find('description').text
1606 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1607 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1608 dst_prj, dst_pkg, src_update)
1611 def list_view(self):
1612 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1614 for a in self.actions:
1615 dst = "%s/%s" % (a.dst_project, a.dst_package)
1616 if a.src_package == a.dst_package:
1620 if a.type=="submit":
1621 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1622 if a.type=="change_devel":
1623 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1624 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1626 ret += '\n %s: %-50s %-20s ' % \
1627 (a.type, sr_source, dst)
1629 if self.statehistory and self.statehistory[0]:
1631 for h in self.statehistory:
1632 who.append("%s(%s)" % (h.who,h.name))
1634 ret += "\n From: %s" % (' -> '.join(who))
1636 txt = re.sub(r'[^\x20-\x7e\n]', '_', self.descr)  # mask non-printable chars (POSIX classes like [:isprint:] are not supported by re)
1638 lines = txt.splitlines()
1639 wrapper = textwrap.TextWrapper( width = 80,
1640 initial_indent=' Descr: ',
1641 subsequent_indent=' ')
1642 ret += "\n" + wrapper.fill(lines[0])
1643 wrapper.initial_indent = ' '
1644 for line in lines[1:]:
1645 ret += "\n" + wrapper.fill(line)
1651 def __cmp__(self, other):
1652 return cmp(self.reqid, other.reqid)
1656 for action in self.actions:
1657 action_list=" %s: " % (action.type)
1658 if action.type=="submit":
1661 r="(r%s)" % (action.src_rev)
1663 if action.src_update:
1664 m="(%s)" % (action.src_update)
1665 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1666 if action.dst_package:
1667 action_list=action_list+"/%s" % ( action.dst_package )
1668 elif action.type=="delete":
1669 action_list=action_list+" %s" % ( action.dst_project )
1670 if action.dst_package:
1671 action_list=action_list+"/%s" % ( action.dst_package )
1672 elif action.type=="change_devel":
1673 action_list=action_list+" %s/%s developed in %s/%s" % \
1674 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1675 action_list=action_list+"\n"
1690 self.state.name, self.state.when, self.state.who,
1693 if len(self.reviews):
1694 reviewitems = [ '%-10s %s %s %s %s %s' \
1695 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1696 for i in self.reviews ]
1697 s += '\nReview: ' + '\n '.join(reviewitems)
1700 if len(self.statehistory):
1701 histitems = [ '%-10s %s %s' \
1702 % (i.name, i.when, i.who) \
1703 for i in self.statehistory ]
1704 s += '\nHistory: ' + '\n '.join(histitems)
1711 """format time as Apr 02 18:19
1713 depending on whether it is in the current year
1717 if time.localtime()[0] == time.localtime(t)[0]:
1719 return time.strftime('%b %d %H:%M',time.localtime(t))
1721 return time.strftime('%b %d %Y',time.localtime(t))
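# Hedged example (output values are illustrative and depend on the local timezone):
#   shorttime(time.time())   # something like 'Apr 02 18:19' for the current year
#   shorttime(1136214245)    # something like 'Jan 02 2006' for an older timestamp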
1724 def is_project_dir(d):
1725 return os.path.exists(os.path.join(d, store, '_project')) and not \
1726 os.path.exists(os.path.join(d, store, '_package'))
1729 def is_package_dir(d):
1730 return os.path.exists(os.path.join(d, store, '_project')) and \
1731 os.path.exists(os.path.join(d, store, '_package'))
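# Minimal usage sketch (assumes the current directory is an osc checkout;
# 'store' is the metadata directory, typically '.osc'):
#   is_project_dir('.')   # True in a project checkout (only _project in the store)
#   is_package_dir('.')   # True in a package checkout (_project and _package present)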
1733 def parse_disturl(disturl):
1734 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1735 revision), else raises an oscerr.WrongArgs exception
1738 m = DISTURL_RE.match(disturl)
1740 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1742 apiurl = m.group('apiurl')
1743 if apiurl.split('.')[0] != 'api':
1744 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1745 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
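# Hedged example (the disturl and hash are illustrative):
#   parse_disturl('obs://build.opensuse.org/openSUSE:Factory/standard/1a2b3c-osc')
#   # -> ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard', '1a2b3c')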
1747 def parse_buildlogurl(buildlogurl):
1748 """Parse a build log url, returns a tuple (apiurl, project, package,
1749 repository, arch), else raises oscerr.WrongArgs exception"""
1751 global BUILDLOGURL_RE
1753 m = BUILDLOGURL_RE.match(buildlogurl)
1755 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1757 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
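# Hedged example (the URL is illustrative):
#   parse_buildlogurl('https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/osc/_log')
#   # -> ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard', 'x86_64')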
1760 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1761 This is handy to allow copy/pasting a project/package combination in this form.
1763 Trailing slashes are removed before the split, because the split would
1764 otherwise give an additional empty string.
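# Sketch of the behaviour described above (illustrative, since the helper's
# body is not shown here):
#   slash_split(['openSUSE:Factory/osc', 'home:user/'])
#   # -> ['openSUSE:Factory', 'osc', 'home:user']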
1772 def expand_proj_pack(args, idx=0, howmany=0):
1773 """looks for occurance of '.' at the position idx.
1774 If howmany is 2, both proj and pack are expanded together
1775 using the current directory, or none of them, if not possible.
1776 If howmany is 0, proj is expanded if possible, then, if there
1777 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1778 expanded, if possible.
1779 If howmany is 1, only proj is expanded if possible.
1781 If args[idx] does not exist, an implicit '.' is assumed.
1782 If not enough elements up to idx exist, an error is raised.
1784 See also parseargs(args), slash_split(args), findpacs(args)
1785 All these need unification, somehow.
1788 # print args,idx,howmany
1791 raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)
1793 if len(args) == idx:
1795 if args[idx+0] == '.':
1796 if howmany == 0 and len(args) > idx+1:
1797 if args[idx+1] == '.':
1799 # remove one dot and make sure to expand both proj and pack
1804 # print args,idx,howmany
1806 args[idx+0] = store_read_project('.')
1809 package = store_read_package('.')
1810 args.insert(idx+1, package)
1814 package = store_read_package('.')
1815 args.insert(idx+1, package)
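# Hedged example (assumes the current directory is a package working copy;
# the project/package names are hypothetical):
#   expand_proj_pack(['.'])            # -> ['home:user', 'mypackage']
#   expand_proj_pack(['prj', 'pkg'])   # -> ['prj', 'pkg'] (nothing to expand)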
1819 def findpacs(files, progress_obj=None):
1820 """collect Package objects belonging to the given files
1821 and make sure each Package is returned only once"""
1824 p = filedir_to_pac(f, progress_obj)
1827 if i.name == p.name:
1837 def filedir_to_pac(f, progress_obj=None):
1838 """Takes a working copy path, or a path to a file inside a working copy,
1839 and returns a Package object instance
1841 If the argument was a filename, add it onto the "todo" list of the Package """
1843 if os.path.isdir(f):
1845 p = Package(wd, progress_obj=progress_obj)
1847 wd = os.path.dirname(f) or os.curdir
1848 p = Package(wd, progress_obj=progress_obj)
1849 p.todo = [ os.path.basename(f) ]
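# Hedged usage sketch (the path is hypothetical): for a file inside a package
# working copy,
#   p = filedir_to_pac('hello/hello.spec')
# returns a Package for the 'hello' directory with p.todo == ['hello.spec'].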
1853 def read_filemeta(dir):
1855 r = ET.parse(os.path.join(dir, store, '_files'))
1856 except SyntaxError, e:
1857 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1858 'When parsing .osc/_files, the following error was encountered:\n'
1863 def read_tobedeleted(dir):
1865 fname = os.path.join(dir, store, '_to_be_deleted')
1867 if os.path.exists(fname):
1868 r = [ line.strip() for line in open(fname) ]
1873 def read_sizelimit(dir):
1875 fname = os.path.join(dir, store, '_size_limit')
1877 if os.path.exists(fname):
1878 r = open(fname).readline()
1880 if r is None or not r.isdigit():
1884 def read_inconflict(dir):
1886 fname = os.path.join(dir, store, '_in_conflict')
1888 if os.path.exists(fname):
1889 r = [ line.strip() for line in open(fname) ]
1894 def parseargs(list_of_args):
1895 """Convenience method osc's commandline argument parsing.
1897 If called with an empty tuple (or list), return a list containing the current directory.
1898 Otherwise, return a list of the arguments."""
1900 return list(list_of_args)
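# Hedged examples:
#   parseargs([])               # -> a list containing the current directory (see docstring)
#   parseargs(('prj', 'pkg'))   # -> ['prj', 'pkg']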
1905 def statfrmt(statusletter, filename):
1906 return '%s %s' % (statusletter, filename)
1909 def pathjoin(a, *p):
1910 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1911 path = os.path.join(a, *p)
1912 if path.startswith('./'):
1917 def makeurl(baseurl, l, query=[]):
1918 """Given a list of path compoments, construct a complete URL.
1920 Optional parameters for a query string can be given as a list, as a
1921 dictionary, or as an already assembled string.
1922 In case of a dictionary, the parameters will be urlencoded by this
1923 function. In case of a list they will not be -- this is for backwards compatibility.
1926 if conf.config['verbose'] > 1:
1927 print 'makeurl:', baseurl, l, query
1929 if type(query) == type(list()):
1930 query = '&'.join(query)
1931 elif type(query) == type(dict()):
1932 query = urlencode(query)
1934 scheme, netloc = urlsplit(baseurl)[0:2]
1935 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
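# Hedged examples (apiurl and path components are illustrative):
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', '_meta'])
#   # -> 'https://api.opensuse.org/source/openSUSE:Factory/_meta'
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory'], query={'rev': 'latest'})
#   # -> 'https://api.opensuse.org/source/openSUSE:Factory?rev=latest'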
1938 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1939 """wrapper around urllib2.urlopen for error handling,
1940 and to support additional (PUT, DELETE) methods"""
1944 if conf.config['http_debug']:
1947 print '--', method, url
1949 if method == 'POST' and not file and not data:
1950 # adding data to an urllib2 request transforms it into a POST
1953 req = urllib2.Request(url)
1954 api_host_options = {}
1956 api_host_options = conf.get_apiurl_api_host_options(url)
1957 for header, value in api_host_options['http_headers']:
1958 req.add_header(header, value)
1960 # "external" request (url is no apiurl)
1963 req.get_method = lambda: method
1965 # POST requests are application/x-www-form-urlencoded by default
1966 # since we change the request into PUT, we also need to adjust the content type header
1967 if method == 'PUT' or (method == 'POST' and data):
1968 req.add_header('Content-Type', 'application/octet-stream')
1970 if type(headers) == type({}):
1971 for i in headers.keys():
1973 req.add_header(i, headers[i])
1975 if file and not data:
1976 size = os.path.getsize(file)
1978 data = open(file, 'rb').read()
1981 filefd = open(file, 'rb')
1983 if sys.platform[:3] != 'win':
1984 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1986 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1988 except EnvironmentError, e:
1990 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1991 '\non a filesystem which does not support this.' % (e, file))
1992 elif hasattr(e, 'winerror') and e.winerror == 5:
1993 # falling back to the default io
1994 data = open(file, 'rb').read()
1998 if conf.config['debug']: print method, url
2000 old_timeout = socket.getdefaulttimeout()
2001 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2002 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2003 socket.setdefaulttimeout(timeout)
2005 fd = urllib2.urlopen(req, data=data)
2007 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2008 socket.setdefaulttimeout(old_timeout)
2009 if hasattr(conf.cookiejar, 'save'):
2010 conf.cookiejar.save(ignore_discard=True)
2012 if filefd: filefd.close()
2017 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2018 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2019 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2020 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
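# Hedged usage sketch (assumes conf.get_config() was called so credentials and
# cookies are set up; the project name is illustrative):
#   u = makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', '_meta'])
#   f = http_GET(u)
#   print f.read()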
2023 def init_project_dir(apiurl, dir, project):
2024 if not os.path.exists(dir):
2025 if conf.config['checkout_no_colon']:
2026 os.makedirs(dir) # helpful with checkout_no_colon
2029 if not os.path.exists(os.path.join(dir, store)):
2030 os.mkdir(os.path.join(dir, store))
2032 # print 'project=',project,' dir=',dir
2033 store_write_project(dir, project)
2034 store_write_apiurl(dir, apiurl)
2035 if conf.config['do_package_tracking']:
2036 store_write_initial_packages(dir, project, [])
2038 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
2039 if not os.path.isdir(store):
2042 f = open('_project', 'w')
2043 f.write(project + '\n')
2045 f = open('_package', 'w')
2046 f.write(package + '\n')
2050 f = open('_size_limit', 'w')
2051 f.write(str(limit_size))
2055 f = open('_files', 'w')
2056 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
2060 ET.ElementTree(element=ET.Element('directory')).write('_files')
2062 f = open('_osclib_version', 'w')
2063 f.write(__store_version__ + '\n')
2066 store_write_apiurl(os.path.pardir, apiurl)
2072 def check_store_version(dir):
2073 versionfile = os.path.join(dir, store, '_osclib_version')
2075 v = open(versionfile).read().strip()
2080 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2081 if os.path.exists(os.path.join(dir, '.svn')):
2082 msg = msg + '\nTry svn instead of osc.'
2083 raise oscerr.NoWorkingCopy(msg)
2085 if v != __store_version__:
2086 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2087 # version is fine, no migration needed
2088 f = open(versionfile, 'w')
2089 f.write(__store_version__ + '\n')
2092 msg = 'The osc metadata of your working copy "%s"' % dir
2093 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2094 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2095 raise oscerr.WorkingCopyWrongVersion, msg
2098 def meta_get_packagelist(apiurl, prj):
2100 u = makeurl(apiurl, ['source', prj])
2102 root = ET.parse(f).getroot()
2103 return [ node.get('name') for node in root.findall('entry') ]
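# Hedged example (the project name is illustrative; needs a reachable apiurl):
#   meta_get_packagelist('https://api.opensuse.org', 'devel:tools')
#   # -> a list of package names, e.g. ['osc', 'obs-service-download_files']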
2106 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2107 """return a list of file names,
2108 or a list of File() instances if verbose=True"""
2114 query['rev'] = revision
2116 query['rev'] = 'latest'
2118 u = makeurl(apiurl, ['source', prj, package], query=query)
2120 root = ET.parse(f).getroot()
2123 return [ node.get('name') for node in root.findall('entry') ]
2127 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2128 rev = root.get('rev')
2129 for node in root.findall('entry'):
2130 f = File(node.get('name'),
2132 int(node.get('size')),
2133 int(node.get('mtime')))
2139 def meta_get_project_list(apiurl):
2140 u = makeurl(apiurl, ['source'])
2142 root = ET.parse(f).getroot()
2143 return sorted([ node.get('name') for node in root ])
2146 def show_project_meta(apiurl, prj):
2147 url = makeurl(apiurl, ['source', prj, '_meta'])
2149 return f.readlines()
2152 def show_project_conf(apiurl, prj):
2153 url = makeurl(apiurl, ['source', prj, '_config'])
2155 return f.readlines()
2158 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2159 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2163 except urllib2.HTTPError, e:
2164 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2168 def show_package_meta(apiurl, prj, pac):
2169 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2172 return f.readlines()
2173 except urllib2.HTTPError, e:
2174 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2178 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2180 path.append('source')
2186 path.append('_attribute')
2188 path.append(attribute)
2191 query.append("with_default=1")
2193 query.append("with_project=1")
2194 url = makeurl(apiurl, path, query)
2197 return f.readlines()
2198 except urllib2.HTTPError, e:
2199 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2203 def show_develproject(apiurl, prj, pac):
2204 m = show_package_meta(apiurl, prj, pac)
2206 return ET.fromstring(''.join(m)).find('devel').get('project')
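# Illustrative sketch (not part of the original module): show_develproject()
# returns the devel project recorded in a package's meta. The apiurl and the
# project/package names are placeholder assumptions.
def _example_show_develproject():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    print show_develproject(apiurl, 'openSUSE:Factory', 'osc')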
2211 def show_pattern_metalist(apiurl, prj):
2212 url = makeurl(apiurl, ['source', prj, '_pattern'])
2216 except urllib2.HTTPError, e:
2217 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2219 r = [ node.get('name') for node in tree.getroot() ]
2224 def show_pattern_meta(apiurl, prj, pattern):
2225 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2228 return f.readlines()
2229 except urllib2.HTTPError, e:
2230 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2235 """metafile that can be manipulated and is stored back after manipulation."""
2236 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2240 self.change_is_required = change_is_required
2241 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2242 f = os.fdopen(fd, 'w')
2243 f.write(''.join(input))
2245 self.hash_orig = dgst(self.filename)
2248 hash = dgst(self.filename)
2249         if self.change_is_required and hash == self.hash_orig:
2250 print 'File unchanged. Not saving.'
2251 os.unlink(self.filename)
2254 print 'Sending meta data...'
2255         # don't do any exception handling... it's up to the caller what to do in case of an error
2257 http_PUT(self.url, file=self.filename)
2258 os.unlink(self.filename)
2262 if sys.platform[:3] != 'win':
2263 editor = os.getenv('EDITOR', default='vim')
2265 editor = os.getenv('EDITOR', default='notepad')
2268 subprocess.call('%s %s' % (editor, self.filename), shell=True)
2272 except urllib2.HTTPError, e:
2273 error_help = "%d" % e.code
2274 if e.headers.get('X-Opensuse-Errorcode'):
2275 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2277 print >>sys.stderr, 'BuildService API error:', error_help
2278             # examine the error - we can't raise an exception because we might want to try again
2281 if '<summary>' in data:
2282 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2283 input = raw_input('Try again? ([y/N]): ')
2284 if input not in ['y', 'Y']:
2290 if os.path.exists(self.filename):
2291 print 'discarding %s' % self.filename
2292 os.unlink(self.filename)
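# Illustrative sketch (not part of the original module): wrapping a package
# _meta document in the metafile helper defined above. The apiurl, project and
# package names are placeholder assumptions; the method that pushes the edited
# file back is elided in this listing and its name (edit()/sync()) is assumed.
def _example_metafile():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    url = makeurl(apiurl, ['source', 'home:exampleuser', 'foo', '_meta'])
    data = show_package_meta(apiurl, 'home:exampleuser', 'foo')
    m = metafile(url, data, change_is_required=True)
    # m.edit()   # assumed method name: opens $EDITOR and PUTs the result back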
2295 # different types of metadata
2296 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2297 'template': new_project_templ,
2300 'pkg': { 'path' : 'source/%s/%s/_meta',
2301 'template': new_package_templ,
2304 'attribute': { 'path' : 'source/%s/%s/_meta',
2305 'template': new_attribute_templ,
2308 'prjconf': { 'path': 'source/%s/_config',
2312 'user': { 'path': 'person/%s',
2313 'template': new_user_template,
2316 'pattern': { 'path': 'source/%s/_pattern/%s',
2317 'template': new_pattern_template,
2322 def meta_exists(metatype,
2329 apiurl = conf.config['apiurl']
2330 url = make_meta_url(metatype, path_args, apiurl)
2332 data = http_GET(url).readlines()
2333 except urllib2.HTTPError, e:
2334 if e.code == 404 and create_new:
2335 data = metatypes[metatype]['template']
2337 data = StringIO(data % template_args).readlines()
2342 def make_meta_url(metatype, path_args=None, apiurl=None):
2344 apiurl = conf.config['apiurl']
2345 if metatype not in metatypes.keys():
2346 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2347 path = metatypes[metatype]['path']
2350 path = path % path_args
2352 return makeurl(apiurl, [path])
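# Illustrative sketch (not part of the original module): make_meta_url() maps a
# metatype plus path arguments onto the API path defined in the metatypes table
# above. The host and project/package names are placeholder assumptions.
def _example_make_meta_url():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    print make_meta_url('prj', quote_plus('home:exampleuser'), apiurl)
    print make_meta_url('pkg', (quote_plus('home:exampleuser'), quote_plus('foo')), apiurl)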
2355 def edit_meta(metatype,
2360 change_is_required=False,
2364 apiurl = conf.config['apiurl']
2366 data = meta_exists(metatype,
2369 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2373 change_is_required = True
2375 url = make_meta_url(metatype, path_args, apiurl)
2376     f = metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
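# Illustrative sketch (not part of the original module): a scripted, non-
# interactive meta round trip using the helpers above. edit_meta() opens an
# editor, so a tool would rather fetch the meta with meta_exists() and push a
# modified version back with http_PUT(). All names and values are assumptions.
def _example_meta_roundtrip():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    path_args = (quote_plus('home:exampleuser'), quote_plus('foo'))
    data = meta_exists(metatype='pkg', path_args=path_args,
                       create_new=False, apiurl=apiurl)
    url = make_meta_url('pkg', path_args, apiurl)
    http_PUT(url, data=''.join(data))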
2384 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
2387 query['rev'] = revision
2389 query['rev'] = 'latest'
2391 query['linkrev'] = linkrev
2392 elif conf.config['linkcontrol']:
2393 query['linkrev'] = 'base'
2397 query['emptylink'] = 1
2398 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2400 # look for "too large" files according to size limit and mark them
2401 root = ET.fromstring(''.join(f.readlines()))
2402 for e in root.findall('entry'):
2403 size = e.get('size')
2404 if size and limit_size and int(size) > int(limit_size):
2405 e.set('skipped', 'true')
2406 return ET.tostring(root)
2409 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2410 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2411 return ET.fromstring(''.join(m)).get('srcmd5')
2414 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2415 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2417 # only source link packages have a <linkinfo> element.
2418 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2426 raise oscerr.LinkExpandError(prj, pac, li.error)
2430 def show_upstream_rev(apiurl, prj, pac):
2431 m = show_files_meta(apiurl, prj, pac)
2432 return ET.fromstring(''.join(m)).get('rev')
2435 def read_meta_from_spec(specfile, *args):
2436 import codecs, locale, re
2438 Read tags and sections from spec file. To read out
2439 a tag the passed argument mustn't end with a colon. To
2440     read out a section the passed argument must start with a '%' sign.
2442     This method returns a dictionary which contains the requested data.
2446 if not os.path.isfile(specfile):
2447 raise IOError('\'%s\' is not a regular file' % specfile)
2450 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2451 except UnicodeDecodeError:
2452 lines = open(specfile).readlines()
2459 if itm.startswith('%'):
2460 sections.append(itm)
2464 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2466 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2467 if m and m.group('val'):
2468 spec_data[tag] = m.group('val').strip()
2470 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2473 section_pat = '^%s\s*?$'
2474 for section in sections:
2475 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2477 start = lines.index(m.group()+'\n') + 1
2479 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2482 for line in lines[start:]:
2483 if line.startswith('%'):
2486 spec_data[section] = data
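# Illustrative sketch (not part of the original module): pulling tags and
# sections out of a spec file with read_meta_from_spec(). Tags are passed
# without the trailing colon, sections with their leading '%'. The spec file
# name is a placeholder assumption.
def _example_read_spec():
    data = read_meta_from_spec('foo.spec', 'Name', 'Version', '%description')
    print data['Name'], data['Version']
    print data['%description']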
2491 def edit_message(footer='', template=''):
2492 delim = '--This line, and those below, will be ignored--\n'
2494 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2495 f = os.fdopen(fd, 'w')
2504 if sys.platform[:3] != 'win':
2505 editor = os.getenv('EDITOR', default='vim')
2507 editor = os.getenv('EDITOR', default='notepad')
2510 subprocess.call('%s %s' % (editor, filename), shell=True)
2511 msg = open(filename).read().split(delim)[0].rstrip()
2516 input = raw_input('Log message not specified\n'
2517 'a)bort, c)ontinue, e)dit: ')
2519 raise oscerr.UserAbort()
2529 def create_delete_request(apiurl, project, package, message):
2534 package = """package="%s" """ % (package)
2540 <action type="delete">
2541 <target project="%s" %s/>
2544 <description>%s</description>
2546 """ % (project, package,
2547 cgi.escape(message or ''))
2549 u = makeurl(apiurl, ['request'], query='cmd=create')
2550 f = http_POST(u, data=xml)
2552 root = ET.parse(f).getroot()
2553 return root.get('id')
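# Illustrative sketch (not part of the original module): filing a delete request
# with create_delete_request(). All values below are placeholder assumptions.
def _example_delete_request():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    reqid = create_delete_request(apiurl, 'home:exampleuser', 'foo',
                                  'package is no longer maintained')
    print 'created request', reqid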
2556 def create_change_devel_request(apiurl,
2557 devel_project, devel_package,
2564 <action type="change_devel">
2565 <source project="%s" package="%s" />
2566 <target project="%s" package="%s" />
2569 <description>%s</description>
2571 """ % (devel_project,
2575 cgi.escape(message or ''))
2577 u = makeurl(apiurl, ['request'], query='cmd=create')
2578 f = http_POST(u, data=xml)
2580 root = ET.parse(f).getroot()
2581 return root.get('id')
2584 # This creates an old style submit request for server api 1.0
2585 def create_submit_request(apiurl,
2586 src_project, src_package,
2587 dst_project=None, dst_package=None,
2588 message=None, orev=None, src_update=None):
2593 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2595 # Yes, this kind of xml construction is horrible
2600         packagexml = """package="%s" """ % (dst_package)
2601         targetxml = """<target project="%s" %s /> """ % (dst_project, packagexml)
2602 # XXX: keep the old template for now in order to work with old obs instances
2604 <request type="submit">
2606 <source project="%s" package="%s" rev="%s"/>
2611 <description>%s</description>
2615 orev or show_upstream_rev(apiurl, src_project, src_package),
2618 cgi.escape(message or ""))
2620 u = makeurl(apiurl, ['request'], query='cmd=create')
2621 f = http_POST(u, data=xml)
2623 root = ET.parse(f).getroot()
2624 return root.get('id')
2627 def get_request(apiurl, reqid):
2628 u = makeurl(apiurl, ['request', reqid])
2630 root = ET.parse(f).getroot()
2637 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2640 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2641 f = http_POST(u, data=message)
2644 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2647 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2648 f = http_POST(u, data=message)
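# Illustrative sketch (not part of the original module): accepting a request via
# change_request_state(). The request id and message are placeholder assumptions.
def _example_accept_request():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    change_request_state(apiurl, '12345', 'accepted', message='reviewed, looks good')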
2652 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2654     if 'all' not in req_state:
2655 for state in req_state:
2656 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2658 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2660 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2664 todo['project'] = project
2666 todo['package'] = package
2667 for kind, val in todo.iteritems():
2668 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2669 'action/source/@%(kind)s=\'%(val)s\' or ' \
2670 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2671 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2673 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2674 for i in exclude_target_projects:
2675 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2676 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2678 if conf.config['verbose'] > 1:
2679 print '[ %s ]' % xpath
2680 res = search(apiurl, request=xpath)
2681 collection = res['request']
2683 for root in collection.findall('request'):
2689 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2690     """Return all new requests for all projects/packages where the user is involved"""
2692 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2693 for i in res['project_id'].findall('project'):
2694 projpkgs[i.get('name')] = []
2695 for i in res['package_id'].findall('package'):
2696         if i.get('project') not in projpkgs:
2697 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2699 for prj, pacs in projpkgs.iteritems():
2701 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2705 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2706 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2707 xpath = xpath_join(xpath, xp, inner=True)
2709 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2710     if 'all' not in req_state:
2712 for state in req_state:
2713 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2714 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2715 res = search(apiurl, request=xpath)
2717 for root in res['request'].findall('request'):
2723 def get_request_log(apiurl, reqid):
2724 r = get_request(conf.config['apiurl'], reqid)
2726 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2727 # the description of the request is used for the initial log entry
2728 # otherwise its comment attribute would contain None
2729 if len(r.statehistory) >= 1:
2730 r.statehistory[-1].comment = r.descr
2732 r.state.comment = r.descr
2733 for state in [ r.state ] + r.statehistory:
2734 s = frmt % (state.name, state.who, state.when, str(state.comment))
2739 def get_user_meta(apiurl, user):
2740 u = makeurl(apiurl, ['person', quote_plus(user)])
2743 return ''.join(f.readlines())
2744 except urllib2.HTTPError:
2745 print 'user \'%s\' not found' % user
2749 def get_user_data(apiurl, user, *tags):
2750 """get specified tags from the user meta"""
2751 meta = get_user_meta(apiurl, user)
2754 root = ET.fromstring(meta)
2757             if root.find(tag).text is not None:
2758 data.append(root.find(tag).text)
2762 except AttributeError:
2763 # this part is reached if the tags tuple contains an invalid tag
2764 print 'The xml file for user \'%s\' seems to be broken' % user
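# Illustrative sketch (not part of the original module): fetching selected
# fields from a user's meta with get_user_data(). The tag names and the user id
# are assumptions based on the <person> schema used by the API.
def _example_user_data():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    data = get_user_data(apiurl, 'exampleuser', 'login', 'realname', 'email')
    print data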
2769 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
2770 import tempfile, shutil
2773 query = { 'rev': revision }
2777 (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
2778 o = os.fdopen(fd, 'wb')
2779 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2780 for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
2783 shutil.move(tmpfile, targetfilename or filename)
2784 os.chmod(targetfilename or filename, 0644)
2792 def get_binary_file(apiurl, prj, repo, arch,
2795 target_filename = None,
2796 target_mtime = None,
2797 progress_meter = False):
2799 target_filename = target_filename or filename
2801 where = package or '_repository'
2802 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2805 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2809 binsize = int(f.headers['content-length'])
2812 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2813 os.chmod(tmpfilename, 0644)
2816 o = os.fdopen(fd, 'wb')
2820 #buf = f.read(BUFSIZE)
2824 downloaded += len(buf)
2826 completion = str(int((float(downloaded)/binsize)*100))
2827 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2832 sys.stdout.write('\n')
2834 shutil.move(tmpfilename, target_filename)
2836 os.utime(target_filename, (-1, target_mtime))
2838 # make sure that the temp file is cleaned up when we are interrupted
2840 try: os.unlink(tmpfilename)
2843 def dgst_from_string(str):
2844     # Python 2.5 deprecates the md5 module
2845 # Python 2.4 doesn't have hashlib yet
2848 md5_hash = hashlib.md5()
2851 md5_hash = md5.new()
2852 md5_hash.update(str)
2853 return md5_hash.hexdigest()
2857 #if not os.path.exists(file):
2867 f = open(file, 'rb')
2869 buf = f.read(BUFSIZE)
2872 return s.hexdigest()
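# Illustrative sketch (not part of the original module): dgst_from_string()
# hashes an in-memory string while dgst() hashes a file in BUFSIZE chunks; both
# return the hex md5 digest. The file path below is a placeholder assumption.
def _example_digest():
    print dgst_from_string('hello world')      # '5eb63bbbe01eeed093cb22bb8f5acdc3'
    print dgst('/etc/hostname')                 # assumed example path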
2877 """return true if a string is binary data using diff's heuristic"""
2878 if s and '\0' in s[:4096]:
2883 def binary_file(fn):
2884 """read 4096 bytes from a file named fn, and call binary() on the data"""
2885 return binary(open(fn, 'rb').read(4096))
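# Illustrative sketch (not part of the original module): the binary()/
# binary_file() heuristic mirrors diff and treats data as binary as soon as a
# NUL byte shows up in the first 4096 bytes.
def _example_binary_check():
    print binary('plain text')                  # False
    print binary('abc\0def')                    # True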
2888 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2890     This method diffs oldfilename against filename (so filename will
2891 be shown as the new file).
2892 The variable origfilename is used if filename and oldfilename differ
2893 in their names (for instance if a tempfile is used for filename etc.)
2899 oldfilename = filename
2902 olddir = os.path.join(dir, store)
2904 if not origfilename:
2905 origfilename = filename
2907 file1 = os.path.join(olddir, oldfilename) # old/stored original
2908 file2 = os.path.join(dir, filename) # working copy
2910 f1 = open(file1, 'rb')
2914 f2 = open(file2, 'rb')
2918     if binary(s1) or binary(s2):
2919 d = ['Binary file %s has changed\n' % origfilename]
2922 d = difflib.unified_diff(\
2925 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2926 tofile = '%s\t(working copy)' % origfilename)
2928 # if file doesn't end with newline, we need to append one in the diff result
2930 for i, line in enumerate(d):
2931 if not line.endswith('\n'):
2932 d[i] += '\n\\ No newline at end of file'
2938 def make_diff(wc, revision):
2944 diff_hdr = 'Index: %s\n'
2945 diff_hdr += '===================================================================\n'
2947 olddir = os.getcwd()
2951 for file in wc.todo:
2952 if file in wc.skipped:
2954 if file in wc.filenamelist+wc.filenamelist_unvers:
2955 state = wc.status(file)
2957 added_files.append(file)
2959 removed_files.append(file)
2960 elif state == 'M' or state == 'C':
2961 changed_files.append(file)
2963 diff.append('osc: \'%s\' is not under version control' % file)
2965 for file in wc.filenamelist+wc.filenamelist_unvers:
2966 if file in wc.skipped:
2968 state = wc.status(file)
2969 if state == 'M' or state == 'C':
2970 changed_files.append(file)
2972 added_files.append(file)
2974 removed_files.append(file)
2976 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2978 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2979 cmp_pac = Package(tmpdir)
2981 for file in wc.todo:
2982 if file in cmp_pac.skipped:
2984 if file in cmp_pac.filenamelist:
2985 if file in wc.filenamelist:
2986 changed_files.append(file)
2988 diff.append('osc: \'%s\' is not under version control' % file)
2990 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2992 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2994 for file in changed_files:
2995 diff.append(diff_hdr % file)
2997 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2999 cmp_pac.updatefile(file, revision)
3000 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3001 cmp_pac.absdir, file))
3002 (fd, tmpfile) = tempfile.mkstemp()
3003 for file in added_files:
3004 diff.append(diff_hdr % file)
3006 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3007 os.path.dirname(tmpfile), file))
3009 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3010 os.path.dirname(tmpfile), file))
3012 # FIXME: this is ugly but it cannot be avoided atm
3013 # if a file is deleted via "osc rm file" we should keep the storefile.
3015     if cmp_pac is None and removed_files:
3016 tmpdir = tempfile.mkdtemp()
3018 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3019 tmp_pac = Package(tmpdir)
3022 for file in removed_files:
3023 diff.append(diff_hdr % file)
3025 tmp_pac.updatefile(file, tmp_pac.rev)
3026 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3027 wc.rev, file, tmp_pac.storedir, file))
3029 cmp_pac.updatefile(file, revision)
3030 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3031 revision, file, cmp_pac.storedir, file))
3035 delete_dir(cmp_pac.absdir)
3037 delete_dir(tmp_pac.absdir)
3041 def server_diff(apiurl,
3042 old_project, old_package, old_revision,
3043 new_project, new_package, new_revision, unified=False, missingok=False):
3044 query = {'cmd': 'diff', 'expand': '1'}
3046 query['oproject'] = old_project
3048 query['opackage'] = old_package
3050 query['orev'] = old_revision
3052 query['rev'] = new_revision
3054 query['unified'] = 1
3056 query['missingok'] = 1
3058 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
3064 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3066 creates the plain directory structure for a package dir.
3067 The 'apiurl' parameter is needed for the project dir initialization.
3068 The 'project' and 'package' parameters specify the name of the
3069 project and the package. The optional 'pathname' parameter is used
3070 for printing out the message that a new dir was created (default: 'prj_dir/package').
3071 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3073 prj_dir = prj_dir or project
3075     # FIXME: carefully test each path component of prj_dir,
3076 # if we have a .osc/_files entry at that level.
3077 # -> if so, we have a package/project clash,
3078 # and should rename this path component by appending '.proj'
3079 # and give user a warning message, to discourage such clashes
3081 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3082 if is_package_dir(prj_dir):
3083 # we want this to become a project directory,
3084 # but it already is a package directory.
3085 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3087 if not is_project_dir(prj_dir):
3088     # this directory could exist as a parent directory for one of our earlier
3089 # checked out sub-projects. in this case, we still need to initialize it.
3090 print statfrmt('A', prj_dir)
3091 init_project_dir(apiurl, prj_dir, project)
3093 if is_project_dir(os.path.join(prj_dir, package)):
3094 # the thing exists, but is a project directory and not a package directory
3095 # FIXME: this should be a warning message to discourage package/project clashes
3096 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3098 if not os.path.exists(os.path.join(prj_dir, package)):
3099 print statfrmt('A', pathname)
3100 os.mkdir(os.path.join(prj_dir, package))
3101 os.mkdir(os.path.join(prj_dir, package, store))
3103     return os.path.join(prj_dir, package)
3106 def checkout_package(apiurl, project, package,
3107 revision=None, pathname=None, prj_obj=None,
3108 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3110 # the project we're in might be deleted.
3111 # that'll throw an error then.
3112 olddir = os.getcwd()
3114 olddir = os.environ.get("PWD")
3119 if sys.platform[:3] == 'win':
3120 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3122 if conf.config['checkout_no_colon']:
3123 prj_dir = prj_dir.replace(':', '/')
3126 pathname = getTransActPath(os.path.join(prj_dir, package))
3128     # before we create directories and stuff, check if the package actually exists
3130 show_package_meta(apiurl, project, package)
3134 # try to read from the linkinfo
3135     # if it is a link we use the xsrcmd5 as the revision to be checked out
3138 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3140 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3145 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3146 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3148 p = Package(package, progress_obj=progress_obj)
3151 for filename in p.filenamelist:
3152 if filename in p.skipped:
3154 if service_files or not filename.startswith('_service:'):
3155 p.updatefile(filename, revision)
3156 # print 'A ', os.path.join(project, package, filename)
3157 print statfrmt('A', os.path.join(pathname, filename))
3158 if conf.config['do_package_tracking']:
3159 # check if we can re-use an existing project object
3161 prj_obj = Project(os.getcwd())
3162 prj_obj.set_state(p.name, ' ')
3163 prj_obj.write_packages()
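# Illustrative sketch (not part of the original module): checking out a package
# below the current directory with checkout_package(). The apiurl, project and
# package names are placeholder assumptions.
def _example_checkout():
    apiurl = 'https://api.example.org'          # assumed API endpoint
    checkout_package(apiurl, 'home:exampleuser', 'foo',
                     prj_dir='home:exampleuser', expand_link=True)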
3167 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3168 dst_userid = None, keep_develproject = False):
3170     update pkgmeta with new_name and new_prj and set the calling user as the
3171 only maintainer (unless keep_maintainers is set). Additionally remove the
3172 develproject entry (<devel />) unless keep_develproject is true.
3174 root = ET.fromstring(''.join(pkgmeta))
3175 root.set('name', new_name)
3176 root.set('project', new_prj)
3177 if not keep_maintainers:
3178 for person in root.findall('person'):
3180 if not keep_develproject:
3181 for dp in root.findall('devel'):
3183 return ET.tostring(root)
3185 def link_to_branch(apiurl, project, package):
3187 convert a package with a _link + project.diff to a branch
3190 if '_link' in meta_get_filelist(apiurl, project, package):
3191 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3194 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3196 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3198 create a linked package
3199 - "src" is the original package
3200 - "dst" is the "link" package that we are creating here
3205 dst_meta = meta_exists(metatype='pkg',
3206 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3208 create_new=False, apiurl=conf.config['apiurl'])
3210 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3211 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3216 root = ET.fromstring(''.join(dst_meta))
3217 elm = root.find('publish')
3219 elm = ET.SubElement(root, 'publish')
3221 ET.SubElement(elm, 'disable')
3222 dst_meta = ET.tostring(root)
3225 path_args=(dst_project, dst_package),
3227 # create the _link file
3228 # but first, make sure not to overwrite an existing one
3229 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3231 print >>sys.stderr, 'forced overwrite of existing _link file'
3234 print >>sys.stderr, '_link file already exists...! Aborting'
3238 rev = 'rev="%s"' % rev
3243 cicount = 'cicount="%s"' % cicount
3247 print 'Creating _link...',
3248 link_template = """\
3249 <link project="%s" package="%s" %s %s>
3251 <!-- <apply name="patch" /> apply a patch on the source directory -->
3252 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3253 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3254 <!-- <delete>filename</delete> delete a file -->
3257 """ % (src_project, src_package, rev, cicount)
3259 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3260 http_PUT(u, data=link_template)
3263 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3266 - "src" is the original package
3267 - "dst" is the "aggregate" package that we are creating here
3268 - "map" is a dictionary SRC => TARGET repository mappings
3273 dst_meta = meta_exists(metatype='pkg',
3274 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3276 create_new=False, apiurl=conf.config['apiurl'])
3278 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3279 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3284 root = ET.fromstring(''.join(dst_meta))
3285 elm = root.find('publish')
3287 elm = ET.SubElement(root, 'publish')
3289 ET.SubElement(elm, 'disable')
3290 dst_meta = ET.tostring(root)
3293 path_args=(dst_project, dst_package),
3296 # create the _aggregate file
3297 # but first, make sure not to overwrite an existing one
3298 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3300 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3303 print 'Creating _aggregate...',
3304 aggregate_template = """\
3306 <aggregate project="%s">
3308 for tgt, src in repo_map.iteritems():
3309 aggregate_template += """\
3310 <repository target="%s" source="%s" />
3313 aggregate_template += """\
3314 <package>%s</package>
3317 """ % ( src_package)
3319 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3320 http_PUT(u, data=aggregate_template)
3324 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3326 Branch packages defined via attributes (via API call)
3328 query = { 'cmd': 'branch' }
3329 query['attribute'] = attribute
3331 query['target_project'] = targetproject
3333 query['package'] = package
3334 if maintained_update_project_attribute:
3335 query['update_project_attribute'] = maintained_update_project_attribute
3337 u = makeurl(apiurl, ['source'], query=query)
3341 except urllib2.HTTPError, e:
3342 msg = ''.join(e.readlines())
3343 msg = msg.split('<summary>')[1]
3344 msg = msg.split('</summary>')[0]
3345 raise oscerr.APIError(msg)
3348 r = r.split('targetproject">')[1]
3349 r = r.split('</data>')[0]
3353 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False, msg=''):
3355 Branch a package (via API call)
3357 query = { 'cmd': 'branch' }
3359 query['ignoredevel'] = '1'
3363 query['target_project'] = target_project
3365 query['target_package'] = target_package
3367 query['comment'] = msg
3368 u = makeurl(apiurl, ['source', src_project, src_package], query=query)