3 # Copyright (C) 2006 Novell Inc. All rights reserved.
4 # This program is free software; it may be used, copied, modified
5 # and distributed under the terms of the GNU General Public Licence,
6 # either version 2, or version 3 (at your option).
8 __version__ = '0.125git'
10 # __store_version__ is to be incremented when the format of the working copy
11 # "store" changes in an incompatible way. Please add any needed migration
12 # functionality to check_store_version().
13 __store_version__ = '1.0'
19 from urllib import pathname2url, quote_plus, urlencode, unquote
20 from urlparse import urlsplit, urlunsplit
21 from cStringIO import StringIO
29 from xml.etree import cElementTree as ET
31 import cElementTree as ET
35 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
36 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
40 # NOTE: do not use this anymore, use conf.exclude_glob instead.
41 # but it needs to stay to avoid breaking tools which use the osc lib
42 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
44 new_project_templ = """\
45 <project name="%(name)s">
47 <title></title> <!-- Short title of NewProject -->
49 <!-- This is for a longer description of the purpose of the project -->
52 <person role="maintainer" userid="%(user)s" />
53 <person role="bugowner" userid="%(user)s" />
54 <!-- remove this block to publish your packages on the mirrors -->
65 <!-- remove this comment to enable one or more build targets
67 <repository name="openSUSE_Factory">
68 <path project="openSUSE:Factory" repository="standard" />
72 <repository name="openSUSE_11.2">
73 <path project="openSUSE:11.2" repository="standard"/>
77 <repository name="openSUSE_11.1">
78 <path project="openSUSE:11.1" repository="standard"/>
82 <repository name="openSUSE_11.0">
83 <path project="openSUSE:11.0" repository="standard"/>
87 <repository name="Fedora_11">
88 <path project="Fedora:11" repository="standard" />
92 <repository name="SLE_11">
93 <path project="SUSE:SLE-11" repository="standard" />
97 <repository name="SLE_10">
98 <path project="SUSE:SLE-10:SDK" repository="standard" />
107 new_package_templ = """\
108 <package name="%(name)s">
110 <title></title> <!-- Title of package -->
113 <!-- for long description -->
116 <person role="maintainer" userid="%(user)s"/>
117 <person role="bugowner" userid="%(user)s"/>
119 <url>PUT_UPSTREAM_URL_HERE</url>
123 use one of the examples below to disable building of this package
124 on a certain architecture, in a certain repository,
125 or a combination thereof:
127 <disable arch="x86_64"/>
128 <disable repository="SUSE_SLE-10"/>
129 <disable repository="SUSE_SLE-10" arch="x86_64"/>
131 Possible sections where you can use the tags above:
141 Please have a look at:
142 http://en.opensuse.org/Restricted_Formats
143 Packages containing formats listed there are NOT allowed to
144 be packaged in the openSUSE Buildservice and will be deleted!
151 new_attribute_templ = """\
153 <attribute namespace="" name="">
159 new_user_template = """\
161 <login>%(user)s</login>
162 <email>PUT_EMAIL_ADDRESS_HERE</email>
163 <realname>PUT_REAL_NAME_HERE</realname>
165 <project name="home:%(user)s"/>
181 new_pattern_template = """\
182 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
188 buildstatus_symbols = {'succeeded': '.',
190 'expansion error': 'E',
201 # os.path.samefile is available only under Unix
202 def os_path_samefile(path1, path2):
204 return os.path.samefile(path1, path2)
206 return os.path.realpath(path1) == os.path.realpath(path2)
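# A minimal usage sketch for the portable samefile check above (the paths are
# hypothetical): on platforms without os.path.samefile it falls back to
# comparing the resolved real paths.
#
#   if os_path_samefile('/tmp/wc/pkg', '/tmp/wc/../wc/pkg'):
#       print 'both paths point to the same working copy directory'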
209 """represent a file, including its metadata"""
210 def __init__(self, name, md5, size, mtime):
220 """Source service content
223 """creates an empty serviceinfo instance"""
226 def read(self, serviceinfo_node):
227 """read in the source services <services> element passed as
230 if serviceinfo_node is None:
233 services = serviceinfo_node.findall('service')
235 for service in services:
236 name = service.get('name')
238 for param in service.findall('param'):
239 option = param.get('name', None)
241 name += " --" + option + " '" + value + "'"
242 self.commands.append(name)
244 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
245 raise oscerr.APIError(msg)
247 def execute(self, dir):
250 for call in self.commands:
251 temp_dir = tempfile.mkdtemp()
252 name = call.split(None, 1)[0]
253 if not os.path.exists("/usr/lib/obs/service/"+name):
254 msg = "ERROR: service is not installed !"
255 msg += "Can maybe solved with: zypper in obs-server-" + name
256 raise oscerr.APIError(msg)
257 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
258 ret = subprocess.call(c, shell=True)
260 print "ERROR: service call failed: " + c
262 for file in os.listdir(temp_dir):
263 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
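# Illustrative sketch of how a working copy's _service file feeds this class
# (the service and param names are made up; execute() expects a matching
# helper under /usr/lib/obs/service/):
#
#   root = ET.parse('_service').getroot()
#   si = Serviceinfo()
#   si.read(root)           # collects e.g. "download_url --host 'ftp.example.com'"
#   si.execute(os.curdir)   # stores the results as _service:<name>:<file>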
267 """linkinfo metadata (which is part of the xml representing a directory
270 """creates an empty linkinfo instance"""
280 def read(self, linkinfo_node):
281 """read in the linkinfo metadata from the <linkinfo> element passed as
283 If the passed element is None, the method does nothing.
285 if linkinfo_node is None:
287 self.project = linkinfo_node.get('project')
288 self.package = linkinfo_node.get('package')
289 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
290 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
291 self.srcmd5 = linkinfo_node.get('srcmd5')
292 self.error = linkinfo_node.get('error')
293 self.rev = linkinfo_node.get('rev')
294 self.baserev = linkinfo_node.get('baserev')
297 """returns True if the linkinfo is not empty, otherwise False"""
298 if self.xsrcmd5 or self.lsrcmd5:
302 def isexpanded(self):
303 """returns True if the package is an expanded link"""
304 if self.lsrcmd5 and not self.xsrcmd5:
309 """returns True if the link is in error state (could not be applied)"""
315 """return an informatory string representation"""
316 if self.islink() and not self.isexpanded():
317 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
318 % (self.project, self.package, self.xsrcmd5, self.rev)
319 elif self.islink() and self.isexpanded():
321 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
322 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
324 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
325 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
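# Sketch of how a Linkinfo instance is typically filled from a working copy's
# _files metadata (compare Package.update_datastructs below); the directory is
# a placeholder:
#
#   root = read_filemeta('.').getroot()
#   li = Linkinfo()
#   li.read(root.find('linkinfo'))   # a missing <linkinfo> element leaves it empty
#   if li.islink() and not li.isexpanded():
#       print 'unexpanded link ->', li.project, li.package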
331 """represent a project directory, holding packages"""
332 def __init__(self, dir, getPackageList=True, progress_obj=None):
335 self.absdir = os.path.abspath(dir)
336 self.progress_obj = progress_obj
338 self.name = store_read_project(self.dir)
339 self.apiurl = store_read_apiurl(self.dir)
342 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
344 self.pacs_available = []
346 if conf.config['do_package_tracking']:
347 self.pac_root = self.read_packages().getroot()
348 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
349 self.pacs_excluded = [ i for i in os.listdir(self.dir)
350 for j in conf.config['exclude_glob']
351 if fnmatch.fnmatch(i, j) ]
352 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
353 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
354 # in the self.pacs_broken list
355 self.pacs_broken = []
356 for p in self.pacs_have:
357 if not os.path.isdir(os.path.join(self.absdir, p)):
358 # all states will be replaced with the '!'-state
359 # (except if it is already marked as deleted ('D'-state))
360 self.pacs_broken.append(p)
362 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
364 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
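# Rough example of the bookkeeping lists set up above for a checked-out
# project (assumes conf.config['do_package_tracking'] is enabled; the
# directory name is hypothetical):
#
#   prj = Project('./home:user')
#   print prj.pacs_have       # packages known locally (tracked in .osc/_packages)
#   print prj.pacs_missing    # packages on the server but not checked out
#   print prj.pacs_broken     # tracked packages whose directory disappeared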
366 def checkout_missing_pacs(self, expand_link=False):
367 for pac in self.pacs_missing:
369 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
370 # pac is not under version control but a local file/dir exists
371 msg = 'can\'t add package \'%s\': Object already exists' % pac
372 raise oscerr.PackageExists(self.name, pac, msg)
374 print 'checking out new package %s' % pac
375 checkout_package(self.apiurl, self.name, pac, \
376 pathname=getTransActPath(os.path.join(self.dir, pac)), \
377 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
379 def set_state(self, pac, state):
380 node = self.get_package_node(pac)
382 self.new_package_entry(pac, state)
384 node.attrib['state'] = state
386 def get_package_node(self, pac):
387 for node in self.pac_root.findall('package'):
388 if pac == node.get('name'):
392 def del_package_node(self, pac):
393 for node in self.pac_root.findall('package'):
394 if pac == node.get('name'):
395 self.pac_root.remove(node)
397 def get_state(self, pac):
398 node = self.get_package_node(pac)
400 return node.get('state')
404 def new_package_entry(self, name, state):
405 ET.SubElement(self.pac_root, 'package', name=name, state=state)
407 def read_packages(self):
408 packages_file = os.path.join(self.absdir, store, '_packages')
409 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
410 return ET.parse(packages_file)
412 # scan project for existing packages and migrate them
414 for data in os.listdir(self.dir):
415 pac_dir = os.path.join(self.absdir, data)
416 # we cannot use self.pacs_available because we cannot guarantee that the package list
417 # was fetched from the server
418 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
419 and Package(pac_dir).name == data:
420 cur_pacs.append(ET.Element('package', name=data, state=' '))
421 store_write_initial_packages(self.absdir, self.name, cur_pacs)
422 return ET.parse(os.path.join(self.absdir, store, '_packages'))
424 def write_packages(self):
425 # TODO: should we only modify the existing file instead of overwriting?
426 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
428 def addPackage(self, pac):
430 for i in conf.config['exclude_glob']:
431 if fnmatch.fnmatch(pac, i):
432 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
433 raise oscerr.OscIOError(None, msg)
434 state = self.get_state(pac)
435 if state is None or state == 'D':
436 self.new_package_entry(pac, 'A')
437 self.write_packages()
438 # sometimes the new pac doesn't exist in the list because
439 # it would take too much time to update all data structs regularly
440 if pac in self.pacs_unvers:
441 self.pacs_unvers.remove(pac)
443 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
445 def delPackage(self, pac, force = False):
446 state = self.get_state(pac.name)
448 if state == ' ' or state == 'D':
450 for file in pac.filenamelist + pac.filenamelist_unvers:
451 filestate = pac.status(file)
452 if filestate == 'M' or filestate == 'C' or \
453 filestate == 'A' or filestate == '?':
456 del_files.append(file)
457 if can_delete or force:
458 for file in del_files:
459 pac.delete_localfile(file)
460 if pac.status(file) != '?':
461 pac.delete_storefile(file)
462 # this is not really necessary
463 pac.put_on_deletelist(file)
464 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
465 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
466 pac.write_deletelist()
467 self.set_state(pac.name, 'D')
468 self.write_packages()
470 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
473 delete_dir(pac.absdir)
474 self.del_package_node(pac.name)
475 self.write_packages()
476 print statfrmt('D', pac.name)
478 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
480 print 'package is not under version control'
482 print 'unsupported state'
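# Usage sketch for the package tracking methods above (package names are
# invented); both addPackage() and delPackage() update the _packages file via
# write_packages():
#
#   prj = Project('.')
#   prj.addPackage('newpkg')              # records 'newpkg' with state 'A'
#   prj.delPackage(Package('./oldpkg'))   # marks 'oldpkg' as 'D' (or removes it)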
484 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
487 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
489 # we need to make sure that the _packages file will be written (even if an exception
492 # update complete project
493 # packages which no longer exist upstream
494 upstream_del = [ pac for pac in self.pacs_have if pac not in self.pacs_available and self.get_state(pac) != 'A']
496 for pac in upstream_del:
497 p = Package(os.path.join(self.dir, pac))
498 self.delPackage(p, force = True)
499 delete_storedir(p.storedir)
504 self.pac_root.remove(self.get_package_node(p.name))
505 self.pacs_have.remove(pac)
507 for pac in self.pacs_have:
508 state = self.get_state(pac)
509 if pac in self.pacs_broken:
510 if self.get_state(pac) != 'A':
511 checkout_package(self.apiurl, self.name, pac,
512 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
513 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
516 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
518 if expand_link and p.islink() and not p.isexpanded():
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
523 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
526 rev = p.linkinfo.xsrcmd5
527 print 'Expanding to rev', rev
528 elif unexpand_link and p.islink() and p.isexpanded():
529 rev = p.linkinfo.lsrcmd5
530 print 'Unexpanding to rev', rev
531 elif p.islink() and p.isexpanded():
533 print 'Updating %s' % p.name
534 p.update(rev, service_files)
538 # TODO: Package::update has to be fixed to behave like svn does
539 if pac in self.pacs_broken:
540 checkout_package(self.apiurl, self.name, pac,
541 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
542 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
544 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
545 elif state == 'A' and pac in self.pacs_available:
546 # file/dir called pac already exists and is under version control
547 msg = 'can\'t add package \'%s\': Object already exists' % pac
548 raise oscerr.PackageExists(self.name, pac, msg)
553 print 'unexpected state.. package \'%s\'' % pac
555 self.checkout_missing_pacs(expand_link=not unexpand_link)
557 self.write_packages()
559 def commit(self, pacs = (), msg = '', files = {}):
564 if pac in files:
566 state = self.get_state(pac)
568 self.commitNewPackage(pac, msg, todo)
570 self.commitDelPackage(pac)
572 # display the correct dir when sending the changes
573 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
576 p = Package(os.path.join(self.dir, pac))
579 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
580 print 'osc: \'%s\' is not under version control' % pac
581 elif pac in self.pacs_broken:
582 print 'osc: \'%s\' package not found' % pac
584 self.commitExtPackage(pac, msg, todo)
586 self.write_packages()
588 # if we have packages marked as '!' we cannot commit
589 for pac in self.pacs_broken:
590 if self.get_state(pac) != 'D':
591 msg = 'commit failed: package \'%s\' is missing' % pac
592 raise oscerr.PackageMissing(self.name, pac, msg)
594 for pac in self.pacs_have:
595 state = self.get_state(pac)
598 Package(os.path.join(self.dir, pac)).commit(msg)
600 self.commitDelPackage(pac)
602 self.commitNewPackage(pac, msg)
604 self.write_packages()
606 def commitNewPackage(self, pac, msg = '', files = []):
607 """creates and commits a new package if it does not exist on the server"""
608 if pac in self.pacs_available:
609 print 'package \'%s\' already exists' % pac
611 user = conf.get_apiurl_usr(self.apiurl)
612 edit_meta(metatype='pkg',
613 path_args=(quote_plus(self.name), quote_plus(pac)),
618 # display the correct dir when sending the changes
620 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
624 p = Package(os.path.join(self.dir, pac))
626 print statfrmt('Sending', os.path.normpath(p.dir))
628 self.set_state(pac, ' ')
631 def commitDelPackage(self, pac):
632 """deletes a package on the server and in the working copy"""
634 # display the correct dir when sending the changes
635 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
638 pac_dir = os.path.join(self.dir, pac)
639 p = Package(os.path.join(self.dir, pac))
640 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
641 delete_storedir(p.storedir)
647 pac_dir = os.path.join(self.dir, pac)
648 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
649 print statfrmt('Deleting', getTransActPath(pac_dir))
650 delete_package(self.apiurl, self.name, pac)
651 self.del_package_node(pac)
653 def commitExtPackage(self, pac, msg, files = []):
654 """commits a package from an external project"""
655 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
658 pac_path = os.path.join(self.dir, pac)
660 project = store_read_project(pac_path)
661 package = store_read_package(pac_path)
662 apiurl = store_read_apiurl(pac_path)
663 if meta_exists(metatype='pkg',
664 path_args=(quote_plus(project), quote_plus(package)),
666 create_new=False, apiurl=apiurl):
667 p = Package(pac_path)
671 user = conf.get_apiurl_usr(self.apiurl)
672 edit_meta(metatype='pkg',
673 path_args=(quote_plus(project), quote_plus(package)),
678 p = Package(pac_path)
684 r.append('*****************************************************')
685 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
686 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
687 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
688 r.append('*****************************************************')
694 """represent a package (its directory) and read/keep/write its metadata"""
695 def __init__(self, workingdir, progress_obj=None):
696 self.dir = workingdir
697 self.absdir = os.path.abspath(self.dir)
698 self.storedir = os.path.join(self.absdir, store)
699 self.progress_obj = progress_obj
701 check_store_version(self.dir)
703 self.prjname = store_read_project(self.dir)
704 self.name = store_read_package(self.dir)
705 self.apiurl = store_read_apiurl(self.dir)
707 self.update_datastructs()
711 self.todo_delete = []
714 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
715 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
718 def addfile(self, n):
719 st = os.stat(os.path.join(self.dir, n))
720 f = File(n, None, st.st_size, st.st_mtime)
721 self.filelist.append(f)
722 self.filenamelist.append(n)
723 self.filenamelist_unvers.remove(n)
724 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
726 def delete_file(self, n, force=False):
727 """deletes a file if possible and marks the file as deleted"""
728 state = self.status(n)
729 if state in ['?', 'A', 'M'] and not force:
730 return (False, state)
731 self.delete_localfile(n)
733 self.put_on_deletelist(n)
734 self.write_deletelist()
736 self.delete_storefile(n)
739 def delete_storefile(self, n):
740 try: os.unlink(os.path.join(self.storedir, n))
743 def delete_localfile(self, n):
744 try: os.unlink(os.path.join(self.dir, n))
747 def put_on_deletelist(self, n):
748 if n not in self.to_be_deleted:
749 self.to_be_deleted.append(n)
751 def put_on_conflictlist(self, n):
752 if n not in self.in_conflict:
753 self.in_conflict.append(n)
755 def clear_from_conflictlist(self, n):
756 """delete an entry from the file, and remove the file if it would be empty"""
757 if n in self.in_conflict:
759 filename = os.path.join(self.dir, n)
760 storefilename = os.path.join(self.storedir, n)
761 myfilename = os.path.join(self.dir, n + '.mine')
762 if self.islinkrepair() or self.ispulled():
763 upfilename = os.path.join(self.dir, n + '.new')
765 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
768 os.unlink(myfilename)
769 # the working copy may be updated, so the .r* ending may be obsolete...
771 os.unlink(upfilename)
772 if self.islinkrepair() or self.ispulled():
773 os.unlink(os.path.join(self.dir, n + '.old'))
777 self.in_conflict.remove(n)
779 self.write_conflictlist()
781 def write_deletelist(self):
782 if len(self.to_be_deleted) == 0:
784 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
788 fname = os.path.join(self.storedir, '_to_be_deleted')
790 f.write('\n'.join(self.to_be_deleted))
794 def delete_source_file(self, n):
795 """delete local a source file"""
796 self.delete_localfile(n)
797 self.delete_storefile(n)
799 def delete_remote_source_file(self, n):
800 """delete a remote source file (e.g. from the server)"""
802 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
805 def put_source_file(self, n):
807 # escaping '+' in the URL path (note: not in the URL query string) is
808 # only a workaround for ruby on rails, which swallows it otherwise
810 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
811 http_PUT(u, file = os.path.join(self.dir, n))
813 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
815 def commit(self, msg=''):
816 # commit only if the upstream revision is the same as the working copy's
817 upstream_rev = self.latest_rev()
818 if self.rev != upstream_rev:
819 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
822 self.todo = self.filenamelist_unvers + self.filenamelist
824 pathn = getTransActPath(self.dir)
826 have_conflicts = False
827 for filename in self.todo:
828 if not filename.startswith('_service:') and not filename.startswith('_service_'):
829 st = self.status(filename)
830 if st == 'A' or st == 'M':
831 self.todo_send.append(filename)
832 print statfrmt('Sending', os.path.join(pathn, filename))
834 self.todo_delete.append(filename)
835 print statfrmt('Deleting', os.path.join(pathn, filename))
837 have_conflicts = True
840 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
843 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
844 print 'nothing to do for package %s' % self.name
847 if self.islink() and self.isexpanded():
848 # resolve the link into the upload revision
849 # XXX: do this always?
850 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
851 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
854 print 'Transmitting file data ',
856 for filename in self.todo_delete:
857 # do not touch local files on commit --
858 # delete remotely instead
859 self.delete_remote_source_file(filename)
860 self.to_be_deleted.remove(filename)
861 for filename in self.todo_send:
862 sys.stdout.write('.')
864 self.put_source_file(filename)
866 # all source files are committed - now comes the log
867 query = { 'cmd' : 'commit',
869 'user' : conf.get_apiurl_usr(self.apiurl),
871 if self.islink() and self.isexpanded():
872 query['keeplink'] = '1'
873 if conf.config['linkcontrol'] or self.isfrozen():
874 query['linkrev'] = self.linkinfo.srcmd5
876 query['repairlink'] = '1'
877 query['linkrev'] = self.get_pulled_srcmd5()
878 if self.islinkrepair():
879 query['repairlink'] = '1'
880 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
882 except urllib2.HTTPError, e:
883 # delete upload revision
885 query = { 'cmd': 'deleteuploadrev' }
886 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
892 root = ET.parse(f).getroot()
893 self.rev = int(root.get('rev'))
895 print 'Committed revision %s.' % self.rev
898 os.unlink(os.path.join(self.storedir, '_pulled'))
899 if self.islinkrepair():
900 os.unlink(os.path.join(self.storedir, '_linkrepair'))
901 self.linkrepair = False
902 # XXX: mark package as invalid?
903 print 'The source link has been repaired. This directory can now be removed.'
904 if self.islink() and self.isexpanded():
905 self.update_local_filesmeta(revision=self.latest_rev())
907 self.update_local_filesmeta()
908 self.write_deletelist()
909 self.update_datastructs()
911 if self.filenamelist.count('_service'):
912 print 'The package contains a source service.'
913 for filename in self.todo:
914 if filename.startswith('_service:') and os.path.exists(filename):
915 os.unlink(filename) # remove local files
916 print_request_list(self.apiurl, self.prjname, self.name)
918 def write_conflictlist(self):
919 if len(self.in_conflict) == 0:
921 os.unlink(os.path.join(self.storedir, '_in_conflict'))
925 fname = os.path.join(self.storedir, '_in_conflict')
927 f.write('\n'.join(self.in_conflict))
931 def updatefile(self, n, revision):
932 filename = os.path.join(self.dir, n)
933 storefilename = os.path.join(self.storedir, n)
934 mtime = self.findfilebyname(n).mtime
936 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
937 os.utime(filename, (-1, mtime))
939 shutil.copyfile(filename, storefilename)
941 def mergefile(self, n):
942 filename = os.path.join(self.dir, n)
943 storefilename = os.path.join(self.storedir, n)
944 myfilename = os.path.join(self.dir, n + '.mine')
945 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
946 os.rename(filename, myfilename)
948 mtime = self.findfilebyname(n).mtime
949 get_source_file(self.apiurl, self.prjname, self.name, n,
950 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
951 os.utime(upfilename, (-1, mtime))
953 if binary_file(myfilename) or binary_file(upfilename):
955 shutil.copyfile(upfilename, filename)
956 shutil.copyfile(upfilename, storefilename)
957 self.in_conflict.append(n)
958 self.write_conflictlist()
962 # diff3 OPTIONS... MINE OLDER YOURS
963 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
964 # we would rather use the subprocess module, but it is not available before 2.4
965 ret = subprocess.call(merge_cmd, shell=True)
967 # "An exit status of 0 means `diff3' was successful, 1 means some
968 # conflicts were found, and 2 means trouble."
970 # merge was successful... clean up
971 shutil.copyfile(upfilename, storefilename)
972 os.unlink(upfilename)
973 os.unlink(myfilename)
977 shutil.copyfile(upfilename, storefilename)
978 self.in_conflict.append(n)
979 self.write_conflictlist()
982 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
983 print >>sys.stderr, 'the command line was:'
984 print >>sys.stderr, merge_cmd
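# The diff3 exit code convention used above, shown as a stand-alone sketch
# (the file names are placeholders):
#
#   ret = subprocess.call('diff3 -m -E mine older yours > merged', shell=True)
#   if ret == 0:    # clean merge
#       pass
#   elif ret == 1:  # merged, but conflict markers were written
#       pass
#   else:           # ret == 2: diff3 itself failed
#       pass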
989 def update_local_filesmeta(self, revision=None):
991 Update the local _files file in the store.
992 It is replaced with the version pulled from upstream.
994 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
996 f = open(os.path.join(self.storedir, '_files.new'), 'w')
999 os.rename(os.path.join(self.storedir, '_files.new'), os.path.join(self.storedir, '_files'))
1001 if os.path.exists(os.path.join(self.storedir, '_files.new')):
1002 os.unlink(os.path.join(self.storedir, '_files.new'))
1005 def update_datastructs(self):
1007 Update the internal data structures if the local _files
1008 file has changed (e.g. update_local_filesmeta() has been
1012 files_tree = read_filemeta(self.dir)
1013 files_tree_root = files_tree.getroot()
1015 self.rev = files_tree_root.get('rev')
1016 self.srcmd5 = files_tree_root.get('srcmd5')
1018 self.linkinfo = Linkinfo()
1019 self.linkinfo.read(files_tree_root.find('linkinfo'))
1021 self.filenamelist = []
1023 for node in files_tree_root.findall('entry'):
1025 f = File(node.get('name'),
1027 int(node.get('size')),
1028 int(node.get('mtime')))
1030 # okay, a very old version of _files, which didn't contain any metadata yet...
1031 f = File(node.get('name'), '', 0, 0)
1032 self.filelist.append(f)
1033 self.filenamelist.append(f.name)
1035 self.to_be_deleted = read_tobedeleted(self.dir)
1036 self.in_conflict = read_inconflict(self.dir)
1037 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1039 # gather unversioned files, but ignore some stuff
1040 self.excluded = [ i for i in os.listdir(self.dir)
1041 for j in conf.config['exclude_glob']
1042 if fnmatch.fnmatch(i, j) ]
1043 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1044 if i not in self.excluded
1045 if i not in self.filenamelist ]
1048 """tells us if the package is a link (has 'linkinfo').
1049 A package with linkinfo is a package which links to another package.
1050 Returns True if the package is a link, otherwise False."""
1051 return self.linkinfo.islink()
1053 def isexpanded(self):
1054 """tells us if the package is a link which is expanded.
1055 Returns True if the package is expanded, otherwise False."""
1056 return self.linkinfo.isexpanded()
1058 def islinkrepair(self):
1059 """tells us if we are repairing a broken source link."""
1060 return self.linkrepair
1063 """tells us if we have pulled a link."""
1064 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1067 """tells us if the link is frozen."""
1068 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1070 def get_pulled_srcmd5(self):
1072 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1073 pulledrev = line.strip()
1076 def haslinkerror(self):
1078 Returns True if the link is broken otherwise False.
1079 If the package is not a link it returns False.
1081 return self.linkinfo.haserror()
1083 def linkerror(self):
1085 Returns an error message if the link is broken otherwise None.
1086 If the package is not a link it returns None.
1088 return self.linkinfo.error
1090 def update_local_pacmeta(self):
1092 Update the local _meta file in the store.
1093 It is replaced with the version pulled from upstream.
1095 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1096 f = open(os.path.join(self.storedir, '_meta'), 'w')
1100 def findfilebyname(self, n):
1101 for i in self.filelist:
1105 def status(self, n):
1109 file       storefile    file present    STATUS
1110 exists     exists       in _files
1113   x           x              x          ' '  if digest differs: 'M'
1114                                               and if in conflicts file: 'C'
1116   x           -              x          'D'  and listed in _to_be_deleted
1118   -           x              -          'D'  (when file in working copy is already deleted)
1119   -           -              x          'F'  (new in repo, but not yet in working copy)
1124 known_by_meta = False
1126 exists_in_store = False
1127 if n in self.filenamelist:
1128 known_by_meta = True
1129 if os.path.exists(os.path.join(self.absdir, n)):
1131 if os.path.exists(os.path.join(self.storedir, n)):
1132 exists_in_store = True
1135 if exists and not exists_in_store and known_by_meta:
1137 elif n in self.to_be_deleted:
1139 elif n in self.in_conflict:
1141 elif exists and exists_in_store and known_by_meta:
1142 #print self.findfilebyname(n)
1143 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1147 elif exists and not exists_in_store and not known_by_meta:
1149 elif exists and exists_in_store and not known_by_meta:
1151 elif not exists and exists_in_store and known_by_meta:
1153 elif not exists and not exists_in_store and known_by_meta:
1155 elif not exists and exists_in_store and not known_by_meta:
1157 elif not exists and not exists_in_store and not known_by_meta:
1158 # this case shouldn't happen (unless there was a typo in the filename etc.)
1159 raise IOError('osc: \'%s\' is not under version control' % n)
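# Small sketch of how the status codes above are typically consumed (the
# package path is hypothetical):
#
#   p = Package('./mypkg')
#   for name in p.filenamelist + p.filenamelist_unvers:
#       print statfrmt(p.status(name), name)   # e.g. "M mypkg.spec", "? notes.txt"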
1163 def comparePac(self, cmp_pac):
1165 This method compares the local filelist with
1166 the filelist of the passed package to see which files
1167 were added, removed and changed.
1174 for file in self.filenamelist+self.filenamelist_unvers:
1175 state = self.status(file)
1176 if state == 'A' and (not file in cmp_pac.filenamelist):
1177 added_files.append(file)
1178 elif file in cmp_pac.filenamelist and state == 'D':
1179 removed_files.append(file)
1180 elif state == ' ' and not file in cmp_pac.filenamelist:
1181 added_files.append(file)
1182 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1183 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1184 changed_files.append(file)
1185 for file in cmp_pac.filenamelist:
1186 if not file in self.filenamelist:
1187 removed_files.append(file)
1188 removed_files = set(removed_files)
1190 return changed_files, added_files, removed_files
1192 def merge(self, otherpac):
1193 self.todo += otherpac.todo
1207 '\n '.join(self.filenamelist),
1215 def read_meta_from_spec(self, spec = None):
1220 # scan for spec files
1221 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1222 if len(speclist) == 1:
1223 specfile = speclist[0]
1224 elif len(speclist) > 1:
1225 print 'the following specfiles were found:'
1226 for file in speclist:
1228 print 'please specify one with --specfile'
1231 print 'no specfile was found - please specify one ' \
1235 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1236 self.summary = data['Summary']
1237 self.url = data['Url']
1238 self.descr = data['%description']
1241 def update_package_meta(self, force=False):
1243 for the updatepacmetafromspec subcommand
1244 the force argument suppresses the confirmation question
1247 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1249 root = ET.fromstring(m)
1250 root.find('title').text = self.summary
1251 root.find('description').text = ''.join(self.descr)
1252 url = root.find('url')
1254 url = ET.SubElement(root, 'url')
1257 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1258 mf = metafile(u, ET.tostring(root))
1261 print '*' * 36, 'old', '*' * 36
1263 print '*' * 36, 'new', '*' * 36
1264 print ET.tostring(root)
1266 repl = raw_input('Write? (y/N/e) ')
1277 def mark_frozen(self):
1278 store_write_string(self.absdir, '_frozenlink', '')
1280 print "The link in this package is currently broken. I have checked"
1281 print "out the last working version instead, please use 'osc pull'"
1282 print "to repair the link."
1285 def unmark_frozen(self):
1286 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1287 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1289 def latest_rev(self):
1290 if self.islinkrepair():
1291 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1292 elif self.islink() and self.isexpanded():
1293 if self.isfrozen() or self.ispulled():
1294 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1297 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1300 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1302 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1305 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1308 def update(self, rev = None, service_files = False):
1309 # save filelist and (modified) status before replacing the meta file
1310 saved_filenames = self.filenamelist
1311 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1314 self.update_local_filesmeta(rev)
1315 self = Package(self.dir, progress_obj=self.progress_obj)
1317 # which files no longer exist upstream?
1318 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1320 pathn = getTransActPath(self.dir)
1322 for filename in saved_filenames:
1323 if not filename.startswith('_service:') and filename in disappeared:
1324 print statfrmt('D', os.path.join(pathn, filename))
1325 # keep file if it has local modifications
1326 if oldp.status(filename) == ' ':
1327 self.delete_localfile(filename)
1328 self.delete_storefile(filename)
1330 for filename in self.filenamelist:
1332 state = self.status(filename)
1333 if not service_files and filename.startswith('_service:'):
1335 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1336 # no merge necessary... local file is changed, but upstream isn't
1338 elif state == 'M' and filename in saved_modifiedfiles:
1339 status_after_merge = self.mergefile(filename)
1340 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1342 self.updatefile(filename, rev)
1343 print statfrmt('U', os.path.join(pathn, filename))
1345 self.updatefile(filename, rev)
1346 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1348 self.updatefile(filename, rev)
1349 print statfrmt('A', os.path.join(pathn, filename))
1350 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1351 self.updatefile(filename, rev)
1352 self.delete_storefile(filename)
1353 print statfrmt('U', os.path.join(pathn, filename))
1357 self.update_local_pacmeta()
1359 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1360 print 'At revision %s.' % self.rev
1362 if not service_files:
1363 self.run_source_services()
1365 def run_source_services(self):
1366 if self.filenamelist.count('_service'):
1367 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1370 si.execute(self.absdir)
1372 def prepare_filelist(self):
1373 """Prepare a list of files, which will be processed by process_filelist
1374 method. This allows easy modifications of a file list in commit
1378 self.todo = self.filenamelist + self.filenamelist_unvers
1382 for f in (f for f in self.todo if not os.path.isdir(f)):
1384 status = self.status(f)
1387 ret += "%s %s %s\n" % (action, status, f)
1390 # Edit a filelist for package %s
1392 # l, leave = leave a file as is
1393 # r, remove = remove a file
1394 # a, add = add a file
1396 # If you remove a file from the list, it will be left unchanged
1397 # If you remove all, commit will be aborted"""
1401 def edit_filelist(self):
1402 """Opens a package list in editor for eediting. This allows easy
1403 modifications of it just by simple text editing
1407 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1408 f = os.fdopen(fd, 'w')
1409 f.write(self.prepare_filelist())
1411 mtime_orig = os.stat(filename).st_mtime
1413 if sys.platform[:3] != 'win':
1414 editor = os.getenv('EDITOR', default='vim')
1416 editor = os.getenv('EDITOR', default='notepad')
1418 subprocess.call('%s %s' % (editor, filename), shell=True)
1419 mtime = os.stat(filename).st_mtime
1420 if mtime_orig < mtime:
1421 filelist = open(filename).readlines()
1425 raise oscerr.UserAbort()
1427 return self.process_filelist(filelist)
1429 def process_filelist(self, filelist):
1430 """Process a filelist - it add/remove or leave files. This depends on
1431 user input. If no file is processed, it raises an ValueError
1435 for line in (l.strip() for l in filelist if l.strip() and not l.startswith('#')):
1437 foo = line.split(' ')
1439 action, state, name = (foo[0], ' ', foo[3])
1441 action, state, name = (foo[0], foo[1], foo[2])
1444 action = action.lower()
1447 if action in ('r', 'remove'):
1448 if self.status(name) == '?':
1450 if name in self.todo:
1451 self.todo.remove(name)
1453 self.delete_file(name, True)
1454 elif action in ('a', 'add'):
1455 if self.status(name) != '?':
1456 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1459 elif action in ('l', 'leave'):
1462 raise ValueError("Unknow action `%s'" % action)
1465 raise ValueError("Empty filelist")
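# Example of the text format consumed by process_filelist() (file names are
# invented); each non-comment line is "<action> <state> <name>":
#
#   p = Package('.')
#   p.process_filelist([
#       '# Edit a filelist for package example\n',
#       'l M example.spec\n',      # leave: keep the locally modified file
#       'r ? stray-backup.tar\n',  # remove: an untracked file is only dropped from todo
#       'a ? new-patch.diff\n',    # add: put the new file under version control
#   ])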
1468 """for objects to represent the review state in a request"""
1469 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1471 self.by_user = by_user
1472 self.by_group = by_group
1475 self.comment = comment
1478 """for objects to represent the "state" of a request"""
1479 def __init__(self, name=None, who=None, when=None, comment=None):
1483 self.comment = comment
1486 """represents an action"""
1487 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1489 self.src_project = src_project
1490 self.src_package = src_package
1491 self.src_rev = src_rev
1492 self.dst_project = dst_project
1493 self.dst_package = dst_package
1494 self.src_update = src_update
1497 """represent a request and holds its metadata
1498 it has methods to read in metadata from xml,
1499 different views, ..."""
1502 self.state = RequestState()
1505 self.last_author = None
1508 self.statehistory = []
1511 def read(self, root):
1512 self.reqid = int(root.get('id'))
1513 actions = root.findall('action')
1514 if len(actions) == 0:
1515 actions = [ root.find('submit') ] # for old style requests
1517 for action in actions:
1518 type = action.get('type', 'submit')
1520 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1521 if action.findall('source'):
1522 n = action.find('source')
1523 src_prj = n.get('project', None)
1524 src_pkg = n.get('package', None)
1525 src_rev = n.get('rev', None)
1526 if action.findall('target'):
1527 n = action.find('target')
1528 dst_prj = n.get('project', None)
1529 dst_pkg = n.get('package', None)
1530 if action.findall('options'):
1531 n = action.find('options')
1532 if n.findall('sourceupdate'):
1533 src_update = n.find('sourceupdate').text.strip()
1534 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1536 msg = 'invalid request format:\n%s' % ET.tostring(root)
1537 raise oscerr.APIError(msg)
1540 n = root.find('state')
1541 self.state.name, self.state.who, self.state.when \
1542 = n.get('name'), n.get('who'), n.get('when')
1544 self.state.comment = n.find('comment').text.strip()
1546 self.state.comment = None
1548 # read the review states
1549 for r in root.findall('review'):
1551 s.state = r.get('state')
1552 s.by_user = r.get('by_user')
1553 s.by_group = r.get('by_group')
1554 s.who = r.get('who')
1555 s.when = r.get('when')
1557 s.comment = r.find('comment').text.strip()
1560 self.reviews.append(s)
1562 # read the state history
1563 for h in root.findall('history'):
1565 s.name = h.get('name')
1566 s.who = h.get('who')
1567 s.when = h.get('when')
1569 s.comment = h.find('comment').text.strip()
1572 self.statehistory.append(s)
1573 self.statehistory.reverse()
1575 # read a description, if it exists
1577 n = root.find('description').text
1582 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1583 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1584 dst_prj, dst_pkg, src_update)
1587 def list_view(self):
1588 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1590 for a in self.actions:
1591 dst = "%s/%s" % (a.dst_project, a.dst_package)
1592 if a.src_package == a.dst_package:
1596 if a.type=="submit":
1597 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1598 if a.type=="change_devel":
1599 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1600 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1602 ret += '\n %s: %-50s %-20s ' % \
1603 (a.type, sr_source, dst)
1605 if self.statehistory and self.statehistory[0]:
1607 for h in self.statehistory:
1608 who.append("%s(%s)" % (h.who,h.name))
1610 ret += "\n From: %s" % (' -> '.join(who))
1612 txt = re.sub(r'[^\n\x20-\x7e]', '_', self.descr)  # mask non-printable chars (re has no POSIX [:isprint:])
1614 lines = txt.splitlines()
1615 wrapper = textwrap.TextWrapper( width = 80,
1616 initial_indent=' Descr: ',
1617 subsequent_indent=' ')
1618 ret += "\n" + wrapper.fill(lines[0])
1619 wrapper.initial_indent = ' '
1620 for line in lines[1:]:
1621 ret += "\n" + wrapper.fill(line)
1627 def __cmp__(self, other):
1628 return cmp(self.reqid, other.reqid)
1632 for action in self.actions:
1633 action_list=" %s: " % (action.type)
1634 if action.type=="submit":
1637 r="(r%s)" % (action.src_rev)
1639 if action.src_update:
1640 m="(%s)" % (action.src_update)
1641 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1642 if action.dst_package:
1643 action_list=action_list+"/%s" % ( action.dst_package )
1644 elif action.type=="delete":
1645 action_list=action_list+" %s" % ( action.dst_project )
1646 if action.dst_package:
1647 action_list=action_list+"/%s" % ( action.dst_package )
1648 elif action.type=="change_devel":
1649 action_list=action_list+" %s/%s developed in %s/%s" % \
1650 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1651 action_list=action_list+"\n"
1666 self.state.name, self.state.when, self.state.who,
1669 if len(self.reviews):
1670 reviewitems = [ '%-10s %s %s %s %s %s' \
1671 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1672 for i in self.reviews ]
1673 s += '\nReview: ' + '\n '.join(reviewitems)
1676 if len(self.statehistory):
1677 histitems = [ '%-10s %s %s' \
1678 % (i.name, i.when, i.who) \
1679 for i in self.statehistory ]
1680 s += '\nHistory: ' + '\n '.join(histitems)
1687 """format time as Apr 02 18:19
1689 depending on whether it is in the current year
1693 if time.localtime()[0] == time.localtime(t)[0]:
1695 return time.strftime('%b %d %H:%M',time.localtime(t))
1697 return time.strftime('%b %d %Y',time.localtime(t))
1700 def is_project_dir(d):
1701 return os.path.exists(os.path.join(d, store, '_project')) and not \
1702 os.path.exists(os.path.join(d, store, '_package'))
1705 def is_package_dir(d):
1706 return os.path.exists(os.path.join(d, store, '_project')) and \
1707 os.path.exists(os.path.join(d, store, '_package'))
1709 def parse_disturl(disturl):
1710 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1711 revision), else raises an oscerr.WrongArgs exception
1714 m = DISTURL_RE.match(disturl)
1716 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1718 apiurl = m.group('apiurl')
1719 if apiurl.split('.')[0] != 'api':
1720 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1721 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
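# A hypothetical disturl matching DISTURL_RE from the top of this module (all
# values are made up); note that the host is normalized to https://api.<domain>
# when it does not already start with 'api.':
#
#   apiurl, prj, src, repo, rev = parse_disturl(
#       'obs://build.example.org/openSUSE:Factory/standard/0123456789abcdef-mypkg')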
1723 def parse_buildlogurl(buildlogurl):
1724 """Parse a build log url, returns a tuple (apiurl, project, package,
1725 repository, arch), else raises oscerr.WrongArgs exception"""
1727 global BUILDLOGURL_RE
1729 m = BUILDLOGURL_RE.match(buildlogurl)
1731 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1733 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1736 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1737 This is handy for copy/pasting a project/package combination in this form.
1739 Trailing slashes are removed before the split, because the split would
1740 otherwise give an additional empty string.
1748 def expand_proj_pack(args, idx=0, howmany=0):
1749 """looks for occurance of '.' at the position idx.
1750 If howmany is 2, both proj and pack are expanded together
1751 using the current directory, or none of them, if not possible.
1752 If howmany is 0, proj is expanded if possible, then, if there
1753 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1754 expanded, if possible.
1755 If howmany is 1, only proj is expanded if possible.
1757 If args[idx] does not exist, an implicit '.' is assumed.
1758 If not enough elements up to idx exist, an error is raised.
1760 See also parseargs(args), slash_split(args), findpacs(args)
1761 All these need unification, somehow.
1764 # print args,idx,howmany
1767 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1769 if len(args) == idx:
1771 if args[idx+0] == '.':
1772 if howmany == 0 and len(args) > idx+1:
1773 if args[idx+1] == '.':
1775 # remove one dot and make sure to expand both proj and pack
1780 # print args,idx,howmany
1782 args[idx+0] = store_read_project('.')
1785 package = store_read_package('.')
1786 args.insert(idx+1, package)
1790 package = store_read_package('.')
1791 args.insert(idx+1, package)
1795 def findpacs(files, progress_obj=None):
1796 """collect Package objects belonging to the given files
1797 and make sure each Package is returned only once"""
1800 p = filedir_to_pac(f, progress_obj)
1803 if i.name == p.name:
1813 def read_filemeta(dir):
1815 r = ET.parse(os.path.join(dir, store, '_files'))
1816 except SyntaxError, e:
1817 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1818 'When parsing .osc/_files, the following error was encountered:\n'
1823 def read_tobedeleted(dir):
1825 fname = os.path.join(dir, store, '_to_be_deleted')
1827 if os.path.exists(fname):
1828 r = [ line.strip() for line in open(fname) ]
1833 def read_inconflict(dir):
1835 fname = os.path.join(dir, store, '_in_conflict')
1837 if os.path.exists(fname):
1838 r = [ line.strip() for line in open(fname) ]
1843 def parseargs(list_of_args):
1844 """Convenience method osc's commandline argument parsing.
1846 If called with an empty tuple (or list), return a list containing the current directory.
1847 Otherwise, return a list of the arguments."""
1849 return list(list_of_args)
1854 def filedir_to_pac(f, progress_obj=None):
1855 """Takes a working copy path, or a path to a file inside a working copy,
1856 and returns a Package object instance
1858 If the argument was a filename, add it onto the "todo" list of the Package """
1860 if os.path.isdir(f):
1862 p = Package(wd, progress_obj=progress_obj)
1865 wd = os.path.dirname(f)
1868 p = Package(wd, progress_obj=progress_obj)
1869 p.todo = [ os.path.basename(f) ]
1874 def statfrmt(statusletter, filename):
1875 return '%s %s' % (statusletter, filename)
1878 def pathjoin(a, *p):
1879 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1880 path = os.path.join(a, *p)
1881 if path.startswith('./'):
1886 def makeurl(baseurl, l, query=[]):
1887 """Given a list of path compoments, construct a complete URL.
1889 Optional parameters for a query string can be given as a list, as a
1890 dictionary, or as an already assembled string.
1891 In case of a dictionary, the parameters will be urlencoded by this
1892 function. In case of a list they will not be -- this is to be backwards compatible.
1895 if conf.config['verbose'] > 1:
1896 print 'makeurl:', baseurl, l, query
1898 if isinstance(query, list):
1899 query = '&'.join(query)
1900 elif isinstance(query, dict):
1901 query = urlencode(query)
1903 scheme, netloc = urlsplit(baseurl)[0:2]
1904 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
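# Quick sketch of the three accepted query forms (the apiurl is a placeholder):
#
#   makeurl('https://api.example.org', ['source', 'home:user', 'mypkg'])
#   makeurl('https://api.example.org', ['source', 'home:user', 'mypkg'],
#           query=['rev=latest'])         # list: joined with '&', not urlencoded
#   makeurl('https://api.example.org', ['source', 'home:user', 'mypkg'],
#           query={'cmd': 'commit'})      # dict: urlencoded by this function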
1907 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1908 """wrapper around urllib2.urlopen for error handling,
1909 and to support additional (PUT, DELETE) methods"""
1913 if conf.config['http_debug']:
1916 print '--', method, url
1918 if method == 'POST' and not file and not data:
1919 # adding data to an urllib2 request transforms it into a POST
1922 req = urllib2.Request(url)
1924 api_host_options=conf.get_apiurl_api_host_options(url)
1926 for header, value in api_host_options['http_headers']:
1927 req.add_header(header, value)
1929 req.get_method = lambda: method
1931 # POST requests are application/x-www-form-urlencoded by default
1932 # since we change the request into PUT, we also need to adjust the content type header
1933 if method == 'PUT' or (method == 'POST' and data):
1934 req.add_header('Content-Type', 'application/octet-stream')
1936 if isinstance(headers, dict):
1937 for i in headers.keys():
1939 req.add_header(i, headers[i])
1941 if file and not data:
1942 size = os.path.getsize(file)
1944 data = open(file, 'rb').read()
1947 filefd = open(file, 'rb')
1949 if sys.platform[:3] != 'win':
1950 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1952 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1954 except EnvironmentError, e:
1956 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1957 '\non a filesystem which does not support this.' % (e, file))
1958 elif hasattr(e, 'winerror') and e.winerror == 5:
1959 # falling back to the default io
1960 data = open(file, 'rb').read()
1964 if conf.config['debug']: print method, url
1966 old_timeout = socket.getdefaulttimeout()
1967 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1968 if old_timeout != timeout and not api_host_options['sslcertck']:
1969 socket.setdefaulttimeout(timeout)
1971 fd = urllib2.urlopen(req, data=data)
1973 if old_timeout != timeout and not api_host_options['sslcertck']:
1974 socket.setdefaulttimeout(old_timeout)
1975 if hasattr(conf.cookiejar, 'save'):
1976 conf.cookiejar.save(ignore_discard=True)
1978 if filefd: filefd.close()
1983 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1984 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1985 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1986 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
1989 def init_project_dir(apiurl, dir, project):
1990 if not os.path.exists(dir):
1991 if conf.config['checkout_no_colon']:
1992 os.makedirs(dir) # helpful with checkout_no_colon
1995 if not os.path.exists(os.path.join(dir, store)):
1996 os.mkdir(os.path.join(dir, store))
1998 # print 'project=',project,' dir=',dir
1999 store_write_project(dir, project)
2000 store_write_apiurl(dir, apiurl)
2001 if conf.config['do_package_tracking']:
2002 store_write_initial_packages(dir, project, [])
2004 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
2005 if not os.path.isdir(store):
2008 f = open('_project', 'w')
2009 f.write(project + '\n')
2011 f = open('_package', 'w')
2012 f.write(package + '\n')
2016 f = open('_files', 'w')
2017 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2021 ET.ElementTree(element=ET.Element('directory')).write('_files')
2023 f = open('_osclib_version', 'w')
2024 f.write(__store_version__ + '\n')
2027 store_write_apiurl(os.path.pardir, apiurl)
2033 def check_store_version(dir):
2034 versionfile = os.path.join(dir, store, '_osclib_version')
2036 v = open(versionfile).read().strip()
2041 msg = 'Error: "%s" is not an osc working copy.' % os.path.abspath(dir)
2042 if os.path.exists(os.path.join(dir, '.svn')):
2043 msg = msg + '\nTry svn instead of osc.'
2044 raise oscerr.NoWorkingCopy(msg)
2046 if v != __store_version__:
2047 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2048 # version is fine, no migration needed
2049 f = open(versionfile, 'w')
2050 f.write(__store_version__ + '\n')
2053 msg = 'The osc metadata of your working copy "%s"' % dir
2054 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2055 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2056 raise oscerr.WorkingCopyWrongVersion, msg
2059 def meta_get_packagelist(apiurl, prj):
2061 u = makeurl(apiurl, ['source', prj])
2063 root = ET.parse(f).getroot()
2064 return [ node.get('name') for node in root.findall('entry') ]
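# Typical use of the helper above (the apiurl and project name are
# placeholders):
#
#   for name in meta_get_packagelist('https://api.example.org', 'home:user'):
#       print name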
2067 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2068 """return a list of file names,
2069 or a list File() instances if verbose=True"""
2075 query['rev'] = revision
2077 query['rev'] = 'latest'
2079 u = makeurl(apiurl, ['source', prj, package], query=query)
2081 root = ET.parse(f).getroot()
2084 return [ node.get('name') for node in root.findall('entry') ]
2088 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2089 rev = root.get('rev')
2090 for node in root.findall('entry'):
2091 f = File(node.get('name'),
2093 int(node.get('size')),
2094 int(node.get('mtime')))
2100 def meta_get_project_list(apiurl):
2101 u = makeurl(apiurl, ['source'])
2103 root = ET.parse(f).getroot()
2104 return sorted([ node.get('name') for node in root ])
2107 def show_project_meta(apiurl, prj):
2108 url = makeurl(apiurl, ['source', prj, '_meta'])
2110 return f.readlines()
2113 def show_project_conf(apiurl, prj):
2114 url = makeurl(apiurl, ['source', prj, '_config'])
2116 return f.readlines()
2119 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2120 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2124 except urllib2.HTTPError, e:
2125 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2129 def show_package_meta(apiurl, prj, pac):
2130 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2133 return f.readlines()
2134 except urllib2.HTTPError, e:
2135 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)


def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
    path.append('source')
    path.append('_attribute')
    path.append(attribute)
    query.append("with_default=1")
    query.append("with_project=1")
    url = makeurl(apiurl, path, query)
    try:
        f = http_GET(url)
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
        raise


def show_develproject(apiurl, prj, pac):
    m = show_package_meta(apiurl, prj, pac)
    return ET.fromstring(''.join(m)).find('devel').get('project')


def show_pattern_metalist(apiurl, prj):
    url = makeurl(apiurl, ['source', prj, '_pattern'])
    try:
        tree = ET.parse(http_GET(url))
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
        raise
    r = [ node.get('name') for node in tree.getroot() ]
    return r


def show_pattern_meta(apiurl, prj, pattern):
    url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
    try:
        f = http_GET(url)
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
        raise


class metafile:
    """metafile that can be manipulated and is stored back after manipulation."""

    def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
        self.url = url
        self.change_is_required = change_is_required
        (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
        f = os.fdopen(fd, 'w')
        f.write(''.join(input))
        self.hash_orig = dgst(self.filename)

        hash = dgst(self.filename)
        if self.change_is_required == True and hash == self.hash_orig:
            print 'File unchanged. Not saving.'
            os.unlink(self.filename)

        print 'Sending meta data...'
        # don't do any exception handling... it's up to the caller what to do in case
        http_PUT(self.url, file=self.filename)
        os.unlink(self.filename)

        if sys.platform[:3] != 'win':
            editor = os.getenv('EDITOR', default='vim')
        else:
            editor = os.getenv('EDITOR', default='notepad')

        subprocess.call('%s %s' % (editor, self.filename), shell=True)

        except urllib2.HTTPError, e:
            error_help = "%d" % e.code
            if e.headers.get('X-Opensuse-Errorcode'):
                error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)

            print >>sys.stderr, 'BuildService API error:', error_help
            # examine the error - we can't raise an exception because we might want to try again
            if '<summary>' in data:
                print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
            input = raw_input('Try again? ([y/N]): ')
            if input not in ['y', 'Y']:

        if os.path.exists(self.filename):
            print 'discarding %s' % self.filename
            os.unlink(self.filename)


# different types of metadata
metatypes = { 'prj':     { 'path': 'source/%s/_meta',
                           'template': new_project_templ,
                         },
              'pkg':     { 'path': 'source/%s/%s/_meta',
                           'template': new_package_templ,
                         },
              'attribute': { 'path': 'source/%s/%s/_meta',
                             'template': new_attribute_templ,
                           },
              'prjconf': { 'path': 'source/%s/_config',
                         },
              'user':    { 'path': 'person/%s',
                           'template': new_user_template,
                         },
              'pattern': { 'path': 'source/%s/_pattern/%s',
                           'template': new_pattern_template,
                         },
            }


def meta_exists(metatype, path_args=None, template_args=None, create_new=True, apiurl=None):
    if not apiurl:
        apiurl = conf.config['apiurl']
    url = make_meta_url(metatype, path_args, apiurl)
    try:
        data = http_GET(url).readlines()
    except urllib2.HTTPError, e:
        if e.code == 404 and create_new:
            data = metatypes[metatype]['template']
            if template_args:
                data = StringIO(data % template_args).readlines()
        else:
            raise
    return data


def make_meta_url(metatype, path_args=None, apiurl=None):
    if not apiurl:
        apiurl = conf.config['apiurl']
    if metatype not in metatypes.keys():
        raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
    path = metatypes[metatype]['path']
    if path_args:
        path = path % path_args
    return makeurl(apiurl, [path])
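
# Usage sketch (illustrative): the metatypes table above maps a metatype name
# to a path template, so for example
#
#   make_meta_url('pkg', ('openSUSE:Factory', 'osc'))
#
# points at <apiurl>/source/openSUSE:Factory/osc/_meta, while
# make_meta_url('prjconf', ('openSUSE:Factory',)) points at that project's
# _config.  The project and package names here are only placeholders.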


def edit_meta(metatype,
              path_args=None,
              data=None,
              change_is_required=False,
              apiurl=None):

    if not apiurl:
        apiurl = conf.config['apiurl']
    if not data:
        data = meta_exists(metatype,
                           path_args,
                           create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
                           apiurl=apiurl)

    change_is_required = True

    url = make_meta_url(metatype, path_args, apiurl)
    f = metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])


def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
    query['rev'] = revision
    query['rev'] = 'latest'
    if linkrev:
        query['linkrev'] = linkrev
    elif conf.config['linkcontrol']:
        query['linkrev'] = 'base'
    query['emptylink'] = 1
    f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
    return f.readlines()


def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
    m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
    return ET.fromstring(''.join(m)).get('srcmd5')


def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
    m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
    # only source link packages have a <linkinfo> element.
    li_node = ET.fromstring(''.join(m)).find('linkinfo')
    raise oscerr.LinkExpandError(prj, pac, li.error)


def show_upstream_rev(apiurl, prj, pac):
    m = show_files_meta(apiurl, prj, pac)
    return ET.fromstring(''.join(m)).get('rev')


def read_meta_from_spec(specfile, *args):
    """
    Read tags and sections from spec file. To read out
    a tag the passed argument mustn't end with a colon. To
    read out a section the passed argument must start with
    a '%'.
    This method returns a dictionary which contains the
    requested data.
    """
    import codecs, locale, re

    if not os.path.isfile(specfile):
        raise IOError('\'%s\' is not a regular file' % specfile)

    try:
        lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
    except UnicodeDecodeError:
        lines = open(specfile).readlines()

    if itm.startswith('%'):
        sections.append(itm)

    tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
    m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
    if m and m.group('val'):
        spec_data[tag] = m.group('val').strip()
    print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag

    section_pat = '^%s\s*?$'
    for section in sections:
        m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
        start = lines.index(m.group()+'\n') + 1
        print >>sys.stderr, 'error - section \'%s\' does not exist' % section
        for line in lines[start:]:
            if line.startswith('%'):
                break
        spec_data[section] = data
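
# Usage sketch (illustrative): tags are passed without the trailing colon and
# sections with their leading '%', e.g.
#
#   data = read_meta_from_spec('hello.spec', 'Name', 'Version', '%description')
#
# data['Name'] and data['Version'] then hold the stripped tag values, and
# data['%description'] the body of the %description section.  'hello.spec' is
# a placeholder file name.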


def edit_message(footer='', template=''):
    delim = '--This line, and those below, will be ignored--\n'

    (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
    f = os.fdopen(fd, 'w')

    if sys.platform[:3] != 'win':
        editor = os.getenv('EDITOR', default='vim')
    else:
        editor = os.getenv('EDITOR', default='notepad')

    subprocess.call('%s %s' % (editor, filename), shell=True)
    msg = open(filename).read().split(delim)[0].rstrip()

    input = raw_input('Log message not specified\n'
                      'a)bort, c)ontinue, e)dit: ')
    raise oscerr.UserAbort()


def create_delete_request(apiurl, project, package, message):

    package = """package="%s" """ % (package)

    xml = """\
<request>
    <action type="delete">
        <target project="%s" %s/>
    </action>
    <description>%s</description>
</request>
""" % (project, package,
       cgi.escape(message or ''))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')
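
# Usage sketch (illustrative): posting a delete request returns the id the
# server assigned to the new request, e.g.
#
#   reqid = create_delete_request(apiurl, 'home:user', 'obsolete-package',
#                                 'no longer maintained')
#
# 'home:user' and 'obsolete-package' are placeholder names.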


def create_change_devel_request(apiurl,
                                devel_project, devel_package,
                                project, package,
                                message):

    xml = """\
<request>
    <action type="change_devel">
        <source project="%s" package="%s" />
        <target project="%s" package="%s" />
    </action>
    <description>%s</description>
</request>
""" % (devel_project,
       devel_package,
       project,
       package,
       cgi.escape(message or ''))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')


# This creates an old style submit request for server api 1.0
def create_submit_request(apiurl,
                          src_project, src_package,
                          dst_project=None, dst_package=None,
                          message=None, orev=None, src_update=None):

    options_block = """<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)

    # Yes, this kind of xml construction is horrible
    packagexml = """package="%s" """ % (dst_package)
    targetxml = """<target project="%s" %s /> """ % (dst_project, packagexml)

    # XXX: keep the old template for now in order to work with old obs instances
    xml = """\
<request type="submit">
    <source project="%s" package="%s" rev="%s"/>
    %s
    %s
    <description>%s</description>
</request>
""" % (src_project, src_package,
       orev or show_upstream_rev(apiurl, src_project, src_package),
       targetxml,
       options_block,
       cgi.escape(message or ""))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')


def get_request(apiurl, reqid):
    u = makeurl(apiurl, ['request', reqid])
    f = http_GET(u)
    root = ET.parse(f).getroot()


def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
    u = makeurl(apiurl, ['request', reqid],
                query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
    f = http_POST(u, data=message)


def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
    u = makeurl(apiurl, ['request', reqid],
                query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
    f = http_POST(u, data=message)


def get_request_list(apiurl, project, package, req_who='', req_state=('new',), req_type=None ):
    if not "all" in req_state:
        for state in req_state:
            if len(m): m += '%20or%20'
            m += 'state/@name=\'%s\'' % quote_plus(state)
        if len(m): match += "(" + m + ")"

    if len(m): m += '%20and%20'
    m += 'state/@who=\'%s\'' % quote_plus(req_who)
    m += 'history/@who=\'%s\'' % quote_plus(req_who)

    if len(match): match += "%20and%20"
    match += "(" + m + ")"

    # XXX: we cannot use the '|' in the xpath expression because it is not supported by the api
    if project or package:
        for what in ['action', 'submit']:
            if len(m): m += '%20and%20'
            m += '(%s/target/@project=\'%s\'%%20or%%20' % (what, quote_plus(project))
            m += '%s/source/@project=\'%s\')' % (what, quote_plus(project))

            if len(m): m += '%20and%20'
            m += '(%s/target/@package=\'%s\'%%20or%%20' % (what, quote_plus(package))
            m += '%s/source/@package=\'%s\')' % (what, quote_plus(package))

            if len(m): m += '%20and%20'
            m += '%s/@type=\'%s\'' % (what, quote_plus(req_type))

    if len(match): match += '%20and%20'
    match += 'action/@type=\'%s\'' % quote_plus(req_type)
    matches.append(match)

    for match in matches:
        if conf.config['verbose'] > 1:

        u = makeurl(apiurl, ['search', 'request'], ['match=%s' % match])
        f = http_GET(u)
        collection = ET.parse(f).getroot()
        for root in collection.findall('request'):
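
# Illustrative note for get_request_list(): with project='openSUSE:Factory',
# no package and the default req_state=('new',), the code above builds a match
# expression of roughly this shape (spaces pre-escaped as %20):
#
#   (state/@name='new')%20and%20((action/target/@project='openSUSE:Factory'%20or%20action/source/@project='openSUSE:Factory'))
#
# and sends one /search/request query per entry in matches.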


def get_request_log(apiurl, reqid):
    r = get_request(apiurl, reqid)
    frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
    # the description of the request is used for the initial log entry
    # otherwise its comment attribute would contain None
    if len(r.statehistory) >= 1:
        r.statehistory[-1].comment = r.descr
    else:
        r.state.comment = r.descr
    for state in [ r.state ] + r.statehistory:
        s = frmt % (state.name, state.who, state.when, str(state.comment))


def get_user_meta(apiurl, user):
    u = makeurl(apiurl, ['person', quote_plus(user)])
    try:
        f = http_GET(u)
        return ''.join(f.readlines())
    except urllib2.HTTPError:
        print 'user \'%s\' not found' % user
        return None


def get_user_data(apiurl, user, *tags):
    """get specified tags from the user meta"""
    meta = get_user_meta(apiurl, user)
    root = ET.fromstring(meta)
    if root.find(tag).text != None:
        data.append(root.find(tag).text)
    except AttributeError:
        # this part is reached if the tags tuple contains an invalid tag
        print 'The xml file for user \'%s\' seems to be broken' % user


def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
    import tempfile, shutil
    query = { 'rev': revision }

    (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
    o = os.fdopen(fd, 'wb')
    u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
    for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
        o.write(buf)
    shutil.move(tmpfile, targetfilename or filename)


def get_binary_file(apiurl, prj, repo, arch,
                    filename,
                    package = None,
                    target_filename = None,
                    target_mtime = None,
                    progress_meter = False):

    target_filename = target_filename or filename

    where = package or '_repository'
    u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])

    sys.stdout.write("Downloading %s [ 0%%]" % filename)

    f = http_GET(u)
    binsize = int(f.headers['content-length'])

    (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
    os.chmod(tmpfilename, 0644)

    o = os.fdopen(fd, 'wb')

    #buf = f.read(BUFSIZE)
    downloaded += len(buf)

    completion = str(int((float(downloaded)/binsize)*100))
    sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))

    sys.stdout.write('\n')

    shutil.move(tmpfilename, target_filename)
    os.utime(target_filename, (-1, target_mtime))

    # make sure that the temp file is cleaned up when we are interrupted
    try: os.unlink(tmpfilename)
    except: pass


def dgst_from_string(str):
    # Python 2.5 deprecates the md5 module
    # Python 2.4 doesn't have hashlib yet
    try:
        import hashlib
        md5_hash = hashlib.md5()
    except ImportError:
        import md5
        md5_hash = md5.new()
    md5_hash.update(str)
    return md5_hash.hexdigest()
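
# Usage sketch (illustrative): the helper simply returns the hex md5 digest of
# the given string, e.g.
#
#   dgst_from_string('hello')  ->  '5d41402abc4b2a76b9719d911017c592'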


def dgst(file):

    #if not os.path.exists(file):
    f = open(file, 'rb')
    buf = f.read(BUFSIZE)
    return s.hexdigest()


def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    if s and '\0' in s[:4096]:
        return True
    return False


def binary_file(fn):
    """read 4096 bytes from a file named fn, and call binary() on the data"""
    return binary(open(fn, 'rb').read(4096))
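
# Usage sketch (illustrative): both helpers implement the same cheap check that
# diff uses, a NUL byte within the first 4096 bytes marks the data as binary:
#
#   binary('plain text')          ->  False
#   binary('abc\0def')            ->  True
#   binary_file('/usr/bin/env')   ->  True on most systems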


def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
    """
    This method diffs oldfilename against filename (so filename will
    be shown as the new file).
    The variable origfilename is used if filename and oldfilename differ
    in their names (for instance if a tempfile is used for filename etc.)
    """
    if not oldfilename:
        oldfilename = filename
    if not olddir:
        olddir = os.path.join(dir, store)
    if not origfilename:
        origfilename = filename

    file1 = os.path.join(olddir, oldfilename)   # old/stored original
    file2 = os.path.join(dir, filename)         # working copy

    f1 = open(file1, 'rb')
    f2 = open(file2, 'rb')

    if binary(s1) or binary(s2):
        d = ['Binary file %s has changed\n' % origfilename]
    else:
        d = difflib.unified_diff(\
            s1.splitlines(1), \
            s2.splitlines(1), \
            fromfile = '%s\t(revision %s)' % (origfilename, rev), \
            tofile = '%s\t(working copy)' % origfilename)

        # if file doesn't end with newline, we need to append one in the diff result
        for i, line in enumerate(d):
            if not line.endswith('\n'):
                d[i] += '\n\\ No newline at end of file'
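
# Usage sketch (illustrative): diff the working-copy version of 'foo.spec' in
# the current directory against the stored copy in its store directory:
#
#   print get_source_file_diff('.', 'foo.spec', 3)
#
# 'foo.spec' and revision 3 are placeholders; the call is expected to produce
# a unified diff, or a short "Binary file ... has changed" note for binaries.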


def make_diff(wc, revision):
    diff_hdr = 'Index: %s\n'
    diff_hdr += '===================================================================\n'
    olddir = os.getcwd()

    for file in wc.todo:
        if file in wc.filenamelist+wc.filenamelist_unvers:
            state = wc.status(file)
            added_files.append(file)
            removed_files.append(file)
        elif state == 'M' or state == 'C':
            changed_files.append(file)
        diff.append('osc: \'%s\' is not under version control' % file)

    for file in wc.filenamelist+wc.filenamelist_unvers:
        state = wc.status(file)
        if state == 'M' or state == 'C':
            changed_files.append(file)
        added_files.append(file)
        removed_files.append(file)

    tmpdir = tempfile.mkdtemp(str(revision), wc.name)
    init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
    cmp_pac = Package(tmpdir)

    for file in wc.todo:
        if file in cmp_pac.filenamelist:
            if file in wc.filenamelist:
                changed_files.append(file)
            diff.append('osc: \'%s\' is not under version control' % file)
        diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))

    changed_files, added_files, removed_files = wc.comparePac(cmp_pac)

    for file in changed_files:
        diff.append(diff_hdr % file)
        diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
        cmp_pac.updatefile(file, revision)
        diff.append(get_source_file_diff(wc.absdir, file, revision, file,
                                         cmp_pac.absdir, file))

    (fd, tmpfile) = tempfile.mkstemp()
    for file in added_files:
        diff.append(diff_hdr % file)
        diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
                                         os.path.dirname(tmpfile), file))
        diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
                                         os.path.dirname(tmpfile), file))

    # FIXME: this is ugly but it cannot be avoided atm
    #        if a file is deleted via "osc rm file" we should keep the storefile.
    if cmp_pac == None and removed_files:
        tmpdir = tempfile.mkdtemp()
        init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
        tmp_pac = Package(tmpdir)

    for file in removed_files:
        diff.append(diff_hdr % file)
        tmp_pac.updatefile(file, tmp_pac.rev)
        diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                         wc.rev, file, tmp_pac.storedir, file))
        cmp_pac.updatefile(file, revision)
        diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                         revision, file, cmp_pac.storedir, file))

    delete_dir(cmp_pac.absdir)
    delete_dir(tmp_pac.absdir)


def server_diff(apiurl,
                old_project, old_package, old_revision,
                new_project, new_package, new_revision, unified=False):

    query = {'cmd': 'diff', 'expand': '1'}
    query['oproject'] = old_project
    query['opackage'] = old_package
    query['orev'] = old_revision
    query['rev'] = new_revision
    query['unified'] = 1

    u = makeurl(apiurl, ['source', new_project, new_package], query=query)
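
# Illustrative note: for a server-side diff the query above is sent to a URL
# of roughly this form (all names are placeholders):
#
#   <apiurl>/source/<new_project>/<new_package>?cmd=diff&expand=1&oproject=...&opackage=...&orev=...&rev=...&unified=1
#
# i.e. the server computes the diff between the old and new source revisions.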


def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
    """
    creates the plain directory structure for a package dir.
    The 'apiurl' parameter is needed for the project dir initialization.
    The 'project' and 'package' parameters specify the name of the
    project and the package. The optional 'pathname' parameter is used
    for printing out the message that a new dir was created (default: 'prj_dir/package').
    The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
    """
    prj_dir = prj_dir or project

    # FIXME: carefully test each path component of prj_dir,
    # if we have a .osc/_files entry at that level.
    # -> if so, we have a package/project clash,
    # and should rename this path component by appending '.proj'
    # and give user a warning message, to discourage such clashes

    pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
    if is_package_dir(prj_dir):
        # we want this to become a project directory,
        # but it already is a package directory.
        raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')

    if not is_project_dir(prj_dir):
        # this directory could exist as a parent directory for one of our earlier
        # checked out sub-projects. in this case, we still need to initialize it.
        print statfrmt('A', prj_dir)
        init_project_dir(apiurl, prj_dir, project)

    if is_project_dir(os.path.join(prj_dir, package)):
        # the thing exists, but is a project directory and not a package directory
        # FIXME: this should be a warning message to discourage package/project clashes
        raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')

    if not os.path.exists(os.path.join(prj_dir, package)):
        print statfrmt('A', pathname)
        os.mkdir(os.path.join(prj_dir, package))
        os.mkdir(os.path.join(prj_dir, package, store))

    return(os.path.join(prj_dir, package))


def checkout_package(apiurl, project, package,
                     revision=None, pathname=None, prj_obj=None,
                     expand_link=False, prj_dir=None, service_files=None, progress_obj=None):
    try:
        # the project we're in might be deleted.
        # that'll throw an error then.
        olddir = os.getcwd()
    except:
        olddir = os.environ.get("PWD")

    if sys.platform[:3] == 'win':
        prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')

    if conf.config['checkout_no_colon']:
        prj_dir = prj_dir.replace(':', '/')

    pathname = getTransActPath(os.path.join(prj_dir, package))

    # before we create directories and stuff, check if the package actually exists
    show_package_meta(apiurl, project, package)

    # try to read from the linkinfo
    # if it is a link we use the xsrcmd5 as the revision to be checked out
    x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
    x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')

    os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
    init_package_dir(apiurl, project, package, store, revision)
    p = Package(package, progress_obj=progress_obj)

    for filename in p.filenamelist:
        if service_files or not filename.startswith('_service:'):
            p.updatefile(filename, revision)
            # print 'A ', os.path.join(project, package, filename)
            print statfrmt('A', os.path.join(pathname, filename))

    if conf.config['do_package_tracking']:
        # check if we can re-use an existing project object
        prj_obj = Project(os.getcwd())
        prj_obj.set_state(p.name, ' ')
        prj_obj.write_packages()


def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
                     dst_userid = None, keep_develproject = False):
    """
    update pkgmeta with new_name and new_prj and set the calling user as the
    only maintainer (unless keep_maintainers is set). Additionally remove the
    develproject entry (<devel />) unless keep_develproject is true.
    """
    root = ET.fromstring(''.join(pkgmeta))
    root.set('name', new_name)
    root.set('project', new_prj)
    if not keep_maintainers:
        for person in root.findall('person'):
            root.remove(person)
    if not keep_develproject:
        for dp in root.findall('devel'):
            root.remove(dp)
    return ET.tostring(root)
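
# Usage sketch (illustrative): given the _meta of an existing package, rewrite
# it so that it can be stored as 'newpkg' in 'home:user' (placeholder names):
#
#   meta = show_package_meta(apiurl, 'some:project', 'somepackage')
#   new_meta = replace_pkg_meta(meta, 'newpkg', 'home:user')
#
# This is the same preparation step that link_pac(), aggregate_pac() and
# copy_pac() below perform before sending the adjusted meta to the destination.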


def link_to_branch(apiurl, project, package):
    """
    convert a package with a _link + project.diff to a branch
    """
    if '_link' in meta_get_filelist(apiurl, project, package):
        u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
        http_POST(u)
    else:
        raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))


def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
    """
    create a linked package
     - "src" is the original package
     - "dst" is the "link" package that we are creating here
    """
    dst_meta = meta_exists(metatype='pkg',
                           path_args=(quote_plus(dst_project), quote_plus(dst_package)),
                           create_new=False, apiurl=conf.config['apiurl'])

    src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
    dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

    if disable_publish:
        root = ET.fromstring(''.join(dst_meta))
        elm = root.find('publish')
        if elm is None:
            elm = ET.SubElement(root, 'publish')
        ET.SubElement(elm, 'disable')
        dst_meta = ET.tostring(root)

    edit_meta('pkg',
              path_args=(dst_project, dst_package),
              data=dst_meta)

    # create the _link file
    # but first, make sure not to overwrite an existing one
    if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
        if force:
            print >>sys.stderr, 'forced overwrite of existing _link file'
        else:
            print >>sys.stderr, '_link file already exists...! Aborting'

    rev = 'rev="%s"' % rev
    cicount = 'cicount="%s"' % cicount

    print 'Creating _link...',
    link_template = """\
<link project="%s" package="%s" %s %s>
  <!-- <apply name="patch" /> apply a patch on the source directory -->
  <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
  <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
  <!-- <delete>filename</delete> delete a file -->
</link>
""" % (src_project, src_package, rev, cicount)

    u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
    http_PUT(u, data=link_template)


def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
    """
     - "src" is the original package
     - "dst" is the "aggregate" package that we are creating here
     - "map" is a dictionary SRC => TARGET repository mappings
    """
    dst_meta = meta_exists(metatype='pkg',
                           path_args=(quote_plus(dst_project), quote_plus(dst_package)),
                           create_new=False, apiurl=conf.config['apiurl'])

    src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
    dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)

    if disable_publish:
        root = ET.fromstring(''.join(dst_meta))
        elm = root.find('publish')
        if elm is None:
            elm = ET.SubElement(root, 'publish')
        ET.SubElement(elm, 'disable')
        dst_meta = ET.tostring(root)

    edit_meta('pkg',
              path_args=(dst_project, dst_package),
              data=dst_meta)

    # create the _aggregate file
    # but first, make sure not to overwrite an existing one
    if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
        print >>sys.stderr, '_aggregate file already exists...! Aborting'

    print 'Creating _aggregate...',
    aggregate_template = """\
<aggregate project="%s">
""" % (src_project)
    for tgt, src in repo_map.iteritems():
        aggregate_template += """\
  <repository target="%s" source="%s" />
""" % (tgt, src)

    aggregate_template += """\
  <package>%s</package>
</aggregate>
""" % ( src_package)

    u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
    http_PUT(u, data=aggregate_template)
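
# Illustrative note: with src_project='SRC_PRJ', src_package='SRC_PKG' and
# repo_map={'openSUSE_11.1': 'standard'} (all placeholder values) the template
# assembled above would look roughly like
#
#   <aggregate project="SRC_PRJ">
#     <repository target="openSUSE_11.1" source="standard" />
#     <package>SRC_PKG</package>
#   </aggregate>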


def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
    """
    Branch packages defined via attributes (via API call)
    """
    query = { 'cmd': 'branch' }
    query['attribute'] = attribute
    query['target_project'] = targetproject
    query['package'] = package
    if maintained_update_project_attribute:
        query['update_project_attribute'] = maintained_update_project_attribute

    u = makeurl(apiurl, ['source'], query=query)
    try:
        f = http_POST(u)
    except urllib2.HTTPError, e:
        msg = ''.join(e.readlines())
        msg = msg.split('<summary>')[1]
        msg = msg.split('</summary>')[0]
        m = re.match(r"attribute branch call failed: (\S+)/", msg)

    r = f.read()
    r = r.split('targetproject">')[1]
    r = r.split('</data>')[0]


def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False):
    """
    Branch a package (via API call)
    """
    query = { 'cmd': 'branch' }
    query['ignoredevel'] = '1'
    query['target_project'] = target_project
    query['target_package'] = target_package
    u = makeurl(apiurl, ['source', src_project, src_package], query=query)
    try:
        f = http_POST(u)
    except urllib2.HTTPError, e:
        if not return_existing:
            raise
        msg = ''.join(e.readlines())
        msg = msg.split('<summary>')[1]
        msg = msg.split('</summary>')[0]
        m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
        return (True, m.group(1), m.group(2), None, None)

    data = {}
    for i in ET.fromstring(f.read()).findall('data'):
        data[i.get('name')] = i.text
    return (False, data.get('targetproject', None), data.get('targetpackage', None),
            data.get('sourceproject', None), data.get('sourcepackage', None))
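
# Usage sketch (illustrative): branching 'osc' from 'openSUSE:Factory'
# (placeholder names) and unpacking the returned tuple:
#
#   exists, tgt_prj, tgt_pkg, src_prj, src_pkg = branch_pkg(
#       apiurl, 'openSUSE:Factory', 'osc', return_existing=True)
#
# exists is True when the branch target already existed on the server; the
# remaining entries name the target (and, for a fresh branch, the source)
# project and package the server reported.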


def copy_pac(src_apiurl, src_project, src_package,
             dst_apiurl, dst_project, dst_package,
             client_side_copy = False,
             keep_maintainers = False,
             keep_develproject = False,
             expand = False,
             comment = None,
             revision = None):
    """
    Create a copy of a package.

    Copying can be done by downloading the files from one package and
    committing them into the other by uploading them (client-side copy) --
    or by the server, in a single api call.
    """
    src_meta = show_package_meta(src_apiurl, src_project, src_package)
    dst_userid = conf.get_apiurl_usr(dst_apiurl)
    src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
                                dst_userid, keep_develproject)

    print 'Sending meta data...'
    u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
    http_PUT(u, data=src_meta)

    print 'Copying files...'
    if not client_side_copy:
        query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
        query['expand'] = '1'
        query['orev'] = revision
        query['comment'] = comment
        u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)

    # copy one file after the other
    tmpdir = tempfile.mkdtemp(prefix='osc_copypac')
    query = {'rev': 'upload'}
    for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand):
        get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n, revision=revision)
        u = makeurl(dst_apiurl, ['source', dst_project, dst_package, pathname2url(n)], query=query)
        http_PUT(u, file = n)

    query['comment'] = comment