3 # Copyright (C) 2006 Novell Inc. All rights reserved.
4 # This program is free software; it may be used, copied, modified
5 # and distributed under the terms of the GNU General Public Licence,
6 # either version 2, or version 3 (at your option).
8 __version__ = '0.125git'
10 # __store_version__ is to be incremented when the format of the working copy
11 # "store" changes in an incompatible way. Please add any needed migration
12 # functionality to check_store_version().
13 __store_version__ = '1.0'
19 from urllib import pathname2url, quote_plus, urlencode, unquote
20 from urlparse import urlsplit, urlunsplit
21 from cStringIO import StringIO
29 from xml.etree import cElementTree as ET
31 import cElementTree as ET
35 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
36 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
40 # NOTE: do not use this anymore, use conf.exclude_glob instead.
41 # but this needs to stay to avoid breaking tools which use the osc lib
42 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
44 new_project_templ = """\
45 <project name="%(name)s">
47 <title></title> <!-- Short title of NewProject -->
49 <!-- This is for a longer description of the purpose of the project -->
52 <person role="maintainer" userid="%(user)s" />
53 <person role="bugowner" userid="%(user)s" />
54 <!-- remove this block to publish your packages on the mirrors -->
65 <!-- remove this comment to enable one or more build targets
67 <repository name="openSUSE_Factory">
68 <path project="openSUSE:Factory" repository="standard" />
72 <repository name="openSUSE_11.2">
73 <path project="openSUSE:11.2" repository="standard"/>
77 <repository name="openSUSE_11.1">
78 <path project="openSUSE:11.1" repository="standard"/>
82 <repository name="openSUSE_11.0">
83 <path project="openSUSE:11.0" repository="standard"/>
87 <repository name="Fedora_11">
88 <path project="Fedora:11" repository="standard" />
92 <repository name="SLE_11">
93 <path project="SUSE:SLE-11" repository="standard" />
97 <repository name="SLE_10">
98 <path project="SUSE:SLE-10:SDK" repository="standard" />
107 new_package_templ = """\
108 <package name="%(name)s">
110 <title></title> <!-- Title of package -->
113 <!-- for long description -->
116 <person role="maintainer" userid="%(user)s"/>
117 <person role="bugowner" userid="%(user)s"/>
119 <url>PUT_UPSTREAM_URL_HERE</url>
123 use one of the examples below to disable building of this package
124 on a certain architecture, in a certain repository,
125 or a combination thereof:
127 <disable arch="x86_64"/>
128 <disable repository="SUSE_SLE-10"/>
129 <disable repository="SUSE_SLE-10" arch="x86_64"/>
131 Possible sections where you can use the tags above:
141 Please have a look at:
142 http://en.opensuse.org/Restricted_Formats
143 Packages containing formats listed there are NOT allowed to
144 be packaged in the openSUSE Build Service and will be deleted!
151 new_attribute_templ = """\
153 <attribute namespace="" name="">
159 new_user_template = """\
161 <login>%(user)s</login>
162 <email>PUT_EMAIL_ADDRESS_HERE</email>
163 <realname>PUT_REAL_NAME_HERE</realname>
165 <project name="home:%(user)s"/>
181 new_pattern_template = """\
182 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
188 buildstatus_symbols = {'succeeded': '.',
190 'expansion error': 'E',
201 # os.path.samefile is available only under Unix
202 def os_path_samefile(path1, path2):
204 return os.path.samefile(path1, path2)
206 return os.path.realpath(path1) == os.path.realpath(path2)
209 """represent a file, including its metadata"""
210 def __init__(self, name, md5, size, mtime):
220 """Source service content
223 """creates an empty serviceinfo instance"""
226 def read(self, serviceinfo_node):
227 """read in the source services <services> element passed as
230 if serviceinfo_node is None:
233 services = serviceinfo_node.findall('service')
235 for service in services:
236 name = service.get('name')
238 for param in service.findall('param'):
239 option = param.get('name', None)
241 name += " --" + option + " '" + value + "'"
242 self.commands.append(name)
244 msg = 'invalid service format:\n%s' % ET.tostring(root)
245 raise oscerr.APIError(msg)
247 def execute(self, dir):
250 for call in self.commands:
251 temp_dir = tempfile.mkdtemp()
252 name = call.split(None, 1)[0]
253 if not os.path.exists("/usr/lib/obs/service/"+name):
254 msg = "ERROR: service is not installed !"
255 msg += "Can maybe solved with: zypper in obs-server-" + name
256 raise oscerr.APIError(msg)
257 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
258 ret = subprocess.call(c, shell=True)
260 print "ERROR: service call failed: " + c
262 for file in os.listdir(temp_dir):
263 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
267 """linkinfo metadata (which is part of the xml representing a directory
270 """creates an empty linkinfo instance"""
280 def read(self, linkinfo_node):
281 """read in the linkinfo metadata from the <linkinfo> element passed as
283 If the passed element is None, the method does nothing.
285 if linkinfo_node is None:
287 self.project = linkinfo_node.get('project')
288 self.package = linkinfo_node.get('package')
289 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
290 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
291 self.srcmd5 = linkinfo_node.get('srcmd5')
292 self.error = linkinfo_node.get('error')
293 self.rev = linkinfo_node.get('rev')
294 self.baserev = linkinfo_node.get('baserev')
297 """returns True if the linkinfo is not empty, otherwise False"""
298 if self.xsrcmd5 or self.lsrcmd5:
302 def isexpanded(self):
303 """returns True if the package is an expanded link"""
304 if self.lsrcmd5 and not self.xsrcmd5:
309 """returns True if the link is in error state (could not be applied)"""
315 """return an informatory string representation"""
316 if self.islink() and not self.isexpanded():
317 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
318 % (self.project, self.package, self.xsrcmd5, self.rev)
319 elif self.islink() and self.isexpanded():
321 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
322 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
324 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
325 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
331 """represent a project directory, holding packages"""
332 def __init__(self, dir, getPackageList=True):
335 self.absdir = os.path.abspath(dir)
337 self.name = store_read_project(self.dir)
338 self.apiurl = store_read_apiurl(self.dir)
341 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
343 self.pacs_available = []
345 if conf.config['do_package_tracking']:
346 self.pac_root = self.read_packages().getroot()
347 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
348 self.pacs_excluded = [ i for i in os.listdir(self.dir)
349 for j in conf.config['exclude_glob']
350 if fnmatch.fnmatch(i, j) ]
351 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
352 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
353 # in the self.pacs_broken list
354 self.pacs_broken = []
355 for p in self.pacs_have:
356 if not os.path.isdir(os.path.join(self.absdir, p)):
357 # all states will be replaced with the '!'-state
358 # (except when it is already marked as deleted ('D'-state))
359 self.pacs_broken.append(p)
361 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
363 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
365 def checkout_missing_pacs(self, expand_link=False):
366 for pac in self.pacs_missing:
368 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
369 # pac is not under version control but a local file/dir exists
370 msg = 'can\'t add package \'%s\': Object already exists' % pac
371 raise oscerr.PackageExists(self.name, pac, msg)
373 print 'checking out new package %s' % pac
374 checkout_package(self.apiurl, self.name, pac, \
375 pathname=getTransActPath(os.path.join(self.dir, pac)), \
376 prj_obj=self, prj_dir=self.dir, expand_link=expand_link)
378 def set_state(self, pac, state):
379 node = self.get_package_node(pac)
381 self.new_package_entry(pac, state)
383 node.attrib['state'] = state
385 def get_package_node(self, pac):
386 for node in self.pac_root.findall('package'):
387 if pac == node.get('name'):
391 def del_package_node(self, pac):
392 for node in self.pac_root.findall('package'):
393 if pac == node.get('name'):
394 self.pac_root.remove(node)
396 def get_state(self, pac):
397 node = self.get_package_node(pac)
399 return node.get('state')
403 def new_package_entry(self, name, state):
404 ET.SubElement(self.pac_root, 'package', name=name, state=state)
406 def read_packages(self):
407 packages_file = os.path.join(self.absdir, store, '_packages')
408 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
409 return ET.parse(packages_file)
411 # scan project for existing packages and migrate them
413 for data in os.listdir(self.dir):
414 pac_dir = os.path.join(self.absdir, data)
415 # we cannot use self.pacs_available because we cannot guarantee that the package list
416 # was fetched from the server
417 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
418 and Package(pac_dir).name == data:
419 cur_pacs.append(ET.Element('package', name=data, state=' '))
420 store_write_initial_packages(self.absdir, self.name, cur_pacs)
421 return ET.parse(os.path.join(self.absdir, store, '_packages'))
423 def write_packages(self):
424 # TODO: should we only modify the existing file instead of overwriting?
425 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
427 def addPackage(self, pac):
429 for i in conf.config['exclude_glob']:
430 if fnmatch.fnmatch(pac, i):
431 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
432 raise oscerr.OscIOError(None, msg)
433 state = self.get_state(pac)
434 if state is None or state == 'D':
435 self.new_package_entry(pac, 'A')
436 self.write_packages()
437 # sometimes the new pac doesn't exist in the list because
438 # it would take too much time to update all data structs regularly
439 if pac in self.pacs_unvers:
440 self.pacs_unvers.remove(pac)
442 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
444 def delPackage(self, pac, force = False):
445 state = self.get_state(pac.name)
447 if state == ' ' or state == 'D':
449 for file in pac.filenamelist + pac.filenamelist_unvers:
450 filestate = pac.status(file)
451 if filestate == 'M' or filestate == 'C' or \
452 filestate == 'A' or filestate == '?':
455 del_files.append(file)
456 if can_delete or force:
457 for file in del_files:
458 pac.delete_localfile(file)
459 if pac.status(file) != '?':
460 pac.delete_storefile(file)
461 # this is not really necessary
462 pac.put_on_deletelist(file)
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
464 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
465 pac.write_deletelist()
466 self.set_state(pac.name, 'D')
467 self.write_packages()
469 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
472 delete_dir(pac.absdir)
473 self.del_package_node(pac.name)
474 self.write_packages()
475 print statfrmt('D', pac.name)
477 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
479 print 'package is not under version control'
481 print 'unsupported state'
483 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
486 Package(os.path.join(self.dir, pac)).update()
488 # we need to make sure that the _packages file will be written (even if an exception
491 # update complete project
492 # packages which no longer exist upstream
493 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
495 for pac in upstream_del:
496 p = Package(os.path.join(self.dir, pac))
497 self.delPackage(p, force = True)
498 delete_storedir(p.storedir)
503 self.pac_root.remove(self.get_package_node(p.name))
504 self.pacs_have.remove(pac)
506 for pac in self.pacs_have:
507 state = self.get_state(pac)
508 if pac in self.pacs_broken:
509 if self.get_state(pac) != 'A':
510 checkout_package(self.apiurl, self.name, pac,
511 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, prj_dir=self.dir, expand_link=not unexpand_link)
514 p = Package(os.path.join(self.dir, pac))
516 if expand_link and p.islink() and not p.isexpanded():
519 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
524 rev = p.linkinfo.xsrcmd5
525 print 'Expanding to rev', rev
526 elif unexpand_link and p.islink() and p.isexpanded():
527 rev = p.linkinfo.lsrcmd5
528 print 'Unexpanding to rev', rev
529 elif p.islink() and p.isexpanded():
531 print 'Updating %s' % p.name
532 p.update(rev, service_files)
536 # TODO: Package::update has to be fixed to behave like svn does
537 if pac in self.pacs_broken:
538 checkout_package(self.apiurl, self.name, pac,
539 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, prj_dir=self.dir, expand_link=expand_link)
541 Package(os.path.join(self.dir, pac)).update()
542 elif state == 'A' and pac in self.pacs_available:
543 # file/dir called pac already exists and is under version control
544 msg = 'can\'t add package \'%s\': Object already exists' % pac
545 raise oscerr.PackageExists(self.name, pac, msg)
550 print 'unexpected state.. package \'%s\'' % pac
552 self.checkout_missing_pacs(expand_link=not unexpand_link)
554 self.write_packages()
556 def commit(self, pacs = (), msg = '', files = {}):
561 if pac in files:
563 state = self.get_state(pac)
565 self.commitNewPackage(pac, msg, todo)
567 self.commitDelPackage(pac)
569 # display the correct dir when sending the changes
570 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
573 p = Package(os.path.join(self.dir, pac))
576 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
577 print 'osc: \'%s\' is not under version control' % pac
578 elif pac in self.pacs_broken:
579 print 'osc: \'%s\' package not found' % pac
581 self.commitExtPackage(pac, msg, todo)
583 self.write_packages()
585 # if we have packages marked as '!' we cannot commit
586 for pac in self.pacs_broken:
587 if self.get_state(pac) != 'D':
588 msg = 'commit failed: package \'%s\' is missing' % pac
589 raise oscerr.PackageMissing(self.name, pac, msg)
591 for pac in self.pacs_have:
592 state = self.get_state(pac)
595 Package(os.path.join(self.dir, pac)).commit(msg)
597 self.commitDelPackage(pac)
599 self.commitNewPackage(pac, msg)
601 self.write_packages()
603 def commitNewPackage(self, pac, msg = '', files = []):
604 """creates and commits a new package if it does not exist on the server"""
605 if pac in self.pacs_available:
606 print 'package \'%s\' already exists' % pac
608 user = conf.get_apiurl_usr(self.apiurl)
609 edit_meta(metatype='pkg',
610 path_args=(quote_plus(self.name), quote_plus(pac)),
615 # display the correct dir when sending the changes
617 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
621 p = Package(os.path.join(self.dir, pac))
623 print statfrmt('Sending', os.path.normpath(p.dir))
625 self.set_state(pac, ' ')
628 def commitDelPackage(self, pac):
629 """deletes a package on the server and in the working copy"""
631 # display the correct dir when sending the changes
632 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
635 pac_dir = os.path.join(self.dir, pac)
636 p = Package(os.path.join(self.dir, pac))
637 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
638 delete_storedir(p.storedir)
644 pac_dir = os.path.join(self.dir, pac)
645 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
646 print statfrmt('Deleting', getTransActPath(pac_dir))
647 delete_package(self.apiurl, self.name, pac)
648 self.del_package_node(pac)
650 def commitExtPackage(self, pac, msg, files = []):
651 """commits a package from an external project"""
652 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
655 pac_path = os.path.join(self.dir, pac)
657 project = store_read_project(pac_path)
658 package = store_read_package(pac_path)
659 apiurl = store_read_apiurl(pac_path)
660 if meta_exists(metatype='pkg',
661 path_args=(quote_plus(project), quote_plus(package)),
663 create_new=False, apiurl=apiurl):
664 p = Package(pac_path)
668 user = conf.get_apiurl_usr(self.apiurl)
669 edit_meta(metatype='pkg',
670 path_args=(quote_plus(project), quote_plus(package)),
675 p = Package(pac_path)
681 r.append('*****************************************************')
682 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
683 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
684 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
685 r.append('*****************************************************')
691 """represent a package (its directory) and read/keep/write its metadata"""
692 def __init__(self, workingdir):
693 self.dir = workingdir
694 self.absdir = os.path.abspath(self.dir)
695 self.storedir = os.path.join(self.absdir, store)
697 check_store_version(self.dir)
699 self.prjname = store_read_project(self.dir)
700 self.name = store_read_package(self.dir)
701 self.apiurl = store_read_apiurl(self.dir)
703 self.update_datastructs()
707 self.todo_delete = []
710 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
711 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
714 def addfile(self, n):
715 st = os.stat(os.path.join(self.dir, n))
716 f = File(n, None, st.st_size, st.st_mtime)
717 self.filelist.append(f)
718 self.filenamelist.append(n)
719 self.filenamelist_unvers.remove(n)
720 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
722 def delete_file(self, n, force=False):
723 """deletes a file if possible and marks the file as deleted"""
724 state = self.status(n)
725 if state in ['?', 'A', 'M'] and not force:
726 return (False, state)
727 self.delete_localfile(n)
729 self.put_on_deletelist(n)
730 self.write_deletelist()
732 self.delete_storefile(n)
735 def delete_storefile(self, n):
736 try: os.unlink(os.path.join(self.storedir, n))
739 def delete_localfile(self, n):
740 try: os.unlink(os.path.join(self.dir, n))
743 def put_on_deletelist(self, n):
744 if n not in self.to_be_deleted:
745 self.to_be_deleted.append(n)
747 def put_on_conflictlist(self, n):
748 if n not in self.in_conflict:
749 self.in_conflict.append(n)
751 def clear_from_conflictlist(self, n):
752 """delete an entry from the file, and remove the file if it would be empty"""
753 if n in self.in_conflict:
755 filename = os.path.join(self.dir, n)
756 storefilename = os.path.join(self.storedir, n)
757 myfilename = os.path.join(self.dir, n + '.mine')
758 if self.islinkrepair() or self.ispulled():
759 upfilename = os.path.join(self.dir, n + '.new')
761 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
764 os.unlink(myfilename)
765 # the working copy may be updated, so the .r* ending may be obsolete...
767 os.unlink(upfilename)
768 if self.islinkrepair() or self.ispulled():
769 os.unlink(os.path.join(self.dir, n + '.old'))
773 self.in_conflict.remove(n)
775 self.write_conflictlist()
777 def write_deletelist(self):
778 if len(self.to_be_deleted) == 0:
780 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
784 fname = os.path.join(self.storedir, '_to_be_deleted')
786 f.write('\n'.join(self.to_be_deleted))
790 def delete_source_file(self, n):
791 """delete local a source file"""
792 self.delete_localfile(n)
793 self.delete_storefile(n)
795 def delete_remote_source_file(self, n):
796 """delete a remote source file (e.g. from the server)"""
798 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
801 def put_source_file(self, n):
803 # escaping '+' in the URL path (note: not in the URL query string) is
804 # only a workaround for ruby on rails, which swallows it otherwise
806 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
807 http_PUT(u, file = os.path.join(self.dir, n))
809 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
811 def commit(self, msg=''):
812 # commit only if the upstream revision is the same as the working copy's
813 upstream_rev = self.latest_rev()
814 if self.rev != upstream_rev:
815 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
818 self.todo = self.filenamelist_unvers + self.filenamelist
820 pathn = getTransActPath(self.dir)
822 have_conflicts = False
823 for filename in self.todo:
824 if not filename.startswith('_service:') and not filename.startswith('_service_'):
825 st = self.status(filename)
826 if st == 'A' or st == 'M':
827 self.todo_send.append(filename)
828 print statfrmt('Sending', os.path.join(pathn, filename))
830 self.todo_delete.append(filename)
831 print statfrmt('Deleting', os.path.join(pathn, filename))
833 have_conflicts = True
836 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
839 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
840 print 'nothing to do for package %s' % self.name
843 if self.islink() and self.isexpanded():
844 # resolve the link into the upload revision
845 # XXX: do this always?
846 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
847 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
850 print 'Transmitting file data ',
852 for filename in self.todo_delete:
853 # do not touch local files on commit --
854 # delete remotely instead
855 self.delete_remote_source_file(filename)
856 self.to_be_deleted.remove(filename)
857 for filename in self.todo_send:
858 sys.stdout.write('.')
860 self.put_source_file(filename)
862 # all source files are committed - now comes the log
863 query = { 'cmd' : 'commit',
865 'user' : conf.get_apiurl_usr(self.apiurl),
867 if self.islink() and self.isexpanded():
868 query['keeplink'] = '1'
869 if conf.config['linkcontrol'] or self.isfrozen():
870 query['linkrev'] = self.linkinfo.srcmd5
872 query['repairlink'] = '1'
873 query['linkrev'] = self.get_pulled_srcmd5()
874 if self.islinkrepair():
875 query['repairlink'] = '1'
876 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
878 except urllib2.HTTPError, e:
879 # delete upload revision
881 query = { 'cmd': 'deleteuploadrev' }
882 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
888 root = ET.parse(f).getroot()
889 self.rev = int(root.get('rev'))
891 print 'Committed revision %s.' % self.rev
894 os.unlink(os.path.join(self.storedir, '_pulled'))
895 if self.islinkrepair():
896 os.unlink(os.path.join(self.storedir, '_linkrepair'))
897 self.linkrepair = False
898 # XXX: mark package as invalid?
899 print 'The source link has been repaired. This directory can now be removed.'
900 if self.islink() and self.isexpanded():
901 self.update_local_filesmeta(revision=self.latest_rev())
903 self.update_local_filesmeta()
904 self.write_deletelist()
905 self.update_datastructs()
907 if self.filenamelist.count('_service'):
908 print 'The package contains a source service.'
909 for filename in self.todo:
910 if filename.startswith('_service:') and os.path.exists(filename):
911 os.unlink(filename) # remove local files
912 print_request_list(self.apiurl, self.prjname, self.name)
914 def write_conflictlist(self):
915 if len(self.in_conflict) == 0:
917 os.unlink(os.path.join(self.storedir, '_in_conflict'))
921 fname = os.path.join(self.storedir, '_in_conflict')
923 f.write('\n'.join(self.in_conflict))
927 def updatefile(self, n, revision):
928 filename = os.path.join(self.dir, n)
929 storefilename = os.path.join(self.storedir, n)
930 mtime = self.findfilebyname(n).mtime
932 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision)
933 os.utime(filename, (-1, mtime))
935 shutil.copyfile(filename, storefilename)
937 def mergefile(self, n):
938 filename = os.path.join(self.dir, n)
939 storefilename = os.path.join(self.storedir, n)
940 myfilename = os.path.join(self.dir, n + '.mine')
941 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
942 os.rename(filename, myfilename)
944 mtime = self.findfilebyname(n).mtime
945 get_source_file(self.apiurl, self.prjname, self.name, n,
946 revision=self.rev, targetfilename=upfilename)
947 os.utime(upfilename, (-1, mtime))
949 if binary_file(myfilename) or binary_file(upfilename):
951 shutil.copyfile(upfilename, filename)
952 shutil.copyfile(upfilename, storefilename)
953 self.in_conflict.append(n)
954 self.write_conflictlist()
958 # diff3 OPTIONS... MINE OLDER YOURS
959 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
960 # we would rather use the subprocess module, but it is not available before 2.4
961 ret = subprocess.call(merge_cmd, shell=True)
963 # "An exit status of 0 means `diff3' was successful, 1 means some
964 # conflicts were found, and 2 means trouble."
966 # merge was successful... clean up
967 shutil.copyfile(upfilename, storefilename)
968 os.unlink(upfilename)
969 os.unlink(myfilename)
973 shutil.copyfile(upfilename, storefilename)
974 self.in_conflict.append(n)
975 self.write_conflictlist()
978 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
979 print >>sys.stderr, 'the command line was:'
980 print >>sys.stderr, merge_cmd
985 def update_local_filesmeta(self, revision=None):
987 Update the local _files file in the store.
988 It is replaced with the version pulled from upstream.
990 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
992 f = open(os.path.join(self.storedir, '_files.new'), 'w')
995 os.rename(os.path.join(self.storedir, '_files.new'), os.path.join(self.storedir, '_files'))
997 if os.path.exists(os.path.join(self.storedir, '_files.new')):
998 os.unlink(os.path.join(self.storedir, '_files.new'))
1001 def update_datastructs(self):
1003 Update the internal data structures if the local _files
1004 file has changed (e.g. update_local_filesmeta() has been
1008 files_tree = read_filemeta(self.dir)
1009 files_tree_root = files_tree.getroot()
1011 self.rev = files_tree_root.get('rev')
1012 self.srcmd5 = files_tree_root.get('srcmd5')
1014 self.linkinfo = Linkinfo()
1015 self.linkinfo.read(files_tree_root.find('linkinfo'))
1017 self.filenamelist = []
1019 for node in files_tree_root.findall('entry'):
1021 f = File(node.get('name'),
1023 int(node.get('size')),
1024 int(node.get('mtime')))
1026 # okay, a very old version of _files, which didn't contain any metadata yet...
1027 f = File(node.get('name'), '', 0, 0)
1028 self.filelist.append(f)
1029 self.filenamelist.append(f.name)
1031 self.to_be_deleted = read_tobedeleted(self.dir)
1032 self.in_conflict = read_inconflict(self.dir)
1033 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1035 # gather unversioned files, but ignore some stuff
1036 self.excluded = [ i for i in os.listdir(self.dir)
1037 for j in conf.config['exclude_glob']
1038 if fnmatch.fnmatch(i, j) ]
1039 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1040 if i not in self.excluded
1041 if i not in self.filenamelist ]
1044 """tells us if the package is a link (has 'linkinfo').
1045 A package with linkinfo is a package which links to another package.
1046 Returns True if the package is a link, otherwise False."""
1047 return self.linkinfo.islink()
1049 def isexpanded(self):
1050 """tells us if the package is a link which is expanded.
1051 Returns True if the package is expanded, otherwise False."""
1052 return self.linkinfo.isexpanded()
1054 def islinkrepair(self):
1055 """tells us if we are repairing a broken source link."""
1056 return self.linkrepair
1059 """tells us if we have pulled a link."""
1060 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1063 """tells us if the link is frozen."""
1064 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1066 def get_pulled_srcmd5(self):
1068 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1069 pulledrev = line.strip()
1072 def haslinkerror(self):
1074 Returns True if the link is broken otherwise False.
1075 If the package is not a link it returns False.
1077 return self.linkinfo.haserror()
1079 def linkerror(self):
1081 Returns an error message if the link is broken otherwise None.
1082 If the package is not a link it returns None.
1084 return self.linkinfo.error
1086 def update_local_pacmeta(self):
1088 Update the local _meta file in the store.
1089 It is replaced with the version pulled from upstream.
1091 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1092 f = open(os.path.join(self.storedir, '_meta'), 'w')
1096 def findfilebyname(self, n):
1097 for i in self.filelist:
1101 def status(self, n):
1105 file storefile file present STATUS
1106 exists exists in _files
1109 x x x ' ' if digest differs: 'M'
1110 and if in conflicts file: 'C'
1112 x - x 'D' and listed in _to_be_deleted
1114 - x - 'D' (when file in working copy is already deleted)
1115 - - x 'F' (new in repo, but not yet in working copy)
1120 known_by_meta = False
1122 exists_in_store = False
1123 if n in self.filenamelist:
1124 known_by_meta = True
1125 if os.path.exists(os.path.join(self.absdir, n)):
1127 if os.path.exists(os.path.join(self.storedir, n)):
1128 exists_in_store = True
1131 if exists and not exists_in_store and known_by_meta:
1133 elif n in self.to_be_deleted:
1135 elif n in self.in_conflict:
1137 elif exists and exists_in_store and known_by_meta:
1138 #print self.findfilebyname(n)
1139 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1143 elif exists and not exists_in_store and not known_by_meta:
1145 elif exists and exists_in_store and not known_by_meta:
1147 elif not exists and exists_in_store and known_by_meta:
1149 elif not exists and not exists_in_store and known_by_meta:
1151 elif not exists and exists_in_store and not known_by_meta:
1153 elif not exists and not exists_in_store and not known_by_meta:
1154 # this case shouldn't happen (except when there was a typo in the filename etc.)
1155 raise IOError('osc: \'%s\' is not under version control' % n)
1159 def comparePac(self, cmp_pac):
1161 This method compares the local filelist with
1162 the filelist of the passed package to see which files
1163 were added, removed and changed.
1170 for file in self.filenamelist+self.filenamelist_unvers:
1171 state = self.status(file)
1172 if state == 'A' and (not file in cmp_pac.filenamelist):
1173 added_files.append(file)
1174 elif file in cmp_pac.filenamelist and state == 'D':
1175 removed_files.append(file)
1176 elif state == ' ' and not file in cmp_pac.filenamelist:
1177 added_files.append(file)
1178 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1179 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1180 changed_files.append(file)
1181 for file in cmp_pac.filenamelist:
1182 if not file in self.filenamelist:
1183 removed_files.append(file)
1184 removed_files = set(removed_files)
1186 return changed_files, added_files, removed_files
1188 def merge(self, otherpac):
1189 self.todo += otherpac.todo
1203 '\n '.join(self.filenamelist),
1211 def read_meta_from_spec(self, spec = None):
1216 # scan for spec files
1217 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1218 if len(speclist) == 1:
1219 specfile = speclist[0]
1220 elif len(speclist) > 1:
1221 print 'the following specfiles were found:'
1222 for file in speclist:
1224 print 'please specify one with --specfile'
1227 print 'no specfile was found - please specify one ' \
1231 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1232 self.summary = data['Summary']
1233 self.url = data['Url']
1234 self.descr = data['%description']
1237 def update_package_meta(self, force=False):
1239 for the updatepacmetafromspec subcommand
1240 the force argument suppresses the confirmation question
1243 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1245 root = ET.fromstring(m)
1246 root.find('title').text = self.summary
1247 root.find('description').text = ''.join(self.descr)
1248 url = root.find('url')
1250 url = ET.SubElement(root, 'url')
1253 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1254 mf = metafile(u, ET.tostring(root))
1257 print '*' * 36, 'old', '*' * 36
1259 print '*' * 36, 'new', '*' * 36
1260 print ET.tostring(root)
1262 repl = raw_input('Write? (y/N/e) ')
1273 def mark_frozen(self):
1274 store_write_string(self.absdir, '_frozenlink', '')
1276 print "The link in this package is currently broken. I have checked"
1277 print "out the last working version instead, please use 'osc pull'"
1278 print "to repair the link."
1281 def unmark_frozen(self):
1282 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1283 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1285 def latest_rev(self):
1286 if self.islinkrepair():
1287 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1288 elif self.islink() and self.isexpanded():
1289 if self.isfrozen() or self.ispulled():
1290 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1293 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1296 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1298 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1301 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1304 def update(self, rev = None, service_files = False):
1305 # save filelist and (modified) status before replacing the meta file
1306 saved_filenames = self.filenamelist
1307 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1310 self.update_local_filesmeta(rev)
1311 self = Package(self.dir)
1313 # which files no longer exist upstream?
1314 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1316 pathn = getTransActPath(self.dir)
1318 for filename in saved_filenames:
1319 if not filename.startswith('_service:') and filename in disappeared:
1320 print statfrmt('D', os.path.join(pathn, filename))
1321 # keep file if it has local modifications
1322 if oldp.status(filename) == ' ':
1323 self.delete_localfile(filename)
1324 self.delete_storefile(filename)
1326 for filename in self.filenamelist:
1328 state = self.status(filename)
1329 if not service_files and filename.startswith('_service:'):
1331 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1332 # no merge necessary... local file is changed, but upstream isn't
1334 elif state == 'M' and filename in saved_modifiedfiles:
1335 status_after_merge = self.mergefile(filename)
1336 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1338 self.updatefile(filename, rev)
1339 print statfrmt('U', os.path.join(pathn, filename))
1341 self.updatefile(filename, rev)
1342 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1344 self.updatefile(filename, rev)
1345 print statfrmt('A', os.path.join(pathn, filename))
1346 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1347 self.updatefile(filename, rev)
1348 self.delete_storefile(filename)
1349 print statfrmt('U', os.path.join(pathn, filename))
1353 self.update_local_pacmeta()
1355 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1356 print 'At revision %s.' % self.rev
1358 if not service_files:
1359 self.run_source_services()
1361 def run_source_services(self):
1362 if self.filenamelist.count('_service'):
1363 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1366 si.execute(self.absdir)
1368 def prepare_filelist(self):
1369 """Prepare a list of files, which will be processed by process_filelist
1370 method. This allows easy modifications of a file list in commit
1374 self.todo = self.filenamelist + self.filenamelist_unvers
1378 for f in (f for f in self.todo if not os.path.isdir(f)):
1380 status = self.status(f)
1383 ret += "%s %s %s\n" % (action, status, f)
1386 # Edit a filelist for package %s
1388 # l, leave = leave a file as is
1389 # r, remove = remove a file
1390 # a, add = add a file
1392 # If you remove a file from the list, it will be unchanged
1393 # If you remove all files, the commit will be aborted"""
1397 def edit_filelist(self):
1398 """Opens a package list in editor for eediting. This allows easy
1399 modifications of it just by simple text editing
1403 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1404 f = os.fdopen(fd, 'w')
1405 f.write(self.prepare_filelist())
1407 mtime_orig = os.stat(filename).st_mtime
1409 if sys.platform[:3] != 'win':
1410 editor = os.getenv('EDITOR', default='vim')
1412 editor = os.getenv('EDITOR', default='notepad')
1414 subprocess.call('%s %s' % (editor, filename), shell=True)
1415 mtime = os.stat(filename).st_mtime
1416 if mtime_orig < mtime:
1417 filelist = open(filename).readlines()
1421 raise oscerr.UserAbort()
1423 return self.process_filelist(filelist)
1425 def process_filelist(self, filelist):
1426 """Process a filelist - it add/remove or leave files. This depends on
1427 user input. If no file is processed, it raises an ValueError
1431 for line in (l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')):
1433 foo = line.split(' ')
1435 action, state, name = (foo[0], ' ', foo[3])
1437 action, state, name = (foo[0], foo[1], foo[2])
1440 action = action.lower()
1443 if action in ('r', 'remove'):
1444 if self.status(name) == '?':
1446 if name in self.todo:
1447 self.todo.remove(name)
1449 self.delete_file(name, True)
1450 elif action in ('a', 'add'):
1451 if self.status(name) != '?':
1452 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1455 elif action in ('l', 'leave'):
1458 raise ValueError("Unknown action `%s'" % action)
1461 raise ValueError("Empty filelist")
1464 """for objects to represent the review state in a request"""
1465 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1467 self.by_user = by_user
1468 self.by_group = by_group
1471 self.comment = comment
1474 """for objects to represent the "state" of a request"""
1475 def __init__(self, name=None, who=None, when=None, comment=None):
1479 self.comment = comment
1482 """represents an action"""
1483 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1485 self.src_project = src_project
1486 self.src_package = src_package
1487 self.src_rev = src_rev
1488 self.dst_project = dst_project
1489 self.dst_package = dst_package
1490 self.src_update = src_update
1493 """represent a request and holds its metadata
1494 it has methods to read in metadata from xml,
1495 different views, ..."""
1498 self.state = RequestState()
1501 self.last_author = None
1504 self.statehistory = []
1507 def read(self, root):
1508 self.reqid = int(root.get('id'))
1509 actions = root.findall('action')
1510 if len(actions) == 0:
1511 actions = [ root.find('submit') ] # for old style requests
1513 for action in actions:
1514 type = action.get('type', 'submit')
1516 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1517 if action.findall('source'):
1518 n = action.find('source')
1519 src_prj = n.get('project', None)
1520 src_pkg = n.get('package', None)
1521 src_rev = n.get('rev', None)
1522 if action.findall('target'):
1523 n = action.find('target')
1524 dst_prj = n.get('project', None)
1525 dst_pkg = n.get('package', None)
1526 if action.findall('options'):
1527 n = action.find('options')
1528 if n.findall('sourceupdate'):
1529 src_update = n.find('sourceupdate').text.strip()
1530 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1532 msg = 'invalid request format:\n%s' % ET.tostring(root)
1533 raise oscerr.APIError(msg)
1536 n = root.find('state')
1537 self.state.name, self.state.who, self.state.when \
1538 = n.get('name'), n.get('who'), n.get('when')
1540 self.state.comment = n.find('comment').text.strip()
1542 self.state.comment = None
1544 # read the review states
1545 for r in root.findall('review'):
1547 s.state = r.get('state')
1548 s.by_user = r.get('by_user')
1549 s.by_group = r.get('by_group')
1550 s.who = r.get('who')
1551 s.when = r.get('when')
1553 s.comment = r.find('comment').text.strip()
1556 self.reviews.append(s)
1558 # read the state history
1559 for h in root.findall('history'):
1561 s.name = h.get('name')
1562 s.who = h.get('who')
1563 s.when = h.get('when')
1565 s.comment = h.find('comment').text.strip()
1568 self.statehistory.append(s)
1569 self.statehistory.reverse()
1571 # read a description, if it exists
1573 n = root.find('description').text
1578 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1579 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1580 dst_prj, dst_pkg, src_update)
1583 def list_view(self):
1584 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1586 for a in self.actions:
1587 dst = "%s/%s" % (a.dst_project, a.dst_package)
1588 if a.src_package == a.dst_package:
1592 if a.type=="submit":
1593 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1594 if a.type=="change_devel":
1595 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1596 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1598 ret += '\n %s: %-50s %-20s ' % \
1599 (a.type, sr_source, dst)
1601 if self.statehistory and self.statehistory[0]:
1603 for h in self.statehistory:
1604 who.append("%s(%s)" % (h.who,h.name))
1606 ret += "\n From: %s" % (' -> '.join(who))
1608 ret += "\n Descr: %s" % (repr(self.descr))
1613 def __cmp__(self, other):
1614 return cmp(self.reqid, other.reqid)
1618 for action in self.actions:
1619 action_list=" %s: " % (action.type)
1620 if action.type=="submit":
1623 r="(r%s)" % (action.src_rev)
1625 if action.src_update:
1626 m="(%s)" % (action.src_update)
1627 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1628 if action.dst_package:
1629 action_list=action_list+"/%s" % ( action.dst_package )
1630 elif action.type=="delete":
1631 action_list=action_list+" %s" % ( action.dst_project )
1632 if action.dst_package:
1633 action_list=action_list+"/%s" % ( action.dst_package )
1634 elif action.type=="change_devel":
1635 action_list=action_list+" %s/%s developed in %s/%s" % \
1636 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1637 action_list=action_list+"\n"
1652 self.state.name, self.state.when, self.state.who,
1655 if len(self.reviews):
1656 reviewitems = [ '%-10s %s %s %s %s %s' \
1657 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1658 for i in self.reviews ]
1659 s += '\nReview: ' + '\n '.join(reviewitems)
1662 if len(self.statehistory):
1663 histitems = [ '%-10s %s %s' \
1664 % (i.name, i.when, i.who) \
1665 for i in self.statehistory ]
1666 s += '\nHistory: ' + '\n '.join(histitems)
1673 """format time as Apr 02 18:19
1675 depending on whether it is in the current year
1679 if time.localtime()[0] == time.localtime(t)[0]:
1681 return time.strftime('%b %d %H:%M',time.localtime(t))
1683 return time.strftime('%b %d %Y',time.localtime(t))
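# Illustrative note (hypothetical timestamps): a timestamp from the current
# year is rendered like 'Apr 02 18:19', while one from an earlier year is
# rendered like 'Apr 02 2006', matching the two strftime formats above.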
1686 def is_project_dir(d):
1687 return os.path.exists(os.path.join(d, store, '_project')) and not \
1688 os.path.exists(os.path.join(d, store, '_package'))
1691 def is_package_dir(d):
1692 return os.path.exists(os.path.join(d, store, '_project')) and \
1693 os.path.exists(os.path.join(d, store, '_package'))
1695 def parse_disturl(disturl):
1696 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1697 revision), else raises an oscerr.WrongArgs exception
1700 m = DISTURL_RE.match(disturl)
1702 raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
1704 apiurl = m.group('apiurl')
1705 if apiurl.split('.')[0] != 'api':
1706 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1707 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
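# Illustrative example (hypothetical disturl): an input such as
# obs://build.opensuse.org/openSUSE:Factory/standard/abc123-osc would match
# DISTURL_RE and yield ('https://api.opensuse.org', 'openSUSE:Factory',
# 'osc', 'standard', 'abc123'), since a host whose first component is not
# 'api' is rewritten to 'https://api.' plus the remaining domain (see above).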
1709 def parse_buildlogurl(buildlogurl):
1710 """Parse a build log url, returns a tuple (apiurl, project, package,
1711 repository, arch), else raises oscerr.WrongArgs exception"""
1713 global BUILDLOGURL_RE
1715 m = BUILDLOGURL_RE.match(buildlogurl)
1717 raise oscerr.WrongArgs('\'%s\' does not look like a url with a build log' % buildlogurl)
1719 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
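# Illustrative example (hypothetical url): an input such as
# https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/osc/_log
# would match BUILDLOGURL_RE and yield ('https://api.opensuse.org',
# 'openSUSE:Factory', 'osc', 'standard', 'x86_64').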
1722 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1723 This is handy to allow copy/pasting a project/package combination in this form.
1725 Trailing slashes are removed before the split, because the split would
1726 otherwise give an additional empty string.
1734 def expand_proj_pack(args, idx=0, howmany=0):
1735 """looks for occurance of '.' at the position idx.
1736 If howmany is 2, both proj and pack are expanded together
1737 using the current directory, or none of them, if not possible.
1738 If howmany is 0, proj is expanded if possible, then, if there
1739 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1740 expanded, if possible.
1741 If howmany is 1, only proj is expanded if possible.
1743 If args[idx] does not exist, an implicit '.' is assumed.
1744 If not enough elements up to idx exist, an error is raised.
1746 See also parseargs(args), slash_split(args), findpacs(args)
1747 All these need unification, somehow.
1750 # print args,idx,howmany
1753 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1755 if len(args) == idx:
1757 if args[idx+0] == '.':
1758 if howmany == 0 and len(args) > idx+1:
1759 if args[idx+1] == '.':
1761 # remove one dot and make sure to expand both proj and pack
1766 # print args,idx,howmany
1768 args[idx+0] = store_read_project('.')
1771 package = store_read_package('.')
1772 args.insert(idx+1, package)
1776 package = store_read_package('.')
1777 args.insert(idx+1, package)
1781 def findpacs(files):
1782 """collect Package objects belonging to the given files
1783 and make sure each Package is returned only once"""
1786 p = filedir_to_pac(f)
1789 if i.name == p.name:
1799 def read_filemeta(dir):
1801 r = ET.parse(os.path.join(dir, store, '_files'))
1802 except SyntaxError, e:
1803 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1804 'When parsing .osc/_files, the following error was encountered:\n'
1809 def read_tobedeleted(dir):
1811 fname = os.path.join(dir, store, '_to_be_deleted')
1813 if os.path.exists(fname):
1814 r = [ line.strip() for line in open(fname) ]
1819 def read_inconflict(dir):
1821 fname = os.path.join(dir, store, '_in_conflict')
1823 if os.path.exists(fname):
1824 r = [ line.strip() for line in open(fname) ]
1829 def parseargs(list_of_args):
1830 """Convenience method osc's commandline argument parsing.
1832 If called with an empty tuple (or list), return a list containing the current directory.
1833 Otherwise, return a list of the arguments."""
1835 return list(list_of_args)
1840 def filedir_to_pac(f):
1841 """Takes a working copy path, or a path to a file inside a working copy,
1842 and returns a Package object instance
1844 If the argument was a filename, add it onto the "todo" list of the Package """
1846 if os.path.isdir(f):
1851 wd = os.path.dirname(f)
1855 p.todo = [ os.path.basename(f) ]
1860 def statfrmt(statusletter, filename):
1861 return '%s %s' % (statusletter, filename)
1864 def pathjoin(a, *p):
1865 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1866 path = os.path.join(a, *p)
1867 if path.startswith('./'):
1872 def makeurl(baseurl, l, query=[]):
1873 """Given a list of path compoments, construct a complete URL.
1875 Optional parameters for a query string can be given as a list, as a
1876 dictionary, or as an already assembled string.
1877 In case of a dictionary, the parameters will be urlencoded by this
1878 function. In case of a list they are not -- this is for backwards compatibility.
1881 if conf.config['verbose'] > 1:
1882 print 'makeurl:', baseurl, l, query
1884 if isinstance(query, list):
1885 query = '&'.join(query)
1886 elif isinstance(query, dict):
1887 query = urlencode(query)
1889 scheme, netloc = urlsplit(baseurl)[0:2]
1890 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
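# Illustrative usage (hypothetical arguments), matching how makeurl is called
# elsewhere in this module:
#   makeurl(apiurl, ['source', prj, pac], query={'rev': 'latest'})
# would return something like https://api.opensuse.org/source/PRJ/PAC?rev=latest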
1893 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1894 """wrapper around urllib2.urlopen for error handling,
1895 and to support additional (PUT, DELETE) methods"""
1899 if conf.config['http_debug']:
1902 print '--', method, url
1904 if method == 'POST' and not file and not data:
1905 # adding data to an urllib2 request transforms it into a POST
1908 req = urllib2.Request(url)
1910 api_host_options=conf.get_apiurl_api_host_options(url)
1912 for header, value in api_host_options['http_headers']:
1913 req.add_header(header, value)
1915 req.get_method = lambda: method
1917 # POST requests are application/x-www-form-urlencoded by default
1918 # since we change the request into PUT, we also need to adjust the content type header
1919 if method == 'PUT' or (method == 'POST' and data):
1920 req.add_header('Content-Type', 'application/octet-stream')
1922 if isinstance(headers, dict):
1923 for i in headers.keys():
1925 req.add_header(i, headers[i])
1927 if file and not data:
1928 size = os.path.getsize(file)
1930 data = open(file, 'rb').read()
1933 filefd = open(file, 'rb')
1935 if sys.platform[:3] != 'win':
1936 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1938 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1940 except EnvironmentError, e:
1942 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1943 '\non a filesystem which does not support this.' % (e, file))
1944 elif hasattr(e, 'winerror') and e.winerror == 5:
1945 # falling back to the default io
1946 data = open(file, 'rb').read()
1950 if conf.config['debug']: print method, url
1952 old_timeout = socket.getdefaulttimeout()
1953 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1954 if old_timeout != timeout and not api_host_options['sslcertck']:
1955 socket.setdefaulttimeout(timeout)
1957 fd = urllib2.urlopen(req, data=data)
1959 if old_timeout != timeout and not api_host_options['sslcertck']:
1960 socket.setdefaulttimeout(old_timeout)
1961 if hasattr(conf.cookiejar, 'save'):
1962 conf.cookiejar.save(ignore_discard=True)
1964 if filefd: filefd.close()
1969 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1970 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1971 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1972 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
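# Illustrative usage (hypothetical arguments): these helpers are typically
# combined with makeurl(), e.g.
#   f = http_GET(makeurl(apiurl, ['source', prj]))
#   root = ET.parse(f).getroot()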
1975 def init_project_dir(apiurl, dir, project):
1976 if not os.path.exists(dir):
1977 if conf.config['checkout_no_colon']:
1978 os.makedirs(dir) # helpful with checkout_no_colon
1981 if not os.path.exists(os.path.join(dir, store)):
1982 os.mkdir(os.path.join(dir, store))
1984 # print 'project=',project,' dir=',dir
1985 store_write_project(dir, project)
1986 store_write_apiurl(dir, apiurl)
1987 if conf.config['do_package_tracking']:
1988 store_write_initial_packages(dir, project, [])
1990 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
1991 if not os.path.isdir(store):
1994 f = open('_project', 'w')
1995 f.write(project + '\n')
1997 f = open('_package', 'w')
1998 f.write(package + '\n')
2002 f = open('_files', 'w')
2003 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2007 ET.ElementTree(element=ET.Element('directory')).write('_files')
2009 f = open('_osclib_version', 'w')
2010 f.write(__store_version__ + '\n')
2013 store_write_apiurl(os.path.pardir, apiurl)
2019 def check_store_version(dir):
2020 versionfile = os.path.join(dir, store, '_osclib_version')
2022 v = open(versionfile).read().strip()
2027 msg = 'Error: "%s" is not an osc working copy.' % os.path.abspath(dir)
2028 if os.path.exists(os.path.join(dir, '.svn')):
2029 msg = msg + '\nTry svn instead of osc.'
2030 raise oscerr.NoWorkingCopy(msg)
2032 if v != __store_version__:
2033 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2034 # version is fine, no migration needed
2035 f = open(versionfile, 'w')
2036 f.write(__store_version__ + '\n')
2039 msg = 'The osc metadata of your working copy "%s"' % dir
2040 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2041 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2042 raise oscerr.WorkingCopyWrongVersion, msg
2045 def meta_get_packagelist(apiurl, prj):
2047 u = makeurl(apiurl, ['source', prj])
2049 root = ET.parse(f).getroot()
2050 return [ node.get('name') for node in root.findall('entry') ]
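# Illustrative usage (hypothetical project name):
#   meta_get_packagelist(apiurl, 'openSUSE:Factory')
# returns the names of all packages in that project.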
2053 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2054 """return a list of file names,
2055 or a list of File() instances if verbose=True"""
2061 query['rev'] = revision
2063 query['rev'] = 'latest'
2065 u = makeurl(apiurl, ['source', prj, package], query=query)
2067 root = ET.parse(f).getroot()
2070 return [ node.get('name') for node in root.findall('entry') ]
2074 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2075 rev = root.get('rev')
2076 for node in root.findall('entry'):
2077 f = File(node.get('name'),
2079 int(node.get('size')),
2080 int(node.get('mtime')))
2086 def meta_get_project_list(apiurl):
2087 u = makeurl(apiurl, ['source'])
2089 root = ET.parse(f).getroot()
2090 return sorted([ node.get('name') for node in root ])
2093 def show_project_meta(apiurl, prj):
2094 url = makeurl(apiurl, ['source', prj, '_meta'])
2096 return f.readlines()
2099 def show_project_conf(apiurl, prj):
2100 url = makeurl(apiurl, ['source', prj, '_config'])
2102 return f.readlines()
2105 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2106 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2110 except urllib2.HTTPError, e:
2111 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2115 def show_package_meta(apiurl, prj, pac):
2116 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2119 return f.readlines()
2120 except urllib2.HTTPError, e:
2121 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2125 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2127 path.append('source')
2133 path.append('_attribute')
2135 path.append(attribute)
2138 query.append("with_default=1")
2140 query.append("with_project=1")
2141 url = makeurl(apiurl, path, query)
2144 return f.readlines()
2145 except urllib2.HTTPError, e:
2146 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2150 def show_develproject(apiurl, prj, pac):
2151 m = show_package_meta(apiurl, prj, pac)
2153 return ET.fromstring(''.join(m)).find('devel').get('project')
2158 def show_pattern_metalist(apiurl, prj):
2159 url = makeurl(apiurl, ['source', prj, '_pattern'])
2163 except urllib2.HTTPError, e:
2164 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2166 r = [ node.get('name') for node in tree.getroot() ]
2171 def show_pattern_meta(apiurl, prj, pattern):
2172 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2175 return f.readlines()
2176 except urllib2.HTTPError, e:
2177 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2182 """metafile that can be manipulated and is stored back after manipulation."""
2183 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2187 self.change_is_required = change_is_required
2188 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2189 f = os.fdopen(fd, 'w')
2190 f.write(''.join(input))
2192 self.hash_orig = dgst(self.filename)
2195 hash = dgst(self.filename)
2196 if self.change_is_required == True and hash == self.hash_orig:
2197 print 'File unchanged. Not saving.'
2198 os.unlink(self.filename)
2201 print 'Sending meta data...'
2202 # don't do any exception handling... it's up to the caller what to do in case
2204 http_PUT(self.url, file=self.filename)
2205 os.unlink(self.filename)
2210 if sys.platform[:3] != 'win':
2211 editor = os.getenv('EDITOR', default='vim')
2213 editor = os.getenv('EDITOR', default='notepad')
2215 subprocess.call('%s %s' % (editor, self.filename), shell=True)
2216 if self.change_is_required == True:
2219 except urllib2.HTTPError, e:
2220 error_help = "%d" % e.code
2221 if e.headers.get('X-Opensuse-Errorcode'):
2222 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2224 print >>sys.stderr, 'BuildService API error:', error_help
2225 # examine the error - we can't raise an exception because we might want
2228 if '<summary>' in data:
2229 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2230 input = raw_input('Try again? ([y/N]): ')
2231 if input != 'y' and input != 'Y':
2242 if os.path.exists(self.filename):
2243 print 'discarding', self.filename
2244 os.unlink(self.filename)
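# A hedged sketch of how this class is typically driven (compare edit_meta()
# below): construct it with a meta URL and the current XML lines, then let the
# class store the edited result. 'home:user' is a placeholder project name.
#
#   url = make_meta_url('prj', quote_plus('home:user'))
#   data = show_project_meta(conf.config['apiurl'], 'home:user')
#   f = metafile(url, data, change_is_required=True)
#   # f.filename now holds the XML; editing in $EDITOR and the final
#   # http_PUT are handled by the methods above.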
2248 # different types of metadata
2249 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2250 'template': new_project_templ,
2253 'pkg': { 'path' : 'source/%s/%s/_meta',
2254 'template': new_package_templ,
2257 'attribute': { 'path' : 'source/%s/%s/_meta',
2258 'template': new_attribute_templ,
2261 'prjconf': { 'path': 'source/%s/_config',
2265 'user': { 'path': 'person/%s',
2266 'template': new_user_template,
2269 'pattern': { 'path': 'source/%s/_pattern/%s',
2270 'template': new_pattern_template,
2275 def meta_exists(metatype,
2282 apiurl = conf.config['apiurl']
2283 url = make_meta_url(metatype, path_args, apiurl)
2285 data = http_GET(url).readlines()
2286 except urllib2.HTTPError, e:
2287 if e.code == 404 and create_new:
2288 data = metatypes[metatype]['template']
2290 data = StringIO(data % template_args).readlines()
2295 def make_meta_url(metatype, path_args=None, apiurl=None):
2297 apiurl = conf.config['apiurl']
2298 if metatype not in metatypes.keys():
2299 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2300 path = metatypes[metatype]['path']
2303 path = path % path_args
2305 return makeurl(apiurl, [path])
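# Hedged sketch of the metatypes/make_meta_url lookup (names are placeholders):
#
#   metatypes['pkg']['path']                    # 'source/%s/%s/_meta'
#   u = make_meta_url('pkg', (quote_plus('home:user'), quote_plus('hello')))
#   # -> <apiurl>/source/home:user/hello/_meta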
2308 def edit_meta(metatype,
2313 change_is_required=False,
2317 apiurl = conf.config['apiurl']
2319 data = meta_exists(metatype,
2322 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2326 change_is_required = True
2328 url = make_meta_url(metatype, path_args, apiurl)
2329 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2337 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
2340 query['rev'] = revision
2342 query['rev'] = 'latest'
2344 query['linkrev'] = linkrev
2345 elif conf.config['linkcontrol']:
2346 query['linkrev'] = 'base'
2350 query['emptylink'] = 1
2351 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2352 return f.readlines()
2355 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2356 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2357 return ET.fromstring(''.join(m)).get('srcmd5')
2360 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2361 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2363 # only source link packages have a <linkinfo> element.
2364 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2372 raise oscerr.LinkExpandError(prj, pac, li.error)
2376 def show_upstream_rev(apiurl, prj, pac):
2377 m = show_files_meta(apiurl, prj, pac)
2378 return ET.fromstring(''.join(m)).get('rev')
2381 def read_meta_from_spec(specfile, *args):
2382 import codecs, locale, re
2384     Read tags and sections from a spec file. To read out
2385 a tag the passed argument mustn't end with a colon. To
2386 read out a section the passed argument must start with
2388 This method returns a dictionary which contains the
2392 if not os.path.isfile(specfile):
2393 raise IOError('\'%s\' is not a regular file' % specfile)
2396 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2397 except UnicodeDecodeError:
2398 lines = open(specfile).readlines()
2405 if itm.startswith('%'):
2406 sections.append(itm)
2410 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2412 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2413 if m and m.group('val'):
2414 spec_data[tag] = m.group('val').strip()
2416 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2419 section_pat = '^%s\s*?$'
2420 for section in sections:
2421 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2423 start = lines.index(m.group()+'\n') + 1
2425 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2428 for line in lines[start:]:
2429 if line.startswith('%'):
2432 spec_data[section] = data
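# A minimal sketch of reading tags and sections from a spec file; the file
# name and the requested tags/sections are placeholders:
#
#   data = read_meta_from_spec('hello.spec', 'Summary', 'Url', '%description')
#   print data['Summary'], data['Url']
#   print data['%description']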
2437 def edit_message(footer='', template=''):
2438 delim = '--This line, and those below, will be ignored--\n\n' + footer
2440 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2441 f = os.fdopen(fd, 'w')
2447 mtime_orig = os.stat(filename).st_mtime
2449 if sys.platform[:3] != 'win':
2450 editor = os.getenv('EDITOR', default='vim')
2452 editor = os.getenv('EDITOR', default='notepad')
2454 subprocess.call('%s %s' % (editor, filename), shell=True)
2455 mtime = os.stat(filename).st_mtime
2457 if mtime_orig < mtime:
2458 msg = open(filename).read()
2460 return msg.split(delim)[0].rstrip()
2462 input = raw_input('Log message unchanged or not specified\n'
2463 'a)bort, c)ontinue, e)dit: ')
2466 raise oscerr.UserAbort
2474 def create_delete_request(apiurl, project, package, message):
2479 package = """package="%s" """ % (package)
2485 <action type="delete">
2486 <target project="%s" %s/>
2489 <description>%s</description>
2491 """ % (project, package,
2492 cgi.escape(message or ''))
2494 u = makeurl(apiurl, ['request'], query='cmd=create')
2495 f = http_POST(u, data=xml)
2497 root = ET.parse(f).getroot()
2498 return root.get('id')
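# Hedged usage sketch (project, package and message are placeholders); the
# call POSTs the XML above to /request?cmd=create and returns the request id:
#
#   reqid = create_delete_request(conf.config['apiurl'],
#                                 'home:user', 'hello', 'package is obsolete')
#   print 'created request', reqid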
2501 def create_change_devel_request(apiurl,
2502 devel_project, devel_package,
2509 <action type="change_devel">
2510 <source project="%s" package="%s" />
2511 <target project="%s" package="%s" />
2514 <description>%s</description>
2516 """ % (devel_project,
2520 cgi.escape(message or ''))
2522 u = makeurl(apiurl, ['request'], query='cmd=create')
2523 f = http_POST(u, data=xml)
2525 root = ET.parse(f).getroot()
2526 return root.get('id')
2529 # This creates an old style submit request for server api 1.0
2530 def create_submit_request(apiurl,
2531 src_project, src_package,
2532 dst_project=None, dst_package=None,
2533 message=None, orev=None, src_update=None):
2538 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2540 # Yes, this kind of xml construction is horrible
2545 packagexml = """package="%s" """ %( dst_package )
2546 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2547 # XXX: keep the old template for now in order to work with old obs instances
2549 <request type="submit">
2551 <source project="%s" package="%s" rev="%s"/>
2556 <description>%s</description>
2560 orev or show_upstream_rev(apiurl, src_project, src_package),
2563 cgi.escape(message or ""))
2565 u = makeurl(apiurl, ['request'], query='cmd=create')
2566 f = http_POST(u, data=xml)
2568 root = ET.parse(f).getroot()
2569 return root.get('id')
2572 def get_request(apiurl, reqid):
2573 u = makeurl(apiurl, ['request', reqid])
2575 root = ET.parse(f).getroot()
2582 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2585 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2586 f = http_POST(u, data=message)
2589 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2592 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2593 f = http_POST(u, data=message)
2597 def get_request_list(apiurl, project, package, req_who='', req_state=('new',), req_type=None ):
2603 if not "all" in req_state:
2604 for state in req_state:
2605 if len(m): m += '%20or%20'
2606 m += 'state/@name=\'%s\'' % quote_plus(state)
2607 if len(m): match += "(" + m + ")"
2610 if len(m): m += '%20and%20'
2611 m += 'state/@who=\'%s\'' % quote_plus(req_who)
2613 m += 'history/@who=\'%s\'' % quote_plus(req_who)
2615 if len(match): match += "%20and%20"
2616 match += "(" + m + ")"
2618 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2620 if project or package:
2621 for what in ['action', 'submit']:
2624 if len(m): m += '%20and%20'
2625 m += '(%s/target/@project=\'%s\'%%20or%%20' % (what, quote_plus(project))
2626 m += '%s/source/@project=\'%s\')' % (what, quote_plus(project))
2628 if len(m): m += '%20and%20'
2629 m += '(%s/target/@package=\'%s\'%%20or%%20' % (what, quote_plus(package))
2630 m += '%s/source/@package=\'%s\')' % (what, quote_plus(package))
2632 if len(m): m += '%20and%20'
2633 m += '%s/@type=\'%s\'' % (what, quote_plus(req_type))
2638 if len(match): match += '%20and%20'
2639 match += 'action/@type=\'%s\'' % quote_plus(req_type)
2640 matches.append(match)
2642 for match in matches:
2643 if conf.config['verbose'] > 1:
2645 u = makeurl(apiurl, ['search', 'request'], ['match=%s' % match])
2647 collection = ET.parse(f).getroot()
2649 for root in collection.findall('request'):
2657 def get_request_log(apiurl, reqid):
2658 r = get_request(conf.config['apiurl'], reqid)
2660 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2661 # the description of the request is used for the initial log entry
2662 # otherwise its comment attribute would contain None
2663 if len(r.statehistory) >= 1:
2664 r.statehistory[-1].comment = r.descr
2666 r.state.comment = r.descr
2667 for state in [ r.state ] + r.statehistory:
2668 s = frmt % (state.name, state.who, state.when, str(state.comment))
2673 def get_user_meta(apiurl, user):
2674 u = makeurl(apiurl, ['person', quote_plus(user)])
2677 return ''.join(f.readlines())
2678 except urllib2.HTTPError:
2679 print 'user \'%s\' not found' % user
2683 def get_user_data(apiurl, user, *tags):
2684 """get specified tags from the user meta"""
2685 meta = get_user_meta(apiurl, user)
2688 root = ET.fromstring(meta)
2691 if root.find(tag).text != None:
2692 data.append(root.find(tag).text)
2696 except AttributeError:
2697 # this part is reached if the tags tuple contains an invalid tag
2698 print 'The xml file for user \'%s\' seems to be broken' % user
2703 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision = None):
2704 import tempfile, shutil
2707 query = { 'rev': revision }
2711 (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
2712 o = os.fdopen(fd, 'wb')
2713 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2714 for buf in streamfile(u, http_GET, BUFSIZE):
2717 shutil.move(tmpfile, targetfilename or filename)
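# Hedged usage sketch; downloads a single source file into the current
# directory (all names are placeholders):
#
#   get_source_file(conf.config['apiurl'], 'home:user', 'hello', 'hello.spec')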
2725 def get_binary_file(apiurl, prj, repo, arch,
2728 target_filename = None,
2729 target_mtime = None,
2730 progress_meter = False):
2732 target_filename = target_filename or filename
2734 where = package or '_repository'
2735 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2738 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2742 binsize = int(f.headers['content-length'])
2745 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2746 os.chmod(tmpfilename, 0644)
2749 o = os.fdopen(fd, 'wb')
2753 #buf = f.read(BUFSIZE)
2757 downloaded += len(buf)
2759 completion = str(int((float(downloaded)/binsize)*100))
2760 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2765 sys.stdout.write('\n')
2767 shutil.move(tmpfilename, target_filename)
2769 os.utime(target_filename, (-1, target_mtime))
2771 # make sure that the temp file is cleaned up when we are interrupted
2773 try: os.unlink(tmpfilename)
2776 def dgst_from_string(str):
2777 # Python 2.5 deprecates the md5 module
2778 # Python 2.4 doesn't have hashlib yet
2781 md5_hash = hashlib.md5()
2784 md5_hash = md5.new()
2785 md5_hash.update(str)
2786 return md5_hash.hexdigest()
2790 #if not os.path.exists(file):
2800 f = open(file, 'rb')
2802 buf = f.read(BUFSIZE)
2805 return s.hexdigest()
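# Hedged sketch of the two digest helpers above (the path is a placeholder):
#
#   dgst_from_string('some data')   # hexdigest of an in-memory string
#   dgst('/tmp/somefile')           # hexdigest of a file, read in BUFSIZE chunks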
2810 """return true if a string is binary data using diff's heuristic"""
2811 if s and '\0' in s[:4096]:
2816 def binary_file(fn):
2817 """read 4096 bytes from a file named fn, and call binary() on the data"""
2818 return binary(open(fn, 'rb').read(4096))
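# Hedged sketch; this is the same check the diff code below uses to decide
# between a textual diff and a "Binary file ... has changed" note:
#
#   if binary_file('/tmp/somefile'):     # path is a placeholder
#       print 'binary content, no textual diff'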
2821 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2823     This method diffs oldfilename against filename (so filename will
2824 be shown as the new file).
2825 The variable origfilename is used if filename and oldfilename differ
2826 in their names (for instance if a tempfile is used for filename etc.)
2832 oldfilename = filename
2835 olddir = os.path.join(dir, store)
2837 if not origfilename:
2838 origfilename = filename
2840 file1 = os.path.join(olddir, oldfilename) # old/stored original
2841 file2 = os.path.join(dir, filename) # working copy
2843 f1 = open(file1, 'rb')
2847 f2 = open(file2, 'rb')
2851 if binary(s1) or binary (s2):
2852 d = ['Binary file %s has changed\n' % origfilename]
2855 d = difflib.unified_diff(\
2858 fromfile = '%s (revision %s)' % (origfilename, rev), \
2859 tofile = '%s (working copy)' % origfilename)
2861     # if a file doesn't end with a newline, we need to append one in the diff result
2863 for i, line in enumerate(d):
2864 if not line.endswith('\n'):
2865 d[i] += '\n\\ No newline at end of file'
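# Hedged usage sketch: diff a working-copy file against its stored copy
# (directory, file name and revision are placeholders):
#
#   d = get_source_file_diff('.', 'hello.spec', 7)
#   sys.stdout.write(''.join(d))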
2871 def make_diff(wc, revision):
2877 diff_hdr = 'Index: %s\n'
2878 diff_hdr += '===================================================================\n'
2880 olddir = os.getcwd()
2884 for file in wc.todo:
2885 if file in wc.filenamelist+wc.filenamelist_unvers:
2886 state = wc.status(file)
2888 added_files.append(file)
2890 removed_files.append(file)
2891 elif state == 'M' or state == 'C':
2892 changed_files.append(file)
2894 diff.append('osc: \'%s\' is not under version control' % file)
2896 for file in wc.filenamelist+wc.filenamelist_unvers:
2897 state = wc.status(file)
2898 if state == 'M' or state == 'C':
2899 changed_files.append(file)
2901 added_files.append(file)
2903 removed_files.append(file)
2905 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2907 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2908 cmp_pac = Package(tmpdir)
2910 for file in wc.todo:
2911 if file in cmp_pac.filenamelist:
2912 if file in wc.filenamelist:
2913 changed_files.append(file)
2915 diff.append('osc: \'%s\' is not under version control' % file)
2917 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2919 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2921 for file in changed_files:
2922 diff.append(diff_hdr % file)
2924 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2926 cmp_pac.updatefile(file, revision)
2927 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
2928 cmp_pac.absdir, file))
2929 (fd, tmpfile) = tempfile.mkstemp()
2930 for file in added_files:
2931 diff.append(diff_hdr % file)
2933 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
2934 os.path.dirname(tmpfile), file))
2936 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
2937 os.path.dirname(tmpfile), file))
2939 # FIXME: this is ugly but it cannot be avoided atm
2940 # if a file is deleted via "osc rm file" we should keep the storefile.
2942 if cmp_pac == None and removed_files:
2943 tmpdir = tempfile.mkdtemp()
2945 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
2946 tmp_pac = Package(tmpdir)
2949 for file in removed_files:
2950 diff.append(diff_hdr % file)
2952 tmp_pac.updatefile(file, tmp_pac.rev)
2953 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2954 wc.rev, file, tmp_pac.storedir, file))
2956 cmp_pac.updatefile(file, revision)
2957 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2958 revision, file, cmp_pac.storedir, file))
2962 delete_dir(cmp_pac.absdir)
2964 delete_dir(tmp_pac.absdir)
2968 def server_diff(apiurl,
2969 old_project, old_package, old_revision,
2970 new_project, new_package, new_revision, unified=False):
2972 # we default to the baserev here. an option to try to use the current base rev might be nice
2973 query = {'cmd': 'diff', 'expand': '1', 'linkrev': 'base', 'olinkrev': 'base'}
2975 query['oproject'] = old_project
2977 query['opackage'] = old_package
2979 query['orev'] = old_revision
2981 query['rev'] = new_revision
2983 query['unified'] = 1
2985 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
2991 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
2993 creates the plain directory structure for a package dir.
2994 The 'apiurl' parameter is needed for the project dir initialization.
2995 The 'project' and 'package' parameters specify the name of the
2996 project and the package. The optional 'pathname' parameter is used
2997 for printing out the message that a new dir was created (default: 'prj_dir/package').
2998 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3000 prj_dir = prj_dir or project
3002     # FIXME: carefully test each path component of prj_dir,
3003 # if we have a .osc/_files entry at that level.
3004 # -> if so, we have a package/project clash,
3005 # and should rename this path component by appending '.proj'
3006     # and give the user a warning message, to discourage such clashes
3008 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3009 if is_package_dir(prj_dir):
3010 # we want this to become a project directory,
3011 # but it already is a package directory.
3012 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3014 if not is_project_dir(prj_dir):
3015         # this directory could exist as a parent directory for one of our earlier
3016         # checked out sub-projects. In this case, we still need to initialize it.
3017 print statfrmt('A', prj_dir)
3018 init_project_dir(apiurl, prj_dir, project)
3020 if is_project_dir(os.path.join(prj_dir, package)):
3021 # the thing exists, but is a project directory and not a package directory
3022 # FIXME: this should be a warning message to discourage package/project clashes
3023 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3025 if not os.path.exists(os.path.join(prj_dir, package)):
3026 print statfrmt('A', pathname)
3027 os.mkdir(os.path.join(prj_dir, package))
3028 os.mkdir(os.path.join(prj_dir, package, store))
3030 return(os.path.join(prj_dir, package))
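# Hedged usage sketch (all names are placeholders); creates the project and
# package directories plus the metadata store and returns the package path:
#
#   pac_dir = make_dir(conf.config['apiurl'], 'home:user', 'hello')
#   # pac_dir == os.path.join('home:user', 'hello')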
3033 def checkout_package(apiurl, project, package,
3034 revision=None, pathname=None, prj_obj=None,
3035 expand_link=False, prj_dir=None, service_files=None):
3037 # the project we're in might be deleted.
3038 # that'll throw an error then.
3039 olddir = os.getcwd()
3041 olddir = os.environ.get("PWD")
3046 if sys.platform[:3] == 'win':
3047 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3049 if conf.config['checkout_no_colon']:
3050 prj_dir = prj_dir.replace(':', '/')
3053 pathname = getTransActPath(os.path.join(prj_dir, package))
3055 # before we create directories and stuff, check if the package actually
3057 show_package_meta(apiurl, project, package)
3061 # try to read from the linkinfo
3062 # if it is a link we use the xsrcmd5 as the revision to be
3065 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3067 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3072 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3073 init_package_dir(apiurl, project, package, store, revision)
3075 p = Package(package)
3078 for filename in p.filenamelist:
3079 if service_files or not filename.startswith('_service:'):
3080 p.updatefile(filename, revision)
3081 # print 'A ', os.path.join(project, package, filename)
3082 print statfrmt('A', os.path.join(pathname, filename))
3083 if conf.config['do_package_tracking']:
3084 # check if we can re-use an existing project object
3086 prj_obj = Project(os.getcwd())
3087 prj_obj.set_state(p.name, ' ')
3088 prj_obj.write_packages()
3092 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3093 dst_userid = None, keep_develproject = False):
3095     update pkgmeta with new_name and new_prj and set the calling user as the
3096 only maintainer (unless keep_maintainers is set). Additionally remove the
3097 develproject entry (<devel />) unless keep_develproject is true.
3099 root = ET.fromstring(''.join(pkgmeta))
3100 root.set('name', new_name)
3101 root.set('project', new_prj)
3102 if not keep_maintainers:
3103 for person in root.findall('person'):
3105 if not keep_develproject:
3106 for dp in root.findall('devel'):
3108 return ET.tostring(root)
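# Hedged usage sketch: take an existing package meta and rewrite it for a new
# name/project before uploading it (all names are placeholders):
#
#   m = show_package_meta(conf.config['apiurl'], 'home:user', 'hello')
#   new_meta = replace_pkg_meta(m, 'hello-fork', 'home:user:forks')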
3110 def link_to_branch(apiurl, project, package):
3112 convert a package with a _link + project.diff to a branch
3115 if '_link' in meta_get_filelist(conf.config['apiurl'], project, package):
3116 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3119 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3121 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3123 create a linked package
3124 - "src" is the original package
3125 - "dst" is the "link" package that we are creating here
3130 dst_meta = meta_exists(metatype='pkg',
3131 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3133 create_new=False, apiurl=conf.config['apiurl'])
3135 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3136 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3141 root = ET.fromstring(''.join(dst_meta))
3142 elm = root.find('publish')
3144 elm = ET.SubElement(root, 'publish')
3146 ET.SubElement(elm, 'disable')
3147 dst_meta = ET.tostring(root)
3150 path_args=(dst_project, dst_package),
3152 # create the _link file
3153 # but first, make sure not to overwrite an existing one
3154 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3156 print >>sys.stderr, 'forced overwrite of existing _link file'
3159 print >>sys.stderr, '_link file already exists...! Aborting'
3163 rev = 'rev="%s"' % rev
3168 cicount = 'cicount="%s"' % cicount
3172 print 'Creating _link...',
3173 link_template = """\
3174 <link project="%s" package="%s" %s %s>
3176 <!-- <apply name="patch" /> apply a patch on the source directory -->
3177 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3178 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3179 <!-- <delete>filename</delete> delete a file -->
3182 """ % (src_project, src_package, rev, cicount)
3184 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3185 http_PUT(u, data=link_template)
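# Hedged usage sketch (all names are placeholders): create a destination
# package whose _link points back at the source package:
#
#   link_pac('openSUSE:Factory', 'hello', 'home:user', 'hello', force=False)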
3188 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3191 - "src" is the original package
3192 - "dst" is the "aggregate" package that we are creating here
3193 - "map" is a dictionary SRC => TARGET repository mappings
3198 dst_meta = meta_exists(metatype='pkg',
3199 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3201 create_new=False, apiurl=conf.config['apiurl'])
3203 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3204 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3209 root = ET.fromstring(''.join(dst_meta))
3210 elm = root.find('publish')
3212 elm = ET.SubElement(root, 'publish')
3214 ET.SubElement(elm, 'disable')
3215 dst_meta = ET.tostring(root)
3218 path_args=(dst_project, dst_package),
3221 # create the _aggregate file
3222 # but first, make sure not to overwrite an existing one
3223 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3225 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3228 print 'Creating _aggregate...',
3229 aggregate_template = """\
3231 <aggregate project="%s">
3233 for tgt, src in repo_map.iteritems():
3234 aggregate_template += """\
3235 <repository target="%s" source="%s" />
3238 aggregate_template += """\
3239 <package>%s</package>
3242 """ % ( src_package)
3244 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3245 http_PUT(u, data=aggregate_template)
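# Hedged usage sketch (all names are placeholders): aggregate the binaries of
# the source package into the destination package; repo_map may optionally
# map repository names as described in the docstring above:
#
#   aggregate_pac('openSUSE:Factory', 'hello', 'home:user', 'hello')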
3249 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3251 Branch packages defined via attributes (via API call)
3253 query = { 'cmd': 'branch' }
3254 query['attribute'] = attribute
3256 query['target_project'] = targetproject
3258 query['package'] = package
3259 if maintained_update_project_attribute:
3260 query['update_project_attribute'] = maintained_update_project_attribute
3262 u = makeurl(apiurl, ['source'], query=query)
3266 except urllib2.HTTPError, e:
3267 msg = ''.join(e.readlines())
3268 msg = msg.split('<summary>')[1]
3269 msg = msg.split('</summary>')[0]
3270 m = re.match(r"attribute branch call failed: (\S+)/", msg)
3274 r = r.split('targetproject">')[1]
3275 r = r.split('</data>')[0]
3279 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False):
3281 Branch a package (via API call)
3283 query = { 'cmd': 'branch' }
3285 query['ignoredevel'] = '1'
3289 query['target_project'] = target_project
3291 query['target_package'] = target_package
3292 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
3295 except urllib2.HTTPError, e:
3296 if not return_existing:
3298 msg = ''.join(e.readlines())
3299 msg = msg.split('<summary>')[1]
3300 msg = msg.split('</summary>')[0]
3301 m = re.match(r"branch target package already exists: (\S+)/", msg)
3305 return (None, m.group(1))
3308 r = r.split('targetproject">')[1]
3309 r = r.split('</data>')[0]
3314 def copy_pac(src_apiurl, src_project, src_package,
3315 dst_apiurl, dst_project, dst_package,
3316 client_side_copy = False,
3317 keep_maintainers = False,
3318 keep_develproject = False,
3323 Create a copy of a package.
3325     Copying can be done by downloading the files from one package and committing
3326     them into the other by uploading them (client-side copy) --
3327 or by the server, in a single api call.
3330 src_meta = show_package_meta(src_apiurl, src_project, src_package)
3331 dst_userid = conf.get_apiurl_usr(dst_apiurl)
3332 src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
3333 dst_userid, keep_develproject)
3335 print 'Sending meta data...'
3336 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
3337 http_PUT(u, data=src_meta)
3339 print 'Copying files...'
3340 if not client_side_copy:
3341 query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
3343 query['expand'] = '1'
3345 query['orev'] = revision
3347 query['comment'] = comment
3348 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
3353 # copy one file after the other
3355 tmpdir = tempfile.mkdtemp(prefix='osc_copypac')
3357 query = {'rev': 'upload'}
3358 for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand):
3360 get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n, revision=revision)
3361 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, pathname2url(n)], query=query)
3362 http_PUT(u, file = n)
3365 query['comment'] = comment
3366 query['cmd'] = 'commit'
3367 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
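# Hedged usage sketch: a server-side copy between two projects on the same
# API server (all names are placeholders):
#
#   copy_pac(conf.config['apiurl'], 'home:user', 'hello',
#            conf.config['apiurl'], 'home:user:staging', 'hello')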
3373 def delete_package(apiurl, prj, pac):
3374 u = makeurl(apiurl, ['source', prj, pac])
3378 def delete_project(apiurl, prj):
3379 u = makeurl(apiurl, ['source', prj])