# Copyright (C) 2006 Novell Inc. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or version 3 (at your option).

__version__ = '0.125git'

# __store_version__ is to be incremented when the format of the working copy
# "store" changes in an incompatible way. Please add any needed migration
# functionality to check_store_version().
__store_version__ = '1.0'

import os, sys, re, fnmatch, glob, shutil, subprocess, tempfile, time, socket, mmap
import urllib2
import conf, oscerr   # osc-internal modules used throughout this file
from urllib import pathname2url, quote_plus, urlencode, unquote
from urlparse import urlsplit, urlunsplit
from cStringIO import StringIO

try:
    from xml.etree import cElementTree as ET
except ImportError:
    import cElementTree as ET

store = '.osc'   # name of the working copy metadata directory
35 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
36 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
40 # NOTE: do not use this anymore, use conf.exclude_glob instead.
# but it needs to stay to avoid breaking tools which use the osc lib
42 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
44 new_project_templ = """\
45 <project name="%(name)s">
47 <title></title> <!-- Short title of NewProject -->
49 <!-- This is for a longer description of the purpose of the project -->
52 <person role="maintainer" userid="%(user)s" />
53 <person role="bugowner" userid="%(user)s" />
54 <!-- remove this block to publish your packages on the mirrors -->
65 <!-- remove this comment to enable one or more build targets
67 <repository name="openSUSE_Factory">
68 <path project="openSUSE:Factory" repository="standard" />
72 <repository name="openSUSE_11.2">
73 <path project="openSUSE:11.2" repository="standard"/>
77 <repository name="openSUSE_11.1">
78 <path project="openSUSE:11.1" repository="standard"/>
82 <repository name="openSUSE_11.0">
83 <path project="openSUSE:11.0" repository="standard"/>
87 <repository name="Fedora_11">
88 <path project="Fedora:11" repository="standard" />
92 <repository name="SLE_11">
93 <path project="SUSE:SLE-11" repository="standard" />
97 <repository name="SLE_10">
98 <path project="SUSE:SLE-10:SDK" repository="standard" />
107 new_package_templ = """\
108 <package name="%(name)s">
110 <title></title> <!-- Title of package -->
113 <!-- for long description -->
116 <person role="maintainer" userid="%(user)s"/>
117 <person role="bugowner" userid="%(user)s"/>
119 <url>PUT_UPSTREAM_URL_HERE</url>
123 use one of the examples below to disable building of this package
124 on a certain architecture, in a certain repository,
125 or a combination thereof:
127 <disable arch="x86_64"/>
128 <disable repository="SUSE_SLE-10"/>
129 <disable repository="SUSE_SLE-10" arch="x86_64"/>
131 Possible sections where you can use the tags above:
141 Please have a look at:
142 http://en.opensuse.org/Restricted_Formats
143 Packages containing formats listed there are NOT allowed to
144 be packaged in the openSUSE Buildservice and will be deleted!
151 new_attribute_templ = """\
153 <attribute namespace="" name="">
159 new_user_template = """\
161 <login>%(user)s</login>
162 <email>PUT_EMAIL_ADDRESS_HERE</email>
163 <realname>PUT_REAL_NAME_HERE</realname>
165 <project name="home:%(user)s"/>
181 new_pattern_template = """\
182 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
188 buildstatus_symbols = {'succeeded': '.',
190 'expansion error': 'E',
201 # os.path.samefile is available only under Unix
202 def os_path_samefile(path1, path2):
204 return os.path.samefile(path1, path2)
206 return os.path.realpath(path1) == os.path.realpath(path2)
209 """represent a file, including its metadata"""
210 def __init__(self, name, md5, size, mtime):
220 """Source service content
223 """creates an empty serviceinfo instance"""
226 def read(self, serviceinfo_node):
227 """read in the source services <services> element passed as
        if serviceinfo_node is None:
233 services = serviceinfo_node.findall('service')
235 for service in services:
236 name = service.get('name')
238 for param in service.findall('param'):
239 option = param.get('name', None)
241 name += " --" + option + " '" + value + "'"
242 self.commands.append(name)
            msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
245 raise oscerr.APIError(msg)
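    # An illustrative sketch (not from this file) of the <services> element
    # that read() expects; the service and param names below are made up:
    #
    #   <services>
    #     <service name="my_service">
    #       <param name="outfile">result.tar.gz</param>
    #     </service>
    #   </services>
    #
    # read() turns that into the command string
    #   "my_service --outfile 'result.tar.gz'"
    # which execute() later runs from /usr/lib/obs/service/.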
247 def execute(self, dir):
250 for call in self.commands:
251 temp_dir = tempfile.mkdtemp()
252 name = call.split(None, 1)[0]
253 if not os.path.exists("/usr/lib/obs/service/"+name):
                msg = "ERROR: service is not installed!\n"
                msg += "Maybe it can be fixed with: zypper in obs-server-" + name
256 raise oscerr.APIError(msg)
257 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
258 ret = subprocess.call(c, shell=True)
260 print "ERROR: service call failed: " + c
262 for file in os.listdir(temp_dir):
263 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
267 """linkinfo metadata (which is part of the xml representing a directory
270 """creates an empty linkinfo instance"""
280 def read(self, linkinfo_node):
281 """read in the linkinfo metadata from the <linkinfo> element passed as
283 If the passed element is None, the method does nothing.
        if linkinfo_node is None:
287 self.project = linkinfo_node.get('project')
288 self.package = linkinfo_node.get('package')
289 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
290 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
291 self.srcmd5 = linkinfo_node.get('srcmd5')
292 self.error = linkinfo_node.get('error')
293 self.rev = linkinfo_node.get('rev')
294 self.baserev = linkinfo_node.get('baserev')
297 """returns True if the linkinfo is not empty, otherwise False"""
298 if self.xsrcmd5 or self.lsrcmd5:
302 def isexpanded(self):
303 """returns True if the package is an expanded link"""
304 if self.lsrcmd5 and not self.xsrcmd5:
309 """returns True if the link is in error state (could not be applied)"""
        """return an informative string representation"""
316 if self.islink() and not self.isexpanded():
317 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
318 % (self.project, self.package, self.xsrcmd5, self.rev)
319 elif self.islink() and self.isexpanded():
321 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
322 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
324 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
325 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
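    # An illustrative sketch (not from this file) of the <linkinfo> element
    # that read() consumes; the project/package names are made up:
    #
    #   <linkinfo project="openSUSE:Factory" package="osc"
    #             srcmd5="..." xsrcmd5="..." rev="5"/>
    #
    # Only xsrcmd5 set -> islink() and not isexpanded();
    # only lsrcmd5 set -> islink() and isexpanded().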
331 """represent a project directory, holding packages"""
332 def __init__(self, dir, getPackageList=True, progress_obj=None):
335 self.absdir = os.path.abspath(dir)
336 self.progress_obj = progress_obj
338 self.name = store_read_project(self.dir)
339 self.apiurl = store_read_apiurl(self.dir)
342 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
344 self.pacs_available = []
346 if conf.config['do_package_tracking']:
347 self.pac_root = self.read_packages().getroot()
348 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
349 self.pacs_excluded = [ i for i in os.listdir(self.dir)
350 for j in conf.config['exclude_glob']
351 if fnmatch.fnmatch(i, j) ]
352 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
            # store all broken packages (e.g. packages which were removed by a non-osc cmd)
            # in the self.pacs_broken list
355 self.pacs_broken = []
356 for p in self.pacs_have:
357 if not os.path.isdir(os.path.join(self.absdir, p)):
                    # all states will be replaced with the '!'-state
                    # (unless the package is already marked as deleted ('D'-state))
360 self.pacs_broken.append(p)
362 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
364 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
366 def checkout_missing_pacs(self, expand_link=False):
367 for pac in self.pacs_missing:
369 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
370 # pac is not under version control but a local file/dir exists
371 msg = 'can\'t add package \'%s\': Object already exists' % pac
372 raise oscerr.PackageExists(self.name, pac, msg)
374 print 'checking out new package %s' % pac
375 checkout_package(self.apiurl, self.name, pac, \
376 pathname=getTransActPath(os.path.join(self.dir, pac)), \
377 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
379 def set_state(self, pac, state):
380 node = self.get_package_node(pac)
382 self.new_package_entry(pac, state)
384 node.attrib['state'] = state
386 def get_package_node(self, pac):
387 for node in self.pac_root.findall('package'):
388 if pac == node.get('name'):
392 def del_package_node(self, pac):
393 for node in self.pac_root.findall('package'):
394 if pac == node.get('name'):
395 self.pac_root.remove(node)
397 def get_state(self, pac):
398 node = self.get_package_node(pac)
400 return node.get('state')
404 def new_package_entry(self, name, state):
405 ET.SubElement(self.pac_root, 'package', name=name, state=state)
407 def read_packages(self):
408 packages_file = os.path.join(self.absdir, store, '_packages')
409 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
410 return ET.parse(packages_file)
412 # scan project for existing packages and migrate them
414 for data in os.listdir(self.dir):
415 pac_dir = os.path.join(self.absdir, data)
416 # we cannot use self.pacs_available because we cannot guarantee that the package list
417 # was fetched from the server
418 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
419 and Package(pac_dir).name == data:
420 cur_pacs.append(ET.Element('package', name=data, state=' '))
421 store_write_initial_packages(self.absdir, self.name, cur_pacs)
422 return ET.parse(os.path.join(self.absdir, store, '_packages'))
424 def write_packages(self):
425 # TODO: should we only modify the existing file instead of overwriting?
426 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
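    # An illustrative sketch (an assumption, not taken verbatim from this
    # file) of the .osc/_packages file that read_packages()/write_packages()
    # maintain:
    #
    #   <project name="home:user">
    #     <package name="osc" state=" "/>
    #     <package name="newpkg" state="A"/>
    #   </project>
    #
    # The single-letter state is the one handled by get_state()/set_state()
    # ('A' = added, 'D' = deleted, ' ' = unmodified).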
428 def addPackage(self, pac):
430 for i in conf.config['exclude_glob']:
431 if fnmatch.fnmatch(pac, i):
432 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
433 raise oscerr.OscIOError(None, msg)
434 state = self.get_state(pac)
        if state is None or state == 'D':
436 self.new_package_entry(pac, 'A')
437 self.write_packages()
438 # sometimes the new pac doesn't exist in the list because
439 # it would take too much time to update all data structs regularly
440 if pac in self.pacs_unvers:
441 self.pacs_unvers.remove(pac)
443 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
445 def delPackage(self, pac, force = False):
446 state = self.get_state(pac.name)
448 if state == ' ' or state == 'D':
450 for file in pac.filenamelist + pac.filenamelist_unvers:
451 filestate = pac.status(file)
452 if filestate == 'M' or filestate == 'C' or \
453 filestate == 'A' or filestate == '?':
456 del_files.append(file)
457 if can_delete or force:
458 for file in del_files:
459 pac.delete_localfile(file)
460 if pac.status(file) != '?':
461 pac.delete_storefile(file)
462 # this is not really necessary
463 pac.put_on_deletelist(file)
464 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
465 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
466 pac.write_deletelist()
467 self.set_state(pac.name, 'D')
468 self.write_packages()
470 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
473 delete_dir(pac.absdir)
474 self.del_package_node(pac.name)
475 self.write_packages()
476 print statfrmt('D', pac.name)
478 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
480 print 'package is not under version control'
482 print 'unsupported state'
484 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
                Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
489 # we need to make sure that the _packages file will be written (even if an exception
492 # update complete project
            # packages which no longer exist upstream
            upstream_del = [ pac for pac in self.pacs_have if pac not in self.pacs_available and self.get_state(pac) != 'A']
496 for pac in upstream_del:
497 p = Package(os.path.join(self.dir, pac))
498 self.delPackage(p, force = True)
499 delete_storedir(p.storedir)
504 self.pac_root.remove(self.get_package_node(p.name))
505 self.pacs_have.remove(pac)
507 for pac in self.pacs_have:
508 state = self.get_state(pac)
509 if pac in self.pacs_broken:
510 if self.get_state(pac) != 'A':
511 checkout_package(self.apiurl, self.name, pac,
512 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
513 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
516 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
518 if expand_link and p.islink() and not p.isexpanded():
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
523 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
526 rev = p.linkinfo.xsrcmd5
527 print 'Expanding to rev', rev
528 elif unexpand_link and p.islink() and p.isexpanded():
529 rev = p.linkinfo.lsrcmd5
530 print 'Unexpanding to rev', rev
531 elif p.islink() and p.isexpanded():
533 print 'Updating %s' % p.name
534 p.update(rev, service_files)
                # TODO: Package::update has to be fixed to behave like svn does
539 if pac in self.pacs_broken:
540 checkout_package(self.apiurl, self.name, pac,
541 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
542 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
                    Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
545 elif state == 'A' and pac in self.pacs_available:
546 # file/dir called pac already exists and is under version control
547 msg = 'can\'t add package \'%s\': Object already exists' % pac
548 raise oscerr.PackageExists(self.name, pac, msg)
553 print 'unexpected state.. package \'%s\'' % pac
555 self.checkout_missing_pacs(expand_link=not unexpand_link)
557 self.write_packages()
559 def commit(self, pacs = (), msg = '', files = {}):
            if pac in files:
566 state = self.get_state(pac)
568 self.commitNewPackage(pac, msg, todo)
570 self.commitDelPackage(pac)
572 # display the correct dir when sending the changes
573 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
576 p = Package(os.path.join(self.dir, pac))
579 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
580 print 'osc: \'%s\' is not under version control' % pac
581 elif pac in self.pacs_broken:
582 print 'osc: \'%s\' package not found' % pac
584 self.commitExtPackage(pac, msg, todo)
586 self.write_packages()
588 # if we have packages marked as '!' we cannot commit
589 for pac in self.pacs_broken:
590 if self.get_state(pac) != 'D':
591 msg = 'commit failed: package \'%s\' is missing' % pac
592 raise oscerr.PackageMissing(self.name, pac, msg)
594 for pac in self.pacs_have:
595 state = self.get_state(pac)
598 Package(os.path.join(self.dir, pac)).commit(msg)
600 self.commitDelPackage(pac)
602 self.commitNewPackage(pac, msg)
604 self.write_packages()
606 def commitNewPackage(self, pac, msg = '', files = []):
607 """creates and commits a new package if it does not exist on the server"""
608 if pac in self.pacs_available:
609 print 'package \'%s\' already exists' % pac
611 user = conf.get_apiurl_usr(self.apiurl)
612 edit_meta(metatype='pkg',
613 path_args=(quote_plus(self.name), quote_plus(pac)),
618 # display the correct dir when sending the changes
620 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
624 p = Package(os.path.join(self.dir, pac))
626 print statfrmt('Sending', os.path.normpath(p.dir))
628 self.set_state(pac, ' ')
631 def commitDelPackage(self, pac):
632 """deletes a package on the server and in the working copy"""
634 # display the correct dir when sending the changes
635 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
638 pac_dir = os.path.join(self.dir, pac)
639 p = Package(os.path.join(self.dir, pac))
640 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
641 delete_storedir(p.storedir)
647 pac_dir = os.path.join(self.dir, pac)
648 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
649 print statfrmt('Deleting', getTransActPath(pac_dir))
650 delete_package(self.apiurl, self.name, pac)
651 self.del_package_node(pac)
653 def commitExtPackage(self, pac, msg, files = []):
654 """commits a package from an external project"""
655 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
658 pac_path = os.path.join(self.dir, pac)
660 project = store_read_project(pac_path)
661 package = store_read_package(pac_path)
662 apiurl = store_read_apiurl(pac_path)
663 if meta_exists(metatype='pkg',
664 path_args=(quote_plus(project), quote_plus(package)),
666 create_new=False, apiurl=apiurl):
667 p = Package(pac_path)
671 user = conf.get_apiurl_usr(self.apiurl)
672 edit_meta(metatype='pkg',
673 path_args=(quote_plus(project), quote_plus(package)),
678 p = Package(pac_path)
684 r.append('*****************************************************')
685 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
686 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
687 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
688 r.append('*****************************************************')
694 """represent a package (its directory) and read/keep/write its metadata"""
695 def __init__(self, workingdir, progress_obj=None):
696 self.dir = workingdir
697 self.absdir = os.path.abspath(self.dir)
698 self.storedir = os.path.join(self.absdir, store)
699 self.progress_obj = progress_obj
701 check_store_version(self.dir)
703 self.prjname = store_read_project(self.dir)
704 self.name = store_read_package(self.dir)
705 self.apiurl = store_read_apiurl(self.dir)
707 self.update_datastructs()
711 self.todo_delete = []
714 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
715 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
718 def addfile(self, n):
719 st = os.stat(os.path.join(self.dir, n))
720 f = File(n, None, st.st_size, st.st_mtime)
721 self.filelist.append(f)
722 self.filenamelist.append(n)
723 self.filenamelist_unvers.remove(n)
724 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
726 def delete_file(self, n, force=False):
727 """deletes a file if possible and marks the file as deleted"""
728 state = self.status(n)
729 if state in ['?', 'A', 'M'] and not force:
730 return (False, state)
731 self.delete_localfile(n)
733 self.put_on_deletelist(n)
734 self.write_deletelist()
736 self.delete_storefile(n)
739 def delete_storefile(self, n):
740 try: os.unlink(os.path.join(self.storedir, n))
743 def delete_localfile(self, n):
744 try: os.unlink(os.path.join(self.dir, n))
747 def put_on_deletelist(self, n):
748 if n not in self.to_be_deleted:
749 self.to_be_deleted.append(n)
751 def put_on_conflictlist(self, n):
752 if n not in self.in_conflict:
753 self.in_conflict.append(n)
755 def clear_from_conflictlist(self, n):
756 """delete an entry from the file, and remove the file if it would be empty"""
757 if n in self.in_conflict:
759 filename = os.path.join(self.dir, n)
760 storefilename = os.path.join(self.storedir, n)
761 myfilename = os.path.join(self.dir, n + '.mine')
762 if self.islinkrepair() or self.ispulled():
763 upfilename = os.path.join(self.dir, n + '.new')
765 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
768 os.unlink(myfilename)
769 # the working copy may be updated, so the .r* ending may be obsolete...
771 os.unlink(upfilename)
772 if self.islinkrepair() or self.ispulled():
773 os.unlink(os.path.join(self.dir, n + '.old'))
777 self.in_conflict.remove(n)
779 self.write_conflictlist()
781 def write_deletelist(self):
782 if len(self.to_be_deleted) == 0:
784 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
788 fname = os.path.join(self.storedir, '_to_be_deleted')
790 f.write('\n'.join(self.to_be_deleted))
794 def delete_source_file(self, n):
        """delete a local source file"""
796 self.delete_localfile(n)
797 self.delete_storefile(n)
799 def delete_remote_source_file(self, n):
        """delete a remote source file (i.e. on the server)"""
802 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
805 def put_source_file(self, n):
807 # escaping '+' in the URL path (note: not in the URL query string) is
808 # only a workaround for ruby on rails, which swallows it otherwise
810 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
811 http_PUT(u, file = os.path.join(self.dir, n))
813 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
815 def commit(self, msg=''):
816 # commit only if the upstream revision is the same as the working copy's
817 upstream_rev = self.latest_rev()
818 if self.rev != upstream_rev:
819 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
822 self.todo = self.filenamelist_unvers + self.filenamelist
824 pathn = getTransActPath(self.dir)
826 have_conflicts = False
827 for filename in self.todo:
828 if not filename.startswith('_service:') and not filename.startswith('_service_'):
829 st = self.status(filename)
830 if st == 'A' or st == 'M':
831 self.todo_send.append(filename)
832 print statfrmt('Sending', os.path.join(pathn, filename))
834 self.todo_delete.append(filename)
835 print statfrmt('Deleting', os.path.join(pathn, filename))
837 have_conflicts = True
840 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
843 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
844 print 'nothing to do for package %s' % self.name
847 if self.islink() and self.isexpanded():
848 # resolve the link into the upload revision
849 # XXX: do this always?
850 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
851 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
854 print 'Transmitting file data ',
856 for filename in self.todo_delete:
857 # do not touch local files on commit --
858 # delete remotely instead
859 self.delete_remote_source_file(filename)
860 self.to_be_deleted.remove(filename)
861 for filename in self.todo_send:
862 sys.stdout.write('.')
864 self.put_source_file(filename)
866 # all source files are committed - now comes the log
867 query = { 'cmd' : 'commit',
869 'user' : conf.get_apiurl_usr(self.apiurl),
871 if self.islink() and self.isexpanded():
872 query['keeplink'] = '1'
873 if conf.config['linkcontrol'] or self.isfrozen():
874 query['linkrev'] = self.linkinfo.srcmd5
876 query['repairlink'] = '1'
877 query['linkrev'] = self.get_pulled_srcmd5()
878 if self.islinkrepair():
879 query['repairlink'] = '1'
880 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
882 except urllib2.HTTPError, e:
883 # delete upload revision
885 query = { 'cmd': 'deleteuploadrev' }
886 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
892 root = ET.parse(f).getroot()
893 self.rev = int(root.get('rev'))
895 print 'Committed revision %s.' % self.rev
898 os.unlink(os.path.join(self.storedir, '_pulled'))
899 if self.islinkrepair():
900 os.unlink(os.path.join(self.storedir, '_linkrepair'))
901 self.linkrepair = False
902 # XXX: mark package as invalid?
903 print 'The source link has been repaired. This directory can now be removed.'
904 if self.islink() and self.isexpanded():
905 self.update_local_filesmeta(revision=self.latest_rev())
907 self.update_local_filesmeta()
908 self.write_deletelist()
909 self.update_datastructs()
911 if self.filenamelist.count('_service'):
912 print 'The package contains a source service.'
913 for filename in self.todo:
914 if filename.startswith('_service:') and os.path.exists(filename):
915 os.unlink(filename) # remove local files
916 print_request_list(self.apiurl, self.prjname, self.name)
918 def write_conflictlist(self):
919 if len(self.in_conflict) == 0:
921 os.unlink(os.path.join(self.storedir, '_in_conflict'))
925 fname = os.path.join(self.storedir, '_in_conflict')
927 f.write('\n'.join(self.in_conflict))
931 def updatefile(self, n, revision):
932 filename = os.path.join(self.dir, n)
933 storefilename = os.path.join(self.storedir, n)
934 mtime = self.findfilebyname(n).mtime
936 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
937 os.utime(filename, (-1, mtime))
939 shutil.copyfile(filename, storefilename)
941 def mergefile(self, n):
942 filename = os.path.join(self.dir, n)
943 storefilename = os.path.join(self.storedir, n)
944 myfilename = os.path.join(self.dir, n + '.mine')
945 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
946 os.rename(filename, myfilename)
948 mtime = self.findfilebyname(n).mtime
949 get_source_file(self.apiurl, self.prjname, self.name, n,
950 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
951 os.utime(upfilename, (-1, mtime))
953 if binary_file(myfilename) or binary_file(upfilename):
955 shutil.copyfile(upfilename, filename)
956 shutil.copyfile(upfilename, storefilename)
957 self.in_conflict.append(n)
958 self.write_conflictlist()
962 # diff3 OPTIONS... MINE OLDER YOURS
963 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
            # we would rather use the subprocess module, but it is not available before 2.4
965 ret = subprocess.call(merge_cmd, shell=True)
967 # "An exit status of 0 means `diff3' was successful, 1 means some
968 # conflicts were found, and 2 means trouble."
970 # merge was successful... clean up
971 shutil.copyfile(upfilename, storefilename)
972 os.unlink(upfilename)
973 os.unlink(myfilename)
977 shutil.copyfile(upfilename, storefilename)
978 self.in_conflict.append(n)
979 self.write_conflictlist()
982 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
983 print >>sys.stderr, 'the command line was:'
984 print >>sys.stderr, merge_cmd
989 def update_local_filesmeta(self, revision=None):
991 Update the local _files file in the store.
992 It is replaced with the version pulled from upstream.
994 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
996 f = open(os.path.join(self.storedir, '_files.new'), 'w')
999 os.rename(os.path.join(self.storedir, '_files.new'), os.path.join(self.storedir, '_files'))
1001 if os.path.exists(os.path.join(self.storedir, '_files.new')):
1002 os.unlink(os.path.join(self.storedir, '_files.new'))
1005 def update_datastructs(self):
1007 Update the internal data structures if the local _files
1008 file has changed (e.g. update_local_filesmeta() has been
1012 files_tree = read_filemeta(self.dir)
1013 files_tree_root = files_tree.getroot()
1015 self.rev = files_tree_root.get('rev')
1016 self.srcmd5 = files_tree_root.get('srcmd5')
1018 self.linkinfo = Linkinfo()
1019 self.linkinfo.read(files_tree_root.find('linkinfo'))
1021 self.filenamelist = []
1023 for node in files_tree_root.findall('entry'):
1025 f = File(node.get('name'),
1027 int(node.get('size')),
1028 int(node.get('mtime')))
1030 # okay, a very old version of _files, which didn't contain any metadata yet...
1031 f = File(node.get('name'), '', 0, 0)
1032 self.filelist.append(f)
1033 self.filenamelist.append(f.name)
1035 self.to_be_deleted = read_tobedeleted(self.dir)
1036 self.in_conflict = read_inconflict(self.dir)
1037 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1039 # gather unversioned files, but ignore some stuff
1040 self.excluded = [ i for i in os.listdir(self.dir)
1041 for j in conf.config['exclude_glob']
1042 if fnmatch.fnmatch(i, j) ]
1043 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1044 if i not in self.excluded
1045 if i not in self.filenamelist ]
1048 """tells us if the package is a link (has 'linkinfo').
1049 A package with linkinfo is a package which links to another package.
1050 Returns True if the package is a link, otherwise False."""
1051 return self.linkinfo.islink()
1053 def isexpanded(self):
1054 """tells us if the package is a link which is expanded.
1055 Returns True if the package is expanded, otherwise False."""
1056 return self.linkinfo.isexpanded()
1058 def islinkrepair(self):
1059 """tells us if we are repairing a broken source link."""
1060 return self.linkrepair
1063 """tells us if we have pulled a link."""
1064 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1067 """tells us if the link is frozen."""
1068 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1070 def get_pulled_srcmd5(self):
1072 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1073 pulledrev = line.strip()
1076 def haslinkerror(self):
1078 Returns True if the link is broken otherwise False.
1079 If the package is not a link it returns False.
1081 return self.linkinfo.haserror()
1083 def linkerror(self):
1085 Returns an error message if the link is broken otherwise None.
1086 If the package is not a link it returns None.
1088 return self.linkinfo.error
1090 def update_local_pacmeta(self):
1092 Update the local _meta file in the store.
1093 It is replaced with the version pulled from upstream.
1095 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1096 f = open(os.path.join(self.storedir, '_meta'), 'w')
1100 def findfilebyname(self, n):
1101 for i in self.filelist:
1105 def status(self, n):
         file  storefile  file present  STATUS
        exists   exists     in _files

          x        x            x       ' ' if digest differs: 'M'
                                             and if in conflicts file: 'C'
          x        -            x       'D' and listed in _to_be_deleted
          -        x            -       'D' (when file in working copy is already deleted)
          -        -            x       'F' (new in repo, but not yet in working copy)
1124 known_by_meta = False
1126 exists_in_store = False
1127 if n in self.filenamelist:
1128 known_by_meta = True
1129 if os.path.exists(os.path.join(self.absdir, n)):
1131 if os.path.exists(os.path.join(self.storedir, n)):
1132 exists_in_store = True
1135 if exists and not exists_in_store and known_by_meta:
1137 elif n in self.to_be_deleted:
1139 elif n in self.in_conflict:
1141 elif exists and exists_in_store and known_by_meta:
1142 #print self.findfilebyname(n)
1143 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1147 elif exists and not exists_in_store and not known_by_meta:
1149 elif exists and exists_in_store and not known_by_meta:
1151 elif not exists and exists_in_store and known_by_meta:
1153 elif not exists and not exists_in_store and known_by_meta:
1155 elif not exists and exists_in_store and not known_by_meta:
1157 elif not exists and not exists_in_store and not known_by_meta:
        # this case shouldn't happen (unless there was a typo in the filename etc.)
1159 raise IOError('osc: \'%s\' is not under version control' % n)
1163 def comparePac(self, cmp_pac):
1165 This method compares the local filelist with
1166 the filelist of the passed package to see which files
1167 were added, removed and changed.
1174 for file in self.filenamelist+self.filenamelist_unvers:
1175 state = self.status(file)
1176 if state == 'A' and (not file in cmp_pac.filenamelist):
1177 added_files.append(file)
1178 elif file in cmp_pac.filenamelist and state == 'D':
1179 removed_files.append(file)
1180 elif state == ' ' and not file in cmp_pac.filenamelist:
1181 added_files.append(file)
1182 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1183 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1184 changed_files.append(file)
1185 for file in cmp_pac.filenamelist:
1186 if not file in self.filenamelist:
1187 removed_files.append(file)
1188 removed_files = set(removed_files)
1190 return changed_files, added_files, removed_files
1192 def merge(self, otherpac):
1193 self.todo += otherpac.todo
1207 '\n '.join(self.filenamelist),
1215 def read_meta_from_spec(self, spec = None):
1220 # scan for spec files
1221 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1222 if len(speclist) == 1:
1223 specfile = speclist[0]
1224 elif len(speclist) > 1:
1225 print 'the following specfiles were found:'
1226 for file in speclist:
1228 print 'please specify one with --specfile'
1231 print 'no specfile was found - please specify one ' \
1235 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1236 self.summary = data['Summary']
1237 self.url = data['Url']
1238 self.descr = data['%description']
1241 def update_package_meta(self, force=False):
        for the updatepacmetafromspec subcommand
        the argument force suppresses the confirmation question
1247 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1249 root = ET.fromstring(m)
1250 root.find('title').text = self.summary
1251 root.find('description').text = ''.join(self.descr)
1252 url = root.find('url')
1254 url = ET.SubElement(root, 'url')
1257 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1258 mf = metafile(u, ET.tostring(root))
1261 print '*' * 36, 'old', '*' * 36
1263 print '*' * 36, 'new', '*' * 36
1264 print ET.tostring(root)
1266 repl = raw_input('Write? (y/N/e) ')
1277 def mark_frozen(self):
1278 store_write_string(self.absdir, '_frozenlink', '')
1280 print "The link in this package is currently broken. I have checked"
        print "out the last working version instead; please use 'osc pull'"
1282 print "to repair the link."
1285 def unmark_frozen(self):
1286 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1287 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1289 def latest_rev(self):
1290 if self.islinkrepair():
1291 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1292 elif self.islink() and self.isexpanded():
1293 if self.isfrozen() or self.ispulled():
1294 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1297 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1300 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1302 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1305 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1308 def update(self, rev = None, service_files = False):
1309 # save filelist and (modified) status before replacing the meta file
1310 saved_filenames = self.filenamelist
1311 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1314 self.update_local_filesmeta(rev)
1315 self = Package(self.dir, progress_obj=self.progress_obj)
        # which files no longer exist upstream?
1318 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1320 pathn = getTransActPath(self.dir)
1322 for filename in saved_filenames:
1323 if not filename.startswith('_service:') and filename in disappeared:
1324 print statfrmt('D', os.path.join(pathn, filename))
1325 # keep file if it has local modifications
1326 if oldp.status(filename) == ' ':
1327 self.delete_localfile(filename)
1328 self.delete_storefile(filename)
1330 for filename in self.filenamelist:
1332 state = self.status(filename)
1333 if not service_files and filename.startswith('_service:'):
1335 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1336 # no merge necessary... local file is changed, but upstream isn't
1338 elif state == 'M' and filename in saved_modifiedfiles:
1339 status_after_merge = self.mergefile(filename)
1340 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1342 self.updatefile(filename, rev)
1343 print statfrmt('U', os.path.join(pathn, filename))
1345 self.updatefile(filename, rev)
1346 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1348 self.updatefile(filename, rev)
1349 print statfrmt('A', os.path.join(pathn, filename))
1350 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1351 self.updatefile(filename, rev)
1352 self.delete_storefile(filename)
1353 print statfrmt('U', os.path.join(pathn, filename))
1357 self.update_local_pacmeta()
1359 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1360 print 'At revision %s.' % self.rev
1362 if not service_files:
1363 self.run_source_services()
1365 def run_source_services(self):
1366 if self.filenamelist.count('_service'):
1367 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1370 si.execute(self.absdir)
1372 def prepare_filelist(self):
        """Prepare a list of files which will be processed by the process_filelist
        method. This allows easy modification of a file list in the commit
1378 self.todo = self.filenamelist + self.filenamelist_unvers
1382 for f in (f for f in self.todo if not os.path.isdir(f)):
1384 status = self.status(f)
1387 ret += "%s %s %s\n" % (action, status, f)
1390 # Edit a filelist for package %s
1392 # l, leave = leave a file as is
1393 # r, remove = remove a file
1394 # a, add = add a file
# If you remove a file from the list, it will be unchanged
1397 # If you remove all, commit will be aborted"""
1401 def edit_filelist(self):
        """Opens a package list in an editor for editing. This allows easy
        modification of it just by simple text editing
1407 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1408 f = os.fdopen(fd, 'w')
1409 f.write(self.prepare_filelist())
1411 mtime_orig = os.stat(filename).st_mtime
1413 if sys.platform[:3] != 'win':
1414 editor = os.getenv('EDITOR', default='vim')
1416 editor = os.getenv('EDITOR', default='notepad')
1418 subprocess.call('%s %s' % (editor, filename), shell=True)
1419 mtime = os.stat(filename).st_mtime
1420 if mtime_orig < mtime:
1421 filelist = open(filename).readlines()
1425 raise oscerr.UserAbort()
1427 return self.process_filelist(filelist)
1429 def process_filelist(self, filelist):
        """Process a filelist - it adds/removes files or leaves them as they are,
        depending on user input. If no file is processed, it raises a ValueError
        for line in (l.strip() for l in filelist if l.strip() and not l.strip().startswith('#')):
1437 foo = line.split(' ')
1439 action, state, name = (foo[0], ' ', foo[3])
1441 action, state, name = (foo[0], foo[1], foo[2])
1444 action = action.lower()
1447 if action in ('r', 'remove'):
1448 if self.status(name) == '?':
1450 if name in self.todo:
1451 self.todo.remove(name)
1453 self.delete_file(name, True)
1454 elif action in ('a', 'add'):
1455 if self.status(name) != '?':
1456 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1459 elif action in ('l', 'leave'):
                raise ValueError("Unknown action `%s'" % action)
1465 raise ValueError("Empty filelist")
1468 """for objects to represent the review state in a request"""
1469 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1471 self.by_user = by_user
1472 self.by_group = by_group
1475 self.comment = comment
1478 """for objects to represent the "state" of a request"""
1479 def __init__(self, name=None, who=None, when=None, comment=None):
1483 self.comment = comment
1486 """represents an action"""
1487 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1489 self.src_project = src_project
1490 self.src_package = src_package
1491 self.src_rev = src_rev
1492 self.dst_project = dst_project
1493 self.dst_package = dst_package
1494 self.src_update = src_update
    """represents a request and holds its metadata;
    it has methods to read in metadata from xml and
    to render different views, ..."""
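    # An illustrative sketch (not from this file) of the request XML that
    # read() parses; the ids, users and project/package names are made up:
    #
    #   <request id="1234">
    #     <action type="submit">
    #       <source project="home:user" package="foo" rev="7"/>
    #       <target project="openSUSE:Factory" package="foo"/>
    #       <options><sourceupdate>cleanup</sourceupdate></options>
    #     </action>
    #     <state name="new" who="user" when="2009-01-01T12:00:00">
    #       <comment>please review</comment>
    #     </state>
    #   </request>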
1502 self.state = RequestState()
1505 self.last_author = None
1508 self.statehistory = []
1511 def read(self, root):
1512 self.reqid = int(root.get('id'))
1513 actions = root.findall('action')
1514 if len(actions) == 0:
1515 actions = [ root.find('submit') ] # for old style requests
1517 for action in actions:
1518 type = action.get('type', 'submit')
1520 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1521 if action.findall('source'):
1522 n = action.find('source')
1523 src_prj = n.get('project', None)
1524 src_pkg = n.get('package', None)
1525 src_rev = n.get('rev', None)
1526 if action.findall('target'):
1527 n = action.find('target')
1528 dst_prj = n.get('project', None)
1529 dst_pkg = n.get('package', None)
1530 if action.findall('options'):
1531 n = action.find('options')
1532 if n.findall('sourceupdate'):
1533 src_update = n.find('sourceupdate').text.strip()
1534 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1536 msg = 'invalid request format:\n%s' % ET.tostring(root)
1537 raise oscerr.APIError(msg)
1540 n = root.find('state')
1541 self.state.name, self.state.who, self.state.when \
1542 = n.get('name'), n.get('who'), n.get('when')
1544 self.state.comment = n.find('comment').text.strip()
1546 self.state.comment = None
1548 # read the review states
1549 for r in root.findall('review'):
1551 s.state = r.get('state')
1552 s.by_user = r.get('by_user')
1553 s.by_group = r.get('by_group')
1554 s.who = r.get('who')
1555 s.when = r.get('when')
1557 s.comment = r.find('comment').text.strip()
1560 self.reviews.append(s)
1562 # read the state history
1563 for h in root.findall('history'):
1565 s.name = h.get('name')
1566 s.who = h.get('who')
1567 s.when = h.get('when')
1569 s.comment = h.find('comment').text.strip()
1572 self.statehistory.append(s)
1573 self.statehistory.reverse()
1575 # read a description, if it exists
1577 n = root.find('description').text
1582 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
        self.actions.append(Action(type, src_prj, src_pkg, src_rev,
                                   dst_prj, dst_pkg, src_update))
1587 def list_view(self):
1588 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1590 for a in self.actions:
1591 dst = "%s/%s" % (a.dst_project, a.dst_package)
1592 if a.src_package == a.dst_package:
1596 if a.type=="submit":
1597 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1598 if a.type=="change_devel":
1599 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1600 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1602 ret += '\n %s: %-50s %-20s ' % \
1603 (a.type, sr_source, dst)
1605 if self.statehistory and self.statehistory[0]:
1607 for h in self.statehistory:
1608 who.append("%s(%s)" % (h.who,h.name))
1610 ret += "\n From: %s" % (' -> '.join(who))
1612 ret += "\n Descr: %s" % (repr(self.descr))
1617 def __cmp__(self, other):
1618 return cmp(self.reqid, other.reqid)
1622 for action in self.actions:
1623 action_list=" %s: " % (action.type)
1624 if action.type=="submit":
1627 r="(r%s)" % (action.src_rev)
1629 if action.src_update:
1630 m="(%s)" % (action.src_update)
1631 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1632 if action.dst_package:
1633 action_list=action_list+"/%s" % ( action.dst_package )
1634 elif action.type=="delete":
1635 action_list=action_list+" %s" % ( action.dst_project )
1636 if action.dst_package:
1637 action_list=action_list+"/%s" % ( action.dst_package )
1638 elif action.type=="change_devel":
1639 action_list=action_list+" %s/%s developed in %s/%s" % \
1640 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1641 action_list=action_list+"\n"
1656 self.state.name, self.state.when, self.state.who,
1659 if len(self.reviews):
1660 reviewitems = [ '%-10s %s %s %s %s %s' \
1661 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1662 for i in self.reviews ]
1663 s += '\nReview: ' + '\n '.join(reviewitems)
1666 if len(self.statehistory):
1667 histitems = [ '%-10s %s %s' \
1668 % (i.name, i.when, i.who) \
1669 for i in self.statehistory ]
1670 s += '\nHistory: ' + '\n '.join(histitems)
1677 """format time as Apr 02 18:19
1679 depending on whether it is in the current year
1683 if time.localtime()[0] == time.localtime(t)[0]:
1685 return time.strftime('%b %d %H:%M',time.localtime(t))
1687 return time.strftime('%b %d %Y',time.localtime(t))
1690 def is_project_dir(d):
1691 return os.path.exists(os.path.join(d, store, '_project')) and not \
1692 os.path.exists(os.path.join(d, store, '_package'))
1695 def is_package_dir(d):
1696 return os.path.exists(os.path.join(d, store, '_project')) and \
1697 os.path.exists(os.path.join(d, store, '_package'))
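# A short sketch (an assumption based on the two helpers above and the
# store_* functions used in this file): a working copy keeps its metadata in
# the '.osc' subdirectory. A project checkout carries only '_project', while
# a package checkout carries both '_project' and '_package', plus files such
# as '_files', '_osclib_version' and the stored apiurl.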
1699 def parse_disturl(disturl):
    """Parse a disturl; returns a tuple (apiurl, project, source, repository,
    revision), otherwise raises an oscerr.WrongArgs exception
1704 m = DISTURL_RE.match(disturl)
        raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
1708 apiurl = m.group('apiurl')
1709 if apiurl.split('.')[0] != 'api':
1710 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1711 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
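# Illustrative example (the revision is made up) of a disturl and how
# parse_disturl() splits it:
#
#   obs://build.opensuse.org/openSUSE:Factory/standard/2cfcf3de...-osc
#
# yields apiurl 'https://api.opensuse.org' (note the 'api.' rewrite above),
# project 'openSUSE:Factory', source 'osc', repository 'standard' and the
# md5 revision.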
1713 def parse_buildlogurl(buildlogurl):
    """Parse a build log url; returns a tuple (apiurl, project, package,
    repository, arch), otherwise raises an oscerr.WrongArgs exception"""
1717 global BUILDLOGURL_RE
1719 m = BUILDLOGURL_RE.match(buildlogurl)
1721 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1723 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
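# Illustrative example of a build log url accepted by parse_buildlogurl():
#
#   https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/osc/_log
#
# yields ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard',
# 'x86_64').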
1726 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
    This is handy to allow copy/pasting a project/package combination in this form.
1729 Trailing slashes are removed before the split, because the split would
1730 otherwise give an additional empty string.
1738 def expand_proj_pack(args, idx=0, howmany=0):
    """looks for an occurrence of '.' at position idx.
1740 If howmany is 2, both proj and pack are expanded together
1741 using the current directory, or none of them, if not possible.
1742 If howmany is 0, proj is expanded if possible, then, if there
1743 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1744 expanded, if possible.
1745 If howmany is 1, only proj is expanded if possible.
    If args[idx] does not exist, an implicit '.' is assumed.
    If not enough elements up to idx exist, an error is raised.
1750 See also parseargs(args), slash_split(args), findpacs(args)
1751 All these need unification, somehow.
1754 # print args,idx,howmany
        raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1759 if len(args) == idx:
1761 if args[idx+0] == '.':
1762 if howmany == 0 and len(args) > idx+1:
1763 if args[idx+1] == '.':
1765 # remove one dot and make sure to expand both proj and pack
1770 # print args,idx,howmany
1772 args[idx+0] = store_read_project('.')
1775 package = store_read_package('.')
1776 args.insert(idx+1, package)
1780 package = store_read_package('.')
1781 args.insert(idx+1, package)
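# A hedged usage sketch: assuming the current directory is a checkout of
# package 'foo' in project 'home:user', a bare '.' argument is expanded from
# the working copy, e.g.
#
#   expand_proj_pack(['.'])  ->  ['home:user', 'foo']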
1785 def findpacs(files, progress_obj=None):
1786 """collect Package objects belonging to the given files
1787 and make sure each Package is returned only once"""
1790 p = filedir_to_pac(f, progress_obj)
1793 if i.name == p.name:
1803 def read_filemeta(dir):
1805 r = ET.parse(os.path.join(dir, store, '_files'))
1806 except SyntaxError, e:
1807 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1808 'When parsing .osc/_files, the following error was encountered:\n'
1813 def read_tobedeleted(dir):
1815 fname = os.path.join(dir, store, '_to_be_deleted')
1817 if os.path.exists(fname):
1818 r = [ line.strip() for line in open(fname) ]
1823 def read_inconflict(dir):
1825 fname = os.path.join(dir, store, '_in_conflict')
1827 if os.path.exists(fname):
1828 r = [ line.strip() for line in open(fname) ]
1833 def parseargs(list_of_args):
    """Convenience method for osc's command-line argument parsing.
1836 If called with an empty tuple (or list), return a list containing the current directory.
1837 Otherwise, return a list of the arguments."""
1839 return list(list_of_args)
1844 def filedir_to_pac(f, progress_obj=None):
1845 """Takes a working copy path, or a path to a file inside a working copy,
    and returns a Package object instance.
1848 If the argument was a filename, add it onto the "todo" list of the Package """
1850 if os.path.isdir(f):
1852 p = Package(wd, progress_obj=progress_obj)
1855 wd = os.path.dirname(f)
1858 p = Package(wd, progress_obj=progress_obj)
1859 p.todo = [ os.path.basename(f) ]
1864 def statfrmt(statusletter, filename):
1865 return '%s %s' % (statusletter, filename)
1868 def pathjoin(a, *p):
1869 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1870 path = os.path.join(a, *p)
1871 if path.startswith('./'):
1876 def makeurl(baseurl, l, query=[]):
    """Given a list of path components, construct a complete URL.
1879 Optional parameters for a query string can be given as a list, as a
1880 dictionary, or as an already assembled string.
1881 In case of a dictionary, the parameters will be urlencoded by this
    function. In case of a list it is not -- this is for backwards compatibility.
1885 if conf.config['verbose'] > 1:
1886 print 'makeurl:', baseurl, l, query
    if isinstance(query, list):
        query = '&'.join(query)
    elif isinstance(query, dict):
        query = urlencode(query)
1893 scheme, netloc = urlsplit(baseurl)[0:2]
1894 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
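# A small illustrative call (the apiurl and names are made up):
#
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', 'osc'],
#           query={'rev': 'latest'})
#
# returns 'https://api.opensuse.org/source/openSUSE:Factory/osc?rev=latest'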
1897 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1898 """wrapper around urllib2.urlopen for error handling,
1899 and to support additional (PUT, DELETE) methods"""
1903 if conf.config['http_debug']:
1906 print '--', method, url
1908 if method == 'POST' and not file and not data:
1909 # adding data to an urllib2 request transforms it into a POST
1912 req = urllib2.Request(url)
1914 api_host_options=conf.get_apiurl_api_host_options(url)
1916 for header, value in api_host_options['http_headers']:
1917 req.add_header(header, value)
1919 req.get_method = lambda: method
    # POST requests are application/x-www-form-urlencoded by default
1922 # since we change the request into PUT, we also need to adjust the content type header
1923 if method == 'PUT' or (method == 'POST' and data):
1924 req.add_header('Content-Type', 'application/octet-stream')
    if isinstance(headers, dict):
1927 for i in headers.keys():
1929 req.add_header(i, headers[i])
1931 if file and not data:
1932 size = os.path.getsize(file)
1934 data = open(file, 'rb').read()
1937 filefd = open(file, 'rb')
1939 if sys.platform[:3] != 'win':
1940 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1942 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1944 except EnvironmentError, e:
1946 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1947 '\non a filesystem which does not support this.' % (e, file))
1948 elif hasattr(e, 'winerror') and e.winerror == 5:
1949 # falling back to the default io
1950 data = open(file, 'rb').read()
1954 if conf.config['debug']: print method, url
1956 old_timeout = socket.getdefaulttimeout()
1957 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1958 if old_timeout != timeout and not api_host_options['sslcertck']:
1959 socket.setdefaulttimeout(timeout)
1961 fd = urllib2.urlopen(req, data=data)
1963 if old_timeout != timeout and not api_host_options['sslcertck']:
1964 socket.setdefaulttimeout(old_timeout)
1965 if hasattr(conf.cookiejar, 'save'):
1966 conf.cookiejar.save(ignore_discard=True)
1968 if filefd: filefd.close()
1973 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1974 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1975 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1976 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
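# Typical usage of the wrappers above (an illustrative sketch mirroring how
# meta_get_packagelist() and friends use them further down):
#
#   u = makeurl(apiurl, ['source', 'openSUSE:Factory'])
#   f = http_GET(u)
#   root = ET.parse(f).getroot()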
1979 def init_project_dir(apiurl, dir, project):
1980 if not os.path.exists(dir):
1981 if conf.config['checkout_no_colon']:
1982 os.makedirs(dir) # helpful with checkout_no_colon
1985 if not os.path.exists(os.path.join(dir, store)):
1986 os.mkdir(os.path.join(dir, store))
1988 # print 'project=',project,' dir=',dir
1989 store_write_project(dir, project)
1990 store_write_apiurl(dir, apiurl)
1991 if conf.config['do_package_tracking']:
1992 store_write_initial_packages(dir, project, [])
1994 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
1995 if not os.path.isdir(store):
1998 f = open('_project', 'w')
1999 f.write(project + '\n')
2001 f = open('_package', 'w')
2002 f.write(package + '\n')
2006 f = open('_files', 'w')
2007 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2011 ET.ElementTree(element=ET.Element('directory')).write('_files')
2013 f = open('_osclib_version', 'w')
2014 f.write(__store_version__ + '\n')
2017 store_write_apiurl(os.path.pardir, apiurl)
2023 def check_store_version(dir):
2024 versionfile = os.path.join(dir, store, '_osclib_version')
2026 v = open(versionfile).read().strip()
2031 msg = 'Error: "%s" is not an osc working copy.' % os.path.abspath(dir)
2032 if os.path.exists(os.path.join(dir, '.svn')):
2033 msg = msg + '\nTry svn instead of osc.'
2034 raise oscerr.NoWorkingCopy(msg)
2036 if v != __store_version__:
2037 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2038 # version is fine, no migration needed
2039 f = open(versionfile, 'w')
2040 f.write(__store_version__ + '\n')
2043 msg = 'The osc metadata of your working copy "%s"' % dir
2044 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2045 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2046 raise oscerr.WorkingCopyWrongVersion, msg
2049 def meta_get_packagelist(apiurl, prj):
2051 u = makeurl(apiurl, ['source', prj])
2053 root = ET.parse(f).getroot()
2054 return [ node.get('name') for node in root.findall('entry') ]
2057 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2058 """return a list of file names,
    or a list of File() instances if verbose=True"""
2065 query['rev'] = revision
2067 query['rev'] = 'latest'
2069 u = makeurl(apiurl, ['source', prj, package], query=query)
2071 root = ET.parse(f).getroot()
2074 return [ node.get('name') for node in root.findall('entry') ]
2078 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2079 rev = root.get('rev')
2080 for node in root.findall('entry'):
2081 f = File(node.get('name'),
2083 int(node.get('size')),
2084 int(node.get('mtime')))
2090 def meta_get_project_list(apiurl):
2091 u = makeurl(apiurl, ['source'])
2093 root = ET.parse(f).getroot()
2094 return sorted([ node.get('name') for node in root ])
2097 def show_project_meta(apiurl, prj):
2098 url = makeurl(apiurl, ['source', prj, '_meta'])
2100 return f.readlines()
2103 def show_project_conf(apiurl, prj):
2104 url = makeurl(apiurl, ['source', prj, '_config'])
2106 return f.readlines()
2109 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2110 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2114 except urllib2.HTTPError, e:
2115 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2119 def show_package_meta(apiurl, prj, pac):
2120 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2123 return f.readlines()
2124 except urllib2.HTTPError, e:
2125 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2129 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2131 path.append('source')
2137 path.append('_attribute')
2139 path.append(attribute)
2142 query.append("with_default=1")
2144 query.append("with_project=1")
2145 url = makeurl(apiurl, path, query)
2148 return f.readlines()
2149 except urllib2.HTTPError, e:
2150 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2154 def show_develproject(apiurl, prj, pac):
2155 m = show_package_meta(apiurl, prj, pac)
2157 return ET.fromstring(''.join(m)).find('devel').get('project')
def show_pattern_metalist(apiurl, prj):
    url = makeurl(apiurl, ['source', prj, '_pattern'])
    try:
        f = http_GET(url)
        tree = ET.parse(f)
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
        raise
    r = [ node.get('name') for node in tree.getroot() ]
    r.sort()
    return r
def show_pattern_meta(apiurl, prj, pattern):
    url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
    try:
        f = http_GET(url)
        return f.readlines()
    except urllib2.HTTPError, e:
        e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
        raise
2186 """metafile that can be manipulated and is stored back after manipulation."""
2187 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2191 self.change_is_required = change_is_required
2192 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2193 f = os.fdopen(fd, 'w')
2194 f.write(''.join(input))
2196 self.hash_orig = dgst(self.filename)
    def sync(self):
        hash = dgst(self.filename)
        if self.change_is_required == True and hash == self.hash_orig:
            print 'File unchanged. Not saving.'
            os.unlink(self.filename)
            return True

        print 'Sending meta data...'
        # don't do any exception handling... it's up to the caller what to do in case
        # of an exception
        http_PUT(self.url, file=self.filename)
        os.unlink(self.filename)
        return True
    def edit(self):
        try:
            while 1:
                if sys.platform[:3] != 'win':
                    editor = os.getenv('EDITOR', default='vim')
                else:
                    editor = os.getenv('EDITOR', default='notepad')
                subprocess.call('%s %s' % (editor, self.filename), shell=True)
                if self.change_is_required == True:
                    try:
                        self.sync()
                    except urllib2.HTTPError, e:
2224 error_help = "%d" % e.code
2225 if e.headers.get('X-Opensuse-Errorcode'):
2226 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2228 print >>sys.stderr, 'BuildService API error:', error_help
2229 # examine the error - we can't raise an exception because we might want
2232 if '<summary>' in data:
2233 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2234 input = raw_input('Try again? ([y/N]): ')
2235 if input != 'y' and input != 'Y':
2246 if os.path.exists(self.filename):
2247 print 'discarding', self.filename
2248 os.unlink(self.filename)
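
# Illustrative sketch (not part of the original osc module): fetching a package _meta
# and editing it through the metafile helper; project/package names are placeholders
# and the edit() call relies on the method reconstructed above.
def _example_edit_meta_by_hand(prj='home:user', pac='hello'):
    url = makeurl(conf.config['apiurl'], ['source', prj, pac, '_meta'])
    data = show_package_meta(conf.config['apiurl'], prj, pac)
    m = metafile(url, data, change_is_required=True)
    m.edit()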
2252 # different types of metadata
2253 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2254 'template': new_project_templ,
2257 'pkg': { 'path' : 'source/%s/%s/_meta',
2258 'template': new_package_templ,
2261 'attribute': { 'path' : 'source/%s/%s/_meta',
2262 'template': new_attribute_templ,
2265 'prjconf': { 'path': 'source/%s/_config',
2269 'user': { 'path': 'person/%s',
2270 'template': new_user_template,
2273 'pattern': { 'path': 'source/%s/_pattern/%s',
2274 'template': new_pattern_template,
def meta_exists(metatype, path_args=None, template_args=None,
                create_new=True, apiurl=None):
    if not apiurl:
        apiurl = conf.config['apiurl']
    url = make_meta_url(metatype, path_args, apiurl)
    try:
        data = http_GET(url).readlines()
    except urllib2.HTTPError, e:
        if e.code == 404 and create_new:
            data = metatypes[metatype]['template']
            if template_args:
                data = StringIO(data % template_args).readlines()
        else:
            raise e
    return data
def make_meta_url(metatype, path_args=None, apiurl=None):
    if not apiurl:
        apiurl = conf.config['apiurl']
    if metatype not in metatypes.keys():
        raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
    path = metatypes[metatype]['path']

    if path_args:
        path = path % path_args

    return makeurl(apiurl, [path])
def edit_meta(metatype, path_args=None, data=None, template_args=None,
              edit=False, change_is_required=False, apiurl=None):
    if not apiurl:
        apiurl = conf.config['apiurl']
    if not data:
        data = meta_exists(metatype, path_args, template_args,
                           create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
                           apiurl=apiurl)
    if edit:
        change_is_required = True

    url = make_meta_url(metatype, path_args, apiurl)
    f = metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
    if edit:
        f.edit()
    else:
        f.sync()
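
# Illustrative sketch (not part of the original osc module): opening a package's meta
# in $EDITOR via edit_meta(); the 'edit' keyword follows the signature reconstructed
# above and the project/package names are placeholders.
def _example_edit_pkg_meta(prj='home:user', pac='hello'):
    edit_meta(metatype='pkg',
              path_args=(quote_plus(prj), quote_plus(pac)),
              edit=True)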
def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
    query = {}
    if revision:
        query['rev'] = revision
    else:
        query['rev'] = 'latest'
    if linkrev:
        query['linkrev'] = linkrev
    elif conf.config['linkcontrol']:
        query['linkrev'] = 'base'
    if expand:
        query['expand'] = 1
    if linkrepair:
        query['emptylink'] = 1
    f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
    return f.readlines()
2359 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2360 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2361 return ET.fromstring(''.join(m)).get('srcmd5')
2364 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2365 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2367 # only source link packages have a <linkinfo> element.
2368 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2376 raise oscerr.LinkExpandError(prj, pac, li.error)
2380 def show_upstream_rev(apiurl, prj, pac):
2381 m = show_files_meta(apiurl, prj, pac)
2382 return ET.fromstring(''.join(m)).get('rev')
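
# Illustrative sketch (not part of the original osc module): querying the current
# revision and the expanded srcmd5 of a package; names are placeholders.
def _example_print_revisions(prj='openSUSE:Factory', pac='osc'):
    apiurl = conf.config['apiurl']
    print 'rev:   ', show_upstream_rev(apiurl, prj, pac)
    print 'srcmd5:', show_upstream_srcmd5(apiurl, prj, pac, expand=True)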
2385 def read_meta_from_spec(specfile, *args):
2386 import codecs, locale, re
    """
    Read tags and sections from spec file. To read out
    a tag the passed argument mustn't end with a colon. To
    read out a section the passed argument must start with
    a '%'.
    This method returns a dictionary which contains the
    requested data.
    """
2396 if not os.path.isfile(specfile):
2397 raise IOError('\'%s\' is not a regular file' % specfile)
2400 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2401 except UnicodeDecodeError:
2402 lines = open(specfile).readlines()
2409 if itm.startswith('%'):
2410 sections.append(itm)
2414 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2416 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2417 if m and m.group('val'):
2418 spec_data[tag] = m.group('val').strip()
2420 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2423 section_pat = '^%s\s*?$'
2424 for section in sections:
2425 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2427 start = lines.index(m.group()+'\n') + 1
2429 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2432 for line in lines[start:]:
2433 if line.startswith('%'):
2436 spec_data[section] = data
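
# Illustrative sketch (not part of the original osc module): reading a tag and a
# section from a spec file; the file name and the chosen tags are placeholders.
def _example_read_spec(specfile='hello.spec'):
    data = read_meta_from_spec(specfile, 'Summary', '%description')
    print data['Summary']                # tag values are plain strings
    print ''.join(data['%description'])  # sections are returned as a list of lines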
2441 def edit_message(footer='', template=''):
2442 delim = '--This line, and those below, will be ignored--\n\n' + footer
2444 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2445 f = os.fdopen(fd, 'w')
2451 mtime_orig = os.stat(filename).st_mtime
2453 if sys.platform[:3] != 'win':
2454 editor = os.getenv('EDITOR', default='vim')
2456 editor = os.getenv('EDITOR', default='notepad')
2458 subprocess.call('%s %s' % (editor, filename), shell=True)
2459 mtime = os.stat(filename).st_mtime
2461 if mtime_orig < mtime:
2462 msg = open(filename).read()
2464 return msg.split(delim)[0].rstrip()
2466 input = raw_input('Log message unchanged or not specified\n'
2467 'a)bort, c)ontinue, e)dit: ')
2470 raise oscerr.UserAbort
def create_delete_request(apiurl, project, package, message):
    import cgi
    if package:
        package = """package="%s" """ % (package)
    else:
        package = ""
    xml = """\
<request>
    <action type="delete">
        <target project="%s" %s/>
    </action>
    <state name="new"/>
    <description>%s</description>
</request>
""" % (project, package,
       cgi.escape(message or ''))

    u = makeurl(apiurl, ['request'], query='cmd=create')
    f = http_POST(u, data=xml)

    root = ET.parse(f).getroot()
    return root.get('id')
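
# Illustrative sketch (not part of the original osc module): filing a delete request;
# project, package and message are placeholders.
def _example_request_deletion(prj='home:user', pac='hello'):
    reqid = create_delete_request(conf.config['apiurl'], prj, pac,
                                  'package is no longer needed')
    print 'created request %s' % reqid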
2505 def create_change_devel_request(apiurl,
2506 devel_project, devel_package,
2513 <action type="change_devel">
2514 <source project="%s" package="%s" />
2515 <target project="%s" package="%s" />
2518 <description>%s</description>
2520 """ % (devel_project,
2524 cgi.escape(message or ''))
2526 u = makeurl(apiurl, ['request'], query='cmd=create')
2527 f = http_POST(u, data=xml)
2529 root = ET.parse(f).getroot()
2530 return root.get('id')
2533 # This creates an old style submit request for server api 1.0
2534 def create_submit_request(apiurl,
2535 src_project, src_package,
2536 dst_project=None, dst_package=None,
2537 message=None, orev=None, src_update=None):
2542 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2544 # Yes, this kind of xml construction is horrible
2549 packagexml = """package="%s" """ %( dst_package )
2550 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2551 # XXX: keep the old template for now in order to work with old obs instances
2553 <request type="submit">
2555 <source project="%s" package="%s" rev="%s"/>
2560 <description>%s</description>
2564 orev or show_upstream_rev(apiurl, src_project, src_package),
2567 cgi.escape(message or ""))
2569 u = makeurl(apiurl, ['request'], query='cmd=create')
2570 f = http_POST(u, data=xml)
2572 root = ET.parse(f).getroot()
2573 return root.get('id')
def get_request(apiurl, reqid):
    u = makeurl(apiurl, ['request', reqid])
    f = http_GET(u)
    root = ET.parse(f).getroot()
def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
    u = makeurl(apiurl,
                ['request', reqid],
                query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
    f = http_POST(u, data=message)
    return f.read()
def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
    u = makeurl(apiurl,
                ['request', reqid],
                query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
    f = http_POST(u, data=message)
    return f.read()
2601 def get_request_list(apiurl, project, package, req_who='', req_state=('new',), req_type=None ):
2607 if not "all" in req_state:
2608 for state in req_state:
2609 if len(m): m += '%20or%20'
2610 m += 'state/@name=\'%s\'' % quote_plus(state)
2611 if len(m): match += "(" + m + ")"
2614 if len(m): m += '%20and%20'
2615 m += 'state/@who=\'%s\'' % quote_plus(req_who)
2617 m += 'history/@who=\'%s\'' % quote_plus(req_who)
2619 if len(match): match += "%20and%20"
2620 match += "(" + m + ")"
2622 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2624 if project or package:
2625 for what in ['action', 'submit']:
2628 if len(m): m += '%20and%20'
2629 m += '(%s/target/@project=\'%s\'%%20or%%20' % (what, quote_plus(project))
2630 m += '%s/source/@project=\'%s\')' % (what, quote_plus(project))
2632 if len(m): m += '%20and%20'
2633 m += '(%s/target/@package=\'%s\'%%20or%%20' % (what, quote_plus(package))
2634 m += '%s/source/@package=\'%s\')' % (what, quote_plus(package))
2636 if len(m): m += '%20and%20'
2637 m += '%s/@type=\'%s\'' % (what, quote_plus(req_type))
2642 if len(match): match += '%20and%20'
2643 match += 'action/@type=\'%s\'' % quote_plus(req_type)
2644 matches.append(match)
2646 for match in matches:
2647 if conf.config['verbose'] > 1:
2649 u = makeurl(apiurl, ['search', 'request'], ['match=%s' % match])
2651 collection = ET.parse(f).getroot()
2653 for root in collection.findall('request'):
def get_request_log(apiurl, reqid):
    r = get_request(apiurl, reqid)
    data = []
    frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
    # the description of the request is used for the initial log entry
    # otherwise its comment attribute would contain None
    if len(r.statehistory) >= 1:
        r.statehistory[-1].comment = r.descr
    else:
        r.state.comment = r.descr
    for state in [ r.state ] + r.statehistory:
        s = frmt % (state.name, state.who, state.when, str(state.comment))
        data.append(s)
    return data
def get_user_meta(apiurl, user):
    u = makeurl(apiurl, ['person', quote_plus(user)])
    try:
        f = http_GET(u)
        return ''.join(f.readlines())
    except urllib2.HTTPError:
        print 'user \'%s\' not found' % user
        return None
def get_user_data(apiurl, user, *tags):
    """get specified tags from the user meta"""
    meta = get_user_meta(apiurl, user)
    data = []
    if meta != None:
        root = ET.fromstring(meta)
        for tag in tags:
            try:
                if root.find(tag).text != None:
                    data.append(root.find(tag).text)
                else:
                    # tag is empty
                    data.append('-')
            except AttributeError:
                # this part is reached if the tags tuple contains an invalid tag
                print 'The xml file for user \'%s\' seems to be broken' % user
                break
    return data
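
# Illustrative sketch (not part of the original osc module): looking up a user's
# real name and e-mail; the login is a placeholder.
def _example_whois(user='Admin'):
    data = get_user_data(conf.config['apiurl'], user, 'login', 'realname', 'email')
    if len(data) == 3:
        print '%s: %s <%s>' % (data[0], data[1], data[2])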
def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
    import tempfile, shutil
    query = None
    if revision:
        query = { 'rev': revision }

    (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
    o = os.fdopen(fd, 'wb')
    u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
    for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
        o.write(buf)
    o.close()
    shutil.move(tmpfile, targetfilename or filename)
2729 def get_binary_file(apiurl, prj, repo, arch,
2732 target_filename = None,
2733 target_mtime = None,
2734 progress_meter = False):
2736 target_filename = target_filename or filename
2738 where = package or '_repository'
2739 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2742 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2746 binsize = int(f.headers['content-length'])
2749 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2750 os.chmod(tmpfilename, 0644)
2753 o = os.fdopen(fd, 'wb')
2757 #buf = f.read(BUFSIZE)
2761 downloaded += len(buf)
2763 completion = str(int((float(downloaded)/binsize)*100))
2764 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2769 sys.stdout.write('\n')
2771 shutil.move(tmpfilename, target_filename)
2773 os.utime(target_filename, (-1, target_mtime))
2775 # make sure that the temp file is cleaned up when we are interrupted
2777 try: os.unlink(tmpfilename)
def dgst_from_string(str):
    # Python 2.5 deprecates the md5 module
    # Python 2.4 doesn't have hashlib yet
    try:
        import hashlib
        md5_hash = hashlib.md5()
    except ImportError:
        import md5
        md5_hash = md5.new()
    md5_hash.update(str)
    return md5_hash.hexdigest()
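
# Illustrative sketch (not part of the original osc module): dgst_from_string()
# returns the hex md5 of an in-memory string.
def _example_md5_of_string(s='hello world\n'):
    print dgst_from_string(s)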
2794 #if not os.path.exists(file):
2804 f = open(file, 'rb')
2806 buf = f.read(BUFSIZE)
2809 return s.hexdigest()
2814 """return true if a string is binary data using diff's heuristic"""
2815 if s and '\0' in s[:4096]:
2820 def binary_file(fn):
2821 """read 4096 bytes from a file named fn, and call binary() on the data"""
2822 return binary(open(fn, 'rb').read(4096))
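
# Illustrative sketch (not part of the original osc module): the binary()/binary_file()
# heuristic simply looks for a NUL byte within the first 4096 bytes.
def _example_is_binary():
    print binary('plain text\n')   # False
    print binary('\x7fELF\0\0')    # True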
def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
    """
    This method diffs oldfilename against filename (so filename will
    be shown as the new file).
    The variable origfilename is used if filename and oldfilename differ
    in their names (for instance if a tempfile is used for filename etc.)
    """
    import difflib

    if not oldfilename:
        oldfilename = filename

    if not olddir:
        olddir = os.path.join(dir, store)

    if not origfilename:
        origfilename = filename

    file1 = os.path.join(olddir, oldfilename)   # old/stored original
    file2 = os.path.join(dir, filename)         # working copy
    f1 = open(file1, 'rb')
    s1 = f1.read()
    f1.close()
    f2 = open(file2, 'rb')
    s2 = f2.read()
    f2.close()

    if binary(s1) or binary(s2):
        d = ['Binary file %s has changed\n' % origfilename]
    else:
        d = difflib.unified_diff(\
            s1.splitlines(1), \
            s2.splitlines(1), \
            fromfile = '%s (revision %s)' % (origfilename, rev), \
            tofile = '%s (working copy)' % origfilename)

        # if file doesn't end with newline, we need to append one in the diff result
        d = list(d)
        for i, line in enumerate(d):
            if not line.endswith('\n'):
                d[i] += '\n\\ No newline at end of file'
    return ''.join(d)
2875 def make_diff(wc, revision):
2881 diff_hdr = 'Index: %s\n'
2882 diff_hdr += '===================================================================\n'
2884 olddir = os.getcwd()
2888 for file in wc.todo:
2889 if file in wc.filenamelist+wc.filenamelist_unvers:
2890 state = wc.status(file)
2892 added_files.append(file)
2894 removed_files.append(file)
2895 elif state == 'M' or state == 'C':
2896 changed_files.append(file)
2898 diff.append('osc: \'%s\' is not under version control' % file)
2900 for file in wc.filenamelist+wc.filenamelist_unvers:
2901 state = wc.status(file)
2902 if state == 'M' or state == 'C':
2903 changed_files.append(file)
2905 added_files.append(file)
2907 removed_files.append(file)
2909 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2911 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2912 cmp_pac = Package(tmpdir)
2914 for file in wc.todo:
2915 if file in cmp_pac.filenamelist:
2916 if file in wc.filenamelist:
2917 changed_files.append(file)
2919 diff.append('osc: \'%s\' is not under version control' % file)
2921 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2923 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2925 for file in changed_files:
2926 diff.append(diff_hdr % file)
2928 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2930 cmp_pac.updatefile(file, revision)
2931 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
2932 cmp_pac.absdir, file))
2933 (fd, tmpfile) = tempfile.mkstemp()
2934 for file in added_files:
2935 diff.append(diff_hdr % file)
2937 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
2938 os.path.dirname(tmpfile), file))
2940 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
2941 os.path.dirname(tmpfile), file))
2943 # FIXME: this is ugly but it cannot be avoided atm
2944 # if a file is deleted via "osc rm file" we should keep the storefile.
2946 if cmp_pac == None and removed_files:
2947 tmpdir = tempfile.mkdtemp()
2949 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
2950 tmp_pac = Package(tmpdir)
2953 for file in removed_files:
2954 diff.append(diff_hdr % file)
2956 tmp_pac.updatefile(file, tmp_pac.rev)
2957 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2958 wc.rev, file, tmp_pac.storedir, file))
2960 cmp_pac.updatefile(file, revision)
2961 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2962 revision, file, cmp_pac.storedir, file))
2966 delete_dir(cmp_pac.absdir)
2968 delete_dir(tmp_pac.absdir)
def server_diff(apiurl,
                old_project, old_package, old_revision,
                new_project, new_package, new_revision, unified=False):
    query = {'cmd': 'diff', 'expand': '1'}
    if old_project:
        query['oproject'] = old_project
    if old_package:
        query['opackage'] = old_package
    if old_revision:
        query['orev'] = old_revision
    if new_revision:
        query['rev'] = new_revision
    if unified:
        query['unified'] = 1

    u = makeurl(apiurl, ['source', new_project, new_package], query=query)

    f = http_GET(u)
    return f.read()
def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
    """
    creates the plain directory structure for a package dir.
    The 'apiurl' parameter is needed for the project dir initialization.
    The 'project' and 'package' parameters specify the name of the
    project and the package. The optional 'pathname' parameter is used
    for printing out the message that a new dir was created (default: 'prj_dir/package').
    The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
    """
    prj_dir = prj_dir or project
    # FIXME: carefully test each path component of prj_dir,
3006 # if we have a .osc/_files entry at that level.
3007 # -> if so, we have a package/project clash,
3008 # and should rename this path component by appending '.proj'
3009 # and give user a warning message, to discourage such clashes
3011 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3012 if is_package_dir(prj_dir):
3013 # we want this to become a project directory,
3014 # but it already is a package directory.
3015 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3017 if not is_project_dir(prj_dir):
        # this directory could exist as a parent directory for one of our earlier
3019 # checked out sub-projects. in this case, we still need to initialize it.
3020 print statfrmt('A', prj_dir)
3021 init_project_dir(apiurl, prj_dir, project)
3023 if is_project_dir(os.path.join(prj_dir, package)):
3024 # the thing exists, but is a project directory and not a package directory
3025 # FIXME: this should be a warning message to discourage package/project clashes
3026 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3028 if not os.path.exists(os.path.join(prj_dir, package)):
3029 print statfrmt('A', pathname)
3030 os.mkdir(os.path.join(prj_dir, package))
3031 os.mkdir(os.path.join(prj_dir, package, store))
3033 return(os.path.join(prj_dir, package))
3036 def checkout_package(apiurl, project, package,
3037 revision=None, pathname=None, prj_obj=None,
3038 expand_link=False, prj_dir=None, service_files=None, progress_obj=None):
3040 # the project we're in might be deleted.
3041 # that'll throw an error then.
3042 olddir = os.getcwd()
3044 olddir = os.environ.get("PWD")
3049 if sys.platform[:3] == 'win':
3050 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3052 if conf.config['checkout_no_colon']:
3053 prj_dir = prj_dir.replace(':', '/')
3056 pathname = getTransActPath(os.path.join(prj_dir, package))
3058 # before we create directories and stuff, check if the package actually
3060 show_package_meta(apiurl, project, package)
3064 # try to read from the linkinfo
3065 # if it is a link we use the xsrcmd5 as the revision to be
3068 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3070 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3075 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3076 init_package_dir(apiurl, project, package, store, revision)
3078 p = Package(package, progress_obj=progress_obj)
3081 for filename in p.filenamelist:
3082 if service_files or not filename.startswith('_service:'):
3083 p.updatefile(filename, revision)
3084 # print 'A ', os.path.join(project, package, filename)
3085 print statfrmt('A', os.path.join(pathname, filename))
3086 if conf.config['do_package_tracking']:
3087 # check if we can re-use an existing project object
3089 prj_obj = Project(os.getcwd())
3090 prj_obj.set_state(p.name, ' ')
3091 prj_obj.write_packages()
def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
                     dst_userid = None, keep_develproject = False):
    """
    update pkgmeta with new new_name and new_prj and set calling user as the
    only maintainer (unless keep_maintainers is set). Additionally remove the
    develproject entry (<devel />) unless keep_develproject is true.
    """
    root = ET.fromstring(''.join(pkgmeta))
    root.set('name', new_name)
    root.set('project', new_prj)
    if not keep_maintainers:
        for person in root.findall('person'):
            root.remove(person)
    if not keep_develproject:
        for dp in root.findall('devel'):
            root.remove(dp)
    return ET.tostring(root)
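
# Illustrative sketch (not part of the original osc module): rewriting a package's meta
# for a copy under a new project/name with replace_pkg_meta(); names are placeholders.
def _example_retarget_meta(new_prj='home:user', new_pac='hello'):
    src_meta = show_package_meta(conf.config['apiurl'], 'openSUSE:Factory', 'osc')
    return replace_pkg_meta(src_meta, new_pac, new_prj)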
def link_to_branch(apiurl, project, package):
    """
    convert a package with a _link + project.diff to a branch
    """

    if '_link' in meta_get_filelist(conf.config['apiurl'], project, package):
        u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
        http_POST(u)
    else:
        raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
    """
    create a linked package
     - "src" is the original package
     - "dst" is the "link" package that we are creating here
    """

    try:
        dst_meta = meta_exists(metatype='pkg',
                               path_args=(quote_plus(dst_project), quote_plus(dst_package)),
                               template_args=None,
                               create_new=False, apiurl=conf.config['apiurl'])
    except:
        src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
        dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
    if disable_publish:
        root = ET.fromstring(''.join(dst_meta))
        elm = root.find('publish')
        if not elm:
            elm = ET.SubElement(root, 'publish')
        elm.clear()
        ET.SubElement(elm, 'disable')
        dst_meta = ET.tostring(root)
3153 path_args=(dst_project, dst_package),
3155 # create the _link file
3156 # but first, make sure not to overwrite an existing one
3157 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3159 print >>sys.stderr, 'forced overwrite of existing _link file'
3162 print >>sys.stderr, '_link file already exists...! Aborting'
3166 rev = 'rev="%s"' % rev
3171 cicount = 'cicount="%s"' % cicount
3175 print 'Creating _link...',
3176 link_template = """\
3177 <link project="%s" package="%s" %s %s>
3179 <!-- <apply name="patch" /> apply a patch on the source directory -->
3180 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3181 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3182 <!-- <delete>filename</delete> delete a file -->
3185 """ % (src_project, src_package, rev, cicount)
3187 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3188 http_PUT(u, data=link_template)
def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
    """
     - "src" is the original package
     - "dst" is the "aggregate" package that we are creating here
     - "map" is a dictionary SRC => TARGET repository mappings
    """
    try:
        dst_meta = meta_exists(metatype='pkg',
                               path_args=(quote_plus(dst_project), quote_plus(dst_package)),
                               template_args=None,
                               create_new=False, apiurl=conf.config['apiurl'])
    except:
        src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
        dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
    if disable_publish:
        root = ET.fromstring(''.join(dst_meta))
        elm = root.find('publish')
        if not elm:
            elm = ET.SubElement(root, 'publish')
        elm.clear()
        ET.SubElement(elm, 'disable')
        dst_meta = ET.tostring(root)
3221 path_args=(dst_project, dst_package),
3224 # create the _aggregate file
3225 # but first, make sure not to overwrite an existing one
3226 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3228 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3231 print 'Creating _aggregate...',
3232 aggregate_template = """\
3234 <aggregate project="%s">
3236 for tgt, src in repo_map.iteritems():
3237 aggregate_template += """\
3238 <repository target="%s" source="%s" />
3241 aggregate_template += """\
3242 <package>%s</package>
3245 """ % ( src_package)
3247 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3248 http_PUT(u, data=aggregate_template)
def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
    """
    Branch packages defined via attributes (via API call)
    """
    query = { 'cmd': 'branch' }
3257 query['attribute'] = attribute
3259 query['target_project'] = targetproject
3261 query['package'] = package
3262 if maintained_update_project_attribute:
3263 query['update_project_attribute'] = maintained_update_project_attribute
3265 u = makeurl(apiurl, ['source'], query=query)
3269 except urllib2.HTTPError, e:
3270 msg = ''.join(e.readlines())
3271 msg = msg.split('<summary>')[1]
3272 msg = msg.split('</summary>')[0]
3273 m = re.match(r"attribute branch call failed: (\S+)/", msg)
3277 r = r.split('targetproject">')[1]
3278 r = r.split('</data>')[0]
def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False):
    """
    Branch a package (via API call)
    """
    query = { 'cmd': 'branch' }
3288 query['ignoredevel'] = '1'
3292 query['target_project'] = target_project
3294 query['target_package'] = target_package
3295 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
3298 except urllib2.HTTPError, e:
3299 if not return_existing:
3301 msg = ''.join(e.readlines())
3302 msg = msg.split('<summary>')[1]
3303 msg = msg.split('</summary>')[0]
3304 m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
3308 return (True, m.group(1), m.group(2), None, None)
3311 for i in ET.fromstring(f.read()).findall('data'):
3312 data[i.get('name')] = i.text
3313 return (False, data.get('targetproject', None), data.get('targetpackage', None),
3314 data.get('sourceproject', None), data.get('sourcepackage', None))
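
# Illustrative sketch (not part of the original osc module): branching a package and
# unpacking the tuple returned by branch_pkg(); project/package are placeholders.
def _example_branch(prj='openSUSE:Factory', pac='osc'):
    exists, targetprj, targetpac, srcprj, srcpac = branch_pkg(
        conf.config['apiurl'], prj, pac, return_existing=True)
    if exists:
        print 'branch target already exists: %s/%s' % (targetprj, targetpac)
    else:
        print 'branched to %s/%s' % (targetprj, targetpac)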
def copy_pac(src_apiurl, src_project, src_package,
             dst_apiurl, dst_project, dst_package,
             client_side_copy = False,
             keep_maintainers = False,
             keep_develproject = False,
             expand = False,
             revision = None,
             comment = None):
    """
    Create a copy of a package.

    Copying can be done by downloading the files from one package and commit
    them into the other by uploading them (client-side copy) --
    or by the server, in a single api call.
    """

    src_meta = show_package_meta(src_apiurl, src_project, src_package)
3334 dst_userid = conf.get_apiurl_usr(dst_apiurl)
3335 src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
3336 dst_userid, keep_develproject)
3338 print 'Sending meta data...'
3339 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
3340 http_PUT(u, data=src_meta)
3342 print 'Copying files...'
3343 if not client_side_copy:
        query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
        if expand:
            query['expand'] = '1'
        if revision:
            query['orev'] = revision
        if comment:
            query['comment'] = comment
        u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
3356 # copy one file after the other
3358 tmpdir = tempfile.mkdtemp(prefix='osc_copypac')
3360 query = {'rev': 'upload'}
3361 for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand):
3363 get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n, revision=revision)
3364 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, pathname2url(n)], query=query)
3365 http_PUT(u, file = n)
3368 query['comment'] = comment
3369 query['cmd'] = 'commit'
3370 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)