1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
# but this needs to stay to avoid breaking tools which use the osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E', # obsolete with OBS 2.0
200 # os.path.samefile is available only under Unix
201 def os_path_samefile(path1, path2):
203 return os.path.samefile(path1, path2)
205 return os.path.realpath(path1) == os.path.realpath(path2)
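# A small usage sketch (paths are hypothetical): on platforms without
# os.path.samefile the realpath comparison above should give the same answer,
# e.g.
#   os_path_samefile('/tmp/wc', '/tmp/./wc')  -> True
#   os_path_samefile('/tmp/wc', '/tmp/other') -> False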
208 """represent a file, including its metadata"""
209 def __init__(self, name, md5, size, mtime):
219 """Source service content
222 """creates an empty serviceinfo instance"""
225 def read(self, serviceinfo_node):
226 """read in the source services <services> element passed as
if serviceinfo_node is None:
232 services = serviceinfo_node.findall('service')
234 for service in services:
235 name = service.get('name')
237 for param in service.findall('param'):
238 option = param.get('name', None)
240 name += " --" + option + " '" + value + "'"
241 self.commands.append(name)
243 msg = 'invalid service format:\n%s' % ET.tostring(root)
244 raise oscerr.APIError(msg)
246 def execute(self, dir):
249 for call in self.commands:
250 temp_dir = tempfile.mkdtemp()
251 name = call.split(None, 1)[0]
252 if not os.path.exists("/usr/lib/obs/service/"+name):
msg = "ERROR: service is not installed!"
msg += " Maybe it can be fixed with: zypper in obs-server-" + name
255 raise oscerr.APIError(msg)
256 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
257 ret = subprocess.call(c, shell=True)
259 print "ERROR: service call failed: " + c
261 for file in os.listdir(temp_dir):
262 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
266 """linkinfo metadata (which is part of the xml representing a directory
269 """creates an empty linkinfo instance"""
279 def read(self, linkinfo_node):
280 """read in the linkinfo metadata from the <linkinfo> element passed as
282 If the passed element is None, the method does nothing.
if linkinfo_node is None:
286 self.project = linkinfo_node.get('project')
287 self.package = linkinfo_node.get('package')
288 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
289 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
290 self.srcmd5 = linkinfo_node.get('srcmd5')
291 self.error = linkinfo_node.get('error')
292 self.rev = linkinfo_node.get('rev')
293 self.baserev = linkinfo_node.get('baserev')
296 """returns True if the linkinfo is not empty, otherwise False"""
297 if self.xsrcmd5 or self.lsrcmd5:
301 def isexpanded(self):
302 """returns True if the package is an expanded link"""
303 if self.lsrcmd5 and not self.xsrcmd5:
308 """returns True if the link is in error state (could not be applied)"""
314 """return an informatory string representation"""
315 if self.islink() and not self.isexpanded():
316 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
317 % (self.project, self.package, self.xsrcmd5, self.rev)
318 elif self.islink() and self.isexpanded():
320 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
321 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
323 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
324 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
330 """represent a project directory, holding packages"""
331 def __init__(self, dir, getPackageList=True, progress_obj=None):
334 self.absdir = os.path.abspath(dir)
335 self.progress_obj = progress_obj
337 self.name = store_read_project(self.dir)
338 self.apiurl = store_read_apiurl(self.dir)
341 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
343 self.pacs_available = []
345 if conf.config['do_package_tracking']:
346 self.pac_root = self.read_packages().getroot()
347 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
348 self.pacs_excluded = [ i for i in os.listdir(self.dir)
349 for j in conf.config['exclude_glob']
350 if fnmatch.fnmatch(i, j) ]
351 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
# store all broken packages (e.g. packages which were removed by a non-osc cmd)
353 # in the self.pacs_broken list
354 self.pacs_broken = []
355 for p in self.pacs_have:
356 if not os.path.isdir(os.path.join(self.absdir, p)):
# all states will be replaced with the '!'-state
# (unless it is already marked as deleted ('D'-state))
359 self.pacs_broken.append(p)
361 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
363 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
365 def checkout_missing_pacs(self, expand_link=False):
366 for pac in self.pacs_missing:
368 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
369 # pac is not under version control but a local file/dir exists
370 msg = 'can\'t add package \'%s\': Object already exists' % pac
371 raise oscerr.PackageExists(self.name, pac, msg)
373 print 'checking out new package %s' % pac
374 checkout_package(self.apiurl, self.name, pac, \
375 pathname=getTransActPath(os.path.join(self.dir, pac)), \
376 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
378 def set_state(self, pac, state):
379 node = self.get_package_node(pac)
381 self.new_package_entry(pac, state)
383 node.attrib['state'] = state
385 def get_package_node(self, pac):
386 for node in self.pac_root.findall('package'):
387 if pac == node.get('name'):
391 def del_package_node(self, pac):
392 for node in self.pac_root.findall('package'):
393 if pac == node.get('name'):
394 self.pac_root.remove(node)
396 def get_state(self, pac):
397 node = self.get_package_node(pac)
399 return node.get('state')
403 def new_package_entry(self, name, state):
404 ET.SubElement(self.pac_root, 'package', name=name, state=state)
406 def read_packages(self):
407 packages_file = os.path.join(self.absdir, store, '_packages')
408 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
409 return ET.parse(packages_file)
411 # scan project for existing packages and migrate them
413 for data in os.listdir(self.dir):
414 pac_dir = os.path.join(self.absdir, data)
415 # we cannot use self.pacs_available because we cannot guarantee that the package list
416 # was fetched from the server
417 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
418 and Package(pac_dir).name == data:
419 cur_pacs.append(ET.Element('package', name=data, state=' '))
420 store_write_initial_packages(self.absdir, self.name, cur_pacs)
421 return ET.parse(os.path.join(self.absdir, store, '_packages'))
423 def write_packages(self):
424 # TODO: should we only modify the existing file instead of overwriting?
425 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
427 def addPackage(self, pac):
429 for i in conf.config['exclude_glob']:
430 if fnmatch.fnmatch(pac, i):
431 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
432 raise oscerr.OscIOError(None, msg)
433 state = self.get_state(pac)
if state is None or state == 'D':
435 self.new_package_entry(pac, 'A')
436 self.write_packages()
437 # sometimes the new pac doesn't exist in the list because
438 # it would take too much time to update all data structs regularly
439 if pac in self.pacs_unvers:
440 self.pacs_unvers.remove(pac)
442 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
444 def delPackage(self, pac, force = False):
445 state = self.get_state(pac.name)
447 if state == ' ' or state == 'D':
449 for file in pac.filenamelist + pac.filenamelist_unvers:
450 filestate = pac.status(file)
451 if filestate == 'M' or filestate == 'C' or \
452 filestate == 'A' or filestate == '?':
455 del_files.append(file)
456 if can_delete or force:
457 for file in del_files:
458 pac.delete_localfile(file)
459 if pac.status(file) != '?':
460 pac.delete_storefile(file)
461 # this is not really necessary
462 pac.put_on_deletelist(file)
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
464 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
465 pac.write_deletelist()
466 self.set_state(pac.name, 'D')
467 self.write_packages()
469 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
472 delete_dir(pac.absdir)
473 self.del_package_node(pac.name)
474 self.write_packages()
475 print statfrmt('D', pac.name)
477 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
479 print 'package is not under version control'
481 print 'unsupported state'
483 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
488 # we need to make sure that the _packages file will be written (even if an exception
491 # update complete project
# packages which no longer exist upstream
493 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
495 for pac in upstream_del:
496 p = Package(os.path.join(self.dir, pac))
497 self.delPackage(p, force = True)
498 delete_storedir(p.storedir)
503 self.pac_root.remove(self.get_package_node(p.name))
504 self.pacs_have.remove(pac)
506 for pac in self.pacs_have:
507 state = self.get_state(pac)
508 if pac in self.pacs_broken:
509 if self.get_state(pac) != 'A':
510 checkout_package(self.apiurl, self.name, pac,
511 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
512 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
515 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
517 if expand_link and p.islink() and not p.isexpanded():
520 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
522 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
525 rev = p.linkinfo.xsrcmd5
526 print 'Expanding to rev', rev
527 elif unexpand_link and p.islink() and p.isexpanded():
528 rev = p.linkinfo.lsrcmd5
529 print 'Unexpanding to rev', rev
530 elif p.islink() and p.isexpanded():
532 print 'Updating %s' % p.name
533 p.update(rev, service_files)
# TODO: Package::update has to be fixed to behave like svn does
538 if pac in self.pacs_broken:
539 checkout_package(self.apiurl, self.name, pac,
540 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
541 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
544 elif state == 'A' and pac in self.pacs_available:
545 # file/dir called pac already exists and is under version control
546 msg = 'can\'t add package \'%s\': Object already exists' % pac
547 raise oscerr.PackageExists(self.name, pac, msg)
552 print 'unexpected state.. package \'%s\'' % pac
554 self.checkout_missing_pacs(expand_link=not unexpand_link)
556 self.write_packages()
558 def commit(self, pacs = (), msg = '', files = {}):
if pac in files:
565 state = self.get_state(pac)
567 self.commitNewPackage(pac, msg, todo)
569 self.commitDelPackage(pac)
571 # display the correct dir when sending the changes
572 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
575 p = Package(os.path.join(self.dir, pac))
578 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
579 print 'osc: \'%s\' is not under version control' % pac
580 elif pac in self.pacs_broken:
581 print 'osc: \'%s\' package not found' % pac
583 self.commitExtPackage(pac, msg, todo)
585 self.write_packages()
587 # if we have packages marked as '!' we cannot commit
588 for pac in self.pacs_broken:
589 if self.get_state(pac) != 'D':
590 msg = 'commit failed: package \'%s\' is missing' % pac
591 raise oscerr.PackageMissing(self.name, pac, msg)
593 for pac in self.pacs_have:
594 state = self.get_state(pac)
597 Package(os.path.join(self.dir, pac)).commit(msg)
599 self.commitDelPackage(pac)
601 self.commitNewPackage(pac, msg)
603 self.write_packages()
605 def commitNewPackage(self, pac, msg = '', files = []):
606 """creates and commits a new package if it does not exist on the server"""
607 if pac in self.pacs_available:
608 print 'package \'%s\' already exists' % pac
610 user = conf.get_apiurl_usr(self.apiurl)
611 edit_meta(metatype='pkg',
612 path_args=(quote_plus(self.name), quote_plus(pac)),
617 # display the correct dir when sending the changes
619 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
623 p = Package(os.path.join(self.dir, pac))
625 print statfrmt('Sending', os.path.normpath(p.dir))
627 self.set_state(pac, ' ')
630 def commitDelPackage(self, pac):
631 """deletes a package on the server and in the working copy"""
633 # display the correct dir when sending the changes
634 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
637 pac_dir = os.path.join(self.dir, pac)
638 p = Package(os.path.join(self.dir, pac))
639 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
640 delete_storedir(p.storedir)
646 pac_dir = os.path.join(self.dir, pac)
647 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
648 print statfrmt('Deleting', getTransActPath(pac_dir))
649 delete_package(self.apiurl, self.name, pac)
650 self.del_package_node(pac)
652 def commitExtPackage(self, pac, msg, files = []):
653 """commits a package from an external project"""
654 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
657 pac_path = os.path.join(self.dir, pac)
659 project = store_read_project(pac_path)
660 package = store_read_package(pac_path)
661 apiurl = store_read_apiurl(pac_path)
662 if meta_exists(metatype='pkg',
663 path_args=(quote_plus(project), quote_plus(package)),
665 create_new=False, apiurl=apiurl):
666 p = Package(pac_path)
670 user = conf.get_apiurl_usr(self.apiurl)
671 edit_meta(metatype='pkg',
672 path_args=(quote_plus(project), quote_plus(package)),
677 p = Package(pac_path)
683 r.append('*****************************************************')
684 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
685 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
686 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
687 r.append('*****************************************************')
693 """represent a package (its directory) and read/keep/write its metadata"""
694 def __init__(self, workingdir, progress_obj=None, limit_size=None):
695 self.dir = workingdir
696 self.absdir = os.path.abspath(self.dir)
697 self.storedir = os.path.join(self.absdir, store)
698 self.progress_obj = progress_obj
699 self.limit_size = limit_size
if limit_size == 0:
701 self.limit_size = None
703 check_store_version(self.dir)
705 self.prjname = store_read_project(self.dir)
706 self.name = store_read_package(self.dir)
707 self.apiurl = store_read_apiurl(self.dir)
709 self.update_datastructs()
713 self.todo_delete = []
716 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
717 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
720 def addfile(self, n):
721 st = os.stat(os.path.join(self.dir, n))
722 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
724 def delete_file(self, n, force=False):
725 """deletes a file if possible and marks the file as deleted"""
728 state = self.status(n)
732 if state in ['?', 'A', 'M'] and not force:
733 return (False, state)
734 self.delete_localfile(n)
736 self.put_on_deletelist(n)
737 self.write_deletelist()
739 self.delete_storefile(n)
742 def delete_storefile(self, n):
743 try: os.unlink(os.path.join(self.storedir, n))
746 def delete_localfile(self, n):
747 try: os.unlink(os.path.join(self.dir, n))
750 def put_on_deletelist(self, n):
751 if n not in self.to_be_deleted:
752 self.to_be_deleted.append(n)
754 def put_on_conflictlist(self, n):
755 if n not in self.in_conflict:
756 self.in_conflict.append(n)
758 def clear_from_conflictlist(self, n):
759 """delete an entry from the file, and remove the file if it would be empty"""
760 if n in self.in_conflict:
762 filename = os.path.join(self.dir, n)
763 storefilename = os.path.join(self.storedir, n)
764 myfilename = os.path.join(self.dir, n + '.mine')
765 if self.islinkrepair() or self.ispulled():
766 upfilename = os.path.join(self.dir, n + '.new')
768 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
771 os.unlink(myfilename)
772 # the working copy may be updated, so the .r* ending may be obsolete...
774 os.unlink(upfilename)
775 if self.islinkrepair() or self.ispulled():
776 os.unlink(os.path.join(self.dir, n + '.old'))
780 self.in_conflict.remove(n)
782 self.write_conflictlist()
784 def write_sizelimit(self):
785 if self.size_limit and self.size_limit <= 0:
787 os.unlink(os.path.join(self.storedir, '_size_limit'))
791 fname = os.path.join(self.storedir, '_size_limit')
793 f.write(str(self.size_limit))
796 def write_deletelist(self):
797 if len(self.to_be_deleted) == 0:
799 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
803 fname = os.path.join(self.storedir, '_to_be_deleted')
805 f.write('\n'.join(self.to_be_deleted))
def delete_source_file(self, n):
"""delete a local source file"""
811 self.delete_localfile(n)
812 self.delete_storefile(n)
def delete_remote_source_file(self, n):
"""delete a remote source file (i.e. from the server)"""
817 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
820 def put_source_file(self, n):
822 # escaping '+' in the URL path (note: not in the URL query string) is
823 # only a workaround for ruby on rails, which swallows it otherwise
825 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
826 http_PUT(u, file = os.path.join(self.dir, n))
828 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
830 def commit(self, msg=''):
831 # commit only if the upstream revision is the same as the working copy's
832 upstream_rev = self.latest_rev()
833 if self.rev != upstream_rev:
834 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
837 self.todo = self.filenamelist_unvers + self.filenamelist
839 pathn = getTransActPath(self.dir)
841 have_conflicts = False
842 for filename in self.todo:
843 if not filename.startswith('_service:') and not filename.startswith('_service_'):
844 st = self.status(filename)
846 self.todo.remove(filename)
847 elif st == 'A' or st == 'M':
848 self.todo_send.append(filename)
849 print statfrmt('Sending', os.path.join(pathn, filename))
851 self.todo_delete.append(filename)
852 print statfrmt('Deleting', os.path.join(pathn, filename))
854 have_conflicts = True
857 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
860 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
861 print 'nothing to do for package %s' % self.name
864 if self.islink() and self.isexpanded():
865 # resolve the link into the upload revision
866 # XXX: do this always?
867 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
868 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
871 print 'Transmitting file data ',
873 for filename in self.todo_delete:
874 # do not touch local files on commit --
875 # delete remotely instead
876 self.delete_remote_source_file(filename)
877 self.to_be_deleted.remove(filename)
878 for filename in self.todo_send:
879 sys.stdout.write('.')
881 self.put_source_file(filename)
883 # all source files are committed - now comes the log
884 query = { 'cmd' : 'commit',
886 'user' : conf.get_apiurl_usr(self.apiurl),
888 if self.islink() and self.isexpanded():
889 query['keeplink'] = '1'
890 if conf.config['linkcontrol'] or self.isfrozen():
891 query['linkrev'] = self.linkinfo.srcmd5
893 query['repairlink'] = '1'
894 query['linkrev'] = self.get_pulled_srcmd5()
895 if self.islinkrepair():
896 query['repairlink'] = '1'
897 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
900 # delete upload revision
902 query = { 'cmd': 'deleteuploadrev' }
903 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
909 root = ET.parse(f).getroot()
910 self.rev = int(root.get('rev'))
912 print 'Committed revision %s.' % self.rev
915 os.unlink(os.path.join(self.storedir, '_pulled'))
916 if self.islinkrepair():
917 os.unlink(os.path.join(self.storedir, '_linkrepair'))
918 self.linkrepair = False
919 # XXX: mark package as invalid?
920 print 'The source link has been repaired. This directory can now be removed.'
921 if self.islink() and self.isexpanded():
922 self.update_local_filesmeta(revision=self.latest_rev())
924 self.update_local_filesmeta()
925 self.write_deletelist()
926 self.update_datastructs()
928 if self.filenamelist.count('_service'):
929 print 'The package contains a source service.'
930 for filename in self.todo:
931 if filename.startswith('_service:') and os.path.exists(filename):
932 os.unlink(filename) # remove local files
933 print_request_list(self.apiurl, self.prjname, self.name)
935 def write_conflictlist(self):
936 if len(self.in_conflict) == 0:
938 os.unlink(os.path.join(self.storedir, '_in_conflict'))
942 fname = os.path.join(self.storedir, '_in_conflict')
944 f.write('\n'.join(self.in_conflict))
948 def updatefile(self, n, revision):
949 filename = os.path.join(self.dir, n)
950 storefilename = os.path.join(self.storedir, n)
951 mtime = self.findfilebyname(n).mtime
953 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
954 os.utime(filename, (-1, mtime))
956 shutil.copyfile(filename, storefilename)
958 def mergefile(self, n):
959 filename = os.path.join(self.dir, n)
960 storefilename = os.path.join(self.storedir, n)
961 myfilename = os.path.join(self.dir, n + '.mine')
962 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
963 os.rename(filename, myfilename)
965 mtime = self.findfilebyname(n).mtime
966 get_source_file(self.apiurl, self.prjname, self.name, n,
967 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
968 os.utime(upfilename, (-1, mtime))
970 if binary_file(myfilename) or binary_file(upfilename):
972 shutil.copyfile(upfilename, filename)
973 shutil.copyfile(upfilename, storefilename)
974 self.in_conflict.append(n)
975 self.write_conflictlist()
979 # diff3 OPTIONS... MINE OLDER YOURS
980 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
# we would rather use the subprocess module, but it is not available before 2.4
982 ret = subprocess.call(merge_cmd, shell=True)
984 # "An exit status of 0 means `diff3' was successful, 1 means some
985 # conflicts were found, and 2 means trouble."
987 # merge was successful... clean up
988 shutil.copyfile(upfilename, storefilename)
989 os.unlink(upfilename)
990 os.unlink(myfilename)
994 shutil.copyfile(upfilename, storefilename)
995 self.in_conflict.append(n)
996 self.write_conflictlist()
999 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1000 print >>sys.stderr, 'the command line was:'
1001 print >>sys.stderr, merge_cmd
1006 def update_local_filesmeta(self, revision=None):
1008 Update the local _files file in the store.
1009 It is replaced with the version pulled from upstream.
1011 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
1012 store_write_string(self.absdir, '_files', meta)
1014 def update_datastructs(self):
1016 Update the internal data structures if the local _files
1017 file has changed (e.g. update_local_filesmeta() has been
1021 files_tree = read_filemeta(self.dir)
1022 files_tree_root = files_tree.getroot()
1024 self.rev = files_tree_root.get('rev')
1025 self.srcmd5 = files_tree_root.get('srcmd5')
1027 self.linkinfo = Linkinfo()
1028 self.linkinfo.read(files_tree_root.find('linkinfo'))
1030 self.filenamelist = []
1033 for node in files_tree_root.findall('entry'):
1035 f = File(node.get('name'),
1037 int(node.get('size')),
1038 int(node.get('mtime')))
1039 if node.get('skipped'):
1040 self.skipped.append(f.name)
1042 # okay, a very old version of _files, which didn't contain any metadata yet...
1043 f = File(node.get('name'), '', 0, 0)
1044 self.filelist.append(f)
1045 self.filenamelist.append(f.name)
1047 self.to_be_deleted = read_tobedeleted(self.dir)
1048 self.in_conflict = read_inconflict(self.dir)
1049 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1050 self.size_limit = read_sizelimit(self.dir)
1052 # gather unversioned files, but ignore some stuff
1053 self.excluded = [ i for i in os.listdir(self.dir)
1054 for j in conf.config['exclude_glob']
1055 if fnmatch.fnmatch(i, j) ]
1056 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1057 if i not in self.excluded
1058 if i not in self.filenamelist ]
1061 """tells us if the package is a link (has 'linkinfo').
1062 A package with linkinfo is a package which links to another package.
1063 Returns True if the package is a link, otherwise False."""
1064 return self.linkinfo.islink()
1066 def isexpanded(self):
1067 """tells us if the package is a link which is expanded.
1068 Returns True if the package is expanded, otherwise False."""
1069 return self.linkinfo.isexpanded()
1071 def islinkrepair(self):
1072 """tells us if we are repairing a broken source link."""
1073 return self.linkrepair
1076 """tells us if we have pulled a link."""
1077 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1080 """tells us if the link is frozen."""
1081 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1083 def get_pulled_srcmd5(self):
1085 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1086 pulledrev = line.strip()
1089 def haslinkerror(self):
1091 Returns True if the link is broken otherwise False.
1092 If the package is not a link it returns False.
1094 return self.linkinfo.haserror()
1096 def linkerror(self):
1098 Returns an error message if the link is broken otherwise None.
1099 If the package is not a link it returns None.
1101 return self.linkinfo.error
1103 def update_local_pacmeta(self):
1105 Update the local _meta file in the store.
1106 It is replaced with the version pulled from upstream.
1108 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1109 store_write_string(self.absdir, '_meta', meta)
1111 def findfilebyname(self, n):
1112 for i in self.filelist:
1116 def status(self, n):
1120 file storefile file present STATUS
1121 exists exists in _files
1124 x x x ' ' if digest differs: 'M'
1125 and if in conflicts file: 'C'
1127 x - x 'D' and listed in _to_be_deleted
1129 - x - 'D' (when file in working copy is already deleted)
1130 - - x 'F' (new in repo, but not yet in working copy)
1135 known_by_meta = False
1137 exists_in_store = False
1138 if n in self.filenamelist:
1139 known_by_meta = True
1140 if os.path.exists(os.path.join(self.absdir, n)):
1142 if os.path.exists(os.path.join(self.storedir, n)):
1143 exists_in_store = True
1146 if n in self.skipped:
1148 elif exists and not exists_in_store and known_by_meta:
1150 elif n in self.to_be_deleted:
1152 elif n in self.in_conflict:
1154 elif exists and exists_in_store and known_by_meta:
1155 #print self.findfilebyname(n)
1156 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1160 elif exists and not exists_in_store and not known_by_meta:
1162 elif exists and exists_in_store and not known_by_meta:
1164 elif not exists and exists_in_store and known_by_meta:
1166 elif not exists and not exists_in_store and known_by_meta:
1168 elif not exists and exists_in_store and not known_by_meta:
1170 elif not exists and not exists_in_store and not known_by_meta:
# this case shouldn't happen (unless there was a typo in the filename etc.)
1172 raise IOError('osc: \'%s\' is not under version control' % n)
1176 def comparePac(self, cmp_pac):
1178 This method compares the local filelist with
1179 the filelist of the passed package to see which files
1180 were added, removed and changed.
1187 for file in self.filenamelist+self.filenamelist_unvers:
1188 state = self.status(file)
1189 if file in self.skipped:
1191 if state == 'A' and (not file in cmp_pac.filenamelist):
1192 added_files.append(file)
1193 elif file in cmp_pac.filenamelist and state == 'D':
1194 removed_files.append(file)
1195 elif state == ' ' and not file in cmp_pac.filenamelist:
1196 added_files.append(file)
1197 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1198 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1199 changed_files.append(file)
1200 for file in cmp_pac.filenamelist:
1201 if not file in self.filenamelist:
1202 removed_files.append(file)
1203 removed_files = set(removed_files)
1205 return changed_files, added_files, removed_files
1207 def merge(self, otherpac):
1208 self.todo += otherpac.todo
1222 '\n '.join(self.filenamelist),
1230 def read_meta_from_spec(self, spec = None):
1235 # scan for spec files
1236 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1237 if len(speclist) == 1:
1238 specfile = speclist[0]
1239 elif len(speclist) > 1:
1240 print 'the following specfiles were found:'
1241 for file in speclist:
1243 print 'please specify one with --specfile'
1246 print 'no specfile was found - please specify one ' \
1250 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1251 self.summary = data['Summary']
1252 self.url = data['Url']
1253 self.descr = data['%description']
1256 def update_package_meta(self, force=False):
1258 for the updatepacmetafromspec subcommand
the force argument suppresses the confirmation question
1262 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1264 root = ET.fromstring(m)
1265 root.find('title').text = self.summary
1266 root.find('description').text = ''.join(self.descr)
1267 url = root.find('url')
1269 url = ET.SubElement(root, 'url')
1272 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1273 mf = metafile(u, ET.tostring(root))
1276 print '*' * 36, 'old', '*' * 36
1278 print '*' * 36, 'new', '*' * 36
1279 print ET.tostring(root)
1281 repl = raw_input('Write? (y/N/e) ')
1292 def mark_frozen(self):
1293 store_write_string(self.absdir, '_frozenlink', '')
1295 print "The link in this package is currently broken. Checking"
1296 print "out the last working version instead; please use 'osc pull'"
1297 print "to repair the link."
1300 def unmark_frozen(self):
1301 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1302 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1304 def latest_rev(self):
1305 if self.islinkrepair():
1306 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1307 elif self.islink() and self.isexpanded():
1308 if self.isfrozen() or self.ispulled():
1309 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1312 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1315 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1317 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1320 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1323 def update(self, rev = None, service_files = False, limit_size = None):
1324 # save filelist and (modified) status before replacing the meta file
1325 saved_filenames = self.filenamelist
1326 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1330 self.limit_size = limit_size
1332 self.limit_size = read_sizelimit(self.dir)
1333 self.update_local_filesmeta(rev)
1334 self = Package(self.dir, progress_obj=self.progress_obj)
# which files no longer exist upstream?
1337 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1339 pathn = getTransActPath(self.dir)
1341 for filename in saved_filenames:
1342 if filename in self.skipped:
1344 if not filename.startswith('_service:') and filename in disappeared:
1345 print statfrmt('D', os.path.join(pathn, filename))
1346 # keep file if it has local modifications
1347 if oldp.status(filename) == ' ':
1348 self.delete_localfile(filename)
1349 self.delete_storefile(filename)
1351 for filename in self.filenamelist:
1352 if filename in self.skipped:
1355 state = self.status(filename)
1356 if not service_files and filename.startswith('_service:'):
1358 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1359 # no merge necessary... local file is changed, but upstream isn't
1361 elif state == 'M' and filename in saved_modifiedfiles:
1362 status_after_merge = self.mergefile(filename)
1363 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1365 self.updatefile(filename, rev)
1366 print statfrmt('U', os.path.join(pathn, filename))
1368 self.updatefile(filename, rev)
1369 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1371 self.updatefile(filename, rev)
1372 print statfrmt('A', os.path.join(pathn, filename))
1373 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1374 self.updatefile(filename, rev)
1375 self.delete_storefile(filename)
1376 print statfrmt('U', os.path.join(pathn, filename))
1380 self.update_local_pacmeta()
1382 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1383 print 'At revision %s.' % self.rev
1385 if not service_files:
1386 self.run_source_services()
1388 def run_source_services(self):
1389 if self.filenamelist.count('_service'):
1390 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1393 si.execute(self.absdir)
1395 def prepare_filelist(self):
1396 """Prepare a list of files, which will be processed by process_filelist
1397 method. This allows easy modifications of a file list in commit
1401 self.todo = self.filenamelist + self.filenamelist_unvers
1405 for f in [f for f in self.todo if not os.path.isdir(f)]:
1407 status = self.status(f)
1412 ret += "%s %s %s\n" % (action, status, f)
1415 # Edit a filelist for package \'%s\'
1417 # l, leave = leave a file as is
1418 # r, remove = remove a file
1419 # a, add = add a file
# If you remove a file from the list, it will be unchanged
# If you remove all files, the commit will be aborted""" % self.name
def edit_filelist(self):
"""Opens the package's file list in an editor for editing. This allows easy
1428 modifications of it just by simple text editing
1432 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1433 f = os.fdopen(fd, 'w')
1434 f.write(self.prepare_filelist())
1436 mtime_orig = os.stat(filename).st_mtime
1439 run_editor(filename)
1440 mtime = os.stat(filename).st_mtime
1441 if mtime_orig < mtime:
1442 filelist = open(filename).readlines()
1446 raise oscerr.UserAbort()
1448 return self.process_filelist(filelist)
def process_filelist(self, filelist):
"""Process a filelist - it adds/removes or leaves files. This depends on
user input. If no file is processed, it raises a ValueError
for line in [l.strip() for l in filelist if (l.strip() != '' and l[0] != "#")]:
1458 foo = line.split(' ')
1460 action, state, name = (foo[0], ' ', foo[3])
1462 action, state, name = (foo[0], foo[1], foo[2])
1465 action = action.lower()
1468 if action in ('r', 'remove'):
1469 if self.status(name) == '?':
1471 if name in self.todo:
1472 self.todo.remove(name)
1474 self.delete_file(name, True)
1475 elif action in ('a', 'add'):
1476 if self.status(name) != '?':
1477 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1480 elif action in ('l', 'leave'):
raise ValueError("Unknown action `%s'" % action)
1486 raise ValueError("Empty filelist")
1489 """for objects to represent the review state in a request"""
1490 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1492 self.by_user = by_user
1493 self.by_group = by_group
1496 self.comment = comment
1499 """for objects to represent the "state" of a request"""
1500 def __init__(self, name=None, who=None, when=None, comment=None):
1504 self.comment = comment
1507 """represents an action"""
1508 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1510 self.src_project = src_project
1511 self.src_package = src_package
1512 self.src_rev = src_rev
1513 self.dst_project = dst_project
1514 self.dst_package = dst_package
self.src_update = src_update
"""represents a request and holds its metadata
1519 it has methods to read in metadata from xml,
1520 different views, ..."""
1523 self.state = RequestState()
1526 self.last_author = None
1529 self.statehistory = []
1532 def read(self, root):
1533 self.reqid = int(root.get('id'))
1534 actions = root.findall('action')
1535 if len(actions) == 0:
1536 actions = [ root.find('submit') ] # for old style requests
1538 for action in actions:
1539 type = action.get('type', 'submit')
1541 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1542 if action.findall('source'):
1543 n = action.find('source')
1544 src_prj = n.get('project', None)
1545 src_pkg = n.get('package', None)
1546 src_rev = n.get('rev', None)
1547 if action.findall('target'):
1548 n = action.find('target')
1549 dst_prj = n.get('project', None)
1550 dst_pkg = n.get('package', None)
1551 if action.findall('options'):
1552 n = action.find('options')
1553 if n.findall('sourceupdate'):
1554 src_update = n.find('sourceupdate').text.strip()
1555 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1557 msg = 'invalid request format:\n%s' % ET.tostring(root)
1558 raise oscerr.APIError(msg)
1561 n = root.find('state')
1562 self.state.name, self.state.who, self.state.when \
1563 = n.get('name'), n.get('who'), n.get('when')
1565 self.state.comment = n.find('comment').text.strip()
1567 self.state.comment = None
1569 # read the review states
1570 for r in root.findall('review'):
1572 s.state = r.get('state')
1573 s.by_user = r.get('by_user')
1574 s.by_group = r.get('by_group')
1575 s.who = r.get('who')
1576 s.when = r.get('when')
1578 s.comment = r.find('comment').text.strip()
1581 self.reviews.append(s)
1583 # read the state history
1584 for h in root.findall('history'):
1586 s.name = h.get('name')
1587 s.who = h.get('who')
1588 s.when = h.get('when')
1590 s.comment = h.find('comment').text.strip()
1593 self.statehistory.append(s)
1594 self.statehistory.reverse()
1596 # read a description, if it exists
1598 n = root.find('description').text
1603 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1604 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1605 dst_prj, dst_pkg, src_update)
1608 def list_view(self):
1609 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1611 for a in self.actions:
1612 dst = "%s/%s" % (a.dst_project, a.dst_package)
1613 if a.src_package == a.dst_package:
1617 if a.type=="submit":
1618 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1619 if a.type=="change_devel":
1620 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1621 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1623 ret += '\n %s: %-50s %-20s ' % \
1624 (a.type, sr_source, dst)
1626 if self.statehistory and self.statehistory[0]:
1628 for h in self.statehistory:
1629 who.append("%s(%s)" % (h.who,h.name))
1631 ret += "\n From: %s" % (' -> '.join(who))
txt = re.sub(r'[^\x20-\x7e\t\r\n]', '_', self.descr) # mask non-printable chars (re has no POSIX [:isprint:])
1635 lines = txt.splitlines()
1636 wrapper = textwrap.TextWrapper( width = 80,
1637 initial_indent=' Descr: ',
1638 subsequent_indent=' ')
1639 ret += "\n" + wrapper.fill(lines[0])
1640 wrapper.initial_indent = ' '
1641 for line in lines[1:]:
1642 ret += "\n" + wrapper.fill(line)
1648 def __cmp__(self, other):
1649 return cmp(self.reqid, other.reqid)
1653 for action in self.actions:
1654 action_list=" %s: " % (action.type)
1655 if action.type=="submit":
1658 r="(r%s)" % (action.src_rev)
1660 if action.src_update:
1661 m="(%s)" % (action.src_update)
1662 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1663 if action.dst_package:
1664 action_list=action_list+"/%s" % ( action.dst_package )
1665 elif action.type=="delete":
1666 action_list=action_list+" %s" % ( action.dst_project )
1667 if action.dst_package:
1668 action_list=action_list+"/%s" % ( action.dst_package )
1669 elif action.type=="change_devel":
1670 action_list=action_list+" %s/%s developed in %s/%s" % \
1671 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1672 action_list=action_list+"\n"
1687 self.state.name, self.state.when, self.state.who,
1690 if len(self.reviews):
1691 reviewitems = [ '%-10s %s %s %s %s %s' \
1692 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1693 for i in self.reviews ]
1694 s += '\nReview: ' + '\n '.join(reviewitems)
1697 if len(self.statehistory):
1698 histitems = [ '%-10s %s %s' \
1699 % (i.name, i.when, i.who) \
1700 for i in self.statehistory ]
1701 s += '\nHistory: ' + '\n '.join(histitems)
1708 """format time as Apr 02 18:19
1710 depending on whether it is in the current year
1714 if time.localtime()[0] == time.localtime(t)[0]:
1716 return time.strftime('%b %d %H:%M',time.localtime(t))
1718 return time.strftime('%b %d %Y',time.localtime(t))
1721 def is_project_dir(d):
1722 return os.path.exists(os.path.join(d, store, '_project')) and not \
1723 os.path.exists(os.path.join(d, store, '_package'))
1726 def is_package_dir(d):
1727 return os.path.exists(os.path.join(d, store, '_project')) and \
1728 os.path.exists(os.path.join(d, store, '_package'))
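# Sketch of the working copy layout that the two checks above rely on (store
# is the metadata subdirectory, i.e. '.osc'); the directory names are made up:
#   prj/.osc/_project                     -> is_project_dir('prj') is True
#   prj/pkg/.osc/_project and _package    -> is_package_dir('prj/pkg') is True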
1730 def parse_disturl(disturl):
1731 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1732 revision), else raises an oscerr.WrongArgs exception
1735 m = DISTURL_RE.match(disturl)
raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
1739 apiurl = m.group('apiurl')
1740 if apiurl.split('.')[0] != 'api':
1741 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1742 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
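# Illustration of what DISTURL_RE accepts (the values are made up); a disturl
# of the form <bs>://<host>/<project>/<repository>/<revision>-<source> is
# mapped to the api host, e.g.
#   parse_disturl('obs://build.opensuse.org/openSUSE:Factory/standard/abc123-osc')
#   -> ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard', 'abc123')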
1744 def parse_buildlogurl(buildlogurl):
1745 """Parse a build log url, returns a tuple (apiurl, project, package,
1746 repository, arch), else raises oscerr.WrongArgs exception"""
1748 global BUILDLOGURL_RE
1750 m = BUILDLOGURL_RE.match(buildlogurl)
1752 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1754 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
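# Illustration of what BUILDLOGURL_RE accepts (the values are made up):
#   parse_buildlogurl('https://api.opensuse.org/build/home:user/openSUSE_11.2/x86_64/foo/_log')
#   -> ('https://api.opensuse.org', 'home:user', 'foo', 'openSUSE_11.2', 'x86_64')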
1757 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
This is handy, as it allows copy/pasting a project/package combination in this form.
1760 Trailing slashes are removed before the split, because the split would
1761 otherwise give an additional empty string.
def expand_proj_pack(args, idx=0, howmany=0):
"""looks for an occurrence of '.' at the position idx.
1771 If howmany is 2, both proj and pack are expanded together
1772 using the current directory, or none of them, if not possible.
1773 If howmany is 0, proj is expanded if possible, then, if there
1774 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1775 expanded, if possible.
1776 If howmany is 1, only proj is expanded if possible.
If args[idx] does not exist, an implicit '.' is assumed.
If not enough elements up to idx exist, an error is raised.
1781 See also parseargs(args), slash_split(args), findpacs(args)
1782 All these need unification, somehow.
1785 # print args,idx,howmany
raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1790 if len(args) == idx:
1792 if args[idx+0] == '.':
1793 if howmany == 0 and len(args) > idx+1:
1794 if args[idx+1] == '.':
1796 # remove one dot and make sure to expand both proj and pack
1801 # print args,idx,howmany
1803 args[idx+0] = store_read_project('.')
1806 package = store_read_package('.')
1807 args.insert(idx+1, package)
1811 package = store_read_package('.')
1812 args.insert(idx+1, package)
1816 def findpacs(files, progress_obj=None):
1817 """collect Package objects belonging to the given files
1818 and make sure each Package is returned only once"""
1821 p = filedir_to_pac(f, progress_obj)
1824 if i.name == p.name:
1834 def filedir_to_pac(f, progress_obj=None):
1835 """Takes a working copy path, or a path to a file inside a working copy,
1836 and returns a Package object instance
1838 If the argument was a filename, add it onto the "todo" list of the Package """
1840 if os.path.isdir(f):
1842 p = Package(wd, progress_obj=progress_obj)
1844 wd = os.path.dirname(f) or os.curdir
1845 p = Package(wd, progress_obj=progress_obj)
1846 p.todo = [ os.path.basename(f) ]
1850 def read_filemeta(dir):
1852 r = ET.parse(os.path.join(dir, store, '_files'))
1853 except SyntaxError, e:
1854 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1855 'When parsing .osc/_files, the following error was encountered:\n'
1860 def read_tobedeleted(dir):
1862 fname = os.path.join(dir, store, '_to_be_deleted')
1864 if os.path.exists(fname):
1865 r = [ line.strip() for line in open(fname) ]
1870 def read_sizelimit(dir):
1872 fname = os.path.join(dir, store, '_size_limit')
1874 if os.path.exists(fname):
1875 r = open(fname).readline()
1877 if r is None or not r.isdigit():
1881 def read_inconflict(dir):
1883 fname = os.path.join(dir, store, '_in_conflict')
1885 if os.path.exists(fname):
1886 r = [ line.strip() for line in open(fname) ]
def parseargs(list_of_args):
"""Convenience method for osc's commandline argument parsing.
1894 If called with an empty tuple (or list), return a list containing the current directory.
1895 Otherwise, return a list of the arguments."""
1897 return list(list_of_args)
1902 def statfrmt(statusletter, filename):
1903 return '%s %s' % (statusletter, filename)
1906 def pathjoin(a, *p):
1907 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1908 path = os.path.join(a, *p)
1909 if path.startswith('./'):
def makeurl(baseurl, l, query=[]):
"""Given a list of path components, construct a complete URL.
1917 Optional parameters for a query string can be given as a list, as a
1918 dictionary, or as an already assembled string.
1919 In case of a dictionary, the parameters will be urlencoded by this
function. In case of a list they will not be -- this is for backwards compatibility.
1923 if conf.config['verbose'] > 1:
1924 print 'makeurl:', baseurl, l, query
1926 if type(query) == type(list()):
1927 query = '&'.join(query)
1928 elif type(query) == type(dict()):
1929 query = urlencode(query)
1931 scheme, netloc = urlsplit(baseurl)[0:2]
1932 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
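# A minimal usage sketch (host name and values are made up); a dict query is
# urlencoded, a list query is joined verbatim with '&':
#   makeurl('https://api.example.org', ['source', 'prj', 'pkg'], query={'rev': '7'})
#   -> 'https://api.example.org/source/prj/pkg?rev=7'
#   makeurl('https://api.example.org', ['source', 'prj'], query=['deleted=1'])
#   -> 'https://api.example.org/source/prj?deleted=1'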
1935 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1936 """wrapper around urllib2.urlopen for error handling,
1937 and to support additional (PUT, DELETE) methods"""
1941 if conf.config['http_debug']:
1944 print '--', method, url
1946 if method == 'POST' and not file and not data:
1947 # adding data to an urllib2 request transforms it into a POST
1950 req = urllib2.Request(url)
1951 api_host_options = {}
1953 api_host_options = conf.get_apiurl_api_host_options(url)
1954 for header, value in api_host_options['http_headers']:
1955 req.add_header(header, value)
1957 # "external" request (url is no apiurl)
1960 req.get_method = lambda: method
# POST requests are application/x-www-form-urlencoded by default;
# since we change the request into a PUT, we also need to adjust the content type header
1964 if method == 'PUT' or (method == 'POST' and data):
1965 req.add_header('Content-Type', 'application/octet-stream')
1967 if type(headers) == type({}):
1968 for i in headers.keys():
1970 req.add_header(i, headers[i])
1972 if file and not data:
1973 size = os.path.getsize(file)
1975 data = open(file, 'rb').read()
1978 filefd = open(file, 'rb')
1980 if sys.platform[:3] != 'win':
1981 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1983 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1985 except EnvironmentError, e:
1987 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1988 '\non a filesystem which does not support this.' % (e, file))
1989 elif hasattr(e, 'winerror') and e.winerror == 5:
1990 # falling back to the default io
1991 data = open(file, 'rb').read()
1995 if conf.config['debug']: print method, url
1997 old_timeout = socket.getdefaulttimeout()
1998 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1999 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2000 socket.setdefaulttimeout(timeout)
2002 fd = urllib2.urlopen(req, data=data)
2004 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2005 socket.setdefaulttimeout(old_timeout)
2006 if hasattr(conf.cookiejar, 'save'):
2007 conf.cookiejar.save(ignore_discard=True)
2009 if filefd: filefd.close()
2014 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2015 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2016 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2017 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
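# Typical usage of these wrappers within this module (compare
# meta_get_packagelist below); the variable names are only illustrative:
#   u = makeurl(apiurl, ['source', prj])
#   f = http_GET(u)
#   root = ET.parse(f).getroot()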
2020 def init_project_dir(apiurl, dir, project):
2021 if not os.path.exists(dir):
2022 if conf.config['checkout_no_colon']:
2023 os.makedirs(dir) # helpful with checkout_no_colon
2026 if not os.path.exists(os.path.join(dir, store)):
2027 os.mkdir(os.path.join(dir, store))
2029 # print 'project=',project,' dir=',dir
2030 store_write_project(dir, project)
2031 store_write_apiurl(dir, apiurl)
2032 if conf.config['do_package_tracking']:
2033 store_write_initial_packages(dir, project, [])
2035 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
2036 if not os.path.isdir(store):
2039 f = open('_project', 'w')
2040 f.write(project + '\n')
2042 f = open('_package', 'w')
2043 f.write(package + '\n')
2047 f = open('_size_limit', 'w')
2048 f.write(str(limit_size))
2052 f = open('_files', 'w')
2053 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
2057 ET.ElementTree(element=ET.Element('directory')).write('_files')
2059 f = open('_osclib_version', 'w')
2060 f.write(__store_version__ + '\n')
2063 store_write_apiurl(os.path.pardir, apiurl)
2069 def check_store_version(dir):
2070 versionfile = os.path.join(dir, store, '_osclib_version')
2072 v = open(versionfile).read().strip()
2077 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2078 if os.path.exists(os.path.join(dir, '.svn')):
2079 msg = msg + '\nTry svn instead of osc.'
2080 raise oscerr.NoWorkingCopy(msg)
2082 if v != __store_version__:
2083 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2084 # version is fine, no migration needed
2085 f = open(versionfile, 'w')
2086 f.write(__store_version__ + '\n')
2089 msg = 'The osc metadata of your working copy "%s"' % dir
2090 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2091 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2092 raise oscerr.WorkingCopyWrongVersion, msg
2095 def meta_get_packagelist(apiurl, prj):
2097 u = makeurl(apiurl, ['source', prj])
2099 root = ET.parse(f).getroot()
2100 return [ node.get('name') for node in root.findall('entry') ]
2103 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2104 """return a list of file names,
2105 or a list File() instances if verbose=True"""
2111 query['rev'] = revision
2113 query['rev'] = 'latest'
2115 u = makeurl(apiurl, ['source', prj, package], query=query)
2117 root = ET.parse(f).getroot()
2120 return [ node.get('name') for node in root.findall('entry') ]
2124 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2125 rev = root.get('rev')
2126 for node in root.findall('entry'):
2127 f = File(node.get('name'),
2129 int(node.get('size')),
2130 int(node.get('mtime')))
2136 def meta_get_project_list(apiurl):
2137 u = makeurl(apiurl, ['source'])
2139 root = ET.parse(f).getroot()
2140 return sorted([ node.get('name') for node in root ])
2143 def show_project_meta(apiurl, prj):
2144 url = makeurl(apiurl, ['source', prj, '_meta'])
2146 return f.readlines()
2149 def show_project_conf(apiurl, prj):
2150 url = makeurl(apiurl, ['source', prj, '_config'])
2152 return f.readlines()
2155 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2156 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2160 except urllib2.HTTPError, e:
2161 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2165 def show_package_meta(apiurl, prj, pac):
2166 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2169 return f.readlines()
2170 except urllib2.HTTPError, e:
2171 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2175 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2177 path.append('source')
2183 path.append('_attribute')
2185 path.append(attribute)
2188 query.append("with_default=1")
2190 query.append("with_project=1")
2191 url = makeurl(apiurl, path, query)
2194 return f.readlines()
2195 except urllib2.HTTPError, e:
2196 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2200 def show_develproject(apiurl, prj, pac):
2201 m = show_package_meta(apiurl, prj, pac)
2203 return ET.fromstring(''.join(m)).find('devel').get('project')
2208 def show_pattern_metalist(apiurl, prj):
2209 url = makeurl(apiurl, ['source', prj, '_pattern'])
2213 except urllib2.HTTPError, e:
2214 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2216 r = [ node.get('name') for node in tree.getroot() ]
2221 def show_pattern_meta(apiurl, prj, pattern):
2222 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2225 return f.readlines()
2226 except urllib2.HTTPError, e:
2227 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2232 """metafile that can be manipulated and is stored back after manipulation."""
2233 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2237 self.change_is_required = change_is_required
2238 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2239 f = os.fdopen(fd, 'w')
2240 f.write(''.join(input))
2242 self.hash_orig = dgst(self.filename)
2245 hash = dgst(self.filename)
2246 if self.change_is_required == True and hash == self.hash_orig:
2247 print 'File unchanged. Not saving.'
2248 os.unlink(self.filename)
2251 print 'Sending meta data...'
2252 # don't do any exception handling... it's up to the caller what to do in case of an exception
2254 http_PUT(self.url, file=self.filename)
2255 os.unlink(self.filename)
2261 run_editor(self.filename)
2265 except urllib2.HTTPError, e:
2266 error_help = "%d" % e.code
2267 if e.headers.get('X-Opensuse-Errorcode'):
2268 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2270 print >>sys.stderr, 'BuildService API error:', error_help
2271 # examine the error - we can't raise an exception because we might want to try again
2274 if '<summary>' in data:
2275 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2276 input = raw_input('Try again? ([y/N]): ')
2277 if input not in ['y', 'Y']:
2283 if os.path.exists(self.filename):
2284 print 'discarding %s' % self.filename
2285 os.unlink(self.filename)
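# Hypothetical usage sketch for the class above (assumption: the elided method
# that uploads self.filename via http_PUT is named sync): fetch a package's
# meta, edit it locally, then send it back.
#
#   url = make_meta_url('pkg', (quote_plus('home:user'), quote_plus('mypkg')))
#   m = metafile(url, show_package_meta(apiurl, 'home:user', 'mypkg'),
#                change_is_required=True)
#   run_editor(m.filename)
#   m.sync()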
2288 # different types of metadata
2289 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2290 'template': new_project_templ,
2293 'pkg': { 'path' : 'source/%s/%s/_meta',
2294 'template': new_package_templ,
2297 'attribute': { 'path' : 'source/%s/%s/_meta',
2298 'template': new_attribute_templ,
2301 'prjconf': { 'path': 'source/%s/_config',
2305 'user': { 'path': 'person/%s',
2306 'template': new_user_template,
2309 'pattern': { 'path': 'source/%s/_pattern/%s',
2310 'template': new_pattern_template,
2315 def meta_exists(metatype,
2322 apiurl = conf.config['apiurl']
2323 url = make_meta_url(metatype, path_args, apiurl)
2325 data = http_GET(url).readlines()
2326 except urllib2.HTTPError, e:
2327 if e.code == 404 and create_new:
2328 data = metatypes[metatype]['template']
2330 data = StringIO(data % template_args).readlines()
2335 def make_meta_url(metatype, path_args=None, apiurl=None):
2337 apiurl = conf.config['apiurl']
2338 if metatype not in metatypes.keys():
2339 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2340 path = metatypes[metatype]['path']
2343 path = path % path_args
2345 return makeurl(apiurl, [path])
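# Illustrative mapping (hypothetical values): each metatype resolves to the
# path template from the metatypes table above, so roughly
#
#   make_meta_url('prj', 'home:user')            -> <apiurl>/source/home:user/_meta
#   make_meta_url('pkg', ('home:user', 'mypkg')) -> <apiurl>/source/home:user/mypkg/_meta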
2348 def edit_meta(metatype,
2353 change_is_required=False,
2357 apiurl = conf.config['apiurl']
2359 data = meta_exists(metatype,
2362 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2366 change_is_required = True
2368 url = make_meta_url(metatype, path_args, apiurl)
2369 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2377 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
2380 query['rev'] = revision
2382 query['rev'] = 'latest'
2384 query['linkrev'] = linkrev
2385 elif conf.config['linkcontrol']:
2386 query['linkrev'] = 'base'
2390 query['emptylink'] = 1
2391 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2393 # look for "too large" files according to size limit and mark them
2394 root = ET.fromstring(''.join(f.readlines()))
2395 for e in root.findall('entry'):
2396 size = e.get('size')
2397 if size and limit_size and int(size) > int(limit_size):
2398 e.set('skipped', 'true')
2399 return ET.tostring(root)
2402 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2403 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2404 return ET.fromstring(''.join(m)).get('srcmd5')
2407 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2408 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2410 # only source link packages have a <linkinfo> element.
2411 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2419 raise oscerr.LinkExpandError(prj, pac, li.error)
2423 def show_upstream_rev(apiurl, prj, pac):
2424 m = show_files_meta(apiurl, prj, pac)
2425 return ET.fromstring(''.join(m)).get('rev')
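# Usage sketch (hypothetical project/package): the three helpers above each
# pull a single attribute out of the file list returned by show_files_meta:
#
#   srcmd5 = show_upstream_srcmd5(apiurl, 'home:user', 'mypkg', expand=True)
#   rev = show_upstream_rev(apiurl, 'home:user', 'mypkg')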
2428 def read_meta_from_spec(specfile, *args):
2429 import codecs, locale, re
2431 Read tags and sections from spec file. To read out
2432 a tag the passed argument mustn't end with a colon. To
2433 read out a section the passed argument must start with a '%'.
2435 This method returns a dictionary which contains the requested data.
2439 if not os.path.isfile(specfile):
2440 raise IOError('\'%s\' is not a regular file' % specfile)
2443 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2444 except UnicodeDecodeError:
2445 lines = open(specfile).readlines()
2452 if itm.startswith('%'):
2453 sections.append(itm)
2457 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2459 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2460 if m and m.group('val'):
2461 spec_data[tag] = m.group('val').strip()
2463 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2466 section_pat = '^%s\s*?$'
2467 for section in sections:
2468 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2470 start = lines.index(m.group()+'\n') + 1
2472 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2475 for line in lines[start:]:
2476 if line.startswith('%'):
2479 spec_data[section] = data
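# Hypothetical usage sketch (assumption: the elided tail of the function
# returns the spec_data dictionary built above). Tags are passed without the
# trailing colon, sections with their leading '%':
#
#   data = read_meta_from_spec('mypkg.spec', 'Summary', '%description')
#   print data['Summary']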
2483 def run_editor(filename):
2484 if sys.platform[:3] != 'win':
2485 editor = os.getenv('EDITOR', default='vim')
2487 editor = os.getenv('EDITOR', default='notepad')
2489 return subprocess.call([ editor, filename ])
2491 def edit_message(footer='', template=''):
2492 delim = '--This line, and those below, will be ignored--\n'
2494 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2495 f = os.fdopen(fd, 'w')
2506 run_editor(filename)
2507 msg = open(filename).read().split(delim)[0].rstrip()
2512 input = raw_input('Log message not specified\n'
2513 'a)bort, c)ontinue, e)dit: ')
2515 raise oscerr.UserAbort()
2525 def create_delete_request(apiurl, project, package, message):
2530 package = """package="%s" """ % (package)
2536 <action type="delete">
2537 <target project="%s" %s/>
2540 <description>%s</description>
2542 """ % (project, package,
2543 cgi.escape(message or ''))
2545 u = makeurl(apiurl, ['request'], query='cmd=create')
2546 f = http_POST(u, data=xml)
2548 root = ET.parse(f).getroot()
2549 return root.get('id')
2552 def create_change_devel_request(apiurl,
2553 devel_project, devel_package,
2560 <action type="change_devel">
2561 <source project="%s" package="%s" />
2562 <target project="%s" package="%s" />
2565 <description>%s</description>
2567 """ % (devel_project,
2571 cgi.escape(message or ''))
2573 u = makeurl(apiurl, ['request'], query='cmd=create')
2574 f = http_POST(u, data=xml)
2576 root = ET.parse(f).getroot()
2577 return root.get('id')
2580 # This creates an old style submit request for server api 1.0
2581 def create_submit_request(apiurl,
2582 src_project, src_package,
2583 dst_project=None, dst_package=None,
2584 message=None, orev=None, src_update=None):
2589 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2591 # Yes, this kind of xml construction is horrible
2596 packagexml = """package="%s" """ %( dst_package )
2597 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2598 # XXX: keep the old template for now in order to work with old obs instances
2600 <request type="submit">
2602 <source project="%s" package="%s" rev="%s"/>
2607 <description>%s</description>
2611 orev or show_upstream_rev(apiurl, src_project, src_package),
2614 cgi.escape(message or ""))
2616 u = makeurl(apiurl, ['request'], query='cmd=create')
2617 f = http_POST(u, data=xml)
2619 root = ET.parse(f).getroot()
2620 return root.get('id')
2623 def get_request(apiurl, reqid):
2624 u = makeurl(apiurl, ['request', reqid])
2626 root = ET.parse(f).getroot()
2633 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2636 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2637 f = http_POST(u, data=message)
2640 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2643 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2644 f = http_POST(u, data=message)
2648 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2650 if not 'all' in req_state:
2651 for state in req_state:
2652 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2654 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2656 # XXX: we cannot use the '|' in the xpath expression because it is not supported by the api
2660 todo['project'] = project
2662 todo['package'] = package
2663 for kind, val in todo.iteritems():
2664 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2665 'action/source/@%(kind)s=\'%(val)s\' or ' \
2666 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2667 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2669 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2670 for i in exclude_target_projects:
2671 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2672 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2674 if conf.config['verbose'] > 1:
2675 print '[ %s ]' % xpath
2676 res = search(apiurl, request=xpath)
2677 collection = res['request']
2679 for root in collection.findall('request'):
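# Rough sketch of the xpath assembled above for a call such as
#   get_request_list(apiurl, project='home:user', req_state=('new',))
# (exact spacing/parenthesization depends on xpath_join, defined elsewhere):
#
#   state/@name='new' and (action/target/@project='home:user' or
#   action/source/@project='home:user' or submit/target/@project='home:user' or
#   submit/source/@project='home:user')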
2685 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2686 """Return all new requests for all projects/packages where is user is involved"""
2688 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2689 for i in res['project_id'].findall('project'):
2690 projpkgs[i.get('name')] = []
2691 for i in res['package_id'].findall('package'):
2692 if not i.get('project') in projpkgs.keys():
2693 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2695 for prj, pacs in projpkgs.iteritems():
2697 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2701 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2702 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2703 xpath = xpath_join(xpath, xp, inner=True)
2705 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2706 if not 'all' in req_state:
2708 for state in req_state:
2709 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2710 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2711 res = search(apiurl, request=xpath)
2713 for root in res['request'].findall('request'):
2719 def get_request_log(apiurl, reqid):
2720 r = get_request(conf.config['apiurl'], reqid)
2722 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2723 # the description of the request is used for the initial log entry
2724 # otherwise its comment attribute would contain None
2725 if len(r.statehistory) >= 1:
2726 r.statehistory[-1].comment = r.descr
2728 r.state.comment = r.descr
2729 for state in [ r.state ] + r.statehistory:
2730 s = frmt % (state.name, state.who, state.when, str(state.comment))
2735 def get_user_meta(apiurl, user):
2736 u = makeurl(apiurl, ['person', quote_plus(user)])
2739 return ''.join(f.readlines())
2740 except urllib2.HTTPError:
2741 print 'user \'%s\' not found' % user
2745 def get_user_data(apiurl, user, *tags):
2746 """get specified tags from the user meta"""
2747 meta = get_user_meta(apiurl, user)
2750 root = ET.fromstring(meta)
2753 if root.find(tag).text != None:
2754 data.append(root.find(tag).text)
2758 except AttributeError:
2759 # this part is reached if the tags tuple contains an invalid tag
2760 print 'The xml file for user \'%s\' seems to be broken' % user
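# Hypothetical usage sketch (assumption: the elided tail returns the collected
# 'data' list): fetch selected fields from a user's meta.
#
#   print get_user_data(apiurl, 'Admin', 'email', 'realname')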
2765 def download(url, filename, progress_obj = None):
2766 import tempfile, shutil
2769 if filename[0] != '/':
2770 prefix = os.getcwd() + '/' + filename
2773 (fd, tmpfile) = tempfile.mkstemp(prefix = prefix, suffix = '.osc')
2774 os.chmod(tmpfile, 0644)
2776 o = os.fdopen(fd, 'wb')
2777 for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
2780 shutil.move(tmpfile, filename)
2788 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
2789 targetfilename = targetfilename or filename
2792 query = { 'rev': revision }
2793 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2794 return download(u, targetfilename, progress_obj)
2796 def get_binary_file(apiurl, prj, repo, arch,
2799 target_filename = None,
2800 target_mtime = None,
2801 progress_meter = False):
2804 from meter import TextMeter
2805 progress_obj = TextMeter()
2807 target_filename = target_filename or filename
2809 where = package or '_repository'
2810 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2811 return download(u, target_filename, progress_obj)
2813 def dgst_from_string(str):
2814 # Python 2.5 deprecates the md5 module
2815 # Python 2.4 doesn't have hashlib yet
2818 md5_hash = hashlib.md5()
2821 md5_hash = md5.new()
2822 md5_hash.update(str)
2823 return md5_hash.hexdigest()
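# Quick sanity example: the MD5 of the empty string is a well-known constant,
# so the following holds:
#
#   dgst_from_string('') == 'd41d8cd98f00b204e9800998ecf8427e'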
2827 #if not os.path.exists(file):
2837 f = open(file, 'rb')
2839 buf = f.read(BUFSIZE)
2842 return s.hexdigest()
2847 """return true if a string is binary data using diff's heuristic"""
2848 if s and '\0' in s[:4096]:
2853 def binary_file(fn):
2854 """read 4096 bytes from a file named fn, and call binary() on the data"""
2855 return binary(open(fn, 'rb').read(4096))
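# The heuristic mirrors diff(1): any NUL byte within the first 4096 bytes marks
# the data as binary. Illustrative values:
#
#   binary('hello world\n')     -> False
#   binary('\x7fELF\0\x01\x02') -> True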
2858 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2860 This method diffs oldfilename against filename (so filename will
2861 be shown as the new file).
2862 The variable origfilename is used if filename and oldfilename differ
2863 in their names (for instance if a tempfile is used for filename etc.)
2869 oldfilename = filename
2872 olddir = os.path.join(dir, store)
2874 if not origfilename:
2875 origfilename = filename
2877 file1 = os.path.join(olddir, oldfilename) # old/stored original
2878 file2 = os.path.join(dir, filename) # working copy
2880 f1 = open(file1, 'rb')
2884 f2 = open(file2, 'rb')
2888 if binary(s1) or binary (s2):
2889 d = ['Binary file %s has changed\n' % origfilename]
2892 d = difflib.unified_diff(\
2895 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2896 tofile = '%s\t(working copy)' % origfilename)
2898 # if file doesn't end with newline, we need to append one in the diff result
2900 for i, line in enumerate(d):
2901 if not line.endswith('\n'):
2902 d[i] += '\n\\ No newline at end of file'
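# Sketch of the unified diff header this produces for a changed file at a
# hypothetical revision 7 (filename and annotation are separated by a tab):
#
#   --- mypkg.spec    (revision 7)
#   +++ mypkg.spec    (working copy)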
2908 def make_diff(wc, revision):
2914 diff_hdr = 'Index: %s\n'
2915 diff_hdr += '===================================================================\n'
2917 olddir = os.getcwd()
2921 for file in wc.todo:
2922 if file in wc.skipped:
2924 if file in wc.filenamelist+wc.filenamelist_unvers:
2925 state = wc.status(file)
2927 added_files.append(file)
2929 removed_files.append(file)
2930 elif state == 'M' or state == 'C':
2931 changed_files.append(file)
2933 diff.append('osc: \'%s\' is not under version control' % file)
2935 for file in wc.filenamelist+wc.filenamelist_unvers:
2936 if file in wc.skipped:
2938 state = wc.status(file)
2939 if state == 'M' or state == 'C':
2940 changed_files.append(file)
2942 added_files.append(file)
2944 removed_files.append(file)
2946 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2948 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2949 cmp_pac = Package(tmpdir)
2951 for file in wc.todo:
2952 if file in cmp_pac.skipped:
2954 if file in cmp_pac.filenamelist:
2955 if file in wc.filenamelist:
2956 changed_files.append(file)
2958 diff.append('osc: \'%s\' is not under version control' % file)
2960 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2962 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2964 for file in changed_files:
2965 diff.append(diff_hdr % file)
2967 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2969 cmp_pac.updatefile(file, revision)
2970 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
2971 cmp_pac.absdir, file))
2972 (fd, tmpfile) = tempfile.mkstemp()
2973 for file in added_files:
2974 diff.append(diff_hdr % file)
2976 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
2977 os.path.dirname(tmpfile), file))
2979 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
2980 os.path.dirname(tmpfile), file))
2982 # FIXME: this is ugly but it cannot be avoided atm
2983 # if a file is deleted via "osc rm file" we should keep the storefile.
2985 if cmp_pac == None and removed_files:
2986 tmpdir = tempfile.mkdtemp()
2988 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
2989 tmp_pac = Package(tmpdir)
2992 for file in removed_files:
2993 diff.append(diff_hdr % file)
2995 tmp_pac.updatefile(file, tmp_pac.rev)
2996 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2997 wc.rev, file, tmp_pac.storedir, file))
2999 cmp_pac.updatefile(file, revision)
3000 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3001 revision, file, cmp_pac.storedir, file))
3005 delete_dir(cmp_pac.absdir)
3007 delete_dir(tmp_pac.absdir)
3011 def server_diff(apiurl,
3012 old_project, old_package, old_revision,
3013 new_project, new_package, new_revision, unified=False, missingok=False):
3014 query = {'cmd': 'diff', 'expand': '1'}
3016 query['oproject'] = old_project
3018 query['opackage'] = old_package
3020 query['orev'] = old_revision
3022 query['rev'] = new_revision
3024 query['unified'] = 1
3026 query['missingok'] = 1
3028 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
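# Rough sketch of the URL built above (hypothetical values; query parameter
# order is not guaranteed):
#
#   <apiurl>/source/openSUSE:Factory/osc?cmd=diff&expand=1&oproject=home:user&opackage=osc&orev=3&rev=5&unified=1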
3034 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3036 creates the plain directory structure for a package dir.
3037 The 'apiurl' parameter is needed for the project dir initialization.
3038 The 'project' and 'package' parameters specify the name of the
3039 project and the package. The optional 'pathname' parameter is used
3040 for printing out the message that a new dir was created (default: 'prj_dir/package').
3041 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3043 prj_dir = prj_dir or project
3045 # FIXME: carefully test each path component of prj_dir,
3046 # if we have a .osc/_files entry at that level.
3047 # -> if so, we have a package/project clash,
3048 # and should rename this path component by appending '.proj'
3049 # and give user a warning message, to discourage such clashes
3051 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3052 if is_package_dir(prj_dir):
3053 # we want this to become a project directory,
3054 # but it already is a package directory.
3055 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3057 if not is_project_dir(prj_dir):
3058 # this directory could exist as a parent directory for one of our earlier
3059 # checked out sub-projects. In this case, we still need to initialize it.
3060 print statfrmt('A', prj_dir)
3061 init_project_dir(apiurl, prj_dir, project)
3063 if is_project_dir(os.path.join(prj_dir, package)):
3064 # the thing exists, but is a project directory and not a package directory
3065 # FIXME: this should be a warning message to discourage package/project clashes
3066 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3068 if not os.path.exists(os.path.join(prj_dir, package)):
3069 print statfrmt('A', pathname)
3070 os.mkdir(os.path.join(prj_dir, package))
3071 os.mkdir(os.path.join(prj_dir, package, store))
3073 return(os.path.join(prj_dir, package))
3076 def checkout_package(apiurl, project, package,
3077 revision=None, pathname=None, prj_obj=None,
3078 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3080 # the project we're in might be deleted.
3081 # that'll throw an error then.
3082 olddir = os.getcwd()
3084 olddir = os.environ.get("PWD")
3089 if sys.platform[:3] == 'win':
3090 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3092 if conf.config['checkout_no_colon']:
3093 prj_dir = prj_dir.replace(':', '/')
3096 pathname = getTransActPath(os.path.join(prj_dir, package))
3098 # before we create directories and stuff, check if the package actually exists
3100 show_package_meta(apiurl, project, package)
3104 # try to read from the linkinfo
3105 # if it is a link we use the xsrcmd5 as the revision to be checked out
3108 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3110 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3115 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3116 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3118 p = Package(package, progress_obj=progress_obj)
3121 for filename in p.filenamelist:
3122 if filename in p.skipped:
3124 if service_files or not filename.startswith('_service:'):
3125 p.updatefile(filename, revision)
3126 # print 'A ', os.path.join(project, package, filename)
3127 print statfrmt('A', os.path.join(pathname, filename))
3128 if conf.config['do_package_tracking']:
3129 # check if we can re-use an existing project object
3131 prj_obj = Project(os.getcwd())
3132 prj_obj.set_state(p.name, ' ')
3133 prj_obj.write_packages()
3137 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3138 dst_userid = None, keep_develproject = False):
3140 update pkgmeta with the new name (new_name) and project (new_prj) and set the calling user as the
3141 only maintainer (unless keep_maintainers is set). Additionally remove the
3142 develproject entry (<devel />) unless keep_develproject is true.
3144 root = ET.fromstring(''.join(pkgmeta))
3145 root.set('name', new_name)
3146 root.set('project', new_prj)
3147 if not keep_maintainers:
3148 for person in root.findall('person'):
3150 if not keep_develproject:
3151 for dp in root.findall('devel'):
3153 return ET.tostring(root)
3155 def link_to_branch(apiurl, project, package):
3157 convert a package with a _link + project.diff to a branch
3160 if '_link' in meta_get_filelist(apiurl, project, package):
3161 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3164 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3166 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3168 create a linked package
3169 - "src" is the original package
3170 - "dst" is the "link" package that we are creating here
3175 dst_meta = meta_exists(metatype='pkg',
3176 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3178 create_new=False, apiurl=conf.config['apiurl'])
3179 root = ET.fromstring(''.join(dst_meta))
3180 print root.attrib['project']
3181 if root.attrib['project'] != dst_project:
3182 # The source comes from a different project via a project link; we need to create this package instance here
3188 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3189 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3193 root = ET.fromstring(''.join(dst_meta))
3194 elm = root.find('publish')
3196 elm = ET.SubElement(root, 'publish')
3198 ET.SubElement(elm, 'disable')
3199 dst_meta = ET.tostring(root)
3203 path_args=(dst_project, dst_package),
3205 # create the _link file
3206 # but first, make sure not to overwrite an existing one
3207 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3209 print >>sys.stderr, 'forced overwrite of existing _link file'
3212 print >>sys.stderr, '_link file already exists...! Aborting'
3216 rev = 'rev="%s"' % rev
3221 cicount = 'cicount="%s"' % cicount
3225 print 'Creating _link...',
3226 link_template = """\
3227 <link project="%s" package="%s" %s %s>
3229 <!-- <apply name="patch" /> apply a patch on the source directory -->
3230 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3231 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3232 <!-- <delete>filename</delete> delete a file -->
3235 """ % (src_project, src_package, rev, cicount)
3237 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3238 http_PUT(u, data=link_template)
3241 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3244 - "src" is the original package
3245 - "dst" is the "aggregate" package that we are creating here
3246 - "map" is a dictionary SRC => TARGET repository mappings
3251 dst_meta = meta_exists(metatype='pkg',
3252 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3254 create_new=False, apiurl=conf.config['apiurl'])
3256 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3257 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3262 root = ET.fromstring(''.join(dst_meta))
3263 elm = root.find('publish')
3265 elm = ET.SubElement(root, 'publish')
3267 ET.SubElement(elm, 'disable')
3268 dst_meta = ET.tostring(root)
3271 path_args=(dst_project, dst_package),
3274 # create the _aggregate file
3275 # but first, make sure not to overwrite an existing one
3276 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3278 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3281 print 'Creating _aggregate...',
3282 aggregate_template = """\
3284 <aggregate project="%s">
3286 for tgt, src in repo_map.iteritems():
3287 aggregate_template += """\
3288 <repository target="%s" source="%s" />
3291 aggregate_template += """\
3292 <package>%s</package>
3295 """ % ( src_package)
3297 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3298 http_PUT(u, data=aggregate_template)
3302 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3304 Branch packages defined via attributes (via API call)
3306 query = { 'cmd': 'branch' }
3307 query['attribute'] = attribute
3309 query['target_project'] = targetproject
3311 query['package'] = package
3312 if maintained_update_project_attribute:
3313 query['update_project_attribute'] = maintained_update_project_attribute
3315 u = makeurl(apiurl, ['source'], query=query)
3319 except urllib2.HTTPError, e:
3320 msg = ''.join(e.readlines())
3321 msg = msg.split('<summary>')[1]
3322 msg = msg.split('</summary>')[0]
3323 raise oscerr.APIError(msg)
3326 r = r.split('targetproject">')[1]
3327 r = r.split('</data>')[0]
3331 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False, msg=''):
3333 Branch a package (via API call)
3335 query = { 'cmd': 'branch' }
3337 query['ignoredevel'] = '1'
3341 query['target_project'] = target_project
3343 query['target_package'] = target_package
3345 query['comment'] = msg
3346 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
3349 except urllib2.HTTPError, e:
3350 if not return_existing:
3352 msg = ''.join(e.readlines())
3353 msg = msg.split('<summary>')[1]
3354 msg = msg.split('</summary>')[0]
3355 m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
3359 return (True, m.group(1), m.group(2), None, None)
3362 for i in ET.fromstring(f.read()).findall('data'):
3363 data[i.get('name')] = i.text
3364 return (False, data.get('targetproject', None), data.get('targetpackage', None),
3365 data.get('sourceproject', None), data.get('sourcepackage', None))
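# Hypothetical usage sketch: branch a package and unpack the tuple returned
# above; the first element tells whether the branch target already existed.
#
#   exists, tgt_prj, tgt_pkg, src_prj, src_pkg = branch_pkg(
#       apiurl, 'openSUSE:Factory', 'osc', return_existing=True)
#   print 'branched to %s/%s' % (tgt_prj, tgt_pkg)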
3368 def copy_pac(src_apiurl, src_project, src_package,
3369 dst_apiurl, dst_project, dst_package,