1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.126git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
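# For orientation, a hedged sketch of the URL shapes these two patterns are meant
# to match (host and package names below are illustrative, not taken from this file):
#   DISTURL_RE:     obs://build.opensuse.org/openSUSE:Factory/standard/<srcmd5>-hello_world
#   BUILDLOGURL_RE: https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/hello_world/_log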
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # But it needs to stay to avoid breaking tools which use the osc library.
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E', # obsolete with OBS 2.0
200 # os.path.samefile is available only under Unix
201 def os_path_samefile(path1, path2):
203 return os.path.samefile(path1, path2)
205 return os.path.realpath(path1) == os.path.realpath(path2)
208 """represent a file, including its metadata"""
209 def __init__(self, name, md5, size, mtime):
219 """Source service content
222 """creates an empty serviceinfo instance"""
225 def read(self, serviceinfo_node):
226 """read in the source services <services> element passed as
229 if serviceinfo_node is None:
232 services = serviceinfo_node.findall('service')
234 for service in services:
235 name = service.get('name')
237 for param in service.findall('param'):
238 option = param.get('name', None)
240 name += " --" + option + " '" + value + "'"
241 self.commands.append(name)
243 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
244 raise oscerr.APIError(msg)
246 def execute(self, dir):
249 for call in self.commands:
250 temp_dir = tempfile.mkdtemp()
251 name = call.split(None, 1)[0]
252 if not os.path.exists("/usr/lib/obs/service/"+name):
253 msg = "ERROR: service is not installed !"
254 msg += "Can maybe solved with: zypper in obs-server-" + name
255 raise oscerr.APIError(msg)
256 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
257 ret = subprocess.call(c, shell=True)
259 print "ERROR: service call failed: " + c
261 for file in os.listdir(temp_dir):
262 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
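# Illustration only (hedged; service and parameter names are examples): with a
# _service file such as
#
#   <services>
#     <service name="download_url">
#       <param name="host">ftp.example.com</param>
#       <param name="path">/pub/hello.tar.gz</param>
#     </service>
#   </services>
#
# read() would record the single command
#   download_url --host 'ftp.example.com' --path '/pub/hello.tar.gz'
# and execute() would run it as
#   /usr/lib/obs/service/download_url --host 'ftp.example.com' --path '/pub/hello.tar.gz' --outdir <tempdir>
# renaming every produced file to _service:download_url:<filename> in the package dir.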
266 """linkinfo metadata (which is part of the xml representing a directory
269 """creates an empty linkinfo instance"""
279 def read(self, linkinfo_node):
280 """read in the linkinfo metadata from the <linkinfo> element passed as
282 If the passed element is None, the method does nothing.
284 if linkinfo_node is None:
286 self.project = linkinfo_node.get('project')
287 self.package = linkinfo_node.get('package')
288 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
289 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
290 self.srcmd5 = linkinfo_node.get('srcmd5')
291 self.error = linkinfo_node.get('error')
292 self.rev = linkinfo_node.get('rev')
293 self.baserev = linkinfo_node.get('baserev')
296 """returns True if the linkinfo is not empty, otherwise False"""
297 if self.xsrcmd5 or self.lsrcmd5:
301 def isexpanded(self):
302 """returns True if the package is an expanded link"""
303 if self.lsrcmd5 and not self.xsrcmd5:
308 """returns True if the link is in error state (could not be applied)"""
314 """return an informatory string representation"""
315 if self.islink() and not self.isexpanded():
316 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
317 % (self.project, self.package, self.xsrcmd5, self.rev)
318 elif self.islink() and self.isexpanded():
320 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
321 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
323 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
324 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
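# A hedged example of how read() maps a <linkinfo> element onto this class
# (attribute values are placeholders): an element like
#   <linkinfo project="openSUSE:Factory" package="hello_world" xsrcmd5="<md5>" rev="7"/>
# yields islink() == True and isexpanded() == False, while an element carrying
# only lsrcmd5 (and no xsrcmd5) yields islink() == True and isexpanded() == True.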
330 """represent a project directory, holding packages"""
331 def __init__(self, dir, getPackageList=True, progress_obj=None):
334 self.absdir = os.path.abspath(dir)
335 self.progress_obj = progress_obj
337 self.name = store_read_project(self.dir)
338 self.apiurl = store_read_apiurl(self.dir)
341 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
343 self.pacs_available = []
345 if conf.config['do_package_tracking']:
346 self.pac_root = self.read_packages().getroot()
347 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
348 self.pacs_excluded = [ i for i in os.listdir(self.dir)
349 for j in conf.config['exclude_glob']
350 if fnmatch.fnmatch(i, j) ]
351 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
352 # store all broken packages (e.g. packages which were removed by a non-osc command)
353 # in the self.pacs_broken list
354 self.pacs_broken = []
355 for p in self.pacs_have:
356 if not os.path.isdir(os.path.join(self.absdir, p)):
357 # all states will be replaced with the '!'-state
358 # (unless it is already marked as deleted ('D'-state))
359 self.pacs_broken.append(p)
361 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
363 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
365 def checkout_missing_pacs(self, expand_link=False):
366 for pac in self.pacs_missing:
368 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
369 # pac is not under version control but a local file/dir exists
370 msg = 'can\'t add package \'%s\': Object already exists' % pac
371 raise oscerr.PackageExists(self.name, pac, msg)
373 print 'checking out new package %s' % pac
374 checkout_package(self.apiurl, self.name, pac, \
375 pathname=getTransActPath(os.path.join(self.dir, pac)), \
376 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
378 def set_state(self, pac, state):
379 node = self.get_package_node(pac)
381 self.new_package_entry(pac, state)
383 node.attrib['state'] = state
385 def get_package_node(self, pac):
386 for node in self.pac_root.findall('package'):
387 if pac == node.get('name'):
391 def del_package_node(self, pac):
392 for node in self.pac_root.findall('package'):
393 if pac == node.get('name'):
394 self.pac_root.remove(node)
396 def get_state(self, pac):
397 node = self.get_package_node(pac)
399 return node.get('state')
403 def new_package_entry(self, name, state):
404 ET.SubElement(self.pac_root, 'package', name=name, state=state)
406 def read_packages(self):
407 packages_file = os.path.join(self.absdir, store, '_packages')
408 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
409 return ET.parse(packages_file)
411 # scan project for existing packages and migrate them
413 for data in os.listdir(self.dir):
414 pac_dir = os.path.join(self.absdir, data)
415 # we cannot use self.pacs_available because we cannot guarantee that the package list
416 # was fetched from the server
417 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
418 and Package(pac_dir).name == data:
419 cur_pacs.append(ET.Element('package', name=data, state=' '))
420 store_write_initial_packages(self.absdir, self.name, cur_pacs)
421 return ET.parse(os.path.join(self.absdir, store, '_packages'))
423 def write_packages(self):
424 # TODO: should we only modify the existing file instead of overwriting?
425 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
427 def addPackage(self, pac):
429 for i in conf.config['exclude_glob']:
430 if fnmatch.fnmatch(pac, i):
431 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
432 raise oscerr.OscIOError(None, msg)
433 state = self.get_state(pac)
434 if state is None or state == 'D':
435 self.new_package_entry(pac, 'A')
436 self.write_packages()
437 # sometimes the new pac doesn't exist in the list because
438 # it would take too much time to update all data structs regularly
439 if pac in self.pacs_unvers:
440 self.pacs_unvers.remove(pac)
442 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
444 def delPackage(self, pac, force = False):
445 state = self.get_state(pac.name)
447 if state == ' ' or state == 'D':
449 for file in pac.filenamelist + pac.filenamelist_unvers:
450 filestate = pac.status(file)
451 if filestate == 'M' or filestate == 'C' or \
452 filestate == 'A' or filestate == '?':
455 del_files.append(file)
456 if can_delete or force:
457 for file in del_files:
458 pac.delete_localfile(file)
459 if pac.status(file) != '?':
460 pac.delete_storefile(file)
461 # this is not really necessary
462 pac.put_on_deletelist(file)
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
464 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
465 pac.write_deletelist()
466 self.set_state(pac.name, 'D')
467 self.write_packages()
469 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
472 delete_dir(pac.absdir)
473 self.del_package_node(pac.name)
474 self.write_packages()
475 print statfrmt('D', pac.name)
477 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
479 print 'package is not under version control'
481 print 'unsupported state'
483 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
486 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
488 # we need to make sure that the _packages file will be written (even if an exception
491 # update complete project
492 # packages which no longer exist upstream
493 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
495 for pac in upstream_del:
496 p = Package(os.path.join(self.dir, pac))
497 self.delPackage(p, force = True)
498 delete_storedir(p.storedir)
503 self.pac_root.remove(self.get_package_node(p.name))
504 self.pacs_have.remove(pac)
506 for pac in self.pacs_have:
507 state = self.get_state(pac)
508 if pac in self.pacs_broken:
509 if self.get_state(pac) != 'A':
510 checkout_package(self.apiurl, self.name, pac,
511 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
512 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
515 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
517 if expand_link and p.islink() and not p.isexpanded():
520 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
522 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
525 rev = p.linkinfo.xsrcmd5
526 print 'Expanding to rev', rev
527 elif unexpand_link and p.islink() and p.isexpanded():
528 rev = p.linkinfo.lsrcmd5
529 print 'Unexpanding to rev', rev
530 elif p.islink() and p.isexpanded():
532 print 'Updating %s' % p.name
533 p.update(rev, service_files)
537 # TODO: Package::update has to be fixed to behave like svn does
538 if pac in self.pacs_broken:
539 checkout_package(self.apiurl, self.name, pac,
540 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
541 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
543 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
544 elif state == 'A' and pac in self.pacs_available:
545 # file/dir called pac already exists and is under version control
546 msg = 'can\'t add package \'%s\': Object already exists' % pac
547 raise oscerr.PackageExists(self.name, pac, msg)
552 print 'unexpected state.. package \'%s\'' % pac
554 self.checkout_missing_pacs(expand_link=not unexpand_link)
556 self.write_packages()
558 def commit(self, pacs = (), msg = '', files = {}):
563 if files.has_key(pac):
565 state = self.get_state(pac)
567 self.commitNewPackage(pac, msg, todo)
569 self.commitDelPackage(pac)
571 # display the correct dir when sending the changes
572 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
575 p = Package(os.path.join(self.dir, pac))
578 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
579 print 'osc: \'%s\' is not under version control' % pac
580 elif pac in self.pacs_broken:
581 print 'osc: \'%s\' package not found' % pac
583 self.commitExtPackage(pac, msg, todo)
585 self.write_packages()
587 # if we have packages marked as '!' we cannot commit
588 for pac in self.pacs_broken:
589 if self.get_state(pac) != 'D':
590 msg = 'commit failed: package \'%s\' is missing' % pac
591 raise oscerr.PackageMissing(self.name, pac, msg)
593 for pac in self.pacs_have:
594 state = self.get_state(pac)
597 Package(os.path.join(self.dir, pac)).commit(msg)
599 self.commitDelPackage(pac)
601 self.commitNewPackage(pac, msg)
603 self.write_packages()
605 def commitNewPackage(self, pac, msg = '', files = []):
606 """creates and commits a new package if it does not exist on the server"""
607 if pac in self.pacs_available:
608 print 'package \'%s\' already exists' % pac
610 user = conf.get_apiurl_usr(self.apiurl)
611 edit_meta(metatype='pkg',
612 path_args=(quote_plus(self.name), quote_plus(pac)),
617 # display the correct dir when sending the changes
619 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
623 p = Package(os.path.join(self.dir, pac))
625 print statfrmt('Sending', os.path.normpath(p.dir))
627 self.set_state(pac, ' ')
630 def commitDelPackage(self, pac):
631 """deletes a package on the server and in the working copy"""
633 # display the correct dir when sending the changes
634 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
637 pac_dir = os.path.join(self.dir, pac)
638 p = Package(os.path.join(self.dir, pac))
639 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
640 delete_storedir(p.storedir)
646 pac_dir = os.path.join(self.dir, pac)
647 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
648 print statfrmt('Deleting', getTransActPath(pac_dir))
649 delete_package(self.apiurl, self.name, pac)
650 self.del_package_node(pac)
652 def commitExtPackage(self, pac, msg, files = []):
653 """commits a package from an external project"""
654 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
657 pac_path = os.path.join(self.dir, pac)
659 project = store_read_project(pac_path)
660 package = store_read_package(pac_path)
661 apiurl = store_read_apiurl(pac_path)
662 if meta_exists(metatype='pkg',
663 path_args=(quote_plus(project), quote_plus(package)),
665 create_new=False, apiurl=apiurl):
666 p = Package(pac_path)
670 user = conf.get_apiurl_usr(self.apiurl)
671 edit_meta(metatype='pkg',
672 path_args=(quote_plus(project), quote_plus(package)),
677 p = Package(pac_path)
683 r.append('*****************************************************')
684 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
685 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
686 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
687 r.append('*****************************************************')
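# A small, hedged illustration of the package-tracking bookkeeping above: the
# _packages file in the store holds one <package> element per tracked package,
# whose state letter mirrors what addPackage()/delPackage()/set_state() write, e.g.
#   <package name="hello_world" state=" "/>   (checked out, clean entry)
#   <package name="newpkg" state="A"/>        (added locally, not yet committed)
#   <package name="oldpkg" state="D"/>        (scheduled for deletion)
# Packages whose directory disappeared outside of osc end up in pacs_broken and
# are reported with the '!' state.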
693 """represent a package (its directory) and read/keep/write its metadata"""
694 def __init__(self, workingdir, progress_obj=None, limit_size=None):
695 self.dir = workingdir
696 self.absdir = os.path.abspath(self.dir)
697 self.storedir = os.path.join(self.absdir, store)
698 self.progress_obj = progress_obj
699 self.limit_size = limit_size
700 if limit_size == 0:
701 self.limit_size = None
703 check_store_version(self.dir)
705 self.prjname = store_read_project(self.dir)
706 self.name = store_read_package(self.dir)
707 self.apiurl = store_read_apiurl(self.dir)
709 self.update_datastructs()
713 self.todo_delete = []
716 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
717 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
720 def addfile(self, n):
721 st = os.stat(os.path.join(self.dir, n))
722 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
724 def delete_file(self, n, force=False):
725 """deletes a file if possible and marks the file as deleted"""
728 state = self.status(n)
732 if state in ['?', 'A', 'M'] and not force:
733 return (False, state)
734 self.delete_localfile(n)
736 self.put_on_deletelist(n)
737 self.write_deletelist()
739 self.delete_storefile(n)
742 def delete_storefile(self, n):
743 try: os.unlink(os.path.join(self.storedir, n))
746 def delete_localfile(self, n):
747 try: os.unlink(os.path.join(self.dir, n))
750 def put_on_deletelist(self, n):
751 if n not in self.to_be_deleted:
752 self.to_be_deleted.append(n)
754 def put_on_conflictlist(self, n):
755 if n not in self.in_conflict:
756 self.in_conflict.append(n)
758 def clear_from_conflictlist(self, n):
759 """delete an entry from the file, and remove the file if it would be empty"""
760 if n in self.in_conflict:
762 filename = os.path.join(self.dir, n)
763 storefilename = os.path.join(self.storedir, n)
764 myfilename = os.path.join(self.dir, n + '.mine')
765 if self.islinkrepair() or self.ispulled():
766 upfilename = os.path.join(self.dir, n + '.new')
768 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
771 os.unlink(myfilename)
772 # the working copy may be updated, so the .r* ending may be obsolete...
774 os.unlink(upfilename)
775 if self.islinkrepair() or self.ispulled():
776 os.unlink(os.path.join(self.dir, n + '.old'))
780 self.in_conflict.remove(n)
782 self.write_conflictlist()
784 def write_sizelimit(self):
785 if self.size_limit and self.size_limit <= 0:
787 os.unlink(os.path.join(self.storedir, '_size_limit'))
791 fname = os.path.join(self.storedir, '_size_limit')
793 f.write(str(self.size_limit))
796 def write_deletelist(self):
797 if len(self.to_be_deleted) == 0:
799 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
803 fname = os.path.join(self.storedir, '_to_be_deleted')
805 f.write('\n'.join(self.to_be_deleted))
809 def delete_source_file(self, n):
810 """delete local a source file"""
811 self.delete_localfile(n)
812 self.delete_storefile(n)
814 def delete_remote_source_file(self, n):
815 """delete a remote source file (e.g. from the server)"""
817 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
820 def put_source_file(self, n):
822 # escaping '+' in the URL path (note: not in the URL query string) is
823 # only a workaround for ruby on rails, which swallows it otherwise
825 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
826 http_PUT(u, file = os.path.join(self.dir, n))
828 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
830 def commit(self, msg=''):
831 # commit only if the upstream revision is the same as the working copy's
832 upstream_rev = self.latest_rev()
833 if self.rev != upstream_rev:
834 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
837 self.todo = self.filenamelist_unvers + self.filenamelist
839 pathn = getTransActPath(self.dir)
841 have_conflicts = False
842 for filename in self.todo:
843 if not filename.startswith('_service:') and not filename.startswith('_service_'):
844 st = self.status(filename)
846 self.todo.remove(filename)
847 elif st == 'A' or st == 'M':
848 self.todo_send.append(filename)
849 print statfrmt('Sending', os.path.join(pathn, filename))
851 self.todo_delete.append(filename)
852 print statfrmt('Deleting', os.path.join(pathn, filename))
854 have_conflicts = True
857 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
860 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
861 print 'nothing to do for package %s' % self.name
864 if self.islink() and self.isexpanded():
865 # resolve the link into the upload revision
866 # XXX: do this always?
867 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
868 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
871 print 'Transmitting file data ',
873 for filename in self.todo_delete:
874 # do not touch local files on commit --
875 # delete remotely instead
876 self.delete_remote_source_file(filename)
877 self.to_be_deleted.remove(filename)
878 for filename in self.todo_send:
879 sys.stdout.write('.')
881 self.put_source_file(filename)
883 # all source files are committed - now comes the log
884 query = { 'cmd' : 'commit',
886 'user' : conf.get_apiurl_usr(self.apiurl),
888 if self.islink() and self.isexpanded():
889 query['keeplink'] = '1'
890 if conf.config['linkcontrol'] or self.isfrozen():
891 query['linkrev'] = self.linkinfo.srcmd5
893 query['repairlink'] = '1'
894 query['linkrev'] = self.get_pulled_srcmd5()
895 if self.islinkrepair():
896 query['repairlink'] = '1'
897 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
900 # delete upload revision
902 query = { 'cmd': 'deleteuploadrev' }
903 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
909 root = ET.parse(f).getroot()
910 self.rev = int(root.get('rev'))
912 print 'Committed revision %s.' % self.rev
915 os.unlink(os.path.join(self.storedir, '_pulled'))
916 if self.islinkrepair():
917 os.unlink(os.path.join(self.storedir, '_linkrepair'))
918 self.linkrepair = False
919 # XXX: mark package as invalid?
920 print 'The source link has been repaired. This directory can now be removed.'
921 if self.islink() and self.isexpanded():
922 self.update_local_filesmeta(revision=self.latest_rev())
924 self.update_local_filesmeta()
925 self.write_deletelist()
926 self.update_datastructs()
928 if self.filenamelist.count('_service'):
929 print 'The package contains a source service.'
930 for filename in self.todo:
931 if filename.startswith('_service:') and os.path.exists(filename):
932 os.unlink(filename) # remove local files
933 print_request_list(self.apiurl, self.prjname, self.name)
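# Hedged summary of the requests commit() issues above, using the same makeurl()
# routes that appear in the code (elided query fields are not restated here):
#   1. expanded links only:  POST /source/<prj>/<pkg>?cmd=copy&rev=upload&orev=<rev>
#   2. per file to delete:   delete_remote_source_file() on /source/<prj>/<pkg>/<file>
#   3. per file to send:     PUT /source/<prj>/<pkg>/<file>
#   4. finally:              POST /source/<prj>/<pkg>?cmd=commit&user=<user>&...
# and the rev attribute of the response's root element becomes the new self.rev.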
935 def write_conflictlist(self):
936 if len(self.in_conflict) == 0:
938 os.unlink(os.path.join(self.storedir, '_in_conflict'))
942 fname = os.path.join(self.storedir, '_in_conflict')
944 f.write('\n'.join(self.in_conflict))
948 def updatefile(self, n, revision):
949 filename = os.path.join(self.dir, n)
950 storefilename = os.path.join(self.storedir, n)
951 mtime = self.findfilebyname(n).mtime
953 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
954 os.utime(filename, (-1, mtime))
956 shutil.copyfile(filename, storefilename)
958 def mergefile(self, n):
959 filename = os.path.join(self.dir, n)
960 storefilename = os.path.join(self.storedir, n)
961 myfilename = os.path.join(self.dir, n + '.mine')
962 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
963 os.rename(filename, myfilename)
965 mtime = self.findfilebyname(n).mtime
966 get_source_file(self.apiurl, self.prjname, self.name, n,
967 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
968 os.utime(upfilename, (-1, mtime))
970 if binary_file(myfilename) or binary_file(upfilename):
972 shutil.copyfile(upfilename, filename)
973 shutil.copyfile(upfilename, storefilename)
974 self.in_conflict.append(n)
975 self.write_conflictlist()
979 # diff3 OPTIONS... MINE OLDER YOURS
980 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
981 # we would rather use the subprocess module, but it is not available before 2.4
982 ret = subprocess.call(merge_cmd, shell=True)
984 # "An exit status of 0 means `diff3' was successful, 1 means some
985 # conflicts were found, and 2 means trouble."
987 # merge was successful... clean up
988 shutil.copyfile(upfilename, storefilename)
989 os.unlink(upfilename)
990 os.unlink(myfilename)
994 shutil.copyfile(upfilename, storefilename)
995 self.in_conflict.append(n)
996 self.write_conflictlist()
999 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
1000 print >>sys.stderr, 'the command line was:'
1001 print >>sys.stderr, merge_cmd
1006 def update_local_filesmeta(self, revision=None):
1008 Update the local _files file in the store.
1009 It is replaced with the version pulled from upstream.
1011 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision, limit_size=self.limit_size))
1012 store_write_string(self.absdir, '_files', meta)
1014 def update_datastructs(self):
1016 Update the internal data structures if the local _files
1017 file has changed (e.g. update_local_filesmeta() has been
1021 files_tree = read_filemeta(self.dir)
1022 files_tree_root = files_tree.getroot()
1024 self.rev = files_tree_root.get('rev')
1025 self.srcmd5 = files_tree_root.get('srcmd5')
1027 self.linkinfo = Linkinfo()
1028 self.linkinfo.read(files_tree_root.find('linkinfo'))
1030 self.filenamelist = []
1033 for node in files_tree_root.findall('entry'):
1035 f = File(node.get('name'),
1037 int(node.get('size')),
1038 int(node.get('mtime')))
1039 if node.get('skipped'):
1040 self.skipped.append(f.name)
1042 # okay, a very old version of _files, which didn't contain any metadata yet...
1043 f = File(node.get('name'), '', 0, 0)
1044 self.filelist.append(f)
1045 self.filenamelist.append(f.name)
1047 self.to_be_deleted = read_tobedeleted(self.dir)
1048 self.in_conflict = read_inconflict(self.dir)
1049 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1050 self.size_limit = read_sizelimit(self.dir)
1052 # gather unversioned files, but ignore some stuff
1053 self.excluded = [ i for i in os.listdir(self.dir)
1054 for j in conf.config['exclude_glob']
1055 if fnmatch.fnmatch(i, j) ]
1056 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1057 if i not in self.excluded
1058 if i not in self.filenamelist ]
1061 """tells us if the package is a link (has 'linkinfo').
1062 A package with linkinfo is a package which links to another package.
1063 Returns True if the package is a link, otherwise False."""
1064 return self.linkinfo.islink()
1066 def isexpanded(self):
1067 """tells us if the package is a link which is expanded.
1068 Returns True if the package is expanded, otherwise False."""
1069 return self.linkinfo.isexpanded()
1071 def islinkrepair(self):
1072 """tells us if we are repairing a broken source link."""
1073 return self.linkrepair
1076 """tells us if we have pulled a link."""
1077 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1080 """tells us if the link is frozen."""
1081 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1083 def get_pulled_srcmd5(self):
1085 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1086 pulledrev = line.strip()
1089 def haslinkerror(self):
1091 Returns True if the link is broken otherwise False.
1092 If the package is not a link it returns False.
1094 return self.linkinfo.haserror()
1096 def linkerror(self):
1098 Returns an error message if the link is broken otherwise None.
1099 If the package is not a link it returns None.
1101 return self.linkinfo.error
1103 def update_local_pacmeta(self):
1105 Update the local _meta file in the store.
1106 It is replaced with the version pulled from upstream.
1108 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1109 store_write_string(self.absdir, '_meta', meta)
1111 def findfilebyname(self, n):
1112 for i in self.filelist:
1116 def status(self, n):
1120 file storefile file present STATUS
1121 exists exists in _files
1124 x x x ' ' if digest differs: 'M'
1125 and if in conflicts file: 'C'
1127 x - x 'D' and listed in _to_be_deleted
1129 - x - 'D' (when file in working copy is already deleted)
1130 - - x 'F' (new in repo, but not yet in working copy)
1135 known_by_meta = False
1137 exists_in_store = False
1138 if n in self.filenamelist:
1139 known_by_meta = True
1140 if os.path.exists(os.path.join(self.absdir, n)):
1142 if os.path.exists(os.path.join(self.storedir, n)):
1143 exists_in_store = True
1146 if n in self.skipped:
1148 elif exists and not exists_in_store and known_by_meta:
1150 elif n in self.to_be_deleted:
1152 elif n in self.in_conflict:
1154 elif exists and exists_in_store and known_by_meta:
1155 #print self.findfilebyname(n)
1156 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1160 elif exists and not exists_in_store and not known_by_meta:
1162 elif exists and exists_in_store and not known_by_meta:
1164 elif not exists and exists_in_store and known_by_meta:
1166 elif not exists and not exists_in_store and known_by_meta:
1168 elif not exists and exists_in_store and not known_by_meta:
1170 elif not exists and not exists_in_store and not known_by_meta:
1171 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1172 raise IOError('osc: \'%s\' is not under version control' % n)
1176 def comparePac(self, cmp_pac):
1178 This method compares the local filelist with
1179 the filelist of the passed package to see which files
1180 were added, removed and changed.
1187 for file in self.filenamelist+self.filenamelist_unvers:
1188 state = self.status(file)
1189 if file in self.skipped:
1191 if state == 'A' and (not file in cmp_pac.filenamelist):
1192 added_files.append(file)
1193 elif file in cmp_pac.filenamelist and state == 'D':
1194 removed_files.append(file)
1195 elif state == ' ' and not file in cmp_pac.filenamelist:
1196 added_files.append(file)
1197 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1198 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1199 changed_files.append(file)
1200 for file in cmp_pac.filenamelist:
1201 if not file in self.filenamelist:
1202 removed_files.append(file)
1203 removed_files = set(removed_files)
1205 return changed_files, added_files, removed_files
1207 def merge(self, otherpac):
1208 self.todo += otherpac.todo
1222 '\n '.join(self.filenamelist),
1230 def read_meta_from_spec(self, spec = None):
1235 # scan for spec files
1236 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1237 if len(speclist) == 1:
1238 specfile = speclist[0]
1239 elif len(speclist) > 1:
1240 print 'the following specfiles were found:'
1241 for file in speclist:
1243 print 'please specify one with --specfile'
1246 print 'no specfile was found - please specify one ' \
1250 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1251 self.summary = data['Summary']
1252 self.url = data['Url']
1253 self.descr = data['%description']
1256 def update_package_meta(self, force=False):
1258 for the updatepacmetafromspec subcommand
1259 the force argument suppresses the confirmation question
1262 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1264 root = ET.fromstring(m)
1265 root.find('title').text = self.summary
1266 root.find('description').text = ''.join(self.descr)
1267 url = root.find('url')
1269 url = ET.SubElement(root, 'url')
1272 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1273 mf = metafile(u, ET.tostring(root))
1276 print '*' * 36, 'old', '*' * 36
1278 print '*' * 36, 'new', '*' * 36
1279 print ET.tostring(root)
1281 repl = raw_input('Write? (y/N/e) ')
1292 def mark_frozen(self):
1293 store_write_string(self.absdir, '_frozenlink', '')
1295 print "The link in this package is currently broken. Checking"
1296 print "out the last working version instead; please use 'osc pull'"
1297 print "to repair the link."
1300 def unmark_frozen(self):
1301 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1302 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1304 def latest_rev(self):
1305 if self.islinkrepair():
1306 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1307 elif self.islink() and self.isexpanded():
1308 if self.isfrozen() or self.ispulled():
1309 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1312 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1315 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1317 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1320 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1323 def update(self, rev = None, service_files = False, limit_size = None):
1324 # save filelist and (modified) status before replacing the meta file
1325 saved_filenames = self.filenamelist
1326 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1330 self.limit_size = limit_size
1332 self.limit_size = read_sizelimit(self.dir)
1333 self.update_local_filesmeta(rev)
1334 self = Package(self.dir, progress_obj=self.progress_obj)
1336 # which files do no longer exist upstream?
1337 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1339 pathn = getTransActPath(self.dir)
1341 for filename in saved_filenames:
1342 if filename in self.skipped:
1344 if not filename.startswith('_service:') and filename in disappeared:
1345 print statfrmt('D', os.path.join(pathn, filename))
1346 # keep file if it has local modifications
1347 if oldp.status(filename) == ' ':
1348 self.delete_localfile(filename)
1349 self.delete_storefile(filename)
1351 for filename in self.filenamelist:
1352 if filename in self.skipped:
1355 state = self.status(filename)
1356 if not service_files and filename.startswith('_service:'):
1358 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1359 # no merge necessary... local file is changed, but upstream isn't
1361 elif state == 'M' and filename in saved_modifiedfiles:
1362 status_after_merge = self.mergefile(filename)
1363 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1365 self.updatefile(filename, rev)
1366 print statfrmt('U', os.path.join(pathn, filename))
1368 self.updatefile(filename, rev)
1369 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1371 self.updatefile(filename, rev)
1372 print statfrmt('A', os.path.join(pathn, filename))
1373 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1374 self.updatefile(filename, rev)
1375 self.delete_storefile(filename)
1376 print statfrmt('U', os.path.join(pathn, filename))
1380 self.update_local_pacmeta()
1382 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1383 print 'At revision %s.' % self.rev
1385 if not service_files:
1386 self.run_source_services()
1388 def run_source_services(self):
1389 if self.filenamelist.count('_service'):
1390 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1393 si.execute(self.absdir)
1395 def prepare_filelist(self):
1396 """Prepare a list of files, which will be processed by process_filelist
1397 method. This allows easy modifications of a file list in commit
1401 self.todo = self.filenamelist + self.filenamelist_unvers
1405 for f in [f for f in self.todo if not os.path.isdir(f)]:
1407 status = self.status(f)
1412 ret += "%s %s %s\n" % (action, status, f)
1415 # Edit a filelist for package \'%s\'
1417 # l, leave = leave a file as is
1418 # r, remove = remove a file
1419 # a, add = add a file
1421 # If you remove a file from the list, it will be left unchanged
1422 # If you remove all of them, the commit will be aborted""" % self.name
1426 def edit_filelist(self):
1427 """Opens a package list in editor for editing. This allows easy
1428 modifications of it just by simple text editing
1432 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1433 f = os.fdopen(fd, 'w')
1434 f.write(self.prepare_filelist())
1436 mtime_orig = os.stat(filename).st_mtime
1438 if sys.platform[:3] != 'win':
1439 editor = os.getenv('EDITOR', default='vim')
1441 editor = os.getenv('EDITOR', default='notepad')
1443 subprocess.call('%s %s' % (editor, filename), shell=True)
1444 mtime = os.stat(filename).st_mtime
1445 if mtime_orig < mtime:
1446 filelist = open(filename).readlines()
1450 raise oscerr.UserAbort()
1452 return self.process_filelist(filelist)
1454 def process_filelist(self, filelist):
1455 """Process a filelist - it add/remove or leave files. This depends on
1456 user input. If no file is processed, it raises an ValueError
1460 for line in [l.strip() for l in filelist if l.strip() and not l.startswith('#')]:
1462 foo = line.split(' ')
1464 action, state, name = (foo[0], ' ', foo[3])
1466 action, state, name = (foo[0], foo[1], foo[2])
1469 action = action.lower()
1472 if action in ('r', 'remove'):
1473 if self.status(name) == '?':
1475 if name in self.todo:
1476 self.todo.remove(name)
1478 self.delete_file(name, True)
1479 elif action in ('a', 'add'):
1480 if self.status(name) != '?':
1481 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1484 elif action in ('l', 'leave'):
1487 raise ValueError("Unknow action `%s'" % action)
1490 raise ValueError("Empty filelist")
1493 """for objects to represent the review state in a request"""
1494 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1496 self.by_user = by_user
1497 self.by_group = by_group
1500 self.comment = comment
1503 """for objects to represent the "state" of a request"""
1504 def __init__(self, name=None, who=None, when=None, comment=None):
1508 self.comment = comment
1511 """represents an action"""
1512 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1514 self.src_project = src_project
1515 self.src_package = src_package
1516 self.src_rev = src_rev
1517 self.dst_project = dst_project
1518 self.dst_package = dst_package
1519 self.src_update = src_update
1522 """represent a request and holds its metadata
1523 it has methods to read in metadata from xml,
1524 different views, ..."""
1527 self.state = RequestState()
1530 self.last_author = None
1533 self.statehistory = []
1536 def read(self, root):
1537 self.reqid = int(root.get('id'))
1538 actions = root.findall('action')
1539 if len(actions) == 0:
1540 actions = [ root.find('submit') ] # for old style requests
1542 for action in actions:
1543 type = action.get('type', 'submit')
1545 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1546 if action.findall('source'):
1547 n = action.find('source')
1548 src_prj = n.get('project', None)
1549 src_pkg = n.get('package', None)
1550 src_rev = n.get('rev', None)
1551 if action.findall('target'):
1552 n = action.find('target')
1553 dst_prj = n.get('project', None)
1554 dst_pkg = n.get('package', None)
1555 if action.findall('options'):
1556 n = action.find('options')
1557 if n.findall('sourceupdate'):
1558 src_update = n.find('sourceupdate').text.strip()
1559 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1561 msg = 'invalid request format:\n%s' % ET.tostring(root)
1562 raise oscerr.APIError(msg)
1565 n = root.find('state')
1566 self.state.name, self.state.who, self.state.when \
1567 = n.get('name'), n.get('who'), n.get('when')
1569 self.state.comment = n.find('comment').text.strip()
1571 self.state.comment = None
1573 # read the review states
1574 for r in root.findall('review'):
1576 s.state = r.get('state')
1577 s.by_user = r.get('by_user')
1578 s.by_group = r.get('by_group')
1579 s.who = r.get('who')
1580 s.when = r.get('when')
1582 s.comment = r.find('comment').text.strip()
1585 self.reviews.append(s)
1587 # read the state history
1588 for h in root.findall('history'):
1590 s.name = h.get('name')
1591 s.who = h.get('who')
1592 s.when = h.get('when')
1594 s.comment = h.find('comment').text.strip()
1597 self.statehistory.append(s)
1598 self.statehistory.reverse()
1600 # read a description, if it exists
1602 n = root.find('description').text
1607 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1608 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1609 dst_prj, dst_pkg, src_update)
1612 def list_view(self):
1613 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1615 for a in self.actions:
1616 dst = "%s/%s" % (a.dst_project, a.dst_package)
1617 if a.src_package == a.dst_package:
1621 if a.type=="submit":
1622 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1623 if a.type=="change_devel":
1624 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1625 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1627 ret += '\n %s: %-50s %-20s ' % \
1628 (a.type, sr_source, dst)
1630 if self.statehistory and self.statehistory[0]:
1632 for h in self.statehistory:
1633 who.append("%s(%s)" % (h.who,h.name))
1635 ret += "\n From: %s" % (' -> '.join(who))
1637 txt = re.sub(r'[^\t\n\x20-\x7e]', '_', self.descr) # mask non-printable characters
1639 lines = txt.splitlines()
1640 wrapper = textwrap.TextWrapper( width = 80,
1641 initial_indent=' Descr: ',
1642 subsequent_indent=' ')
1643 ret += "\n" + wrapper.fill(lines[0])
1644 wrapper.initial_indent = ' '
1645 for line in lines[1:]:
1646 ret += "\n" + wrapper.fill(line)
1652 def __cmp__(self, other):
1653 return cmp(self.reqid, other.reqid)
1657 for action in self.actions:
1658 action_list=" %s: " % (action.type)
1659 if action.type=="submit":
1662 r="(r%s)" % (action.src_rev)
1664 if action.src_update:
1665 m="(%s)" % (action.src_update)
1666 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1667 if action.dst_package:
1668 action_list=action_list+"/%s" % ( action.dst_package )
1669 elif action.type=="delete":
1670 action_list=action_list+" %s" % ( action.dst_project )
1671 if action.dst_package:
1672 action_list=action_list+"/%s" % ( action.dst_package )
1673 elif action.type=="change_devel":
1674 action_list=action_list+" %s/%s developed in %s/%s" % \
1675 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1676 action_list=action_list+"\n"
1691 self.state.name, self.state.when, self.state.who,
1694 if len(self.reviews):
1695 reviewitems = [ '%-10s %s %s %s %s %s' \
1696 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1697 for i in self.reviews ]
1698 s += '\nReview: ' + '\n '.join(reviewitems)
1701 if len(self.statehistory):
1702 histitems = [ '%-10s %s %s' \
1703 % (i.name, i.when, i.who) \
1704 for i in self.statehistory ]
1705 s += '\nHistory: ' + '\n '.join(histitems)
1712 """format time as Apr 02 18:19
1714 depending on whether it is in the current year
1718 if time.localtime()[0] == time.localtime(t)[0]:
1720 return time.strftime('%b %d %H:%M',time.localtime(t))
1722 return time.strftime('%b %d %Y',time.localtime(t))
1725 def is_project_dir(d):
1726 return os.path.exists(os.path.join(d, store, '_project')) and not \
1727 os.path.exists(os.path.join(d, store, '_package'))
1730 def is_package_dir(d):
1731 return os.path.exists(os.path.join(d, store, '_project')) and \
1732 os.path.exists(os.path.join(d, store, '_package'))
1734 def parse_disturl(disturl):
1735 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1736 revision), else raises an oscerr.WrongArgs exception
1739 m = DISTURL_RE.match(disturl)
1741 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1743 apiurl = m.group('apiurl')
1744 if apiurl.split('.')[0] != 'api':
1745 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1746 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
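# Worked example (hedged; the md5 part is shown as a placeholder): for a disturl like
#   obs://build.opensuse.org/openSUSE:Factory/standard/<srcmd5>-hello_world
# the regex groups give apiurl='build.opensuse.org', which the code above rewrites
# to 'https://api.opensuse.org', so the call returns
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'hello_world', 'standard', '<srcmd5>')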
1748 def parse_buildlogurl(buildlogurl):
1749 """Parse a build log url, returns a tuple (apiurl, project, package,
1750 repository, arch), else raises oscerr.WrongArgs exception"""
1752 global BUILDLOGURL_RE
1754 m = BUILDLOGURL_RE.match(buildlogurl)
1756 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1758 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1761 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1762 This is handy to allow copy/paste a project/package combination in this form.
1764 Trailing slashes are removed before the split, because the split would
1765 otherwise give an additional empty string.
1773 def expand_proj_pack(args, idx=0, howmany=0):
1774 """looks for occurance of '.' at the position idx.
1775 If howmany is 2, both proj and pack are expanded together
1776 using the current directory, or none of them, if not possible.
1777 If howmany is 0, proj is expanded if possible, then, if there
1778 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1779 expanded, if possible.
1780 If howmany is 1, only proj is expanded if possible.
1782 If args[idx] does not exist, an implicit '.' is assumed.
1783 If not enough elements up to idx exist, an error is raised.
1785 See also parseargs(args), slash_split(args), findpacs(args)
1786 All these need unification, somehow.
1789 # print args,idx,howmany
1792 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1794 if len(args) == idx:
1796 if args[idx+0] == '.':
1797 if howmany == 0 and len(args) > idx+1:
1798 if args[idx+1] == '.':
1800 # remove one dot and make sure to expand both proj and pack
1805 # print args,idx,howmany
1807 args[idx+0] = store_read_project('.')
1810 package = store_read_package('.')
1811 args.insert(idx+1, package)
1815 package = store_read_package('.')
1816 args.insert(idx+1, package)
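# Hedged example of the expansion above: invoked from inside a checked-out
# package working copy whose store says project 'home:user' and package
# 'hello_world' (names illustrative), expand_proj_pack(['.']) rewrites the list
# in place to ['home:user', 'hello_world'], while with howmany=1 only the
# project part would be filled in.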
1820 def findpacs(files, progress_obj=None):
1821 """collect Package objects belonging to the given files
1822 and make sure each Package is returned only once"""
1825 p = filedir_to_pac(f, progress_obj)
1828 if i.name == p.name:
1838 def filedir_to_pac(f, progress_obj=None):
1839 """Takes a working copy path, or a path to a file inside a working copy,
1840 and returns a Package object instance
1842 If the argument was a filename, add it onto the "todo" list of the Package """
1844 if os.path.isdir(f):
1846 p = Package(wd, progress_obj=progress_obj)
1848 wd = os.path.dirname(f) or os.curdir
1849 p = Package(wd, progress_obj=progress_obj)
1850 p.todo = [ os.path.basename(f) ]
1854 def read_filemeta(dir):
1856 r = ET.parse(os.path.join(dir, store, '_files'))
1857 except SyntaxError, e:
1858 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1859 'When parsing .osc/_files, the following error was encountered:\n'
1864 def read_tobedeleted(dir):
1866 fname = os.path.join(dir, store, '_to_be_deleted')
1868 if os.path.exists(fname):
1869 r = [ line.strip() for line in open(fname) ]
1874 def read_sizelimit(dir):
1876 fname = os.path.join(dir, store, '_size_limit')
1878 if os.path.exists(fname):
1879 r = open(fname).readline()
1881 if r is None or not r.isdigit():
1885 def read_inconflict(dir):
1887 fname = os.path.join(dir, store, '_in_conflict')
1889 if os.path.exists(fname):
1890 r = [ line.strip() for line in open(fname) ]
1895 def parseargs(list_of_args):
1896 """Convenience method osc's commandline argument parsing.
1898 If called with an empty tuple (or list), return a list containing the current directory.
1899 Otherwise, return a list of the arguments."""
1901 return list(list_of_args)
1906 def statfrmt(statusletter, filename):
1907 return '%s %s' % (statusletter, filename)
1910 def pathjoin(a, *p):
1911 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1912 path = os.path.join(a, *p)
1913 if path.startswith('./'):
1918 def makeurl(baseurl, l, query=[]):
1919 """Given a list of path compoments, construct a complete URL.
1921 Optional parameters for a query string can be given as a list, as a
1922 dictionary, or as an already assembled string.
1923 In case of a dictionary, the parameters will be urlencoded by this
1924 function. In case of a list they will not -- this is for backwards compatibility.
1927 if conf.config['verbose'] > 1:
1928 print 'makeurl:', baseurl, l, query
1930 if type(query) == type(list()):
1931 query = '&'.join(query)
1932 elif type(query) == type(dict()):
1933 query = urlencode(query)
1935 scheme, netloc = urlsplit(baseurl)[0:2]
1936 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
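# Usage sketch (values illustrative):
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', 'hello_world'],
#           query={'rev': 'latest'})
# returns 'https://api.opensuse.org/source/openSUSE:Factory/hello_world?rev=latest'.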
1939 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1940 """wrapper around urllib2.urlopen for error handling,
1941 and to support additional (PUT, DELETE) methods"""
1945 if conf.config['http_debug']:
1948 print '--', method, url
1950 if method == 'POST' and not file and not data:
1951 # adding data to an urllib2 request transforms it into a POST
1954 req = urllib2.Request(url)
1955 api_host_options = {}
1957 api_host_options = conf.get_apiurl_api_host_options(url)
1958 for header, value in api_host_options['http_headers']:
1959 req.add_header(header, value)
1961 # "external" request (url is no apiurl)
1964 req.get_method = lambda: method
1966 # POST requests are application/x-www-form-urlencoded per default
1967 # since we change the request into PUT, we also need to adjust the content type header
1968 if method == 'PUT' or (method == 'POST' and data):
1969 req.add_header('Content-Type', 'application/octet-stream')
1971 if type(headers) == type({}):
1972 for i in headers.keys():
1974 req.add_header(i, headers[i])
1976 if file and not data:
1977 size = os.path.getsize(file)
1979 data = open(file, 'rb').read()
1982 filefd = open(file, 'rb')
1984 if sys.platform[:3] != 'win':
1985 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1987 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1989 except EnvironmentError, e:
1991 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1992 '\non a filesystem which does not support this.' % (e, file))
1993 elif hasattr(e, 'winerror') and e.winerror == 5:
1994 # falling back to the default io
1995 data = open(file, 'rb').read()
1999 if conf.config['debug']: print method, url
2001 old_timeout = socket.getdefaulttimeout()
2002 # XXX: dirty hack as timeout doesn't work with python-m2crypto
2003 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2004 socket.setdefaulttimeout(timeout)
2006 fd = urllib2.urlopen(req, data=data)
2008 if old_timeout != timeout and not api_host_options.get('sslcertck'):
2009 socket.setdefaulttimeout(old_timeout)
2010 if hasattr(conf.cookiejar, 'save'):
2011 conf.cookiejar.save(ignore_discard=True)
2013 if filefd: filefd.close()
2018 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
2019 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
2020 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
2021 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
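# A minimal sketch (hypothetical helper, not part of osc) of how these thin
# wrappers are combined with makeurl() and ElementTree, mirroring the pattern
# of the meta_* helpers further down:
def _example_list_source_files(apiurl, project, package):
    """Return the file names of a package's current source revision."""
    u = makeurl(apiurl, ['source', project, package])
    f = http_GET(u)
    root = ET.parse(f).getroot()
    return [ node.get('name') for node in root.findall('entry') ]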
2024 def init_project_dir(apiurl, dir, project):
2025 if not os.path.exists(dir):
2026 if conf.config['checkout_no_colon']:
2027 os.makedirs(dir) # helpful with checkout_no_colon
2030 if not os.path.exists(os.path.join(dir, store)):
2031 os.mkdir(os.path.join(dir, store))
2033 # print 'project=',project,' dir=',dir
2034 store_write_project(dir, project)
2035 store_write_apiurl(dir, apiurl)
2036 if conf.config['do_package_tracking']:
2037 store_write_initial_packages(dir, project, [])
2039 def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None):
2040 if not os.path.isdir(store):
2043 f = open('_project', 'w')
2044 f.write(project + '\n')
2046 f = open('_package', 'w')
2047 f.write(package + '\n')
2051 f = open('_size_limit', 'w')
2052 f.write(str(limit_size))
2056 f = open('_files', 'w')
2057 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size)))
2061 ET.ElementTree(element=ET.Element('directory')).write('_files')
2063 f = open('_osclib_version', 'w')
2064 f.write(__store_version__ + '\n')
2067 store_write_apiurl(os.path.pardir, apiurl)
2073 def check_store_version(dir):
2074 versionfile = os.path.join(dir, store, '_osclib_version')
2076 v = open(versionfile).read().strip()
2081 msg = 'Error: "%s" is not an osc package working copy.' % os.path.abspath(dir)
2082 if os.path.exists(os.path.join(dir, '.svn')):
2083 msg = msg + '\nTry svn instead of osc.'
2084 raise oscerr.NoWorkingCopy(msg)
2086 if v != __store_version__:
2087 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2088 # version is fine, no migration needed
2089 f = open(versionfile, 'w')
2090 f.write(__store_version__ + '\n')
2093 msg = 'The osc metadata of your working copy "%s"' % dir
2094 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2095 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2096 raise oscerr.WorkingCopyWrongVersion, msg
2099 def meta_get_packagelist(apiurl, prj):
2101 u = makeurl(apiurl, ['source', prj])
2103 root = ET.parse(f).getroot()
2104 return [ node.get('name') for node in root.findall('entry') ]
2107 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2108 """return a list of file names,
2109 or a list of File() instances if verbose=True"""
2115 query['rev'] = revision
2117 query['rev'] = 'latest'
2119 u = makeurl(apiurl, ['source', prj, package], query=query)
2121 root = ET.parse(f).getroot()
2124 return [ node.get('name') for node in root.findall('entry') ]
2128 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2129 rev = root.get('rev')
2130 for node in root.findall('entry'):
2131 f = File(node.get('name'),
2133 int(node.get('size')),
2134 int(node.get('mtime')))
2140 def meta_get_project_list(apiurl):
2141 u = makeurl(apiurl, ['source'])
2143 root = ET.parse(f).getroot()
2144 return sorted([ node.get('name') for node in root ])
2147 def show_project_meta(apiurl, prj):
2148 url = makeurl(apiurl, ['source', prj, '_meta'])
2150 return f.readlines()
2153 def show_project_conf(apiurl, prj):
2154 url = makeurl(apiurl, ['source', prj, '_config'])
2156 return f.readlines()
2159 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2160 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2164 except urllib2.HTTPError, e:
2165 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2169 def show_package_meta(apiurl, prj, pac):
2170 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2173 return f.readlines()
2174 except urllib2.HTTPError, e:
2175 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2179 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2181 path.append('source')
2187 path.append('_attribute')
2189 path.append(attribute)
2192 query.append("with_default=1")
2194 query.append("with_project=1")
2195 url = makeurl(apiurl, path, query)
2198 return f.readlines()
2199 except urllib2.HTTPError, e:
2200 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2204 def show_develproject(apiurl, prj, pac):
2205 m = show_package_meta(apiurl, prj, pac)
2207 return ET.fromstring(''.join(m)).find('devel').get('project')
2212 def show_pattern_metalist(apiurl, prj):
2213 url = makeurl(apiurl, ['source', prj, '_pattern'])
2217 except urllib2.HTTPError, e:
2218 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2220 r = [ node.get('name') for node in tree.getroot() ]
2225 def show_pattern_meta(apiurl, prj, pattern):
2226 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2229 return f.readlines()
2230 except urllib2.HTTPError, e:
2231 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2236 """metafile that can be manipulated and is stored back after manipulation."""
2237 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2241 self.change_is_required = change_is_required
2242 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2243 f = os.fdopen(fd, 'w')
2244 f.write(''.join(input))
2246 self.hash_orig = dgst(self.filename)
2249 hash = dgst(self.filename)
2250 if self.change_is_required and hash == self.hash_orig:
2251 print 'File unchanged. Not saving.'
2252 os.unlink(self.filename)
2255 print 'Sending meta data...'
2256 # don't do any exception handling... it's up to the caller what to do in case
2258 http_PUT(self.url, file=self.filename)
2259 os.unlink(self.filename)
2263 if sys.platform[:3] != 'win':
2264 editor = os.getenv('EDITOR', default='vim')
2266 editor = os.getenv('EDITOR', default='notepad')
2269 subprocess.call('%s %s' % (editor, self.filename), shell=True)
2273 except urllib2.HTTPError, e:
2274 error_help = "%d" % e.code
2275 if e.headers.get('X-Opensuse-Errorcode'):
2276 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2278 print >>sys.stderr, 'BuildService API error:', error_help
2279 # examine the error - we can't raise an exception because we might want
2282 if '<summary>' in data:
2283 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2284 input = raw_input('Try again? ([y/N]): ')
2285 if input not in ['y', 'Y']:
2291 if os.path.exists(self.filename):
2292 print 'discarding %s' % self.filename
2293 os.unlink(self.filename)
2296 # different types of metadata
2297 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2298 'template': new_project_templ,
2301 'pkg': { 'path' : 'source/%s/%s/_meta',
2302 'template': new_package_templ,
2305 'attribute': { 'path' : 'source/%s/%s/_meta',
2306 'template': new_attribute_templ,
2309 'prjconf': { 'path': 'source/%s/_config',
2313 'user': { 'path': 'person/%s',
2314 'template': new_user_template,
2317 'pattern': { 'path': 'source/%s/_pattern/%s',
2318 'template': new_pattern_template,
2323 def meta_exists(metatype,
2330 apiurl = conf.config['apiurl']
2331 url = make_meta_url(metatype, path_args, apiurl)
2333 data = http_GET(url).readlines()
2334 except urllib2.HTTPError, e:
2335 if e.code == 404 and create_new:
2336 data = metatypes[metatype]['template']
2338 data = StringIO(data % template_args).readlines()
2343 def make_meta_url(metatype, path_args=None, apiurl=None):
2345 apiurl = conf.config['apiurl']
2346 if metatype not in metatypes.keys():
2347 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2348 path = metatypes[metatype]['path']
2351 path = path % path_args
2353 return makeurl(apiurl, [path])
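
# Usage sketch (illustrative): build a metadata URL via the metatypes table
# and fetch an existing package meta (mirroring the call used by link_pac
# further below); prj and pac are placeholder values.
def _example_fetch_pkg_meta(apiurl, prj, pac):
    print make_meta_url('pkg', (quote_plus(prj), quote_plus(pac)), apiurl)
    return meta_exists(metatype='pkg',
                       path_args=(quote_plus(prj), quote_plus(pac)),
                       template_args=None,
                       create_new=False, apiurl=apiurl)
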
2356 def edit_meta(metatype,
2361 change_is_required=False,
2365 apiurl = conf.config['apiurl']
2367 data = meta_exists(metatype,
2370 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2374 change_is_required = True
2376 url = make_meta_url(metatype, path_args, apiurl)
2377 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2385 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None):
2388 query['rev'] = revision
2390 query['rev'] = 'latest'
2392 query['linkrev'] = linkrev
2393 elif conf.config['linkcontrol']:
2394 query['linkrev'] = 'base'
2398 query['emptylink'] = 1
2399 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2401 # look for "too large" files according to size limit and mark them
2402 root = ET.fromstring(''.join(f.readlines()))
2403 for e in root.findall('entry'):
2404 size = e.get('size')
2405 if size and limit_size and int(size) > int(limit_size):
2406 e.set('skipped', 'true')
2407 return ET.tostring(root)
2410 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2411 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2412 return ET.fromstring(''.join(m)).get('srcmd5')
2415 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2416 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2418 # only source link packages have a <linkinfo> element.
2419 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2427 raise oscerr.LinkExpandError(prj, pac, li.error)
2431 def show_upstream_rev(apiurl, prj, pac):
2432 m = show_files_meta(apiurl, prj, pac)
2433 return ET.fromstring(''.join(m)).get('rev')
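
# Usage sketch (illustrative): query revision information of a remote
# package; prj and pac are placeholder values.
def _example_revisions(apiurl, prj, pac):
    print show_upstream_rev(apiurl, prj, pac)                  # latest commit revision
    print show_upstream_srcmd5(apiurl, prj, pac)               # srcmd5 of that revision
    print show_upstream_srcmd5(apiurl, prj, pac, expand=True)  # srcmd5 with a link expanded
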
2436 def read_meta_from_spec(specfile, *args):
2437 import codecs, locale, re
2439 Read tags and sections from spec file. To read out
2440 a tag the passed argument mustn't end with a colon. To
2441 read out a section the passed argument must start with
2443 This method returns a dictionary which contains the
2447 if not os.path.isfile(specfile):
2448 raise IOError('\'%s\' is not a regular file' % specfile)
2451 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2452 except UnicodeDecodeError:
2453 lines = open(specfile).readlines()
2460 if itm.startswith('%'):
2461 sections.append(itm)
2465 tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2467 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2468 if m and m.group('val'):
2469 spec_data[tag] = m.group('val').strip()
2471 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2474 section_pat = '^%s\s*?$'
2475 for section in sections:
2476 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2478 start = lines.index(m.group()+'\n') + 1
2480 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2483 for line in lines[start:]:
2484 if line.startswith('%'):
2487 spec_data[section] = data
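
# Usage sketch (illustrative): pull a tag and a section out of a spec file,
# assuming the dictionary built above is returned keyed by the requested
# tag/section names; the spec filename is a placeholder.
def _example_spec_info(specfile):
    data = read_meta_from_spec(specfile, 'Version', '%description')
    return data['Version'], data['%description']
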
2492 def edit_message(footer='', template=''):
2493 delim = '--This line, and those below, will be ignored--\n'
2495 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2496 f = os.fdopen(fd, 'w')
2505 if sys.platform[:3] != 'win':
2506 editor = os.getenv('EDITOR', default='vim')
2508 editor = os.getenv('EDITOR', default='notepad')
2511 subprocess.call('%s %s' % (editor, filename), shell=True)
2512 msg = open(filename).read().split(delim)[0].rstrip()
2517 input = raw_input('Log message not specified\n'
2518 'a)bort, c)ontinue, e)dit: ')
2520 raise oscerr.UserAbort()
2530 def create_delete_request(apiurl, project, package, message):
2535 package = """package="%s" """ % (package)
2541 <action type="delete">
2542 <target project="%s" %s/>
2545 <description>%s</description>
2547 """ % (project, package,
2548 cgi.escape(message or ''))
2550 u = makeurl(apiurl, ['request'], query='cmd=create')
2551 f = http_POST(u, data=xml)
2553 root = ET.parse(f).getroot()
2554 return root.get('id')
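
# Usage sketch (illustrative): file a delete request for a whole package;
# project, package and message are placeholder values.
def _example_request_deletion(apiurl):
    reqid = create_delete_request(apiurl, 'home:user', 'obsolete-pkg',
                                  'no longer needed')
    print 'created request', reqid
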
2557 def create_change_devel_request(apiurl,
2558 devel_project, devel_package,
2565 <action type="change_devel">
2566 <source project="%s" package="%s" />
2567 <target project="%s" package="%s" />
2570 <description>%s</description>
2572 """ % (devel_project,
2576 cgi.escape(message or ''))
2578 u = makeurl(apiurl, ['request'], query='cmd=create')
2579 f = http_POST(u, data=xml)
2581 root = ET.parse(f).getroot()
2582 return root.get('id')
2585 # This creates an old-style submit request for server API 1.0
2586 def create_submit_request(apiurl,
2587 src_project, src_package,
2588 dst_project=None, dst_package=None,
2589 message=None, orev=None, src_update=None):
2594 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2596 # Yes, this kind of xml construction is horrible
2601 packagexml = """package="%s" """ %( dst_package )
2602 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2603 # XXX: keep the old template for now in order to work with old obs instances
2605 <request type="submit">
2607 <source project="%s" package="%s" rev="%s"/>
2612 <description>%s</description>
2616 orev or show_upstream_rev(apiurl, src_project, src_package),
2619 cgi.escape(message or ""))
2621 u = makeurl(apiurl, ['request'], query='cmd=create')
2622 f = http_POST(u, data=xml)
2624 root = ET.parse(f).getroot()
2625 return root.get('id')
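
# Usage sketch (illustrative): create a submit request from a branch back to
# its parent project; all project/package names are placeholder values.
def _example_submit(apiurl):
    reqid = create_submit_request(apiurl,
                                  'home:user:branches:openSUSE:Factory', 'foo',
                                  'openSUSE:Factory', 'foo',
                                  message='update to version 1.1')
    print 'created request', reqid
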
2628 def get_request(apiurl, reqid):
2629 u = makeurl(apiurl, ['request', reqid])
2631 root = ET.parse(f).getroot()
2638 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2641 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2642 f = http_POST(u, data=message)
2645 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2648 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2649 f = http_POST(u, data=message)
2653 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2655 if not 'all' in req_state:
2656 for state in req_state:
2657 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2659 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2661 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2665 todo['project'] = project
2667 todo['package'] = package
2668 for kind, val in todo.iteritems():
2669 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2670 'action/source/@%(kind)s=\'%(val)s\' or ' \
2671 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2672 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2674 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2675 for i in exclude_target_projects:
2676 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2677 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2679 if conf.config['verbose'] > 1:
2680 print '[ %s ]' % xpath
2681 res = search(apiurl, request=xpath)
2682 collection = res['request']
2684 for root in collection.findall('request'):
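
# Usage sketch (illustrative): fetch all open requests that target or come
# from a given project and return whatever request objects the helper
# collects; the project name is a placeholder.
def _example_open_requests(apiurl):
    return get_request_list(apiurl, project='openSUSE:Factory',
                            req_state=('new', 'review'))
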
2690 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2691 """Return all new requests for all projects/packages where is user is involved"""
2693 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2694 for i in res['project_id'].findall('project'):
2695 projpkgs[i.get('name')] = []
2696 for i in res['package_id'].findall('package'):
2697 if not i.get('project') in projpkgs.keys():
2698 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2700 for prj, pacs in projpkgs.iteritems():
2702 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2706 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2707 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2708 xpath = xpath_join(xpath, xp, inner=True)
2710 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2711 if not 'all' in req_state:
2713 for state in req_state:
2714 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2715 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2716 res = search(apiurl, request=xpath)
2718 for root in res['request'].findall('request'):
2724 def get_request_log(apiurl, reqid):
2725 r = get_request(conf.config['apiurl'], reqid)
2727 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2728 # the description of the request is used for the initial log entry
2729 # otherwise its comment attribute would contain None
2730 if len(r.statehistory) >= 1:
2731 r.statehistory[-1].comment = r.descr
2733 r.state.comment = r.descr
2734 for state in [ r.state ] + r.statehistory:
2735 s = frmt % (state.name, state.who, state.when, str(state.comment))
2740 def get_user_meta(apiurl, user):
2741 u = makeurl(apiurl, ['person', quote_plus(user)])
2744 return ''.join(f.readlines())
2745 except urllib2.HTTPError:
2746 print 'user \'%s\' not found' % user
2750 def get_user_data(apiurl, user, *tags):
2751 """get specified tags from the user meta"""
2752 meta = get_user_meta(apiurl, user)
2755 root = ET.fromstring(meta)
2758 if root.find(tag).text != None:
2759 data.append(root.find(tag).text)
2763 except AttributeError:
2764 # this part is reached if the tags tuple contains an invalid tag
2765 print 'The xml file for user \'%s\' seems to be broken' % user
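
# Usage sketch (illustrative): read a user's e-mail address and real name
# from the person meta, assuming get_user_data returns the collected values
# as a list as the code above suggests; 'user' is a placeholder login.
def _example_user_contact(apiurl, user):
    return get_user_data(apiurl, user, 'email', 'realname')
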
2770 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
2771 import tempfile, shutil
2774 query = { 'rev': revision }
2778 (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
2779 o = os.fdopen(fd, 'wb')
2780 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2781 for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
2784 shutil.move(tmpfile, targetfilename or filename)
2785 os.chmod(targetfilename or filename, 0644)
2793 def get_binary_file(apiurl, prj, repo, arch,
2796 target_filename = None,
2797 target_mtime = None,
2798 progress_meter = False):
2800 target_filename = target_filename or filename
2802 where = package or '_repository'
2803 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2806 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2810 binsize = int(f.headers['content-length'])
2813 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2814 os.chmod(tmpfilename, 0644)
2817 o = os.fdopen(fd, 'wb')
2821 #buf = f.read(BUFSIZE)
2825 downloaded += len(buf)
2827 completion = str(int((float(downloaded)/binsize)*100))
2828 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2833 sys.stdout.write('\n')
2835 shutil.move(tmpfilename, target_filename)
2837 os.utime(target_filename, (-1, target_mtime))
2839 # make sure that the temp file is cleaned up when we are interrupted
2841 try: os.unlink(tmpfilename)
2844 def dgst_from_string(str):
2845 # Python 2.5 deprecates the md5 module
2846 # Python 2.4 doesn't have hashlib yet
2849 md5_hash = hashlib.md5()
2852 md5_hash = md5.new()
2853 md5_hash.update(str)
2854 return md5_hash.hexdigest()
2858 #if not os.path.exists(file):
2868 f = open(file, 'rb')
2870 buf = f.read(BUFSIZE)
2873 return s.hexdigest()
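
# Usage sketch (illustrative): both digest helpers return hex md5 strings;
# dgst() reads a file in BUFSIZE chunks, dgst_from_string() hashes a string
# that is already in memory.  The filename is a placeholder.
def _example_digests(filename):
    print dgst_from_string('some data')
    print dgst(filename)
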
2878 """return true if a string is binary data using diff's heuristic"""
2879 if s and '\0' in s[:4096]:
2884 def binary_file(fn):
2885 """read 4096 bytes from a file named fn, and call binary() on the data"""
2886 return binary(open(fn, 'rb').read(4096))
2889 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2891 This method diffs oldfilename against filename (so filename will
2892 be shown as the new file).
2893 The variable origfilename is used if filename and oldfilename differ
2894 in their names (for instance if a tempfile is used for filename etc.)
2900 oldfilename = filename
2903 olddir = os.path.join(dir, store)
2905 if not origfilename:
2906 origfilename = filename
2908 file1 = os.path.join(olddir, oldfilename) # old/stored original
2909 file2 = os.path.join(dir, filename) # working copy
2911 f1 = open(file1, 'rb')
2915 f2 = open(file2, 'rb')
2919 if binary(s1) or binary(s2):
2920 d = ['Binary file %s has changed\n' % origfilename]
2923 d = difflib.unified_diff(\
2926 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2927 tofile = '%s\t(working copy)' % origfilename)
2929 # if the file doesn't end with a newline, we need to append one in the diff result
2931 for i, line in enumerate(d):
2932 if not line.endswith('\n'):
2933 d[i] += '\n\\ No newline at end of file'
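
# Usage sketch (illustrative): print the diff of one locally modified file
# against the pristine copy kept in the store; ''.join() is used so this
# works whether a string or a list of lines is returned.  Arguments are
# placeholder values.
def _example_print_file_diff(wc_dir, filename, rev):
    sys.stdout.write(''.join(get_source_file_diff(wc_dir, filename, rev)))
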
2939 def make_diff(wc, revision):
2945 diff_hdr = 'Index: %s\n'
2946 diff_hdr += '===================================================================\n'
2948 olddir = os.getcwd()
2952 for file in wc.todo:
2953 if file in wc.skipped:
2955 if file in wc.filenamelist+wc.filenamelist_unvers:
2956 state = wc.status(file)
2958 added_files.append(file)
2960 removed_files.append(file)
2961 elif state == 'M' or state == 'C':
2962 changed_files.append(file)
2964 diff.append('osc: \'%s\' is not under version control' % file)
2966 for file in wc.filenamelist+wc.filenamelist_unvers:
2967 if file in wc.skipped:
2969 state = wc.status(file)
2970 if state == 'M' or state == 'C':
2971 changed_files.append(file)
2973 added_files.append(file)
2975 removed_files.append(file)
2977 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2979 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2980 cmp_pac = Package(tmpdir)
2982 for file in wc.todo:
2983 if file in cmp_pac.skipped:
2985 if file in cmp_pac.filenamelist:
2986 if file in wc.filenamelist:
2987 changed_files.append(file)
2989 diff.append('osc: \'%s\' is not under version control' % file)
2991 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2993 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2995 for file in changed_files:
2996 diff.append(diff_hdr % file)
2998 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
3000 cmp_pac.updatefile(file, revision)
3001 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
3002 cmp_pac.absdir, file))
3003 (fd, tmpfile) = tempfile.mkstemp()
3004 for file in added_files:
3005 diff.append(diff_hdr % file)
3007 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
3008 os.path.dirname(tmpfile), file))
3010 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
3011 os.path.dirname(tmpfile), file))
3013 # FIXME: this is ugly but it cannot be avoided atm
3014 # if a file is deleted via "osc rm file" we should keep the storefile.
3016 if cmp_pac == None and removed_files:
3017 tmpdir = tempfile.mkdtemp()
3019 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
3020 tmp_pac = Package(tmpdir)
3023 for file in removed_files:
3024 diff.append(diff_hdr % file)
3026 tmp_pac.updatefile(file, tmp_pac.rev)
3027 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3028 wc.rev, file, tmp_pac.storedir, file))
3030 cmp_pac.updatefile(file, revision)
3031 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
3032 revision, file, cmp_pac.storedir, file))
3036 delete_dir(cmp_pac.absdir)
3038 delete_dir(tmp_pac.absdir)
3042 def server_diff(apiurl,
3043 old_project, old_package, old_revision,
3044 new_project, new_package, new_revision, unified=False, missingok=False):
3045 query = {'cmd': 'diff', 'expand': '1'}
3047 query['oproject'] = old_project
3049 query['opackage'] = old_package
3051 query['orev'] = old_revision
3053 query['rev'] = new_revision
3055 query['unified'] = 1
3057 query['missingok'] = 1
3059 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
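
# Usage sketch (illustrative): let the server compute an expanded diff
# between two packages instead of diffing locally; all names and revisions
# are placeholder values.
def _example_remote_diff(apiurl):
    return server_diff(apiurl,
                       'openSUSE:Factory', 'foo', None,
                       'home:user:branches:openSUSE:Factory', 'foo', None,
                       unified=True)
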
3065 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3067 creates the plain directory structure for a package dir.
3068 The 'apiurl' parameter is needed for the project dir initialization.
3069 The 'project' and 'package' parameters specify the name of the
3070 project and the package. The optional 'pathname' parameter is used
3071 for printing out the message that a new dir was created (default: 'prj_dir/package').
3072 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3074 prj_dir = prj_dir or project
3076 # FIXME: carefully test each path component of prj_dir,
3077 # if we have a .osc/_files entry at that level.
3078 # -> if so, we have a package/project clash,
3079 # and should rename this path component by appending '.proj'
3080 # and give user a warning message, to discourage such clashes
3082 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3083 if is_package_dir(prj_dir):
3084 # we want this to become a project directory,
3085 # but it already is a package directory.
3086 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3088 if not is_project_dir(prj_dir):
3089 # this directory could exist as a parent directory for one of our earlier
3090 # checked out sub-projects. in this case, we still need to initialize it.
3091 print statfrmt('A', prj_dir)
3092 init_project_dir(apiurl, prj_dir, project)
3094 if is_project_dir(os.path.join(prj_dir, package)):
3095 # the thing exists, but is a project directory and not a package directory
3096 # FIXME: this should be a warning message to discourage package/project clashes
3097 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3099 if not os.path.exists(os.path.join(prj_dir, package)):
3100 print statfrmt('A', pathname)
3101 os.mkdir(os.path.join(prj_dir, package))
3102 os.mkdir(os.path.join(prj_dir, package, store))
3104 return(os.path.join(prj_dir, package))
3107 def checkout_package(apiurl, project, package,
3108 revision=None, pathname=None, prj_obj=None,
3109 expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None):
3111 # the project we're in might be deleted.
3112 # that'll throw an error then.
3113 olddir = os.getcwd()
3115 olddir = os.environ.get("PWD")
3120 if sys.platform[:3] == 'win':
3121 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3123 if conf.config['checkout_no_colon']:
3124 prj_dir = prj_dir.replace(':', '/')
3127 pathname = getTransActPath(os.path.join(prj_dir, package))
3129 # before we create directories and stuff, check if the package actually
3131 show_package_meta(apiurl, project, package)
3135 # try to read from the linkinfo
3136 # if it is a link we use the xsrcmd5 as the revision to be
3139 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3141 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3146 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3147 init_package_dir(apiurl, project, package, store, revision, limit_size=limit_size)
3149 p = Package(package, progress_obj=progress_obj)
3152 for filename in p.filenamelist:
3153 if filename in p.skipped:
3155 if service_files or not filename.startswith('_service:'):
3156 p.updatefile(filename, revision)
3157 # print 'A ', os.path.join(project, package, filename)
3158 print statfrmt('A', os.path.join(pathname, filename))
3159 if conf.config['do_package_tracking']:
3160 # check if we can re-use an existing project object
3162 prj_obj = Project(os.getcwd())
3163 prj_obj.set_state(p.name, ' ')
3164 prj_obj.write_packages()
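
# Usage sketch (illustrative): check out a package into the current
# directory tree, expanding a possible source link; the project and package
# names are placeholder values.
def _example_checkout(apiurl):
    checkout_package(apiurl, 'openSUSE:Factory', 'foo',
                     expand_link=True, prj_dir='openSUSE:Factory')
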
3168 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3169 dst_userid = None, keep_develproject = False):
3171 update pkgmeta with the new name (new_name) and project (new_prj) and set the calling user as the
3172 only maintainer (unless keep_maintainers is set). Additionally remove the
3173 develproject entry (<devel />) unless keep_develproject is true.
3175 root = ET.fromstring(''.join(pkgmeta))
3176 root.set('name', new_name)
3177 root.set('project', new_prj)
3178 if not keep_maintainers:
3179 for person in root.findall('person'):
3181 if not keep_develproject:
3182 for dp in root.findall('devel'):
3184 return ET.tostring(root)
3186 def link_to_branch(apiurl, project, package):
3188 convert a package with a _link + project.diff to a branch
3191 if '_link' in meta_get_filelist(apiurl, project, package):
3192 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3195 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3197 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3199 create a linked package
3200 - "src" is the original package
3201 - "dst" is the "link" package that we are creating here
3206 dst_meta = meta_exists(metatype='pkg',
3207 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3209 create_new=False, apiurl=conf.config['apiurl'])
3211 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3212 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3217 root = ET.fromstring(''.join(dst_meta))
3218 elm = root.find('publish')
3220 elm = ET.SubElement(root, 'publish')
3222 ET.SubElement(elm, 'disable')
3223 dst_meta = ET.tostring(root)
3226 path_args=(dst_project, dst_package),
3228 # create the _link file
3229 # but first, make sure not to overwrite an existing one
3230 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3232 print >>sys.stderr, 'forced overwrite of existing _link file'
3235 print >>sys.stderr, '_link file already exists...! Aborting'
3239 rev = 'rev="%s"' % rev
3244 cicount = 'cicount="%s"' % cicount
3248 print 'Creating _link...',
3249 link_template = """\
3250 <link project="%s" package="%s" %s %s>
3252 <!-- <apply name="patch" /> apply a patch on the source directory -->
3253 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3254 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3255 <!-- <delete>filename</delete> delete a file -->
3258 """ % (src_project, src_package, rev, cicount)
3260 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3261 http_PUT(u, data=link_template)
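
# Usage sketch (illustrative): create a _link from a home project to a
# Factory package without publishing binaries; all names are placeholder
# values.
def _example_link():
    link_pac('openSUSE:Factory', 'foo', 'home:user', 'foo',
             force=False, disable_publish=True)
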
3264 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3267 - "src" is the original package
3268 - "dst" is the "aggregate" package that we are creating here
3269 - "map" is a dictionary SRC => TARGET repository mappings
3274 dst_meta = meta_exists(metatype='pkg',
3275 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3277 create_new=False, apiurl=conf.config['apiurl'])
3279 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3280 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3285 root = ET.fromstring(''.join(dst_meta))
3286 elm = root.find('publish')
3288 elm = ET.SubElement(root, 'publish')
3290 ET.SubElement(elm, 'disable')
3291 dst_meta = ET.tostring(root)
3294 path_args=(dst_project, dst_package),
3297 # create the _aggregate file
3298 # but first, make sure not to overwrite an existing one
3299 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3301 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3304 print 'Creating _aggregate...',
3305 aggregate_template = """\
3307 <aggregate project="%s">
3309 for tgt, src in repo_map.iteritems():
3310 aggregate_template += """\
3311 <repository target="%s" source="%s" />
3314 aggregate_template += """\
3315 <package>%s</package>
3318 """ % ( src_package)
3320 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3321 http_PUT(u, data=aggregate_template)
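
# Usage sketch (illustrative): aggregate the built binaries of a package
# into another project, mapping the source repository onto a local one; all
# values are placeholders.
def _example_aggregate():
    aggregate_pac('openSUSE:Factory', 'foo', 'home:user', 'foo',
                  repo_map={'standard': 'openSUSE_Factory'})
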
3325 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3327 Branch packages defined via attributes (via API call)
3329 query = { 'cmd': 'branch' }
3330 query['attribute'] = attribute
3332 query['target_project'] = targetproject
3334 query['package'] = package
3335 if maintained_update_project_attribute:
3336 query['update_project_attribute'] = maintained_update_project_attribute
3338 u = makeurl(apiurl, ['source'], query=query)
3342 except urllib2.HTTPError, e:
3343 msg = ''.join(e.readlines())
3344 msg = msg.split('<summary>')[1]
3345 msg = msg.split('</summary>')[0]
3346 raise oscerr.APIError(msg)
3349 r = r.split('targetproject">')[1]
3350 r = r.split('</data>')[0]
3354 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False, msg=''):
3356 Branch a package (via API call)
3358 query = { 'cmd': 'branch' }
3360 query['ignoredevel'] = '1'
3364 query['target_project'] = target_project
3366 query['target_package'] = target_package
3368 query['comment'] = msg